././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.7401564 deap-1.4.1/0000755000076500000240000000000014456461475011702 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/INSTALL.txt0000644000076500000240000000123414456461441013542 0ustar00runnerstaff================================ UNIX based platforms and Windows ================================ In order to install DEAP from sources, change directory to the root of deap and type in : $ python setup.py install This will try to install deap into your package directory, you might need permissions to write to this directory. ======= Options ======= Prefix ++++++ You might want to install this software somewhere else by adding the prefix options to the installation. $ python setup.py install --prefix=somewhere/else Other +++++ Other basic options are provided by the building tools of Python, see http://docs.python.org/install/ for more information. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/LICENSE.txt0000644000076500000240000001672514456461441013531 0ustar00runnerstaff GNU LESSER GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. This version of the GNU Lesser General Public License incorporates the terms and conditions of version 3 of the GNU General Public License, supplemented by the additional permissions listed below. 0. Additional Definitions. As used herein, "this License" refers to version 3 of the GNU Lesser General Public License, and the "GNU GPL" refers to version 3 of the GNU General Public License. "The Library" refers to a covered work governed by this License, other than an Application or a Combined Work as defined below. 
An "Application" is any work that makes use of an interface provided by the Library, but which is not otherwise based on the Library. Defining a subclass of a class defined by the Library is deemed a mode of using an interface provided by the Library. A "Combined Work" is a work produced by combining or linking an Application with the Library. The particular version of the Library with which the Combined Work was made is also called the "Linked Version". The "Minimal Corresponding Source" for a Combined Work means the Corresponding Source for the Combined Work, excluding any source code for portions of the Combined Work that, considered in isolation, are based on the Application, and not on the Linked Version. The "Corresponding Application Code" for a Combined Work means the object code and/or source code for the Application, including any data and utility programs needed for reproducing the Combined Work from the Application, but excluding the System Libraries of the Combined Work. 1. Exception to Section 3 of the GNU GPL. You may convey a covered work under sections 3 and 4 of this License without being bound by section 3 of the GNU GPL. 2. Conveying Modified Versions. If you modify a copy of the Library, and, in your modifications, a facility refers to a function or data to be supplied by an Application that uses the facility (other than as an argument passed when the facility is invoked), then you may convey a copy of the modified version: a) under this License, provided that you make a good faith effort to ensure that, in the event an Application does not supply the function or data, the facility still operates, and performs whatever part of its purpose remains meaningful, or b) under the GNU GPL, with none of the additional permissions of this License applicable to that copy. 3. Object Code Incorporating Material from Library Header Files. The object code form of an Application may incorporate material from a header file that is part of the Library. 
You may convey such object code under terms of your choice, provided that, if the incorporated material is not limited to numerical parameters, data structure layouts and accessors, or small macros, inline functions and templates (ten or fewer lines in length), you do both of the following: a) Give prominent notice with each copy of the object code that the Library is used in it and that the Library and its use are covered by this License. b) Accompany the object code with a copy of the GNU GPL and this license document. 4. Combined Works. You may convey a Combined Work under terms of your choice that, taken together, effectively do not restrict modification of the portions of the Library contained in the Combined Work and reverse engineering for debugging such modifications, if you also do each of the following: a) Give prominent notice with each copy of the Combined Work that the Library is used in it and that the Library and its use are covered by this License. b) Accompany the Combined Work with a copy of the GNU GPL and this license document. c) For a Combined Work that displays copyright notices during execution, include the copyright notice for the Library among these notices, as well as a reference directing the user to the copies of the GNU GPL and this license document. d) Do one of the following: 0) Convey the Minimal Corresponding Source under the terms of this License, and the Corresponding Application Code in a form suitable for, and under terms that permit, the user to recombine or relink the Application with a modified version of the Linked Version to produce a modified Combined Work, in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source. 1) Use a suitable shared library mechanism for linking with the Library. 
A suitable mechanism is one that (a) uses at run time a copy of the Library already present on the user's computer system, and (b) will operate properly with a modified version of the Library that is interface-compatible with the Linked Version. e) Provide Installation Information, but only if you would otherwise be required to provide such information under section 6 of the GNU GPL, and only to the extent that such information is necessary to install and execute a modified version of the Combined Work produced by recombining or relinking the Application with a modified version of the Linked Version. (If you use option 4d0, the Installation Information must accompany the Minimal Corresponding Source and Corresponding Application Code. If you use option 4d1, you must provide the Installation Information in the manner specified by section 6 of the GNU GPL for conveying Corresponding Source.) 5. Combined Libraries. You may place library facilities that are a work based on the Library side by side in a single library together with other library facilities that are not Applications and are not covered by this License, and convey such a combined library under terms of your choice, if you do both of the following: a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities, conveyed under the terms of this License. b) Give prominent notice with the combined library that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. 6. Revised Versions of the GNU Lesser General Public License. The Free Software Foundation may publish revised and/or new versions of the GNU Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. 
If the Library as you received it specifies that a certain numbered version of the GNU Lesser General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that published version or of any later version published by the Free Software Foundation. If the Library as you received it does not specify a version number of the GNU Lesser General Public License, you may choose any version of the GNU Lesser General Public License ever published by the Free Software Foundation. If the Library as you received it specifies that a proxy can decide whether future versions of the GNU Lesser General Public License shall apply, that proxy's public statement of acceptance of any version is permanent authorization for you to choose that version for the Library. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/MANIFEST.in0000644000076500000240000000037214456461441013433 0ustar00runnerstaffinclude *.txt include *.md recursive-include deap *.cpp *.c *.hpp *.h recursive-include examples *.py *.csv *.json *.txt *.cpp *.hpp *.h recursive-include doc * recursive-include tests * prune doc/_build global-exclude .DS_Store global-exclude *.pyc ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.7396955 deap-1.4.1/PKG-INFO0000644000076500000240000003166314456461475013010 0ustar00runnerstaffMetadata-Version: 2.1 Name: deap Version: 1.4.1 Summary: Distributed Evolutionary Algorithms in Python Home-page: https://www.github.com/deap Author: deap Development Team Author-email: deap-users@googlegroups.com License: LGPL Keywords: evolutionary algorithms,genetic algorithms,genetic programming,cma-es,ga,gp,es,pso Platform: any Classifier: Development Status :: 4 - Beta Classifier: Intended Audience :: Developers Classifier: Intended Audience :: Education Classifier: Intended Audience :: Science/Research Classifier: License :: OSI Approved :: GNU 
Library or Lesser General Public License (LGPL) Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Topic :: Scientific/Engineering Classifier: Topic :: Software Development Description-Content-Type: text/markdown License-File: LICENSE.txt # DEAP [![Build status](https://travis-ci.org/DEAP/deap.svg?branch=master)](https://travis-ci.org/DEAP/deap) [![Download](https://img.shields.io/pypi/dm/deap.svg)](https://pypi.python.org/pypi/deap) [![Join the chat at https://gitter.im/DEAP/deap](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/DEAP/deap?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) [![Build Status](https://dev.azure.com/fderainville/DEAP/_apis/build/status/DEAP.deap?branchName=master)](https://dev.azure.com/fderainville/DEAP/_build/latest?definitionId=1&branchName=master) [![Documentation Status](https://readthedocs.org/projects/deap/badge/?version=master)](https://deap.readthedocs.io/en/master/?badge=master) DEAP is a novel evolutionary computation framework for rapid prototyping and testing of ideas. It seeks to make algorithms explicit and data structures transparent. It works in perfect harmony with parallelisation mechanisms such as multiprocessing and [SCOOP](https://github.com/soravux/scoop). DEAP includes the following features: * Genetic algorithm using any imaginable representation * List, Array, Set, Dictionary, Tree, Numpy Array, etc. 
* Genetic programming using prefix trees * Loosely typed, Strongly typed * Automatically defined functions * Evolution strategies (including CMA-ES) * Multi-objective optimisation (NSGA-II, NSGA-III, SPEA2, MO-CMA-ES) * Co-evolution (cooperative and competitive) of multiple populations * Parallelization of the evaluations (and more) * Hall of Fame of the best individuals that lived in the population * Checkpoints that take snapshots of a system regularly * Benchmarks module containing most common test functions * Genealogy of an evolution (that is compatible with [NetworkX](https://github.com/networkx/networkx)) * Examples of alternative algorithms : Particle Swarm Optimization, Differential Evolution, Estimation of Distribution Algorithm ## Downloads Following acceptance of [PEP 438](http://www.python.org/dev/peps/pep-0438/) by the Python community, we have moved DEAP's source releases on [PyPI](https://pypi.python.org). You can find the most recent releases at: https://pypi.python.org/pypi/deap/. ## Documentation See the [DEAP User's Guide](http://deap.readthedocs.org/) for DEAP documentation. In order to get the tip documentation, change directory to the `doc` subfolder and type in `make html`, the documentation will be under `_build/html`. You will need [Sphinx](http://sphinx.pocoo.org) to build the documentation. ### Notebooks Also checkout our new [notebook examples](https://github.com/DEAP/notebooks). Using [Jupyter notebooks](http://jupyter.org) you'll be able to navigate and execute each block of code individually and tell what every line is doing. Either, look at the notebooks online using the notebook viewer links at the botom of the page or download the notebooks, navigate to the you download directory and run ```bash jupyter notebook ``` ## Installation We encourage you to use easy_install or pip to install DEAP on your system. Other installation procedure like apt-get, yum, etc. usually provide an outdated version. 
```bash pip install deap ``` The latest version can be installed with ```bash pip install git+https://github.com/DEAP/deap@master ``` If you wish to build from sources, download or clone the repository and type ```bash python setup.py install ``` ## Build Status DEAP build status is available on Travis-CI https://travis-ci.org/DEAP/deap. ## Requirements The most basic features of DEAP requires Python2.6. In order to combine the toolbox and the multiprocessing module Python2.7 is needed for its support to pickle partial functions. CMA-ES requires Numpy, and we recommend matplotlib for visualization of results as it is fully compatible with DEAP's API. Since version 0.8, DEAP is compatible out of the box with Python 3. The installation procedure automatically translates the source to Python 3 with 2to3, however this requires having `setuptools<=58`. It is recommended to use `pip install setuptools==57.5.0` to address this issue. ## Example The following code gives a quick overview how simple it is to implement the Onemax problem optimization with genetic algorithm using DEAP. More examples are provided [here](http://deap.readthedocs.org/en/master/examples/index.html). 
```python import random from deap import creator, base, tools, algorithms creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", list, fitness=creator.FitnessMax) toolbox = base.Toolbox() toolbox.register("attr_bool", random.randint, 0, 1) toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_bool, n=100) toolbox.register("population", tools.initRepeat, list, toolbox.individual) def evalOneMax(individual): return sum(individual), toolbox.register("evaluate", evalOneMax) toolbox.register("mate", tools.cxTwoPoint) toolbox.register("mutate", tools.mutFlipBit, indpb=0.05) toolbox.register("select", tools.selTournament, tournsize=3) population = toolbox.population(n=300) NGEN=40 for gen in range(NGEN): offspring = algorithms.varAnd(population, toolbox, cxpb=0.5, mutpb=0.1) fits = toolbox.map(toolbox.evaluate, offspring) for fit, ind in zip(fits, offspring): ind.fitness.values = fit population = toolbox.select(offspring, k=len(population)) top10 = tools.selBest(population, k=10) ``` ## How to cite DEAP Authors of scientific papers including results generated using DEAP are encouraged to cite the following paper. ```xml @article{DEAP_JMLR2012, author = " F\'elix-Antoine Fortin and Fran\c{c}ois-Michel {De Rainville} and Marc-Andr\'e Gardner and Marc Parizeau and Christian Gagn\'e ", title = { {DEAP}: Evolutionary Algorithms Made Easy }, pages = { 2171--2175 }, volume = { 13 }, month = { jul }, year = { 2012 }, journal = { Journal of Machine Learning Research } } ``` ## Publications on DEAP * François-Michel De Rainville, Félix-Antoine Fortin, Marc-André Gardner, Marc Parizeau and Christian Gagné, "DEAP -- Enabling Nimbler Evolutions", SIGEVOlution, vol. 6, no 2, pp. 17-26, February 2014. [Paper](http://goo.gl/tOrXTp) * Félix-Antoine Fortin, François-Michel De Rainville, Marc-André Gardner, Marc Parizeau and Christian Gagné, "DEAP: Evolutionary Algorithms Made Easy", Journal of Machine Learning Research, vol. 
13, pp. 2171-2175, jul 2012. [Paper](http://goo.gl/amJ3x) * François-Michel De Rainville, Félix-Antoine Fortin, Marc-André Gardner, Marc Parizeau and Christian Gagné, "DEAP: A Python Framework for Evolutionary Algorithms", in !EvoSoft Workshop, Companion proc. of the Genetic and Evolutionary Computation Conference (GECCO 2012), July 07-11 2012. [Paper](http://goo.gl/pXXug) ## Projects using DEAP * Ribaric, T., & Houghten, S. (2017, June). Genetic programming for improved cryptanalysis of elliptic curve cryptosystems. In 2017 IEEE Congress on Evolutionary Computation (CEC) (pp. 419-426). IEEE. * Ellefsen, Kai Olav, Herman Augusto Lepikson, and Jan C. Albiez. "Multiobjective coverage path planning: Enabling automated inspection of complex, real-world structures." Applied Soft Computing 61 (2017): 264-282. * S. Chardon, B. Brangeon, E. Bozonnet, C. Inard (2016), Construction cost and energy performance of single family houses : From integrated design to automated optimization, Automation in Construction, Volume 70, p.1-13. * B. Brangeon, E. Bozonnet, C. Inard (2016), Integrated refurbishment of collective housing and optimization process with real products databases, Building Simulation Optimization, pp. 531–538 Newcastle, England. * Randal S. Olson, Ryan J. Urbanowicz, Peter C. Andrews, Nicole A. Lavender, La Creis Kidd, and Jason H. Moore (2016). Automating biomedical data science through tree-based pipeline optimization. Applications of Evolutionary Computation, pages 123-137. * Randal S. Olson, Nathan Bartley, Ryan J. Urbanowicz, and Jason H. Moore (2016). Evaluation of a Tree-based Pipeline Optimization Tool for Automating Data Science. Proceedings of GECCO 2016, pages 485-492. * Van Geit W, Gevaert M, Chindemi G, Rössert C, Courcol J, Muller EB, Schürmann F, Segev I and Markram H (2016). BluePyOpt: Leveraging open source software and cloud infrastructure to optimise model parameters in neuroscience. Front. Neuroinform. 10:17. 
doi: 10.3389/fninf.2016.00017 https://github.com/BlueBrain/BluePyOpt * Lara-Cabrera, R., Cotta, C. and Fernández-Leiva, A.J. (2014). Geometrical vs topological measures for the evolution of aesthetic maps in a rts game, Entertainment Computing, * Macret, M. and Pasquier, P. (2013). Automatic Tuning of the OP-1 Synthesizer Using a Multi-objective Genetic Algorithm. In Proceedings of the 10th Sound and Music Computing Conference (SMC). (pp 614-621). * Fortin, F. A., Grenier, S., & Parizeau, M. (2013, July). Generalizing the improved run-time complexity algorithm for non-dominated sorting. In Proceeding of the fifteenth annual conference on Genetic and evolutionary computation conference (pp. 615-622). ACM. * Fortin, F. A., & Parizeau, M. (2013, July). Revisiting the NSGA-II crowding-distance computation. In Proceeding of the fifteenth annual conference on Genetic and evolutionary computation conference (pp. 623-630). ACM. * Marc-André Gardner, Christian Gagné, and Marc Parizeau. Estimation of Distribution Algorithm based on Hidden Markov Models for Combinatorial Optimization. in Comp. Proc. Genetic and Evolutionary Computation Conference (GECCO 2013), July 2013. * J. T. Zhai, M. A. Bamakhrama, and T. Stefanov. "Exploiting Just-enough Parallelism when Mapping Streaming Applications in Hard Real-time Systems". Design Automation Conference (DAC 2013), 2013. * V. Akbarzadeh, C. Gagné, M. Parizeau, M. Argany, M. A Mostafavi, "Probabilistic Sensing Model for Sensor Placement Optimization Based on Line-of-Sight Coverage", Accepted in IEEE Transactions on Instrumentation and Measurement, 2012. * M. Reif, F. Shafait, and A. Dengel. "Dataset Generation for Meta-Learning". Proceedings of the German Conference on Artificial Intelligence (KI'12). 2012. * M. T. Ribeiro, A. Lacerda, A. Veloso, and N. Ziviani. "Pareto-Efficient Hybridization for Multi-Objective Recommender Systems". Proceedings of the Conference on Recommanders Systems (!RecSys'12). 2012. * M. Pérez-Ortiz, A. 
Arauzo-Azofra, C. Hervás-Martínez, L. García-Hernández and L. Salas-Morera. "A system learning user preferences for multiobjective optimization of facility layouts". Pr,oceedings on the Int. Conference on Soft Computing Models in Industrial and Environmental Applications (SOCO'12). 2012. * Lévesque, J.C., Durand, A., Gagné, C., and Sabourin, R., Multi-Objective Evolutionary Optimization for Generating Ensembles of Classifiers in the ROC Space, Genetic and Evolutionary Computation Conference (GECCO 2012), 2012. * Marc-André Gardner, Christian Gagné, and Marc Parizeau, "Bloat Control in Genetic Programming with Histogram-based Accept-Reject Method", in Proc. Genetic and Evolutionary Computation Conference (GECCO 2011), 2011. * Vahab Akbarzadeh, Albert Ko, Christian Gagné, and Marc Parizeau, "Topography-Aware Sensor Deployment Optimization with CMA-ES", in Proc. of Parallel Problem Solving from Nature (PPSN 2010), Springer, 2010. * DEAP is used in [TPOT](https://github.com/rhiever/tpot), an open source tool that uses genetic programming to optimize machine learning pipelines. * DEAP is also used in ROS as an optimization package http://www.ros.org/wiki/deap. * DEAP is an optional dependency for [PyXRD](https://github.com/mathijs-dumon/PyXRD), a Python implementation of the matrix algorithm developed for the X-ray diffraction analysis of disordered lamellar structures. * DEAP is used in [glyph](https://github.com/Ambrosys/glyph), a library for symbolic regression with applications to [MLC](https://en.wikipedia.org/wiki/Machine_learning_control). * DEAP is used in [Sklearn-genetic-opt](https://github.com/rodrigo-arenas/Sklearn-genetic-opt), an open source tool that uses evolutionary programming to fine tune machine learning hyperparameters. If you want your project listed here, send us a link and a brief description and we'll be glad to add it. 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/README.md0000644000076500000240000003014114456461441013151 0ustar00runnerstaff# DEAP [![Build status](https://travis-ci.org/DEAP/deap.svg?branch=master)](https://travis-ci.org/DEAP/deap) [![Download](https://img.shields.io/pypi/dm/deap.svg)](https://pypi.python.org/pypi/deap) [![Join the chat at https://gitter.im/DEAP/deap](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/DEAP/deap?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) [![Build Status](https://dev.azure.com/fderainville/DEAP/_apis/build/status/DEAP.deap?branchName=master)](https://dev.azure.com/fderainville/DEAP/_build/latest?definitionId=1&branchName=master) [![Documentation Status](https://readthedocs.org/projects/deap/badge/?version=master)](https://deap.readthedocs.io/en/master/?badge=master) DEAP is a novel evolutionary computation framework for rapid prototyping and testing of ideas. It seeks to make algorithms explicit and data structures transparent. It works in perfect harmony with parallelisation mechanisms such as multiprocessing and [SCOOP](https://github.com/soravux/scoop). DEAP includes the following features: * Genetic algorithm using any imaginable representation * List, Array, Set, Dictionary, Tree, Numpy Array, etc. 
* Genetic programming using prefix trees * Loosely typed, Strongly typed * Automatically defined functions * Evolution strategies (including CMA-ES) * Multi-objective optimisation (NSGA-II, NSGA-III, SPEA2, MO-CMA-ES) * Co-evolution (cooperative and competitive) of multiple populations * Parallelization of the evaluations (and more) * Hall of Fame of the best individuals that lived in the population * Checkpoints that take snapshots of a system regularly * Benchmarks module containing most common test functions * Genealogy of an evolution (that is compatible with [NetworkX](https://github.com/networkx/networkx)) * Examples of alternative algorithms : Particle Swarm Optimization, Differential Evolution, Estimation of Distribution Algorithm ## Downloads Following acceptance of [PEP 438](http://www.python.org/dev/peps/pep-0438/) by the Python community, we have moved DEAP's source releases on [PyPI](https://pypi.python.org). You can find the most recent releases at: https://pypi.python.org/pypi/deap/. ## Documentation See the [DEAP User's Guide](http://deap.readthedocs.org/) for DEAP documentation. In order to get the tip documentation, change directory to the `doc` subfolder and type in `make html`, the documentation will be under `_build/html`. You will need [Sphinx](http://sphinx.pocoo.org) to build the documentation. ### Notebooks Also checkout our new [notebook examples](https://github.com/DEAP/notebooks). Using [Jupyter notebooks](http://jupyter.org) you'll be able to navigate and execute each block of code individually and tell what every line is doing. Either, look at the notebooks online using the notebook viewer links at the botom of the page or download the notebooks, navigate to the you download directory and run ```bash jupyter notebook ``` ## Installation We encourage you to use easy_install or pip to install DEAP on your system. Other installation procedure like apt-get, yum, etc. usually provide an outdated version. 
```bash pip install deap ``` The latest version can be installed with ```bash pip install git+https://github.com/DEAP/deap@master ``` If you wish to build from sources, download or clone the repository and type ```bash python setup.py install ``` ## Build Status DEAP build status is available on Travis-CI https://travis-ci.org/DEAP/deap. ## Requirements The most basic features of DEAP requires Python2.6. In order to combine the toolbox and the multiprocessing module Python2.7 is needed for its support to pickle partial functions. CMA-ES requires Numpy, and we recommend matplotlib for visualization of results as it is fully compatible with DEAP's API. Since version 0.8, DEAP is compatible out of the box with Python 3. The installation procedure automatically translates the source to Python 3 with 2to3, however this requires having `setuptools<=58`. It is recommended to use `pip install setuptools==57.5.0` to address this issue. ## Example The following code gives a quick overview how simple it is to implement the Onemax problem optimization with genetic algorithm using DEAP. More examples are provided [here](http://deap.readthedocs.org/en/master/examples/index.html). 
```python import random from deap import creator, base, tools, algorithms creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", list, fitness=creator.FitnessMax) toolbox = base.Toolbox() toolbox.register("attr_bool", random.randint, 0, 1) toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_bool, n=100) toolbox.register("population", tools.initRepeat, list, toolbox.individual) def evalOneMax(individual): return sum(individual), toolbox.register("evaluate", evalOneMax) toolbox.register("mate", tools.cxTwoPoint) toolbox.register("mutate", tools.mutFlipBit, indpb=0.05) toolbox.register("select", tools.selTournament, tournsize=3) population = toolbox.population(n=300) NGEN=40 for gen in range(NGEN): offspring = algorithms.varAnd(population, toolbox, cxpb=0.5, mutpb=0.1) fits = toolbox.map(toolbox.evaluate, offspring) for fit, ind in zip(fits, offspring): ind.fitness.values = fit population = toolbox.select(offspring, k=len(population)) top10 = tools.selBest(population, k=10) ``` ## How to cite DEAP Authors of scientific papers including results generated using DEAP are encouraged to cite the following paper. ```xml @article{DEAP_JMLR2012, author = " F\'elix-Antoine Fortin and Fran\c{c}ois-Michel {De Rainville} and Marc-Andr\'e Gardner and Marc Parizeau and Christian Gagn\'e ", title = { {DEAP}: Evolutionary Algorithms Made Easy }, pages = { 2171--2175 }, volume = { 13 }, month = { jul }, year = { 2012 }, journal = { Journal of Machine Learning Research } } ``` ## Publications on DEAP * François-Michel De Rainville, Félix-Antoine Fortin, Marc-André Gardner, Marc Parizeau and Christian Gagné, "DEAP -- Enabling Nimbler Evolutions", SIGEVOlution, vol. 6, no 2, pp. 17-26, February 2014. [Paper](http://goo.gl/tOrXTp) * Félix-Antoine Fortin, François-Michel De Rainville, Marc-André Gardner, Marc Parizeau and Christian Gagné, "DEAP: Evolutionary Algorithms Made Easy", Journal of Machine Learning Research, vol. 
13, pp. 2171-2175, jul 2012. [Paper](http://goo.gl/amJ3x) * François-Michel De Rainville, Félix-Antoine Fortin, Marc-André Gardner, Marc Parizeau and Christian Gagné, "DEAP: A Python Framework for Evolutionary Algorithms", in !EvoSoft Workshop, Companion proc. of the Genetic and Evolutionary Computation Conference (GECCO 2012), July 07-11 2012. [Paper](http://goo.gl/pXXug) ## Projects using DEAP * Ribaric, T., & Houghten, S. (2017, June). Genetic programming for improved cryptanalysis of elliptic curve cryptosystems. In 2017 IEEE Congress on Evolutionary Computation (CEC) (pp. 419-426). IEEE. * Ellefsen, Kai Olav, Herman Augusto Lepikson, and Jan C. Albiez. "Multiobjective coverage path planning: Enabling automated inspection of complex, real-world structures." Applied Soft Computing 61 (2017): 264-282. * S. Chardon, B. Brangeon, E. Bozonnet, C. Inard (2016), Construction cost and energy performance of single family houses : From integrated design to automated optimization, Automation in Construction, Volume 70, p.1-13. * B. Brangeon, E. Bozonnet, C. Inard (2016), Integrated refurbishment of collective housing and optimization process with real products databases, Building Simulation Optimization, pp. 531–538 Newcastle, England. * Randal S. Olson, Ryan J. Urbanowicz, Peter C. Andrews, Nicole A. Lavender, La Creis Kidd, and Jason H. Moore (2016). Automating biomedical data science through tree-based pipeline optimization. Applications of Evolutionary Computation, pages 123-137. * Randal S. Olson, Nathan Bartley, Ryan J. Urbanowicz, and Jason H. Moore (2016). Evaluation of a Tree-based Pipeline Optimization Tool for Automating Data Science. Proceedings of GECCO 2016, pages 485-492. * Van Geit W, Gevaert M, Chindemi G, Rössert C, Courcol J, Muller EB, Schürmann F, Segev I and Markram H (2016). BluePyOpt: Leveraging open source software and cloud infrastructure to optimise model parameters in neuroscience. Front. Neuroinform. 10:17. 
doi: 10.3389/fninf.2016.00017 https://github.com/BlueBrain/BluePyOpt * Lara-Cabrera, R., Cotta, C. and Fernández-Leiva, A.J. (2014). Geometrical vs topological measures for the evolution of aesthetic maps in a rts game, Entertainment Computing, * Macret, M. and Pasquier, P. (2013). Automatic Tuning of the OP-1 Synthesizer Using a Multi-objective Genetic Algorithm. In Proceedings of the 10th Sound and Music Computing Conference (SMC). (pp 614-621). * Fortin, F. A., Grenier, S., & Parizeau, M. (2013, July). Generalizing the improved run-time complexity algorithm for non-dominated sorting. In Proceeding of the fifteenth annual conference on Genetic and evolutionary computation conference (pp. 615-622). ACM. * Fortin, F. A., & Parizeau, M. (2013, July). Revisiting the NSGA-II crowding-distance computation. In Proceeding of the fifteenth annual conference on Genetic and evolutionary computation conference (pp. 623-630). ACM. * Marc-André Gardner, Christian Gagné, and Marc Parizeau. Estimation of Distribution Algorithm based on Hidden Markov Models for Combinatorial Optimization. in Comp. Proc. Genetic and Evolutionary Computation Conference (GECCO 2013), July 2013. * J. T. Zhai, M. A. Bamakhrama, and T. Stefanov. "Exploiting Just-enough Parallelism when Mapping Streaming Applications in Hard Real-time Systems". Design Automation Conference (DAC 2013), 2013. * V. Akbarzadeh, C. Gagné, M. Parizeau, M. Argany, M. A Mostafavi, "Probabilistic Sensing Model for Sensor Placement Optimization Based on Line-of-Sight Coverage", Accepted in IEEE Transactions on Instrumentation and Measurement, 2012. * M. Reif, F. Shafait, and A. Dengel. "Dataset Generation for Meta-Learning". Proceedings of the German Conference on Artificial Intelligence (KI'12). 2012. * M. T. Ribeiro, A. Lacerda, A. Veloso, and N. Ziviani. "Pareto-Efficient Hybridization for Multi-Objective Recommender Systems". Proceedings of the Conference on Recommanders Systems (!RecSys'12). 2012. * M. Pérez-Ortiz, A. 
Arauzo-Azofra, C. Hervás-Martínez, L. García-Hernández and L. Salas-Morera. "A system learning user preferences for multiobjective optimization of facility layouts". Proceedings on the Int. Conference on Soft Computing Models in Industrial and Environmental Applications (SOCO'12). 2012. * Lévesque, J.C., Durand, A., Gagné, C., and Sabourin, R., Multi-Objective Evolutionary Optimization for Generating Ensembles of Classifiers in the ROC Space, Genetic and Evolutionary Computation Conference (GECCO 2012), 2012. * Marc-André Gardner, Christian Gagné, and Marc Parizeau, "Bloat Control in Genetic Programming with Histogram-based Accept-Reject Method", in Proc. Genetic and Evolutionary Computation Conference (GECCO 2011), 2011. * Vahab Akbarzadeh, Albert Ko, Christian Gagné, and Marc Parizeau, "Topography-Aware Sensor Deployment Optimization with CMA-ES", in Proc. of Parallel Problem Solving from Nature (PPSN 2010), Springer, 2010. * DEAP is used in [TPOT](https://github.com/rhiever/tpot), an open source tool that uses genetic programming to optimize machine learning pipelines. * DEAP is also used in ROS as an optimization package http://www.ros.org/wiki/deap. * DEAP is an optional dependency for [PyXRD](https://github.com/mathijs-dumon/PyXRD), a Python implementation of the matrix algorithm developed for the X-ray diffraction analysis of disordered lamellar structures. * DEAP is used in [glyph](https://github.com/Ambrosys/glyph), a library for symbolic regression with applications to [MLC](https://en.wikipedia.org/wiki/Machine_learning_control). * DEAP is used in [Sklearn-genetic-opt](https://github.com/rodrigo-arenas/Sklearn-genetic-opt), an open source tool that uses evolutionary programming to fine tune machine learning hyperparameters. If you want your project listed here, send us a link and a brief description and we'll be glad to add it. 
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.6244712 deap-1.4.1/deap/0000755000076500000240000000000014456461475012613 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/deap/__init__.py0000644000076500000240000000137114456461441014717 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . __author__ = "DEAP Team" __version__ = "1.4" __revision__ = "1.4.1" ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/deap/algorithms.py0000644000076500000240000005473014456461441015340 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . 
"""The :mod:`algorithms` module is intended to contain some specific algorithms in order to execute very common evolutionary algorithms. The method used here are more for convenience than reference as the implementation of every evolutionary algorithm may vary infinitely. Most of the algorithms in this module use operators registered in the toolbox. Generally, the keyword used are :meth:`mate` for crossover, :meth:`mutate` for mutation, :meth:`~deap.select` for selection and :meth:`evaluate` for evaluation. You are encouraged to write your own algorithms in order to make them do what you really want them to do. """ import random from . import tools def varAnd(population, toolbox, cxpb, mutpb): r"""Part of an evolutionary algorithm applying only the variation part (crossover **and** mutation). The modified individuals have their fitness invalidated. The individuals are cloned so returned population is independent of the input population. :param population: A list of individuals to vary. :param toolbox: A :class:`~deap.base.Toolbox` that contains the evolution operators. :param cxpb: The probability of mating two individuals. :param mutpb: The probability of mutating an individual. :returns: A list of varied individuals that are independent of their parents. The variation goes as follow. First, the parental population :math:`P_\mathrm{p}` is duplicated using the :meth:`toolbox.clone` method and the result is put into the offspring population :math:`P_\mathrm{o}`. A first loop over :math:`P_\mathrm{o}` is executed to mate pairs of consecutive individuals. According to the crossover probability *cxpb*, the individuals :math:`\mathbf{x}_i` and :math:`\mathbf{x}_{i+1}` are mated using the :meth:`toolbox.mate` method. The resulting children :math:`\mathbf{y}_i` and :math:`\mathbf{y}_{i+1}` replace their respective parents in :math:`P_\mathrm{o}`. A second loop over the resulting :math:`P_\mathrm{o}` is executed to mutate every individual with a probability *mutpb*. 
When an individual is mutated it replaces its not mutated version in :math:`P_\mathrm{o}`. The resulting :math:`P_\mathrm{o}` is returned. This variation is named *And* because of its propensity to apply both crossover and mutation on the individuals. Note that both operators are not applied systematically, the resulting individuals can be generated from crossover only, mutation only, crossover and mutation, and reproduction according to the given probabilities. Both probabilities should be in :math:`[0, 1]`. """ offspring = [toolbox.clone(ind) for ind in population] # Apply crossover and mutation on the offspring for i in range(1, len(offspring), 2): if random.random() < cxpb: offspring[i - 1], offspring[i] = toolbox.mate(offspring[i - 1], offspring[i]) del offspring[i - 1].fitness.values, offspring[i].fitness.values for i in range(len(offspring)): if random.random() < mutpb: offspring[i], = toolbox.mutate(offspring[i]) del offspring[i].fitness.values return offspring def eaSimple(population, toolbox, cxpb, mutpb, ngen, stats=None, halloffame=None, verbose=__debug__): """This algorithm reproduce the simplest evolutionary algorithm as presented in chapter 7 of [Back2000]_. :param population: A list of individuals. :param toolbox: A :class:`~deap.base.Toolbox` that contains the evolution operators. :param cxpb: The probability of mating two individuals. :param mutpb: The probability of mutating an individual. :param ngen: The number of generation. :param stats: A :class:`~deap.tools.Statistics` object that is updated inplace, optional. :param halloffame: A :class:`~deap.tools.HallOfFame` object that will contain the best individuals, optional. :param verbose: Whether or not to log the statistics. :returns: The final population :returns: A class:`~deap.tools.Logbook` with the statistics of the evolution The algorithm takes in a population and evolves it in place using the :meth:`varAnd` method. 
It returns the optimized population and a :class:`~deap.tools.Logbook` with the statistics of the evolution. The logbook will contain the generation number, the number of evaluations for each generation and the statistics if a :class:`~deap.tools.Statistics` is given as argument. The *cxpb* and *mutpb* arguments are passed to the :func:`varAnd` function. The pseudocode goes as follow :: evaluate(population) for g in range(ngen): population = select(population, len(population)) offspring = varAnd(population, toolbox, cxpb, mutpb) evaluate(offspring) population = offspring As stated in the pseudocode above, the algorithm goes as follow. First, it evaluates the individuals with an invalid fitness. Second, it enters the generational loop where the selection procedure is applied to entirely replace the parental population. The 1:1 replacement ratio of this algorithm **requires** the selection procedure to be stochastic and to select multiple times the same individual, for example, :func:`~deap.tools.selTournament` and :func:`~deap.tools.selRoulette`. Third, it applies the :func:`varAnd` function to produce the next generation population. Fourth, it evaluates the new individuals and compute the statistics on this population. Finally, when *ngen* generations are done, the algorithm returns a tuple with the final population and a :class:`~deap.tools.Logbook` of the evolution. .. note:: Using a non-stochastic selection method will result in no selection as the operator selects *n* individuals from a pool of *n*. This function expects the :meth:`toolbox.mate`, :meth:`toolbox.mutate`, :meth:`toolbox.select` and :meth:`toolbox.evaluate` aliases to be registered in the toolbox. .. [Back2000] Back, Fogel and Michalewicz, "Evolutionary Computation 1 : Basic Algorithms and Operators", 2000. 
""" logbook = tools.Logbook() logbook.header = ['gen', 'nevals'] + (stats.fields if stats else []) # Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in population if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit if halloffame is not None: halloffame.update(population) record = stats.compile(population) if stats else {} logbook.record(gen=0, nevals=len(invalid_ind), **record) if verbose: print(logbook.stream) # Begin the generational process for gen in range(1, ngen + 1): # Select the next generation individuals offspring = toolbox.select(population, len(population)) # Vary the pool of individuals offspring = varAnd(offspring, toolbox, cxpb, mutpb) # Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in offspring if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit # Update the hall of fame with the generated individuals if halloffame is not None: halloffame.update(offspring) # Replace the current population by the offspring population[:] = offspring # Append the current generation statistics to the logbook record = stats.compile(population) if stats else {} logbook.record(gen=gen, nevals=len(invalid_ind), **record) if verbose: print(logbook.stream) return population, logbook def varOr(population, toolbox, lambda_, cxpb, mutpb): r"""Part of an evolutionary algorithm applying only the variation part (crossover, mutation **or** reproduction). The modified individuals have their fitness invalidated. The individuals are cloned so returned population is independent of the input population. :param population: A list of individuals to vary. :param toolbox: A :class:`~deap.base.Toolbox` that contains the evolution operators. 
:param lambda\_: The number of children to produce :param cxpb: The probability of mating two individuals. :param mutpb: The probability of mutating an individual. :returns: The final population. The variation goes as follow. On each of the *lambda_* iteration, it selects one of the three operations; crossover, mutation or reproduction. In the case of a crossover, two individuals are selected at random from the parental population :math:`P_\mathrm{p}`, those individuals are cloned using the :meth:`toolbox.clone` method and then mated using the :meth:`toolbox.mate` method. Only the first child is appended to the offspring population :math:`P_\mathrm{o}`, the second child is discarded. In the case of a mutation, one individual is selected at random from :math:`P_\mathrm{p}`, it is cloned and then mutated using using the :meth:`toolbox.mutate` method. The resulting mutant is appended to :math:`P_\mathrm{o}`. In the case of a reproduction, one individual is selected at random from :math:`P_\mathrm{p}`, cloned and appended to :math:`P_\mathrm{o}`. This variation is named *Or* because an offspring will never result from both operations crossover and mutation. The sum of both probabilities shall be in :math:`[0, 1]`, the reproduction probability is 1 - *cxpb* - *mutpb*. 
""" assert (cxpb + mutpb) <= 1.0, ( "The sum of the crossover and mutation probabilities must be smaller " "or equal to 1.0.") offspring = [] for _ in range(lambda_): op_choice = random.random() if op_choice < cxpb: # Apply crossover ind1, ind2 = [toolbox.clone(i) for i in random.sample(population, 2)] ind1, ind2 = toolbox.mate(ind1, ind2) del ind1.fitness.values offspring.append(ind1) elif op_choice < cxpb + mutpb: # Apply mutation ind = toolbox.clone(random.choice(population)) ind, = toolbox.mutate(ind) del ind.fitness.values offspring.append(ind) else: # Apply reproduction offspring.append(random.choice(population)) return offspring def eaMuPlusLambda(population, toolbox, mu, lambda_, cxpb, mutpb, ngen, stats=None, halloffame=None, verbose=__debug__): r"""This is the :math:`(\mu + \lambda)` evolutionary algorithm. :param population: A list of individuals. :param toolbox: A :class:`~deap.base.Toolbox` that contains the evolution operators. :param mu: The number of individuals to select for the next generation. :param lambda\_: The number of children to produce at each generation. :param cxpb: The probability that an offspring is produced by crossover. :param mutpb: The probability that an offspring is produced by mutation. :param ngen: The number of generation. :param stats: A :class:`~deap.tools.Statistics` object that is updated inplace, optional. :param halloffame: A :class:`~deap.tools.HallOfFame` object that will contain the best individuals, optional. :param verbose: Whether or not to log the statistics. :returns: The final population :returns: A class:`~deap.tools.Logbook` with the statistics of the evolution. The algorithm takes in a population and evolves it in place using the :func:`varOr` function. It returns the optimized population and a :class:`~deap.tools.Logbook` with the statistics of the evolution. 
The logbook will contain the generation number, the number of evaluations for each generation and the statistics if a :class:`~deap.tools.Statistics` is given as argument. The *cxpb* and *mutpb* arguments are passed to the :func:`varOr` function. The pseudocode goes as follow :: evaluate(population) for g in range(ngen): offspring = varOr(population, toolbox, lambda_, cxpb, mutpb) evaluate(offspring) population = select(population + offspring, mu) First, the individuals having an invalid fitness are evaluated. Second, the evolutionary loop begins by producing *lambda_* offspring from the population, the offspring are generated by the :func:`varOr` function. The offspring are then evaluated and the next generation population is selected from both the offspring **and** the population. Finally, when *ngen* generations are done, the algorithm returns a tuple with the final population and a :class:`~deap.tools.Logbook` of the evolution. This function expects :meth:`toolbox.mate`, :meth:`toolbox.mutate`, :meth:`toolbox.select` and :meth:`toolbox.evaluate` aliases to be registered in the toolbox. This algorithm uses the :func:`varOr` variation. 
""" logbook = tools.Logbook() logbook.header = ['gen', 'nevals'] + (stats.fields if stats else []) # Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in population if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit if halloffame is not None: halloffame.update(population) record = stats.compile(population) if stats is not None else {} logbook.record(gen=0, nevals=len(invalid_ind), **record) if verbose: print(logbook.stream) # Begin the generational process for gen in range(1, ngen + 1): # Vary the population offspring = varOr(population, toolbox, lambda_, cxpb, mutpb) # Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in offspring if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit # Update the hall of fame with the generated individuals if halloffame is not None: halloffame.update(offspring) # Select the next generation population population[:] = toolbox.select(population + offspring, mu) # Update the statistics with the new population record = stats.compile(population) if stats is not None else {} logbook.record(gen=gen, nevals=len(invalid_ind), **record) if verbose: print(logbook.stream) return population, logbook def eaMuCommaLambda(population, toolbox, mu, lambda_, cxpb, mutpb, ngen, stats=None, halloffame=None, verbose=__debug__): r"""This is the :math:`(\mu~,~\lambda)` evolutionary algorithm. :param population: A list of individuals. :param toolbox: A :class:`~deap.base.Toolbox` that contains the evolution operators. :param mu: The number of individuals to select for the next generation. :param lambda\_: The number of children to produce at each generation. :param cxpb: The probability that an offspring is produced by crossover. :param mutpb: The probability that an offspring is produced by mutation. 
:param ngen: The number of generation. :param stats: A :class:`~deap.tools.Statistics` object that is updated inplace, optional. :param halloffame: A :class:`~deap.tools.HallOfFame` object that will contain the best individuals, optional. :param verbose: Whether or not to log the statistics. :returns: The final population :returns: A class:`~deap.tools.Logbook` with the statistics of the evolution The algorithm takes in a population and evolves it in place using the :func:`varOr` function. It returns the optimized population and a :class:`~deap.tools.Logbook` with the statistics of the evolution. The logbook will contain the generation number, the number of evaluations for each generation and the statistics if a :class:`~deap.tools.Statistics` is given as argument. The *cxpb* and *mutpb* arguments are passed to the :func:`varOr` function. The pseudocode goes as follow :: evaluate(population) for g in range(ngen): offspring = varOr(population, toolbox, lambda_, cxpb, mutpb) evaluate(offspring) population = select(offspring, mu) First, the individuals having an invalid fitness are evaluated. Second, the evolutionary loop begins by producing *lambda_* offspring from the population, the offspring are generated by the :func:`varOr` function. The offspring are then evaluated and the next generation population is selected from **only** the offspring. Finally, when *ngen* generations are done, the algorithm returns a tuple with the final population and a :class:`~deap.tools.Logbook` of the evolution. .. note:: Care must be taken when the lambda:mu ratio is 1 to 1 as a non-stochastic selection will result in no selection at all as the operator selects *lambda* individuals from a pool of *mu*. This function expects :meth:`toolbox.mate`, :meth:`toolbox.mutate`, :meth:`toolbox.select` and :meth:`toolbox.evaluate` aliases to be registered in the toolbox. This algorithm uses the :func:`varOr` variation. """ assert lambda_ >= mu, "lambda must be greater or equal to mu." 
# Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in population if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit if halloffame is not None: halloffame.update(population) logbook = tools.Logbook() logbook.header = ['gen', 'nevals'] + (stats.fields if stats else []) record = stats.compile(population) if stats is not None else {} logbook.record(gen=0, nevals=len(invalid_ind), **record) if verbose: print(logbook.stream) # Begin the generational process for gen in range(1, ngen + 1): # Vary the population offspring = varOr(population, toolbox, lambda_, cxpb, mutpb) # Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in offspring if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit # Update the hall of fame with the generated individuals if halloffame is not None: halloffame.update(offspring) # Select the next generation population population[:] = toolbox.select(offspring, mu) # Update the statistics with the new population record = stats.compile(population) if stats is not None else {} logbook.record(gen=gen, nevals=len(invalid_ind), **record) if verbose: print(logbook.stream) return population, logbook def eaGenerateUpdate(toolbox, ngen, halloffame=None, stats=None, verbose=__debug__): """This is algorithm implements the ask-tell model proposed in [Colette2010]_, where ask is called `generate` and tell is called `update`. :param toolbox: A :class:`~deap.base.Toolbox` that contains the evolution operators. :param ngen: The number of generation. :param stats: A :class:`~deap.tools.Statistics` object that is updated inplace, optional. :param halloffame: A :class:`~deap.tools.HallOfFame` object that will contain the best individuals, optional. :param verbose: Whether or not to log the statistics. 
:returns: The final population :returns: A class:`~deap.tools.Logbook` with the statistics of the evolution The algorithm generates the individuals using the :func:`toolbox.generate` function and updates the generation method with the :func:`toolbox.update` function. It returns the optimized population and a :class:`~deap.tools.Logbook` with the statistics of the evolution. The logbook will contain the generation number, the number of evaluations for each generation and the statistics if a :class:`~deap.tools.Statistics` is given as argument. The pseudocode goes as follow :: for g in range(ngen): population = toolbox.generate() evaluate(population) toolbox.update(population) This function expects :meth:`toolbox.generate` and :meth:`toolbox.evaluate` aliases to be registered in the toolbox. .. [Colette2010] Collette, Y., N. Hansen, G. Pujol, D. Salazar Aponte and R. Le Riche (2010). On Object-Oriented Programming of Optimizers - Examples in Scilab. In P. Breitkopf and R. F. Coelho, eds.: Multidisciplinary Design Optimization in Computational Mechanics, Wiley, pp. 527-565; """ logbook = tools.Logbook() logbook.header = ['gen', 'nevals'] + (stats.fields if stats else []) for gen in range(ngen): # Generate a new population population = toolbox.generate() # Evaluate the individuals fitnesses = toolbox.map(toolbox.evaluate, population) for ind, fit in zip(population, fitnesses): ind.fitness.values = fit if halloffame is not None: halloffame.update(population) # Update the strategy with the evaluated individuals toolbox.update(population) record = stats.compile(population) if stats is not None else {} logbook.record(gen=gen, nevals=len(population), **record) if verbose: print(logbook.stream) return population, logbook ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/deap/base.py0000644000076500000240000003352614456461441014101 0ustar00runnerstaff# This file is part of DEAP. 
#
# DEAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# DEAP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with DEAP. If not, see <http://www.gnu.org/licenses/>.

"""The :mod:`~deap.base` module provides basic structures to build
evolutionary algorithms. It contains the :class:`~deap.base.Toolbox`, useful
to store evolutionary operators, and a virtual :class:`~deap.base.Fitness`
class used as base class, for the fitness member of any individual. """

import sys

# Sequence moved to collections.abc in Python 3.3; the fallback keeps
# compatibility with older interpreters.
try:
    from collections.abc import Sequence
except ImportError:
    from collections import Sequence

from copy import deepcopy
from functools import partial
from operator import mul, truediv


class Toolbox(object):
    """A toolbox for evolution that contains the evolutionary operators. At
    first the toolbox contains a :meth:`~deap.toolbox.clone` method that
    duplicates any element it is passed as argument, this method defaults to
    the :func:`copy.deepcopy` function, and a :meth:`~deap.toolbox.map`
    method that applies the function given as first argument to every items
    of the iterables given as next arguments, this method defaults to the
    :func:`map` function. You may populate the toolbox with any other
    function by using the :meth:`~deap.base.Toolbox.register` method.

    Concrete usages of the toolbox are shown for initialization in the
    :ref:`creating-types` tutorial and for tools container in the
    :ref:`next-step` tutorial.
    """

    def __init__(self):
        # Built-in aliases available on every toolbox.
        self.register("clone", deepcopy)
        self.register("map", map)

    def register(self, alias, function, *args, **kargs):
        """Register a *function* in the toolbox under the name *alias*. You
        may provide default arguments that will be passed automatically when
        calling the registered function. Fixed arguments can then be
        overridden at function call time.

        :param alias: The name the operator will take in the toolbox. If the
                      alias already exist it will overwrite the operator
                      already present.
        :param function: The function to which refer the alias.
        :param argument: One or more argument (and keyword argument) to pass
                         automatically to the registered function when called,
                         optional.

        The following code block is an example of how the toolbox is used. ::

            >>> def func(a, b, c=3):
            ...     print(a, b, c)
            ...
            >>> tools = Toolbox()
            >>> tools.register("myFunc", func, 2, c=4)
            >>> tools.myFunc(3)
            2 3 4

        The registered function will be given the attributes :attr:`__name__`
        set to the alias and :attr:`__doc__` set to the original function's
        documentation. The :attr:`__dict__` attribute will also be updated
        with the original function's instance dictionary, if any.
        """
        # The alias is a functools.partial carrying the fixed arguments.
        pfunc = partial(function, *args, **kargs)
        pfunc.__name__ = alias
        pfunc.__doc__ = function.__doc__

        if hasattr(function, "__dict__") and not isinstance(function, type):
            # Some functions don't have a dictionary, in these cases
            # simply don't copy it. Moreover, if the function is actually
            # a class, we do not want to copy the dictionary.
            pfunc.__dict__.update(function.__dict__.copy())

        setattr(self, alias, pfunc)

    def unregister(self, alias):
        """Unregister *alias* from the toolbox.

        :param alias: The name of the operator to remove from the toolbox.
        """
        delattr(self, alias)

    def decorate(self, alias, *decorators):
        """Decorate *alias* with the specified *decorators*, *alias* has to
        be a registered function in the current toolbox.

        :param alias: The name of the operator to decorate.
        :param decorator: One or more function decorator. If multiple
                          decorators are provided they will be applied in
                          order, with the last decorator decorating all the
                          others.

        .. note::
            Decorate a function using the toolbox makes it unpicklable, and
            will produce an error on pickling. Although this limitation is not
            relevant in most cases, it may have an impact on distributed
            environments like multiprocessing. A function can still be
            decorated manually before it is added to the toolbox (using the @
            notation) in order to be picklable.
        """
        # Unwrap the partial, decorate the underlying function, then
        # re-register it with the same fixed arguments.
        pfunc = getattr(self, alias)
        function, args, kargs = pfunc.func, pfunc.args, pfunc.keywords
        for decorator in decorators:
            function = decorator(function)
        self.register(alias, function, *args, **kargs)


class Fitness(object):
    """The fitness is a measure of quality of a solution. If *values* are
    provided as a tuple, the fitness is initialized using those values,
    otherwise it is empty (or invalid).

    :param values: The initial values of the fitness as a tuple, optional.

    Fitnesses may be compared using the ``>``, ``<``, ``>=``, ``<=``, ``==``,
    ``!=``. The comparison of those operators is made lexicographically.
    Maximization and minimization are taken care off by a multiplication
    between the :attr:`weights` and the fitness :attr:`values`. The comparison
    can be made between fitnesses of different size, if the fitnesses are
    equal until the extra elements, the longer fitness will be superior to the
    shorter.

    Different types of fitnesses are created in the :ref:`creating-types`
    tutorial.

    .. note::
       When comparing fitness values that are **minimized**, ``a > b`` will
       return :data:`True` if *a* is **smaller** than *b*.
    """

    weights = None
    """The weights are used in the fitness comparison. They are shared among
    all fitnesses of the same type. When subclassing :class:`Fitness`, the
    weights must be defined as a tuple where each element is associated to an
    objective. A negative weight element corresponds to the minimization of
    the associated objective and positive weight to the maximization.

    .. note::
        If weights is not defined during subclassing, the following error will
        occur at instantiation of a subclass fitness object:

        ``TypeError: Can't instantiate abstract <class Fitness[...]> with
        abstract attribute weights.``
    """

    wvalues = ()
    """Contains the weighted values of the fitness, the multiplication with
    the weights is made when the values are set via the property
    :attr:`values`. Multiplication is made on setting of the values for
    efficiency.

    Generally it is unnecessary to manipulate wvalues as it is an internal
    attribute of the fitness used in the comparison operators.
    """

    def __init__(self, values=()):
        # weights must be supplied by a subclass; refusing to instantiate
        # otherwise makes Fitness behave like an abstract base class.
        if self.weights is None:
            raise TypeError("Can't instantiate abstract %r with abstract "
                            "attribute weights." % (self.__class__))

        if not isinstance(self.weights, Sequence):
            raise TypeError("Attribute weights of %r must be a sequence."
                            % self.__class__)

        if len(values) > 0:
            self.values = values

    def getValues(self):
        # Recover the unweighted values by dividing out the weights.
        return tuple(map(truediv, self.wvalues, self.weights))

    def setValues(self, values):
        assert len(values) == len(self.weights), "Assigned values have not the same length than fitness weights"
        try:
            # Store the weighted values; comparisons operate on wvalues.
            self.wvalues = tuple(map(mul, values, self.weights))
        except TypeError:
            _, _, traceback = sys.exc_info()
            raise TypeError("Both weights and assigned values must be a "
                            "sequence of numbers when assigning to values of "
                            "%r. Currently assigning value(s) %r of %r to a "
                            "fitness with weights %s."
                            % (self.__class__, values, type(values), self.weights)).with_traceback(traceback)

    def delValues(self):
        # An empty wvalues tuple marks the fitness as invalid.
        self.wvalues = ()

    values = property(getValues, setValues, delValues,
                      ("Fitness values. Use directly ``individual.fitness.values = values`` "
                       "in order to set the fitness and ``del individual.fitness.values`` "
                       "in order to clear (invalidate) the fitness. The (unweighted) fitness "
                       "can be directly accessed via ``individual.fitness.values``."))

    def dominates(self, other, obj=slice(None)):
        """Return true if each objective of *self* is not strictly worse than
        the corresponding objective of *other* and at least one objective is
        strictly better.

        :param obj: Slice indicating on which objectives the domination is
                    tested. The default value is `slice(None)`, representing
                    every objectives.
        """
        not_equal = False
        for self_wvalue, other_wvalue in zip(self.wvalues[obj], other.wvalues[obj]):
            if self_wvalue > other_wvalue:
                not_equal = True
            elif self_wvalue < other_wvalue:
                return False
        return not_equal

    @property
    def valid(self):
        """Assess if a fitness is valid or not."""
        return len(self.wvalues) != 0

    def __hash__(self):
        return hash(self.wvalues)

    def __gt__(self, other):
        return not self.__le__(other)

    def __ge__(self, other):
        return not self.__lt__(other)

    def __le__(self, other):
        return self.wvalues <= other.wvalues

    def __lt__(self, other):
        return self.wvalues < other.wvalues

    def __eq__(self, other):
        return self.wvalues == other.wvalues

    def __ne__(self, other):
        return not self.__eq__(other)

    def __deepcopy__(self, memo):
        """Replace the basic deepcopy function with a faster one.

        It assumes that the elements in the :attr:`values` tuple are immutable
        and the fitness does not contain any other object than :attr:`values`
        and :attr:`weights`.
        """
        copy_ = self.__class__()
        copy_.wvalues = self.wvalues
        return copy_

    def __str__(self):
        """Return the values of the Fitness object."""
        return str(self.values if self.valid else tuple())

    def __repr__(self):
        """Return the Python code to build a copy of the object."""
        return "%s.%s(%r)" % (self.__module__, self.__class__.__name__,
                              self.values if self.valid else tuple())


def _violates_constraint(fitness):
    # A fitness counts as constraint-violating only when it is invalid AND
    # carries a non-None constraint_violation whose entries sum to > 0.
    return not fitness.valid \
        and fitness.constraint_violation is not None \
        and sum(fitness.constraint_violation) > 0


class ConstrainedFitness(Fitness):
    """A :class:`Fitness` that additionally carries a *constraint_violation*
    measure. Comparison operators and :meth:`dominates` treat a
    constraint-violating fitness as worse than a non-violating one; the exact
    result when both sides violate differs per operator (see the individual
    methods below).
    """

    def __init__(self, values=(), constraint_violation=None):
        super(ConstrainedFitness, self).__init__(values)
        self.constraint_violation = constraint_violation

    @Fitness.values.deleter
    def values(self):
        # Invalidating the fitness also clears the recorded violations.
        self.wvalues = ()
        self.constraint_violation = None

    def __gt__(self, other):
        return not self.__le__(other)

    def __ge__(self, other):
        return not self.__lt__(other)

    def __le__(self, other):
        self_violates_constraints = _violates_constraint(self)
        other_violates_constraints = _violates_constraint(other)

        # NOTE: when both sides violate constraints, __le__ is True while
        # __lt__ (below) is False — the two violators compare as "equal-ish".
        if self_violates_constraints and other_violates_constraints:
            return True
        elif self_violates_constraints:
            return True
        elif other_violates_constraints:
            return False

        return self.wvalues <= other.wvalues

    def __lt__(self, other):
        self_violates_constraints = _violates_constraint(self)
        other_violates_constraints = _violates_constraint(other)

        if self_violates_constraints and other_violates_constraints:
            return False
        elif self_violates_constraints:
            return True
        elif other_violates_constraints:
            return False

        return self.wvalues < other.wvalues

    def __eq__(self, other):
        self_violates_constraints = _violates_constraint(self)
        other_violates_constraints = _violates_constraint(other)

        # Two constraint-violating fitnesses compare equal regardless of
        # their (invalid) values.
        if self_violates_constraints and other_violates_constraints:
            return True
        elif self_violates_constraints:
            return False
        elif other_violates_constraints:
            return False

        return self.wvalues == other.wvalues

    def __ne__(self, other):
        return not self.__eq__(other)

    def dominates(self, other):
        self_violates_constraints = _violates_constraint(self)
        other_violates_constraints = _violates_constraint(other)

        # A violator never dominates; a non-violator dominates a violator.
        if self_violates_constraints and other_violates_constraints:
            return False
        elif self_violates_constraints:
            return False
        elif other_violates_constraints:
            return True

        return super(ConstrainedFitness, self).dominates(other)

    def __str__(self):
        """Return the values of the Fitness object."""
        return str((self.values if self.valid else tuple(),
                    self.constraint_violation))

    def __repr__(self):
        """Return the Python code to build a copy of the object."""
        return "%s.%s(%r, %r)" % (self.__module__, self.__class__.__name__,
                                  self.values if self.valid else tuple(),
                                  self.constraint_violation)
""" import random from math import sin, cos, pi, exp, e, sqrt from operator import mul from functools import reduce # Unimodal def rand(individual): r"""Random test objective function. .. list-table:: :widths: 10 50 :stub-columns: 1 * - Type - minimization or maximization * - Range - none * - Global optima - none * - Function - :math:`f(\mathbf{x}) = \text{\texttt{random}}(0,1)` """ return random.random(), def plane(individual): r"""Plane test objective function. .. list-table:: :widths: 10 50 :stub-columns: 1 * - Type - minimization * - Range - none * - Global optima - :math:`x_i = 0, \forall i \in \lbrace 1 \ldots N\rbrace`, :math:`f(\mathbf{x}) = 0` * - Function - :math:`f(\mathbf{x}) = x_0` """ return individual[0], def sphere(individual): r"""Sphere test objective function. .. list-table:: :widths: 10 50 :stub-columns: 1 * - Type - minimization * - Range - none * - Global optima - :math:`x_i = 0, \forall i \in \lbrace 1 \ldots N\rbrace`, :math:`f(\mathbf{x}) = 0` * - Function - :math:`f(\mathbf{x}) = \sum_{i=1}^Nx_i^2` """ return sum(gene * gene for gene in individual), def cigar(individual): r"""Cigar test objective function. .. list-table:: :widths: 10 50 :stub-columns: 1 * - Type - minimization * - Range - none * - Global optima - :math:`x_i = 0, \forall i \in \lbrace 1 \ldots N\rbrace`, :math:`f(\mathbf{x}) = 0` * - Function - :math:`f(\mathbf{x}) = x_0^2 + 10^6\sum_{i=1}^N\,x_i^2` """ return individual[0]**2 + 1e6 * sum(gene * gene for gene in individual[1:]), def rosenbrock(individual): r"""Rosenbrock test objective function. .. list-table:: :widths: 10 50 :stub-columns: 1 * - Type - minimization * - Range - none * - Global optima - :math:`x_i = 1, \forall i \in \lbrace 1 \ldots N\rbrace`, :math:`f(\mathbf{x}) = 0` * - Function - :math:`f(\mathbf{x}) = \sum_{i=1}^{N-1} (1-x_i)^2 + 100 (x_{i+1} - x_i^2 )^2` .. plot:: code/benchmarks/rosenbrock.py :width: 67 % """ return sum(100 * (x * x - y)**2 + (1. 
- x)**2 \ for x, y in zip(individual[:-1], individual[1:])), def h1(individual): r""" Simple two-dimensional function containing several local maxima. From: The Merits of a Parallel Genetic Algorithm in Solving Hard Optimization Problems, A. J. Knoek van Soest and L. J. R. Richard Casius, J. Biomech. Eng. 125, 141 (2003) .. list-table:: :widths: 10 50 :stub-columns: 1 * - Type - maximization * - Range - :math:`x_i \in [-100, 100]` * - Global optima - :math:`\mathbf{x} = (8.6998, 6.7665)`, :math:`f(\mathbf{x}) = 2`\n * - Function - :math:`f(\mathbf{x}) = \frac{\sin(x_1 - \frac{x_2}{8})^2 + \ \sin(x_2 + \frac{x_1}{8})^2}{\sqrt{(x_1 - 8.6998)^2 + \ (x_2 - 6.7665)^2} + 1}` .. plot:: code/benchmarks/h1.py :width: 67 % """ num = (sin(individual[0] - individual[1] / 8))**2 + (sin(individual[1] + individual[0] / 8))**2 denum = ((individual[0] - 8.6998)**2 + (individual[1] - 6.7665)**2)**0.5 + 1 return num / denum, # Multimodal def ackley(individual): r"""Ackley test objective function. .. list-table:: :widths: 10 50 :stub-columns: 1 * - Type - minimization * - Range - :math:`x_i \in [-15, 30]` * - Global optima - :math:`x_i = 0, \forall i \in \lbrace 1 \ldots N\rbrace`, :math:`f(\mathbf{x}) = 0` * - Function - :math:`f(\mathbf{x}) = 20 - 20\exp\left(-0.2\sqrt{\frac{1}{N} \ \sum_{i=1}^N x_i^2} \right) + e - \exp\left(\frac{1}{N}\sum_{i=1}^N \cos(2\pi x_i) \right)` .. plot:: code/benchmarks/ackley.py :width: 67 % """ N = len(individual) return 20 - 20 * exp(-0.2 * sqrt(1.0 / N * sum(x**2 for x in individual))) \ + e - exp(1.0 / N * sum(cos(2 * pi * x) for x in individual)), def bohachevsky(individual): r"""Bohachevsky test objective function. .. 
list-table:: :widths: 10 50 :stub-columns: 1 * - Type - minimization * - Range - :math:`x_i \in [-100, 100]` * - Global optima - :math:`x_i = 0, \forall i \in \lbrace 1 \ldots N\rbrace`, :math:`f(\mathbf{x}) = 0` * - Function - :math:`f(\mathbf{x}) = \sum_{i=1}^{N-1}(x_i^2 + 2x_{i+1}^2 - \ 0.3\cos(3\pi x_i) - 0.4\cos(4\pi x_{i+1}) + 0.7)` .. plot:: code/benchmarks/bohachevsky.py :width: 67 % """ return sum(x**2 + 2 * x1**2 - 0.3 * cos(3 * pi * x) - 0.4 * cos(4 * pi * x1) + 0.7 for x, x1 in zip(individual[:-1], individual[1:])), def griewank(individual): r"""Griewank test objective function. .. list-table:: :widths: 10 50 :stub-columns: 1 * - Type - minimization * - Range - :math:`x_i \in [-600, 600]` * - Global optima - :math:`x_i = 0, \forall i \in \lbrace 1 \ldots N\rbrace`, :math:`f(\mathbf{x}) = 0` * - Function - :math:`f(\mathbf{x}) = \frac{1}{4000}\sum_{i=1}^N\,x_i^2 - \ \prod_{i=1}^N\cos\left(\frac{x_i}{\sqrt{i}}\right) + 1` .. plot:: code/benchmarks/griewank.py :width: 67 % """ return 1.0 / 4000.0 * sum(x ** 2 for x in individual) - \ reduce(mul, (cos(x / sqrt(i + 1.0)) for i, x in enumerate(individual)), 1) + 1, def rastrigin(individual): r"""Rastrigin test objective function. .. list-table:: :widths: 10 50 :stub-columns: 1 * - Type - minimization * - Range - :math:`x_i \in [-5.12, 5.12]` * - Global optima - :math:`x_i = 0, \forall i \in \lbrace 1 \ldots N\rbrace`, :math:`f(\mathbf{x}) = 0` * - Function - :math:`f(\mathbf{x}) = 10N + \sum_{i=1}^N x_i^2 - 10 \cos(2\pi x_i)` .. plot:: code/benchmarks/rastrigin.py :width: 67 % """ return 10 * len(individual) + sum(gene * gene - 10 * \ cos(2 * pi * gene) for gene in individual), def rastrigin_scaled(individual): r"""Scaled Rastrigin test objective function. 
:math:`f_{\text{RastScaled}}(\mathbf{x}) = 10N + \sum_{i=1}^N \ \left(10^{\left(\frac{i-1}{N-1}\right)} x_i \right)^2 - \ 10\cos\left(2\pi 10^{\left(\frac{i-1}{N-1}\right)} x_i \right)` """ N = len(individual) return 10 * N + sum((10 ** (i / (N - 1)) * x) ** 2 - 10 * cos(2 * pi * 10 ** (i / (N - 1)) * x) for i, x in enumerate(individual)), def rastrigin_skew(individual): r"""Skewed Rastrigin test objective function. :math:`f_{\text{RastSkew}}(\mathbf{x}) = 10N + \sum_{i=1}^N \left(y_i^2 - 10 \cos(2\pi x_i)\right)` :math:`\text{with } y_i = \ \begin{cases} \ 10\cdot x_i & \text{ if } x_i > 0,\\ \ x_i & \text{ otherwise } \ \end{cases}` """ N = len(individual) return 10*N + sum((10*x if x > 0 else x)**2 - 10*cos(2*pi*(10*x if x > 0 else x)) for x in individual), def schaffer(individual): r"""Schaffer test objective function. .. list-table:: :widths: 10 50 :stub-columns: 1 * - Type - minimization * - Range - :math:`x_i \in [-100, 100]` * - Global optima - :math:`x_i = 0, \forall i \in \lbrace 1 \ldots N\rbrace`, :math:`f(\mathbf{x}) = 0` * - Function - :math:`f(\mathbf{x}) = \sum_{i=1}^{N-1} (x_i^2+x_{i+1}^2)^{0.25} \cdot \ \left[ \sin^2(50\cdot(x_i^2+x_{i+1}^2)^{0.10}) + 1.0 \ \right]` .. plot:: code/benchmarks/schaffer.py :width: 67 % """ return sum((x**2 + x1**2)**0.25 * ((sin(50 * (x**2 + x1**2)**0.1))**2 + 1.0) for x, x1 in zip(individual[:-1], individual[1:])), def schwefel(individual): r"""Schwefel test objective function. .. list-table:: :widths: 10 50 :stub-columns: 1 * - Type - minimization * - Range - :math:`x_i \in [-500, 500]` * - Global optima - :math:`x_i = 420.96874636, \forall i \in \lbrace 1 \ldots N\rbrace`, :math:`f(\mathbf{x}) = 0` * - Function - :math:`f(\mathbf{x}) = 418.9828872724339\cdot N - \ \sum_{i=1}^N\,x_i\sin\left(\sqrt{|x_i|}\right)` .. 
plot:: code/benchmarks/schwefel.py :width: 67 % """ N = len(individual) return 418.9828872724339 * N - sum(x * sin(sqrt(abs(x))) for x in individual), def himmelblau(individual): r"""The Himmelblau's function is multimodal with 4 defined minimums in :math:`[-6, 6]^2`. .. list-table:: :widths: 10 50 :stub-columns: 1 * - Type - minimization * - Range - :math:`x_i \in [-6, 6]` * - Global optima - :math:`\mathbf{x}_1 = (3.0, 2.0)`, :math:`f(\mathbf{x}_1) = 0`\n :math:`\mathbf{x}_2 = (-2.805118, 3.131312)`, :math:`f(\mathbf{x}_2) = 0`\n :math:`\mathbf{x}_3 = (-3.779310, -3.283186)`, :math:`f(\mathbf{x}_3) = 0`\n :math:`\mathbf{x}_4 = (3.584428, -1.848126)`, :math:`f(\mathbf{x}_4) = 0`\n * - Function - :math:`f(x_1, x_2) = (x_1^2 + x_2 - 11)^2 + (x_1 + x_2^2 -7)^2` .. plot:: code/benchmarks/himmelblau.py :width: 67 % """ return (individual[0] * individual[0] + individual[1] - 11)**2 + \ (individual[0] + individual[1] * individual[1] - 7)**2, def shekel(individual, a, c): r"""The Shekel multimodal function can have any number of maxima. The number of maxima is given by the length of any of the arguments *a* or *c*, *a* is a matrix of size :math:`M\times N`, where *M* is the number of maxima and *N* the number of dimensions and *c* is a :math:`M\times 1` vector. :math:`f_\text{Shekel}(\mathbf{x}) = \sum_{i = 1}^{M} \frac{1}{c_{i} + \sum_{j = 1}^{N} (x_{j} - a_{ij})^2 }` The following figure uses :math:`\mathcal{A} = \begin{bmatrix} 0.5 & 0.5 \\ 0.25 & 0.25 \\ 0.25 & 0.75 \\ 0.75 & 0.25 \\ 0.75 & 0.75 \end{bmatrix}` and :math:`\mathbf{c} = \begin{bmatrix} 0.002 \\ 0.005 \\ 0.005 \\ 0.005 \\ 0.005 \end{bmatrix}`, thus defining 5 maximums in :math:`\mathbb{R}^2`. .. plot:: code/benchmarks/shekel.py :width: 67 % """ return sum((1. / (c[i] + sum((individual[j] - aij)**2 for j, aij in enumerate(a[i])))) for i in range(len(c))), # Multiobjectives def kursawe(individual): r"""Kursawe multiobjective function. 
:math:`f_{\text{Kursawe}1}(\mathbf{x}) = \sum_{i=1}^{N-1} -10 e^{-0.2 \sqrt{x_i^2 + x_{i+1}^2} }` :math:`f_{\text{Kursawe}2}(\mathbf{x}) = \sum_{i=1}^{N} |x_i|^{0.8} + 5 \sin(x_i^3)` .. plot:: code/benchmarks/kursawe.py :width: 100 % """ f1 = sum(-10 * exp(-0.2 * sqrt(x * x + y * y)) for x, y in zip(individual[:-1], individual[1:])) f2 = sum(abs(x)**0.8 + 5 * sin(x * x * x) for x in individual) return f1, f2 def schaffer_mo(individual): r"""Schaffer's multiobjective function on a one attribute *individual*. From: J. D. Schaffer, "Multiple objective optimization with vector evaluated genetic algorithms", in Proceedings of the First International Conference on Genetic Algorithms, 1987. :math:`f_{\text{Schaffer}1}(\mathbf{x}) = x_1^2` :math:`f_{\text{Schaffer}2}(\mathbf{x}) = (x_1-2)^2` """ return individual[0] ** 2, (individual[0] - 2) ** 2 def zdt1(individual): r"""ZDT1 multiobjective function. :math:`g(\mathbf{x}) = 1 + \frac{9}{n-1}\sum_{i=2}^n x_i` :math:`f_{\text{ZDT1}1}(\mathbf{x}) = x_1` :math:`f_{\text{ZDT1}2}(\mathbf{x}) = g(\mathbf{x})\left[1 - \sqrt{\frac{x_1}{g(\mathbf{x})}}\right]` """ g = 1.0 + 9.0 * sum(individual[1:]) / (len(individual) - 1) f1 = individual[0] f2 = g * (1 - sqrt(f1 / g)) return f1, f2 def zdt2(individual): r"""ZDT2 multiobjective function. :math:`g(\mathbf{x}) = 1 + \frac{9}{n-1}\sum_{i=2}^n x_i` :math:`f_{\text{ZDT2}1}(\mathbf{x}) = x_1` :math:`f_{\text{ZDT2}2}(\mathbf{x}) = g(\mathbf{x})\left[1 - \left(\frac{x_1}{g(\mathbf{x})}\right)^2\right]` """ g = 1.0 + 9.0 * sum(individual[1:]) / (len(individual) - 1) f1 = individual[0] f2 = g * (1 - (f1 / g)**2) return f1, f2 def zdt3(individual): r"""ZDT3 multiobjective function. 
:math:`g(\mathbf{x}) = 1 + \frac{9}{n-1}\sum_{i=2}^n x_i` :math:`f_{\text{ZDT3}1}(\mathbf{x}) = x_1` :math:`f_{\text{ZDT3}2}(\mathbf{x}) = g(\mathbf{x})\left[1 - \sqrt{\frac{x_1}{g(\mathbf{x})}} - \frac{x_1}{g(\mathbf{x})}\sin(10\pi x_1)\right]` """ g = 1.0 + 9.0 * sum(individual[1:]) / (len(individual) - 1) f1 = individual[0] f2 = g * (1 - sqrt(f1 / g) - f1 / g * sin(10 * pi * f1)) return f1, f2 def zdt4(individual): r"""ZDT4 multiobjective function. :math:`g(\mathbf{x}) = 1 + 10(n-1) + \sum_{i=2}^n \left[ x_i^2 - 10\cos(4\pi x_i) \right]` :math:`f_{\text{ZDT4}1}(\mathbf{x}) = x_1` :math:`f_{\text{ZDT4}2}(\mathbf{x}) = g(\mathbf{x})\left[ 1 - \sqrt{x_1/g(\mathbf{x})} \right]` """ g = 1 + 10 * (len(individual) - 1) + sum(xi**2 - 10 * cos(4 * pi * xi) for xi in individual[1:]) f1 = individual[0] f2 = g * (1 - sqrt(f1 / g)) return f1, f2 def zdt6(individual): r"""ZDT6 multiobjective function. :math:`g(\mathbf{x}) = 1 + 9 \left[ \left(\sum_{i=2}^n x_i\right)/(n-1) \right]^{0.25}` :math:`f_{\text{ZDT6}1}(\mathbf{x}) = 1 - \exp(-4x_1)\sin^6(6\pi x_1)` :math:`f_{\text{ZDT6}2}(\mathbf{x}) = g(\mathbf{x}) \left[ 1 - (f_{\text{ZDT6}1}(\mathbf{x})/g(\mathbf{x}))^2 \right]` """ g = 1 + 9 * (sum(individual[1:]) / (len(individual) - 1))**0.25 f1 = 1 - exp(-4 * individual[0]) * sin(6 * pi * individual[0])**6 f2 = g * (1 - (f1 / g)**2) return f1, f2 def dtlz1(individual, obj): r"""DTLZ1 multiobjective function. It returns a tuple of *obj* values. The individual must have at least *obj* elements. From: K. Deb, L. Thiele, M. Laumanns and E. Zitzler. Scalable Multi-Objective Optimization Test Problems. CEC 2002, p. 825 - 830, IEEE Press, 2002. 
:math:`g(\mathbf{x}_m) = 100\left(|\mathbf{x}_m| + \sum_{x_i \in \mathbf{x}_m}\left((x_i - 0.5)^2 - \cos(20\pi(x_i - 0.5))\right)\right)` :math:`f_{\text{DTLZ1}1}(\mathbf{x}) = \frac{1}{2} (1 + g(\mathbf{x}_m)) \prod_{i=1}^{m-1}x_i` :math:`f_{\text{DTLZ1}2}(\mathbf{x}) = \frac{1}{2} (1 + g(\mathbf{x}_m)) (1-x_{m-1}) \prod_{i=1}^{m-2}x_i` :math:`\ldots` :math:`f_{\text{DTLZ1}m-1}(\mathbf{x}) = \frac{1}{2} (1 + g(\mathbf{x}_m)) (1 - x_2) x_1` :math:`f_{\text{DTLZ1}m}(\mathbf{x}) = \frac{1}{2} (1 - x_1)(1 + g(\mathbf{x}_m))` Where :math:`m` is the number of objectives and :math:`\mathbf{x}_m` is a vector of the remaining attributes :math:`[x_m~\ldots~x_n]` of the individual in :math:`n > m` dimensions. """ g = 100 * (len(individual[obj - 1:]) + sum((xi - 0.5)**2 - cos(20 * pi * (xi - 0.5)) for xi in individual[obj - 1:])) f = [0.5 * reduce(mul, individual[:obj - 1], 1) * (1 + g)] f.extend(0.5 * reduce(mul, individual[:m], 1) * (1 - individual[m]) * (1 + g) for m in reversed(range(obj - 1))) return f def dtlz2(individual, obj): r"""DTLZ2 multiobjective function. It returns a tuple of *obj* values. The individual must have at least *obj* elements. From: K. Deb, L. Thiele, M. Laumanns and E. Zitzler. Scalable Multi-Objective Optimization Test Problems. CEC 2002, p. 825 - 830, IEEE Press, 2002. :math:`g(\mathbf{x}_m) = \sum_{x_i \in \mathbf{x}_m} (x_i - 0.5)^2` :math:`f_{\text{DTLZ2}1}(\mathbf{x}) = (1 + g(\mathbf{x}_m)) \prod_{i=1}^{m-1} \cos(0.5x_i\pi)` :math:`f_{\text{DTLZ2}2}(\mathbf{x}) = (1 + g(\mathbf{x}_m)) \sin(0.5x_{m-1}\pi ) \prod_{i=1}^{m-2} \cos(0.5x_i\pi)` :math:`\ldots` :math:`f_{\text{DTLZ2}m}(\mathbf{x}) = (1 + g(\mathbf{x}_m)) \sin(0.5x_{1}\pi )` Where :math:`m` is the number of objectives and :math:`\mathbf{x}_m` is a vector of the remaining attributes :math:`[x_m~\ldots~x_n]` of the individual in :math:`n > m` dimensions. 
""" xc = individual[:obj - 1] xm = individual[obj - 1:] g = sum((xi - 0.5)**2 for xi in xm) f = [(1.0 + g) * reduce(mul, (cos(0.5 * xi * pi) for xi in xc), 1.0)] f.extend((1.0 + g) * reduce(mul, (cos(0.5 * xi * pi) for xi in xc[:m]), 1) * sin(0.5 * xc[m]*pi) for m in range(obj-2, -1, -1)) return f def dtlz3(individual, obj): r"""DTLZ3 multiobjective function. It returns a tuple of *obj* values. The individual must have at least *obj* elements. From: K. Deb, L. Thiele, M. Laumanns and E. Zitzler. Scalable Multi-Objective Optimization Test Problems. CEC 2002, p. 825 - 830, IEEE Press, 2002. :math:`g(\mathbf{x}_m) = 100\left(|\mathbf{x}_m| + \sum_{x_i \in \mathbf{x}_m}\left((x_i - 0.5)^2 - \cos(20\pi(x_i - 0.5))\right)\right)` :math:`f_{\text{DTLZ3}1}(\mathbf{x}) = (1 + g(\mathbf{x}_m)) \prod_{i=1}^{m-1} \cos(0.5x_i\pi)` :math:`f_{\text{DTLZ3}2}(\mathbf{x}) = (1 + g(\mathbf{x}_m)) \sin(0.5x_{m-1}\pi ) \prod_{i=1}^{m-2} \cos(0.5x_i\pi)` :math:`\ldots` :math:`f_{\text{DTLZ3}m}(\mathbf{x}) = (1 + g(\mathbf{x}_m)) \sin(0.5x_{1}\pi )` Where :math:`m` is the number of objectives and :math:`\mathbf{x}_m` is a vector of the remaining attributes :math:`[x_m~\ldots~x_n]` of the individual in :math:`n > m` dimensions. """ xc = individual[:obj - 1] xm = individual[obj - 1:] g = 100 * (len(xm) + sum((xi - 0.5)**2 - cos(20 * pi * (xi - 0.5)) for xi in xm)) f = [(1.0 + g) * reduce(mul, (cos(0.5 * xi * pi) for xi in xc), 1.0)] f.extend((1.0 + g) * reduce(mul, (cos(0.5 * xi * pi) for xi in xc[:m]), 1) * sin(0.5 * xc[m] * pi) for m in range(obj - 2, -1, -1)) return f def dtlz4(individual, obj, alpha): r"""DTLZ4 multiobjective function. It returns a tuple of *obj* values. The individual must have at least *obj* elements. The *alpha* parameter allows for a meta-variable mapping in :func:`dtlz2` :math:`x_i \rightarrow x_i^\alpha`, the authors suggest :math:`\alpha = 100`. From: K. Deb, L. Thiele, M. Laumanns and E. Zitzler. Scalable Multi-Objective Optimization Test Problems. CEC 2002, p. 
825 - 830, IEEE Press, 2002. :math:`g(\mathbf{x}_m) = \sum_{x_i \in \mathbf{x}_m} (x_i - 0.5)^2` :math:`f_{\text{DTLZ4}1}(\mathbf{x}) = (1 + g(\mathbf{x}_m)) \prod_{i=1}^{m-1} \cos(0.5x_i^\alpha\pi)` :math:`f_{\text{DTLZ4}2}(\mathbf{x}) = (1 + g(\mathbf{x}_m)) \sin(0.5x_{m-1}^\alpha\pi ) \prod_{i=1}^{m-2} \cos(0.5x_i^\alpha\pi)` :math:`\ldots` :math:`f_{\text{DTLZ4}m}(\mathbf{x}) = (1 + g(\mathbf{x}_m)) \sin(0.5x_{1}^\alpha\pi )` Where :math:`m` is the number of objectives and :math:`\mathbf{x}_m` is a vector of the remaining attributes :math:`[x_m~\ldots~x_n]` of the individual in :math:`n > m` dimensions. """ xc = individual[:obj - 1] xm = individual[obj - 1:] g = sum((xi - 0.5)**2 for xi in xm) f = [(1.0 + g) * reduce(mul, (cos(0.5 * xi ** alpha * pi) for xi in xc), 1.0)] f.extend((1.0 + g) * reduce(mul, (cos(0.5 * xi**alpha * pi) for xi in xc[:m]), 1) * sin(0.5 * xc[m]**alpha * pi) for m in range(obj - 2, -1, -1)) return f def dtlz5(ind, n_objs): r"""DTLZ5 multiobjective function. It returns a tuple of *obj* values. The individual must have at least *obj* elements. From: K. Deb, L. Thiele, M. Laumanns and E. Zitzler. Scalable Multi-Objective Optimization Test Problems. CEC 2002, p. 825-830, IEEE Press, 2002. """ g = lambda x: sum([(a - 0.5)**2 for a in x]) gval = g(ind[n_objs - 1:]) theta = lambda x: pi / (4.0 * (1 + gval)) * (1 + 2 * gval * x) fit = [(1 + gval) * cos(pi / 2.0 * ind[0]) * reduce(lambda x, y: x * y, [cos(theta(a)) for a in ind[1:]])] for m in reversed(range(1, n_objs)): if m == 1: fit.append((1 + gval) * sin(pi / 2.0 * ind[0])) else: fit.append((1 + gval) * cos(pi / 2.0 * ind[0]) * reduce(lambda x, y: x * y, [cos(theta(a)) for a in ind[1:m - 1]], 1) * sin(theta(ind[m - 1]))) return fit def dtlz6(ind, n_objs): r"""DTLZ6 multiobjective function. It returns a tuple of *obj* values. The individual must have at least *obj* elements. From: K. Deb, L. Thiele, M. Laumanns and E. Zitzler. Scalable Multi-Objective Optimization Test Problems. CEC 2002, p. 
825-830, IEEE Press, 2002. """ gval = sum([a**0.1 for a in ind[n_objs - 1:]]) theta = lambda x: pi / (4.0 * (1 + gval)) * (1 + 2 * gval * x) fit = [(1 + gval) * cos(pi / 2.0 * ind[0]) * reduce(lambda x, y: x * y, [cos(theta(a)) for a in ind[1:]])] for m in reversed(range(1, n_objs)): if m == 1: fit.append((1 + gval) * sin(pi / 2.0 * ind[0])) else: fit.append((1 + gval) * cos(pi / 2.0 * ind[0]) * reduce(lambda x, y: x * y, [cos(theta(a)) for a in ind[1: m - 1]], 1) * sin(theta(ind[m - 1]))) return fit def dtlz7(ind, n_objs): r"""DTLZ7 multiobjective function. It returns a tuple of *obj* values. The individual must have at least *obj* elements. From: K. Deb, L. Thiele, M. Laumanns and E. Zitzler. Scalable Multi-Objective Optimization Test Problems. CEC 2002, p. 825-830, IEEE Press, 2002. """ gval = 1 + 9.0 / len(ind[n_objs-1:]) * sum([a for a in ind[n_objs-1:]]) fit = [x for x in ind[:n_objs-1]] fit.append((1 + gval) * (n_objs - sum([a / (1.0 + gval) * (1 + sin(3 * pi * a)) for a in ind[:n_objs-1]]))) return fit def fonseca(individual): r"""Fonseca and Fleming's multiobjective function. From: C. M. Fonseca and P. J. Fleming, "Multiobjective optimization and multiple constraint handling with evolutionary algorithms -- Part II: Application example", IEEE Transactions on Systems, Man and Cybernetics, 1998. :math:`f_{\text{Fonseca}1}(\mathbf{x}) = 1 - e^{-\sum_{i=1}^{3}(x_i - \frac{1}{\sqrt{3}})^2}` :math:`f_{\text{Fonseca}2}(\mathbf{x}) = 1 - e^{-\sum_{i=1}^{3}(x_i + \frac{1}{\sqrt{3}})^2}` """ f_1 = 1 - exp(-sum((xi - 1/sqrt(3))**2 for xi in individual[:3])) f_2 = 1 - exp(-sum((xi + 1/sqrt(3))**2 for xi in individual[:3])) return f_1, f_2 def poloni(individual): r"""Poloni's multiobjective function on a two attribute *individual*. From: C. Poloni, "Hybrid GA for multi objective aerodynamic shape optimization", in Genetic Algorithms in Engineering and Computer Science, 1997. 
:math:`A_1 = 0.5 \sin (1) - 2 \cos (1) + \sin (2) - 1.5 \cos (2)` :math:`A_2 = 1.5 \sin (1) - \cos (1) + 2 \sin (2) - 0.5 \cos (2)` :math:`B_1 = 0.5 \sin (x_1) - 2 \cos (x_1) + \sin (x_2) - 1.5 \cos (x_2)` :math:`B_2 = 1.5 \sin (x_1) - cos(x_1) + 2 \sin (x_2) - 0.5 \cos (x_2)` :math:`f_{\text{Poloni}1}(\mathbf{x}) = 1 + (A_1 - B_1)^2 + (A_2 - B_2)^2` :math:`f_{\text{Poloni}2}(\mathbf{x}) = (x_1 + 3)^2 + (x_2 + 1)^2` """ x_1 = individual[0] x_2 = individual[1] A_1 = 0.5 * sin(1) - 2 * cos(1) + sin(2) - 1.5 * cos(2) A_2 = 1.5 * sin(1) - cos(1) + 2 * sin(2) - 0.5 * cos(2) B_1 = 0.5 * sin(x_1) - 2 * cos(x_1) + sin(x_2) - 1.5 * cos(x_2) B_2 = 1.5 * sin(x_1) - cos(x_1) + 2 * sin(x_2) - 0.5 * cos(x_2) return 1 + (A_1 - B_1)**2 + (A_2 - B_2)**2, (x_1 + 3)**2 + (x_2 + 1)**2 def dent(individual, lambda_=0.85): r"""Test problem Dent. Two-objective problem with a "dent". *individual* has two attributes that take values in [-1.5, 1.5]. From: Schuetze, O., Laumanns, M., Tantar, E., Coello Coello, C.A., & Talbi, E.-G. (2010). Computing gap free Pareto front approximations with stochastic search algorithms. Evolutionary Computation, 18(1), 65--96. doi:10.1162/evco.2010.18.1.18103 Note that in that paper Dent source is stated as: K. Witting and M. Hessel von Molo. Private communication, 2006. """ d = lambda_ * exp(-(individual[0] - individual[1]) ** 2) f1 = 0.5 * (sqrt(1 + (individual[0] + individual[1]) ** 2) + sqrt(1 + (individual[0] - individual[1]) ** 2) + individual[0] - individual[1]) + d f2 = 0.5 * (sqrt(1 + (individual[0] + individual[1]) ** 2) + sqrt(1 + (individual[0] - individual[1]) ** 2) - individual[0] + individual[1]) + d return f1, f2 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/deap/benchmarks/binary.py0000644000076500000240000001137614456461441016567 0ustar00runnerstaff# This file is part of DEAP. 
# # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . from functools import wraps def bin2float(min_, max_, nbits): """Convert a binary array into an array of float where each float is composed of *nbits* and is between *min_* and *max_* and return the result of the decorated function. """ def wrap(function): @wraps(function) def wrapped_function(individual, *args, **kargs): # User must take care to make nelem an integer. nelem = len(individual) // nbits decoded = [0] * nelem for i in range(nelem): gene = int("".join(map(str, individual[i*nbits:i*nbits+nbits])), 2) div = 2**nbits - 1 temp = gene/div decoded[i] = min_ + (temp * (max_ - min_)) return function(decoded, *args, **kargs) return wrapped_function return wrap def trap(individual): u = sum(individual) k = len(individual) if u == k: return k else: return k - 1 - u def inv_trap(individual): u = sum(individual) k = len(individual) if u == 0: return k else: return u - 1 def chuang_f1(individual): """Binary deceptive function from : Multivariate Multi-Model Approach for Globally Multimodal Problems by Chung-Yao Chuang and Wen-Lian Hsu. The function takes individual of 40+1 dimensions and has two global optima in [1,1,...,1] and [0,0,...,0]. 
""" total = 0 if individual[-1] == 0: for i in range(0, len(individual)-1, 4): total += inv_trap(individual[i:i+4]) else: for i in range(0, len(individual)-1, 4): total += trap(individual[i:i+4]) return total, def chuang_f2(individual): """Binary deceptive function from : Multivariate Multi-Model Approach for Globally Multimodal Problems by Chung-Yao Chuang and Wen-Lian Hsu. The function takes individual of 40+1 dimensions and has four global optima in [1,1,...,0,0], [0,0,...,1,1], [1,1,...,1] and [0,0,...,0]. """ total = 0 if individual[-2] == 0 and individual[-1] == 0: for i in range(0, len(individual)-2, 8): total += inv_trap(individual[i:i+4]) + inv_trap(individual[i+4:i+8]) elif individual[-2] == 0 and individual[-1] == 1: for i in range(0, len(individual)-2, 8): total += inv_trap(individual[i:i+4]) + trap(individual[i+4:i+8]) elif individual[-2] == 1 and individual[-1] == 0: for i in range(0, len(individual)-2, 8): total += trap(individual[i:i+4]) + inv_trap(individual[i+4:i+8]) else: for i in range(0, len(individual)-2, 8): total += trap(individual[i:i+4]) + trap(individual[i+4:i+8]) return total, def chuang_f3(individual): """Binary deceptive function from : Multivariate Multi-Model Approach for Globally Multimodal Problems by Chung-Yao Chuang and Wen-Lian Hsu. The function takes individual of 40+1 dimensions and has two global optima in [1,1,...,1] and [0,0,...,0]. """ total = 0 if individual[-1] == 0: for i in range(0, len(individual)-1, 4): total += inv_trap(individual[i:i+4]) else: for i in range(2, len(individual)-3, 4): total += inv_trap(individual[i:i+4]) total += trap(individual[-2:]+individual[:2]) return total, # Royal Road Functions def royal_road1(individual, order): """Royal Road Function R1 as presented by Melanie Mitchell in : "An introduction to Genetic Algorithms". 
""" nelem = len(individual) // order max_value = int(2**order - 1) total = 0 for i in range(nelem): value = int("".join(map(str, individual[i*order:i*order+order])), 2) total += int(order) * int(value/max_value) return total, def royal_road2(individual, order): """Royal Road Function R2 as presented by Melanie Mitchell in : "An introduction to Genetic Algorithms". """ total = 0 norder = order while norder < order**2: total += royal_road1(individual, norder)[0] norder *= 2 return total, ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/deap/benchmarks/gp.py0000644000076500000240000000735214456461441015710 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . from math import exp, sin, cos def kotanchek(data): r"""Kotanchek benchmark function. .. list-table:: :widths: 10 50 :stub-columns: 1 * - Range - :math:`\mathbf{x} \in [-1, 7]^2` * - Function - :math:`f(\mathbf{x}) = \\frac{e^{-(x_1 - 1)^2}}{3.2 + (x_2 - 2.5)^2}` """ return exp(-(data[0] - 1)**2) / (3.2 + (data[1] - 2.5)**2) def salustowicz_1d(data): r"""Salustowicz benchmark function. .. 
list-table:: :widths: 10 50 :stub-columns: 1 * - Range - :math:`x \in [0, 10]` * - Function - :math:`f(x) = e^{-x} x^3 \cos(x) \sin(x) (\cos(x) \sin^2(x) - 1)` """ return exp(-data[0]) * data[0]**3 * cos(data[0]) * sin(data[0]) * (cos(data[0]) * sin(data[0])**2 - 1) def salustowicz_2d(data): r"""Salustowicz benchmark function. .. list-table:: :widths: 10 50 :stub-columns: 1 * - Range - :math:`\mathbf{x} \in [0, 7]^2` * - Function - :math:`f(\mathbf{x}) = e^{-x_1} x_1^3 \cos(x_1) \sin(x_1) (\cos(x_1) \sin^2(x_1) - 1) (x_2 -5)` """ return exp(-data[0]) * data[0]**3 * cos(data[0]) * sin(data[0]) * (cos(data[0]) * sin(data[0])**2 - 1) * (data[1] - 5) def unwrapped_ball(data): r"""Unwrapped ball benchmark function. .. list-table:: :widths: 10 50 :stub-columns: 1 * - Range - :math:`\mathbf{x} \in [-2, 8]^n` * - Function - :math:`f(\mathbf{x}) = \\frac{10}{5 + \sum_{i=1}^n (x_i - 3)^2}` """ return 10. / (5. + sum((d - 3)**2 for d in data)) def rational_polynomial(data): r"""Rational polynomial ball benchmark function. .. list-table:: :widths: 10 50 :stub-columns: 1 * - Range - :math:`\mathbf{x} \in [0, 2]^3` * - Function - :math:`f(\mathbf{x}) = \\frac{30 * (x_1 - 1) (x_3 - 1)}{x_2^2 (x_1 - 10)}` """ return 30. * (data[0] - 1) * (data[2] - 1) / (data[1]**2 * (data[0] - 10)) def sin_cos(data): r"""Sine cosine benchmark function. .. list-table:: :widths: 10 50 :stub-columns: 1 * - Range - :math:`\mathbf{x} \in [0, 6]^2` * - Function - :math:`f(\mathbf{x}) = 6\sin(x_1)\cos(x_2)` """ 6 * sin(data[0]) * cos(data[1]) def ripple(data): r"""Ripple benchmark function. .. list-table:: :widths: 10 50 :stub-columns: 1 * - Range - :math:`\mathbf{x} \in [-5, 5]^2` * - Function - :math:`f(\mathbf{x}) = (x_1 - 3) (x_2 - 3) + 2 \sin((x_1 - 4) (x_2 -4))` """ return (data[0] - 3) * (data[1] - 3) + 2 * sin((data[0] - 4) * (data[1] - 4)) def rational_polynomial2(data): r"""Rational polynomial benchmark function. .. 
def rational_polynomial2(data):
    r"""Rational polynomial benchmark function.

    .. list-table::
       :widths: 10 50
       :stub-columns: 1

       * - Range
         - :math:`\mathbf{x} \in [0, 6]^2`
       * - Function
         - :math:`f(\mathbf{x}) = \frac{(x_1 - 3)^4 + (x_2 - 3)^3 - (x_2 - 3)}{(x_2 - 2)^4 + 10}`
    """
    # Name the recurring shifted coordinates once instead of repeating
    # the subtractions inline.
    u = data[0] - 3
    v = data[1] - 3
    return (u**4 + v**3 - v) / ((data[1] - 2)**4 + 10)
def cone(individual, position, height, width):
    r"""The cone peak function to be used with scenario 2 and 3.

    :math:`f(\mathbf{x}) = h - w \sqrt{\sum_{i=1}^N (x_i - p_i)^2}`
    """
    # Squared Euclidean distance between the individual and the peak centre.
    squared_distance = sum((x - p) ** 2 for x, p in zip(individual, position))
    return height - width * math.sqrt(squared_distance)
Various other keyword parameters listed in the table below are required to setup the benchmark, default parameters are based on scenario 1 of this benchmark. =================== ============================= =================== =================== ====================================================================================================================== Parameter :data:`SCENARIO_1` (Default) :data:`SCENARIO_2` :data:`SCENARIO_3` Details =================== ============================= =================== =================== ====================================================================================================================== ``pfunc`` :func:`function1` :func:`cone` :func:`cone` The peak function or a list of peak function. ``npeaks`` 5 10 50 Number of peaks. If an integer, the number of peaks won't change, if a sequence it will fluctuate [min, current, max]. ``bfunc`` :obj:`None` :obj:`None` ``lambda x: 10`` Basis static function. ``min_coord`` 0.0 0.0 0.0 Minimum coordinate for the centre of the peaks. ``max_coord`` 100.0 100.0 100.0 Maximum coordinate for the centre of the peaks. ``min_height`` 30.0 30.0 30.0 Minimum height of the peaks. ``max_height`` 70.0 70.0 70.0 Maximum height of the peaks. ``uniform_height`` 50.0 50.0 0 Starting height for all peaks, if ``uniform_height <= 0`` the initial height is set randomly for each peak. ``min_width`` 0.0001 1.0 1.0 Minimum width of the peaks. ``max_width`` 0.2 12.0 12.0 Maximum width of the peaks ``uniform_width`` 0.1 0 0 Starting width for all peaks, if ``uniform_width <= 0`` the initial width is set randomly for each peak. ``lambda_`` 0.0 0.5 0.5 Correlation between changes. ``move_severity`` 1.0 1.5 1.0 The distance a single peak moves when peaks change. ``height_severity`` 7.0 7.0 1.0 The standard deviation of the change made to the height of a peak when peaks change. ``width_severity`` 0.01 1.0 0.5 The standard deviation of the change made to the width of a peak when peaks change. 
``period`` 5000 5000 1000 Period between two changes. =================== ============================= =================== =================== ====================================================================================================================== Dictionaries :data:`SCENARIO_1`, :data:`SCENARIO_2` and :data:`SCENARIO_3` of this module define the defaults for these parameters. The scenario 3 requires a constant basis function which can be given as a lambda function ``lambda x: constant``. The following shows an example of scenario 1 with non uniform heights and widths. .. plot:: code/benchmarks/movingsc1.py :width: 67 % """ def __init__(self, dim, random=random, **kargs): # Scenario 1 is the default sc = SCENARIO_1.copy() sc.update(kargs) pfunc = sc.get("pfunc") npeaks = sc.get("npeaks") self.dim = dim self.minpeaks, self.maxpeaks = None, None if hasattr(npeaks, "__getitem__"): self.minpeaks, npeaks, self.maxpeaks = npeaks self.number_severity = sc.get("number_severity") try: if len(pfunc) == npeaks: self.peaks_function = pfunc else: self.peaks_function = self.random.sample(pfunc, npeaks) self.pfunc_pool = tuple(pfunc) except TypeError: self.peaks_function = list(itertools.repeat(pfunc, npeaks)) self.pfunc_pool = (pfunc,) self.random = random self.basis_function = sc.get("bfunc") self.min_coord = sc.get("min_coord") self.max_coord = sc.get("max_coord") self.min_height = sc.get("min_height") self.max_height = sc.get("max_height") uniform_height = sc.get("uniform_height") self.min_width = sc.get("min_width") self.max_width = sc.get("max_width") uniform_width = sc.get("uniform_width") self.lambda_ = sc.get("lambda_") self.move_severity = sc.get("move_severity") self.height_severity = sc.get("height_severity") self.width_severity = sc.get("width_severity") self.peaks_position = [[self.random.uniform(self.min_coord, self.max_coord) for _ in range(dim)] for _ in range(npeaks)] if uniform_height != 0: self.peaks_height = [uniform_height for _ in 
range(npeaks)] else: self.peaks_height = [self.random.uniform(self.min_height, self.max_height) for _ in range(npeaks)] if uniform_width != 0: self.peaks_width = [uniform_width for _ in range(npeaks)] else: self.peaks_width = [self.random.uniform(self.min_width, self.max_width) for _ in range(npeaks)] self.last_change_vector = [[self.random.random() - 0.5 for _ in range(dim)] for _ in range(npeaks)] self.period = sc.get("period") # Used by the Offline Error calculation self._optimum = None self._error = None self._offline_error = 0 # Also used for auto change self.nevals = 0 def globalMaximum(self): """Returns the global maximum value and position.""" # The global maximum is at one peak's position potential_max = list() for func, pos, height, width in zip(self.peaks_function, self.peaks_position, self.peaks_height, self.peaks_width): potential_max.append((func(pos, pos, height, width), pos)) return max(potential_max) def maximums(self): """Returns all visible maximums value and position sorted with the global maximum first. """ # The maximums are at the peaks position but might be swallowed by # other peaks maximums = list() for func, pos, height, width in zip(self.peaks_function, self.peaks_position, self.peaks_height, self.peaks_width): val = func(pos, pos, height, width) if val >= self.__call__(pos, count=False)[0]: maximums.append((val, pos)) return sorted(maximums, reverse=True) def __call__(self, individual, count=True): """Evaluate a given *individual* with the current benchmark configuration. :param indidivudal: The individual to evaluate. :param count: Whether or not to count this evaluation in the total evaluation count. 
(Defaults to :data:`True`) """ possible_values = [] for func, pos, height, width in zip(self.peaks_function, self.peaks_position, self.peaks_height, self.peaks_width): possible_values.append(func(individual, pos, height, width)) if self.basis_function: possible_values.append(self.basis_function(individual)) fitness = max(possible_values) if count: # Compute the offline error self.nevals += 1 if self._optimum is None: self._optimum = self.globalMaximum()[0] self._error = abs(fitness - self._optimum) self._error = min(self._error, abs(fitness - self._optimum)) self._offline_error += self._error # We exhausted the number of evaluation, change peaks for the next one. if self.period > 0 and self.nevals % self.period == 0: self.changePeaks() return fitness, def offlineError(self): return self._offline_error / self.nevals def currentError(self): return self._error def changePeaks(self): """Order the peaks to change position, height, width and number.""" # Change the number of peaks if self.minpeaks is not None and self.maxpeaks is not None: npeaks = len(self.peaks_function) u = self.random.random() r = self.maxpeaks - self.minpeaks if u < 0.5: # Remove n peaks or less depending on the minimum number of peaks u = self.random.random() n = min(npeaks - self.minpeaks, int(round(r * u * self.number_severity))) for i in range(n): idx = self.random.randrange(len(self.peaks_function)) self.peaks_function.pop(idx) self.peaks_position.pop(idx) self.peaks_height.pop(idx) self.peaks_width.pop(idx) self.last_change_vector.pop(idx) else: # Add n peaks or less depending on the maximum number of peaks u = self.random.random() n = min(self.maxpeaks - npeaks, int(round(r * u * self.number_severity))) for i in range(n): self.peaks_function.append(self.random.choice(self.pfunc_pool)) self.peaks_position.append([self.random.uniform(self.min_coord, self.max_coord) for _ in range(self.dim)]) self.peaks_height.append(self.random.uniform(self.min_height, self.max_height)) 
self.peaks_width.append(self.random.uniform(self.min_width, self.max_width)) self.last_change_vector.append([self.random.random() - 0.5 for _ in range(self.dim)]) for i in range(len(self.peaks_function)): # Change peak position shift = [self.random.random() - 0.5 for _ in range(len(self.peaks_position[i]))] shift_length = sum(s**2 for s in shift) shift_length = self.move_severity / math.sqrt(shift_length) if shift_length > 0 else 0 shift = [shift_length * (1.0 - self.lambda_) * s + self.lambda_ * c for s, c in zip(shift, self.last_change_vector[i])] shift_length = sum(s**2 for s in shift) shift_length = self.move_severity / math.sqrt(shift_length) if shift_length > 0 else 0 shift = [s*shift_length for s in shift] new_position = [] final_shift = [] for pp, s in zip(self.peaks_position[i], shift): new_coord = pp + s if new_coord < self.min_coord: new_position.append(2.0 * self.min_coord - pp - s) final_shift.append(-1.0 * s) elif new_coord > self.max_coord: new_position.append(2.0 * self.max_coord - pp - s) final_shift.append(-1.0 * s) else: new_position.append(new_coord) final_shift.append(s) self.peaks_position[i] = new_position self.last_change_vector[i] = final_shift # Change peak height change = self.random.gauss(0, 1) * self.height_severity new_value = change + self.peaks_height[i] if new_value < self.min_height: self.peaks_height[i] = 2.0 * self.min_height - self.peaks_height[i] - change elif new_value > self.max_height: self.peaks_height[i] = 2.0 * self.max_height - self.peaks_height[i] - change else: self.peaks_height[i] = new_value # Change peak width change = self.random.gauss(0, 1) * self.width_severity new_value = change + self.peaks_width[i] if new_value < self.min_width: self.peaks_width[i] = 2.0 * self.min_width - self.peaks_width[i] - change elif new_value > self.max_width: self.peaks_width[i] = 2.0 * self.max_width - self.peaks_width[i] - change else: self.peaks_width[i] = new_value self._optimum = None SCENARIO_1 = {"pfunc": function1, "npeaks": 5, 
"bfunc": None, "min_coord": 0.0, "max_coord": 100.0, "min_height": 30.0, "max_height": 70.0, "uniform_height": 50.0, "min_width": 0.0001, "max_width": 0.2, "uniform_width": 0.1, "lambda_": 0.0, "move_severity": 1.0, "height_severity": 7.0, "width_severity": 0.01, "period": 5000} SCENARIO_2 = {"pfunc": cone, "npeaks": 10, "bfunc": None, "min_coord": 0.0, "max_coord": 100.0, "min_height": 30.0, "max_height": 70.0, "uniform_height": 50.0, "min_width": 1.0, "max_width": 12.0, "uniform_width": 0, "lambda_": 0.5, "move_severity": 1.0, "height_severity": 7.0, "width_severity": 1.0, "period": 5000} SCENARIO_3 = {"pfunc": cone, "npeaks": 50, "bfunc": lambda x: 10, "min_coord": 0.0, "max_coord": 100.0, "min_height": 30.0, "max_height": 70.0, "uniform_height": 0, "min_width": 1.0, "max_width": 12.0, "uniform_width": 0, "lambda_": 0.5, "move_severity": 1.0, "height_severity": 1.0, "width_severity": 0.5, "period": 1000} def diversity(population): nind = len(population) ndim = len(population[0]) d = [0.0] * ndim for x in population: d = [di + xi for di, xi in zip(d, x)] d = [di / nind for di in d] return math.sqrt(sum((di - xi)**2 for x in population for di, xi in zip(d, x))) if __name__ == "__main__": mpb = MovingPeaks(dim=2, npeaks=[1, 1, 10], number_severity=0.1) print(mpb.maximums()) mpb.changePeaks() print(mpb.maximums()) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/deap/benchmarks/tools.py0000644000076500000240000002760414456461441016444 0ustar00runnerstaff"""Module containing tools that are useful when benchmarking algorithms """ from math import hypot, sqrt from functools import wraps from itertools import repeat try: import numpy numpy_imported = True except ImportError: numpy_imported = False try: import scipy.spatial scipy_imported = True except ImportError: scipy_imported = False try: # try importing the C version from ..tools._hypervolume import hv except ImportError: # fallback on python version from 
class translate(object):
    """Decorator for evaluation functions, it translates the objective
    function by *vector* which should be the same length as the individual
    size. When called the decorated function should take as first argument the
    individual to be evaluated. The inverse translation vector is actually
    applied to the individual and the resulting list is given to the
    evaluation function. Thus, the evaluation function shall not be expecting
    an individual as it will receive a plain list.

    This decorator adds a :func:`translate` method to the decorated function.
    """
    def __init__(self, vector):
        self.vector = vector

    def __call__(self, func):
        # wraps is used to combine stacked decorators that would add functions
        @wraps(func)
        def wrapper(individual, *args, **kargs):
            # A subtraction is applied since the translation is applied to the
            # individual and not the function
            shifted = [value - offset
                       for value, offset in zip(individual, self.vector)]
            return func(shifted, *args, **kargs)
        wrapper.translate = self.translate
        return wrapper

    def translate(self, vector):
        """Set the current translation to *vector*. After decorating the
        evaluation function, this function will be available directly from
        the function object. ::

            @translate([0.25, 0.5, ..., 0.1])
            def evaluate(individual):
                return sum(individual),

            # This will cancel the translation
            evaluate.translate([0.0, 0.0, ..., 0.0])
        """
        self.vector = vector
class noise(object):
    """Decorator for evaluation functions, it evaluates the objective function
    and adds noise by calling the function(s) provided in the *noise*
    argument. The noise functions are called without any argument, consider
    using the :class:`~deap.base.Toolbox` or Python's
    :func:`functools.partial` to provide any required argument. If a single
    function is provided it is applied to all objectives of the evaluation
    function. If a list of noise functions is provided, it must be of length
    equal to the number of objectives. The noise argument also accept
    :obj:`None`, which will leave the objective without noise.

    This decorator adds a :func:`noise` method to the decorated function.
    """
    def __init__(self, noise):
        # A single (non-iterable) noise function is repeated for every
        # objective; an iterable supplies one function per objective.
        try:
            self.rand_funcs = tuple(noise)
        except TypeError:
            self.rand_funcs = repeat(noise)

    def __call__(self, func):
        # wraps is used to combine stacked decorators that would add functions
        @wraps(func)
        def wrapper(individual, *args, **kargs):
            result = func(individual, *args, **kargs)
            # A None entry leaves the corresponding objective untouched.
            return tuple(r if f is None else r + f()
                         for r, f in zip(result, self.rand_funcs))
        wrapper.noise = self.noise
        return wrapper

    def noise(self, noise):
        """Set the current noise to *noise*. After decorating the evaluation
        function, this function will be available directly from the function
        object. ::

            prand = functools.partial(random.gauss, mu=0.0, sigma=1.0)

            @noise(prand)
            def evaluate(individual):
                return sum(individual),

            # This will remove noise from the evaluation function
            evaluate.noise(None)
        """
        try:
            self.rand_funcs = tuple(noise)
        except TypeError:
            self.rand_funcs = repeat(noise)
""" def __init__(self, factor): # Factor is inverted since it is applied to the individual and not the # objective function self.factor = tuple(1.0/f for f in factor) def __call__(self, func): # wraps is used to combine stacked decorators that would add functions @wraps(func) def wrapper(individual, *args, **kargs): return func([v * f for v, f in zip(individual, self.factor)], *args, **kargs) wrapper.scale = self.scale return wrapper def scale(self, factor): """Set the current scale to *factor*. After decorating the evaluation function, this function will be available directly from the function object. :: @scale([0.25, 2.0, ..., 0.1]) def evaluate(individual): return sum(individual), # This will cancel the scaling evaluate.scale([1.0, 1.0, ..., 1.0]) """ # Factor is inverted since it is applied to the individual and not the # objective function self.factor = tuple(1.0/f for f in factor) class bound(object): """Decorator for crossover and mutation functions, it changes the individuals after the modification is done to bring it back in the allowed *bounds*. The *bounds* are functions taking individual and returning whether of not the variable is allowed. You can provide one or multiple such functions. In the former case, the function is used on all dimensions and in the latter case, the number of functions must be greater or equal to the number of dimension of the individuals. The *type* determines how the attributes are brought back into the valid range This decorator adds a :func:`bound` method to the decorated function. 
""" def _clip(self, individual): return individual def _wrap(self, individual): return individual def _mirror(self, individual): return individual def __call__(self, func): @wraps(func) def wrapper(*args, **kargs): individuals = func(*args, **kargs) return self.bound(individuals) wrapper.bound = self.bound return wrapper def __init__(self, bounds, type): try: self.bounds = tuple(bounds) except TypeError: self.bounds = repeat(bounds) if type == "mirror": self.bound = self._mirror elif type == "wrap": self.bound = self._wrap elif type == "clip": self.bound = self._clip def diversity(first_front, first, last): """Given a Pareto front `first_front` and the two extreme points of the optimal Pareto front, this function returns a metric of the diversity of the front as explained in the original NSGA-II article by K. Deb. The smaller the value is, the better the front is. """ df = hypot(first_front[0].fitness.values[0] - first[0], first_front[0].fitness.values[1] - first[1]) dl = hypot(first_front[-1].fitness.values[0] - last[0], first_front[-1].fitness.values[1] - last[1]) dt = [hypot(first.fitness.values[0] - second.fitness.values[0], first.fitness.values[1] - second.fitness.values[1]) for first, second in zip(first_front[:-1], first_front[1:])] if len(first_front) == 1: return df + dl dm = sum(dt)/len(dt) di = sum(abs(d_i - dm) for d_i in dt) delta = (df + dl + di)/(df + dl + len(dt) * dm) return delta def convergence(first_front, optimal_front): """Given a Pareto front `first_front` and the optimal Pareto front, this function returns a metric of convergence of the front as explained in the original NSGA-II article by K. Deb. The smaller the value is, the closer the front is to the optimal one. """ distances = [] for ind in first_front: distances.append(float("inf")) for opt_ind in optimal_front: dist = 0. 
def igd(A, Z):
    """Inverse generational distance.

    :param A: An approximation front, a 2-D array-like of objective points.
    :param Z: The reference (optimal) front, a 2-D array-like of points.
    :returns: The average, over the points of *Z*, of the distance to the
        closest point of *A*.
    :raises ImportError: If :mod:`scipy` is not installed.
    """
    # Import locally so the availability check is self-contained instead of
    # depending on a module-level flag.
    try:
        from scipy.spatial.distance import cdist
    except ImportError:
        # BUG FIX: the original error message misspelled the function
        # name ("idg requires scipy module").
        raise ImportError("igd requires scipy module")
    distances = cdist(A, Z)
    return numpy.average(numpy.min(distances, axis=0))
"""A module that provides support for the Covariance Matrix Adaptation Evolution Strategy. """ import copy from math import sqrt, log, exp from itertools import cycle import warnings import numpy from . import tools class Strategy(object): """ A strategy that will keep track of the basic parameters of the CMA-ES algorithm ([Hansen2001]_). :param centroid: An iterable object that indicates where to start the evolution. :param sigma: The initial standard deviation of the distribution. :param parameter: One or more parameter to pass to the strategy as described in the following table, optional. +----------------+---------------------------+----------------------------+ | Parameter | Default | Details | +================+===========================+============================+ | ``lambda_`` | ``int(4 + 3 * log(N))`` | Number of children to | | | | produce at each generation,| | | | ``N`` is the individual's | | | | size (integer). | +----------------+---------------------------+----------------------------+ | ``mu`` | ``int(lambda_ / 2)`` | The number of parents to | | | | keep from the | | | | lambda children (integer). | +----------------+---------------------------+----------------------------+ | ``cmatrix`` | ``identity(N)`` | The initial covariance | | | | matrix of the distribution | | | | that will be sampled. | +----------------+---------------------------+----------------------------+ | ``weights`` | ``"superlinear"`` | Decrease speed, can be | | | | ``"superlinear"``, | | | | ``"linear"`` or | | | | ``"equal"``. | +----------------+---------------------------+----------------------------+ | ``cs`` | ``(mueff + 2) / | Cumulation constant for | | | (N + mueff + 3)`` | step-size. | +----------------+---------------------------+----------------------------+ | ``damps`` | ``1 + 2 * max(0, sqrt(( | Damping for step-size. 
| | | mueff - 1) / (N + 1)) - 1)| | | | + cs`` | | +----------------+---------------------------+----------------------------+ | ``ccum`` | ``4 / (N + 4)`` | Cumulation constant for | | | | covariance matrix. | +----------------+---------------------------+----------------------------+ | ``ccov1`` | ``2 / ((N + 1.3)^2 + | Learning rate for rank-one | | | mueff)`` | update. | +----------------+---------------------------+----------------------------+ | ``ccovmu`` | ``2 * (mueff - 2 + 1 / | Learning rate for rank-mu | | | mueff) / ((N + 2)^2 + | update. | | | mueff)`` | | +----------------+---------------------------+----------------------------+ .. [Hansen2001] Hansen and Ostermeier, 2001. Completely Derandomized Self-Adaptation in Evolution Strategies. *Evolutionary Computation* """ def __init__(self, centroid, sigma, **kargs): self.params = kargs # Create a centroid as a numpy array self.centroid = numpy.array(centroid) self.dim = len(self.centroid) self.sigma = sigma self.pc = numpy.zeros(self.dim) self.ps = numpy.zeros(self.dim) self.chiN = sqrt(self.dim) * (1 - 1. / (4. * self.dim) + 1. / (21. * self.dim ** 2)) self.C = self.params.get("cmatrix", numpy.identity(self.dim)) self.diagD, self.B = numpy.linalg.eigh(self.C) indx = numpy.argsort(self.diagD) self.diagD = self.diagD[indx] ** 0.5 self.B = self.B[:, indx] self.BD = self.B * self.diagD self.cond = self.diagD[indx[-1]] / self.diagD[indx[0]] self.lambda_ = self.params.get("lambda_", int(4 + 3 * log(self.dim))) self.update_count = 0 self.computeParams(self.params) def generate(self, ind_init): r"""Generate a population of :math:`\lambda` individuals of type *ind_init* from the current strategy. :param ind_init: A function object that is able to initialize an individual from a list. :returns: A list of individuals. 
""" arz = numpy.random.standard_normal((self.lambda_, self.dim)) arz = self.centroid + self.sigma * numpy.dot(arz, self.BD.T) return [ind_init(a) for a in arz] def update(self, population): """Update the current covariance matrix strategy from the *population*. :param population: A list of individuals from which to update the parameters. """ population.sort(key=lambda ind: ind.fitness, reverse=True) old_centroid = self.centroid self.centroid = numpy.dot(self.weights, population[0:self.mu]) c_diff = self.centroid - old_centroid # Cumulation : update evolution path self.ps = (1 - self.cs) * self.ps \ + sqrt(self.cs * (2 - self.cs) * self.mueff) / self.sigma \ * numpy.dot(self.B, (1. / self.diagD) * numpy.dot(self.B.T, c_diff)) hsig = float((numpy.linalg.norm(self.ps) / sqrt(1. - (1. - self.cs) ** (2. * (self.update_count + 1.))) / self.chiN < (1.4 + 2. / (self.dim + 1.)))) self.update_count += 1 self.pc = (1 - self.cc) * self.pc + hsig \ * sqrt(self.cc * (2 - self.cc) * self.mueff) / self.sigma \ * c_diff # Update covariance matrix artmp = population[0:self.mu] - old_centroid self.C = (1 - self.ccov1 - self.ccovmu + (1 - hsig) * self.ccov1 * self.cc * (2 - self.cc)) * self.C \ + self.ccov1 * numpy.outer(self.pc, self.pc) \ + self.ccovmu * numpy.dot((self.weights * artmp.T), artmp) \ / self.sigma ** 2 self.sigma *= numpy.exp((numpy.linalg.norm(self.ps) / self.chiN - 1.) * self.cs / self.damps) self.diagD, self.B = numpy.linalg.eigh(self.C) indx = numpy.argsort(self.diagD) self.cond = self.diagD[indx[-1]] / self.diagD[indx[0]] self.diagD = self.diagD[indx] ** 0.5 self.B = self.B[:, indx] self.BD = self.B * self.diagD def computeParams(self, params): r"""Computes the parameters depending on :math:`\lambda`. It needs to be called again if :math:`\lambda` changes during evolution. :param params: A dictionary of the manually set parameters. 
""" self.mu = params.get("mu", int(self.lambda_ / 2)) rweights = params.get("weights", "superlinear") if rweights == "superlinear": self.weights = log(self.mu + 0.5) - \ numpy.log(numpy.arange(1, self.mu + 1)) elif rweights == "linear": self.weights = self.mu + 0.5 - numpy.arange(1, self.mu + 1) elif rweights == "equal": self.weights = numpy.ones(self.mu) else: raise RuntimeError("Unknown weights : %s" % rweights) self.weights /= sum(self.weights) self.mueff = 1. / sum(self.weights ** 2) self.cc = params.get("ccum", 4. / (self.dim + 4.)) self.cs = params.get("cs", (self.mueff + 2.) / (self.dim + self.mueff + 3.)) self.ccov1 = params.get("ccov1", 2. / ((self.dim + 1.3) ** 2 + self.mueff)) self.ccovmu = params.get("ccovmu", 2. * (self.mueff - 2. + 1. / self.mueff) / ((self.dim + 2.) ** 2 + self.mueff)) self.ccovmu = min(1 - self.ccov1, self.ccovmu) self.damps = 1. + 2. * max(0, sqrt((self.mueff - 1.) / (self.dim + 1.)) - 1.) + self.cs self.damps = params.get("damps", self.damps) class StrategyOnePlusLambda(object): r""" A CMA-ES strategy that uses the :math:`1 + \lambda` paradigm ([Igel2007]_). :param parent: An iterable object that indicates where to start the evolution. The parent requires a fitness attribute. :param sigma: The initial standard deviation of the distribution. :param lambda_: Number of offspring to produce from the parent. (optional, defaults to 1) :param parameter: One or more parameter to pass to the strategy as described in the following table. (optional) Other parameters can be provided as described in the next table +----------------+---------------------------+----------------------------+ | Parameter | Default | Details | +================+===========================+============================+ | ``d`` | ``1.0 + N / (2.0 * | Damping for step-size. | | | lambda_)`` | | +----------------+---------------------------+----------------------------+ | ``ptarg`` | ``1.0 / (5 + sqrt(lambda_)| Target success rate. 
    |                | / 2.0)``                  |                            |
    +----------------+---------------------------+----------------------------+
    | ``cp``         | ``ptarg * lambda_ / (2.0 +| Step size learning rate.   |
    |                | ptarg * lambda_)``        |                            |
    +----------------+---------------------------+----------------------------+
    | ``cc``         | ``2.0 / (N + 2.0)``       | Cumulation time horizon.   |
    +----------------+---------------------------+----------------------------+
    | ``ccov``       | ``2.0 / (N**2 + 6.0)``    | Covariance matrix learning |
    |                |                           | rate.                      |
    +----------------+---------------------------+----------------------------+
    | ``pthresh``    | ``0.44``                  | Threshold success rate.    |
    +----------------+---------------------------+----------------------------+

    .. [Igel2007] Igel, Hansen, Roth, 2007. Covariance matrix adaptation for
       multi-objective optimization. *Evolutionary Computation*
       Spring;15(1):1-28
    """
    def __init__(self, parent, sigma, **kargs):
        self.parent = parent
        self.sigma = sigma
        self.dim = len(self.parent)

        # Covariance matrix C and its Cholesky factor A (C = A * A.T).
        self.C = numpy.identity(self.dim)
        self.A = numpy.identity(self.dim)

        # Evolution path used by the rank-one covariance update.
        self.pc = numpy.zeros(self.dim)

        self.computeParams(kargs)
        # Smoothed success rate starts at the target rate.
        self.psucc = self.ptarg

    def computeParams(self, params):
        r"""Computes the parameters depending on :math:`\lambda`. It needs
        to be called again if :math:`\lambda` changes during evolution.

        :param params: A dictionary of the manually set parameters.
        """
        # Selection :
        self.lambda_ = params.get("lambda_", 1)

        # Step size control :
        self.d = params.get("d", 1.0 + self.dim / (2.0 * self.lambda_))
        self.ptarg = params.get("ptarg", 1.0 / (5 + sqrt(self.lambda_) / 2.0))
        self.cp = params.get("cp", self.ptarg * self.lambda_
                             / (2 + self.ptarg * self.lambda_))

        # Covariance matrix adaptation
        self.cc = params.get("cc", 2.0 / (self.dim + 2.0))
        self.ccov = params.get("ccov", 2.0 / (self.dim ** 2 + 6.0))
        self.pthresh = params.get("pthresh", 0.44)

    def generate(self, ind_init):
        r"""Generate a population of :math:`\lambda` individuals of type
        *ind_init* from the current strategy.

        :param ind_init: A function object that is able to initialize an
                         individual from a list.
        :returns: A list of individuals.
        """
        # self.y = numpy.dot(self.A, numpy.random.standard_normal(self.dim))
        # Sample lambda_ offspring around the single parent: x = parent + sigma * A*z.
        arz = numpy.random.standard_normal((self.lambda_, self.dim))
        arz = self.parent + self.sigma * numpy.dot(arz, self.A.T)
        return [ind_init(a) for a in arz]

    def update(self, population):
        """Update the current covariance matrix strategy from the *population*.

        :param population: A list of individuals from which to update the
                           parameters.
        """
        population.sort(key=lambda ind: ind.fitness, reverse=True)
        # Fraction of offspring at least as good as the parent, smoothed
        # into psucc (success-rate based step-size control).
        lambda_succ = sum(self.parent.fitness <= ind.fitness
                          for ind in population)
        p_succ = float(lambda_succ) / self.lambda_
        self.psucc = (1 - self.cp) * self.psucc + self.cp * p_succ

        # Accept the best offspring when it matches or beats the parent.
        if self.parent.fitness <= population[0].fitness:
            x_step = (population[0] - numpy.array(self.parent)) / self.sigma
            self.parent = copy.deepcopy(population[0])
            if self.psucc < self.pthresh:
                # Successful step: cumulate it in pc and do a rank-one update.
                self.pc = (1 - self.cc) * self.pc \
                    + sqrt(self.cc * (2 - self.cc)) * x_step
                self.C = (1 - self.ccov) * self.C \
                    + self.ccov * numpy.outer(self.pc, self.pc)
            else:
                # High success rate: fade pc and compensate the missing
                # cumulation term directly in C.
                self.pc = (1 - self.cc) * self.pc
                self.C = (1 - self.ccov) * self.C \
                    + self.ccov * (numpy.outer(self.pc, self.pc)
                                   + self.cc * (2 - self.cc) * self.C)

        self.sigma = self.sigma * exp(1.0 / self.d
                                      * (self.psucc - self.ptarg)
                                      / (1.0 - self.ptarg))

        # We use Cholesky since for now we have no use of eigen decomposition
        # Basically, Cholesky returns a matrix A as C = A*A.T
        # Eigen decomposition returns two matrix B and D^2 as C = B*D^2*B.T = B*D*D*B.T
        # So A == B*D
        # To compute the new individual we need to multiply each vector z by A
        # as y = centroid + sigma * A*z
        # So the Cholesky is more straightforward as we don't need to compute
        # the squareroot of D^2, and multiply B and D in order to get A, we directly get A.
        # This can't be done (without cost) with the standard CMA-ES as the eigen decomposition is used
        # to compute covariance matrix inverse in the step-size evolutionary path computation.
        self.A = numpy.linalg.cholesky(self.C)


class StrategyMultiObjective(object):
    """Multiobjective CMA-ES strategy based on the paper [Voss2010]_. It
    is used similarly as the standard CMA-ES strategy with a generate-update
    scheme.

    :param population: An initial population of individual.
    :param sigma: The initial step size of the complete system.
    :param mu: The number of parents to use in the evolution. When not
               provided it defaults to the length of *population*. (optional)
    :param lambda_: The number of offspring to produce at each generation.
                    (optional, defaults to 1)
    :param indicator: The indicator function to use. (optional, default to
                      :func:`~deap.tools.hypervolume`)

    Other parameters can be provided as described in the next table

    +----------------+---------------------------+----------------------------+
    | Parameter      | Default                   | Details                    |
    +================+===========================+============================+
    | ``d``          | ``1.0 + N / 2.0``         | Damping for step-size.     |
    +----------------+---------------------------+----------------------------+
    | ``ptarg``      | ``1.0 / (5 + 1.0 / 2.0)`` | Target success rate.       |
    +----------------+---------------------------+----------------------------+
    | ``cp``         | ``ptarg / (2.0 + ptarg)`` | Step size learning rate.   |
    +----------------+---------------------------+----------------------------+
    | ``cc``         | ``2.0 / (N + 2.0)``       | Cumulation time horizon.   |
    +----------------+---------------------------+----------------------------+
    | ``ccov``       | ``2.0 / (N**2 + 6.0)``    | Covariance matrix learning |
    |                |                           | rate.                      |
    +----------------+---------------------------+----------------------------+
    | ``pthresh``    | ``0.44``                  | Threshold success rate.    |
    +----------------+---------------------------+----------------------------+

    ..
    [Voss2010] Voss, Hansen, Igel, "Improved Step Size Adaptation
       for the MO-CMA-ES", 2010.

    """
    def __init__(self, population, sigma, **params):
        self.parents = population
        self.dim = len(self.parents[0])

        # Selection
        self.mu = params.get("mu", len(self.parents))
        self.lambda_ = params.get("lambda_", 1)

        # Step size control
        self.d = params.get("d", 1.0 + self.dim / 2.0)
        self.ptarg = params.get("ptarg", 1.0 / (5.0 + 0.5))
        self.cp = params.get("cp", self.ptarg / (2.0 + self.ptarg))

        # Covariance matrix adaptation
        self.cc = params.get("cc", 2.0 / (self.dim + 2.0))
        self.ccov = params.get("ccov", 2.0 / (self.dim ** 2 + 6.0))
        self.pthresh = params.get("pthresh", 0.44)

        # Internal parameters associated to the mu parent
        # (one full strategy state -- step size, Cholesky factors, path,
        # success rate -- per parent).
        self.sigmas = [sigma] * len(population)
        # Lower Cholesky matrix (Sampling matrix)
        self.A = [numpy.identity(self.dim) for _ in range(len(population))]
        # Inverse Cholesky matrix (Used in the update of A)
        self.invCholesky = [numpy.identity(self.dim)
                            for _ in range(len(population))]
        self.pc = [numpy.zeros(self.dim) for _ in range(len(population))]
        self.psucc = [self.ptarg] * len(population)

        self.indicator = params.get("indicator", tools.hypervolume)

    def generate(self, ind_init):
        r"""Generate a population of :math:`\lambda` individuals of type
        *ind_init* from the current strategy.

        :param ind_init: A function object that is able to initialize an
                         individual from a list.
        :returns: A list of individuals with a private attribute :attr:`_ps`.
                  This last attribute is essential to the update function, it
                  indicates that the individual is an offspring and the index
                  of its parent.
        """
        arz = numpy.random.randn(self.lambda_, self.dim)
        individuals = list()

        # Make sure every parent has a parent tag and index
        for i, p in enumerate(self.parents):
            p._ps = "p", i

        # Each parent produces an offspring
        if self.lambda_ == self.mu:
            for i in range(self.lambda_):
                # print "Z", list(arz[i])
                individuals.append(ind_init(self.parents[i] + self.sigmas[i]
                                            * numpy.dot(self.A[i], arz[i])))
                individuals[-1]._ps = "o", i

        # Parents producing an offspring are chosen at random from the first front
        else:
            ndom = tools.sortLogNondominated(self.parents, len(self.parents),
                                             first_front_only=True)
            for i in range(self.lambda_):
                j = numpy.random.randint(0, len(ndom))
                _, p_idx = ndom[j]._ps
                individuals.append(ind_init(self.parents[p_idx]
                                            + self.sigmas[p_idx]
                                            * numpy.dot(self.A[p_idx], arz[i])))
                individuals[-1]._ps = "o", p_idx

        return individuals

    def _select(self, candidates):
        # Environmental selection: keep the mu best candidates by
        # non-dominated sorting, breaking ties on the critical front with
        # the indicator (hypervolume) contribution.
        if len(candidates) <= self.mu:
            return candidates, []

        pareto_fronts = tools.sortLogNondominated(candidates, len(candidates))

        chosen = list()
        mid_front = None
        not_chosen = list()

        # Fill the next population (chosen) with the fronts until there is not enough space
        # When an entire front does not fit in the space left we rely on the hypervolume
        # for this front
        # The remaining fronts are explicitly not chosen
        full = False
        for front in pareto_fronts:
            if len(chosen) + len(front) <= self.mu and not full:
                chosen += front
            elif mid_front is None and len(chosen) < self.mu:
                mid_front = front
                # With this front, we selected enough individuals
                full = True
            else:
                not_chosen += front

        # Separate the mid front to accept only k individuals
        k = self.mu - len(chosen)
        if k > 0:
            # reference point is chosen in the complete population
            # as the worst in each dimension +1
            ref = numpy.array([ind.fitness.wvalues for ind in candidates]) * -1
            ref = numpy.max(ref, axis=0) + 1

            # Iteratively drop the least-contributing individual of the
            # mid front until exactly k remain.
            for _ in range(len(mid_front) - k):
                idx = self.indicator(mid_front, ref=ref)
                not_chosen.append(mid_front.pop(idx))

            chosen += mid_front

        return chosen, not_chosen

    def _rankOneUpdate(self, invCholesky, A, alpha, beta, v):
        # Rank-one update of the Cholesky factor A and its inverse:
        # new C = alpha * A*A.T + beta * v*v.T, performed without
        # refactorizing the covariance matrix.
        w = numpy.dot(invCholesky, v)

        # Under this threshold, the update is mostly noise
        # NOTE(review): this tests the max *component* of w, not its
        # magnitude; an all-negative w skips the update -- confirm intended.
        if w.max() > 1e-20:
            w_inv = numpy.dot(w, invCholesky)
            norm_w2 = numpy.sum(w ** 2)
            a = sqrt(alpha)
            root = numpy.sqrt(1 + beta / alpha * norm_w2)
            b = a / norm_w2 * (root - 1)

            A = a * A + b * numpy.outer(v, w)
            invCholesky = 1.0 / a * invCholesky \
                - b / (a ** 2 + a * b * norm_w2) * numpy.outer(w, w_inv)

        return invCholesky, A

    def update(self, population):
        """Update the current covariance matrix strategies from the
        *population*.

        :param population: A list of individuals from which to update the
                           parameters.
        """
        chosen, not_chosen = self._select(population + self.parents)

        cp, cc, ccov = self.cp, self.cc, self.ccov
        d, ptarg, pthresh = self.d, self.ptarg, self.pthresh

        # Make copies for chosen offspring only
        last_steps = [self.sigmas[ind._ps[1]] if ind._ps[0] == "o" else None
                      for ind in chosen]
        sigmas = [self.sigmas[ind._ps[1]] if ind._ps[0] == "o" else None
                  for ind in chosen]
        invCholesky = [self.invCholesky[ind._ps[1]].copy()
                       if ind._ps[0] == "o" else None for ind in chosen]
        A = [self.A[ind._ps[1]].copy() if ind._ps[0] == "o" else None
             for ind in chosen]
        pc = [self.pc[ind._ps[1]].copy() if ind._ps[0] == "o" else None
              for ind in chosen]
        psucc = [self.psucc[ind._ps[1]] if ind._ps[0] == "o" else None
                 for ind in chosen]

        # Update the internal parameters for successful offspring
        for i, ind in enumerate(chosen):
            t, p_idx = ind._ps

            # Only the offspring update the parameter set
            if t == "o":
                # Update (Success = 1 since it is chosen)
                psucc[i] = (1.0 - cp) * psucc[i] + cp
                sigmas[i] = sigmas[i] * exp((psucc[i] - ptarg)
                                            / (d * (1.0 - ptarg)))

                if psucc[i] < pthresh:
                    # Cumulate the realized step and apply the rank-one update.
                    xp = numpy.array(ind)
                    x = numpy.array(self.parents[p_idx])
                    pc[i] = (1.0 - cc) * pc[i] \
                        + sqrt(cc * (2.0 - cc)) * (xp - x) / last_steps[i]
                    invCholesky[i], A[i] = self._rankOneUpdate(
                        invCholesky[i], A[i], 1 - ccov, ccov, pc[i])
                else:
                    pc[i] = (1.0 - cc) * pc[i]
                    pc_weight = cc * (2.0 - cc)
                    invCholesky[i], A[i] = self._rankOneUpdate(
                        invCholesky[i], A[i], 1 - ccov + pc_weight, ccov, pc[i])

                # The parent of a successful offspring is also credited.
                self.psucc[p_idx] = (1.0 - cp) * self.psucc[p_idx] + cp
                self.sigmas[p_idx] = self.sigmas[p_idx] \
                    * exp((self.psucc[p_idx] - ptarg) / (d * (1.0 - ptarg)))

        # It is unnecessary to update the entire parameter set for not chosen individuals
        # Their parameters will not make it to the next generation
        for ind in not_chosen:
            t, p_idx = ind._ps

            # Only the offspring update the parameter set
            if t == "o":
                self.psucc[p_idx] = (1.0 - cp) * self.psucc[p_idx]
                self.sigmas[p_idx] = self.sigmas[p_idx] \
                    * exp((self.psucc[p_idx] - ptarg) / (d * (1.0 - ptarg)))

        # Make a copy of the internal parameters
        # The parameter is in the temporary variable for offspring and in the original one for parents
        self.parents = chosen
        self.sigmas = [sigmas[i] if ind._ps[0] == "o"
                       else self.sigmas[ind._ps[1]]
                       for i, ind in enumerate(chosen)]
        self.invCholesky = [invCholesky[i] if ind._ps[0] == "o"
                            else self.invCholesky[ind._ps[1]]
                            for i, ind in enumerate(chosen)]
        self.A = [A[i] if ind._ps[0] == "o" else self.A[ind._ps[1]]
                  for i, ind in enumerate(chosen)]
        self.pc = [pc[i] if ind._ps[0] == "o" else self.pc[ind._ps[1]]
                   for i, ind in enumerate(chosen)]
        self.psucc = [psucc[i] if ind._ps[0] == "o"
                      else self.psucc[ind._ps[1]]
                      for i, ind in enumerate(chosen)]


class StrategyActiveOnePlusLambda(object):
    """A CMA-ES strategy that combines the :math:`(1 + \\lambda)` paradigm
    [Igel2007]_, the mixed integer modification [Hansen2011]_, active
    covariance update [Arnold2010]_ and constraint handling [Arnold2012]_.

    This version of CMA-ES requires the random vector and the mutation
    that created each individual. The vector and mutation are stored in each
    individual as :attr:`_z` and :attr:`_y` respectively. Updating with
    individuals not containing these attributes will result in an
    :class:`AttributeError`.

    Notes:
        When using this strategy (especially when using constraints) you
        should monitor the strategy :attr:`condition_number`.
        If it goes above a given
        threshold (say :math:`10^{12}`), you should think of restarting the
        optimization as the covariance matrix is going degenerate. See the
        constrained active CMA-ES example for a simple example of restart.

    :param parent: An iterable object that indicates where to start the
                   evolution. The parent requires a fitness attribute.
    :param sigma: The initial standard deviation of the distribution.
    :param step: The minimal step size for each dimension. Use 0 for
                 continuous dimensions.
    :param lambda_: Number of offspring to produce from the parent.
                    (optional, defaults to 1)
    :param **kwargs: One or more parameter to pass to the strategy as
                     described in the following table. (optional)

    +----------------+---------------------------+------------------------------+
    | Parameter      | Default                   | Details                      |
    +================+===========================+==============================+
    | ``d``          | ``1.0 + N / (2.0 *        | Damping for step-size.       |
    |                | lambda_)``                |                              |
    +----------------+---------------------------+------------------------------+
    | ``ptarg``      | ``1.0 / (5 + sqrt(lambda_)| Target success rate          |
    |                | / 2.0)``                  | (from 1 + lambda algorithm). |
    +----------------+---------------------------+------------------------------+
    | ``cp``         | ``ptarg * lambda_ / (2.0 +| Step size learning rate.     |
    |                | ptarg * lambda_)``        |                              |
    +----------------+---------------------------+------------------------------+
    | ``cc``         | ``2.0 / (N + 2.0)``       | Cumulation time horizon.     |
    +----------------+---------------------------+------------------------------+
    | ``ccov``       | ``2.0 / (N**2 + 6.0)``    | Covariance matrix learning   |
    |                |                           | rate.                        |
    +----------------+---------------------------+------------------------------+
    | ``ccovn``      | ``0.4 / (N**1.6 + 1.0)``  | Covariance matrix negative   |
    |                |                           | learning rate.               |
    +----------------+---------------------------+------------------------------+
    | ``cconst``     | ``1.0 / (N + 2.0)``       | Constraint vectors learning  |
    |                |                           | rate.                        |
    +----------------+---------------------------+------------------------------+
    | ``beta``       | ``0.1 / (lambda_ * (N +   | Covariance matrix learning   |
    |                | 2.0))``                   | rate for constraints.        |
    |                |                           |                              |
    +----------------+---------------------------+------------------------------+
    | ``pthresh``    | ``0.44``                  | Threshold success rate.      |
    +----------------+---------------------------+------------------------------+

    .. [Igel2007] Igel, Hansen and Roth. Covariance matrix adaptation for
       multi-objective optimization. 2007

    .. [Arnold2010] Arnold and Hansen. Active covariance matrix adaptation for
       the (1+1)-CMA-ES. 2010.

    .. [Hansen2011] Hansen. A CMA-ES for Mixed-Integer Nonlinear Optimization.
       Research Report RR-7751, INRIA. 2011

    .. [Arnold2012] Arnold and Hansen. A (1+1)-CMA-ES for Constrained
       Optimisation. 2012
    """
    def __init__(self, parent, sigma, steps, **kargs):
        self.parent = parent
        self.sigma = sigma
        self.dim = len(self.parent)

        # Cholesky factor of the covariance matrix and its inverse,
        # maintained incrementally (no refactorization on update).
        self.A = numpy.identity(self.dim)
        self.invA = numpy.identity(self.dim)
        self.condition_number = numpy.linalg.cond(self.A)

        # Evolution path for the rank-one covariance update.
        self.pc = numpy.zeros(self.dim)

        # Save parameters
        self.params = kargs.copy()

        # Covariance matrix adaptation
        self.cc = self.params.get("cc", 2.0 / (self.dim + 2.0))
        self.ccovp = self.params.get("ccovp", 2.0 / (self.dim ** 2 + 6.0))
        self.ccovn = self.params.get("ccovn", 0.4 / (self.dim ** 1.6 + 1.0))
        self.cconst = self.params.get("cconst", 1.0 / (self.dim + 2.0))
        self.pthresh = self.params.get("pthresh", 0.44)

        # Assigning lambda_ triggers _compute_lambda_parameters via the setter.
        self.lambda_ = self.params.get("lambda_", 1)

        self.psucc = self.ptarg

        # Integer step sizes; i_I_R indexes the dimensions whose integer step
        # dominates the continuous mutation (2*sigma*sqrt(C_ii) < step).
        self.S_int = numpy.array(steps)
        self.i_I_R = numpy.flatnonzero(2 * self.sigma
                                       * numpy.diag(self.A)**0.5 < self.S_int)

        self.constraint_vecs = None
        self.ancestors_fitness = list()

    @property
    def lambda_(self):
        return self._lambda

    @lambda_.setter
    def lambda_(self, value):
        # Changing lambda_ invalidates the lambda-dependent rates.
        self._lambda = value
        self._compute_lambda_parameters()

    def _compute_lambda_parameters(self):
        """Computes the parameters depending on :math:`\lambda`. It needs to
        be called again if :math:`\lambda` changes during evolution.
        """
        # Step size control :
        self.d = self.params.get("d", 1.0 + self.dim / (2.0 * self.lambda_))
        self.ptarg = self.params.get("ptarg",
                                     1.0 / (5 + numpy.sqrt(self.lambda_)
                                            / 2.0))
        self.cp = self.params.get("cp", (self.ptarg * self.lambda_
                                         / (2 + self.ptarg * self.lambda_)))

        self.beta = self.params.get("beta", 0.1 / (self.lambda_
                                                   * (self.dim + 2.0)))

    def generate(self, ind_init):
        """Generate a population of :math:`\lambda` individuals of type
        *ind_init* from the current strategy.

        :param ind_init: A function object that is able to initialize an
                         individual from a list.
        :returns: A list of individuals.
        """
        # Generate individuals
        z = numpy.random.standard_normal((self.lambda_, self.dim))
        y = numpy.dot(self.A, z.T).T
        x = self.parent + self.sigma * y + self.S_int * self._integer_mutation()

        if any(self.S_int > 0):
            # Bring values to the integer steps
            round_values = numpy.tile(self.S_int > 0, (self.lambda_, 1))
            steps = numpy.tile(self.S_int, (self.lambda_, 1))
            x[round_values] = steps[round_values] \
                * numpy.around(x[round_values] / steps[round_values])

        # The update method requires to remember the y of each individual
        population = list(map(ind_init, x))
        for ind, yi, zi in zip(population, y, z):
            ind._y = yi
            ind._z = zi

        return population

    def _integer_mutation(self):
        n_I_R = self.i_I_R.shape[0]

        # Mixed integer CMA-ES is developed for (mu/mu , lambda)
        # We have a (1 + lambda) setting, thus we make the integer mutation
        # probabilistic. The integer mutation is lambda / 2 if all dimensions
        # are integers or min(lambda / 2 - 1, lambda / 10 + n_I_R + 1). The minus
        # 1 accounts for the last new candidate getting its integer mutation from
        # the last best solution. We skip this last best solution part.
        if n_I_R == 0:
            # No dimension needs an integer mutation.
            return numpy.zeros((self.lambda_, self.dim))
        elif n_I_R == self.dim:
            p = self.lambda_ / 2.0 / self.lambda_
            # lambda_int = int(numpy.floor(self.lambda_ / 2))
        else:
            p = (min(self.lambda_ / 2.0,
                     self.lambda_ / 10.0 + n_I_R / self.dim)
                 / self.lambda_)
            # lambda_int = int(min(numpy.floor(self.lambda_ / 10) + n_I_R + 1,
            #                      numpy.floor(self.lambda_ / 2) - 1))

        Rp = numpy.zeros((self.lambda_, self.dim))
        Rpp = numpy.zeros((self.lambda_, self.dim))

        # Ri' has exactly one of its components set to one.
        # The Ri' are dependent in that the number of mutations for each coordinate
        # differs at most by one
        for i, j in zip(range(self.lambda_), cycle(self.i_I_R)):
            # Probabilistically choose lambda_int individuals
            if numpy.random.rand() < p:
                Rp[i, j] = 1
                Rpp[i, j] = numpy.random.geometric(p=0.7**(1.0/n_I_R)) - 1

        # Random sign per component applied to the combined mutation.
        I_pm1 = (-1)**numpy.random.randint(0, 2, (self.lambda_, self.dim))
        R_int = I_pm1 * (Rp + Rpp)

        # Usually in mu/mu, lambda the last individual is set to the step taken.
        # We don't use this scheme in the 1 + lambda scheme
        # if self.update_count > 0:
        #     R_int[-1, :] = (numpy.floor(-self.S_int - self.last_best)
        #                     - numpy.floor(-self.S_int - self.centroid))

        return R_int

    def _rank1update(self, individual, p_succ):
        # Success-based step-size control plus rank-one (and active negative)
        # updates of the Cholesky factor A and its inverse.
        update_cov = False
        self.psucc = (1 - self.cp) * self.psucc + self.cp * p_succ

        if not hasattr(self.parent, "fitness") \
                or self.parent.fitness <= individual.fitness:
            # The offspring matches or beats the parent: accept it.
            self.parent = copy.deepcopy(individual)
            self.ancestors_fitness.append(copy.deepcopy(individual.fitness))
            if len(self.ancestors_fitness) > 5:
                # NOTE(review): pop() discards the *newest* entry, so
                # ancestors_fitness[0] stays the first-ever ancestor; a
                # 5-deep FIFO window (pop(0)) may have been intended --
                # confirm against the active-update rule of Arnold2010.
                self.ancestors_fitness.pop()

            # Must guard if pc is all 0 to prevent w_norm_sqrd to be 0
            if self.psucc < self.pthresh or numpy.allclose(self.pc, 0):
                self.pc = (1 - self.cc) * self.pc + \
                    (numpy.sqrt(self.cc * (2 - self.cc)) * individual._y)

                a = numpy.sqrt(1 - self.ccovp)
                w = numpy.dot(self.invA, self.pc)
                w_norm_sqrd = numpy.linalg.norm(w) ** 2
                b = numpy.sqrt(1 - self.ccovp) / w_norm_sqrd \
                    * (numpy.sqrt(1 + self.ccovp / (1 - self.ccovp)
                                  * w_norm_sqrd) - 1)

            else:
                # High success rate: fade pc and fold the missing cumulation
                # mass into the update coefficients.
                self.pc = (1 - self.cc) * self.pc

                d = self.ccovp * (1 + self.cc * (2 - self.cc))
                a = numpy.sqrt(1 - d)
                w = numpy.dot(self.invA, self.pc)
                w_norm_sqrd = numpy.linalg.norm(w) ** 2
                b = numpy.sqrt(1 - d) \
                    * (numpy.sqrt(1 + self.ccovp * w_norm_sqrd / (1 - d)) - 1) \
                    / w_norm_sqrd

            update_cov = True
        elif len(self.ancestors_fitness) >= 5 \
                and individual.fitness < self.ancestors_fitness[0] \
                and self.psucc < self.pthresh:
            # Active covariance update requires w = z and not w = inv(A)s
            w = individual._z
            w_norm_sqrd = numpy.linalg.norm(w) ** 2
            # Cap the negative learning rate so the update stays well defined.
            if 1 < self.ccovn * (2 * w_norm_sqrd - 1):
                ccovn = 1 / (2 * w_norm_sqrd - 1)
            else:
                ccovn = self.ccovn

            a = numpy.sqrt(1 + ccovn)
            b = numpy.sqrt(1 + ccovn) / w_norm_sqrd \
                * (numpy.sqrt(1 - ccovn / (1 + ccovn) * w_norm_sqrd) - 1)
            update_cov = True

        if update_cov:
            # Apply the rank-one update to A and the matching Sherman-Morrison
            # style update to its inverse.
            self.A = self.A * a + b * numpy.outer(numpy.dot(self.A, w), w)
            self.invA = (1 / a * self.invA
                         - b / (a ** 2 + a * b * w_norm_sqrd)
                         * numpy.dot(self.invA, numpy.outer(w, w)))

        # TODO: Add integer mutation i_I_R component
        self.sigma = self.sigma * numpy.exp(1.0 / self.d
                                            * ((self.psucc - self.ptarg)
                                               / (1.0 - self.ptarg)))

    def _infeasible_update(self, individual):
        # Learn constraint normals from an infeasible individual and shrink A
        # along the violated directions (Arnold2012).
        if not hasattr(individual.fitness, "constraint_violation"):
            return

        if self.constraint_vecs is None:
            shape = len(individual.fitness.constraint_violation), self.dim
            self.constraint_vecs = numpy.zeros(shape)

        for i in range(self.constraint_vecs.shape[0]):
            if individual.fitness.constraint_violation[i]:
                self.constraint_vecs[i] = (1 - self.cconst) * self.constraint_vecs[i] \
                    + self.cconst * individual._y

        W = numpy.dot(self.invA, self.constraint_vecs.T).T  # M x N
        constraint_violation = numpy.sum(individual.fitness.constraint_violation)

        A_prime = (
            self.A - self.beta / constraint_violation
            * numpy.sum(
                list(
                    numpy.outer(self.constraint_vecs[i], W[i])
                    / numpy.dot(W[i], W[i])
                    for i in range(self.constraint_vecs.shape[0])
                    if individual.fitness.constraint_violation[i]
                ),
                axis=0
            )
        )

        try:
            self.invA = numpy.linalg.inv(A_prime)
        except numpy.linalg.LinAlgError:
            # Keep the previous A/invA pair consistent when A' is singular.
            warnings.warn("Singular matrix inversion, "
                          "invalid update in CMA-ES ignored", RuntimeWarning)
        else:
            self.A = A_prime

    def update(self, population):
        """Update the current covariance matrix strategy from the *population*.

        :param population: A list of individuals from which to update the
                           parameters.
        """
        valid_population = [ind for ind in population if ind.fitness.valid]
        invalid_population = [ind for ind in population
                              if not ind.fitness.valid]

        if len(valid_population) > 0:
            # Rank 1 update
            valid_population.sort(key=lambda ind: ind.fitness, reverse=True)
            if not hasattr(self.parent, "fitness"):
                lambda_succ = len(valid_population)
            else:
                lambda_succ = sum(self.parent.fitness <= ind.fitness
                                  for ind in valid_population)
            # Use len(valid) to not account for individuals violating constraints
            self._rank1update(valid_population[0],
                              float(lambda_succ) / len(valid_population))

        if len(invalid_population) > 0 :
            # Learn constraint from all invalid individuals
            for ind in invalid_population:
                self._infeasible_update(ind)

        # Used to monitor the covariance matrix conditioning
        self.condition_number = numpy.linalg.cond(self.A)

        # Recompute the set of dimensions dominated by their integer step.
        C = numpy.dot(self.A, self.A.T)
        self.i_I_R = numpy.flatnonzero(2 * self.sigma * numpy.diag(C)**0.5
                                       < self.S_int)
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0
deap-1.4.1/deap/creator.py0000644000076500000240000001602214456461441014616 0ustar00runnerstaff# This file is part of DEAP.
#
# DEAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# DEAP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with DEAP. If not, see <http://www.gnu.org/licenses/>.

"""The :mod:`~deap.creator` is a meta-factory allowing to create classes
that will fulfill the needs of your evolutionary algorithms. In effect, new
classes can be built from any imaginable type, from :class:`list` to
:class:`set`, :class:`dict`, :class:`~deap.gp.PrimitiveTree` and more,
providing the possibility to implement genetic algorithms, genetic
programming, evolution strategies, particle swarm optimizers, and many more.
"""

import array
import copy
import copyreg
import warnings

class_replacers = {}
"""Some classes in Python's standard library as well as third party library
may be in part incompatible with the logic used in DEAP. To palliate
this problem, the method :func:`create` uses the dictionary
`class_replacers` to identify if the base type provided is problematic, and
if so the new class inherits from the replacement class instead of the
original base class.

`class_replacers` keys are classes to be replaced and the values are the
replacing classes.
"""

try:
    import numpy
    # Probe the attributes actually used below, so AttributeError is raised
    # here (and caught) rather than later at class-definition time.
    _ = (numpy.ndarray, numpy.array)
except ImportError:
    # Numpy is not present, skip the definition of the replacement class.
    pass
except AttributeError:
    # Numpy is present, but there is either no ndarray or array in numpy,
    # also skip the definition of the replacement class.
    pass
else:
    class _numpy_array(numpy.ndarray):
        def __deepcopy__(self, memo):
            """Overrides the deepcopy from numpy.ndarray that does not copy
            the object's attributes. This one will deepcopy the array and its
            :attr:`__dict__` attribute.
            """
            copy_ = numpy.ndarray.copy(self)
            copy_.__dict__.update(copy.deepcopy(self.__dict__, memo))
            return copy_

        @staticmethod
        def __new__(cls, iterable):
            """Creates a new instance of a numpy.ndarray from a function call.
            Adds the possibility to instantiate from an iterable."""
            return numpy.array(list(iterable)).view(cls)

        def __setstate__(self, state):
            # Restore the instance attributes saved by __reduce__.
            self.__dict__.update(state)

        def __reduce__(self):
            # Pickle as (class, contents, attributes) so subclass state survives.
            return (self.__class__, (list(self),), self.__dict__)

    class_replacers[numpy.ndarray] = _numpy_array


class _array(array.array):
    @staticmethod
    def __new__(cls, seq=()):
        # The typecode is taken from the subclass created by create(),
        # so callers only provide the initial sequence.
        return super(_array, cls).__new__(cls, cls.typecode, seq)

    def __deepcopy__(self, memo):
        """Overrides the deepcopy from array.array that does not copy
        the object's attributes and class type.
        """
        cls = self.__class__
        copy_ = cls.__new__(cls, self)
        memo[id(self)] = copy_
        copy_.__dict__.update(copy.deepcopy(self.__dict__, memo))
        return copy_

    def __reduce__(self):
        return (self.__class__, (list(self),), self.__dict__)

class_replacers[array.array] = _array


class MetaCreator(type):
    def __new__(cls, name, base, dct):
        # `base` is a single class; wrap it in a tuple for type.__new__.
        return super(MetaCreator, cls).__new__(cls, name, (base,), dct)

    def __init__(cls, name, base, dct):
        # A DeprecationWarning is raised when the object inherits from the
        # class "object" which leave the option of passing arguments, but
        # raise a warning stating that it will eventually stop permitting
        # this option. Usually this happens when the base class does not
        # override the __init__ method from object.

        # Split the declared attributes: classes become per-instance
        # attributes (instantiated in __init__), plain values become
        # class ("static") attributes.
        dict_inst = {}
        dict_cls = {}
        for obj_name, obj in dct.items():
            if isinstance(obj, type):
                dict_inst[obj_name] = obj
            else:
                dict_cls[obj_name] = obj

        def init_type(self, *args, **kargs):
            """Replace the __init__ function of the new type, in order to
            add attributes that were defined with **kargs to the instance.
""" for obj_name, obj in dict_inst.items(): setattr(self, obj_name, obj()) if base.__init__ is not object.__init__: base.__init__(self, *args, **kargs) cls.__init__ = init_type cls.reduce_args = (name, base, dct) super(MetaCreator, cls).__init__(name, (base,), dict_cls) def __reduce__(cls): return (meta_create, cls.reduce_args) copyreg.pickle(MetaCreator, MetaCreator.__reduce__) def meta_create(name, base, dct): class_ = MetaCreator(name, base, dct) globals()[name] = class_ return class_ def create(name, base, **kargs): """Creates a new class named *name* inheriting from *base* in the :mod:`~deap.creator` module. The new class can have attributes defined by the subsequent keyword arguments passed to the function create. If the argument is a class (without the parenthesis), the __init__ function is called in the initialization of an instance of the new object and the returned instance is added as an attribute of the class' instance. Otherwise, if the argument is not a class, (for example an :class:`int`), it is added as a "static" attribute of the class. :param name: The name of the class to create. :param base: A base class from which to inherit. :param attribute: One or more attributes to add on instantiation of this class, optional. The following is used to create a class :class:`Foo` inheriting from the standard :class:`list` and having an attribute :attr:`bar` being an empty dictionary and a static attribute :attr:`spam` initialized to 1. :: create("Foo", list, bar=dict, spam=1) This above line is exactly the same as defining in the :mod:`creator` module something like the following. :: class Foo(list): spam = 1 def __init__(self): self.bar = dict() The :ref:`creating-types` tutorial gives more examples of the creator usage. .. warning:: If your are inheriting from :class:`numpy.ndarray` see the :doc:`tutorials/advanced/numpy` tutorial and the :doc:`/examples/ga_onemax_numpy` example. 
""" if name in globals(): warnings.warn("A class named '{0}' has already been created and it " "will be overwritten. Consider deleting previous " "creation of that class or rename it.".format(name), RuntimeWarning) # Check if the base class has to be replaced if base in class_replacers: base = class_replacers[base] meta_create(name, base, kargs) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/deap/gp.py0000644000076500000240000014626514456461441013602 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . """The :mod:`gp` module provides the methods and classes to perform Genetic Programming with DEAP. It essentially contains the classes to build a Genetic Program Tree, and the functions to evaluate it. This module support both strongly and loosely typed GP. """ import copy import math import copyreg import random import re import sys import types import warnings from collections import defaultdict, deque from functools import partial, wraps from operator import eq, lt from . import tools # Needed by HARM-GP ###################################### # GP Data structure # ###################################### # Define the name of type for any types. __type__ = object class PrimitiveTree(list): """Tree specifically formatted for optimization of genetic programming operations. 
# NOTE(review): this region was re-flowed from a line-collapsed archive dump.
# Headers that straddled chunk boundaries (``__type__`` and the
# ``class PrimitiveTree`` line) are restated so the span is self-contained.

# Single universal type used by the untyped GP API.
__type__ = object


class PrimitiveTree(list):
    """Tree specifically formatted for optimization of genetic
    programming operations.

    The tree is represented with a list where the nodes are appended, or
    are assumed to have been appended, in a depth-first order.  Every node
    must expose an ``arity`` attribute; terminals have arity 0.
    """

    def __init__(self, content):
        list.__init__(self, content)

    def __deepcopy__(self, memo):
        # Nodes are shared between copies (they are treated as immutable);
        # only the instance dict (e.g. the fitness set by the creator) is
        # deep-copied.
        new = self.__class__(self)
        new.__dict__.update(copy.deepcopy(self.__dict__, memo))
        return new

    def __setitem__(self, key, val):
        # Check for most common errors.
        # Does NOT check for STGP constraints.
        if isinstance(key, slice):
            # NOTE(review): assumes an explicit slice start; a slice with
            # ``start=None`` would raise TypeError here -- confirm callers
            # always pass searchSubtree() slices.
            if key.start >= len(self):
                raise IndexError("Invalid slice object (try to assign a %s"
                                 " in a tree of size %d). Even if this is allowed by the"
                                 " list object slice setter, this should not be done in"
                                 " the PrimitiveTree context, as this may lead to an"
                                 " unpredictable behavior for searchSubtree or evaluate."
                                 % (key, len(self)))
            # A replacement subtree is complete iff the running arity
            # balance ends at zero.
            total = val[0].arity
            for node in val[1:]:
                total += node.arity - 1
            if total != 0:
                raise ValueError("Invalid slice assignation : insertion of"
                                 " an incomplete subtree is not allowed in PrimitiveTree."
                                 " A tree is defined as incomplete when some nodes cannot"
                                 " be mapped to any position in the tree, considering the"
                                 " primitives' arity. For instance, the tree [sub, 4, 5,"
                                 " 6] is incomplete if the arity of sub is 2, because it"
                                 " would produce an orphan node (the 6).")
        elif val.arity != self[key].arity:
            raise ValueError("Invalid node replacement with a node of a"
                             " different arity.")
        list.__setitem__(self, key, val)

    def __str__(self):
        """Return the expression in a human readable string."""
        string = ""
        stack = []
        for node in self:
            stack.append((node, []))
            # A node is complete once it has collected *arity* formatted
            # arguments; fold it back into its parent's argument list.
            while len(stack[-1][1]) == stack[-1][0].arity:
                prim, args = stack.pop()
                string = prim.format(*args)
                if len(stack) == 0:
                    break   # Stack empty: every node has been folded.
                stack[-1][1].append(string)
        return string

    @classmethod
    def from_string(cls, string, pset):
        """Try to convert a string expression into a PrimitiveTree given a
        PrimitiveSet *pset*. The primitive set needs to contain every
        primitive present in the expression.

        :param string: String representation of a Python expression.
        :param pset: Primitive set from which primitives are selected.
        :returns: PrimitiveTree populated with the deserialized primitives.
        :raises TypeError: when a token's type does not match the expected
                           return type, or a token cannot be evaluated.
        """
        tokens = re.split("[ \t\n\r\f\v(),]", string)
        expr = []
        # Queue of expected return types, fed by each primitive's argument
        # types; empty in untyped GP until a primitive is seen.
        ret_types = deque()
        for token in tokens:
            if token == '':
                continue
            if len(ret_types) != 0:
                type_ = ret_types.popleft()
            else:
                type_ = None

            if token in pset.mapping:
                primitive = pset.mapping[token]

                if type_ is not None and not issubclass(primitive.ret, type_):
                    raise TypeError("Primitive {} return type {} does not "
                                    "match the expected one: {}."
                                    .format(primitive, primitive.ret, type_))

                expr.append(primitive)
                if isinstance(primitive, Primitive):
                    ret_types.extendleft(reversed(primitive.args))
            else:
                # SECURITY NOTE: tokens outside the primitive set are
                # eval()'d -- only deserialize trusted expressions.
                try:
                    token = eval(token)
                except NameError:
                    raise TypeError("Unable to evaluate terminal: {}.".format(token))

                if type_ is None:
                    type_ = type(token)

                if not issubclass(type(token), type_):
                    raise TypeError("Terminal {} type {} does not "
                                    "match the expected one: {}."
                                    .format(token, type(token), type_))

                expr.append(Terminal(token, False, type_))
        return cls(expr)

    @property
    def height(self):
        """Return the height of the tree, or the depth of the
        deepest node.
        """
        stack = [0]
        max_depth = 0
        for elem in self:
            depth = stack.pop()
            max_depth = max(max_depth, depth)
            stack.extend([depth + 1] * elem.arity)
        return max_depth

    @property
    def root(self):
        """Root of the tree, the element 0 of the list."""
        return self[0]

    def searchSubtree(self, begin):
        """Return a slice object that corresponds to the range of values
        that defines the subtree which has the element with index *begin*
        as its root.
        """
        end = begin + 1
        total = self[begin].arity
        while total > 0:
            total += self[end].arity - 1
            end += 1
        return slice(begin, end)


class Primitive(object):
    """Class that encapsulates a primitive and when called with arguments it
    returns the Python code to call the primitive with the arguments.

    >>> pr = Primitive("mul", (int, int), int)
    >>> pr.format(1, 2)
    'mul(1, 2)'
    """
    __slots__ = ('name', 'arity', 'args', 'ret', 'seq')

    def __init__(self, name, args, ret):
        self.name = name
        self.arity = len(args)
        self.args = args
        self.ret = ret
        # Pre-built "name({0}, {1}, ...)" template used by format().
        args = ", ".join(map("{{{0}}}".format, range(self.arity)))
        self.seq = "{name}({args})".format(name=self.name, args=args)

    def format(self, *args):
        return self.seq.format(*args)

    def __eq__(self, other):
        if type(self) is type(other):
            return all(getattr(self, slot) == getattr(other, slot)
                       for slot in self.__slots__)
        else:
            return NotImplemented


class Terminal(object):
    """Class that encapsulates terminal primitive in expression. Terminals can
    be values or 0-arity functions.
    """
    __slots__ = ('name', 'value', 'ret', 'conv_fct')

    def __init__(self, terminal, symbolic, ret):
        self.ret = ret
        self.value = terminal
        self.name = str(terminal)
        # Symbolic terminals (e.g. argument names) print their name;
        # value terminals print their repr() so compile() can eval them.
        self.conv_fct = str if symbolic else repr

    @property
    def arity(self):
        return 0

    def format(self):
        return self.conv_fct(self.value)

    def __eq__(self, other):
        if type(self) is type(other):
            return all(getattr(self, slot) == getattr(other, slot)
                       for slot in self.__slots__)
        else:
            return NotImplemented


class MetaEphemeral(type):
    """Meta-Class that creates a terminal which value is set when the
    object is created. To mutate the value, a new object has to be
    generated.
    """
    # Generated classes cached by id() so that unpickling re-uses the
    # already created class instead of making a new one.
    cache = {}

    def __new__(meta, name, func, ret=__type__, id_=None):
        if id_ in MetaEphemeral.cache:
            return MetaEphemeral.cache[id_]

        # BUG FIX: the comparison string had been garbled to '' in this
        # dump; lambdas are detected by their standard '<lambda>' name.
        if isinstance(func, types.LambdaType) and func.__name__ == '<lambda>':
            warnings.warn("Ephemeral {name} function cannot be "
                          "pickled because its generating function "
                          "is a lambda function. Use functools.partial "
                          "instead.".format(name=name), RuntimeWarning)

        def __init__(self):
            # The ephemeral value is drawn once, at instantiation time.
            self.value = func()

        attr = {'__init__': __init__,
                'name': name,
                'func': func,
                'ret': ret,
                'conv_fct': repr}

        cls = super(MetaEphemeral, meta).__new__(meta, name, (Terminal,), attr)
        MetaEphemeral.cache[id(cls)] = cls
        return cls

    def __init__(cls, name, func, ret=__type__, id_=None):
        super(MetaEphemeral, cls).__init__(name, (Terminal,), {})

    def __reduce__(cls):
        return (MetaEphemeral, (cls.name, cls.func, cls.ret, id(cls)))


copyreg.pickle(MetaEphemeral, MetaEphemeral.__reduce__)


class PrimitiveSetTyped(object):
    """Class that contains the primitives that can be used to solve a
    Strongly Typed GP problem. The set also defined the researched
    function return type, and input arguments type and number.
    """

    def __init__(self, name, in_types, ret_type, prefix="ARG"):
        self.terminals = defaultdict(list)
        self.primitives = defaultdict(list)
        self.arguments = []
        # Setting "__builtins__" to None avoids the context being polluted
        # by builtin functions when evaluating GP expressions.
        self.context = {"__builtins__": None}
        self.mapping = dict()
        self.terms_count = 0
        self.prims_count = 0

        self.name = name
        self.ret = ret_type
        self.ins = in_types
        # One symbolic terminal per input argument: ARG0, ARG1, ...
        for i, type_ in enumerate(in_types):
            arg_str = "{prefix}{index}".format(prefix=prefix, index=i)
            self.arguments.append(arg_str)
            term = Terminal(arg_str, True, type_)
            self._add(term)
            self.terms_count += 1

    def renameArguments(self, **kargs):
        """Rename function arguments with new names from *kargs*."""
        for i, old_name in enumerate(self.arguments):
            if old_name in kargs:
                new_name = kargs[old_name]
                self.arguments[i] = new_name
                self.mapping[new_name] = self.mapping[old_name]
                self.mapping[new_name].value = new_name
                del self.mapping[old_name]

    def _add(self, prim):
        # Register *prim* under its return type and every compatible
        # (super)type already known to the set.
        def addType(dict_, ret_type):
            if ret_type not in dict_:
                new_list = []
                # A new type inherits every item whose type is one of its
                # subclasses.
                for type_, list_ in dict_.items():
                    if issubclass(type_, ret_type):
                        for item in list_:
                            if item not in new_list:
                                new_list.append(item)
                dict_[ret_type] = new_list

        addType(self.primitives, prim.ret)
        addType(self.terminals, prim.ret)

        self.mapping[prim.name] = prim
        if isinstance(prim, Primitive):
            for type_ in prim.args:
                addType(self.primitives, type_)
                addType(self.terminals, type_)
            dict_ = self.primitives
        else:
            dict_ = self.terminals

        for type_ in dict_:
            if issubclass(prim.ret, type_):
                dict_[type_].append(prim)

    def addPrimitive(self, primitive, in_types, ret_type, name=None):
        """Add a primitive to the set.

        :param primitive: callable object or a function.
        :param in_types: list of primitives arguments' type
        :param ret_type: type returned by the primitive.
        :param name: alternative name for the primitive instead
                     of its __name__ attribute.
        """
        if name is None:
            name = primitive.__name__
        prim = Primitive(name, in_types, ret_type)

        assert name not in self.context or \
            self.context[name] is primitive, \
            "Primitives are required to have a unique name. " \
            "Consider using the argument 'name' to rename your " \
            "second '%s' primitive." % (name,)

        self._add(prim)
        self.context[prim.name] = primitive
        self.prims_count += 1

    def addTerminal(self, terminal, ret_type, name=None):
        """Add a terminal to the set. Terminals can be named using the
        optional *name* argument. This should be used: to define named
        constant (i.e.: pi); to speed the evaluation time when the object
        is long to build; when the object does not have a __repr__
        function that returns the code to build the object; when the
        object class is not a Python built-in.

        :param terminal: Object, or a function with no arguments.
        :param ret_type: Type of the terminal.
        :param name: defines the name of the terminal in the expression.
        """
        symbolic = False
        if name is None and callable(terminal):
            name = terminal.__name__

        assert name not in self.context, \
            "Terminals are required to have a unique name. " \
            "Consider using the argument 'name' to rename your " \
            "second %s terminal." % (name,)

        if name is not None:
            self.context[name] = terminal
            terminal = name
            symbolic = True
        elif terminal in (True, False):
            # To support True and False terminals with Python 2.
            self.context[str(terminal)] = terminal

        prim = Terminal(terminal, symbolic, ret_type)
        self._add(prim)
        self.terms_count += 1

    def addEphemeralConstant(self, name, ephemeral, ret_type):
        """Add an ephemeral constant to the set. An ephemeral constant
        is a no argument function that returns a random value. The value
        of the constant is constant for a Tree, but may differ from one
        Tree to another.

        :param name: name used to refers to this ephemeral type.
        :param ephemeral: function with no arguments returning a random
                          value.
        :param ret_type: type of the object returned by *ephemeral*.
        """
        # Idiom fix: "name not in" instead of "not name in".
        if name not in self.mapping:
            class_ = MetaEphemeral(name, ephemeral, ret_type)
        else:
            class_ = self.mapping[name]
            if class_.func is not ephemeral:
                raise Exception("Ephemerals with different functions should "
                                "be named differently, even between psets.")
            if class_.ret is not ret_type:
                raise Exception("Ephemerals with the same name and function "
                                "should have the same type, even between psets.")

        self._add(class_)
        self.terms_count += 1

    def addADF(self, adfset):
        """Add an Automatically Defined Function (ADF) to the set.

        :param adfset: PrimitiveSetTyped containing the primitives with
                       which the ADF can be built.
        """
        prim = Primitive(adfset.name, adfset.ins, adfset.ret)
        self._add(prim)
        self.prims_count += 1

    @property
    def terminalRatio(self):
        """Return the ratio of the number of terminals on the number of all
        kind of primitives.
        """
        return self.terms_count / float(self.terms_count + self.prims_count)


class PrimitiveSet(PrimitiveSetTyped):
    """Class same as :class:`~deap.gp.PrimitiveSetTyped`, except there is no
    definition of type.
    """

    def __init__(self, name, arity, prefix="ARG"):
        args = [__type__] * arity
        PrimitiveSetTyped.__init__(self, name, args, __type__, prefix)

    def addPrimitive(self, primitive, arity, name=None):
        """Add primitive *primitive* with arity *arity* to the set.
        If a name *name* is provided, it will replace the attribute
        __name__ attribute to represent/identify the primitive.
        """
        assert arity > 0, "arity should be >= 1"
        args = [__type__] * arity
        PrimitiveSetTyped.addPrimitive(self, primitive, args, __type__, name)

    def addTerminal(self, terminal, name=None):
        """Add a terminal to the set."""
        PrimitiveSetTyped.addTerminal(self, terminal, __type__, name)

    def addEphemeralConstant(self, name, ephemeral):
        """Add an ephemeral constant to the set."""
        PrimitiveSetTyped.addEphemeralConstant(self, name, ephemeral, __type__)


######################################
# GP Tree compilation functions      #
######################################
def compile(expr, pset):
    """Compile the expression *expr*.

    :param expr: Expression to compile. It can either be a PrimitiveTree,
                 a string of Python code or any object that when
                 converted into string produced a valid Python code
                 expression.
    :param pset: Primitive set against which the expression is compile.
    :returns: a function if the primitive set has 1 or more arguments,
              or return the results produced by evaluating the tree.
    """
    code = str(expr)
    if len(pset.arguments) > 0:
        # This section is a stripped version of the lambdify
        # function of SymPy 0.6.6.
        args = ",".join(arg for arg in pset.arguments)
        code = "lambda {args}: {code}".format(args=args, code=code)
    try:
        return eval(code, pset.context, {})
    except MemoryError:
        _, _, traceback = sys.exc_info()
        raise MemoryError("DEAP : Error in tree evaluation :"
                          " Python cannot evaluate a tree higher than 90. "
                          "To avoid this problem, you should use bloat control on your "
                          "operators. See the DEAP documentation for more information. "
                          "DEAP will now abort.").with_traceback(traceback)


def compileADF(expr, psets):
    """Compile the expression represented by a list of trees. The first
    element of the list is the main tree, and the following elements are
    automatically defined functions (ADF) that can be called by the first
    tree.

    :param expr: Expression to compile. It can either be a PrimitiveTree,
                 a string of Python code or any object that when
                 converted into string produced a valid Python code
                 expression.
    :param psets: List of primitive sets. Each set corresponds to an ADF
                  while the last set is associated with the expression
                  and should contain reference to the preceding ADFs.
    :returns: a function if the main primitive set has 1 or more arguments,
              or return the results produced by evaluating the tree.
    """
    adfdict = {}
    func = None
    # Compile innermost ADFs first so each pset's context already contains
    # the functions defined after it in *psets*.
    for pset, subexpr in reversed(list(zip(psets, expr))):
        pset.context.update(adfdict)
        func = compile(subexpr, pset)
        adfdict.update({pset.name: func})
    return func
""" def condition(height, depth): """Expression generation stops when the depth is equal to height.""" return depth == height return generate(pset, min_, max_, condition, type_) def genGrow(pset, min_, max_, type_=None): """Generate an expression where each leaf might have a different depth between *min* and *max*. :param pset: Primitive set from which primitives are selected. :param min_: Minimum height of the produced trees. :param max_: Maximum Height of the produced trees. :param type_: The type that should return the tree when called, when :obj:`None` (default) the type of :pset: (pset.ret) is assumed. :returns: A grown tree with leaves at possibly different depths. """ def condition(height, depth): """Expression generation stops when the depth is equal to height or when it is randomly determined that a node should be a terminal. """ return depth == height or \ (depth >= min_ and random.random() < pset.terminalRatio) return generate(pset, min_, max_, condition, type_) def genHalfAndHalf(pset, min_, max_, type_=None): """Generate an expression with a PrimitiveSet *pset*. Half the time, the expression is generated with :func:`~deap.gp.genGrow`, the other half, the expression is generated with :func:`~deap.gp.genFull`. :param pset: Primitive set from which primitives are selected. :param min_: Minimum height of the produced trees. :param max_: Maximum Height of the produced trees. :param type_: The type that should return the tree when called, when :obj:`None` (default) the type of :pset: (pset.ret) is assumed. :returns: Either, a full or a grown tree. """ method = random.choice((genGrow, genFull)) return method(pset, min_, max_, type_) def genRamped(pset, min_, max_, type_=None): """ .. deprecated:: 1.0 The function has been renamed. Use :func:`~deap.gp.genHalfAndHalf` instead. """ warnings.warn("gp.genRamped has been renamed. 
Use genHalfAndHalf instead.", FutureWarning) return genHalfAndHalf(pset, min_, max_, type_) def generate(pset, min_, max_, condition, type_=None): """Generate a tree as a list of primitives and terminals in a depth-first order. The tree is built from the root to the leaves, and it stops growing the current branch when the *condition* is fulfilled: in which case, it back-tracks, then tries to grow another branch until the *condition* is fulfilled again, and so on. The returned list can then be passed to the constructor of the class *PrimitiveTree* to build an actual tree object. :param pset: Primitive set from which primitives are selected. :param min_: Minimum height of the produced trees. :param max_: Maximum Height of the produced trees. :param condition: The condition is a function that takes two arguments, the height of the tree to build and the current depth in the tree. :param type_: The type that should return the tree when called, when :obj:`None` (default) the type of :pset: (pset.ret) is assumed. :returns: A grown tree with leaves at possibly different depths depending on the condition function. """ if type_ is None: type_ = pset.ret expr = [] height = random.randint(min_, max_) stack = [(0, type_)] while len(stack) != 0: depth, type_ = stack.pop() if condition(height, depth): try: term = random.choice(pset.terminals[type_]) except IndexError: _, _, traceback = sys.exc_info() raise IndexError("The gp.generate function tried to add " "a terminal of type '%s', but there is " "none available." % (type_,)).with_traceback(traceback) if type(term) is MetaEphemeral: term = term() expr.append(term) else: try: prim = random.choice(pset.primitives[type_]) except IndexError: _, _, traceback = sys.exc_info() raise IndexError("The gp.generate function tried to add " "a primitive of type '%s', but there is " "none available." 
% (type_,)).with_traceback(traceback) expr.append(prim) for arg in reversed(prim.args): stack.append((depth + 1, arg)) return expr ###################################### # GP Crossovers # ###################################### def cxOnePoint(ind1, ind2): """Randomly select crossover point in each individual and exchange each subtree with the point as root between each individual. :param ind1: First tree participating in the crossover. :param ind2: Second tree participating in the crossover. :returns: A tuple of two trees. """ if len(ind1) < 2 or len(ind2) < 2: # No crossover on single node tree return ind1, ind2 # List all available primitive types in each individual types1 = defaultdict(list) types2 = defaultdict(list) if ind1.root.ret == __type__: # Not STGP optimization types1[__type__] = list(range(1, len(ind1))) types2[__type__] = list(range(1, len(ind2))) common_types = [__type__] else: for idx, node in enumerate(ind1[1:], 1): types1[node.ret].append(idx) for idx, node in enumerate(ind2[1:], 1): types2[node.ret].append(idx) common_types = set(types1.keys()).intersection(set(types2.keys())) if len(common_types) > 0: type_ = random.choice(list(common_types)) index1 = random.choice(types1[type_]) index2 = random.choice(types2[type_]) slice1 = ind1.searchSubtree(index1) slice2 = ind2.searchSubtree(index2) ind1[slice1], ind2[slice2] = ind2[slice2], ind1[slice1] return ind1, ind2 def cxOnePointLeafBiased(ind1, ind2, termpb): """Randomly select crossover point in each individual and exchange each subtree with the point as root between each individual. :param ind1: First typed tree participating in the crossover. :param ind2: Second typed tree participating in the crossover. :param termpb: The probability of choosing a terminal node (leaf). :returns: A tuple of two typed trees. When the nodes are strongly typed, the operator makes sure the second node type corresponds to the first node type. 
The parameter *termpb* sets the probability to choose between a terminal or non-terminal crossover point. For instance, as defined by Koza, non- terminal primitives are selected for 90% of the crossover points, and terminals for 10%, so *termpb* should be set to 0.1. """ if len(ind1) < 2 or len(ind2) < 2: # No crossover on single node tree return ind1, ind2 # Determine whether to keep terminals or primitives for each individual terminal_op = partial(eq, 0) primitive_op = partial(lt, 0) arity_op1 = terminal_op if random.random() < termpb else primitive_op arity_op2 = terminal_op if random.random() < termpb else primitive_op # List all available primitive or terminal types in each individual types1 = defaultdict(list) types2 = defaultdict(list) for idx, node in enumerate(ind1[1:], 1): if arity_op1(node.arity): types1[node.ret].append(idx) for idx, node in enumerate(ind2[1:], 1): if arity_op2(node.arity): types2[node.ret].append(idx) common_types = set(types1.keys()).intersection(set(types2.keys())) if len(common_types) > 0: # Set does not support indexing type_ = random.sample(common_types, 1)[0] index1 = random.choice(types1[type_]) index2 = random.choice(types2[type_]) slice1 = ind1.searchSubtree(index1) slice2 = ind2.searchSubtree(index2) ind1[slice1], ind2[slice2] = ind2[slice2], ind1[slice1] return ind1, ind2 ###################################### # GP Mutations # ###################################### def mutUniform(individual, expr, pset): """Randomly select a point in the tree *individual*, then replace the subtree at that point as a root by the expression generated using method :func:`expr`. :param individual: The tree to be mutated. :param expr: A function object that can generate an expression when called. :returns: A tuple of one tree. 
""" index = random.randrange(len(individual)) slice_ = individual.searchSubtree(index) type_ = individual[index].ret individual[slice_] = expr(pset=pset, type_=type_) return individual, def mutNodeReplacement(individual, pset): """Replaces a randomly chosen primitive from *individual* by a randomly chosen primitive with the same number of arguments from the :attr:`pset` attribute of the individual. :param individual: The normal or typed tree to be mutated. :returns: A tuple of one tree. """ if len(individual) < 2: return individual, index = random.randrange(1, len(individual)) node = individual[index] if node.arity == 0: # Terminal term = random.choice(pset.terminals[node.ret]) if type(term) is MetaEphemeral: term = term() individual[index] = term else: # Primitive prims = [p for p in pset.primitives[node.ret] if p.args == node.args] individual[index] = random.choice(prims) return individual, def mutEphemeral(individual, mode): """This operator works on the constants of the tree *individual*. In *mode* ``"one"``, it will change the value of one of the individual ephemeral constants by calling its generator function. In *mode* ``"all"``, it will change the value of **all** the ephemeral constants. :param individual: The normal or typed tree to be mutated. :param mode: A string to indicate to change ``"one"`` or ``"all"`` ephemeral constants. :returns: A tuple of one tree. """ if mode not in ["one", "all"]: raise ValueError("Mode must be one of \"one\" or \"all\"") ephemerals_idx = [index for index, node in enumerate(individual) if isinstance(type(node), MetaEphemeral)] if len(ephemerals_idx) > 0: if mode == "one": ephemerals_idx = (random.choice(ephemerals_idx),) for i in ephemerals_idx: individual[i] = type(individual[i])() return individual, def mutInsert(individual, pset): """Inserts a new branch at a random position in *individual*. 
The subtree at the chosen position is used as child node of the created subtree, in that way, it is really an insertion rather than a replacement. Note that the original subtree will become one of the children of the new primitive inserted, but not perforce the first (its position is randomly selected if the new primitive has more than one child). :param individual: The normal or typed tree to be mutated. :returns: A tuple of one tree. """ index = random.randrange(len(individual)) node = individual[index] slice_ = individual.searchSubtree(index) choice = random.choice # As we want to keep the current node as children of the new one, # it must accept the return value of the current node primitives = [p for p in pset.primitives[node.ret] if node.ret in p.args] if len(primitives) == 0: return individual, new_node = choice(primitives) new_subtree = [None] * len(new_node.args) position = choice([i for i, a in enumerate(new_node.args) if a == node.ret]) for i, arg_type in enumerate(new_node.args): if i != position: term = choice(pset.terminals[arg_type]) if isclass(term): term = term() new_subtree[i] = term new_subtree[position:position + 1] = individual[slice_] new_subtree.insert(0, new_node) individual[slice_] = new_subtree return individual, def mutShrink(individual): """This operator shrinks the *individual* by choosing randomly a branch and replacing it with one of the branch's arguments (also randomly chosen). :param individual: The tree to be shrunk. :returns: A tuple of one tree. 
""" # We don't want to "shrink" the root if len(individual) < 3 or individual.height <= 1: return individual, iprims = [] for i, node in enumerate(individual[1:], 1): if isinstance(node, Primitive) and node.ret in node.args: iprims.append((i, node)) if len(iprims) != 0: index, prim = random.choice(iprims) arg_idx = random.choice([i for i, type_ in enumerate(prim.args) if type_ == prim.ret]) rindex = index + 1 for _ in range(arg_idx + 1): rslice = individual.searchSubtree(rindex) subtree = individual[rslice] rindex += len(subtree) slice_ = individual.searchSubtree(index) individual[slice_] = subtree return individual, ###################################### # GP bloat control decorators # ###################################### def staticLimit(key, max_value): """Implement a static limit on some measurement on a GP tree, as defined by Koza in [Koza1989]. It may be used to decorate both crossover and mutation operators. When an invalid (over the limit) child is generated, it is simply replaced by one of its parents, randomly selected. This operator can be used to avoid memory errors occurring when the tree gets higher than 90 levels (as Python puts a limit on the call stack depth), because it can ensure that no tree higher than this limit will ever be accepted in the population, except if it was generated at initialization time. :param key: The function to use in order the get the wanted value. For instance, on a GP tree, ``operator.attrgetter('height')`` may be used to set a depth limit, and ``len`` to set a size limit. :param max_value: The maximum value allowed for the given measurement. :returns: A decorator that can be applied to a GP operator using \ :func:`~deap.base.Toolbox.decorate` .. note:: If you want to reproduce the exact behavior intended by Koza, set *key* to ``operator.attrgetter('height')`` and *max_value* to 17. .. [Koza1989] J.R. 
Koza, Genetic Programming - On the Programming of Computers by Means of Natural Selection (MIT Press, Cambridge, MA, 1992) """ def decorator(func): @wraps(func) def wrapper(*args, **kwargs): keep_inds = [copy.deepcopy(ind) for ind in args] new_inds = list(func(*args, **kwargs)) for i, ind in enumerate(new_inds): if key(ind) > max_value: new_inds[i] = random.choice(keep_inds) return new_inds return wrapper return decorator ###################################### # GP bloat control algorithms # ###################################### def harm(population, toolbox, cxpb, mutpb, ngen, alpha, beta, gamma, rho, nbrindsmodel=-1, mincutoff=20, stats=None, halloffame=None, verbose=__debug__): """Implement bloat control on a GP evolution using HARM-GP, as defined in [Gardner2015]. It is implemented in the form of an evolution algorithm (similar to :func:`~deap.algorithms.eaSimple`). :param population: A list of individuals. :param toolbox: A :class:`~deap.base.Toolbox` that contains the evolution operators. :param cxpb: The probability of mating two individuals. :param mutpb: The probability of mutating an individual. :param ngen: The number of generation. :param alpha: The HARM *alpha* parameter. :param beta: The HARM *beta* parameter. :param gamma: The HARM *gamma* parameter. :param rho: The HARM *rho* parameter. :param nbrindsmodel: The number of individuals to generate in order to model the natural distribution. -1 is a special value which uses the equation proposed in [Gardner2015] to set the value of this parameter : max(2000, len(population)) :param mincutoff: The absolute minimum value for the cutoff point. It is used to ensure that HARM does not shrink the population too much at the beginning of the evolution. The default value is usually fine. :param stats: A :class:`~deap.tools.Statistics` object that is updated inplace, optional. :param halloffame: A :class:`~deap.tools.HallOfFame` object that will contain the best individuals, optional. 
    :param verbose: Whether or not to log the statistics.
    :returns: The final population
    :returns: A class:`~deap.tools.Logbook` with the statistics of the
              evolution

    This function expects the :meth:`toolbox.mate`, :meth:`toolbox.mutate`,
    :meth:`toolbox.select` and :meth:`toolbox.evaluate` aliases to be
    registered in the toolbox.

    .. note::
       The recommended values for the HARM-GP parameters are *alpha=0.05*,
       *beta=10*, *gamma=0.25*, *rho=0.9*. However, these parameters can be
       adjusted to perform better on a specific problem (see the relevant
       paper for tuning information). The number of individuals used to
       model the natural distribution and the minimum cutoff point are
       less important, their default value being effective in most cases.

    .. [Gardner2015] M.-A. Gardner, C. Gagne, and M. Parizeau, Controlling
        Code Growth by Dynamically Shaping the Genotype Size Distribution,
        Genetic Programming and Evolvable Machines, 2015,
        DOI 10.1007/s10710-015-9242-8
    """
    def _genpop(n, pickfrom=[], acceptfunc=lambda s: True, producesizes=False):
        # NOTE(review): mutable default ``pickfrom=[]`` -- harmless here
        # because the default is only length-tested, never popped, but
        # worth confirming before any reuse of this helper.
        # Generate a population of n individuals, using individuals in
        # *pickfrom* if possible, with a *acceptfunc* acceptance function.
        # If *producesizes* is true, also return a list of the produced
        # individuals sizes.
        # This function is used 1) to generate the natural distribution
        # (in this case, pickfrom and acceptfunc should be let at their
        # default values) and 2) to generate the final population, in which
        # case pickfrom should be the natural population previously generated
        # and acceptfunc a function implementing the HARM-GP algorithm.
        producedpop = []
        producedpopsizes = []
        while len(producedpop) < n:
            if len(pickfrom) > 0:
                # If possible, use the already generated
                # individuals (more efficient)
                aspirant = pickfrom.pop()
                if acceptfunc(len(aspirant)):
                    producedpop.append(aspirant)
                    if producesizes:
                        producedpopsizes.append(len(aspirant))
            else:
                opRandom = random.random()
                if opRandom < cxpb:
                    # Crossover
                    aspirant1, aspirant2 = toolbox.mate(*map(toolbox.clone,
                                                             toolbox.select(population, 2)))
                    del aspirant1.fitness.values, aspirant2.fitness.values
                    if acceptfunc(len(aspirant1)):
                        producedpop.append(aspirant1)
                        if producesizes:
                            producedpopsizes.append(len(aspirant1))
                    if len(producedpop) < n and acceptfunc(len(aspirant2)):
                        producedpop.append(aspirant2)
                        if producesizes:
                            producedpopsizes.append(len(aspirant2))
                else:
                    aspirant = toolbox.clone(toolbox.select(population, 1)[0])
                    if opRandom - cxpb < mutpb:
                        # Mutation
                        aspirant = toolbox.mutate(aspirant)[0]
                        del aspirant.fitness.values
                    if acceptfunc(len(aspirant)):
                        producedpop.append(aspirant)
                        if producesizes:
                            producedpopsizes.append(len(aspirant))

        if producesizes:
            return producedpop, producedpopsizes
        else:
            return producedpop

    def halflifefunc(x):
        # Linear half-life model used by the target distribution below.
        return x * float(alpha) + beta

    if nbrindsmodel == -1:
        nbrindsmodel = max(2000, len(population))

    logbook = tools.Logbook()
    logbook.header = ['gen', 'nevals'] + (stats.fields if stats else [])

    # Evaluate the individuals with an invalid fitness
    invalid_ind = [ind for ind in population if not ind.fitness.valid]
    fitnesses = toolbox.map(toolbox.evaluate, invalid_ind)
    for ind, fit in zip(invalid_ind, fitnesses):
        ind.fitness.values = fit

    if halloffame is not None:
        halloffame.update(population)

    record = stats.compile(population) if stats else {}
    logbook.record(gen=0, nevals=len(invalid_ind), **record)
    if verbose:
        print(logbook.stream)

    # Begin the generational process
    for gen in range(1, ngen + 1):
        # Estimation population natural distribution of sizes
        naturalpop, naturalpopsizes = _genpop(nbrindsmodel, producesizes=True)

        naturalhist = [0] * (max(naturalpopsizes) + 3)
        for indsize in naturalpopsizes:
            # Kernel density estimation application
            naturalhist[indsize] += 0.4
            naturalhist[indsize - 1] += 0.2
            naturalhist[indsize + 1] += 0.2
            naturalhist[indsize + 2] += 0.1
            if indsize - 2 >= 0:
                naturalhist[indsize - 2] += 0.1

        # Normalization
        naturalhist = [val * len(population) / nbrindsmodel
                       for val in naturalhist]

        # Cutoff point selection
        sortednatural = sorted(naturalpop, key=lambda ind: ind.fitness)
        cutoffcandidates = sortednatural[int(len(population) * rho - 1):]
        # Select the cutoff point, with an absolute minimum applied
        # to avoid weird cases in the first generations
        cutoffsize = max(mincutoff, len(min(cutoffcandidates, key=len)))

        # Compute the target distribution
        def targetfunc(x):
            return (gamma * len(population) * math.log(2) /
                    halflifefunc(x)) * math.exp(-math.log(2) *
                                                (x - cutoffsize) /
                                                halflifefunc(x))

        targethist = [naturalhist[binidx] if binidx <= cutoffsize else
                      targetfunc(binidx) for binidx in range(len(naturalhist))]

        # Compute the probabilities distribution
        probhist = [t / n if n > 0 else t for n, t in zip(naturalhist, targethist)]

        def probfunc(s):
            return probhist[s] if s < len(probhist) else targetfunc(s)

        def acceptfunc(s):
            return random.random() <= probfunc(s)

        # Generate offspring using the acceptance probabilities
        # previously computed
        offspring = _genpop(len(population), pickfrom=naturalpop,
                            acceptfunc=acceptfunc, producesizes=False)

        # Evaluate the individuals with an invalid fitness
        invalid_ind = [ind for ind in offspring if not ind.fitness.valid]
        fitnesses = toolbox.map(toolbox.evaluate, invalid_ind)
        for ind, fit in zip(invalid_ind, fitnesses):
            ind.fitness.values = fit

        # Update the hall of fame with the generated individuals
        if halloffame is not None:
            halloffame.update(offspring)

        # Replace the current population by the offspring
        population[:] = offspring

        # Append the current generation statistics to the logbook
        record = stats.compile(population) if stats else {}
        logbook.record(gen=gen,
nevals=len(invalid_ind), **record) if verbose: print(logbook.stream) return population, logbook def graph(expr): """Construct the graph of a tree expression. The tree expression must be valid. It returns in order a node list, an edge list, and a dictionary of the per node labels. The node are represented by numbers, the edges are tuples connecting two nodes (number), and the labels are values of a dictionary for which keys are the node numbers. :param expr: A tree expression to convert into a graph. :returns: A node list, an edge list, and a dictionary of labels. The returned objects can be used directly to populate a `pygraphviz `_ graph:: import pygraphviz as pgv # [...] Execution of code that produce a tree expression nodes, edges, labels = graph(expr) g = pgv.AGraph() g.add_nodes_from(nodes) g.add_edges_from(edges) g.layout(prog="dot") for i in nodes: n = g.get_node(i) n.attr["label"] = labels[i] g.draw("tree.pdf") or a `NetworX `_ graph:: import matplotlib.pyplot as plt import networkx as nx # [...] Execution of code that produce a tree expression nodes, edges, labels = graph(expr) g = nx.Graph() g.add_nodes_from(nodes) g.add_edges_from(edges) pos = nx.graphviz_layout(g, prog="dot") nx.draw_networkx_nodes(g, pos) nx.draw_networkx_edges(g, pos) nx.draw_networkx_labels(g, pos, labels) plt.show() .. note:: We encourage you to use `pygraphviz `_ as the nodes might be plotted out of order when using `NetworX `_. 
""" nodes = list(range(len(expr))) edges = list() labels = dict() stack = [] for i, node in enumerate(expr): if stack: edges.append((stack[-1][0], i)) stack[-1][1] -= 1 labels[i] = node.name if isinstance(node, Primitive) else node.value stack.append([i, node.arity]) while stack and stack[-1][1] == 0: stack.pop() return nodes, edges, labels ###################################### # GSGP Mutation # ###################################### def mutSemantic(individual, gen_func=genGrow, pset=None, ms=None, min=2, max=6): """ Implementation of the Semantic Mutation operator. [Geometric semantic genetic programming, Moraglio et al., 2012] mutated_individual = individual + logistic * (random_tree1 - random_tree2) :param individual: individual to mutate :param gen_func: function responsible for the generation of the random tree that will be used during the mutation :param pset: Primitive Set, which contains terminal and operands to be used during the evolution :param ms: Mutation Step :param min: min depth of the random tree :param max: max depth of the random tree :return: mutated individual The mutated contains the original individual >>> import operator >>> def lf(x): return 1 / (1 + math.exp(-x)); >>> pset = PrimitiveSet("main", 2) >>> pset.addPrimitive(operator.sub, 2) >>> pset.addTerminal(3) >>> pset.addPrimitive(lf, 1, name="lf") >>> pset.addPrimitive(operator.add, 2) >>> pset.addPrimitive(operator.mul, 2) >>> individual = genGrow(pset, 1, 3) >>> mutated = mutSemantic(individual, pset=pset, max=2) >>> ctr = sum([m.name == individual[i].name for i, m in enumerate(mutated[0])]) >>> ctr == len(individual) True """ for p in ['lf', 'mul', 'add', 'sub']: assert p in pset.mapping, "A '" + p + "' function is required in order to perform semantic mutation" tr1 = gen_func(pset, min, max) tr2 = gen_func(pset, min, max) # Wrap mutation with a logistic function tr1.insert(0, pset.mapping['lf']) tr2.insert(0, pset.mapping['lf']) if ms is None: ms = random.uniform(0, 2) mutation_step 
= Terminal(ms, False, object) # Create the root new_ind = individual new_ind.insert(0, pset.mapping["add"]) # Append the left branch new_ind.append(pset.mapping["mul"]) new_ind.append(mutation_step) new_ind.append(pset.mapping["sub"]) # Append the right branch new_ind.extend(tr1) new_ind.extend(tr2) return new_ind, def cxSemantic(ind1, ind2, gen_func=genGrow, pset=None, min=2, max=6): """ Implementation of the Semantic Crossover operator [Geometric semantic genetic programming, Moraglio et al., 2012] offspring1 = random_tree1 * ind1 + (1 - random_tree1) * ind2 offspring2 = random_tree1 * ind2 + (1 - random_tree1) * ind1 :param ind1: first parent :param ind2: second parent :param gen_func: function responsible for the generation of the random tree that will be used during the mutation :param pset: Primitive Set, which contains terminal and operands to be used during the evolution :param min: min depth of the random tree :param max: max depth of the random tree :return: offsprings The mutated offspring contains parents >>> import operator >>> def lf(x): return 1 / (1 + math.exp(-x)); >>> pset = PrimitiveSet("main", 2) >>> pset.addPrimitive(operator.sub, 2) >>> pset.addTerminal(3) >>> pset.addPrimitive(lf, 1, name="lf") >>> pset.addPrimitive(operator.add, 2) >>> pset.addPrimitive(operator.mul, 2) >>> ind1 = genGrow(pset, 1, 3) >>> ind2 = genGrow(pset, 1, 3) >>> new_ind1, new_ind2 = cxSemantic(ind1, ind2, pset=pset, max=2) >>> ctr = sum([n.name == ind1[i].name for i, n in enumerate(new_ind1)]) >>> ctr == len(ind1) True >>> ctr = sum([n.name == ind2[i].name for i, n in enumerate(new_ind2)]) >>> ctr == len(ind2) True """ for p in ['lf', 'mul', 'add', 'sub']: assert p in pset.mapping, "A '" + p + "' function is required in order to perform semantic crossover" tr = gen_func(pset, min, max) tr.insert(0, pset.mapping['lf']) new_ind1 = ind1 new_ind1.insert(0, pset.mapping["mul"]) new_ind1.insert(0, pset.mapping["add"]) new_ind1.extend(tr) new_ind1.append(pset.mapping["mul"]) 
new_ind1.append(pset.mapping["sub"]) new_ind1.append(Terminal(1.0, False, object)) new_ind1.extend(tr) new_ind1.extend(ind2) new_ind2 = ind2 new_ind2.insert(0, pset.mapping["mul"]) new_ind2.insert(0, pset.mapping["add"]) new_ind2.extend(tr) new_ind2.append(pset.mapping["mul"]) new_ind2.append(pset.mapping["sub"]) new_ind2.append(Terminal(1.0, False, object)) new_ind2.extend(tr) new_ind2.extend(ind1) return new_ind1, new_ind2 if __name__ == "__main__": import doctest doctest.testmod() ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1689936700.636043 deap-1.4.1/deap/tools/0000755000076500000240000000000014456461475013753 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/deap/tools/__init__.py0000644000076500000240000000247214456461441016062 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . """The :mod:`~deap.tools` module contains the operators for evolutionary algorithms. They are used to modify, select and move the individuals in their environment. The set of operators it contains are readily usable in the :class:`~deap.base.Toolbox`. In addition to the basic operators this module also contains utility tools to enhance the basic algorithms with :class:`Statistics`, :class:`HallOfFame`, and :class:`History`. 
""" from .constraint import * from .crossover import * from .emo import * from .indicator import * from .init import * from .migration import * from .mutation import * from .selection import * from .support import * ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1689936700.638874 deap-1.4.1/deap/tools/_hypervolume/0000755000076500000240000000000014456461475016471 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/deap/tools/_hypervolume/__init__.py0000644000076500000240000000126514456461441020577 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/deap/tools/_hypervolume/_hv.c0000644000076500000240000013255314456461441017413 0ustar00runnerstaff/************************************************************************* hypervolume computation --------------------------------------------------------------------- Copyright (c) 2010 Carlos M. Fonseca Manuel Lopez-Ibanez Luis Paquete Andreia P. Guerreiro This program is free software (software libre); you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. 
As a particular exception, the contents of this file (hv.c) may also be redistributed and/or modified under the terms of the GNU Lesser General Public License (LGPL) as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, you can obtain a copy of the GNU General Public License at: http://www.gnu.org/copyleft/gpl.html or by writing to: Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ---------------------------------------------------------------------- Relevant literature: [1] C. M. Fonseca, L. Paquete, and M. Lopez-Ibanez. An improved dimension-sweep algorithm for the hypervolume indicator. In IEEE Congress on Evolutionary Computation, pages 1157-1163, Vancouver, Canada, July 2006. [2] Nicola Beume, Carlos M. Fonseca, Manuel López-Ibáñez, Luís Paquete, and J. Vahrenhold. On the complexity of computing the hypervolume indicator. IEEE Transactions on Evolutionary Computation, 13(5):1075-1082, 2009. *************************************************************************/ #include "_hv.h" #include #include #include #include #include // Default to variant 4 without having to "make VARIANT=4" #define VARIANT 4 static int compare_tree_asc(const void *p1, const void *p2); /*----------------------------------------------------------------------------- The following is a reduced version of the AVL-tree library used here according to the terms of the GPL. See the copyright notice below. */ #define AVL_DEPTH /***************************************************************************** avl.h - Source code for the AVL-tree library. 
Copyright (C) 1998 Michael H. Buselli Copyright (C) 2000-2002 Wessel Dankers This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA Augmented AVL-tree. Original by Michael H. Buselli . Modified by Wessel Dankers to add a bunch of bloat to the sourcecode, change the interface and squash a few bugs. Mail him if you find new bugs. *****************************************************************************/ /* User supplied function to compare two items like strcmp() does. * For example: cmp(a,b) will return: * -1 if a < b * 0 if a = b * 1 if a > b */ typedef int (*avl_compare_t)(const void *, const void *); /* User supplied function to delete an item when a node is free()d. * If NULL, the item is not free()d. */ typedef void (*avl_freeitem_t)(void *); typedef struct avl_node_t { struct avl_node_t *next; struct avl_node_t *prev; struct avl_node_t *parent; struct avl_node_t *left; struct avl_node_t *right; void *item; double domr; #ifdef AVL_DEPTH unsigned char depth; #endif } avl_node_t; typedef struct avl_tree_t { avl_node_t *head; avl_node_t *tail; avl_node_t *top; avl_compare_t cmp; avl_freeitem_t freeitem; } avl_tree_t; /***************************************************************************** avl.c - Source code for the AVL-tree library. 
*****************************************************************************/ static void avl_rebalance(avl_tree_t *, avl_node_t *); #ifdef AVL_DEPTH #define NODE_DEPTH(n) ((n) ? (n)->depth : 0) #define L_DEPTH(n) (NODE_DEPTH((n)->left)) #define R_DEPTH(n) (NODE_DEPTH((n)->right)) #define CALC_DEPTH(n) ((L_DEPTH(n)>R_DEPTH(n)?L_DEPTH(n):R_DEPTH(n)) + 1) #endif static int avl_check_balance(avl_node_t *avlnode) { #ifdef AVL_DEPTH int d; d = R_DEPTH(avlnode) - L_DEPTH(avlnode); return d<-1?-1:d>1?1:0; #endif } static int avl_search_closest(const avl_tree_t *avltree, const void *item, avl_node_t **avlnode) { avl_node_t *node; int c; if(!avlnode) avlnode = &node; node = avltree->top; if(!node) return *avlnode = NULL, 0; for(;;) { c = compare_tree_asc(item, node->item); if(c < 0) { if(node->left) node = node->left; else return *avlnode = node, -1; } else if(c > 0) { if(node->right) node = node->right; else return *avlnode = node, 1; } else { return *avlnode = node, 0; } } } static avl_tree_t * avl_init_tree(avl_tree_t *rc, avl_compare_t cmp, avl_freeitem_t freeitem) { if(rc) { rc->head = NULL; rc->tail = NULL; rc->top = NULL; rc->cmp = cmp; rc->freeitem = freeitem; } return rc; } static avl_tree_t * avl_alloc_tree(avl_compare_t cmp, avl_freeitem_t freeitem) { return avl_init_tree(malloc(sizeof(avl_tree_t)), cmp, freeitem); } static void avl_clear_tree(avl_tree_t *avltree) { avltree->top = avltree->head = avltree->tail = NULL; } static void avl_clear_node(avl_node_t *newnode) { newnode->left = newnode->right = NULL; #ifdef AVL_COUNT newnode->count = 1; #endif #ifdef AVL_DEPTH newnode->depth = 1; #endif } static avl_node_t * avl_insert_top(avl_tree_t *avltree, avl_node_t *newnode) { avl_clear_node(newnode); newnode->prev = newnode->next = newnode->parent = NULL; avltree->head = avltree->tail = avltree->top = newnode; return newnode; } static avl_node_t * avl_insert_before(avl_tree_t *avltree, avl_node_t *node, avl_node_t *newnode) { /* if(!node) return avltree->tail ? 
avl_insert_after(avltree, avltree->tail, newnode) : avl_insert_top(avltree, newnode); if(node->left) return avl_insert_after(avltree, node->prev, newnode); */ assert (node); assert (!node->left); avl_clear_node(newnode); newnode->next = node; newnode->parent = node; newnode->prev = node->prev; if(node->prev) node->prev->next = newnode; else avltree->head = newnode; node->prev = newnode; node->left = newnode; avl_rebalance(avltree, node); return newnode; } static avl_node_t * avl_insert_after(avl_tree_t *avltree, avl_node_t *node, avl_node_t *newnode) { /* if(!node) return avltree->head ? avl_insert_before(avltree, avltree->head, newnode) : avl_insert_top(avltree, newnode); if(node->right) return avl_insert_before(avltree, node->next, newnode); */ assert (node); assert (!node->right); avl_clear_node(newnode); newnode->prev = node; newnode->parent = node; newnode->next = node->next; if(node->next) node->next->prev = newnode; else avltree->tail = newnode; node->next = newnode; node->right = newnode; avl_rebalance(avltree, node); return newnode; } /* * avl_unlink_node: * Removes the given node. Does not delete the item at that node. * The item of the node may be freed before calling avl_unlink_node. * (In other words, it is not referenced by this function.) */ static void avl_unlink_node(avl_tree_t *avltree, avl_node_t *avlnode) { avl_node_t *parent; avl_node_t **superparent; avl_node_t *subst, *left, *right; avl_node_t *balnode; if(avlnode->prev) avlnode->prev->next = avlnode->next; else avltree->head = avlnode->next; if(avlnode->next) avlnode->next->prev = avlnode->prev; else avltree->tail = avlnode->prev; parent = avlnode->parent; superparent = parent ? avlnode == parent->left ? 
&parent->left : &parent->right : &avltree->top; left = avlnode->left; right = avlnode->right; if(!left) { *superparent = right; if(right) right->parent = parent; balnode = parent; } else if(!right) { *superparent = left; left->parent = parent; balnode = parent; } else { subst = avlnode->prev; if(subst == left) { balnode = subst; } else { balnode = subst->parent; balnode->right = subst->left; if(balnode->right) balnode->right->parent = balnode; subst->left = left; left->parent = subst; } subst->right = right; subst->parent = parent; right->parent = subst; *superparent = subst; } avl_rebalance(avltree, balnode); } /* * avl_rebalance: * Rebalances the tree if one side becomes too heavy. This function * assumes that both subtrees are AVL-trees with consistent data. The * function has the additional side effect of recalculating the count of * the tree at this node. It should be noted that at the return of this * function, if a rebalance takes place, the top of this subtree is no * longer going to be the same node. */ static void avl_rebalance(avl_tree_t *avltree, avl_node_t *avlnode) { avl_node_t *child; avl_node_t *gchild; avl_node_t *parent; avl_node_t **superparent; parent = avlnode; while(avlnode) { parent = avlnode->parent; superparent = parent ? avlnode == parent->left ? &parent->left : &parent->right : &avltree->top; switch(avl_check_balance(avlnode)) { case -1: child = avlnode->left; #ifdef AVL_DEPTH if(L_DEPTH(child) >= R_DEPTH(child)) { #else #ifdef AVL_COUNT if(L_COUNT(child) >= R_COUNT(child)) { #else #error No balancing possible. 
#endif #endif avlnode->left = child->right; if(avlnode->left) avlnode->left->parent = avlnode; child->right = avlnode; avlnode->parent = child; *superparent = child; child->parent = parent; #ifdef AVL_COUNT avlnode->count = CALC_COUNT(avlnode); child->count = CALC_COUNT(child); #endif #ifdef AVL_DEPTH avlnode->depth = CALC_DEPTH(avlnode); child->depth = CALC_DEPTH(child); #endif } else { gchild = child->right; avlnode->left = gchild->right; if(avlnode->left) avlnode->left->parent = avlnode; child->right = gchild->left; if(child->right) child->right->parent = child; gchild->right = avlnode; if(gchild->right) gchild->right->parent = gchild; gchild->left = child; if(gchild->left) gchild->left->parent = gchild; *superparent = gchild; gchild->parent = parent; #ifdef AVL_COUNT avlnode->count = CALC_COUNT(avlnode); child->count = CALC_COUNT(child); gchild->count = CALC_COUNT(gchild); #endif #ifdef AVL_DEPTH avlnode->depth = CALC_DEPTH(avlnode); child->depth = CALC_DEPTH(child); gchild->depth = CALC_DEPTH(gchild); #endif } break; case 1: child = avlnode->right; #ifdef AVL_DEPTH if(R_DEPTH(child) >= L_DEPTH(child)) { #else #ifdef AVL_COUNT if(R_COUNT(child) >= L_COUNT(child)) { #else #error No balancing possible. 
#endif #endif avlnode->right = child->left; if(avlnode->right) avlnode->right->parent = avlnode; child->left = avlnode; avlnode->parent = child; *superparent = child; child->parent = parent; #ifdef AVL_COUNT avlnode->count = CALC_COUNT(avlnode); child->count = CALC_COUNT(child); #endif #ifdef AVL_DEPTH avlnode->depth = CALC_DEPTH(avlnode); child->depth = CALC_DEPTH(child); #endif } else { gchild = child->left; avlnode->right = gchild->left; if(avlnode->right) avlnode->right->parent = avlnode; child->left = gchild->right; if(child->left) child->left->parent = child; gchild->left = avlnode; if(gchild->left) gchild->left->parent = gchild; gchild->right = child; if(gchild->right) gchild->right->parent = gchild; *superparent = gchild; gchild->parent = parent; #ifdef AVL_COUNT avlnode->count = CALC_COUNT(avlnode); child->count = CALC_COUNT(child); gchild->count = CALC_COUNT(gchild); #endif #ifdef AVL_DEPTH avlnode->depth = CALC_DEPTH(avlnode); child->depth = CALC_DEPTH(child); gchild->depth = CALC_DEPTH(gchild); #endif } break; default: #ifdef AVL_COUNT avlnode->count = CALC_COUNT(avlnode); #endif #ifdef AVL_DEPTH avlnode->depth = CALC_DEPTH(avlnode); #endif } avlnode = parent; } } /*------------------------------------------------------------------------------ end of functions from AVL-tree library. 
*******************************************************************************/ #if !defined(VARIANT) || VARIANT < 1 || VARIANT > 4 #error VARIANT must be either 1, 2, 3 or 4, e.g., 'make VARIANT=4' #endif #if __GNUC__ >= 3 # define __hv_unused __attribute__ ((unused)) #else # define __hv_unused /* no 'unused' attribute available */ #endif #if VARIANT < 3 # define __variant3_only __hv_unused #else # define __variant3_only #endif #if VARIANT < 2 # define __variant2_only __hv_unused #else # define __variant2_only #endif typedef struct dlnode { double *x; /* The data vector */ struct dlnode **next; /* Next-node vector */ struct dlnode **prev; /* Previous-node vector */ struct avl_node_t * tnode; int ignore; int ignore_best; //used in define_order #if VARIANT >= 2 double *area; /* Area */ #endif #if VARIANT >= 3 double *vol; /* Volume */ #endif } dlnode_t; static avl_tree_t *tree; #if VARIANT < 4 int stop_dimension = 1; /* default: stop on dimension 2 */ #else int stop_dimension = 2; /* default: stop on dimension 3 */ #endif static int compare_node(const void *p1, const void* p2) { const double x1 = *((*(const dlnode_t **)p1)->x); const double x2 = *((*(const dlnode_t **)p2)->x); return (x1 < x2) ? -1 : (x1 > x2) ? 1 : 0; } static int compare_tree_asc(const void *p1, const void *p2) { const double *x1 = (const double *)p1; const double *x2 = (const double *)p2; return (x1[1] > x2[1]) ? -1 : (x1[1] < x2[1]) ? 1 : (x1[0] >= x2[0]) ? 
-1 : 1; } /* * Setup circular double-linked list in each dimension */ static dlnode_t * setup_cdllist(double *data, int d, int n) { dlnode_t *head; dlnode_t **scratch; int i, j; head = malloc ((n+1) * sizeof(dlnode_t)); head->x = data; head->ignore = 0; /* should never get used */ head->next = malloc( d * (n+1) * sizeof(dlnode_t*)); head->prev = malloc( d * (n+1) * sizeof(dlnode_t*)); head->tnode = malloc ((n+1) * sizeof(avl_node_t)); #if VARIANT >= 2 head->area = malloc(d * (n+1) * sizeof(double)); #endif #if VARIANT >= 3 head->vol = malloc(d * (n+1) * sizeof(double)); #endif for (i = 1; i <= n; i++) { head[i].x = head[i-1].x + d;/* this will be fixed a few lines below... */ head[i].ignore = 0; head[i].next = head[i-1].next + d; head[i].prev = head[i-1].prev + d; head[i].tnode = head[i-1].tnode + 1; #if VARIANT >= 2 head[i].area = head[i-1].area + d; #endif #if VARIANT >= 3 head[i].vol = head[i-1].vol + d; #endif } head->x = NULL; /* head contains no data */ scratch = malloc(n * sizeof(dlnode_t*)); for (i = 0; i < n; i++) scratch[i] = head + i + 1; for (j = d-1; j >= 0; j--) { for (i = 0; i < n; i++) scratch[i]->x--; qsort(scratch, n, sizeof(dlnode_t*), compare_node); head->next[j] = scratch[0]; scratch[0]->prev[j] = head; for (i = 1; i < n; i++) { scratch[i-1]->next[j] = scratch[i]; scratch[i]->prev[j] = scratch[i-1]; } scratch[n-1]->next[j] = head; head->prev[j] = scratch[n-1]; } free(scratch); for (i = 1; i <= n; i++) { (head[i].tnode)->item = head[i].x; } return head; } static void free_cdllist(dlnode_t * head) { free(head->tnode); /* Frees _all_ nodes. 
*/ free(head->next); free(head->prev); #if VARIANT >= 2 free(head->area); #endif #if VARIANT >= 3 free(head->vol); #endif free(head); } static void delete (dlnode_t *nodep, int dim, double * bound __variant3_only) { int i; for (i = stop_dimension; i < dim; i++) { nodep->prev[i]->next[i] = nodep->next[i]; nodep->next[i]->prev[i] = nodep->prev[i]; #if VARIANT >= 3 if (bound[i] > nodep->x[i]) bound[i] = nodep->x[i]; #endif } } #if VARIANT >= 2 static void delete_dom (dlnode_t *nodep, int dim) { int i; for (i = stop_dimension; i < dim; i++) { nodep->prev[i]->next[i] = nodep->next[i]; nodep->next[i]->prev[i] = nodep->prev[i]; } } #endif static void reinsert (dlnode_t *nodep, int dim, double * bound __variant3_only) { int i; for (i = stop_dimension; i < dim; i++) { nodep->prev[i]->next[i] = nodep; nodep->next[i]->prev[i] = nodep; #if VARIANT >= 3 if (bound[i] > nodep->x[i]) bound[i] = nodep->x[i]; #endif } } #if VARIANT >= 2 static void reinsert_dom (dlnode_t *nodep, int dim) { int i; for (i = stop_dimension; i < dim; i++) { dlnode_t *p = nodep->prev[i]; p->next[i] = nodep; nodep->next[i]->prev[i] = nodep; nodep->area[i] = p->area[i]; #if VARIANT >= 3 nodep->vol[i] = p->vol[i] + p->area[i] * (nodep->x[i] - p->x[i]); #endif } } #endif static double hv_recursive(dlnode_t *list, int dim, int c, const double * ref, double * bound) { /* ------------------------------------------------------ General case for dimensions higher than stop_dimension ------------------------------------------------------ */ if ( dim > stop_dimension ) { dlnode_t *p0 = list; dlnode_t *p1 = list->prev[dim]; double hyperv = 0; #if VARIANT == 1 double hypera; #endif #if VARIANT >= 2 dlnode_t *pp; for (pp = p1; pp->x; pp = pp->prev[dim]) { if (pp->ignore < dim) pp->ignore = 0; } #endif while (c > 1 #if VARIANT >= 3 /* We delete all points x[dim] > bound[dim]. In case of repeated coordinates, we also delete all points x[dim] == bound[dim] except one. 
*/ && (p1->x[dim] > bound[dim] || p1->prev[dim]->x[dim] >= bound[dim]) #endif ) { p0 = p1; #if VARIANT >=2 if (p0->ignore >= dim) delete_dom(p0, dim); else delete(p0, dim, bound); #else delete(p0, dim, bound); #endif p1 = p0->prev[dim]; c--; } #if VARIANT == 1 hypera = hv_recursive(list, dim-1, c, ref, bound); #elif VARIANT == 2 int i; p1->area[0] = 1; for (i = 1; i <= dim; i++) p1->area[i] = p1->area[i-1] * (ref[i-1] - p1->x[i-1]); #elif VARIANT >= 3 if (c > 1) { hyperv = p1->prev[dim]->vol[dim] + p1->prev[dim]->area[dim] * (p1->x[dim] - p1->prev[dim]->x[dim]); if (p1->ignore >= dim) p1->area[dim] = p1->prev[dim]->area[dim]; else { p1->area[dim] = hv_recursive(list, dim - 1, c, ref, bound); /* At this point, p1 is the point with the highest value in dimension dim in the list, so if it is dominated in dimension dim-1, so it is also dominated in dimension dim. */ if (p1->ignore == (dim - 1)) p1->ignore = dim; } } else { int i; p1->area[0] = 1; for (i = 1; i <= dim; i++) p1->area[i] = p1->area[i-1] * (ref[i-1] - p1->x[i-1]); } p1->vol[dim] = hyperv; #endif while (p0->x != NULL) { #if VARIANT == 1 hyperv += hypera * (p0->x[dim] - p1->x[dim]); #else hyperv += p1->area[dim] * (p0->x[dim] - p1->x[dim]); #endif c++; #if VARIANT >= 2 if (p0->ignore >= dim) { reinsert_dom (p0, dim); p0->area[dim] = p1->area[dim]; } else { #endif reinsert (p0, dim, bound); #if VARIANT >= 2 p0->area[dim] = hv_recursive (list, dim-1, c, ref, bound); if (p0->ignore == (dim - 1)) p0->ignore = dim; } #elif VARIANT == 1 hypera = hv_recursive (list, dim-1, c, ref, NULL); #endif p1 = p0; p0 = p0->next[dim]; #if VARIANT >= 3 p1->vol[dim] = hyperv; #endif } #if VARIANT >= 3 bound[dim] = p1->x[dim]; #endif #if VARIANT == 1 hyperv += hypera * (ref[dim] - p1->x[dim]); #else hyperv += p1->area[dim] * (ref[dim] - p1->x[dim]); #endif return hyperv; } /* --------------------------- special case of dimension 3 --------------------------- */ else if (dim == 2) { double hyperv; double hypera; double height; #if 
VARIANT >= 3 dlnode_t *pp = list->prev[2]; avl_node_t *tnode; /* All the points that have value of x[2] lower than bound[2] are points that were previously processed, so there's no need to process them again. In this case, every point was processed before, so the volume is known. */ if (pp->x[2] < bound[2]) return pp->vol[2] + pp->area[2] * (ref[2] - pp->x[2]); pp = list->next[2]; /* In this case, every point has to be processed. */ if (pp->x[2] >= bound[2]) { pp->tnode->domr = ref[2]; pp->area[2] = (ref[0] - pp->x[0]) * (ref[1] - pp->x[1]); pp->vol[2] = 0; pp->ignore = 0; } else { /* Otherwise, we look for the first point that has to be in the tree, by searching for the first point that isn't dominated or that is dominated by a point with value of x[2] higher or equal than bound[2] (domr keeps the value of the x[2] of the point that dominates pp, or ref[2] if it isn't dominated). */ while (pp->tnode->domr < bound[2]) { pp = pp->next[2]; } } pp->ignore = 0; avl_insert_top(tree,pp->tnode); pp->tnode->domr = ref[2]; /* Connect all points that aren't dominated or that are dominated and the point that dominates it has value x[2] (pp->tnode->domr) equal or higher than bound[2]. */ for (pp = pp->next[2]; pp->x[2] < bound[2]; pp = pp->next[2]) { if (pp->tnode->domr >= bound[2]) { avl_node_t *tnodeaux = pp->tnode; tnodeaux->domr = ref[2]; if (avl_search_closest(tree, pp->x, &tnode) <= 0) avl_insert_before(tree, tnode, tnodeaux); else avl_insert_after(tree, tnode, tnodeaux); } } pp = pp->prev[2]; hyperv = pp->vol[2]; hypera = pp->area[2]; height = (pp->next[2]->x) ? pp->next[2]->x[2] - pp->x[2] : ref[2] - pp->x[2]; bound[2] = list->prev[2]->x[2]; #else /* VARIANT <= 2 */ dlnode_t *pp = list->next[2]; hyperv = 0; hypera = (ref[0] - pp->x[0])*(ref[1] - pp->x[1]); height = (c == 1) ? 
ref[2] - pp->x[2] : pp->next[2]->x[2] - pp->x[2]; avl_insert_top(tree,pp->tnode); #endif hyperv += hypera * height; for (pp = pp->next[2]; pp->x != NULL; pp = pp->next[2]) { const double * prv_ip, * nxt_ip; avl_node_t *tnode; int cmp; #if VARIANT >= 3 pp->vol[2] = hyperv; #endif height = (pp == list->prev[2]) ? ref[2] - pp->x[2] : pp->next[2]->x[2] - pp->x[2]; #if VARIANT >= 2 if (pp->ignore >= 2) { hyperv += hypera * height; #if VARIANT >= 3 pp->area[2] = hypera; #endif continue; } #endif cmp = avl_search_closest(tree, pp->x, &tnode); if (cmp <= 0) { nxt_ip = (double *)(tnode->item); } else { nxt_ip = (tnode->next != NULL) ? (double *)(tnode->next->item) : ref; } if (nxt_ip[0] <= pp->x[0]) { pp->ignore = 2; #if VARIANT >= 3 pp->tnode->domr = pp->x[2]; pp->area[2] = hypera; #endif if (height > 0) hyperv += hypera * height; continue; } if (cmp <= 0) { avl_insert_before(tree, tnode, pp->tnode); tnode = pp->tnode->prev; } else { avl_insert_after(tree, tnode, pp->tnode); } #if VARIANT >= 3 pp->tnode->domr = ref[2]; #endif if (tnode != NULL) { prv_ip = (double *)(tnode->item); if (prv_ip[0] >= pp->x[0]) { const double * cur_ip; tnode = pp->tnode->prev; /* cur_ip = point dominated by pp with highest [0]-coordinate. */ cur_ip = (double *)(tnode->item); while (tnode->prev) { prv_ip = (double *)(tnode->prev->item); hypera -= (prv_ip[1] - cur_ip[1]) * (nxt_ip[0] - cur_ip[0]); if (prv_ip[0] < pp->x[0]) break; /* prv is not dominated by pp */ cur_ip = prv_ip; avl_unlink_node(tree,tnode); #if VARIANT >= 3 /* saves the value of x[2] of the point that dominates tnode. 
*/ tnode->domr = pp->x[2]; #endif tnode = tnode->prev; } avl_unlink_node(tree, tnode); #if VARIANT >= 3 tnode->domr = pp->x[2]; #endif if (!tnode->prev) { hypera -= (ref[1] - cur_ip[1]) * (nxt_ip[0] - cur_ip[0]); prv_ip = ref; } } } else prv_ip = ref; hypera += (prv_ip[1] - pp->x[1]) * (nxt_ip[0] - pp->x[0]); if (height > 0) hyperv += hypera * height; #if VARIANT >= 3 pp->area[2] = hypera; #endif } avl_clear_tree(tree); return hyperv; } /* special case of dimension 2 */ else if (dim == 1) { const dlnode_t *p1 = list->next[1]; double hypera = p1->x[0]; double hyperv = 0; dlnode_t *p0; while ((p0 = p1->next[1])->x) { hyperv += (ref[0] - hypera) * (p0->x[1] - p1->x[1]); if (p0->x[0] < hypera) hypera = p0->x[0]; else if (p0->ignore == 0) p0->ignore = 1; p1 = p0; } hyperv += (ref[0] - hypera) * (ref[1] - p1->x[1]); return hyperv; } /* special case of dimension 1 */ else if (dim == 0) { list->next[0]->ignore = -1; return (ref[0] - list->next[0]->x[0]); } else { fprintf(stderr, "%s:%d: unreachable condition! \n" "This is a bug, please report it to " "manuel.lopez-ibanez@ulb.ac.be\n", __FILE__, __LINE__); exit(EXIT_FAILURE); } } /* Removes the point from the circular double-linked list, but it doesn't remove the data. */ static void filter_delete_node(dlnode_t *node, int d) { int i; for (i = 0; i < d; i++) { node->next[i]->prev[i] = node->prev[i]; node->prev[i]->next[i] = node->next[i]; } } /* Filters those points that do not strictly dominate the reference point. This is needed to assure that the points left are only those that are needed to calculate the hypervolume. 
*/

/* Filters the point list against the reference point.
   Walks each dimension's list from the back (largest coordinate first) and
   unlinks every point whose i-th coordinate does not lie strictly below
   ref[i].  Removed points are only unlinked, not freed.
   Returns the number of points remaining. */
static int filter(dlnode_t *list, int d, int n, const double *ref)
{
    int i, j;

    /* fprintf (stderr, "%d points initially\n", n); */
    for (i = 0; i < d; i++) {
        dlnode_t *aux = list->prev[i];
        int np = n;   /* iterate over the count as it was at loop entry */
        for (j = 0; j < np; j++) {
            /* Lists are sorted: once a point is strictly inside the bound
               in this dimension, all remaining ones are too. */
            if (aux->x[i] < ref[i])
                break;
            filter_delete_node (aux, d);
            aux = aux->prev[i];
            n--;
        }
    }
    /* fprintf (stderr, "%d points remain\n", n); */
    return n;
}

#ifdef EXPERIMENTAL
/* Verifies up to which dimension k, domr dominates p and returns k (it is
   assumed that domr doesn't dominate p in dimensions higher than dim).
   Dimension order[0] is skipped: points are pre-sorted on it. */
static int test_domr(dlnode_t *p, dlnode_t *domr, int dim, int *order)
{
    int i;
    for(i = 1; i <= dim; i++){
        if (p->x[order[i]] < domr->x[order[i]])
            return i - 1;   /* domination stops at the previous dimension */
    }
    return dim;
}

/* Verifies up to which dimension k the point pp is dominated and returns k.
   This function is called only to verify points that aren't dominated for
   more than dim dimensions, so k will always be lower or equal to dim. */
static int test_dom(dlnode_t *list, dlnode_t *pp, int dim, int *order)
{
    dlnode_t *p0;
    int r, r_b = 0;   /* r_b: best (largest) domination depth found so far */
    int i = order[0];

    p0 = list->next[i];

    /* In every iteration, it is verified if p0 dominates pp and up to which
       dimension.  The goal is to find the point that dominates pp in the
       most dimensions, starting in dimension 0.

       Points are processed in ascending order of the first dimension.  This
       means that if a point p0 is dominated in the first k dimensions,
       where k >= dim, then the point that dominates it (in the first k
       dimensions) was already processed, so p0 won't dominate pp in more
       dimensions than the point that dominates p0 (because pp can be
       dominated, at most, up to dim dimensions, and so if p0 dominates pp
       in the first y dimensions (y < dim), the point that dominates p0
       also dominates pp in the first y dimensions or more, and this
       information is already stored in r_b), so p0 is skipped.
*/
    while (p0 != pp) {
        if (p0->ignore < dim) {
            r = test_domr (pp, p0, dim, order);
            /* if pp is dominated in the first dim + 1 dimensions, it is not
               necessary to verify other points that might dominate pp,
               because pp won't be dominated in more than dim+1 dimensions. */
            if (r == dim)
                return r;
            else if (r > r_b)
                r_b = r;
        }
        p0 = p0->next[i];
    }
    return r_b;
}

/* Determines the number of dominated points from dimension 0 to k, where
   k <= dim.  count[k] is incremented for each point dominated exactly up to
   dimension k; the per-point depth is cached in p1->ignore. */
static void determine_ndom(dlnode_t *list, int dim, int *order, int *count)
{
    dlnode_t *p1;
    int i, dom;
    int ord = order[0];

    for (i = 0; i <= dim; i++)
        count[i] = 0;

    p1 = list->next[ord];
    p1->ignore = 0;   /* the first point in sort order cannot be dominated */
    p1 = list->next[ord];

    while (p1 != list) {
        if (p1->ignore <= dim) {
            dom = test_dom(list, p1, dim, order);
            count[dom]++;
            p1->ignore = dom;
        }
        p1 = p1->next[ord];
    }
}

/* Unlinks nodep from the doubly-linked lists of dimensions 0..dim
   (inclusive); the node's data is left intact. */
static void delete_dominated(dlnode_t *nodep, int dim)
{
    int i;
    for (i = 0; i <= dim; i++) {
        nodep->prev[i]->next[i] = nodep->next[i];
        nodep->next[i]->prev[i] = nodep->prev[i];
    }
}

/* Determines the number of dominated points from dimension 0 to k, where
   k <= dim, for the original order of objectives.  Also defines that this
   order is the best order so far, so every point has the information up to
   which dimension it is dominated (ignore) and it is considered the highest
   number of dimensions in which it is dominated (so ignore_best is also
   updated).

   If there is any point dominated in every dimension, seen that it doesn't
   contribute to the hypervolume, it is removed as soon as possible, this
   way there's no waste of time with these points.

   Returns the number of total points.
*/
/* See the comment block above: counts domination depths for the initial
   objective order, seeds both 'ignore' and 'ignore_best' on every point,
   prunes totally-dominated points, and returns the updated point count c. */
static int determine_ndomf(dlnode_t *list, int dim, int c, int *order, int *count)
{
    dlnode_t *p1;
    int i, dom;
    int ord = order[0];

    for(i = 0; i <= dim; i++)
        count[i] = 0;

    p1 = list->next[ord];
    p1->ignore = p1->ignore_best = 0;   /* first point is never dominated */
    p1 = list->next[ord];

    /* Determines up to which dimension each point is dominated and uses
       this information to count the number of dominated points from
       dimension 0 to k, where k <= dim.

       Points that are dominated in more than the first 'dim' dimensions
       will continue to be dominated in those dimensions, and so they're
       skipped; it's not necessary to find out again up to which dimension
       they're dominated. */
    while (p1 != list){
        if (p1->ignore <= dim) {
            dom = test_dom(list, p1, dim, order);
            count[dom]++;
            p1->ignore = p1->ignore_best = dom;
        }
        p1 = p1->next[ord];
    }

    /* If there is any point dominated in every dimension, it is removed
       and the number of total points is updated. */
    if (count[dim] > 0) {
        p1 = list->prev[0];
        while (p1->x) {
            if (p1->ignore == dim) {
                delete_dominated(p1, dim);
                c--;
            }
            p1 = p1->prev[0];
        }
    }
    return c;
}

/* This function implements the iterative version of MDP heuristic described
   in

   L. While, L. Bradstreet, L. Barone, and P. Hingston, "Heuristics for
   optimising the calculation of hypervolume for multi-objective
   optimisation problems", in Congress on Evolutionary Computation,
   B. McKay, Ed. IEEE, 2005, pp. 2225-2232

   Tries to find a good order to process the objectives.

   This algorithm tries to maximize the number of dominated points dominated
   in more dimensions.  For example, for a problem with d dimensions, an
   order with 20 points dominated from dimension 0 to dimension d-1 is
   preferred to an order of objectives in which the number of points
   dominated from dimension 0 to d-1 is 10.  An order with the same number
   of points dominated up to dimension d-1 as a second order is preferred if
   it has more points dominated up to dimension d-2 than the second order.
*/
static int define_order(dlnode_t *list, int dim, int c, int *order)
{
    dlnode_t *p;

    // order - keeps the current order of objectives

    /* best_order - keeps the current best order for the objectives.
       At the end, this array (and the array order) will have the best order
       found, to process the objectives.  This array keeps the indexes of
       the objectives, where best_order[0] keeps the index of the first
       objective, best_order[1] keeps the index of the second objective
       and so on. */
    int *best_order = malloc(dim * sizeof(int));

    /* count - keeps the counting of the dominated points corresponding to
       the order of objectives in 'order'.  When it's found that a point is
       dominated at most, for the first four dimensions, then count[3] is
       incremented.  So, count[i] is incremented every time it's found a
       point that is dominated from dimension 0 to i, but not in dimension
       i+1. */
    int *count = malloc(dim * sizeof(int));

    /* keeps the best counting of the dominated points (that is obtained
       using the order in best_order). */
    int *best_count = malloc(dim * sizeof(int));

    int i, j, k;

    for (i = 0; i < dim; i++) {
        best_order[i] = order[i] = i;
        best_count[i] = count[i] = 0;
    }

    // determines the number of dominated points in the original order.
    // c - total number of points excluding points totally dominated
    c = determine_ndomf(list, dim-1, c, order, count);

    /* the best order so far is the original order, so it's necessary to
       register the number of points dominated in the best order. */
    for (i = 0; i < dim; i++) {
        best_count[i] = count[i];
    }

    /* Objectives are chosen from highest to lowest.  So we start defining
       which is the objective in position dim-1 and then which is the
       objective in position dim, and so on.  The objective chosen to be in
       position i is chosen in a way to maximize the number of dominated
       points from dimension 0 to i-1.

       So, this cycle, selects a position i, and then we find the objective
       (from the remaining objectives that haven't a position yet, the
       objectives that are in positions lower or equal to i) that by being
       in position i maximizes the number of points dominated from dimension
       0 to i-1. */
    for (i = dim - 1; i > 2; i--) {
        /* This cycle, in every iteration, assigns a different objective to
           position i.

           It's important to notice that if we want to maximize the number
           of dominated points from dimension 0 to i-1, when we want to know
           if an objective k in position i is the one that maximizes it, it
           doesn't matter the order of the objectives in positions lower
           than i; the number of dominated points from dimension 0 to i-1
           will always be the same, so it's not necessary to worry about the
           order of those objectives.

           When this cycle starts, 'order' has the original order and so
           'count' has the number of points dominated from 0 to every k,
           where k < dim, or 'order' has the last order of objectives used
           to calculate the best objective to put in position i+1 that
           maximizes the number of dominated points from dimension 0 to i,
           and so 'count' has the number of points dominated from dimension
           0 to every k, where k < dim, that was calculated previously.

           There on, it is not necessary to calculate the number of
           dominated points from dimension 0 to i-1 with the actual
           objective in position i (order[i]), because this value was
           previously calculated, and so it is only necessary to calculate
           the number of dominated points when the current objectives in
           order[k], where k < i, are in position i. */
        for (j = 0; j < i; j++) {
            /* try objective order[j] in position i (plain swap) */
            int aux = order[i];
            order[i] = order[j];
            order[j] = aux;

            /* Determine the number of dominated points from dimension 0 to
               k, where k < i (the number of points dominated from dimension
               0 to t, where t >= i, is already known from previous
               calculations) with a different objective in position i.
*/
            determine_ndom(list, i-1, order, count);

            /* If the order in 'order' is better than the previously best
               order, then the actual order is now the best.  An order is
               better than another if the number of dominated points from
               dimension 0 to i-1 is higher.  If this number is equal, then
               the best is the one that has the most dominated points from
               dimension 0 to i-2.  If this number is equal, then the last
               order considered the best still remains the best order so
               far. */
            if (best_count[i-1] < count[i-1]
                || (best_count[i-1] == count[i-1]
                    && best_count[i-2] < count[i-2])) {
                for (k = 0; k <= i; k++) {
                    best_count[k] = count[k];
                    best_order[k] = order[k];
                }
                /* remember the per-point domination depths of this order */
                p = list->prev[0];
                while (p != list) {
                    p->ignore_best = p->ignore;
                    p = p->prev[0];
                }
            }
        }

        /* If necessary, update 'order' with the best order so far and the
           corresponding number of dominated points.  In this way, in the
           next iteration it is not necessary to recalculate the number of
           dominated points from dimension 0 to i-2, when in position i-1 is
           the objective that is currently in position i-1 in the best order
           so far (best_order[i-1]). */
        if (order[i] != best_order[i]) {
            for (j = 0; j <= i; j++) {
                count[j] = best_count[j];
                order[j] = best_order[j];
            }
            p = list->prev[0];
            /* The information about a point being dominated is updated
               because, this way, in some cases it is not necessary to find
               out (again) if a point is dominated. */
            while (p != list) {
                p->ignore = p->ignore_best;
                p = p->prev[0];
            }
        }
    }

    free(count);
    free(best_count);
    free(best_order);
    return c;
}

/* Reorders the reference point's objectives according to an order 'order'. */
static void reorder_reference(double *reference, int d, int *order)
{
    int j;
    double *tmp = (double *) malloc(d * sizeof(double));
    for (j = 0; j < d; j++) {
        tmp[j] = reference[j];
    }
    for (j = 0; j < d; j++) {
        reference[j] = tmp[order[j]];
    }
    free(tmp);
}

/* Reorders the dimensions for every point according to an order.
*/
/* Permutes, in place, the coordinates of every point and the per-dimension
   list links (prev/next) of every node — including the sentinel — so that
   dimension j becomes old dimension order[j].  Also resets each point's
   'ignore' flag. */
void reorder_list(dlnode_t *list, int d, int *order)
{
    int j;
    double *x;
    double *tmp = (double *) malloc(d * sizeof(double));
    dlnode_t **prev = (dlnode_t **) malloc(d * sizeof(dlnode_t *));
    dlnode_t **next = (dlnode_t **) malloc(d * sizeof(dlnode_t *));
    dlnode_t *p;

    /* permute the sentinel's links first */
    for(j = 0; j < d; j++) {
        prev[j] = list->prev[j];
        next[j] = list->next[j];
    }
    for(j = 0; j < d; j++) {
        list->prev[j] = prev[order[j]];
        list->next[j] = next[order[j]];
    }

    /* then every real node: coordinates and links alike */
    p = list->next[0];
    while (p != list) {
        p->ignore = 0;
        x = p->x;
        for(j = 0; j < d; j++) {
            tmp[j] = x[j];
            prev[j] = p->prev[j];
            next[j] = p->next[j];
        }
        for(j = 0; j < d; j++) {
            x[j] = tmp[order[j]];
            p->prev[j] = prev[order[j]];
            p->next[j] = next[order[j]];
        }
        p = p->next[0];
    }

    free(tmp);
    free(prev);
    free(next);
}
#endif

/* Public entry point: computes the hypervolume dominated by the n points of
   'data' (row-major, d coordinates each) with respect to reference point
   'ref'.  Minimization is assumed; points not strictly dominating 'ref' are
   filtered out first.  Returns 0.0 for an empty (filtered) set. */
double fpli_hv(double *data, int d, int n, const double *ref)
{
    dlnode_t *list;
    double hyperv;
    double * bound = NULL;
    int i;

#if VARIANT >= 3
    /* lower bounds per dimension, used by the recursion to skip work */
    bound = malloc (d * sizeof(double));
    for (i = 0; i < d; i++) bound[i] = -DBL_MAX;
#endif

    tree = avl_alloc_tree ((avl_compare_t) compare_tree_asc,
                           (avl_freeitem_t) NULL);

    list = setup_cdllist(data, d, n);

    n = filter(list, d, n, ref);
    if (n == 0) {
        hyperv = 0.0;
    } else if (n == 1) {
        /* single point: the hypervolume is just the box to the reference */
        dlnode_t * p = list->next[0];
        hyperv = 1;
        for (i = 0; i < d; i++)
            hyperv *= ref[i] - p->x[i];
    } else {
        hyperv = hv_recursive(list, d-1, n, ref, bound);
    }
    /* Clean up. */
    free_cdllist (list);
    free (tree);  /* The nodes are freed by free_cdllist (). */
    free (bound);

    return hyperv;
}

#ifdef EXPERIMENTAL
#include "timer.h" /* FIXME: Avoid calling Timer functions here.
*/ double fpli_hv_order(double *data, int d, int n, const double *ref, int *order, double *order_time, double *hv_time) { dlnode_t *list; double hyperv; double * bound = NULL; double * ref_ord = (double *) malloc(d * sizeof(double)); #if VARIANT >= 3 int i; bound = malloc (d * sizeof(double)); for (i = 0; i < d; i++) bound[i] = -DBL_MAX; #endif tree = avl_alloc_tree ((avl_compare_t) compare_tree_asc, (avl_freeitem_t) NULL); list = setup_cdllist(data, d, n); if (d > 3) { n = define_order(list, d, n, order); reorder_list(list, d, order); // copy reference so it will be unchanged for the next data sets. for (i = 0; i < d; i++) ref_ord[i] = ref[i]; reorder_reference(ref_ord, d, order); } else { for(i = 0; i < d; i++) ref_ord[i] = ref[i]; } *order_time = Timer_elapsed_virtual (); Timer_start(); n = filter(list, d, n, ref_ord); if (n == 0) { hyperv = 0.0; } else if (n == 1) { hyperv = 1; dlnode_t * p = list->next[0]; for (i = 0; i < d; i++) hyperv *= ref[i] - p->x[i]; } else { hyperv = hv_recursive(list, d-1, n, ref, bound); } /* Clean up. */ free_cdllist (list); free (tree); /* The nodes are freed by free_cdllist (). */ free (bound); free (ref_ord); *hv_time = Timer_elapsed_virtual (); return hyperv; } #endif ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/deap/tools/_hypervolume/_hv.h0000644000076500000240000000410514456461441017407 0ustar00runnerstaff/************************************************************************* hv.h --------------------------------------------------------------------- Copyright (c) 2010 Carlos M. Fonseca Manuel Lopez-Ibanez Luis Paquete Andreia P. Guerreiro This program is free software (software libre); you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. 
As a particular exception, the contents of this file (hv.h) may also be redistributed and/or modified under the terms of the GNU Lesser General Public License (LGPL) as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, you can obtain a copy of the GNU General Public License at: http://www.gnu.org/copyleft/gpl.html or by writing to: Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ---------------------------------------------------------------------- *************************************************************************/ #ifndef HV_H_ #define HV_H_ #ifdef __cplusplus extern "C" { #endif extern int stop_dimension; double fpli_hv(double *data, int d, int n, const double *ref); #ifdef EXPERIMENTAL double fpli_hv_order(double *data, int d, int n, const double *ref, int *order, double *order_time, double *hv_time); #endif #ifdef __cplusplus } #endif #endif ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/deap/tools/_hypervolume/hv.cpp0000644000076500000240000001111014456461441017575 0ustar00runnerstaff/* * This file is part of DEAP. * * DEAP is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation, either version 3 of * the License, or (at your option) any later version. * * DEAP is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with DEAP. If not, see . */ #include #if PY_MAJOR_VERSION >= 3 #define PY3K #endif #include #include #include "_hv.h" static PyObject* hypervolume(PyObject *self, PyObject *args){ // Args[0]: Point list // Args[1]: Reference point // Return: The hypervolume as a double PyObject *lPyPointSet = PyTuple_GetItem(args, 0); PyObject *lPyReference = PyTuple_GetItem(args, 1); int lNumPoints = 0; int lDim = -1; double *lPointSet = NULL; if(PySequence_Check(lPyPointSet)){ lNumPoints = PySequence_Size(lPyPointSet); unsigned int lPointCount = 0; for(int i = 0; i < lNumPoints; ++i){ PyObject *lPyPoint = PySequence_GetItem(lPyPointSet, i); if(PySequence_Check(lPyPoint)){ if(lDim < 0){ lDim = PySequence_Size(lPyPoint); lPointSet = new double[lNumPoints*lDim]; } for(int j = 0; j < lDim; ++j){ PyObject *lPyCoord = PySequence_GetItem(lPyPoint, j); lPointSet[lPointCount++] = PyFloat_AsDouble(lPyCoord); Py_DECREF(lPyCoord); lPyCoord = NULL; if(PyErr_Occurred()){ PyErr_SetString(PyExc_TypeError,"Points must contain double type values"); delete[] lPointSet; return NULL; } } Py_DECREF(lPyPoint); lPyPoint = NULL; } else { Py_DECREF(lPyPoint); lPyPoint = NULL; PyErr_SetString(PyExc_TypeError,"First argument must contain only points"); free(lPointSet); return NULL; } } } else { PyErr_SetString(PyExc_TypeError,"First argument must be a list of points"); return NULL; } double *lReference = NULL; if(PySequence_Check(lPyReference)){ if(PySequence_Size(lPyReference) == lDim){ lReference = new double[lDim]; for(int i = 0; i < lDim; ++i){ PyObject *lPyCoord = PySequence_GetItem(lPyReference, i); lReference[i] = PyFloat_AsDouble(lPyCoord); Py_DECREF(lPyCoord); lPyCoord = NULL; if(PyErr_Occurred()){ PyErr_SetString(PyExc_TypeError,"Reference point must contain double type values"); delete[] lReference; return NULL; } } } else { 
PyErr_SetString(PyExc_TypeError,"Reference point is not of same dimensionality as point set"); return NULL; } } else { PyErr_SetString(PyExc_TypeError,"Second argument must be a point"); return NULL; } double lHypervolume = fpli_hv(lPointSet, lDim, lNumPoints, lReference); delete[] lPointSet; delete[] lReference; return PyFloat_FromDouble(lHypervolume); } static PyMethodDef hvMethods[] = { {"hypervolume", hypervolume, METH_VARARGS, "Hypervolume Computation"}, {NULL, NULL, 0, NULL} /* Sentinel (?!?) */ }; #ifdef PY3K static struct PyModuleDef moduledef = { PyModuleDef_HEAD_INIT, "hv", /* m_name */ "C Hypervolumes methods.", /* m_doc */ -1, /* m_size */ hvMethods, /* m_methods */ NULL, /* m_reload */ NULL, /* m_traverse */ NULL, /* m_clear */ NULL, /* m_free */ }; #endif PyMODINIT_FUNC #ifdef PY3K PyInit_hv(void) #else inithv(void) #endif { #ifdef PY3K PyObject *lModule = PyModule_Create(&moduledef); if(lModule == NULL) return NULL; return lModule; #else (void) Py_InitModule("hv", hvMethods); #endif }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/deap/tools/_hypervolume/pyhv.py0000644000076500000240000002660014456461441020026 0ustar00runnerstaff# This file is part of DEAP. # # Copyright (C) 2010 Simon Wessing # TU Dortmund University # # In personal communication, the original authors authorized DEAP team # to use this file under the Lesser General Public License. # # You can find the original library here : # http://ls11-www.cs.uni-dortmund.de/_media/rudolph/hypervolume/hv_python.zip # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . import warnings import numpy def hypervolume(pointset, ref): """Compute the absolute hypervolume of a *pointset* according to the reference point *ref*. """ warnings.warn("Falling back to the python version of hypervolume " "module. Expect this to be very slow.", RuntimeWarning) hv = _HyperVolume(ref) return hv.compute(pointset) class _HyperVolume: """ Hypervolume computation based on variant 3 of the algorithm in the paper: C. M. Fonseca, L. Paquete, and M. Lopez-Ibanez. An improved dimension-sweep algorithm for the hypervolume indicator. In IEEE Congress on Evolutionary Computation, pages 1157-1163, Vancouver, Canada, July 2006. Minimization is implicitly assumed here! """ def __init__(self, referencePoint): """Constructor.""" self.referencePoint = referencePoint self.list = [] def compute(self, front): """Returns the hypervolume that is dominated by a non-dominated front. Before the HV computation, front and reference point are translated, so that the reference point is [0, ..., 0]. 
""" def weaklyDominates(point, other): for i in range(len(point)): if point[i] > other[i]: return False return True relevantPoints = [] referencePoint = self.referencePoint dimensions = len(referencePoint) ####### # fmder: Here it is assumed that every point dominates the reference point # for point in front: # # only consider points that dominate the reference point # if weaklyDominates(point, referencePoint): # relevantPoints.append(point) relevantPoints = front # fmder ####### if any(referencePoint): # shift points so that referencePoint == [0, ..., 0] # this way the reference point doesn't have to be explicitly used # in the HV computation ####### # fmder: Assume relevantPoints are numpy array # for j in xrange(len(relevantPoints)): # relevantPoints[j] = [relevantPoints[j][i] - referencePoint[i] for i in xrange(dimensions)] relevantPoints -= referencePoint # fmder ####### self.preProcess(relevantPoints) bounds = [-1.0e308] * dimensions hyperVolume = self.hvRecursive(dimensions - 1, len(relevantPoints), bounds) return hyperVolume def hvRecursive(self, dimIndex, length, bounds): """Recursive call to hypervolume calculation. In contrast to the paper, the code assumes that the reference point is [0, ..., 0]. This allows the avoidance of a few operations. """ hvol = 0.0 sentinel = self.list.sentinel if length == 0: return hvol elif dimIndex == 0: # special case: only one dimension # why using hypervolume at all? 
return -sentinel.next[0].cargo[0] elif dimIndex == 1: # special case: two dimensions, end recursion q = sentinel.next[1] h = q.cargo[0] p = q.next[1] while p is not sentinel: pCargo = p.cargo hvol += h * (q.cargo[1] - pCargo[1]) if pCargo[0] < h: h = pCargo[0] q = p p = q.next[1] hvol += h * q.cargo[1] return hvol else: remove = self.list.remove reinsert = self.list.reinsert hvRecursive = self.hvRecursive p = sentinel q = p.prev[dimIndex] while q.cargo != None: if q.ignore < dimIndex: q.ignore = 0 q = q.prev[dimIndex] q = p.prev[dimIndex] while length > 1 and (q.cargo[dimIndex] > bounds[dimIndex] or q.prev[dimIndex].cargo[dimIndex] >= bounds[dimIndex]): p = q remove(p, dimIndex, bounds) q = p.prev[dimIndex] length -= 1 qArea = q.area qCargo = q.cargo qPrevDimIndex = q.prev[dimIndex] if length > 1: hvol = qPrevDimIndex.volume[dimIndex] + qPrevDimIndex.area[dimIndex] * (qCargo[dimIndex] - qPrevDimIndex.cargo[dimIndex]) else: qArea[0] = 1 qArea[1:dimIndex+1] = [qArea[i] * -qCargo[i] for i in range(dimIndex)] q.volume[dimIndex] = hvol if q.ignore >= dimIndex: qArea[dimIndex] = qPrevDimIndex.area[dimIndex] else: qArea[dimIndex] = hvRecursive(dimIndex - 1, length, bounds) if qArea[dimIndex] <= qPrevDimIndex.area[dimIndex]: q.ignore = dimIndex while p is not sentinel: pCargoDimIndex = p.cargo[dimIndex] hvol += q.area[dimIndex] * (pCargoDimIndex - q.cargo[dimIndex]) bounds[dimIndex] = pCargoDimIndex reinsert(p, dimIndex, bounds) length += 1 q = p p = p.next[dimIndex] q.volume[dimIndex] = hvol if q.ignore >= dimIndex: q.area[dimIndex] = q.prev[dimIndex].area[dimIndex] else: q.area[dimIndex] = hvRecursive(dimIndex - 1, length, bounds) if q.area[dimIndex] <= q.prev[dimIndex].area[dimIndex]: q.ignore = dimIndex hvol -= q.area[dimIndex] * q.cargo[dimIndex] return hvol def preProcess(self, front): """Sets up the list data structure needed for calculation.""" dimensions = len(self.referencePoint) nodeList = _MultiList(dimensions) nodes = [_MultiList.Node(dimensions, point) for 
point in front] for i in range(dimensions): self.sortByDimension(nodes, i) nodeList.extend(nodes, i) self.list = nodeList def sortByDimension(self, nodes, i): """Sorts the list of nodes by the i-th value of the contained points.""" # build a list of tuples of (point[i], node) decorated = [(node.cargo[i], node) for node in nodes] # sort by this value decorated.sort() # write back to original list nodes[:] = [node for (_, node) in decorated] class _MultiList: """A special data structure needed by FonsecaHyperVolume. It consists of several doubly linked lists that share common nodes. So, every node has multiple predecessors and successors, one in every list. """ class Node: def __init__(self, numberLists, cargo=None): self.cargo = cargo self.next = [None] * numberLists self.prev = [None] * numberLists self.ignore = 0 self.area = [0.0] * numberLists self.volume = [0.0] * numberLists def __str__(self): return str(self.cargo) def __lt__(self, other): return all(self.cargo < other.cargo) def __init__(self, numberLists): """Constructor. Builds 'numberLists' doubly linked lists. 
""" self.numberLists = numberLists self.sentinel = _MultiList.Node(numberLists) self.sentinel.next = [self.sentinel] * numberLists self.sentinel.prev = [self.sentinel] * numberLists def __str__(self): strings = [] for i in range(self.numberLists): currentList = [] node = self.sentinel.next[i] while node != self.sentinel: currentList.append(str(node)) node = node.next[i] strings.append(str(currentList)) stringRepr = "" for string in strings: stringRepr += string + "\n" return stringRepr def __len__(self): """Returns the number of lists that are included in this _MultiList.""" return self.numberLists def getLength(self, i): """Returns the length of the i-th list.""" length = 0 sentinel = self.sentinel node = sentinel.next[i] while node != sentinel: length += 1 node = node.next[i] return length def append(self, node, index): """Appends a node to the end of the list at the given index.""" lastButOne = self.sentinel.prev[index] node.next[index] = self.sentinel node.prev[index] = lastButOne # set the last element as the new one self.sentinel.prev[index] = node lastButOne.next[index] = node def extend(self, nodes, index): """Extends the list at the given index with the nodes.""" sentinel = self.sentinel for node in nodes: lastButOne = sentinel.prev[index] node.next[index] = sentinel node.prev[index] = lastButOne # set the last element as the new one sentinel.prev[index] = node lastButOne.next[index] = node def remove(self, node, index, bounds): """Removes and returns 'node' from all lists in [0, 'index'[.""" for i in range(index): predecessor = node.prev[i] successor = node.next[i] predecessor.next[i] = successor successor.prev[i] = predecessor if bounds[i] > node.cargo[i]: bounds[i] = node.cargo[i] return node def reinsert(self, node, index, bounds): """ Inserts 'node' at the position it had in all lists in [0, 'index'[ before it was removed. This method assumes that the next and previous nodes of the node that is reinserted are in the list. 
""" for i in range(index): node.prev[i].next[i] = node node.next[i].prev[i] = node if bounds[i] > node.cargo[i]: bounds[i] = node.cargo[i] __all__ = ["hypervolume_kmax", "hypervolume"] if __name__ == "__main__": try: from deap.tools import hv except ImportError: hv = None print("Cannot import C version of hypervolume") pointset = [(a, a) for a in numpy.arange(1, 0, -0.01)] ref = numpy.array([2, 2]) print("Python version: %f" % hypervolume(pointset, ref)) if hv: print("C version: %f" % hv.hypervolume(pointset, ref)) print("Approximated: %f" % hypervolume_approximation(pointset, ref)) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/deap/tools/constraint.py0000644000076500000240000001740514456461441016511 0ustar00runnerstaff from functools import wraps from itertools import repeat try: from collections.abc import Sequence except ImportError: from collections import Sequence class DeltaPenalty(object): r"""This decorator returns penalized fitness for invalid individuals and the original fitness value for valid individuals. The penalized fitness is made of a constant factor *delta* added with an (optional) *distance* penalty. The distance function, if provided, shall return a value growing as the individual moves away the valid zone. :param feasibility: A function returning the validity status of any individual. :param delta: Constant or array of constants returned for an invalid individual. :param distance: A function returning the distance between the individual and a given valid point. The distance function can also return a sequence of length equal to the number of objectives to affect multi-objective fitnesses differently (optional, defaults to 0). :returns: A decorator for evaluation function. This function relies on the fitness weights to add correctly the distance. The fitness value of the ith objective is defined as .. 
math:: f^\mathrm{penalty}_i(\mathbf{x}) = \Delta_i - w_i d_i(\mathbf{x}) where :math:`\mathbf{x}` is the individual, :math:`\Delta_i` is a user defined constant and :math:`w_i` is the weight of the ith objective. :math:`\Delta` should be worst than the fitness of any possible individual, this means higher than any fitness for minimization and lower than any fitness for maximization. See the :doc:`/tutorials/advanced/constraints` for an example. """ def __init__(self, feasibility, delta, distance=None): self.fbty_fct = feasibility if not isinstance(delta, Sequence): self.delta = repeat(delta) else: self.delta = delta self.dist_fct = distance def __call__(self, func): @wraps(func) def wrapper(individual, *args, **kwargs): if self.fbty_fct(individual): return func(individual, *args, **kwargs) weights = tuple(1 if w >= 0 else -1 for w in individual.fitness.weights) dists = tuple(0 for w in individual.fitness.weights) if self.dist_fct is not None: dists = self.dist_fct(individual) if not isinstance(dists, Sequence): dists = repeat(dists) return tuple(d - w * dist for d, w, dist in zip(self.delta, weights, dists)) return wrapper DeltaPenality = DeltaPenalty class ClosestValidPenalty(object): r"""This decorator returns penalized fitness for invalid individuals and the original fitness value for valid individuals. The penalized fitness is made of the fitness of the closest valid individual added with a weighted (optional) *distance* penalty. The distance function, if provided, shall return a value growing as the individual moves away the valid zone. :param feasibility: A function returning the validity status of any individual. :param feasible: A function returning the closest feasible individual from the current invalid individual. :param alpha: Multiplication factor on the distance between the valid and invalid individual. :param distance: A function returning the distance between the individual and a given valid point. 
        The distance function can also return a sequence of length equal to
        the number of objectives to affect multi-objective fitnesses
        differently (optional, defaults to 0).
    :returns: A decorator for evaluation function.

    This function relies on the fitness weights to add correctly the distance.
    The fitness value of the ith objective is defined as

    .. math::

       f^\mathrm{penalty}_i(\mathbf{x}) = f_i(\operatorname{valid}(\mathbf{x})) - \\alpha w_i d_i(\operatorname{valid}(\mathbf{x}), \mathbf{x})

    where :math:`\mathbf{x}` is the individual,
    :math:`\operatorname{valid}(\mathbf{x})` is a function returning the closest
    valid individual to :math:`\mathbf{x}`, :math:`\\alpha` is the distance
    multiplicative factor and :math:`w_i` is the weight of the ith objective.
    """

    def __init__(self, feasibility, feasible, alpha, distance=None):
        # feasibility(individual) -> bool: validity test for an individual.
        self.fbty_fct = feasibility
        # feasible(individual) -> individual: projection onto the valid region.
        self.fbl_fct = feasible
        # Multiplicative factor applied to the distance penalty.
        self.alpha = alpha
        # Optional distance(valid_ind, individual) -> scalar or sequence
        # (one entry per objective).
        self.dist_fct = distance

    def __call__(self, func):
        @wraps(func)
        def wrapper(individual, *args, **kwargs):
            # Valid individuals are evaluated normally, no penalty applied.
            if self.fbty_fct(individual):
                return func(individual, *args, **kwargs)

            # Invalid: evaluate the closest feasible individual instead.
            f_ind = self.fbl_fct(individual)
            # print("individual", f_ind)
            f_fbl = func(f_ind, *args, **kwargs)
            # print("feasible", f_fbl)

            # +1/-1 signs derived from the fitness weights so the penalty
            # always worsens the fitness, for both min and max objectives.
            weights = tuple(1.0 if w >= 0 else -1.0 for w in individual.fitness.weights)

            if len(weights) != len(f_fbl):
                raise IndexError("Fitness weights and computed fitness are of different size.")

            dists = tuple(0 for w in individual.fitness.weights)
            if self.dist_fct is not None:
                dists = self.dist_fct(f_ind, individual)
                # A scalar distance is broadcast over every objective.
                if not isinstance(dists, Sequence):
                    dists = repeat(dists)

            # print("penalty ", tuple( - w * self.alpha * d for f, w, d in zip(f_fbl, weights, dists)))
            # print("returned", tuple(f - w * self.alpha * d for f, w, d in zip(f_fbl, weights, dists)))
            return tuple(f - w * self.alpha * d for f, w, d in zip(f_fbl, weights, dists))

        return wrapper


ClosestValidPenality = ClosestValidPenalty
# List of exported function names.
__all__ = ['DeltaPenalty', 'ClosestValidPenalty', 'DeltaPenality', 'ClosestValidPenality'] if __name__ == "__main__": from deap import base from deap import benchmarks from deap import creator import numpy MIN_BOUND = numpy.array([0] * 30) MAX_BOUND = numpy.array([1] * 30) creator.create("FitnessMin", base.Fitness, weights=(-1.0, -1.0)) creator.create("Individual", list, fitness=creator.FitnessMin) def distance(feasible_ind, original_ind): """A distance function to the feasibility region.""" return sum((f - o)**2 for f, o in zip(feasible_ind, original_ind)) def closest_feasible(individual): """A function returning a valid individual from an invalid one.""" feasible_ind = numpy.array(individual) feasible_ind = numpy.maximum(MIN_BOUND, feasible_ind) feasible_ind = numpy.minimum(MAX_BOUND, feasible_ind) return feasible_ind def valid(individual): """Determines if the individual is valid or not.""" if any(individual < MIN_BOUND) or any(individual > MAX_BOUND): return False return True toolbox = base.Toolbox() toolbox.register("evaluate", benchmarks.zdt2) toolbox.decorate("evaluate", ClosestValidPenalty(valid, closest_feasible, 1.0e-6, distance)) ind1 = creator.Individual((-5.6468535666e-01, 2.2483050478e+00, -1.1087909644e+00, -1.2710112861e-01, 1.1682438733e+00, -1.3642007438e+00, -2.1916417835e-01, -5.9137308999e-01, -1.0870160336e+00, 6.0515070232e-01, 2.1532075914e+00, -2.6164718271e-01, 1.5244071578e+00, -1.0324305612e+00, 1.2858152343e+00, -1.2584683962e+00, 1.2054392372e+00, -1.7429571973e+00, -1.3517256013e-01, -2.6493429355e+00, -1.3051320798e-01, 2.2641961090e+00, -2.5027232340e+00, -1.2844874148e+00, 1.9955852925e+00, -1.2942218834e+00, 3.1340109155e+00, 1.6440111097e+00, -1.7750105857e+00, 7.7610242710e-01)) print(toolbox.evaluate(ind1)) print("Individuals is valid: %s" % ("True" if valid(ind1) else "False")) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 
deap-1.4.1/deap/tools/crossover.py0000644000076500000240000004163714456461441016356 0ustar00runnerstaffimport random import warnings try: from collections.abc import Sequence except ImportError: from collections import Sequence from itertools import repeat ###################################### # GA Crossovers # ###################################### def cxOnePoint(ind1, ind2): """Executes a one point crossover on the input :term:`sequence` individuals. The two individuals are modified in place. The resulting individuals will respectively have the length of the other. :param ind1: The first individual participating in the crossover. :param ind2: The second individual participating in the crossover. :returns: A tuple of two individuals. This function uses the :func:`~random.randint` function from the python base :mod:`random` module. """ size = min(len(ind1), len(ind2)) cxpoint = random.randint(1, size - 1) ind1[cxpoint:], ind2[cxpoint:] = ind2[cxpoint:], ind1[cxpoint:] return ind1, ind2 def cxTwoPoint(ind1, ind2): """Executes a two-point crossover on the input :term:`sequence` individuals. The two individuals are modified in place and both keep their original length. :param ind1: The first individual participating in the crossover. :param ind2: The second individual participating in the crossover. :returns: A tuple of two individuals. This function uses the :func:`~random.randint` function from the Python base :mod:`random` module. """ size = min(len(ind1), len(ind2)) cxpoint1 = random.randint(1, size) cxpoint2 = random.randint(1, size - 1) if cxpoint2 >= cxpoint1: cxpoint2 += 1 else: # Swap the two cx points cxpoint1, cxpoint2 = cxpoint2, cxpoint1 ind1[cxpoint1:cxpoint2], ind2[cxpoint1:cxpoint2] \ = ind2[cxpoint1:cxpoint2], ind1[cxpoint1:cxpoint2] return ind1, ind2 def cxTwoPoints(ind1, ind2): """ .. deprecated:: 1.0 The function has been renamed. Use :func:`~deap.tools.cxTwoPoint` instead. """ warnings.warn("tools.cxTwoPoints has been renamed. 
Use cxTwoPoint instead.", FutureWarning)
    return cxTwoPoint(ind1, ind2)


def cxUniform(ind1, ind2, indpb):
    """Executes a uniform crossover that modify in place the two
    :term:`sequence` individuals. The attributes are swapped according to the
    *indpb* probability.

    :param ind1: The first individual participating in the crossover.
    :param ind2: The second individual participating in the crossover.
    :param indpb: Independent probability for each attribute to be exchanged.
    :returns: A tuple of two individuals.

    This function uses the :func:`~random.random` function from the python base
    :mod:`random` module.
    """
    # Only iterate over the overlapping prefix so individuals of unequal
    # length are handled without indexing errors.
    size = min(len(ind1), len(ind2))
    for i in range(size):
        # Each position is swapped independently with probability *indpb*.
        if random.random() < indpb:
            ind1[i], ind2[i] = ind2[i], ind1[i]

    return ind1, ind2


def cxPartialyMatched(ind1, ind2):
    """Executes a partially matched crossover (PMX) on the input individuals.
    The two individuals are modified in place. This crossover expects
    :term:`sequence` individuals of indices, the result for any other type of
    individuals is unpredictable.

    :param ind1: The first individual participating in the crossover.
    :param ind2: The second individual participating in the crossover.
    :returns: A tuple of two individuals.

    Moreover, this crossover generates two children by matching
    pairs of values in a certain range of the two parents and swapping the
    values of those indexes. For more details see [Goldberg1985]_.

    This function uses the :func:`~random.randint` function from the python base
    :mod:`random` module.

    .. [Goldberg1985] Goldberg and Lingel, "Alleles, loci, and the traveling
       salesman problem", 1985.
""" size = min(len(ind1), len(ind2)) p1, p2 = [0] * size, [0] * size # Initialize the position of each indices in the individuals for i in range(size): p1[ind1[i]] = i p2[ind2[i]] = i # Choose crossover points cxpoint1 = random.randint(0, size) cxpoint2 = random.randint(0, size - 1) if cxpoint2 >= cxpoint1: cxpoint2 += 1 else: # Swap the two cx points cxpoint1, cxpoint2 = cxpoint2, cxpoint1 # Apply crossover between cx points for i in range(cxpoint1, cxpoint2): # Keep track of the selected values temp1 = ind1[i] temp2 = ind2[i] # Swap the matched value ind1[i], ind1[p1[temp2]] = temp2, temp1 ind2[i], ind2[p2[temp1]] = temp1, temp2 # Position bookkeeping p1[temp1], p1[temp2] = p1[temp2], p1[temp1] p2[temp1], p2[temp2] = p2[temp2], p2[temp1] return ind1, ind2 def cxUniformPartialyMatched(ind1, ind2, indpb): """Executes a uniform partially matched crossover (UPMX) on the input individuals. The two individuals are modified in place. This crossover expects :term:`sequence` individuals of indices, the result for any other type of individuals is unpredictable. :param ind1: The first individual participating in the crossover. :param ind2: The second individual participating in the crossover. :returns: A tuple of two individuals. Moreover, this crossover generates two children by matching pairs of values chosen at random with a probability of *indpb* in the two parents and swapping the values of those indexes. For more details see [Cicirello2000]_. This function uses the :func:`~random.random` and :func:`~random.randint` functions from the python base :mod:`random` module. .. [Cicirello2000] Cicirello and Smith, "Modeling GA performance for control parameter optimization", 2000. 
""" size = min(len(ind1), len(ind2)) p1, p2 = [0] * size, [0] * size # Initialize the position of each indices in the individuals for i in range(size): p1[ind1[i]] = i p2[ind2[i]] = i for i in range(size): if random.random() < indpb: # Keep track of the selected values temp1 = ind1[i] temp2 = ind2[i] # Swap the matched value ind1[i], ind1[p1[temp2]] = temp2, temp1 ind2[i], ind2[p2[temp1]] = temp1, temp2 # Position bookkeeping p1[temp1], p1[temp2] = p1[temp2], p1[temp1] p2[temp1], p2[temp2] = p2[temp2], p2[temp1] return ind1, ind2 def cxOrdered(ind1, ind2): """Executes an ordered crossover (OX) on the input individuals. The two individuals are modified in place. This crossover expects :term:`sequence` individuals of indices, the result for any other type of individuals is unpredictable. :param ind1: The first individual participating in the crossover. :param ind2: The second individual participating in the crossover. :returns: A tuple of two individuals. Moreover, this crossover generates holes in the input individuals. A hole is created when an attribute of an individual is between the two crossover points of the other individual. Then it rotates the element so that all holes are between the crossover points and fills them with the removed elements in order. For more details see [Goldberg1989]_. This function uses the :func:`~random.sample` function from the python base :mod:`random` module. .. [Goldberg1989] Goldberg. Genetic algorithms in search, optimization and machine learning. 
       Addison Wesley, 1989
    """
    size = min(len(ind1), len(ind2))
    # Pick two distinct cut points and order them so that a < b.
    a, b = random.sample(range(size), 2)
    if a > b:
        a, b = b, a

    # holes1[v] is False when value v lies OUTSIDE the cut section of the
    # other parent, i.e. v must be removed from this parent before refilling.
    holes1, holes2 = [True] * size, [True] * size
    for i in range(size):
        if i < a or i > b:
            holes1[ind2[i]] = False
            holes2[ind1[i]] = False

    # We must keep the original values somewhere before scrambling everything
    # NOTE(review): temp1/temp2 alias ind1/ind2 rather than copying them.
    # The write index (k1/k2) never overtakes the circular read index below,
    # so each position is read before it can be overwritten and the aliasing
    # appears safe -- but confirm before reordering anything in this loop.
    temp1, temp2 = ind1, ind2
    k1, k2 = b + 1, b + 1
    for i in range(size):
        # Scan circularly starting just after the cut section; keep only the
        # non-hole values, writing them back in the same circular order.
        if not holes1[temp1[(i + b + 1) % size]]:
            ind1[k1 % size] = temp1[(i + b + 1) % size]
            k1 += 1

        if not holes2[temp2[(i + b + 1) % size]]:
            ind2[k2 % size] = temp2[(i + b + 1) % size]
            k2 += 1

    # Swap the content between a and b (included)
    for i in range(a, b + 1):
        ind1[i], ind2[i] = ind2[i], ind1[i]

    return ind1, ind2


def cxBlend(ind1, ind2, alpha):
    """Executes a blend crossover that modify in-place the input individuals.
    The blend crossover expects :term:`sequence` individuals of floating point
    numbers.

    :param ind1: The first individual participating in the crossover.
    :param ind2: The second individual participating in the crossover.
    :param alpha: Extent of the interval in which the new values can be drawn
                  for each attribute on both side of the parents' attributes.
    :returns: A tuple of two individuals.

    This function uses the :func:`~random.random` function from the python base
    :mod:`random` module.
    """
    for i, (x1, x2) in enumerate(zip(ind1, ind2)):
        # gamma is drawn uniformly from [-alpha, 1 + alpha]; the two children
        # are complementary affine combinations of the parent genes.
        gamma = (1. + 2. * alpha) * random.random() - alpha
        ind1[i] = (1. - gamma) * x1 + gamma * x2
        ind2[i] = gamma * x1 + (1. - gamma) * x2

    return ind1, ind2


def cxSimulatedBinary(ind1, ind2, eta):
    """Executes a simulated binary crossover that modify in-place the input
    individuals. The simulated binary crossover expects :term:`sequence`
    individuals of floating point numbers.

    :param ind1: The first individual participating in the crossover.
    :param ind2: The second individual participating in the crossover.
    :param eta: Crowding degree of the crossover.
A high eta will produce children resembling to their parents, while a small eta will produce solutions much more different. :returns: A tuple of two individuals. This function uses the :func:`~random.random` function from the python base :mod:`random` module. """ for i, (x1, x2) in enumerate(zip(ind1, ind2)): rand = random.random() if rand <= 0.5: beta = 2. * rand else: beta = 1. / (2. * (1. - rand)) beta **= 1. / (eta + 1.) ind1[i] = 0.5 * (((1 + beta) * x1) + ((1 - beta) * x2)) ind2[i] = 0.5 * (((1 - beta) * x1) + ((1 + beta) * x2)) return ind1, ind2 def cxSimulatedBinaryBounded(ind1, ind2, eta, low, up): """Executes a simulated binary crossover that modify in-place the input individuals. The simulated binary crossover expects :term:`sequence` individuals of floating point numbers. :param ind1: The first individual participating in the crossover. :param ind2: The second individual participating in the crossover. :param eta: Crowding degree of the crossover. A high eta will produce children resembling to their parents, while a small eta will produce solutions much more different. :param low: A value or a :term:`python:sequence` of values that is the lower bound of the search space. :param up: A value or a :term:`python:sequence` of values that is the upper bound of the search space. :returns: A tuple of two individuals. This function uses the :func:`~random.random` function from the python base :mod:`random` module. .. note:: This implementation is similar to the one implemented in the original NSGA-II C code presented by Deb. 
""" size = min(len(ind1), len(ind2)) if not isinstance(low, Sequence): low = repeat(low, size) elif len(low) < size: raise IndexError("low must be at least the size of the shorter individual: %d < %d" % (len(low), size)) if not isinstance(up, Sequence): up = repeat(up, size) elif len(up) < size: raise IndexError("up must be at least the size of the shorter individual: %d < %d" % (len(up), size)) for i, xl, xu in zip(range(size), low, up): if random.random() <= 0.5: # This epsilon should probably be changed for 0 since # floating point arithmetic in Python is safer if abs(ind1[i] - ind2[i]) > 1e-14: x1 = min(ind1[i], ind2[i]) x2 = max(ind1[i], ind2[i]) rand = random.random() beta = 1.0 + (2.0 * (x1 - xl) / (x2 - x1)) alpha = 2.0 - beta ** -(eta + 1) if rand <= 1.0 / alpha: beta_q = (rand * alpha) ** (1.0 / (eta + 1)) else: beta_q = (1.0 / (2.0 - rand * alpha)) ** (1.0 / (eta + 1)) c1 = 0.5 * (x1 + x2 - beta_q * (x2 - x1)) beta = 1.0 + (2.0 * (xu - x2) / (x2 - x1)) alpha = 2.0 - beta ** -(eta + 1) if rand <= 1.0 / alpha: beta_q = (rand * alpha) ** (1.0 / (eta + 1)) else: beta_q = (1.0 / (2.0 - rand * alpha)) ** (1.0 / (eta + 1)) c2 = 0.5 * (x1 + x2 + beta_q * (x2 - x1)) c1 = min(max(c1, xl), xu) c2 = min(max(c2, xl), xu) if random.random() <= 0.5: ind1[i] = c2 ind2[i] = c1 else: ind1[i] = c1 ind2[i] = c2 return ind1, ind2 ###################################### # Messy Crossovers # ###################################### def cxMessyOnePoint(ind1, ind2): """Executes a one point crossover on :term:`sequence` individual. The crossover will in most cases change the individuals size. The two individuals are modified in place. :param ind1: The first individual participating in the crossover. :param ind2: The second individual participating in the crossover. :returns: A tuple of two individuals. This function uses the :func:`~random.randint` function from the python base :mod:`random` module. 
""" cxpoint1 = random.randint(0, len(ind1)) cxpoint2 = random.randint(0, len(ind2)) ind1[cxpoint1:], ind2[cxpoint2:] = ind2[cxpoint2:], ind1[cxpoint1:] return ind1, ind2 ###################################### # ES Crossovers # ###################################### def cxESBlend(ind1, ind2, alpha): """Executes a blend crossover on both, the individual and the strategy. The individuals shall be a :term:`sequence` and must have a :term:`sequence` :attr:`strategy` attribute. Adjustment of the minimal strategy shall be done after the call to this function, consider using a decorator. :param ind1: The first evolution strategy participating in the crossover. :param ind2: The second evolution strategy participating in the crossover. :param alpha: Extent of the interval in which the new values can be drawn for each attribute on both side of the parents' attributes. :returns: A tuple of two evolution strategies. This function uses the :func:`~random.random` function from the python base :mod:`random` module. """ for i, (x1, s1, x2, s2) in enumerate(zip(ind1, ind1.strategy, ind2, ind2.strategy)): # Blend the values gamma = (1. + 2. * alpha) * random.random() - alpha ind1[i] = (1. - gamma) * x1 + gamma * x2 ind2[i] = gamma * x1 + (1. - gamma) * x2 # Blend the strategies gamma = (1. + 2. * alpha) * random.random() - alpha ind1.strategy[i] = (1. - gamma) * s1 + gamma * s2 ind2.strategy[i] = gamma * s1 + (1. - gamma) * s2 return ind1, ind2 def cxESTwoPoint(ind1, ind2): """Executes a classical two points crossover on both the individuals and their strategy. The individuals shall be a :term:`sequence` and must have a :term:`sequence` :attr:`strategy` attribute. The crossover points for the individual and the strategy are the same. :param ind1: The first evolution strategy participating in the crossover. :param ind2: The second evolution strategy participating in the crossover. :returns: A tuple of two evolution strategies. 
This function uses the :func:`~random.randint` function from the python base :mod:`random` module. """ size = min(len(ind1), len(ind2)) pt1 = random.randint(1, size) pt2 = random.randint(1, size - 1) if pt2 >= pt1: pt2 += 1 else: # Swap the two cx points pt1, pt2 = pt2, pt1 ind1[pt1:pt2], ind2[pt1:pt2] = ind2[pt1:pt2], ind1[pt1:pt2] ind1.strategy[pt1:pt2], ind2.strategy[pt1:pt2] = \ ind2.strategy[pt1:pt2], ind1.strategy[pt1:pt2] return ind1, ind2 def cxESTwoPoints(ind1, ind2): """ .. deprecated:: 1.0 The function has been renamed. Use :func:`cxESTwoPoint` instead. """ return cxESTwoPoint(ind1, ind2) # List of exported function names. __all__ = ['cxOnePoint', 'cxTwoPoint', 'cxUniform', 'cxPartialyMatched', 'cxUniformPartialyMatched', 'cxOrdered', 'cxBlend', 'cxSimulatedBinary', 'cxSimulatedBinaryBounded', 'cxMessyOnePoint', 'cxESBlend', 'cxESTwoPoint'] # Deprecated functions __all__.extend(['cxTwoPoints', 'cxESTwoPoints']) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/deap/tools/emo.py0000644000076500000240000010053014456461441015075 0ustar00runnerstaffimport bisect from collections import defaultdict, namedtuple from itertools import chain import math from operator import attrgetter, itemgetter import random import numpy ###################################### # Non-Dominated Sorting (NSGA-II) # ###################################### def selNSGA2(individuals, k, nd='standard'): """Apply NSGA-II selection operator on the *individuals*. Usually, the size of *individuals* will be larger than *k* because any individual present in *individuals* will appear in the returned list at most once. Having the size of *individuals* equals to *k* will have no effect other than sorting the population according to their front rank. The list returned contains references to the input *individuals*. For more details on the NSGA-II operator see [Deb2002]_. :param individuals: A list of individuals to select from. 
:param k: The number of individuals to select. :param nd: Specify the non-dominated algorithm to use: 'standard' or 'log'. :returns: A list of selected individuals. .. [Deb2002] Deb, Pratab, Agarwal, and Meyarivan, "A fast elitist non-dominated sorting genetic algorithm for multi-objective optimization: NSGA-II", 2002. """ if nd == 'standard': pareto_fronts = sortNondominated(individuals, k) elif nd == 'log': pareto_fronts = sortLogNondominated(individuals, k) else: raise Exception('selNSGA2: The choice of non-dominated sorting ' 'method "{0}" is invalid.'.format(nd)) for front in pareto_fronts: assignCrowdingDist(front) chosen = list(chain(*pareto_fronts[:-1])) k = k - len(chosen) if k > 0: sorted_front = sorted(pareto_fronts[-1], key=attrgetter("fitness.crowding_dist"), reverse=True) chosen.extend(sorted_front[:k]) return chosen def sortNondominated(individuals, k, first_front_only=False): """Sort the first *k* *individuals* into different nondomination levels using the "Fast Nondominated Sorting Approach" proposed by Deb et al., see [Deb2002]_. This algorithm has a time complexity of :math:`O(MN^2)`, where :math:`M` is the number of objectives and :math:`N` the number of individuals. :param individuals: A list of individuals to select from. :param k: The number of individuals to select. :param first_front_only: If :obj:`True` sort only the first front and exit. :returns: A list of Pareto fronts (lists), the first list includes nondominated individuals. .. [Deb2002] Deb, Pratab, Agarwal, and Meyarivan, "A fast elitist non-dominated sorting genetic algorithm for multi-objective optimization: NSGA-II", 2002. 
""" if k == 0: return [] map_fit_ind = defaultdict(list) for ind in individuals: map_fit_ind[ind.fitness].append(ind) fits = list(map_fit_ind.keys()) current_front = [] next_front = [] dominating_fits = defaultdict(int) dominated_fits = defaultdict(list) # Rank first Pareto front for i, fit_i in enumerate(fits): for fit_j in fits[i+1:]: if fit_i.dominates(fit_j): dominating_fits[fit_j] += 1 dominated_fits[fit_i].append(fit_j) elif fit_j.dominates(fit_i): dominating_fits[fit_i] += 1 dominated_fits[fit_j].append(fit_i) if dominating_fits[fit_i] == 0: current_front.append(fit_i) fronts = [[]] for fit in current_front: fronts[-1].extend(map_fit_ind[fit]) pareto_sorted = len(fronts[-1]) # Rank the next front until all individuals are sorted or # the given number of individual are sorted. if not first_front_only: N = min(len(individuals), k) while pareto_sorted < N: fronts.append([]) for fit_p in current_front: for fit_d in dominated_fits[fit_p]: dominating_fits[fit_d] -= 1 if dominating_fits[fit_d] == 0: next_front.append(fit_d) pareto_sorted += len(map_fit_ind[fit_d]) fronts[-1].extend(map_fit_ind[fit_d]) current_front = next_front next_front = [] return fronts def assignCrowdingDist(individuals): """Assign a crowding distance to each individual's fitness. The crowding distance can be retrieve via the :attr:`crowding_dist` attribute of each individual's fitness. 
""" if len(individuals) == 0: return distances = [0.0] * len(individuals) crowd = [(ind.fitness.values, i) for i, ind in enumerate(individuals)] nobj = len(individuals[0].fitness.values) for i in range(nobj): crowd.sort(key=lambda element: element[0][i]) distances[crowd[0][1]] = float("inf") distances[crowd[-1][1]] = float("inf") if crowd[-1][0][i] == crowd[0][0][i]: continue norm = nobj * float(crowd[-1][0][i] - crowd[0][0][i]) for prev, cur, next in zip(crowd[:-2], crowd[1:-1], crowd[2:]): distances[cur[1]] += (next[0][i] - prev[0][i]) / norm for i, dist in enumerate(distances): individuals[i].fitness.crowding_dist = dist def selTournamentDCD(individuals, k): """Tournament selection based on dominance (D) between two individuals, if the two individuals do not interdominate the selection is made based on crowding distance (CD). The *individuals* sequence length has to be a multiple of 4 only if k is equal to the length of individuals. Starting from the beginning of the selected individuals, two consecutive individuals will be different (assuming all individuals in the input list are unique). Each individual from the input list won't be selected more than twice. This selection requires the individuals to have a :attr:`crowding_dist` attribute, which can be set by the :func:`assignCrowdingDist` function. :param individuals: A list of individuals to select from. :param k: The number of individuals to select. Must be less than or equal to len(individuals). :returns: A list of selected individuals. 
""" if k > len(individuals): raise ValueError("selTournamentDCD: k must be less than or equal to individuals length") if k == len(individuals) and k % 4 != 0: raise ValueError("selTournamentDCD: k must be divisible by four if k == len(individuals)") def tourn(ind1, ind2): if ind1.fitness.dominates(ind2.fitness): return ind1 elif ind2.fitness.dominates(ind1.fitness): return ind2 if ind1.fitness.crowding_dist < ind2.fitness.crowding_dist: return ind2 elif ind1.fitness.crowding_dist > ind2.fitness.crowding_dist: return ind1 if random.random() <= 0.5: return ind1 return ind2 individuals_1 = random.sample(individuals, len(individuals)) individuals_2 = random.sample(individuals, len(individuals)) chosen = [] for i in range(0, k, 4): chosen.append(tourn(individuals_1[i], individuals_1[i+1])) chosen.append(tourn(individuals_1[i+2], individuals_1[i+3])) chosen.append(tourn(individuals_2[i], individuals_2[i+1])) chosen.append(tourn(individuals_2[i+2], individuals_2[i+3])) return chosen ####################################### # Generalized Reduced runtime ND sort # ####################################### def identity(obj): """Returns directly the argument *obj*. """ return obj def isDominated(wvalues1, wvalues2): """Returns whether or not *wvalues2* dominates *wvalues1*. :param wvalues1: The weighted fitness values that would be dominated. :param wvalues2: The weighted fitness values of the dominant. :returns: :obj:`True` if wvalues2 dominates wvalues1, :obj:`False` otherwise. """ not_equal = False for self_wvalue, other_wvalue in zip(wvalues1, wvalues2): if self_wvalue > other_wvalue: return False elif self_wvalue < other_wvalue: not_equal = True return not_equal def median(seq, key=identity): """Returns the median of *seq* - the numeric value separating the higher half of a sample from the lower half. If there is an even number of elements in *seq*, it returns the mean of the two middle values. 
""" sseq = sorted(seq, key=key) length = len(seq) if length % 2 == 1: return key(sseq[(length - 1) // 2]) else: return (key(sseq[(length - 1) // 2]) + key(sseq[length // 2])) / 2.0 def sortLogNondominated(individuals, k, first_front_only=False): """Sort *individuals* in pareto non-dominated fronts using the Generalized Reduced Run-Time Complexity Non-Dominated Sorting Algorithm presented by Fortin et al. (2013). :param individuals: A list of individuals to select from. :returns: A list of Pareto fronts (lists), with the first list being the true Pareto front. """ if k == 0: return [] # Separate individuals according to unique fitnesses unique_fits = defaultdict(list) for i, ind in enumerate(individuals): unique_fits[ind.fitness.wvalues].append(ind) # Launch the sorting algorithm obj = len(individuals[0].fitness.wvalues)-1 fitnesses = list(unique_fits.keys()) front = dict.fromkeys(fitnesses, 0) # Sort the fitnesses lexicographically. fitnesses.sort(reverse=True) sortNDHelperA(fitnesses, obj, front) # Extract individuals from front list here nbfronts = max(front.values())+1 pareto_fronts = [[] for i in range(nbfronts)] for fit in fitnesses: index = front[fit] pareto_fronts[index].extend(unique_fits[fit]) # Keep only the fronts required to have k individuals. 
if not first_front_only: count = 0 for i, front in enumerate(pareto_fronts): count += len(front) if count >= k: return pareto_fronts[:i+1] return pareto_fronts else: return pareto_fronts[0] def sortNDHelperA(fitnesses, obj, front): """Create a non-dominated sorting of S on the first M objectives""" if len(fitnesses) < 2: return elif len(fitnesses) == 2: # Only two individuals, compare them and adjust front number s1, s2 = fitnesses[0], fitnesses[1] if isDominated(s2[:obj+1], s1[:obj+1]): front[s2] = max(front[s2], front[s1] + 1) elif obj == 1: sweepA(fitnesses, front) elif len(frozenset(map(itemgetter(obj), fitnesses))) == 1: # All individuals for objective M are equal: go to objective M-1 sortNDHelperA(fitnesses, obj-1, front) else: # More than two individuals, split list and then apply recursion best, worst = splitA(fitnesses, obj) sortNDHelperA(best, obj, front) sortNDHelperB(best, worst, obj-1, front) sortNDHelperA(worst, obj, front) def splitA(fitnesses, obj): """Partition the set of fitnesses in two according to the median of the objective index *obj*. The values equal to the median are put in the set containing the least elements. """ median_ = median(fitnesses, itemgetter(obj)) best_a, worst_a = [], [] best_b, worst_b = [], [] for fit in fitnesses: if fit[obj] > median_: best_a.append(fit) best_b.append(fit) elif fit[obj] < median_: worst_a.append(fit) worst_b.append(fit) else: best_a.append(fit) worst_b.append(fit) balance_a = abs(len(best_a) - len(worst_a)) balance_b = abs(len(best_b) - len(worst_b)) if balance_a <= balance_b: return best_a, worst_a else: return best_b, worst_b def sweepA(fitnesses, front): """Update rank number associated to the fitnesses according to the first two objectives using a geometric sweep procedure. 
""" stairs = [-fitnesses[0][1]] fstairs = [fitnesses[0]] for fit in fitnesses[1:]: idx = bisect.bisect_right(stairs, -fit[1]) if 0 < idx <= len(stairs): fstair = max(fstairs[:idx], key=front.__getitem__) front[fit] = max(front[fit], front[fstair]+1) for i, fstair in enumerate(fstairs[idx:], idx): if front[fstair] == front[fit]: del stairs[i] del fstairs[i] break stairs.insert(idx, -fit[1]) fstairs.insert(idx, fit) def sortNDHelperB(best, worst, obj, front): """Assign front numbers to the solutions in H according to the solutions in L. The solutions in L are assumed to have correct front numbers and the solutions in H are not compared with each other, as this is supposed to happen after sortNDHelperB is called.""" key = itemgetter(obj) if len(worst) == 0 or len(best) == 0: # One of the lists is empty: nothing to do return elif len(best) == 1 or len(worst) == 1: # One of the lists has one individual: compare directly for hi in worst: for li in best: if isDominated(hi[:obj+1], li[:obj+1]) or hi[:obj+1] == li[:obj+1]: front[hi] = max(front[hi], front[li] + 1) elif obj == 1: sweepB(best, worst, front) elif key(min(best, key=key)) >= key(max(worst, key=key)): # All individuals from L dominate H for objective M: # Also supports the case where every individuals in L and H # has the same value for the current objective # Skip to objective M-1 sortNDHelperB(best, worst, obj-1, front) elif key(max(best, key=key)) >= key(min(worst, key=key)): best1, best2, worst1, worst2 = splitB(best, worst, obj) sortNDHelperB(best1, worst1, obj, front) sortNDHelperB(best1, worst2, obj-1, front) sortNDHelperB(best2, worst2, obj, front) def splitB(best, worst, obj): """Split both best individual and worst sets of fitnesses according to the median of objective *obj* computed on the set containing the most elements. The values equal to the median are attributed so as to balance the four resulting sets as much as possible. 
""" median_ = median(best if len(best) > len(worst) else worst, itemgetter(obj)) best1_a, best2_a, best1_b, best2_b = [], [], [], [] for fit in best: if fit[obj] > median_: best1_a.append(fit) best1_b.append(fit) elif fit[obj] < median_: best2_a.append(fit) best2_b.append(fit) else: best1_a.append(fit) best2_b.append(fit) worst1_a, worst2_a, worst1_b, worst2_b = [], [], [], [] for fit in worst: if fit[obj] > median_: worst1_a.append(fit) worst1_b.append(fit) elif fit[obj] < median_: worst2_a.append(fit) worst2_b.append(fit) else: worst1_a.append(fit) worst2_b.append(fit) balance_a = abs(len(best1_a) - len(best2_a) + len(worst1_a) - len(worst2_a)) balance_b = abs(len(best1_b) - len(best2_b) + len(worst1_b) - len(worst2_b)) if balance_a <= balance_b: return best1_a, best2_a, worst1_a, worst2_a else: return best1_b, best2_b, worst1_b, worst2_b def sweepB(best, worst, front): """Adjust the rank number of the worst fitnesses according to the best fitnesses on the first two objectives using a sweep procedure. """ stairs, fstairs = [], [] iter_best = iter(best) next_best = next(iter_best, False) for h in worst: while next_best and h[:2] <= next_best[:2]: insert = True for i, fstair in enumerate(fstairs): if front[fstair] == front[next_best]: if fstair[1] > next_best[1]: insert = False else: del stairs[i], fstairs[i] break if insert: idx = bisect.bisect_right(stairs, -next_best[1]) stairs.insert(idx, -next_best[1]) fstairs.insert(idx, next_best) next_best = next(iter_best, False) idx = bisect.bisect_right(stairs, -h[1]) if 0 < idx <= len(stairs): fstair = max(fstairs[:idx], key=front.__getitem__) front[h] = max(front[h], front[fstair]+1) ###################################### # Non-Dominated Sorting (NSGA-III) # ###################################### NSGA3Memory = namedtuple("NSGA3Memory", ["best_point", "worst_point", "extreme_points"]) class selNSGA3WithMemory(object): """Class version of NSGA-III selection including memory for best, worst and extreme points. 
class selNSGA3WithMemory(object):
    """Class version of NSGA-III selection including memory for best, worst and
    extreme points. Registering this operator in a toolbox is a bit different
    than classical operators, it requires to instantiate the class instead
    of just registering the function::

        >>> from deap import base
        >>> ref_points = uniform_reference_points(nobj=3, p=12)
        >>> toolbox = base.Toolbox()
        >>> toolbox.register("select", selNSGA3WithMemory(ref_points))

    """
    def __init__(self, ref_points, nd="log"):
        self.ref_points = ref_points
        self.nd = nd
        # Start with +/- infinity so the first generation's extrema win.
        self.best_point = numpy.full((1, ref_points.shape[1]), numpy.inf)
        self.worst_point = numpy.full((1, ref_points.shape[1]), -numpy.inf)
        self.extreme_points = None

    def __call__(self, individuals, k):
        # Delegate to the functional version with return_memory=True and
        # persist the returned extrema for the next generation.
        chosen, memory = selNSGA3(individuals, k, self.ref_points, self.nd,
                                  self.best_point, self.worst_point,
                                  self.extreme_points, True)
        self.best_point = memory.best_point.reshape((1, -1))
        self.worst_point = memory.worst_point.reshape((1, -1))
        self.extreme_points = memory.extreme_points
        return chosen


def selNSGA3(individuals, k, ref_points, nd="log", best_point=None,
             worst_point=None, extreme_points=None, return_memory=False):
    """Implementation of NSGA-III selection as presented in [Deb2014]_.

    This implementation is partly based on `lmarti/nsgaiii
    <https://github.com/lmarti/nsgaiii>`_. It departs slightly from the
    original implementation in that it does not use memory to keep track
    of ideal and extreme points. This choice has been made to fit the
    functional api of DEAP. For a version of NSGA-III see
    :class:`~deap.tools.selNSGA3WithMemory`.

    :param individuals: A list of individuals to select from.
    :param k: The number of individuals to select.
    :param ref_points: Reference points to use for niching.
    :param nd: Specify the non-dominated algorithm to use: 'standard' or 'log'.
    :param best_point: Best point found at previous generation. If not provided
        find the best point only from current individuals.
    :param worst_point: Worst point found at previous generation. If not provided
        find the worst point only from current individuals.
    :param extreme_points: Extreme points found at previous generation. If not provided
        find the extreme points only from current individuals.
    :param return_memory: If :data:`True`, return the best, worst and extreme points
        in addition to the chosen individuals.
    :returns: A list of selected individuals.
    :returns: If `return_memory` is :data:`True`, a namedtuple with the
        `best_point`, `worst_point`, and `extreme_points`.

    You can generate the reference points using the
    :func:`uniform_reference_points` function::

        >>> ref_points = tools.uniform_reference_points(nobj=3, p=12)   # doctest: +SKIP
        >>> selected = selNSGA3(population, k, ref_points)  # doctest: +SKIP

    .. [Deb2014] Deb, K., & Jain, H. (2014). An Evolutionary Many-Objective Optimization
        Algorithm Using Reference-Point-Based Nondominated Sorting Approach,
        Part I: Solving Problems With Box Constraints. IEEE Transactions on
        Evolutionary Computation, 18(4), 577-601. doi:10.1109/TEVC.2013.2281535.
    """
    if nd == "standard":
        pareto_fronts = sortNondominated(individuals, k)
    elif nd == "log":
        pareto_fronts = sortLogNondominated(individuals, k)
    else:
        raise Exception("selNSGA3: The choice of non-dominated sorting "
                        "method '{0}' is invalid.".format(nd))

    # Extract fitnesses as a numpy array in the nd-sort order
    # Use wvalues * -1 to tackle always as a minimization problem
    fitnesses = numpy.array([ind.fitness.wvalues for f in pareto_fronts for ind in f])
    fitnesses *= -1

    # Get best and worst point of population. When the previous generation's
    # extrema are supplied (see selNSGA3WithMemory) they are folded in;
    # otherwise only the current individuals are used.
    if best_point is not None and worst_point is not None:
        best_point = numpy.min(numpy.concatenate((fitnesses, best_point), axis=0), axis=0)
        worst_point = numpy.max(numpy.concatenate((fitnesses, worst_point), axis=0), axis=0)
    else:
        best_point = numpy.min(fitnesses, axis=0)
        worst_point = numpy.max(fitnesses, axis=0)

    extreme_points = find_extreme_points(fitnesses, best_point, extreme_points)
    front_worst = numpy.max(fitnesses[:sum(len(f) for f in pareto_fronts), :], axis=0)
    intercepts = find_intercepts(extreme_points, best_point, worst_point, front_worst)
    niches, dist = associate_to_niche(fitnesses, ref_points, best_point, intercepts)

    # Get counts per niche for individuals in all front but the last
    niche_counts = numpy.zeros(len(ref_points), dtype=numpy.int64)
    index, counts = numpy.unique(niches[:-len(pareto_fronts[-1])], return_counts=True)
    niche_counts[index] = counts

    # Choose individuals from all fronts but the last
    chosen = list(chain(*pareto_fronts[:-1]))

    # Use niching to select the remaining individuals
    sel_count = len(chosen)
    n = k - sel_count
    selected = niching(pareto_fronts[-1], n, niches[sel_count:], dist[sel_count:], niche_counts)
    chosen.extend(selected)

    if return_memory:
        return chosen, NSGA3Memory(best_point, worst_point, extreme_points)
    return chosen


def find_extreme_points(fitnesses, best_point, extreme_points=None):
    """Find the individuals with extreme values for each objective function.

    :param fitnesses: 2-D array of minimization objectives, one row per individual.
    :param best_point: Ideal point used to translate the objectives.
    :param extreme_points: Extreme points from the previous generation, kept
        in the running so old extrema are not forgotten.
    :returns: One fitness row per objective, the minimizer of that
        objective's achievement scalarizing function.
    """
    # Keep track of last generation extreme points
    if extreme_points is not None:
        fitnesses = numpy.concatenate((fitnesses, extreme_points), axis=0)

    # Translate objectives
    ft = fitnesses - best_point

    # Find achievement scalarizing function (asf): weight 1 on the axis
    # objective and a large penalty (1e6) on every other objective.
    asf = numpy.eye(best_point.shape[0])
    asf[asf == 0] = 1e6
    asf = numpy.max(ft * asf[:, numpy.newaxis, :], axis=2)

    # Extreme point are the fitnesses with minimal asf
    min_asf_idx = numpy.argmin(asf, axis=1)
    return fitnesses[min_asf_idx, :]


def find_intercepts(extreme_points, best_point, current_worst, front_worst):
    """Find intercepts between the hyperplane and each axis with
    the ideal point as origin.

    Falls back to *current_worst* when the extreme points are linearly
    dependent, and to *front_worst* when the hyperplane solution is
    degenerate (zero components, negative or tiny intercepts, or
    intercepts beyond the current worst point).
    """
    # Construct hyperplane sum(f_i^n) = 1
    b = numpy.ones(extreme_points.shape[1])
    A = extreme_points - best_point
    try:
        x = numpy.linalg.solve(A, b)
    except numpy.linalg.LinAlgError:
        intercepts = current_worst
    else:
        if numpy.count_nonzero(x) != len(x):
            intercepts = front_worst
        else:
            intercepts = 1 / x

            if (not numpy.allclose(numpy.dot(A, x), b) or
                    numpy.any(intercepts <= 1e-6) or
                    numpy.any((intercepts + best_point) > current_worst)):
                intercepts = front_worst

    return intercepts
def associate_to_niche(fitnesses, reference_points, best_point, intercepts):
    """Associates individuals to reference points and calculates niche number.
    Corresponds to Algorithm 3 of Deb & Jain (2014).

    :param fitnesses: 2-D array of minimization objectives, one row per individual.
    :param reference_points: 2-D array of reference points on the unit simplex.
    :param best_point: Ideal point used for normalization.
    :param intercepts: Axis intercepts of the normalization hyperplane.
    :returns: Tuple ``(niches, distances)`` giving, for each individual, the
        index of the closest reference line and the distance to it.
    """
    # Normalize by ideal point and intercepts; eps guards against a
    # zero-length normalization range.
    fn = (fitnesses - best_point) / (intercepts - best_point + numpy.finfo(float).eps)

    # Create distance matrix: project each normalized fitness on every
    # reference line and measure the perpendicular distance.
    fn = numpy.repeat(numpy.expand_dims(fn, axis=1), len(reference_points), axis=1)
    norm = numpy.linalg.norm(reference_points, axis=1)

    distances = numpy.sum(fn * reference_points, axis=2) / norm.reshape(1, -1)
    distances = distances[:, :, numpy.newaxis] * reference_points[numpy.newaxis, :, :] / norm[numpy.newaxis, :, numpy.newaxis]
    distances = numpy.linalg.norm(distances - fn, axis=2)

    # Retrieve min distance niche index
    niches = numpy.argmin(distances, axis=1)
    distances = distances[list(range(niches.shape[0])), niches]
    return niches, distances


def niching(individuals, k, niches, distances, niche_counts):
    """Select *k* individuals from the last front, favouring the least
    crowded reference-point niches (Algorithm 4 of Deb & Jain, 2014).

    :param individuals: Individuals of the last (partially accepted) front.
    :param k: Number of individuals still to select.
    :param niches: Niche index of each individual in *individuals*.
    :param distances: Distance of each individual to its niche's reference line.
    :param niche_counts: Occupation count of every niche by already-chosen
        individuals; mutated in place as selections are made.
    :returns: List of selected individuals.
    """
    selected = []
    available = numpy.ones(len(individuals), dtype=bool)
    while len(selected) < k:
        # Maximum number of individuals (niches) to select in that round
        n = k - len(selected)

        # Find the available niches and the minimum niche count in them
        available_niches = numpy.zeros(len(niche_counts), dtype=bool)
        available_niches[numpy.unique(niches[available])] = True
        min_count = numpy.min(niche_counts[available_niches])

        # Select at most n niches with the minimum count
        selected_niches = numpy.flatnonzero(numpy.logical_and(available_niches, niche_counts == min_count))
        numpy.random.shuffle(selected_niches)
        selected_niches = selected_niches[:n]

        for niche in selected_niches:
            # Select from available individuals in niche
            niche_individuals = numpy.flatnonzero(numpy.logical_and(niches == niche, available))
            numpy.random.shuffle(niche_individuals)

            # If no individual in that niche, select the closest to reference
            # Else select randomly
            if niche_counts[niche] == 0:
                sel_index = niche_individuals[numpy.argmin(distances[niche_individuals])]
            else:
                sel_index = niche_individuals[0]

            # Update availability, counts and selection
            available[sel_index] = False
            niche_counts[niche] += 1
            selected.append(individuals[sel_index])

    return selected


def uniform_reference_points(nobj, p=4, scaling=None):
    """Generate reference points uniformly on the hyperplane intersecting
    each axis at 1. The scaling factor is used to combine multiple layers of
    reference points.

    :param nobj: Number of objectives (dimensionality of the points).
    :param p: Number of divisions along each objective.
    :param scaling: Optional shrink factor; the layer is scaled toward the
        simplex centroid so several layers can be concatenated.
    :returns: 2-D numpy array of reference points, one per row.
    """
    def gen_refs_recursive(ref, nobj, left, total, depth):
        # Enumerate all compositions of `total` into `nobj` parts; the last
        # coordinate absorbs whatever budget is left.
        points = []
        if depth == nobj - 1:
            ref[depth] = left / total
            points.append(ref)
        else:
            for i in range(left + 1):
                ref[depth] = i / total
                points.extend(gen_refs_recursive(ref.copy(), nobj, left - i, total, depth + 1))
        return points

    ref_points = numpy.array(gen_refs_recursive(numpy.zeros(nobj), nobj, p, p, 0))
    if scaling is not None:
        ref_points *= scaling
        ref_points += (1 - scaling) / nobj

    return ref_points
######################################
#   Strength Pareto     (SPEA-II)    #
######################################

def selSPEA2(individuals, k):
    """Apply SPEA-II selection operator on the *individuals*. Usually, the
    size of *individuals* will be larger than *n* because any individual
    present in *individuals* will appear in the returned list at most once.
    Having the size of *individuals* equals to *n* will have no effect other
    than sorting the population according to a strength Pareto scheme. The
    list returned contains references to the input *individuals*. For more
    details on the SPEA-II operator see [Zitzler2001]_.

    :param individuals: A list of individuals to select from.
    :param k: The number of individuals to select.
    :returns: A list of selected individuals.

    .. [Zitzler2001] Zitzler, Laumanns and Thiele, "SPEA 2: Improving the
       strength Pareto evolutionary algorithm", 2001.
    """
    N = len(individuals)
    L = len(individuals[0].fitness.values)
    # k-th nearest neighbour used by the density estimator (k = sqrt(N),
    # kept as a float and compared against integer ranks below).
    K = math.sqrt(N)
    strength_fits = [0] * N
    fits = [0] * N
    dominating_inds = [list() for i in range(N)]

    # Strength of i = number of individuals it dominates; raw fitness of i
    # = sum of the strengths of its dominators (0 means non-dominated).
    for i, ind_i in enumerate(individuals):
        for j, ind_j in enumerate(individuals[i+1:], i+1):
            if ind_i.fitness.dominates(ind_j.fitness):
                strength_fits[i] += 1
                dominating_inds[j].append(i)
            elif ind_j.fitness.dominates(ind_i.fitness):
                strength_fits[j] += 1
                dominating_inds[i].append(j)

    for i in range(N):
        for j in dominating_inds[i]:
            fits[i] += strength_fits[j]

    # Choose all non-dominated individuals
    chosen_indices = [i for i in range(N) if fits[i] < 1]

    if len(chosen_indices) < k:     # The archive is too small
        for i in range(N):
            # NOTE(review): only distances to individuals with index > i are
            # filled in; entries 0..i stay 0.0, so the k-th smallest distance
            # is biased toward zero for large i. Matches the shipped
            # behaviour — confirm against the SPEA2 density definition.
            distances = [0.0] * N
            for j in range(i + 1, N):
                dist = 0.0
                for l in range(L):
                    val = individuals[i].fitness.values[l] - \
                        individuals[j].fitness.values[l]
                    dist += val * val
                distances[j] = dist
            kth_dist = _randomizedSelect(distances, 0, N - 1, K)
            density = 1.0 / (kth_dist + 2.0)
            fits[i] += density

        # Fill the archive with the best of the dominated individuals.
        next_indices = [(fits[i], i) for i in range(N)
                        if not i in chosen_indices]
        next_indices.sort()
        chosen_indices += [i for _, i in next_indices[:k - len(chosen_indices)]]

    elif len(chosen_indices) > k:   # The archive is too large
        # Truncate by repeatedly removing the individual with the smallest
        # distance to its nearest neighbour (ties broken on farther ranks).
        N = len(chosen_indices)
        distances = [[0.0] * N for i in range(N)]
        sorted_indices = [[0] * N for i in range(N)]
        for i in range(N):
            for j in range(i + 1, N):
                dist = 0.0
                for l in range(L):
                    val = individuals[chosen_indices[i]].fitness.values[l] - \
                        individuals[chosen_indices[j]].fitness.values[l]
                    dist += val * val
                distances[i][j] = dist
                distances[j][i] = dist
            distances[i][i] = -1

        # Insert sort is faster than quick sort for short arrays
        for i in range(N):
            for j in range(1, N):
                l = j
                while l > 0 and distances[i][j] < distances[i][sorted_indices[i][l - 1]]:
                    sorted_indices[i][l] = sorted_indices[i][l - 1]
                    l -= 1
                sorted_indices[i][l] = j

        size = N
        to_remove = []
        while size > k:
            # Search for minimal distance
            min_pos = 0
            for i in range(1, N):
                for j in range(1, size):
                    dist_i_sorted_j = distances[i][sorted_indices[i][j]]
                    dist_min_sorted_j = distances[min_pos][sorted_indices[min_pos][j]]

                    if dist_i_sorted_j < dist_min_sorted_j:
                        min_pos = i
                        break
                    elif dist_i_sorted_j > dist_min_sorted_j:
                        break

            # Remove minimal distance from sorted_indices
            for i in range(N):
                distances[i][min_pos] = float("inf")
                distances[min_pos][i] = float("inf")

                for j in range(1, size - 1):
                    if sorted_indices[i][j] == min_pos:
                        sorted_indices[i][j] = sorted_indices[i][j + 1]
                        sorted_indices[i][j + 1] = min_pos

            # Remove corresponding individual from chosen_indices
            to_remove.append(min_pos)
            size -= 1

        for index in reversed(sorted(to_remove)):
            del chosen_indices[index]

    return [individuals[i] for i in chosen_indices]


def _randomizedSelect(array, begin, end, i):
    """Allows to select the ith smallest element from array without sorting it.
    Runtime is expected to be O(n).
    """
    if begin == end:
        return array[begin]
    q = _randomizedPartition(array, begin, end)
    k = q - begin + 1
    if i < k:  # i is a rank; works for float i as well (see selSPEA2's K)
        return _randomizedSelect(array, begin, q, i)
    else:
        return _randomizedSelect(array, q + 1, end, i - k)


def _randomizedPartition(array, begin, end):
    # Swap a random element to the front so _partition's pivot is random,
    # giving the expected O(n) behaviour of quickselect.
    i = random.randint(begin, end)
    array[begin], array[i] = array[i], array[begin]
    return _partition(array, begin, end)


def _partition(array, begin, end):
    # Hoare partition scheme around pivot array[begin]; returns the split
    # index j with array[begin:j+1] <= pivot <= array[j+1:end+1].
    x = array[begin]
    i = begin - 1
    j = end + 1
    while True:
        j -= 1
        while array[j] > x:
            j -= 1
        i += 1
        while array[i] < x:
            i += 1
        if i < j:
            array[i], array[j] = array[j], array[i]
        else:
            return j


__all__ = ['selNSGA2', 'selNSGA3', 'selNSGA3WithMemory', 'selSPEA2', 'sortNondominated', 'sortLogNondominated',
           'selTournamentDCD', 'uniform_reference_points']
def hypervolume(front, **kargs): """Returns the index of the individual with the least the hypervolume contribution. The provided *front* should be a set of non-dominated individuals having each a :attr:`fitness` attribute. """ # Must use wvalues * -1 since hypervolume use implicit minimization # And minimization in deap use max on -obj wobj = numpy.array([ind.fitness.wvalues for ind in front]) * -1 ref = kargs.get("ref", None) if ref is None: ref = numpy.max(wobj, axis=0) + 1 def contribution(i): # The contribution of point p_i in point set P # is the hypervolume of P without p_i return hv.hypervolume(numpy.concatenate((wobj[:i], wobj[i+1:])), ref) # Parallelization note: Cannot pickle local function contrib_values = [contribution(i) for i in range(len(front))] # Select the maximum hypervolume value (correspond to the minimum difference) return numpy.argmax(contrib_values) __all__ = ["hypervolume"] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/deap/tools/init.py0000644000076500000240000000632314456461441015265 0ustar00runnerstaffdef initRepeat(container, func, n): """Call the function *func* *n* times and return the results in a container type `container` :param container: The type to put in the data from func. :param func: The function that will be called n times to fill the container. :param n: The number of times to repeat func. :returns: An instance of the container filled with data from func. This helper function can be used in conjunction with a Toolbox to register a generator of filled containers, as individuals or population. >>> import random >>> random.seed(42) >>> initRepeat(list, random.random, 2) # doctest: +ELLIPSIS, ... # doctest: +NORMALIZE_WHITESPACE [0.6394..., 0.0250...] See the :ref:`list-of-floats` and :ref:`population` tutorials for more examples. 
def initRepeat(container, func, n):
    """Build a *container* out of the results of *n* successive calls
    to *func*.

    :param container: The type to put in the data from func.
    :param func: The function that will be called n times to fill the
                 container.
    :param n: The number of times to repeat func.
    :returns: An instance of the container filled with data from func.

    This helper function can be used in conjunction with a Toolbox
    to register a generator of filled containers, as individuals or
    population.

        >>> import random
        >>> random.seed(42)
        >>> initRepeat(list, random.random, 2) # doctest: +ELLIPSIS,
        ...                                    # doctest: +NORMALIZE_WHITESPACE
        [0.6394..., 0.0250...]

    See the :ref:`list-of-floats` and :ref:`population` tutorials for more examples.
    """
    samples = (func() for _ in range(n))
    return container(samples)


def initIterate(container, generator):
    """Feed *container* with the iterable produced by one call to
    *generator*. The iterable must be returned by the method or the
    object *generator*.

    :param container: The type to put in the data from func.
    :param generator: A function returning an iterable (list, tuple, ...),
                      the content of this iterable will fill the container.
    :returns: An instance of the container filled with data from the
              generator.

    This helper function can be used in conjunction with a Toolbox
    to register a generator of filled containers, as individuals or
    population.

        >>> import random
        >>> from functools import partial
        >>> random.seed(42)
        >>> gen_idx = partial(random.sample, range(10), 10)
        >>> initIterate(list, gen_idx)      # doctest: +SKIP
        [1, 0, 4, 9, 6, 5, 8, 2, 3, 7]

    See the :ref:`permutation` and :ref:`arithmetic-expr` tutorials for
    more examples.
    """
    produced = generator()
    return container(produced)


def initCycle(container, seq_func, n=1):
    """Fill *container* by cycling *n* times through the functions of
    *seq_func*, calling each in order.

    :param container: The type to put in the data from func.
    :param seq_func: A list of function objects to be called in order to
                     fill the container.
    :param n: Number of times to iterate through the list of functions.
    :returns: An instance of the container filled with data from the
              returned by the functions.

    This helper function can be used in conjunction with a Toolbox
    to register a generator of filled containers, as individuals or
    population.

        >>> func_seq = [lambda:1 , lambda:'a', lambda:3]
        >>> initCycle(list, func_seq, n=2)
        [1, 'a', 3, 1, 'a', 3]

    See the :ref:`funky` tutorial for an example.
    """
    def cycle_values():
        # Lazily produce n full passes over seq_func, in order.
        for _ in range(n):
            for produce in seq_func:
                yield produce()

    return container(cycle_values())


__all__ = ['initRepeat', 'initIterate', 'initCycle']


if __name__ == "__main__":
    import doctest
    import random

    random.seed(64)
    doctest.run_docstring_examples(initRepeat, globals())

    random.seed(64)
    doctest.run_docstring_examples(initIterate, globals())

    doctest.run_docstring_examples(initCycle, globals())
def migRing(populations, k, selection, replacement=None, migarray=None):
    """Perform a ring migration between the *populations*. The migration
    first select *k* emigrants from each population using the specified
    *selection* operator and then replace *k* individuals from the
    associated population in the *migarray* by the emigrants. If no
    *replacement* operator is specified, the immigrants will replace the
    emigrants of the population, otherwise, the immigrants will replace
    the individuals selected by the *replacement* operator. The migration
    array, if provided, shall contain each population's index once and
    only once. If no migration array is provided, it defaults to a serial
    ring migration (1 -- 2 -- ... -- n -- 1). Selection and replacement
    function are called using the signature ``selection(populations[i], k)``
    and ``replacement(populations[i], k)``. It is important to note that
    the replacement strategy must select *k* **different** individuals. For
    example, using a traditional tournament for replacement strategy will
    thus give undesirable effects, two individuals will most likely try to
    enter the same slot.

    :param populations: A list of (sub-)populations on which to operate
                        migration.
    :param k: The number of individuals to migrate.
    :param selection: The function to use for selection.
    :param replacement: The function to use to select which individuals will
                        be replaced. If :obj:`None` (default) the individuals
                        that leave the population are directly replaced.
    :param migarray: A list of indices indicating where the individuals from
                     a particular position in the list goes. This defaults
                     to a ring migration.
    """
    n_demes = len(populations)
    if migarray is None:
        # Serial ring: deme i sends to deme i+1, the last wraps to 0.
        migarray = [(deme + 1) % n_demes for deme in range(n_demes)]

    emigrants = []
    leaving = []
    for deme in range(n_demes):
        # Pick the emigrants first, then (optionally) the slots they free.
        outgoing = selection(populations[deme], k)
        emigrants.append(outgoing)
        if replacement is None:
            # No replacement strategy: the emigrants' own slots are reused.
            leaving.append(outgoing)
        else:
            leaving.append(replacement(populations[deme], k))

    for src, dst in enumerate(migarray):
        for newcomer, displaced in zip(emigrants[src], leaving[dst]):
            slot = populations[dst].index(displaced)
            populations[dst][slot] = newcomer


__all__ = ['migRing']
def mutGaussian(individual, mu, sigma, indpb):
    """This function applies a gaussian mutation of mean *mu* and standard
    deviation *sigma* on the input individual. This mutation expects a
    :term:`sequence` individual composed of real valued attributes.
    The *indpb* argument is the probability of each attribute to be mutated.

    :param individual: Individual to be mutated.
    :param mu: Mean or :term:`python:sequence` of means for the
               gaussian addition mutation.
    :param sigma: Standard deviation or :term:`python:sequence` of
                  standard deviations for the gaussian addition mutation.
    :param indpb: Independent probability for each attribute to be mutated.
    :returns: A tuple of one individual.

    This function uses the :func:`~random.random` and :func:`~random.gauss`
    functions from the python base :mod:`random` module.
    """
    size = len(individual)
    # Scalars are expanded lazily; sequences must cover every attribute.
    if not isinstance(mu, Sequence):
        mu = repeat(mu, size)
    elif len(mu) < size:
        raise IndexError("mu must be at least the size of individual: %d < %d" % (len(mu), size))
    if not isinstance(sigma, Sequence):
        sigma = repeat(sigma, size)
    elif len(sigma) < size:
        raise IndexError("sigma must be at least the size of individual: %d < %d" % (len(sigma), size))

    for i, m, s in zip(range(size), mu, sigma):
        if random.random() < indpb:
            individual[i] += random.gauss(m, s)

    return individual,


def mutPolynomialBounded(individual, eta, low, up, indpb):
    """Polynomial mutation as implemented in original NSGA-II algorithm in
    C by Deb.

    :param individual: :term:`Sequence <sequence>` individual to be mutated.
    :param eta: Crowding degree of the mutation. A high eta will produce
                a mutant resembling its parent, while a small eta will
                produce a solution much more different.
    :param low: A value or a :term:`python:sequence` of values that
                is the lower bound of the search space.
    :param up: A value or a :term:`python:sequence` of values that
               is the upper bound of the search space.
    :param indpb: Independent probability for each attribute to be mutated.
    :returns: A tuple of one individual.
    """
    size = len(individual)
    if not isinstance(low, Sequence):
        low = repeat(low, size)
    elif len(low) < size:
        raise IndexError("low must be at least the size of individual: %d < %d" % (len(low), size))
    if not isinstance(up, Sequence):
        up = repeat(up, size)
    elif len(up) < size:
        raise IndexError("up must be at least the size of individual: %d < %d" % (len(up), size))

    for i, xl, xu in zip(range(size), low, up):
        if random.random() <= indpb:
            x = individual[i]
            # Normalized distances to the lower and upper bounds.
            delta_1 = (x - xl) / (xu - xl)
            delta_2 = (xu - x) / (xu - xl)
            rand = random.random()
            mut_pow = 1.0 / (eta + 1.)

            # The polynomial perturbation is drawn toward whichever bound
            # the coin flip (rand < 0.5) selects.
            if rand < 0.5:
                xy = 1.0 - delta_1
                val = 2.0 * rand + (1.0 - 2.0 * rand) * xy ** (eta + 1)
                delta_q = val ** mut_pow - 1.0
            else:
                xy = 1.0 - delta_2
                val = 2.0 * (1.0 - rand) + 2.0 * (rand - 0.5) * xy ** (eta + 1)
                delta_q = 1.0 - val ** mut_pow

            x = x + delta_q * (xu - xl)
            x = min(max(x, xl), xu)  # clamp back into [xl, xu]
            individual[i] = x
    return individual,


def mutShuffleIndexes(individual, indpb):
    """Shuffle the attributes of the input individual and return the mutant.
    The *individual* is expected to be a :term:`sequence`. The *indpb* argument is the
    probability of each attribute to be moved. Usually this mutation is applied on
    vector of indices.

    :param individual: Individual to be mutated.
    :param indpb: Independent probability for each attribute to be exchanged to
                  another position.
    :returns: A tuple of one individual.

    This function uses the :func:`~random.random` and :func:`~random.randint`
    functions from the python base :mod:`random` module.
    """
    size = len(individual)
    for i in range(size):
        if random.random() < indpb:
            # Draw a partner index different from i: sample from size-1
            # slots and shift past i itself.
            swap_indx = random.randint(0, size - 2)
            if swap_indx >= i:
                swap_indx += 1
            individual[i], individual[swap_indx] = \
                individual[swap_indx], individual[i]

    return individual,
def mutFlipBit(individual, indpb):
    """Flip the value of the attributes of the input individual and return the
    mutant. The *individual* is expected to be a :term:`sequence` and the values of the
    attributes shall stay valid after the ``not`` operator is called on them.
    The *indpb* argument is the probability of each attribute to be
    flipped. This mutation is usually applied on boolean individuals.

    :param individual: Individual to be mutated.
    :param indpb: Independent probability for each attribute to be flipped.
    :returns: A tuple of one individual.

    This function uses the :func:`~random.random` function from the python base
    :mod:`random` module.
    """
    for pos, value in enumerate(individual):
        if random.random() < indpb:
            # Preserve the attribute's type when negating it.
            individual[pos] = type(value)(not value)

    return individual,


def mutUniformInt(individual, low, up, indpb):
    """Mutate an individual by replacing attributes, with probability *indpb*,
    by a integer uniformly drawn between *low* and *up* inclusively.

    :param individual: :term:`Sequence <sequence>` individual to be mutated.
    :param low: The lower bound or a :term:`python:sequence` of
                of lower bounds of the range from which to draw the new
                integer.
    :param up: The upper bound or a :term:`python:sequence` of
               of upper bounds of the range from which to draw the new
               integer.
    :param indpb: Independent probability for each attribute to be mutated.
    :returns: A tuple of one individual.
    """
    size = len(individual)

    # Broadcast scalar bounds; sequence bounds must cover every attribute.
    if isinstance(low, Sequence):
        if len(low) < size:
            raise IndexError("low must be at least the size of individual: %d < %d" % (len(low), size))
        lows = low
    else:
        lows = repeat(low, size)

    if isinstance(up, Sequence):
        if len(up) < size:
            raise IndexError("up must be at least the size of individual: %d < %d" % (len(up), size))
        ups = up
    else:
        ups = repeat(up, size)

    for pos, lo, hi in zip(range(size), lows, ups):
        if random.random() < indpb:
            individual[pos] = random.randint(lo, hi)

    return individual,


def mutInversion(individual):
    """Select two points (indices) in the individual, reverse the order of the
    attributes between these points [low, high] and return the mutated
    individual. This implementation allows for the length of the inversion to
    be 0 and 1, which would cause no change. This mutation is useful in
    situations where the order/adjacency of elements is important.

    :param individual: Individual to be mutated.
    :returns: A tuple of one individual.

    This function uses the :func:`~random.random` function from the python base
    :mod:`random` module.
    """
    length = len(individual)
    if not length:
        # Nothing to invert in an empty individual.
        return individual,

    first = random.randrange(length)
    second = random.randrange(length)
    lo, hi = (first, second) if first <= second else (second, first)

    # Reverse the contents of the individual between the indices.
    individual[lo:hi] = individual[lo:hi][::-1]

    return individual,
def mutESLogNormal(individual, c, indpb):
    r"""Mutate an evolution strategy according to its :attr:`strategy`
    attribute as described in [Beyer2002]_. First the strategy is mutated
    according to an extended log normal rule, :math:`\\boldsymbol{\sigma}_t =
    \\exp(\\tau_0 \mathcal{N}_0(0, 1)) \\left[ \\sigma_{t-1, 1}\\exp(\\tau
    \mathcal{N}_1(0, 1)), \ldots, \\sigma_{t-1, n} \\exp(\\tau
    \mathcal{N}_n(0, 1))\\right]`, with :math:`\\tau_0 =
    \\frac{c}{\\sqrt{2n}}` and :math:`\\tau = \\frac{c}{\\sqrt{2\\sqrt{n}}}`,
    the the individual is mutated by a normal distribution of mean 0 and
    standard deviation of :math:`\\boldsymbol{\sigma}_{t}` (its current
    strategy) then . A recommended choice is ``c=1`` when using a :math:`(10,
    100)` evolution strategy [Beyer2002]_ [Schwefel1995]_.

    :param individual: :term:`Sequence <sequence>` individual to be mutated.
        Must carry a ``strategy`` attribute of per-attribute step sizes,
        which is mutated in place alongside the individual.
    :param c: The learning parameter.
    :param indpb: Independent probability for each attribute to be mutated.
    :returns: A tuple of one individual.

    .. [Beyer2002] Beyer and Schwefel, 2002, Evolution strategies - A
       Comprehensive Introduction

    .. [Schwefel1995] Schwefel, 1995, Evolution and Optimum Seeking.
       Wiley, New York, NY
    """
    size = len(individual)
    t = c / math.sqrt(2. * math.sqrt(size))
    t0 = c / math.sqrt(2. * size)
    # The global factor exp(t0 * N(0,1)) is drawn once and shared by all
    # mutated strategy components.
    n = random.gauss(0, 1)
    t0_n = t0 * n

    for indx in range(size):
        if random.random() < indpb:
            # Log-normal update of the step size, then gaussian move of
            # the attribute using the NEW step size.
            individual.strategy[indx] *= math.exp(t0_n + t * random.gauss(0, 1))
            individual[indx] += individual.strategy[indx] * random.gauss(0, 1)

    return individual,


__all__ = ['mutGaussian', 'mutPolynomialBounded', 'mutShuffleIndexes',
           'mutFlipBit', 'mutUniformInt', 'mutInversion', 'mutESLogNormal']
def selWorst(individuals, k, fit_attr="fitness"):
    """Select the *k* worst individuals among the input *individuals*. The
    list returned contains references to the input *individuals*.

    :param individuals: A list of individuals to select from.
    :param k: The number of individuals to select.
    :param fit_attr: The attribute of individuals to use as selection criterion
    :returns: A list containing the k worst individuals.
    """
    ranked = sorted(individuals, key=attrgetter(fit_attr))
    return ranked[:k]


def selTournament(individuals, k, tournsize, fit_attr="fitness"):
    """Select the best individual among *tournsize* randomly chosen
    individuals, *k* times. The list returned contains
    references to the input *individuals*.

    :param individuals: A list of individuals to select from.
    :param k: The number of individuals to select.
    :param tournsize: The number of individuals participating in each tournament.
    :param fit_attr: The attribute of individuals to use as selection criterion
    :returns: A list of selected individuals.

    This function uses the :func:`~random.choice` function from the python base
    :mod:`random` module.
    """
    fitness_of = attrgetter(fit_attr)
    # One tournament per selected individual: draw aspirants with
    # replacement and keep the fittest.
    return [max(selRandom(individuals, tournsize), key=fitness_of)
            for _ in range(k)]


def selRoulette(individuals, k, fit_attr="fitness"):
    """Select *k* individuals from the input *individuals* using *k*
    spins of a roulette. The selection is made by looking only at the first
    objective of each individual. The list returned contains references to
    the input *individuals*.

    :param individuals: A list of individuals to select from.
    :param k: The number of individuals to select.
    :param fit_attr: The attribute of individuals to use as selection criterion
    :returns: A list of selected individuals.

    This function uses the :func:`~random.random` function from the python base
    :mod:`random` module.

    .. warning::
       The roulette selection by definition cannot be used for minimization
       or when the fitness can be smaller or equal to 0.
    """
    fitness_of = attrgetter(fit_attr)
    # Spin over the individuals sorted best-first so the fittest are
    # reached with the fewest accumulation steps.
    ranked = sorted(individuals, key=fitness_of, reverse=True)
    wheel_total = sum(fitness_of(ind).values[0] for ind in individuals)

    chosen = []
    for _ in range(k):
        spin = random.random() * wheel_total
        running = 0
        for candidate in ranked:
            running += fitness_of(candidate).values[0]
            if running > spin:
                chosen.append(candidate)
                break

    return chosen
    :param fitness_first: Set this to True if the first tournament done should \
    be the fitness one (i.e. the fitness tournament producing aspirants for \
    the size tournament). Setting it to False will behaves as the opposite \
    (size tournament feeding fitness tournaments with candidates). It has been \
    shown that this parameter does not have a significant effect in most cases\
    (see [Luke2002fighting]_).
    :param fit_attr: The attribute of individuals to use as selection criterion
    :returns: A list of selected individuals.

    .. [Luke2002fighting] Luke and Panait, 2002, Fighting bloat with
        nonparametric parsimony pressure
    """
    assert (1 <= parsimony_size <= 2), "Parsimony tournament size has to be in the range [1, 2]."

    def _sizeTournament(individuals, k, select):
        # Size tournament: among two candidates, the shorter one wins with
        # probability parsimony_size / 2 (ties are a fair coin flip).
        chosen = []
        for i in range(k):
            # Select two individuals from the population
            # The first individual has to be the shortest
            prob = parsimony_size / 2.
            ind1, ind2 = select(individuals, k=2)

            if len(ind1) > len(ind2):
                ind1, ind2 = ind2, ind1
            elif len(ind1) == len(ind2):
                # random selection in case of a tie
                prob = 0.5

            # Since size1 <= size2 then ind1 is selected
            # with a probability prob
            chosen.append(ind1 if random.random() < prob else ind2)

        return chosen

    def _fitTournament(individuals, k, select):
        # Regular fitness tournament; `select` supplies the aspirants.
        chosen = []
        for i in range(k):
            aspirants = select(individuals, k=fitness_size)
            chosen.append(max(aspirants, key=attrgetter(fit_attr)))
        return chosen

    # The two tournaments are chained: the inner one feeds aspirants to
    # the outer one, in the order requested by `fitness_first`.
    if fitness_first:
        tfit = partial(_fitTournament, select=selRandom)
        return _sizeTournament(individuals, k, tfit)
    else:
        tsize = partial(_sizeTournament, select=selRandom)
        return _fitTournament(individuals, k, tsize)


def selStochasticUniversalSampling(individuals, k, fit_attr="fitness"):
    """Select the *k* individuals among the input *individuals*.
    The selection is made by using a single random value to sample all of the
    individuals by choosing them at evenly spaced intervals. The list returned
    contains references to the input *individuals*.
    :param individuals: A list of individuals to select from.
    :param k: The number of individuals to select.
    :param fit_attr: The attribute of individuals to use as selection criterion
    :return: A list of selected individuals.

    This function uses the :func:`~random.uniform` function from the python base
    :mod:`random` module.
    """
    s_inds = sorted(individuals, key=attrgetter(fit_attr), reverse=True)
    sum_fits = sum(getattr(ind, fit_attr).values[0] for ind in individuals)

    # A single random start, then k equally spaced pointers along the
    # cumulative fitness wheel.
    distance = sum_fits / float(k)
    start = random.uniform(0, distance)
    points = [start + i * distance for i in range(k)]

    chosen = []
    for p in points:
        i = 0
        sum_ = getattr(s_inds[i], fit_attr).values[0]
        while sum_ < p:
            i += 1
            sum_ += getattr(s_inds[i], fit_attr).values[0]
        chosen.append(s_inds[i])

    return chosen


def selLexicase(individuals, k):
    """Returns an individual that does the best on the fitness cases when
    considered one at a time in random order.
    http://faculty.hampshire.edu/lspector/pubs/lexicase-IEEE-TEC.pdf

    :param individuals: A list of individuals to select from.
    :param k: The number of individuals to select.
    :returns: A list of selected individuals.
    """
    selected_individuals = []

    for i in range(k):
        fit_weights = individuals[0].fitness.weights

        candidates = individuals
        cases = list(range(len(individuals[0].fitness.values)))
        random.shuffle(cases)

        # Filter candidates case by case, keeping only those that match
        # the best value on the current case; stop when one survivor
        # remains or all cases are spent.
        while len(cases) > 0 and len(candidates) > 1:
            f = max if fit_weights[cases[0]] > 0 else min

            best_val_for_case = f(x.fitness.values[cases[0]] for x in candidates)

            candidates = [x for x in candidates if x.fitness.values[cases[0]] == best_val_for_case]
            cases.pop(0)

        selected_individuals.append(random.choice(candidates))

    return selected_individuals


def selEpsilonLexicase(individuals, k, epsilon):
    """
    Returns an individual that does the best on the fitness cases when
    considered one at a time in random order. Requires a epsilon parameter.
    https://push-language.hampshire.edu/uploads/default/original/1X/35c30e47ef6323a0a949402914453f277fb1b5b0.pdf
    Implemented epsilon_y implementation.

    :param individuals: A list of individuals to select from.
    :param k: The number of individuals to select.
    :returns: A list of selected individuals.
    """
    selected_individuals = []

    for i in range(k):
        fit_weights = individuals[0].fitness.weights

        candidates = individuals
        cases = list(range(len(individuals[0].fitness.values)))
        random.shuffle(cases)

        while len(cases) > 0 and len(candidates) > 1:
            # Unlike plain lexicase, survivors only need to be within a
            # fixed epsilon of the best value on the current case.
            if fit_weights[cases[0]] > 0:
                best_val_for_case = max(x.fitness.values[cases[0]] for x in candidates)
                min_val_to_survive_case = best_val_for_case - epsilon
                candidates = [x for x in candidates if x.fitness.values[cases[0]] >= min_val_to_survive_case]
            else:
                best_val_for_case = min(x.fitness.values[cases[0]] for x in candidates)
                max_val_to_survive_case = best_val_for_case + epsilon
                candidates = [x for x in candidates if x.fitness.values[cases[0]] <= max_val_to_survive_case]
            cases.pop(0)

        selected_individuals.append(random.choice(candidates))

    return selected_individuals


def selAutomaticEpsilonLexicase(individuals, k):
    """
    Returns an individual that does the best on the fitness cases when considered one at a
    time in random order.
    https://push-language.hampshire.edu/uploads/default/original/1X/35c30e47ef6323a0a949402914453f277fb1b5b0.pdf
    Implemented lambda_epsilon_y implementation.

    :param individuals: A list of individuals to select from.
    :param k: The number of individuals to select.
    :returns: A list of selected individuals.
""" selected_individuals = [] for i in range(k): fit_weights = individuals[0].fitness.weights candidates = individuals cases = list(range(len(individuals[0].fitness.values))) random.shuffle(cases) while len(cases) > 0 and len(candidates) > 1: errors_for_this_case = [x.fitness.values[cases[0]] for x in candidates] median_val = np.median(errors_for_this_case) median_absolute_deviation = np.median([abs(x - median_val) for x in errors_for_this_case]) if fit_weights[cases[0]] > 0: best_val_for_case = max(errors_for_this_case) min_val_to_survive = best_val_for_case - median_absolute_deviation candidates = [x for x in candidates if x.fitness.values[cases[0]] >= min_val_to_survive] else: best_val_for_case = min(errors_for_this_case) max_val_to_survive = best_val_for_case + median_absolute_deviation candidates = [x for x in candidates if x.fitness.values[cases[0]] <= max_val_to_survive] cases.pop(0) selected_individuals.append(random.choice(candidates)) return selected_individuals __all__ = ['selRandom', 'selBest', 'selWorst', 'selRoulette', 'selTournament', 'selDoubleTournament', 'selStochasticUniversalSampling', 'selLexicase', 'selEpsilonLexicase', 'selAutomaticEpsilonLexicase'] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/deap/tools/support.py0000644000076500000240000006360214456461441016041 0ustar00runnerstafffrom bisect import bisect_right from collections import defaultdict from copy import deepcopy from functools import partial from itertools import chain from operator import eq def identity(obj): """Returns directly the argument *obj*. """ return obj class History(object): """The :class:`History` class helps to build a genealogy of all the individuals produced in the evolution. It contains two attributes, the :attr:`genealogy_tree` that is a dictionary of lists indexed by individual, the list contain the indices of the parents. 
    The second attribute :attr:`genealogy_history` contains every individual
    indexed by their individual number as in the genealogy tree.

    The produced genealogy tree is compatible with `NetworkX `_, here is how
    to plot the genealogy tree ::

        history = History()

        # Decorate the variation operators
        toolbox.decorate("mate", history.decorator)
        toolbox.decorate("mutate", history.decorator)

        # Create the population and populate the history
        population = toolbox.population(n=POPSIZE)
        history.update(population)

        # Do the evolution, the decorators will take care of updating the
        # history
        # [...]

        import matplotlib.pyplot as plt
        import networkx

        graph = networkx.DiGraph(history.genealogy_tree)
        graph = graph.reverse()     # Make the graph top-down
        colors = [toolbox.evaluate(history.genealogy_history[i])[0] for i in graph]
        networkx.draw(graph, node_color=colors)
        plt.show()

    Using NetworkX in combination with `pygraphviz `_ (dot layout) this
    amazing genealogy tree can be obtained from the OneMax example with a
    population size of 20 and 5 generations, where the color of the nodes
    indicate their fitness, blue is low and red is high.

    .. image:: /_images/genealogy.png
       :width: 67%

    .. note::
       The genealogy tree might get very big if your population and/or the
       number of generation is large.
    """
    def __init__(self):
        # Monotonically increasing id handed out to each recorded individual.
        self.genealogy_index = 0
        self.genealogy_history = dict()
        self.genealogy_tree = dict()

    def update(self, individuals):
        """Update the history with the new *individuals*. The index present
        in their :attr:`history_index` attribute will be used to locate their
        parents, it is then modified to a unique one to keep track of those
        new individuals. This method should be called on the individuals
        after each variation.

        :param individuals: The list of modified individuals that shall be
                            inserted in the history.

        If the *individuals* do not have a :attr:`history_index` attribute,
        the attribute is added and this individual is considered as having no
        parent. This method should be called with the initial population to
        initialize the history.

        Modifying the internal :attr:`genealogy_index` of the history or the
        :attr:`history_index` of an individual may lead to unpredictable
        results and corruption of the history.
        """
        try:
            parent_indices = tuple(ind.history_index for ind in individuals)
        except AttributeError:
            # No history_index yet: treat these individuals as parentless
            # (the initial population).
            parent_indices = tuple()

        for ind in individuals:
            self.genealogy_index += 1
            ind.history_index = self.genealogy_index
            # deepcopy so later in-place variation does not rewrite history.
            self.genealogy_history[self.genealogy_index] = deepcopy(ind)
            self.genealogy_tree[self.genealogy_index] = parent_indices

    @property
    def decorator(self):
        """Property that returns an appropriate decorator to enhance the
        operators of the toolbox. The returned decorator assumes that the
        individuals are returned by the operator. First the decorator calls
        the underlying operation and then calls the :func:`update` function
        with what has been returned by the operator. Finally, it returns the
        individuals with their history parameters modified according to the
        update function.
        """
        def decFunc(func):
            def wrapFunc(*args, **kargs):
                individuals = func(*args, **kargs)
                self.update(individuals)
                return individuals
            return wrapFunc
        return decFunc

    def getGenealogy(self, individual, max_depth=float("inf")):
        """Provide the genealogy tree of an *individual*. The individual must
        have an attribute :attr:`history_index` as defined by
        :func:`~deap.tools.History.update` in order to retrieve its associated
        genealogy tree. The returned graph contains the parents up to
        *max_depth* variations before this individual. If not provided
        the maximum depth is up to the beginning of the evolution.

        :param individual: The individual at the root of the genealogy tree.
        :param max_depth: The approximate maximum distance between the root
                          (individual) and the leaves (parents), optional.
        :returns: A dictionary where each key is an individual index and the
                  values are a tuple corresponding to the index of the parents.
        """
        gtree = {}
        visited = set()     # Adds memory to the breadth first search

        # NOTE(review): despite the comment above, this walks the tree by
        # recursion (depth-first); `visited` still prevents re-expansion.
        def genealogy(index, depth):
            if index not in self.genealogy_tree:
                return
            depth += 1
            if depth > max_depth:
                return
            parent_indices = self.genealogy_tree[index]
            gtree[index] = parent_indices
            for ind in parent_indices:
                if ind not in visited:
                    genealogy(ind, depth)
                visited.add(ind)

        genealogy(individual.history_index, 0)
        return gtree


class Statistics(object):
    """Object that compiles statistics on a list of arbitrary objects.
    When created the statistics object receives a *key* argument that
    is used to get the values on which the function will be computed.
    If not provided the *key* argument defaults to the identity function.

    The value returned by the key may be a multi-dimensional object, i.e.:
    a tuple or a list, as long as the statistical function registered
    support it. So for example, statistics can be computed directly on
    multi-objective fitnesses when using numpy statistical function.

    :param key: A function to access the values on which to compute the
                statistics, optional.

    ::

        >>> import numpy
        >>> s = Statistics()
        >>> s.register("mean", numpy.mean)
        >>> s.register("max", max)
        >>> s.compile([1, 2, 3, 4]) # doctest: +SKIP
        {'max': 4, 'mean': 2.5}
        >>> s.compile([5, 6, 7, 8]) # doctest: +SKIP
        {'mean': 6.5, 'max': 8}
    """
    def __init__(self, key=identity):
        self.key = key
        self.functions = dict()
        self.fields = []

    def register(self, name, function, *args, **kargs):
        """Register a *function* that will be applied on the sequence each
        time :meth:`record` is called.

        :param name: The name of the statistics function as it would appear
                     in the dictionary of the statistics object.
        :param function: A function that will compute the desired statistics
                         on the data as preprocessed by the key.
        :param argument: One or more argument (and keyword argument) to pass
                         automatically to the registered function when called,
                         optional.
""" self.functions[name] = partial(function, *args, **kargs) self.fields.append(name) def compile(self, data): """Apply to the input sequence *data* each registered function and return the results as a dictionary. :param data: Sequence of objects on which the statistics are computed. """ values = tuple(self.key(elem) for elem in data) entry = dict() for key, func in self.functions.items(): entry[key] = func(values) return entry class MultiStatistics(dict): """Dictionary of :class:`Statistics` object allowing to compute statistics on multiple keys using a single call to :meth:`compile`. It takes a set of key-value pairs associating a statistics object to a unique name. This name can then be used to retrieve the statistics object. The following code computes statistics simultaneously on the length and the first value of the provided objects. :: >>> from operator import itemgetter >>> import numpy >>> len_stats = Statistics(key=len) >>> itm0_stats = Statistics(key=itemgetter(0)) >>> mstats = MultiStatistics(length=len_stats, item=itm0_stats) >>> mstats.register("mean", numpy.mean, axis=0) >>> mstats.register("max", numpy.max, axis=0) >>> mstats.compile([[0.0, 1.0, 1.0, 5.0], [2.0, 5.0]]) # doctest: +SKIP {'length': {'mean': 3.0, 'max': 4}, 'item': {'mean': 1.0, 'max': 2.0}} """ def compile(self, data): """Calls :meth:`Statistics.compile` with *data* of each :class:`Statistics` object. :param data: Sequence of objects on which the statistics are computed. """ record = {} for name, stats in self.items(): record[name] = stats.compile(data) return record @property def fields(self): return sorted(self.keys()) def register(self, name, function, *args, **kargs): """Register a *function* in each :class:`Statistics` object. :param name: The name of the statistics function as it would appear in the dictionary of the statistics object. :param function: A function that will compute the desired statistics on the data as preprocessed by the key. 
:param argument: One or more argument (and keyword argument) to pass automatically to the registered function when called, optional. """ for stats in self.values(): stats.register(name, function, *args, **kargs) class Logbook(list): """Evolution records as a chronological list of dictionaries. Data can be retrieved via the :meth:`select` method given the appropriate names. The :class:`Logbook` class may also contain other logbooks referred to as chapters. Chapters are used to store information associated to a specific part of the evolution. For example when computing statistics on different components of individuals (namely :class:`MultiStatistics`), chapters can be used to distinguish the average fitness and the average size. """ def __init__(self): self.buffindex = 0 self.chapters = defaultdict(Logbook) """Dictionary containing the sub-sections of the logbook which are also :class:`Logbook`. Chapters are automatically created when the right hand side of a keyworded argument, provided to the *record* function, is a dictionary. The keyword determines the chapter's name. For example, the following line adds a new chapter "size" that will contain the fields "max" and "mean". :: logbook.record(gen=0, size={'max' : 10.0, 'mean' : 7.5}) To access a specific chapter, use the name of the chapter as a dictionary key. For example, to access the size chapter and select the mean use :: logbook.chapters["size"].select("mean") Compiling a :class:`MultiStatistics` object returns a dictionary containing dictionaries, therefore when recording such an object in a logbook using the keyword argument unpacking operator (**), chapters will be automatically added to the logbook. :: >>> fit_stats = Statistics(key=attrgetter("fitness.values")) >>> size_stats = Statistics(key=len) >>> mstats = MultiStatistics(fitness=fit_stats, size=size_stats) >>> # [...] 
            >>> record = mstats.compile(population)
            >>> logbook.record(**record)
            >>> print logbook
              fitness          length
            ------------    ------------
            max     mean    max     mean
            2       1       4       3

        """
        self.columns_len = None
        self.header = None
        """Order of the columns to print when using the :data:`stream` and
        :meth:`__str__` methods. The syntax is a single iterable containing
        string elements. For example, with the previously defined statistics
        class, one can print the generation and the fitness average, and
        maximum with
        ::

            logbook.header = ("gen", "mean", "max")

        If not set the header is built with all fields, in arbitrary order
        on insertion of the first data. The header can be removed by setting
        it to :data:`None`.
        """

        self.log_header = True
        """Tells the log book to output or not the header when streaming the
        first line or getting its entire string representation. This defaults
        :data:`True`.
        """

    def record(self, **infos):
        """Enter a record of event in the logbook as a list of key-value
        pairs. The information are appended chronologically to a list as a
        dictionary. When the value part of a pair is a dictionary, the
        information contained in the dictionary are recorded in a chapter
        entitled as the name of the key part of the pair. Chapters are also
        Logbook.
        """
        # Non-dict values are replicated into every chapter so each chapter
        # row also carries the shared fields (e.g. the generation number).
        apply_to_all = {k: v for k, v in infos.items() if not isinstance(v, dict)}
        for key, value in list(infos.items()):
            if isinstance(value, dict):
                chapter_infos = value.copy()
                chapter_infos.update(apply_to_all)
                self.chapters[key].record(**chapter_infos)
                del infos[key]
        self.append(infos)

    def select(self, *names):
        """Return a list of values associated to the *names* provided
        in argument in each dictionary of the Statistics object list.
        One list per name is returned in order.
:: >>> log = Logbook() >>> log.record(gen=0, mean=5.4, max=10.0) >>> log.record(gen=1, mean=9.4, max=15.0) >>> log.select("mean") [5.4, 9.4] >>> log.select("gen", "max") ([0, 1], [10.0, 15.0]) With a :class:`MultiStatistics` object, the statistics for each measurement can be retrieved using the :data:`chapters` member : :: >>> log = Logbook() >>> log.record(**{'gen': 0, 'fit': {'mean': 0.8, 'max': 1.5}, ... 'size': {'mean': 25.4, 'max': 67}}) >>> log.record(**{'gen': 1, 'fit': {'mean': 0.95, 'max': 1.7}, ... 'size': {'mean': 28.1, 'max': 71}}) >>> log.chapters['size'].select("mean") [25.4, 28.1] >>> log.chapters['fit'].select("gen", "max") ([0, 1], [1.5, 1.7]) """ if len(names) == 1: return [entry.get(names[0], None) for entry in self] return tuple([entry.get(name, None) for entry in self] for name in names) @property def stream(self): """Retrieve the formatted not streamed yet entries of the database including the headers. :: >>> log = Logbook() >>> log.append({'gen' : 0}) >>> print log.stream # doctest: +NORMALIZE_WHITESPACE gen 0 >>> log.append({'gen' : 1}) >>> print log.stream # doctest: +NORMALIZE_WHITESPACE 1 """ startindex, self.buffindex = self.buffindex, len(self) return self.__str__(startindex) def __delitem__(self, key): if isinstance(key, slice): for i, in range(*key.indices(len(self))): self.pop(i) for chapter in self.chapters.values(): chapter.pop(i) else: self.pop(key) for chapter in self.chapters.values(): chapter.pop(key) def pop(self, index=0): """Retrieve and delete element *index*. The header and stream will be adjusted to follow the modification. :param item: The index of the element to remove, optional. It defaults to the first element. You can also use the following syntax to delete elements. 
        ::

            del log[0]
            del log[1::5]
        """
        if index < self.buffindex:
            # Keep the stream pointer aimed at the same logical entry.
            self.buffindex -= 1
        return super(self.__class__, self).pop(index)

    def __txt__(self, startindex):
        # Build the lines of text for entries from `startindex` onward,
        # recursing into chapters and tracking per-column widths.
        columns = self.header
        if not columns:
            columns = sorted(self[0].keys()) + sorted(self.chapters.keys())
        if not self.columns_len or len(self.columns_len) != len(columns):
            self.columns_len = [len(c) for c in columns]

        chapters_txt = {}
        offsets = defaultdict(int)
        for name, chapter in self.chapters.items():
            chapters_txt[name] = chapter.__txt__(startindex)
            if startindex == 0:
                # Chapter text includes its own header lines; remember how
                # many extra lines precede the data rows.
                offsets[name] = len(chapters_txt[name]) - len(self)

        str_matrix = []
        for i, line in enumerate(self[startindex:]):
            str_line = []
            for j, name in enumerate(columns):
                if name in chapters_txt:
                    column = chapters_txt[name][i + offsets[name]]
                else:
                    value = line.get(name, "")
                    string = "{0:n}" if isinstance(value, float) else "{0}"
                    column = string.format(value)
                self.columns_len[j] = max(self.columns_len[j], len(column))
                str_line.append(column)
            str_matrix.append(str_line)

        if startindex == 0 and self.log_header:
            header = []
            nlines = 1
            if len(self.chapters) > 0:
                nlines += max(map(len, chapters_txt.values())) - len(self) + 1
            header = [[] for i in range(nlines)]
            for j, name in enumerate(columns):
                if name in chapters_txt:
                    length = max(len(line.expandtabs()) for line in chapters_txt[name])
                    blanks = nlines - 2 - offsets[name]
                    for i in range(blanks):
                        header[i].append(" " * length)
                    header[blanks].append(name.center(length))
                    header[blanks + 1].append("-" * length)
                    for i in range(offsets[name]):
                        header[blanks + 2 + i].append(chapters_txt[name][i])
                else:
                    length = max(len(line[j].expandtabs()) for line in str_matrix)
                    for line in header[:-1]:
                        line.append(" " * length)
                    header[-1].append(name)
            str_matrix = chain(header, str_matrix)

        template = "\t".join("{%i:<%i}" % (i, l) for i, l in enumerate(self.columns_len))
        text = [template.format(*line) for line in str_matrix]
        return text

    def __str__(self, startindex=0):
        text = self.__txt__(startindex)
        return "\n".join(text)


class
HallOfFame(object):
    """The hall of fame contains the best individual that ever lived in the
    population during the evolution. It is lexicographically sorted at all
    time so that the first element of the hall of fame is the individual that
    has the best first fitness value ever seen, according to the weights
    provided to the fitness at creation time.

    The insertion is made so that old individuals have priority on new
    individuals. A single copy of each individual is kept at all time, the
    equivalence between two individuals is made by the operator passed to the
    *similar* argument.

    :param maxsize: The maximum number of individual to keep in the hall of
                    fame.
    :param similar: An equivalence operator between two individuals, optional.
                    It defaults to operator :func:`operator.eq`.

    The class :class:`HallOfFame` provides an interface similar to a list
    (without being one completely). It is possible to retrieve its length, to
    iterate on it forward and backward and to get an item or a slice from it.
    """
    def __init__(self, maxsize, similar=eq):
        self.maxsize = maxsize
        # `keys` holds fitnesses in ascending order; `items` holds the
        # individuals in the mirrored (best-first) order.
        self.keys = list()
        self.items = list()
        self.similar = similar

    def update(self, population):
        """Update the hall of fame with the *population* by replacing the
        worst individuals in it by the best individuals present in
        *population* (if they are better). The size of the hall of fame is
        kept constant.

        :param population: A list of individual with a fitness attribute to
                           update the hall of fame with.
        """
        for ind in population:
            if len(self) == 0 and self.maxsize != 0:
                # Working on an empty hall of fame is problematic for the
                # "for else"
                self.insert(population[0])
                continue
            if ind.fitness > self[-1].fitness or len(self) < self.maxsize:
                for hofer in self:
                    # Loop through the hall of fame to check for any
                    # similar individual
                    if self.similar(ind, hofer):
                        break
                else:
                    # The individual is unique and strictly better than
                    # the worst
                    if len(self) >= self.maxsize:
                        self.remove(-1)
                    self.insert(ind)

    def insert(self, item):
        """Insert a new individual in the hall of fame using the
        :func:`~bisect.bisect_right` function. The inserted individual is
        inserted on the right side of an equal individual. Inserting a new
        individual in the hall of fame also preserve the hall of fame's order.
        This method **does not** check for the size of the hall of fame, in a
        way that inserting a new individual in a full hall of fame will not
        remove the worst individual to maintain a constant size.

        :param item: The individual with a fitness attribute to insert in the
                     hall of fame.
        """
        # deepcopy so later mutation of the individual cannot corrupt the
        # archive; the two lists run in opposite orders, hence the mirrored
        # insertion index.
        item = deepcopy(item)
        i = bisect_right(self.keys, item.fitness)
        self.items.insert(len(self) - i, item)
        self.keys.insert(i, item.fitness)

    def remove(self, index):
        """Remove the specified *index* from the hall of fame.

        :param index: An integer giving which item to remove.
        """
        # `keys` is ascending while `items` is descending: mirror the index.
        del self.keys[len(self) - (index % len(self) + 1)]
        del self.items[index]

    def clear(self):
        """Clear the hall of fame."""
        del self.items[:]
        del self.keys[:]

    def __len__(self):
        return len(self.items)

    def __getitem__(self, i):
        return self.items[i]

    def __iter__(self):
        return iter(self.items)

    def __reversed__(self):
        return reversed(self.items)

    def __str__(self):
        return str(self.items)


class ParetoFront(HallOfFame):
    """The Pareto front hall of fame contains all the non-dominated
    individuals that ever lived in the population. That means that the Pareto
    front hall of fame can contain an infinity of different individuals.

    :param similar: A function that tells the Pareto front whether or not two
                    individuals are similar, optional.

    The size of the front may become very large if it is used for example on
    a continuous function with a continuous domain. In order to limit the
    number of individuals, it is possible to specify a similarity function
    that will return :data:`True` if the genotype of two individuals are
    similar. In that case only one of the two individuals will be added to the
    hall of fame. By default the similarity function is :func:`operator.eq`.

    Since, the Pareto front hall of fame inherits from the :class:`HallOfFame`,
    it is sorted lexicographically at every moment.
    """
    def __init__(self, similar=eq):
        # Unbounded archive: maxsize is None.
        HallOfFame.__init__(self, None, similar)

    def update(self, population):
        """Update the Pareto front hall of fame with the *population* by adding
        the individuals from the population that are not dominated by the hall
        of fame. If any individual in the hall of fame is dominated it is
        removed.

        :param population: A list of individual with a fitness attribute to
                           update the hall of fame with.
""" for ind in population: is_dominated = False dominates_one = False has_twin = False to_remove = [] for i, hofer in enumerate(self): # hofer = hall of famer if not dominates_one and hofer.fitness.dominates(ind.fitness): is_dominated = True break elif ind.fitness.dominates(hofer.fitness): dominates_one = True to_remove.append(i) elif ind.fitness == hofer.fitness and self.similar(ind, hofer): has_twin = True break for i in reversed(to_remove): # Remove the dominated hofer self.remove(i) if not is_dominated and not has_twin: self.insert(ind) __all__ = ['HallOfFame', 'ParetoFront', 'History', 'Statistics', 'MultiStatistics', 'Logbook'] if __name__ == "__main__": import doctest doctest.run_docstring_examples(Statistics, globals()) doctest.run_docstring_examples(Statistics.register, globals()) doctest.run_docstring_examples(Statistics.compile, globals()) doctest.run_docstring_examples(MultiStatistics, globals()) doctest.run_docstring_examples(MultiStatistics.register, globals()) doctest.run_docstring_examples(MultiStatistics.compile, globals()) ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1689936700.627589 deap-1.4.1/deap.egg-info/0000755000076500000240000000000014456461475014305 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936700.0 deap-1.4.1/deap.egg-info/PKG-INFO0000644000076500000240000003166314456461474015412 0ustar00runnerstaffMetadata-Version: 2.1 Name: deap Version: 1.4.1 Summary: Distributed Evolutionary Algorithms in Python Home-page: https://www.github.com/deap Author: deap Development Team Author-email: deap-users@googlegroups.com License: LGPL Keywords: evolutionary algorithms,genetic algorithms,genetic programming,cma-es,ga,gp,es,pso Platform: any Classifier: Development Status :: 4 - Beta Classifier: Intended Audience :: Developers Classifier: Intended Audience :: Education Classifier: Intended Audience :: Science/Research Classifier: License :: OSI Approved :: 
GNU Library or Lesser General Public License (LGPL) Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Topic :: Scientific/Engineering Classifier: Topic :: Software Development Description-Content-Type: text/markdown License-File: LICENSE.txt # DEAP [![Build status](https://travis-ci.org/DEAP/deap.svg?branch=master)](https://travis-ci.org/DEAP/deap) [![Download](https://img.shields.io/pypi/dm/deap.svg)](https://pypi.python.org/pypi/deap) [![Join the chat at https://gitter.im/DEAP/deap](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/DEAP/deap?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) [![Build Status](https://dev.azure.com/fderainville/DEAP/_apis/build/status/DEAP.deap?branchName=master)](https://dev.azure.com/fderainville/DEAP/_build/latest?definitionId=1&branchName=master) [![Documentation Status](https://readthedocs.org/projects/deap/badge/?version=master)](https://deap.readthedocs.io/en/master/?badge=master) DEAP is a novel evolutionary computation framework for rapid prototyping and testing of ideas. It seeks to make algorithms explicit and data structures transparent. It works in perfect harmony with parallelisation mechanisms such as multiprocessing and [SCOOP](https://github.com/soravux/scoop). DEAP includes the following features: * Genetic algorithm using any imaginable representation * List, Array, Set, Dictionary, Tree, Numpy Array, etc. 
* Genetic programming using prefix trees * Loosely typed, Strongly typed * Automatically defined functions * Evolution strategies (including CMA-ES) * Multi-objective optimisation (NSGA-II, NSGA-III, SPEA2, MO-CMA-ES) * Co-evolution (cooperative and competitive) of multiple populations * Parallelization of the evaluations (and more) * Hall of Fame of the best individuals that lived in the population * Checkpoints that take snapshots of a system regularly * Benchmarks module containing most common test functions * Genealogy of an evolution (that is compatible with [NetworkX](https://github.com/networkx/networkx)) * Examples of alternative algorithms : Particle Swarm Optimization, Differential Evolution, Estimation of Distribution Algorithm ## Downloads Following acceptance of [PEP 438](http://www.python.org/dev/peps/pep-0438/) by the Python community, we have moved DEAP's source releases on [PyPI](https://pypi.python.org). You can find the most recent releases at: https://pypi.python.org/pypi/deap/. ## Documentation See the [DEAP User's Guide](http://deap.readthedocs.org/) for DEAP documentation. In order to get the tip documentation, change directory to the `doc` subfolder and type in `make html`, the documentation will be under `_build/html`. You will need [Sphinx](http://sphinx.pocoo.org) to build the documentation. ### Notebooks Also checkout our new [notebook examples](https://github.com/DEAP/notebooks). Using [Jupyter notebooks](http://jupyter.org) you'll be able to navigate and execute each block of code individually and tell what every line is doing. Either, look at the notebooks online using the notebook viewer links at the botom of the page or download the notebooks, navigate to the you download directory and run ```bash jupyter notebook ``` ## Installation We encourage you to use easy_install or pip to install DEAP on your system. Other installation procedure like apt-get, yum, etc. usually provide an outdated version. 
```bash pip install deap ``` The latest version can be installed with ```bash pip install git+https://github.com/DEAP/deap@master ``` If you wish to build from sources, download or clone the repository and type ```bash python setup.py install ``` ## Build Status DEAP build status is available on Travis-CI https://travis-ci.org/DEAP/deap. ## Requirements The most basic features of DEAP requires Python2.6. In order to combine the toolbox and the multiprocessing module Python2.7 is needed for its support to pickle partial functions. CMA-ES requires Numpy, and we recommend matplotlib for visualization of results as it is fully compatible with DEAP's API. Since version 0.8, DEAP is compatible out of the box with Python 3. The installation procedure automatically translates the source to Python 3 with 2to3, however this requires having `setuptools<=58`. It is recommended to use `pip install setuptools==57.5.0` to address this issue. ## Example The following code gives a quick overview how simple it is to implement the Onemax problem optimization with genetic algorithm using DEAP. More examples are provided [here](http://deap.readthedocs.org/en/master/examples/index.html). 
```python import random from deap import creator, base, tools, algorithms creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", list, fitness=creator.FitnessMax) toolbox = base.Toolbox() toolbox.register("attr_bool", random.randint, 0, 1) toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_bool, n=100) toolbox.register("population", tools.initRepeat, list, toolbox.individual) def evalOneMax(individual): return sum(individual), toolbox.register("evaluate", evalOneMax) toolbox.register("mate", tools.cxTwoPoint) toolbox.register("mutate", tools.mutFlipBit, indpb=0.05) toolbox.register("select", tools.selTournament, tournsize=3) population = toolbox.population(n=300) NGEN=40 for gen in range(NGEN): offspring = algorithms.varAnd(population, toolbox, cxpb=0.5, mutpb=0.1) fits = toolbox.map(toolbox.evaluate, offspring) for fit, ind in zip(fits, offspring): ind.fitness.values = fit population = toolbox.select(offspring, k=len(population)) top10 = tools.selBest(population, k=10) ``` ## How to cite DEAP Authors of scientific papers including results generated using DEAP are encouraged to cite the following paper. ```xml @article{DEAP_JMLR2012, author = " F\'elix-Antoine Fortin and Fran\c{c}ois-Michel {De Rainville} and Marc-Andr\'e Gardner and Marc Parizeau and Christian Gagn\'e ", title = { {DEAP}: Evolutionary Algorithms Made Easy }, pages = { 2171--2175 }, volume = { 13 }, month = { jul }, year = { 2012 }, journal = { Journal of Machine Learning Research } } ``` ## Publications on DEAP * François-Michel De Rainville, Félix-Antoine Fortin, Marc-André Gardner, Marc Parizeau and Christian Gagné, "DEAP -- Enabling Nimbler Evolutions", SIGEVOlution, vol. 6, no 2, pp. 17-26, February 2014. [Paper](http://goo.gl/tOrXTp) * Félix-Antoine Fortin, François-Michel De Rainville, Marc-André Gardner, Marc Parizeau and Christian Gagné, "DEAP: Evolutionary Algorithms Made Easy", Journal of Machine Learning Research, vol. 
13, pp. 2171-2175, jul 2012. [Paper](http://goo.gl/amJ3x) * François-Michel De Rainville, Félix-Antoine Fortin, Marc-André Gardner, Marc Parizeau and Christian Gagné, "DEAP: A Python Framework for Evolutionary Algorithms", in !EvoSoft Workshop, Companion proc. of the Genetic and Evolutionary Computation Conference (GECCO 2012), July 07-11 2012. [Paper](http://goo.gl/pXXug) ## Projects using DEAP * Ribaric, T., & Houghten, S. (2017, June). Genetic programming for improved cryptanalysis of elliptic curve cryptosystems. In 2017 IEEE Congress on Evolutionary Computation (CEC) (pp. 419-426). IEEE. * Ellefsen, Kai Olav, Herman Augusto Lepikson, and Jan C. Albiez. "Multiobjective coverage path planning: Enabling automated inspection of complex, real-world structures." Applied Soft Computing 61 (2017): 264-282. * S. Chardon, B. Brangeon, E. Bozonnet, C. Inard (2016), Construction cost and energy performance of single family houses : From integrated design to automated optimization, Automation in Construction, Volume 70, p.1-13. * B. Brangeon, E. Bozonnet, C. Inard (2016), Integrated refurbishment of collective housing and optimization process with real products databases, Building Simulation Optimization, pp. 531–538 Newcastle, England. * Randal S. Olson, Ryan J. Urbanowicz, Peter C. Andrews, Nicole A. Lavender, La Creis Kidd, and Jason H. Moore (2016). Automating biomedical data science through tree-based pipeline optimization. Applications of Evolutionary Computation, pages 123-137. * Randal S. Olson, Nathan Bartley, Ryan J. Urbanowicz, and Jason H. Moore (2016). Evaluation of a Tree-based Pipeline Optimization Tool for Automating Data Science. Proceedings of GECCO 2016, pages 485-492. * Van Geit W, Gevaert M, Chindemi G, Rössert C, Courcol J, Muller EB, Schürmann F, Segev I and Markram H (2016). BluePyOpt: Leveraging open source software and cloud infrastructure to optimise model parameters in neuroscience. Front. Neuroinform. 10:17. 
doi: 10.3389/fninf.2016.00017 https://github.com/BlueBrain/BluePyOpt * Lara-Cabrera, R., Cotta, C. and Fernández-Leiva, A.J. (2014). Geometrical vs topological measures for the evolution of aesthetic maps in a rts game, Entertainment Computing, * Macret, M. and Pasquier, P. (2013). Automatic Tuning of the OP-1 Synthesizer Using a Multi-objective Genetic Algorithm. In Proceedings of the 10th Sound and Music Computing Conference (SMC). (pp 614-621). * Fortin, F. A., Grenier, S., & Parizeau, M. (2013, July). Generalizing the improved run-time complexity algorithm for non-dominated sorting. In Proceeding of the fifteenth annual conference on Genetic and evolutionary computation conference (pp. 615-622). ACM. * Fortin, F. A., & Parizeau, M. (2013, July). Revisiting the NSGA-II crowding-distance computation. In Proceeding of the fifteenth annual conference on Genetic and evolutionary computation conference (pp. 623-630). ACM. * Marc-André Gardner, Christian Gagné, and Marc Parizeau. Estimation of Distribution Algorithm based on Hidden Markov Models for Combinatorial Optimization. in Comp. Proc. Genetic and Evolutionary Computation Conference (GECCO 2013), July 2013. * J. T. Zhai, M. A. Bamakhrama, and T. Stefanov. "Exploiting Just-enough Parallelism when Mapping Streaming Applications in Hard Real-time Systems". Design Automation Conference (DAC 2013), 2013. * V. Akbarzadeh, C. Gagné, M. Parizeau, M. Argany, M. A Mostafavi, "Probabilistic Sensing Model for Sensor Placement Optimization Based on Line-of-Sight Coverage", Accepted in IEEE Transactions on Instrumentation and Measurement, 2012. * M. Reif, F. Shafait, and A. Dengel. "Dataset Generation for Meta-Learning". Proceedings of the German Conference on Artificial Intelligence (KI'12). 2012. * M. T. Ribeiro, A. Lacerda, A. Veloso, and N. Ziviani. "Pareto-Efficient Hybridization for Multi-Objective Recommender Systems". Proceedings of the Conference on Recommanders Systems (!RecSys'12). 2012. * M. Pérez-Ortiz, A. 
Arauzo-Azofra, C. Hervás-Martínez, L. García-Hernández and L. Salas-Morera. "A system learning user preferences for multiobjective optimization of facility layouts". Pr,oceedings on the Int. Conference on Soft Computing Models in Industrial and Environmental Applications (SOCO'12). 2012. * Lévesque, J.C., Durand, A., Gagné, C., and Sabourin, R., Multi-Objective Evolutionary Optimization for Generating Ensembles of Classifiers in the ROC Space, Genetic and Evolutionary Computation Conference (GECCO 2012), 2012. * Marc-André Gardner, Christian Gagné, and Marc Parizeau, "Bloat Control in Genetic Programming with Histogram-based Accept-Reject Method", in Proc. Genetic and Evolutionary Computation Conference (GECCO 2011), 2011. * Vahab Akbarzadeh, Albert Ko, Christian Gagné, and Marc Parizeau, "Topography-Aware Sensor Deployment Optimization with CMA-ES", in Proc. of Parallel Problem Solving from Nature (PPSN 2010), Springer, 2010. * DEAP is used in [TPOT](https://github.com/rhiever/tpot), an open source tool that uses genetic programming to optimize machine learning pipelines. * DEAP is also used in ROS as an optimization package http://www.ros.org/wiki/deap. * DEAP is an optional dependency for [PyXRD](https://github.com/mathijs-dumon/PyXRD), a Python implementation of the matrix algorithm developed for the X-ray diffraction analysis of disordered lamellar structures. * DEAP is used in [glyph](https://github.com/Ambrosys/glyph), a library for symbolic regression with applications to [MLC](https://en.wikipedia.org/wiki/Machine_learning_control). * DEAP is used in [Sklearn-genetic-opt](https://github.com/rodrigo-arenas/Sklearn-genetic-opt), an open source tool that uses evolutionary programming to fine tune machine learning hyperparameters. If you want your project listed here, send us a link and a brief description and we'll be glad to add it. 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936700.0 deap-1.4.1/deap.egg-info/SOURCES.txt0000644000076500000240000001467214456461474016202 0ustar00runnerstaffINSTALL.txt LICENSE.txt MANIFEST.in README.md setup.py deap/__init__.py deap/algorithms.py deap/base.py deap/cma.py deap/creator.py deap/gp.py deap.egg-info/PKG-INFO deap.egg-info/SOURCES.txt deap.egg-info/dependency_links.txt deap.egg-info/requires.txt deap.egg-info/top_level.txt deap/benchmarks/__init__.py deap/benchmarks/binary.py deap/benchmarks/gp.py deap/benchmarks/movingpeaks.py deap/benchmarks/tools.py deap/tools/__init__.py deap/tools/constraint.py deap/tools/crossover.py deap/tools/emo.py deap/tools/indicator.py deap/tools/init.py deap/tools/migration.py deap/tools/mutation.py deap/tools/selection.py deap/tools/support.py deap/tools/_hypervolume/__init__.py deap/tools/_hypervolume/_hv.c deap/tools/_hypervolume/_hv.h deap/tools/_hypervolume/hv.cpp deap/tools/_hypervolume/pyhv.py doc/Makefile doc/about.rst doc/conf.py doc/contributing.rst doc/index.rst doc/installation.rst doc/overview.rst doc/pip_req.txt doc/porting.rst doc/releases.rst doc/_images/constraints.png doc/_images/genealogy.png doc/_images/gptree.png doc/_images/gptypederrtree.png doc/_images/gptypedtree.png doc/_images/gptypedtrees.png doc/_images/more.png doc/_images/nsga3.png doc/_images/twin_logbook.png doc/_static/DEAP.pdf doc/_static/copybutton.js doc/_static/deap_icon-39x55.png doc/_static/deap_icon_16x16.ico doc/_static/deap_long.png doc/_static/deap_orange_icon_16x16.ico doc/_static/deap_orange_icon_32.ico doc/_static/lvsn.png doc/_static/sidebar.js doc/_static/ul.gif doc/_templates/indexsidebar.html doc/_templates/layout.html doc/_themes/pydoctheme/theme.conf doc/_themes/pydoctheme/static/pydoctheme.css doc/api/algo.rst doc/api/base.rst doc/api/benchmarks.rst doc/api/creator.rst doc/api/gp.rst doc/api/index.rst doc/api/tools.rst doc/code/benchmarks/ackley.py 
doc/code/benchmarks/bohachevsky.py doc/code/benchmarks/griewank.py doc/code/benchmarks/h1.py doc/code/benchmarks/himmelblau.py doc/code/benchmarks/kursawe.py doc/code/benchmarks/movingsc1.py doc/code/benchmarks/rastrigin.py doc/code/benchmarks/rosenbrock.py doc/code/benchmarks/schaffer.py doc/code/benchmarks/schwefel.py doc/code/benchmarks/shekel.py doc/code/examples/nsga3_ref_points.py doc/code/examples/nsga3_ref_points_combined.py doc/code/examples/nsga3_ref_points_combined_plot.py doc/code/tutorials/part_1/1_where_to_start.py doc/code/tutorials/part_2/2_1_fitness.py doc/code/tutorials/part_2/2_2_1_list_of_floats.py doc/code/tutorials/part_2/2_2_2_permutation.py doc/code/tutorials/part_2/2_2_3_arithmetic_expression.py doc/code/tutorials/part_2/2_2_4_evolution_strategy.py doc/code/tutorials/part_2/2_2_5_particle.py doc/code/tutorials/part_2/2_2_6_funky_one.py doc/code/tutorials/part_2/2_3_1_bag.py doc/code/tutorials/part_2/2_3_2_grid.py doc/code/tutorials/part_2/2_3_3_swarm.py doc/code/tutorials/part_2/2_3_4_demes.py doc/code/tutorials/part_2/2_3_5_seeding_a_population.py doc/code/tutorials/part_2/my_guess.json doc/code/tutorials/part_3/3_6_2_tool_decoration.py doc/code/tutorials/part_3/3_6_using_the_toolbox.py doc/code/tutorials/part_3/3_7_variations.py doc/code/tutorials/part_3/3_8_algorithms.py doc/code/tutorials/part_3/3_next_step.py doc/code/tutorials/part_3/logbook.py doc/code/tutorials/part_3/multistats.py doc/code/tutorials/part_3/stats.py doc/code/tutorials/part_4/4_4_Using_Cpp_NSGA.py doc/code/tutorials/part_4/4_5_home_made_eval_func.py doc/code/tutorials/part_4/SNC.cpp doc/code/tutorials/part_4/installSN.py doc/code/tutorials/part_4/sortingnetwork.py doc/examples/bipop_cmaes.rst doc/examples/cmaes.rst doc/examples/cmaes_plotting.rst doc/examples/coev_coop.rst doc/examples/eda.rst doc/examples/es_fctmin.rst doc/examples/es_onefifth.rst doc/examples/ga_knapsack.rst doc/examples/ga_onemax.rst doc/examples/ga_onemax_numpy.rst 
doc/examples/ga_onemax_short.rst doc/examples/gp_ant.rst doc/examples/gp_multiplexer.rst doc/examples/gp_parity.rst doc/examples/gp_spambase.rst doc/examples/gp_symbreg.rst doc/examples/index.rst doc/examples/nsga3.rst doc/examples/pso_basic.rst doc/examples/pso_multiswarm.rst doc/tutorials/advanced/benchmarking.rst doc/tutorials/advanced/checkpoint.rst doc/tutorials/advanced/constraints.rst doc/tutorials/advanced/gp.rst doc/tutorials/advanced/numpy.rst doc/tutorials/basic/part1.rst doc/tutorials/basic/part2.rst doc/tutorials/basic/part3.rst doc/tutorials/basic/part4.rst examples/bbob.py examples/speed.txt examples/coev/coop_adapt.py examples/coev/coop_base.py examples/coev/coop_evol.py examples/coev/coop_gen.py examples/coev/coop_niche.py examples/coev/hillis.py examples/coev/symbreg.py examples/de/basic.py examples/de/dynamic.py examples/de/sphere.py examples/eda/emna.py examples/eda/pbil.py examples/es/cma_1+l_minfct.py examples/es/cma_bipop.py examples/es/cma_minfct.py examples/es/cma_mo.py examples/es/cma_plotting.py examples/es/fctmin.py examples/es/onefifth.py examples/ga/evoknn.py examples/ga/evoknn_jmlr.py examples/ga/evosn.py examples/ga/heart_scale.csv examples/ga/knapsack.py examples/ga/knn.py examples/ga/kursawefct.py examples/ga/mo_rhv.py examples/ga/nqueens.py examples/ga/nsga2.py examples/ga/nsga3.py examples/ga/onemax.py examples/ga/onemax_island.py examples/ga/onemax_island_scoop.py examples/ga/onemax_mp.py examples/ga/onemax_multidemic.py examples/ga/onemax_numpy.py examples/ga/onemax_short.py examples/ga/sortingnetwork.py examples/ga/tsp.py examples/ga/xkcd.py examples/ga/pareto_front/dtlz1_front.json examples/ga/pareto_front/dtlz2_front.json examples/ga/pareto_front/dtlz3_front.json examples/ga/pareto_front/dtlz4_front.json examples/ga/pareto_front/zdt1_front.json examples/ga/pareto_front/zdt2_front.json examples/ga/pareto_front/zdt3_front.json examples/ga/pareto_front/zdt4_front.json examples/ga/pareto_front/zdt6_front.json 
examples/ga/tsp/gr120.json examples/ga/tsp/gr17.json examples/ga/tsp/gr24.json examples/gp/__init__.py examples/gp/adf_symbreg.py examples/gp/ant.py examples/gp/multiplexer.py examples/gp/parity.py examples/gp/spambase.csv examples/gp/spambase.py examples/gp/symbreg.py examples/gp/symbreg_epsilon_lexicase.py examples/gp/symbreg_harm.py examples/gp/symbreg_numpy.py examples/gp/ant/AntSimulatorFast.cpp examples/gp/ant/AntSimulatorFast.hpp examples/gp/ant/buildAntSimFast.py examples/gp/ant/santafe_trail.txt examples/pso/basic.py examples/pso/basic_numpy.py examples/pso/multiswarm.py examples/pso/speciation.py tests/test_algorithms.py tests/test_benchmarks.py tests/test_convergence.py tests/test_creator.py tests/test_init.py tests/test_logbook.py tests/test_multiproc.py tests/test_mutation.py tests/test_operators.py tests/test_pickle.py tests/test_statistics.py././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936700.0 deap-1.4.1/deap.egg-info/dependency_links.txt0000644000076500000240000000000114456461474020352 0ustar00runnerstaff ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936700.0 deap-1.4.1/deap.egg-info/requires.txt0000644000076500000240000000000614456461474016700 0ustar00runnerstaffnumpy ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936700.0 deap-1.4.1/deap.egg-info/top_level.txt0000644000076500000240000000000514456461474017031 0ustar00runnerstaffdeap ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.6440876 deap-1.4.1/doc/0000755000076500000240000000000014456461475012447 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/Makefile0000644000076500000240000000610714456461441014104 0ustar00runnerstaff# Makefile for Sphinx documentation # # You can set these variables from the command line. 
SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf $(BUILDDIR)/* html: PYTHONPATH=${PWD}/../ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." 
qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/EAP.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/EAP.qhc" latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ "run these through (pdf)latex." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.6500487 deap-1.4.1/doc/_images/0000755000076500000240000000000014456461475014053 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/_images/constraints.png0000644000076500000240000012333314456461441017126 0ustar00runnerstaffPNG  IHDR,ӽJPsBIT|d pHYsaa?i IDATxwXg,u 45%FcC[,ĊK+%1j-"jj|Qk4X5bO ʲ΂.ę̜ٝ3c1c13P2`1c1cLN`1c1c1 ,c1c1f8c1c1 'c1c1ƘAc1c13hb1c1cX1c1c̠q1c1c4N`1c1c1 ,c1c1f8c1c1 'c1c1ƘAc1c13hb1c1cX1c1c̠q1fPZjL Lbd: 777`$882 G:c ,V,n޼?uօ5PjUt6mBFF!j)RZbL&S377;>3CEOwŘ!P7VVܽ{WXhaa*UiӦ?~ jIBM=߱cǤS#Eqb(PwLK 31_8RʞPдiSmVVVx1;ÇùsU+C8A-L ^^^ٳ' 99111Ǯ]>,1n޼ {{{y]1f-ܼyaĉx.]koAN+ȑ#^|lʹssqq)hG6n8 0ժU+oqQ<~&&&pqqAǎ1qD8::JV%qkժ;l1W* (ccc/_UVUsw օ+]͟?/s]cСؼy+R-Bpp0 -+VH S̘1D\.| u oFX6l>AתU+9rFAXԩCϧ4Ο?AX UPO eA͛7 6? 
AJe_ԩّU^NJ/^(yfݻ7\.Sfh˖-oٲ%d2i111$뵝ɉʗ/O)))3vX4g4AwkǓrJJJ3fPZH.5iӆ~7eiGUV%SSSX"uޝN:3ևR@@UR(<<\gT#ܴ.k$Ah͚5*Ǻ髯"///%\NժUnݺѡC1XӟX.EA(33-ZD5j 333VM:UcJDt $233J*/}w:t(999)U\)..N6M]:t( @S{S _~Ѻ;wR֖֭Օ @ϟ'wWӟ"oѣjoQ̨֖f͚4}thAKVv Nׯ_WEr)&&̃ Wt~XR\\\H&i$ҭ[tR~n,g{Ŋ'X7o @~iH\2;MFu!AE+UZ5kF4zh7nUZA,vZiԨQ4{l1b5jԈ5jDDD/^`ruu%A($$D|; 8MF9y{{+cTY⤷_~dnnNiӦQ-HP6XgϞ%AR9MqTbE iӦQHS%''jrjذ!ҬYhĈH ЬYԖו {; au59::}_FM`qJݻ`Vh4rHrtt$L;}4Rvh4sL0`YXX1cWPzhu~F})? l\\\^ۯ_?>C8q"͜9 L]tIy\ussS96L|(ۗhذa4i$H:tk߯ڵ+͚5I&I.}'4}t'sss .܎ M`i:tERJԤI/hСdnnN2>>dXJhĈ4k,4h999)۔pٳ' @zRI(m oAi̙ Iϟ?WY %+ܹCdffF֭[G PZ(;;[9]WbUEt\\M>4h@dffF...4byBCCݝ͍̙C:۔X>,,,T:M۹A!$v^I(5{8=kKCzNIII4k,SYXX5կ_f̘Aj۹ ,qUŵQtS#GGG233#j߾=ڵm_MrwG "255%GGG}T9=33tB ТET^B P.]TFh=ylllښ޾}M*﯐z:ܹqYHڹstZߟA~)_1h+H P6mHlA ___zʲ$M8Qez~;#> ,]*Rh3g2VU+)###ڽ{/^rzrzrr޽{TJUXYYYz}>")7 @&&&*; 6lrs5" EA裏>RIR5Ȉ=zS*_<֕={F666TR%VYZZϒbۧ\6X Phh$uIeO Pƍڹ,mh?uiJǓ ?GM Ј#TdeܹsI߿233APO+9%KƆzM&LSRH&QʕՎ?ԣGj֬ISL?[n:۔]9׏f̘AGV.o"_]M&O"EўҫW5%KKǺ;w 6)S_|A;w&sssϖ-}ݻwu.z@۱ ׯ'###>={l>|8կ__' ]|_ל9sH&Q޽iԫW/dT|y:{.N`"#:::_6lW\\+h. 
B CʕS դɣD 2]qp7ok0E}acP 'N$ʕ+<)zoܸ񽼼RJ*uٽ{7 VT ,w#&rhԨkid)ǩK.ڷo I˸qHzZ:t7k˹B/_$Ayz?6 Б#G^8BUV o+$"剸>v]#d2ʅtAXTbEiu%LF.]3$,X@ ܹsՖYYYD~KV):7nܘ粊΄ *$C;đFFF*I&"m۶ ԴiS}ٳgTzum_Qtp)$.rrr"LFigcI&$-[Lc9;۹"i6qDUV3 v_vNSݻZi"d\ ;vP&4wYpw&/BPjUܽ{/_Dlll4UT###O-PB֩#_|,YCj'L:uL1bv؁ϣ_~?gΜ7M3˗/رcׯ^z!** cƌ3^Zm+&L{h"k)UXݻwǏ?[n)۽YYY;wZ*U*_uU^N:[ny۷ʼO?~-N<'NWeeY ,VdpM$$$uqpp@BBUXW4YYYi&M=֮]իWcʕʤي+Wo߾E֭q9ԫW @Ŋabb"BHH޼ykQMt.,h]F$ܹFŋhѢ:vkkk!>>ZWQhٲ%<==c|(W{ѣmV¡C4 HMMU{޽ӧ,,,Ю];T^ʕL&CLL =xP9)EFFFy^4OXlo߮|ž9뇰0l9.4?/^zBܰaerR||~AAs/_Twږ)c֭˗ y1.\7o>y=?:;8K"]Pc]!ҡC>zF_OPA֭[ɓxZ}+2Çg:G6ѣG*1x`|'O޽{i&o7nƹs4&=z3dh311W_ٳgǐ!CTرŶn ?۷oǀm6899K.žn*c͚57n^;w.qyxxx{!=u\TLL Ad:5771|$$$رcǖ-[p]Β8.*7+⻼r ֭[uGffSa(Sm~QJ*y&߿粊&e'MիW:uBժU!rǒ ;;;WʕuK[r$:8K""W"4uC$?[RO/ BOv(˩$IgkC`` LLL7n\6m#>>> "'oFBBTF_FѩS'_HJJǕ ៷oa^|J&M3\~qmPlK4tPȫ;v %%Ç/֋bʺ&M;kn߾ڵk%qĉ"O_Ɨ_~ 8p`^DGGz8vJE'}\}?z/(jӳpҥB˕+u[uQ'9//^K 5&I\.++Kpo֬r"))YC\\lق%K`޼y7o- xF^3j+6m‘#Gj*`޼yh߾WI$s}6>|#brgϞ6lbccѾ}{;sv_|֭ 0o<9 jH^rvneggk?P$)(~9G*EPm7"p###]tqcǎ+Fk-\P倝)S0lذBŦhs@vvv "aaGQ~L>P1+2i$=ҊbN*D DqB7vqÇ/{5hسg>JnW^U[nBDƍ7J4.] ..>>>5jqU)))HIIJ-;;;/_<֮]SNyܹsjfgg=,?7n{/\ھGzJe^VVʅk݃ k*ΝW^)aL@aϞ=:;7mڄG>@˖-%4ϟWvڷorʩKHHXARU(象 v7]Xnݺm[] c]ɡmDev=wPM41Tm׬g| !+R3gDff&BBBаaC4m 4%;v oFÆ iҤ M˗nݺ)ĵk닩S*^z 7 ǏѠAmVl۶m{n;v\.+ nݺFXr%/xyy8pv튝;w*N}b(*[ҥK1sLԬY;w+RRRp=;v _ 1c`ׯ_F~O?i|''n _=B=0e4#"h';oߎ֭[cذaXf 5j$$$ʕ+vΜ9bҤI=z4|||лwoɓqㆲ@zQY#33ϟ?˗qdggSN3ԫWCjK߿>sXZZ*o۶-v܉={ hٲ%|}}ɘ۷O>hѢ:wu"99W\Ç*TݻѫW/4nmڴAڵ!111kձsXY(;}||дiS:u aaa2e$XZZLsKSWJ*Mƍw^lذ-RN?|0vQ$ ºu:&OHHP֝ҕX֦Yf'ܽ{7N87bEnܹ۷/֮]#==̙3 K.7lٲo߾E5h"LC2dEEE۷o56 d2ݻwO9mݺuԫW/rww' PЊ+(%%%_4p@Z*r[.X233kghAdkkK4gzʲ^ŋS Ғr9S׮]iÆ Fw=ԥK"KKKcǎQxx8d2P{ommtiORJdjjJUVN:QTTroߦnݺd2bbb4DDqqq4x`rpp rttӭ[ﲴJ8攔ҥKx"O… 6mΞ= cccnaaa+ۖ,YٳgcjCYO[+W~:=z`ƌ\1JK.a믿Srxxx`ر*e8jVZ͛ʕ+ Ŝ9s1$W'&&bÆ x-z@-M7oDVHlڴ n݂Ou`n/_… `899a̘1RŊ>mׯ 777YĈ#~z4k Rc$''K,eb*5pЙ3gT՚3$ؿ?0o_?PXdgg#99YYHS!** }ŏ?KckҤ  ڳg^|)qdwݻ4iLMM1$Sjk˻effbPy"3w^N`1puuEvvab-dj+'&$$k\1J|3faÆڵ|ncLa###!((H9 ި[. 
66+WÇq9+WN`1IVAEGGcРARc֭[KMgϞsAk^nc,l fϞ#Gɓ'__ ==ӧOL8Qe6m}ƍ1a0Xd;PXvvvċܞ={A4Eqssl5 իon5X5~1o Z[7mg1qDZJ C7._Ơ>7j)~㦡{9ڵkGȑ#puuոW'о=Pቷe:GmHOND $m Ujժ:v;w.>3TXQkzrٳjz@:Gma⚠%W\+Wͻz*j֬#X[6d뷱t Cz:iȞ?m޽{8|6M_'1ض Xpq.]R𙋊ɷyص سHKڵ6,0aÆXn&HC^z@ɷ$ہ2WݺuÞ={~}ĠwFc(Wwoh#Á~W-Xk7ck׀Y=U? ZFJ)&&FFFcdݻ&M`d1ƊBuAիWk׮a.]@.#$$DYwƌHKKüyPR%Lyہoo , GMb͈/`ii ///lݺkkkTT +V?,b„ 5k?e1JRk9:t)t ucR`x;ÿàO[IXYG#&L(ux_2]ߪm[`bN#* :ATTTTF}o 6SX C*i!l*U}BQ`Xծp8baRFV>@߾Q:Gm@~]L; $1!z[p;8 = (POXZJ c%%$ءZ'fM,`q;Po.^V[u +'utvc1V)SQNcLz[yy+V-Z\}+cLN`1cE)gÇruGdu\fd1 ?OP-Ec1m8cccCVb"koL,Ƙ~]mu׷b1Vfp1+NdmucE++ 8~\L+[Mtc b1JJ:Y{p,Xa([!&ɹc2X1XI G_嬓&Nԉd1tSԷڻWLbi,\ԭ+ud1Xn^f޼{&u(@ě7RXQɊ6l /^,>~: 1Q%ˍہݸ̙tDE={?,;8ynXn0N`1t]5 7qq>qԡ0VoٻWLjm&֮ nߖ:nG#ŧ^ L8z-NnXnu;-1ƘaqrNU/r,V;w˗-[r}4cx:3\D%J>f1 QСbj"KqEUz2Ɗ?_ t| ?nɫ▝-ncڝ8QX1Ƙ!SjtIpbo}>};;d7ہ~Ыvp:K>-b1//a1%{wX1Xi-Jc⨌N kԐ:JX~ee۶.b}݁r夎 'lx5K&SnY[cҦZ5`43cNIR1Oii]hɓDֲe=LL.>@c.a1r8{_5'c*g d`x}NcoV_~ Ԫl,ַjӆGҥKҥ \\\`aa;;;4m۶mS[… h۶-`kkO>vm1ӧE)c,Wڶ-s1+팍[]֯Gh-Zt&^|%%I!cM`\w)\'^ԫ'utFrr2dUp$OWW:f <=<}UPRRR0m4o+VL&CHHe ^ #سhNLju ÔJe; =  \,>MgϷ-[テ?Zl.]`㏱~zr̓\.ѱcG ӧOʗh?1m`v,IL"Գ'P$!p$길@.K 3r\'̤I,116l۷oѫW/ jI{iuX'iNָqx,nXnHKGY)[efoXZJacggo'2Ƕvvvݫw">prۢ˭Teq$ og1V$lܸQr9{0 4@͚5KX̒d.>ww`@C>dhCիWBFFLMM^ (?0X)pp 0o`c#Y<1XEZFi{YabtZ'kBgϤM^Qߪ[7oÛ`jjʕ+c„  CPPs*T *!zQCq#qcFFt8kWICX1ʜ"}/ku||;-[p1/>Y1[v6p8*?zV=z-%`9r$QMè6ʴ7ہ~=zhJKKLL Pzuسg/_x}`[ V֮1ʪN PFZFFFg͓RQb"n#.cqHL:V T^xz4h|{7ж-!\b[c/Oa1- W^]ݻwt\4l]vŌ3y}em QN+k>GDE^=J赬:TAgC?|Yѯ1ƤrP3FEL`šm۶^:/_+̙3Xp!^z7j}{]1r1Ժ'cƌ{ 22pvvVO>}􁱱1ڴi4Zwut<݋1'9[T*udyx6novv:"+tqt7ЫѨ) Yf!;;7o{{{|g2dZh!q1 *>>^劭ՠps'l,&˜AoݶMeRE%GEψ%bQwA:"+~?VdWq"*׮]Cڵj]}G1+":Yt'N5bƏΞ-:YKzV&^ط[ˁ3scfn[ÆRGQUZ5\r(Wrӧ5c1Vll7`v`XN@\'1J~_o!41:dsΈŋ1qDx=z4fΜ c2sSSvv9Գԡ0%Gwᆱ.& Z'^@v6pX9ոqbod:U|2@6RGT&p;rbwb"G'L/<(ujԡ0QS'U3z4gkw/дx{aF*`lD۷SGop%+ ˖:R_"`B|y상+Ec1fll> $HLO ~Ȑ:BV$&OELT.k+VrALgf_}%u41}̙nUg1X]'k6NַߊC۶ܠ`a!u~ ܾ Ԫܿ/V=zA=LJb=1ƘvVmh&RGN`1cL:dl l(d`d .Ng ܸTYF IDAT|pVVRGcꈀ s$Bc1 ̜)2]6v8Radt19AbEb_+WJ ciRԎr1cFLZUBxKXIc%c`\vA]ٳł\ߊʕź~gH c**p1cS'%qVj@` ">1'1QXqrjpOd`)>]uB %EhcLTJoTc1 [z-V:2VT_Gկ?x 
3V849XZhcL⭃f[8 x9oܐ:f ߸!R+wת['&6N 2V~ SGm49Y,0ex;b`9:_| 8!u4_z?ku4k&u4 ,f32p}s7o}"#CPc%۷?VSDm1ž7:t(>+WNNNٳ'.\\@@d2_ڵK.؞=  /Jne_zPrA~ۙ3=I]'V*j޼Ѿ~GGL/ϵ/mt`q `n@^' ׯO+A&-M]EOO[G?mss^'N޷ރi?ǽ{ߗ1C)zhg'u$L__1=}x)&M:uӧ/ѸqcDGGO\.GLLw #XX1J^dx_VVRGS`m#`ji_vm5oԷ4f| 8X;w t㯿[{ $D|ؼY:c bС<<1VTTIeZǎQF ,^X%eddFt33hcGia_֬+uO̍X8;k#3)(6l@p]뵍)22?2%g wt|7Uꅪ>>=}sܺ~oƤfk+[|߮]a~~<$[br夎WP\9xzz"!!Ae:TXk˖>>cE.37R?^h X5n,>j*| YQsߺ%El 5­>}yYvT8 }Zϟǂ p9$''2eJB¤ao/>lT0ʕ@6oT`@#,XVKJk= ,VqmVezZZS888gϞ m9mX-4T錱@\i~ qg1V&]z͛7ǃf8p~)BCC1`c% .]~Z*{^>LٓP iرHKKٳӼ_b֭F@@6oތf͚!55䃴GB9DEcWKŔ"<1XsNddd`pVZѣGX~=amm-qD 8MqH=+:Nj#J œe)Fܹs}v|7VN8qrmڴ7郍7b„ o5> :t(|M}Vi/]c&&kkҎh숎V"9_bߓ{~rb_ӓäs'addSSS)bz*vOTǎ.MLe0MBBBh",^cƌs^z\r8{eVi`s犷H#;;4Ae̡JqkļZa\RVz: ̘ g.?@Pā[Qj=AL-M3=3́2-WԜڒ4CMsh( ", rϸy:>ucɿ&$BcY:o}~~~L:SvARRUo%_uBZj!F @.d;)` !0K'Z|̟?9¬Ywk֬y tD[i=fφ޽aHpwW.gp,X}Kd,KGZp!ӧO___9ڵkٓ=zP\9z=gTR+޽Ӧ-Zw!* =]{ w:B4yd222 z\QF*T}ҫW/4i80jjVZ-Ӯ۷׾lϯ:a$'æMZyY>P`,GH``ct:qppp|'ܼytʔ)|ɓcy\ڗ="N!Dٶ ŋLYJK!Y:wO|Iy/,R\\fi3s9͛r+ &^]ƍK0*/|LٹsgyEE”)0a|=DBSZGhPu1y'''GŊ5kXB!r@ɒ%9{,~a@%Dh[Qo`~ ]>uKI alZN`zUu!IJɓ\9>\u2X[lW^t֍7boo˗qhB!rQС[fԨQ8;;s͛Gʕi۶\Wfyz4BXѣ!$Dnͫ:T,X͢޴0X 8yfڵkGӦMׯ~x%$iɊc\xxn"۷FIظq#m?mݾ}7aC.^8 ̚m )%s׮:0III9:ؽ[[~<~<):ek͚5ܿ &"(,2RvMqa,  !^Fƍ  ""D.\q4/Bc@b͛CZq#t,Є8 ^J,UQիT4 :0S;'d&"B+|i:N#̲u9<^^^ɓ"E0dݻ:B!YNph"ЫܹYΝti ¸um0}:DEN#0Vh88hLuAe/]v{ݻqƱa|}}UB!ukЊHۃ, Gz=7Te {\!ī3%'k_R⿖->_̞m;#?Y6 993f0~x4iB޼y9r$ -ZxsG.YBAnMwooB< Q,֠ =v[;a<=aj5Kk\a 7gijkԖ"7u¬88h36T'B}(ZUue,ggg._N>>>8qE#FPzugBd/n0'|QUu~ȩSOQ"!LX`okg{{kM7%BUa ]4i:\-1nzP&ǙB//ޯsB+Q zkkQp22BTV+wڗ]#iC{/Ϙ7nN#P-%&NfiNfYtO??#uLBWtrB一(Q ?%3 !╝Lii !T3/c{ofYjժ۷g̙̞=y1e:t@Æ UGBd^'<1bU'B]Ə:vz]$'ڵY+j !LWZ?B;vرP4ʘe 믿fȑ^___VZѣٱch%u-]Zqa ILϢQ8B`@Zops|5''=*\xxna1To ?:M(SV:09ai^vl:uRF)l`ccܹs;w(" lѮml' 4NqТK`」 }jEÇ{wXQu2e  !,oÙ3i3gkmMҥVua$lmms’$%i--%Wf3#4V+`ȯB A6ڬ, .^ at::\]a8HHPHahz=̜ Z߫|T'RN XBJ%_[B!,X:6nfaϘa._F/4. nc!B{һwo*V%J7 $$džЪU+Ϗ#]t!,,LAlT$|úu! Qm>}eKiF/46 UB!bV"<[8m‚شrNad\xxnН;w r\rdUcV˗IMMٯ@KfFL/*Jua$nDE[! 
M T1JRF):6VqZhl(V:+&o&n9!2D|X"fRK  !6lIIIL2'''^9`ܴXdg?)`܈2GЦ4FKz` !Zh\(J*(mٲ^zѭ[76n܈=/_ƍ !SNe˖-,[5jdk\YۛٮiS6L۱lY+z&Lz`pijkP[-^JRBиP*8Zy 8lٲG7mTa*!Ϗٳg3gvgggxN\\:GGg#Yz6޽%Cu"!ˈ#pa6mZ?)j뗣9d ¨]D')`[f g„ !f~~~.'N|ckkӧxޙ3gPynNۑ ƌs$9IKf^%$j;he3wE X"K83ϟˋqܹС| n'88Ν;dܜcc j}:>>>rȑ.q}ڷoO`` ~-ڵpŒ3F ҊX/ìYڗd!+[md6Ka.\oIm9$''3c Ə@&Mț7/#G_~EO< a!Ӣi6tYҼNG`` ݧHOOݝ}1a|MrM˖-Y`Yff̀IYN$CϠW/Au",aѮͭ?Ȕ .`ۆRHóT)l/ٙ˗/“'Nxj$!])#uWK^~l͚5ٳg֭!2.bŠcGՉ2&o^(HX[rF"S ^{.QQQ-Z  suծ˖UDt1"ʅVyyyqgޯr0,sH%|4\_u!LC^s@ѢPDYOvT0QQڎe =._̸q`TTJ*Q`A<==3f 8Œ\u"K.O?P> a^ӶxY:iKu݃>y僶luuƍ_ 5ߦPBp)6mĢEx7Yp!%).\eٖcժU+ڷo̙3Ƞnݺ̜9:аaCp>"UmK!iɝ΅/A"S aYRSaXu ֮sg@ZT7f׮]x{{S222 bҥxxxvB'ޏ[2K???V^ŋgL>]u4!D&d3'݊TEӓ/,Zァ-_s]%aL8sVЖ,?͚5{㬬h۶-m۶Aa&Yeccܹs;w(BWILKCf` 5..x1'jK,Ca@+V@`6Ku=^x_M4B3q1"V:+*8WPE!bgP%_iI0ann`̘ a޶oז *^Av|PB 1([,6mTGBؙ3ϛRy "i]f͂ݻb^:)(χ=wT1+`U^-^FOfȟ?W5BFJ!OفPв%L_ k֨N#9tMvW2veTӦMCOe?>>N:1j( `C?!""c*|L_uX$B1Tң?伜BXĿ>Ce"t CªUc4Ou>,L{_:0&}I?6?+\,kٳ)S0gZl?5j`߾}رKOQ ɩ{>%Eua$@h(+Ü9;[eeeAAA9r;booOHH;w6ai&~W/_.+!LĥK?6K!e %5=TG¼tmaT8|Xu"!L0|8§5gs+V 998p!7o2rH͛kWj.hS=K!ez0++WO0vC"sa0K^u"e9m!!!tڕmۆ/O>߿+V`cad/KPۺ{{; !ܭs/(okP[-EiLmL[Pu!S0oiHX WWJC@z:\ NN56lAAA/_7ҴiSF9s挡ώ;ԩSzޢ#)a T-\Uu !NQ_?E32&ahڲ#ˊZ!ijiū$XSl a޽9|Cѣ$$$0|pFA"E'>>ݹsD_jQQj%Vv㏩Ry={'1iӦOܾn:rE&Mx"H✍>.:ZhQf)za!d5p8&KC@2e}6Y晏͕+uɩh/a@m[F(Viūp% K]\\UVlذ'Ά hݺ5^^^9:J=!q8}7'7ɧ:BN;#<׺u{nOy]!C'NPh 1 KJJXbΝ%K?%^Bߠz8{Vu"!r^lVč֖zxNdu ӧV?׮]#00cǎ@ӦM]6ޔB8}A RC!b NFdbÉE57ٷo}{kV ЖZWWJoajyX8ruԡO>XA䋨Jě۔B_n]z"Lȑ#-[RF |M֬Y|-YB6x7ݽ =l>j AZS aX7nhū4smwN 5(AAN ))) ,Bdƙ3RfSG'nF1YB(ONѣ|̢#)3dgØ1lyqcթ0֊Wri+WWՉzZ?)j뗣9UlY.\h`pp0ׯ_;wB37`˚NUGf&""c*?a"ND}Qd"z222T0]g0e  ӦAvS .^Ԋk ۋQH#[ XӦMw~`ҤI|N#===;/HrZ6@rr26ÈsY<]Hdk zꅷ7ʕC!ЃyU"͛79r${aJ ,!mС\v Vvl߾۷ … 'pM)S |'OV t5ufNmfa60cɣ:x ~zC ]Hd"հad/}Jq1ݧ-,,셏ٹsg$f΄8=[%)ajQ{n&VVSg(<"ۥc9uT'O މϛr2Sˆk-'4  fd*:ϗ }e"IiQaBB(X?XHHH`1"EO||<ܹsD)r2$ՋVJ'U0 jXwo|Yu"!->^+Z s@>R2@aN8!KED!:: W_#Bg"Kʅ0En6_?8rDu"!}BXZ+ad B }'N0DQ(VK^g޼y߿@ !ۄƅ24QYQShd0:wVJ͉0vVd]JPHdʅn]%5M6}u֑+W.4i e;]k+N"2;;X>T[5ǖ J}4X:| (:$Dlw_tiIDN8nSY.U(WLH=6+K/,ױc8X;Pc8 ɝƏ77?\ъNN/t2e Sq 
-M+oS1HFaXK%BQD䄐!/F"|-K: sn:޽:x 2XcǨ] eDu +WµkpK=Z{80YbcamՔ)0qLʝB!^u"QxQ 6lgg{`D;M_u:xERB(9q\z! ^Q Q2&a6wo>P[V:0Gz=|- w7BjSl =Ju/bb[(B!;q @f` allV=5yDvILsY~]hUJd%PhQj_ۅqӦM|||ԩ?#nb9XdE k; rԬs@W77š U6lIIIL2Eu!,^TB.ӸTcQB(v( KIKdŌO.]t{Rh޼9~mNE^p\И}eĉ㈗d'v-3|RsB 帟vQ'繏q@Ya9üyPdw}Mn~x.<<7D1Uf]裏cԩL:Uu +[VqawSr2$C^ڇ!ٓqro"8p7?<Ɍg,2̶… >}:>>>rȑ.Bu} }4pw^zMŊD&s-O屵Ow*T ĘmNG`` קA. 7Bk+LE犪3j*5j,^hիGppxB -ʶ@ө"HܹС| n'88Ν;+L'rĆ n}PbE8G0c+Wɓ;|NfYz f͠E HMh,DDIK\;@ZFʵREX;wBkGc_(QnNnFFF?ϥJz5kggΝ!,Gs':jAP<,X))ә4ؼY+ m wcCd9j_{(P NTGhذa$%%1eQ@pXB^j};>Zrm7` G{ ]+Q?3ʵ/+"M:-[l2jԨǍ\&~,'NATG/5(AA玢4BUazmvP\ӘW'D60JEFR+,r #ɍ{78})3? 5p g9c̙3C>F|RN=ѿryZ?)j(*]_aa9I9fd 0J)4WLIMUDda{Ԭ79%$9!2eĉ㈗$)'N_2aY[h}d<~3sυio%1{C")lpBG}SNeԩ! rm+UE!TtHZ?ðaPkKJRLXYB(ЧFQXp!ӧO___9SL{܏Wu!xR￵W_An`c-[СZ's?oCRУ9L XBq,ߢC N#00t+J& ځ%뫎"V$]._׊9:=xo 0|8Ԩ!E+9bץ]8:S|Y9#88Xu!3t'ݼm%E&Y/ skϽ{?ﷳkנX$ ȧ!Dui|e%[ !%NxqAd] SjjքյL*iEsn&S|<= gE02RB>UGBGIw!>Ճ.|,iSطq2riܹ\ ZYv^CeB*Zsյ", &D X(rpЮ'ߓ*mʷk+Z={RL !,k煝4-4;8 e1q`AhJ<̈́xQ+H2g Ä ZѪlYxA\xxn#,a\ Ԯ RDdmMJŊeFϞ`oɄJt;6{{Q+Ɓ =]]==aWmD  ay%0;~6"BIu!0nN 1-bXuUGB ;iP]UGB! 
ۆoWZ>(M+yMQBa%<WtMu!:{tk8 J׫ @o}_GUÆ #O<888áCTG/AoO'.6[qm+!!#GRxqlmmQ۶mSKdF:(Օ6qWP/3!Plu/6Xuaa ,ȑ#Yz5c4k֌ݻw'xI|zSZiU*mΝ;SbE6oLȠ{㉗Թ̢2.PBSQB(rNUG / ްaC:uDժU0amڴyF.YBAnMwooB<ݷ+L3Eu5(AAN0O??֭[֭M6ڵk7nݺaee̊}n;cKpRI@\RNc\qҼ]e`|k(}QGdxGV_NSGh׮V"%%k'{,1ի+H'/^'}B2ͨ:zZ?)j˞~/,8~-筷z>}ѣG%Y1he"[FzF:C^:„=(ʢInųKrY^z,l룯]ț+/]BXr}u+]ޛ)Rȗ'm `]_/a/Ynfgz!LGd&DG4-Tuad7|Ô)S6mqqqxxxW_ѵkW+j^9]<0zhVhHϕݣYjUhҶK)X`ǫSt?"麣+^E||4n8cqm+JqYw9u>Ê+QwH*A2{k9~gz/f_*иTciFuɛ+o&~ 5&0YD2 =cѥ\{A/~8f[cѢE,ZHuaK.s'cwemǵau(F60OZ}_~IJR" _Nh\(U Wa\q&.ztNF?g۹m|yK\2쟭Mԟ˓_U|seB6:h4ֹ|(Hާ>!ѯF?ZmI{lT چ:0|>t.ߚF;jc+jŚ5$&6bXỂ)M8Y'zXZrq2%)-Fg,\kq!N>ԦS3]7d-Zl>.?P ~[@E鼭3a`A8>^}WC !ȼAl;O?51B)` t#&6q#*ꙏt|Ɨ$?H}JYjVyǡlyrQt:>n>w(^8w btэ 6p.ܣ 3D܍૳_nK;j^I{!hQQ9zY`ݻ?l:`7,גa1 \uAAg&d nKg/VlmNfl1,k?cņT[Y~,^nK;ꖨr}_8 e]{%%1(ۂw>{.Q X g]u캴EG;p׿zAY''n 񰐵.¸wϱz5%>+A݉N׿Ġ)^'3߈Ήg'|7L6mrq+w:xeug<ٹj'n k4y} 6q X`H!rƐP=a|ˁ<c ɟM!,K?;]{M;{]BdKS cRJ6Gß??;_UYDzLm:Mr;6oR%RmM%Zb*DɆ.;k[l5Yuw0m4[';@ҍsv)9\\8:Rpޮ6oyE" !0qfן} WG)` R(_!Kmh![l}z3vXbǰg1 _ t:x bb鰵;]v?^ 0*hщ|C7Mn;7QB9KL7޽EUmܼW0S4REB]B[]GZz- vC 1CDZTP& 'F (s3G qμg6H_o2:֎HGxSfN_7&|2{1uμr#9""H|Lîwu-䝓!J6V_-WDDD]wq+p%+ Vz[ [}cbsf;N@]_ߠo`'1(.őYG|ˠD,-RG!"LXY}3}ǒ/Ap_s]]CHnGD?ZcPgt:,ZPTǞ={E`it KFҟD2;|@{B޺j;= %~QQ6V$,Y pXiP(Шoq%yn? u:I3yĕkM [[t{㣧?0axHmOmC.SbĞEX+ͱ:h5,\uOߗڏ=@ O7vڅzDDDH7?==%NBCNq(ܺ߃=@`8S;2 hMGgy;x#|H8f?4RF'"NP(Q0m^Fu謀Bvۄ?RDH,{xU]x ~8t;Ə/bҥR_PY%hBO=9]x}zƱ玱C8"]?_ rKrUW(amn ^9 "!GkG h7F uXi0uT?}L0 =%%DQgΜVmu/ ;;,9oFLd^|&2zI[i '| cQ7=#9:W+K $h7@PbʃS߮?FI>$JKKj޾r1++<00)q V~Ɩ'`l߱R."j&NX7yJXDDDdR\ծ1d1Hm܈^vv-"&OF%2m==dx`G`CcYxzubEG~e> nuDuC5 e2SA@c..k_~gOWcMl~3콐|:F4xEѣ-+* ,"""""2:meUVV|y[b##1{;s)>>8>82fn?= #ak! !Wo/+8Stgjujw;пW BUuU^s׫G/ŕ+ =@<54isls -eeSa4ۏё4??i y[|VVVGhIJ/P3QGa]2jzmHkU?g"T ?[C N^9q8G|g/O3=1}/̔Ҿ춵prXX< (PW(Q:6 z^e31a A?=@`Q+ GC*_BB"""!9q Sz> %)) +WĪUPVVVݻw8"֝}`ã=.I9IؓK\׎1s"F_o? 
v#jj4YEY8StYEY.F*3FLߙp@`@غt{~BB"et}+99;v@zz: h0bZ Æ :qYo6ބh}{All,bcctǺ\Y~0oW_DZ!9/y؜@;8M޶aefP*Be]%n+( 4n#0>=̥h ca-[/Ɛ!CP\\8z(&L uD""b"""ӥQi6$ aC*⳸RyW*Шo-n0k۾ن]ЯW?]Z3֦Mb-$$^^^ذa_vݢTcL1gLn>xj<(uQh:|X"??_DDDdh"""""aG`n@΅ %K<}Z rrs++r#33S/rrrsx!['鸻u{%`Kw;a$c= ?Ry5km^sN7nܸs۹sݺ=7nߺc {7n:dVjj*&Nخkxlڴ m~ɓsN*2aϾb.`]DD'E(`? qСv]wڴih4-bbb 6 **+"Qc6ŭ,111:I]@DDDDY`ڵ/V4 Qcҥ [oĀ Qu( 9r3fLNâEJٳGINNٳ xxxg)cATBVKE׿0eXnԱ *##O?4`ccVkթKaٲe4iT*u333 Z Fӧ#//[ko=`tv{n@=`466B׷[\744;vիq9HHH(4lقK.a8|0>! )))RǓLAA,Y777( \||,֬Yul۶ 6m;wAAAhhh@bb"oߎ~ <JJJ v{ {m{@G w`a<( ؽ{wI& www(Q2ûvZ5N'\\\Dpp'<3g:A pBHjŊBP .X饗B%3P(DLLL„l^x񢰰˗/7dNah{=0]p {mQ^ZFXXXsʕ+8qD ٹڭ"%H$;wo~sLA\\n޼ia5+++]u;;;R2;=pL>}ń o>CE4m=w[zȥLzuhZ(-___@vvd2dQ ڵkXh?M8HKKΞ=C{Ƃ PYY)u<;w.`塲֭[pBT*#J… _|}}qyIw`wc = =`4a=`R۷ZVZZjHpBTWWcʕRG1 bx嗥"TUUaƌ@rr2.];v`ʔ)R3ԩS0`0uT̙3RǓԭ;= !puCݱC삻aMm3tݷDF%::شicP{ŁpiHJףWƲeƍ-Z'N8eEpp0 wyNNN8~8֭[JIK&2U{6v<ߨ5_nbbb~zlذq JW_Edd$zrh>챢fff2A888pС?3gΠzzz:Û*333>^h{ B!qqqb3ugR0zJXYYu։JBRSJ`<(J }HNNׯjZz#vCD}vP(Č3DbbHLL7oBq9VÇERRD[>&쁻cB `7!L &?t^R :TٳGX$JP(6R)u<͙3Gjc\uu}bʕNh&BBB K.eeeRGRo<_xz'OFىP?KcMw``oBfȭBq"""""""""L3H8"""""""""Yd,""""""""5H8"""""""""YHfjkk///ܸqy...8q"z +]@XD2ciiDc޼y^3gTr%"2V""bf&u"j qqqƍQZZ49r...R#". ""vQK`TXXRSSdzXR"""a6BHږQF/_ԑȀDD. jLUUUaĈ>qؿĩPDD. 
F$S/2󑔔o/+u,""2 nHk.|j ū˗ᅲ:u1 Z[d&++ ۛ0f\~NBϞ=%LIDD]=@DD8"""""""""Y[H8"""""""""Yd,""""""""5H8"""""""""Yd,""""""""5H8"""""""""Yd,""""""""5H8"""""""""Yd,""""""""5H8"""""""""Yd_ɯIENDB`././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/_images/genealogy.png0000644000076500000240000060323314456461441016533 0ustar00runnerstaffPNG  IHDR XvpsBIT|d pHYsaa?i IDATxw|ge,{Ĭ5RTm)j֨R{EڛWAd:9ܿ?$~^+9ϸuP !D"H$D ߷D"H$"H$D"H iH$D"H$lC D"H$$ېD"H$D"6"H$D"H iH$D"H$lC D"H$$ېD"H$D"6"H$D"H iH$D"H$lC D"H$$ېD"H$D"6"H$D"H iH$D"H$lC D"H$$ېD"H$D"6"H$D"H iH$D"H$lC D"H$$ېD"H$D"6"H$D"H iH$D"H$lC D"H$$ېD"H$D"6"H$D"H iH$D"H$lC D"H$$ېD"H$D"6"H$D"H iH$D"H$lC D"H$$ېD"H$D"6"H$D"H iH$D"H$lC D"H$$ېD"H$D"6"H$D"H Vӧ8q޽Kbb"9rlٲPzu,'ŋsy?J"_|TRիSxqtɓp- jԨA9=rIΞ=Kpp0& *UDժUFoٌ>|a4qvvbŊTZ*Uh޷ "9sJ"bŊY%3""3ۯ2eʼIWm|z=}^ɮB˗ܹsRpww(Y;n?\thZ- |zxxX%3$$5}} /_UNXdٳ9s .FÃ*UPF .lϞ=K߲lٲTZ5CK.olüyj%JXcdd͛ҥKjGGGt~:Oܹs<}!srTV2eʠR,Jׯ-%K???:;wpԩW?!x{{KŊ1>>SNիWAQXW:Ν*8~B @hsⵄT}-RUm \r!C)ŋb٢JeJ :Ha@ծ-֬Y# C2ϝ;'z%l 4.B[O+ՄQ"GN'ѿq (/Qzs.GSJx+-r*Zxbƌ"**NL&غuhܴs)!lj mrB @(\XL4I<{}-h1k,QtqQK'iDQU׭[K]6- @|V%p+#rկ$\3!oŭ[l%YӧE݅r}8ى5DDi_Wa<\\ݻw߷ιsDϞ=_w.¦ի-lVǤ޹yV*bbbļyDgJ%tB=PO'?Phr;{1ga4S -c~M}'uVx*(ٓx}zbbb^wt=)EhggTp8sT'yr 0a>8r(ZPi4B]߫ pڻD^$֬Y#]݄AP_;~q/ڂmڶ)d?ԡ//7OP$(#^/^zAׯF(s:K5o.yFCA";Txx @oЬ)VBPoL}(|^Pyػ [;{1k,CŊ+mX`npzPjի)ʼn Z-n-O8(vUd:ZN7N2D2ZDZD<NWyB^?tB.h2B0슢ӛ:2 F nJ )d^vMV  y#T%; F+ .*8BhgvvBq]] *{)dkTBy˸y& 4ISN>`KQ|4lΧYlܰL8#Gn,-s0?P= ի裏 aܺ}SSMkǢ<5ر}}v:} X܍K[wtSN^9r$ƍjZ]rAZmKmآLOI;vӿf͚s5!cǎeر|PS_L 5P˓P 6m5mB{؃Z%ݟV-Ԫ][Y=-iҴb&gV|#Go[6MΜ9߽GѰiSn߹v0t|J{Gǣ0 ìԨY۶ [2djY^x$9=!.+-K.V}`O߾,[UNs1O 6N:SzuΟ?O&y1p BGD;brlllB0dOH3L,-;{썍9[6oqc{m6nʣ!jN_ʂҸIS6oo 6 MZJ(e׎U}VXANx16,o`xT{'Sח]; EѯߗTe1-ӑh4_!e͛K~x9-ZChYf4wd۳ qFv)GA GmT@/?3|pT*oߑ]v%&`3ͯϟ}PmdrjԪMCBBus?L21c`3b*bcƍ_QfMj!E"Q;P xͤP).mڶųuj>d C2ydb>LM5cTֽXFCۧa%Q9r$&L`7IL,|-TlܸURNmIGpT(U{Wyj!lJZs@8^Rݳ\ϓ'OSljztD]Ʋ718EbT(V}OL?Cq`mܟ>ppW1Ltރ}b/,imE߇-#Xcǫtu떬_믿BP[ˌ7GŋFڄ'r8[ի{f̘ȑ#H<6LC5Iz]qJУ:;V+^C__ɀ17 wϟOnݨWn{^'W.lNN=ةhtV>τ Nc3f Cy9rq Pz 
BXNn`t!fPʕE2l\ʊ I\' _ `…' d͛7G1u$ *r m*W!>:B&DB|ܮ+/ϸr,`0W®37 dpT$#^{Xpaj~1Lԭ_ӷoۻ uBex }ӶtjՊ5WgI&(:.L&Tsz9gR\t>}: OA4 +:08L̺F* m~͛P_w1 ,ĭ'q2.1[ӢiCo]Bg2 RŇ1 83` e4sP@ρi@F00 w4l9*e\ghԦR O.]Hb6ƫA"w _/˵ضmZzw?b&ÖÙz);jDv| 4tG akż ݻ̙À߹MZ)]@?f" jLBy\OBؼy3m۶}KTT^${S_:='ThV#O@שXɛ}պ>1T4h0?D5,BnM*qg;x).}}P#Sr"R(cFkp{xr 46P%Ԟ!>˜ࣃek˔]^o'GBP6<NCL X ޒg\A7}\WM4J*ICݷPImVf`a5_ zRAY ]4 X D .tY 4\J5f`p.͵/΅ *rCɏxڣ>tC9<+>T pe6= @Q3v'xixCBB4y2)ag<p ZA7wY79NCt!CHbk5.4eSI@~ O`A?emnWTGAiZ͎;8wǔ`8vۓR< 1 w.v9 ;~?mɾ4=woqL<_~%pq6m܈͂iN1L~>ƌÒ%KZeL:HL)AbP| .GiCAiѢ!z<qw.`H_ {i݊Z %|goc2 ڵk޽{"ɹ&qxDhMWڮz[ptQf@ PvemU7KzE!Q:#k`=q\*L$@P>/=V\ ^ME05\ P8; Jv! \m ʢF@ɤs 0YBPӜoX`%(1q$k3dYx1&4Vܲ/{!N/FY7}/m|ga >m6VGh\5MX1*c4oޜy^Ũ7qs4>}z֦p,s?mx4D,c Ǐvy+^ m6`0 ՗ztO*LűxB>b$ޔۛ7P73i W7@Vm}qwNJl] f^jG-)OýGt"POF2q̲֒|r z6?yxm1xS,ٴw xBL_O* 19=J:uyK֬YCLt4ݮ7 r8竘:mVA˜ دN98R Zg5kN!- ~SH;~LM꨽1gT Gֿ1 [LjяcTm{???y?c<[kV>w uxf"9[ 4Vf1+FAA`d _AaoWAqY0!y\W- x- du?|d:- pj{M7st4 |nQ >yg'x.P87/3Cgfcd|Һ%@^ed;ɴŋ3l0EKcRyb7صRES7EubJaq+ցOorxr57Dh{lrBBBؽk7%)46ZݓfS9mNxI``JCG3azH7 [c3gJReرHƶ!\V^-]ր1LHR2FHM H>+:EZ6> 6Tܷnח+\-x/3x IDAT+mQE`̑')]VW0mLvؠ1b|Pf?d լ~^bH !w O,i;\חL|7Pj=P( {c~#j@aߤΘLMXxEީs޾ p4)pgu!"4 U\ ]VhHW-ȓFJBC֥+л)*_v 8W})[KE /^d2Q'ȹ>ʩgQ,?dz5,zg~leoϟY]r& Q'Njv\-ӕX)ǡQGt9{Y7Sgb*aefH!`cTgdPYm7O|"@$V;b(sq=ϑ+ga+7HGW0 #"!>4 UY;|yJRIA J|#3X2Ӄ[Ϭxw'ANOp0ǜIB7*Qぴtwa(Uą͍>|ewj*x5Ą1ݸEkTC-tKj\Vxx2-WS&}9<]38yq.컁߂.8<]8S%44{Tik<@:K]]{3$S.i@8VooF|-Lkv‚ Y}ޱ6ja̿n>GP<2O>{s?giw$S_Tto_׆V_^&-&ϟg 0TR!ۓow| h$q DTUx=  Ô+f^~VAmEX9 ~ڙꡦ\o,,Z8 \^ 93RXp%͠l BCq`SPpb&5.y^sV\(~1߿&s>a?WQcBKw>^m43c:I|\ʶTJyy*CJ2P%/xToLAy,9h7{XjWVN w:wE) ku{M }&)sy@3RkQ0U*;aA,G¸Ir_R!$"e+s.,C.OQb ͊{Wբ* ~>O(ٗ#VP~dr kYP#ҡkIluGN7AD7Q^Fi?(Y0^ ڧ2!qLTiT:;/qNlJ'^0wۆA!bK %v I:$:wQ k(4Ifܹ#OQ*M_ea\+I^:yB*L9fvaR5K&acK( 셰ԼCߡL=p5ѠkyƾWًCN"$,E|Έh췬C7rK2"",Y~ܼyi1fx5|V-e(r2s gvDׯ7"10O@Ųb?"Y4y>lZe8^~GSUv|e|z${>:c_uqqIuu^^/g3R؟>áݐUHUM$\ \QE&&<_ I]ݬw쟮 {wR\݃Gʶp1㦏}nm(~&)3OV+֛0 ~jԥKOUJIsBPC{bj1ԃT˕+Bv`󛨯F5_|}G;q 
{[#g45۔Hx;*UR0;av\.n݊z*Tu+~b~&IDEcp)I___n&dY똎7IL!+{+?=q:([GO8r_:t!F)U-^%'wFt+y*C֨(Z|SCQT3*VĘʳ357lt*|}Զ`V5zE* z{_JA])H⁥SQ/<໖o4 321OhHPٝTۢ|kq(u?,#7=vuTnew<8JN_gacC@Cɨ:|F`p*e P39sxRIQ_]>7'm*>*U"F .o HJ\KIҕdp~h;vi4?גuGWRƹ]eLWmZ *_Zx+#%DKceV֋wO֭ɑÎ?Ӡ[]߼ KgL파~_@ˉscxAczJǤ:9P6ܟ GX߁)!"Ôk$]"NߠЉ)TtڃIS{Fʂ . @o<zF> ̞FtʐaOQIx43VEӤ!*x'Om]g&MFԞf`wë{!9'SڳReQVΝ3f ߠIYuv̂THx0"p!ob . b `s7@}+N!g{V) Xy \ k‹$+ P\[aS}V~vez*Y/̆Z2QJe]@C~%~Bt23b" (ԛH ؛;tAzz}hv'_QwHfB=3e_7)@p}Ɔpy59.rI^ y[GOUp|P?'xh3OHdք@Xز =ui'>Ɔ5kЭ[7ZHPt$/TK%)}`l(6 ^ܢ=-J_,fΝhm6HK⚷E?!EBкb%6nү_?޼G 8oX'\Ϟ#TBT U=** B1]9Vs'u# Bʠ[ ZOPhqV:ZO\>q?@}c+4x1 "tV>7OG@@o"_1{~C VNxgls;9?ijgD|SV6#ỡ.\DD@]0neBoз?t>6id(fmQܚ܆(pv OJ܀9n_gWf %ѰQc|{u} P Št?PYwa,iO'*acׯ|{(tk)|9t G“JF"-/{VX^jMUM_棃A}.{UJN#Z /"NIg笤Gw"0B C'@Jqq(FPcPAU[AeQCn2PO,}EU޲ς۠RC;BI+(Q(W/gCFwG~Uue䟨Tp])bF^ P:04z/-KG~iq"18ʘB/gR=n ,X~ꗉV)g|'ėkx2˗.ShVb2[>ATZA˜ Y8$ lt(E dZ^PSlδ̧sX7n͛ɑ# Ala_0_WųtP&|t+W_dԨQL<P>w?m>ogI`J)Kt̼@͈:ԇq40BPdTq^3SkmT+":u mAQj6 4݃~- ;yԩSɗ']"4!*ݣRê$+SGJѢET aI ׾:˖q-g:Eb_ӦN{/Zf%hB!is {c1F?/ >>?-' TM/\0׺L#1Edsra*T}2$(N4@}H̑/_>ϝ8d Qd岥րQ? zhI{՟@<9P| q#gW.̄[3'O8S,{~HVT]9yF SD sY M_SBY؏Y"u*TvmV%=)Z<=džf<5KTF3ef#3ġь _GV\".ۛq}a߀LfK\w#ݖҹKW3,ŋ|R mBw+gð{/?Yf #VLe*V˗ٵk=ztk6gL?KWVTA\l<}Xƞ PNj[#ÃFI}.ɠ&X56Mӕ!͂FtWjv#Pܨf&dСthՇGY'};Ƭ3 81L *זߋ={{.J*V%`]DV&Mʶ bTV!ک6at3U$͍?-O?%7$:͚)D $G$Θ͌30`^^^ܿ'h7ZgtPs<k׎:upE6mʒ%Kڵ;ݾ^! 
]^ÌLJKrUۏ~8g*'lģ;8S;:qLΝ;1zh֭[Grغ5#9rk&l{T햏vLmsm"ByV]QLߏVa6Bx9 ~ p@Jbܹ .eŨw=?5u8q"C P;;;vlF]j7R Z i3|^ʆiٲ% 6d>أza]c899vퟴk!hn4j%KУG|||ػg7їѮR(DޗRss>r͂ ӻ7Ռny*m~SNe*UfE3›E8ʹJx0q<==?~<Æ FVw|F gȐ!L4 (sL㋬8ʹ &<}xyy1`f̘S"0lQ~g~ʢE qssxyyV D hqpÿZl")@QACWcA-z"2鹠%T*8pI&d2K Gggus KcLX%NjPa_P "ES,ڶk')W[ßqEem?xe^."""8 IDAT,->,(.46:Q!Sԟ%93IȈUUCBoї?KBB:mݺ\@=˥m۶1$г Mş YKMl6:DԷ?@?6gcqN9|;Df[&NfƌC_~ŋdaaAm6ӧ ///ʪG 999 ߼yCzjGnK E|Wɔh200 ӣ|u5Cjj*Q&M;lIe~#]%VVF%{Gri޽Gl-=T4BW%M&M,52j؈Ο?_幋hQR"AcBeED;֖&^$bX,{ Jvv`Oht`VLhEZd~ZIffshܹTXX(0^ii):q4 &IA>İȤ իWԽs~qBc ఁ8ủa_ECZ:N#l~"zo%N$8"5h܄.^(f ~+D7+n'vc+@Çϟƌ Cb5 MM¿/0l-;&4ޗ/_hD@@'xm#Upb7J}{NhhjؘXJ-$)jISKv%nͥ)S04%` H/bu$ԃ^|)ƚx߾}dcסPR!-fcr) }AWW3瓅RNNج8iۚ8MdfJ` C۴!aÇ?2&33MFze5F7BᄶN({sTTi 4|pRVQ-[½laG]NȴUkڴimF۴+˰ՈّK h "%eעL>>(%%E={F&L 5 ͲD+?9[Eq3Zz5Rxx8u2ql>eTnj0biAt kLOOS^ٵ#Ʀ/;dNz ҢE۷o"|fϞMe{ ǀ;> NO4k,1cbbw0 " V^ek&N#Kb@?wsHZd 5hT&ܨpB4K-#@Zں4ez5ѠAb1=p#q8mR&dcᇾk% $ܹX$%%ɳ(**:ڶi kkkS&r^^Ο?D$ݾϟbP6Сe ᘚK.!)) aàNh#Y ve˖a5kV\ }]hjjqj&'''@LqiY31! hҤ  ///e˖PFii)UUqeX`ttsX[[:/_/nܸYfa޼ym߾G۷3\|6+f̘e˖a٘3g~xxxÇhݺ5Ə+Whݺ5к9ѣG4i"9r… `xzz⯿̙3q!6mBÆ )"œ9stRL2Vz{{#!!ǎÕ+WvZDAA41ƫ}b׮]SbӦM8u KKK;w?c9i$l۶ ̙3h$$$`ӦMx9Zi$t7Ç[[[tU/ Km' x=i{Ƽy󐒒vul?ƍA()'.]K.СԾ4DD\v IIIx%Kv...2ܹsHLL;+l6505:u{{{ޑw?ybhms 4z|~+m|#ß`cm ;;;899=--zPSSEVfYŋHHH@RR2AD04+3TTD˗/qE$%%G/2Z5aff&x%%%|2㑘r0׃U{ť @TTp| eeeh ppps%3?DMVr\ر#%UUUZny>_|Me ,,Çɓ'@H}ިQ#9sN"lٲX4{%mmmjҤ ]zU>\.ZnM@DriРAdkk+v!6M%KӐ!C{պÇ:YYYׯdddDІȁ"1bUVmA֬Y/spp=JhذaԢE ~jҤ ٓ.]DJJJ4vwǻ7Q^^Д)SmJDDԴiS9s&ziÓ%T/{.XtuuݝzIEEETN:uj̟Cht%@O<(P xY999҂ X,RSS=nff&/_Dddd짣 99Y 󏀈~zZBrr2}{,VVVsٳSk_˗/ưa;wHY… xL/ݣpwwǭ[.r,6ݻw عs'zUUUdddTۡv~:>~[[[Ox! 
cǢ{xY_&77nnnػw/݋Sm eee1_}}}@-?y/^hjjbƍBzz:6oތP<^ȑ#1h ~$Ã";;Ǐ 4h 4nVVVZh?\GUAݺuѦM|y PRRߏRYjxsM(P@4 DrssAD҂ 6m*ɓ`o4h(++ݼeŖ.]+V *Jb5D\\/e_ѨQ#!3O!1χ_W\Avp#,,L 2֮]_u7KӧlHT,ⰴĭ[вeK8::ضm.\/^m۶XfMlr|NNNz*Μ9#1?GQQBCC]]]eo߾^YަM@CCϟӧ0ydaڴip }AcС֭[0 6l@^ 4hЀ>pssÙ3gr`>}7oJIG'''0 ?ׯ_#77̬(޻Ԕ?wNNQ@h ~PU{H@$aԯ__ԩS???_%rb011AeY9aee% xql6xxxIfBn`llwK1>|saʔ)022*EFFHl `ƍ8p\4 <~'\x)SЭ[7ܻwcƌiйsg}BAA_QRRBtI@u֡ӦM_~K¼y󐐐p٨'aĉvD$۷o|2<==ѦM&2}[n LCakk-[",,LkHMMttt?"}P\\uڵkڷo *^ $''s7n|+ݻܜ_ƻ?xpaвeK<~...FqXjvڅX۷M6E%KQ.\ K,AN!!!hڴ)~<mؘcѢEW^r޽; `@o0űcǐ+5ͭ0RP 2; x dHlTϣnnnNԩÏ ({9ٳQQQXzuS6muuu >dggܠP;vڵkIJBCCѾ}{|qqqeÇ äI,PWVII ?ussCqq1Ν;'vo߾!&&صkC) a0qD~t;;;{,6w7ou`XS'Ͱ222axbܼyS4 6m]b޽r? @SSS!(PP(~ 8}4_p۽KxbXYYaСNǧON9ahh7'HXa[l1 xIHsжm[ܸqشi444<^aa!BBB[ì8ӦHXZZ{dffa :5"4k 7oDnЯ_?XDKKKcɒ% bcc>"ªU0l0"""B{)++  no߾k@\.^| gggZ3SSS̙3+WݻwѡCڵ aaa믿5Ċ+pEА_^RRM6[٪h{x]t>_ٳ'1gΜf[>|;wd|}}qdffe _FAA@---ŻEoGDeiU=))_fccCFo> 1k{E={FU+\.k׎+PާOݻ~R͓JZZZPQ`` ^zQff\ݹs'T\@{ᗍ;ڷoӦb>"B֦,X@TRRBbrٳ o{K.ƍ'JKKifϞ-sxbRSSl[rA(""vE PVVPB277:PII ͛7O׉f!Twq%KP:u󞵰0~?nݚwll,Ç*áC@FgcccfڵFDD'RVVիWe gϞ%K~YVhr_@d`ƍf^;waÆUi@jԨxFFF4w\߼Iiii2NhUZ,\|PTT@yzhbxЩSȑ#֭[WWIII֭[ MHrdaa!$`%x"'@I&4aQQQn߾/3f _)/DNjjjdkk+9*--`РFљ3gjl $ ӓ'ʓ"jРJy\\\he❅q\Ң˗}5@.r3ΦƍS.]X{ԩS'}'L@e_>vP={RV?qƑ@@3g  "KKKCDdggG#G ( ÏV)!""nnnJJJ{![zRB@@#' AAAhժ@(L{Nb&M@OOO*3, ԩS1m4\~Zrfفf#)) iuݻwgjUG10[4iڵ7Ν;1l0X0///\zE||<2߄I&hժ???dgg: |{FDD9"OZ""".|۷hРFyGt###XXXҥ ƍYf͛7`;wD6mпLWk "¨Q?zwS9KFȣgϞPUUuh"T 333|]fHdu5 @ZZZظq#'P }}JsMH^%%%=B׮]၁9GΝ;ӧOv4Ν;wpUڏ߿ׯ"SSS~`{w9.q ~V###~H~~~E>tN8AjjjdooO>}X?~$UUUZd6#11PBBۗ$kԤ /_\(jTmObp\jժ 8P<''O {k׮*PF,RÆ ~(Eȋ(?Hh >|xG(99YMVy\ ȆB"|@`bb"$22ի@yzz:i@2;v@]]>>>|s@D +>~ׯ_Kt@Ѽyshiilcԩ8p NII .\.]N:HII#h.Pp\7Nb;QaD޷={ "##3ܿ_\&^۾};Gۆ]׮]Cnn5je˖aӦM)O#F`ȑqFKZ°{nlڴ-|2=zI&GFXHk憨(ϩe˖ҥK%~~ӧO[@8/333dff hap*DE!Ȁ(Rd޽X}ٲC__ǵkװxjE2y!~W۷o", ۷ 1hРtZxX`fϞWĤF,**† WikY[MMM8;;#22PQQAϞ=DE!&#hqqqҥ 郵k [@DBш@*5?~<:t1c^2 ͛7Cpww˗/u}c?~~~nÆ 077b6tnnn(((@TT@ܹsgr)r'&&l6[ȯH >ѯrrrpĉ*#5572V(PP(gmDvv6\"6ZI2GV1w\,\W\X y$''CKK ͛7j8GGGǎӧO1~~y{Av퐙Wb~C5šCɓ'Wۊ8|0)Peee+BxBtttp L6 PXX(FKK 
!!!ŇЮ];,](Ө;ҥK,*Ο?/7//_|{ Ȟ ] 6Ď;77'';v1c&T<{L&jjj2fXZZbLEEE9s&wܽ{5k֠upvv$ ۷oP X,I&2xB?j\Cbb"4h{{{!ܺu իѾ}Zs.ϫWеkW<}/_F޽: P^= 4Ҷ$x Hnn.rrr͖J P pqqvZiJ_~ 0&Lȑ#ʪ 9 (KY~}P<~w0O-bbbN: Չ; ۷I _xbo ? j"%())aӃa~ /^D^^HI9CCC0 # 777L4 SNŝ;w{nb"SRRH΃]vr@TTTpa|]njGcǎXv-/_h4jԨ1}tgQNH>##up мys1y> Ub !ʊ*vލ+W⯿STsss\~WƶmЦM\pFw%0227j$Ν;Q\\,6^yx_If͚f h]\\-CUf1e tVC||<+ _7obĉR] Pv ܻwO~ .]*AII bccE}+2xqqHӻ~AWWW&g4hjj<0Q . DJą ajj*Q)--ʼn'D_EQ\ PUUŁ/?<ɬGGGhkkuuu1c իWC$%%!''G Y։z{:]J!(PP()*?~,0..>|i~m455Ŏ/\ 033Cpp0vQCamm-e+:+++L`1{l>>>3gΠm۶HHHɓ'uu[Dy($i+444#pߖɓpww!$ieӧQ\\ [[[DGGm۬Y3\x[nѣGannǏm-ׯ<<~*6lw3gV>;;:i$i@>| 2d2z- Vy ---؈ҠAhhh=ׯqU".]@A@D|yFd}DD ѡCٽׄLj#1c"ۤbC@nn.RSS$CYYYnfX@4,, >pqqApphs `ԨQx!lmm1p@xxxT)(.3f`ɘ>}:v]_Fu߶S@‹ŃfgϞ2 DYYY7oP ;W"00P\.~~~ "Ie.sNp\5JuK.W\/;E4551k,޽Zg$a$7wR?ג6 uuuDZB!HIe&XTD}YUf^Stݺu1tP'駟)v;w$(++m۶r@ G{*MWӤ;v@HHN:U#&1Uȑ#HOOi// Yۈ, o#===>}+~ܢhР"##K.aaa2.**VZua5O#00P\" 3hٲ%?.𹹸 ))IP͚5vZ/ Ehh(*oʕ޽{29e{ҥd`` .&&Pjj*,##eZgQQk׎XdӧŢ3gΈƍfzSNիICC\l@;FϞ={b7oL 🽚ҥKRRRĶ111ӧ_p!թSGl}ZZCI\իW ݻwkQ@Ph@g!fE"5 $rvvvXt)VX!uǎ(**¸q$ONNF۶mlaeeH#GbĈ7n޽+3gɓ'UHLL*`y޿.+x#fXՍ% [bĈLj#peŹs0qDM9@3I%%%8sz%%%̛7HLLzmGGGMI> ͒ٳ6hڴ , j"%sddd !!Al+dP* HM9`Xؽ{7'N/믿JWPPTKi۶-l6<$pQ|Çy#-߾}gggܽ{6s?~SLW@ و@޽0777$%%͛75B_kBHÆ qͫ4|lقh}Xr%?tqBB:w eeeܼyq)rذa(͐_FFTa"aeHff<h"`"MMM8q0`_yf=z۷oG&Moƍ000;' I9@xHo666b@(afff3gL듆$$$H())A~~D H s|:$MjjjBNi2$33_~嗝8ql6}Od}}}Ը(˼gDEEaرcݻj jjj077#zy7o={ 22+W7oބ%=]v&(A__2MOOԩ# OHJJBRR<8}'j\ƒba˖-.cXt)-ZKqa IDAT Ä  ̄SNظq#ѢE \vMqux"Ҥ#++ %%%Ri@PеkWlm0CBBp9 @o޼Cii)5|1EEEHhB~i"ӊ+HUU5kF1115߿P i5k5iDlܹ԰aC|"2˥&MP```AT}wڕZlIu?&MMM<۷oӓZhA2^x!]ll,GIlw@7oޔŋ"""d]GꙏZ/_L*͡kw5%Kؼ*<֬YCޙŸ53ʹoqȾgW>e_e;,BvαDiP)IE!DRZf޿?:3{~_@e-ZZ G`_U8v]_9\vӀ"((Ç%<==1~x̘1}YYYj?n@iʧO*\EnPV-xzz*% qepD/B\^^Ν=zaÆsZi,/.^K.~иϟ?Ǐjm^^2228Ʌk<"B&NqU^^^ԩSر#ݻ8 0m pcϞ=hҤ =Zns#"bG*DޯCHX@֣GszpQF?>V^$?$߿_̷Ƒ#GѣG@ D"1c4SZZ,,, ۔D0`J?9˗/ݻwUC\}@U333P(Ĉ#Y"\Q|\"iѢlhpvj ڵksGT|.t-ZP344ľ}C̛7D7nFFF2nݺUiP[lA͚51x`ƗORRڴi@]G9E7cKGAAJm9H$>*Po&tW^.z7o… h۶ڶBSNERR0`1Be69sIIIC˗J7 ݻwH$*~ԭ[&MBtt4v?h޼9"""Ś=7oƨQ4q EsssNIܐ2-P-۷/fK$''s@>}]]]'Gt(Vff&;h}@~ 
ZT>>Vx:ٳgu KKK^"OPSfRZٹsgW^8p/ZjصkN<ƍc߾}er0 D&M4NIDi@xKm۶-0$&&bȑpuueuwuuիb hK5,W(j{}}}fX@QGi'>>8@:"4j-[T?==>}R*hshR |#ߓ) 'hLFzzz3hjj*GMhݜ cǎZjQAAyLFnnnTJJMMB200kז233 [\$@ԧO@ӧOJOy3g2772AT:KRAz>7oN$tҥ2ͯJ4i$cǎJСCK]vLZt) www~W^;rv.γgH(RHHr/q:u갶&tnٲedbbϗ^: {ʺ2ƍG.\P\NSNeKVhĉ꺸(͍ڶm˩!CvN3n8j֬?3խ[SUVZGr" 6X,wӧ %''mwGƭd TfffH$ر\w"3(SS Ӏy{쁷7k(Y@m۶#FS333ԭ[R@主~?Fƍ8~8˜[-[`044,S_iii066V7qU+ԭ[ynU~s3,9B!!!+\C*2Xa…ѡC •+WФIlݺ6$$$9r$KRf$++yP,Y Oe]@Pinnn wܹs-NEh@"ͥK+u.]l۶Sr<5\̯@uǏku===!dpͤ5Ң CNnn.88 +,, "'NTYJ*سg"%{+~ylHR|qqqӧOϣ"ؾ};rssp[AժUU|y} T(-ر "fž={J8!4DV4r4h߿ =zӧOնŖ-[0~2y&Xvvv \`-V\\VXK"44QQQj`D(cBPPlll8a^✌K6t9@ #Gmܸ1F˗-Sp@`q ...ؽ{7Q^=~&VѢ2 (ʊGr;-- bWT6|cԨQR;vĢEںC:99͛ pu899!<<W fΜEHFT7ã\2sZjUn廲5jԀ+@ !&M„ U)(( rJYׯgi֬^ƍغu+ͱ}vx ak|޽,_P$ D:u0:PD"Sc͛79r$u놟~ ׯOGAJJ ݱ{nL<Ӈd2^tNI tЁpxbddd $$S}(H$BΝ9#ԭ["9'&&͛?-Vb|I_/_5gҀ:t",X&&&޽{WqrrBVVڲ"Jzj8;;7nO?̙3oƳgz84 =ƒԙpvLB!U3\]]{ncܹ*?---q)L0^^^9sFъ\\\p=L0sAp}zOݾ}Ύ8yUD4`l"3k׮qzG&)BꆇC$A  ((>|>7nH:u _|QYԨQ᯿fǏի9kN>VZq](DF cccܽ{S,( D"j@h h׮pzPr"4 7nDѴiSmbt„ eSOIE{?>Ν/*^V]tUrJNf fѩS'lٲ\Sncbb={PnB_RB222Э[7$$$1bvb!!!FHHzHHH@ff&Zhe˖@Q… uVBaJB 6`Y/^YnOڵ իWW]6/_ \tIm?={d2Y\[ D+8k,X,lܸQm]"CC\]]=@hxddd… @ $p"FU\|2._3gjSdggDLL mNYf CfScժUJ̓FU)ژʕ+pBi?HlLL \\\```@˗UŋN+BySoϟ?ٳgNXܺu [V$Kq-`ɒ%hժ*00 4h~a΄.AxI H xa]t O?^z)1cZl ///1_!## 7N>yy%ttt`iiɩ>lr֭ }͚51uT]*&%%͛7޽;~&X?:th-VWG0x`rzPj)((mnԭ[wb:pӧo.yUTF,=Æ C>}pmVe@;wC ovvv*hc]_Fbb"\]]Ǹh@lll|fr!dɘ8q"/!ƍh׮e ׵kW\zh۶KOO+Wĕ+W  ѦMx{{>}zap7*,,dzޫW/矜>|aЪU+,[H$–-[]Ve6m#ߏnݺaȐ!/2ɭVB!/\777;vL`%_~AAApiH$ x9gxy/^<DJ@+pӧOS*&&ڵYow64DFFbD޸q5kք%֬Y ?.4pݻwcϞ=077WÇ1mڴrOE`֬Y_6٩=zW)ޟ%ڷo)P(DPP/!ԩSܹ3jժDNAQN$&&{DyQ'''\rK.͛AD_~k@ s(^HIOOݻwUCDBff&ݫay Ǐ<{ G7b1ߏի}zr]]]N0sLlܸo޼av W(ҀQGj111j@h 66VE3>~o޼?~<3a߾}HMMmņ޽{2UPP SNQn߾#GӬY3bΝcKaddqƕ[ԣѾ}f$666닝ɤ N0#DDDo߾ԩ9p‚ /`Ĉkb1|||`jj +++e(5@Alf:tZ30}rׅOB9իW@ ( H^^BCC1vXjKD%H ۱o߾2ϯ?~:`ժUXd Μ9:ugј2e OMr%;;aaa4iR7F.*ԩSJiv~1 C.L2Q!"]#GĨQ]DlXt)"##;*ro%22߿G\\6n܈;wiӦU5"Bԯ_U.]ݻw1{lL:C 4oصkWsضmƍWCTT0}t^(_.Ȭ111VJ $$qMݼyYYY?> "%TZ@ѲeK4oh-Z*.:ѨQ#BÆ !J6|#E*w++rр߿o޼(Mjj*޽{d>n8xzzbҤIevڶa۶mhѢ޿D,XSH66l 
:T#dǎ.FIKKCjWbccWPcCӦMѰJ# i&%!D&aΜ97o|}}m۶2;.\7oޠUV7̙3q=4h{Ƙ1cx}ﮩP(2JHH`43<<<РA[* IDAT̋l9s)vxMԦSNؼy38ESH[nqn3{laJN> CCC|]8\͛e2>| bȑ8r>|DA+p@G)%k+,r#aƍݻ7rBAiD  446lo}i; 2'N'n޼֭[k<98x 0jԨJMyAwwwԬY\VKM*P.Tnԧ+=Qʦ2abظq#|r&eEz*ի.]˸zj N:ŶmFd˂p@T|={V̙3s߿̙MлwoԫW͸q0o>>@ԭ[})q\ `>ڵkwww 2D^/^ ˗/Y}Qvm%3={` ✐4X~=+W>}vVB{F\^۷F,_~ް%Kpy^W蒥iذ!>|vG'ҰaCX[[Gؽ{&XZTZ4 ѰFVZU&@h@6n܈ Er֭[cʕdW^!==]m/_`Μ9pqqAӦMq] 4HUѻwoaѢE8uTQѾ}rmڕ6önUl5j0Òk׮A"PmDWW[lA@@6n܈ϫW ooo֨i666Bdd$Ο?ƍWO  -@ Pc1bƌv U=ݺuÔ)SqFԩS{VF$!""uEUF]>gdq\]]qmLx"Ynn./tl&X999j7ᒓڵkF׫@hTbbbWC{_TDEEa̙/΄zѣGkP#{кuk'Oa…ի  ׯ_ǹs*DۘԨQ5:ۺU$99ڵquL:'N֭[ٜf̘'Oʕ+pvvf}.A,'_~;v,ϟ3gpKD a=|ϟ?G^^<<|111Anx Xh87oڵ+g> H$4hvޭR011L&P-@8$&&ڵҹ ǏeрU@cXX&LK +++ڵ ѷo_̞=SLիWѬY3cUuA[K߿\ "u{<\УJK.011W'%}tN,zDD2335Ȑs TREa[㏘?>\{-[* 111ppp۷qIlذ`α+M6A___Ē\`S :wڶFHOOgWw H4r,n 777077/q&ȅØ1ck׮hڴ̙3xzjիWqy K.aȑ#HMM?_M6j ???СC%iEB*رc`ɒ%8q y_- 41;t%{V.kCjRHR@aaaJRRRٳm^^D" U:A[@N& 8qss=zjO[:uЇϜΙ3PFFegg WWW=@&ј1cH__ܹCDD?~JCvv6Ӝ9s*lCzM-ZaÆlE%;;;Ә1cTݷogϞi4򢠠vA2d2-^7l+hڴi%'yyy_ 3gfϞ=GM6%Eqqqeϟiܹ$ ˗I&L֭[swdbbB2l"*}7oNرce&wNm۶%"ӧ!=HiNld2 }}}viĈͫcǎ4|pڶiӆ̻T*% H$8 ,5knGDK5j`=!KKKgl{5ֶZh)?5ȓZ׀\v YYY8|0tttUʑH$cԀh]N*UYw^={{.fΜ{LL4b{Żw0eΝcm &(b[-Z@xx8‚T_> /UV?~aܼyvǏ}׷8iiiŰ'O3<7od~%MsEاOb ++ W^-ۅh@NNqɓxbXjMoooL4 !!!@ݱgϞJHR ** O>H$T*СCѳgOhܷ֬YK.)QQQ222ZqmmmI*һwhڵCŅ<<ܿ|BnJDD3gTvK&qqqJ?9r;6l-[Xs7oc{.ZTfMvVR^F}N8::r3zj"*2'+}^_%XTܹsSƾ9M-К`~eShѢ21: BXYYqS6C"B@@o߲Fa- .SÇ3RAPPK.Xh2nѹyo߮tӧO2dĎ;ֶ\bZcLarXZZrŴ7oX2o߾v1&OYsd=z<ARRN>ͩٳgѺukܹsG^xDiInӮ];$%%KWWx!>|{9RRR@D믿4 c?FjpexB鼯ڼ@?~… Yso߾4?Y\̱b17ntի*խ#ӻkDF;;;g<=swq PPPT'<CBB3  :&im߾}9͗AJ흝Y5?S3=Ç͛ynZhVaa͜_CUjƺL*-[rܹsG)ΠArOJ 4W?DE8ժUSǮ d27ul/vS5ǐG*/rrr8&&&&L,SIKKclX?))\U޽{M5ֶLfu\ݻw}%33Qק;wXd /?:(;?~X888իW>}:G>ݻwh,2`EM5_>{Ne3v^|Ņ>i2S~:cLN%"8pR{333:}4>?~T:ǩ_Xp!`(\·X#㕮?j(zwfK [K@V{z=c(!ʔ)S8k}Ϧ)){Ǖ;w2i}8l2bS&333::::!i :Tɥ_~J獌*$lyz}...=wc 6{U-O0ʕ+N`@5]<1b >!iL!ѣctYn^^M4I=^~MX}GrrranՕcDzo֭[Q0ӧ朗D(oȑf*Lp5M\~=kLᓙ̂HJJbӳDlF^z߮]#33^p&M(VclLf]t<7-ZpGk6l!s$,&"[\{n|A;0;Y[[z "ɉTJJ 0zeggǜ 8 X^Ix|٬&keAUD9]v)y.N]DV۷*>}~>5w\޽Ĭ,,[ ,`>{&Xtk,"*8t$7T*eT׏s_sˏ?HD_.{}=MAA| 
|MoFUϟ+h9ן:uJm*HMMel׻wo-==])]j|DEnݺQ^^-]T?SYnk< tq-5SVŋ+/,,d*2S &M;lţڱ X|96m(L$K͛7/ևxyy*]ׯknZhVao߾jLNNN*8s ߨEEDDchj+ѲpBN;6S/ϟ3;wFTټ~5b99o͟0.&Gb sa}}D IDATe}R~ڵH$tu6ϥv%2M0q1LHH`oԩ:-[T:offm?}*9r.^x/ SQ%qzEnh9`E(U+N8uKӬY3>4 6?CeNM4/2fb3 ]ݺu蚾;-[T#Dff&1~la.+eN&" luM/_VjllLqqq,/d2-\un!YYYdllԗ.YYYaPu*BR͚5UQXXHׯsbРA*׿5lؐoc̘1SVZǎK׮]cuVymۖx^,Z666$JB[~֭[8GGGJdGLcƌ!ggg{:t^z~z:qY&qaffF۷og|H?57&= vْqa߾}*J@@vvv%BfddЯJunɰ.|}n8lAFqXS$HDC=]z)q [a’C(Zl~S66"tٱS':~8RNXuɒ%I;dhCF_nI ,PiD_߈ĘfCL:cvuAH轪mdbJ>>>Ҥȑ# (,Gy "ڵk}򅵯_TP22PqؐMFwUhѢHAA^ĺ$7$LXt !RBE(UbyОySjpc> Bc)XO { ENLvRȐĺ:fMF^Rl -y}$@XAWUm^EݠFt/PbUСC-66j*{uR& ;AAb1HG$@F!;;[wgaJJNԝRw:J]j8Lc8k˚e<11,n_ p7c@'4H"ӊ+hɒ%*צNxd&6ҫW^US5o08:e+yoPMSȰ?"!5o޼~:}w*נA(aΣU?Ú&bPU ) Ěttt$S~CX_@HEe{_.?tMqh^lf- gO'˿s} i)$WYS@;;SbbB.]l4XfmkD!)?zwHKO6SkbZlR/P H~W(΍; X3;yэCݺuqQ=޽{ݐx@]_CD!>/+V(??~,H m{}W?E*V8|9C-?+y;uƳ() hؖ_R)pp%g|||fl߾'ND1m&bC]^]f=[ }ΜEZнG7 إgq`9;w,'sb@OO÷4IRrG3)Rz-[DDDkǣ[Ey1(q3AFW 22@~ak)D 7Xoê2 'N%EWŋع I/0oJsьH~ aaa8q"~'Y~Nt'_N󔅐'"$$::::Du"3`CdzeIaڴie_^^N;v(0`W43,re|0`222иqcBH$T*:\:HPEKBز% jXC?2ʹ6TAȟ狂1c ] W"fa$[!HRٿ "GչXXPQ>K,Ea@_o`Z@EP(F}:慟pС?NT6@7ѣG0=ǏC_!мyssҢEID*c.r1+q42úuPR{8SF^g ͚LBfŻSUxuSe˖p1~wD~%CvxBJB@ ?ŋJl2,|=5 vE!"HJJBa 5@Ï_YN.^ѝ'DW`w 0bFSܺt*B=p'>>y j}""<1w\Y4㓏@?J !~~~W U,^K,DBY dO "9[!NB=47"B@@̙LVtPH t0lonAeµ}gtׄcǎŋ]"=/Yz57Ivv67kW?;#Z[]" (bG{-Q1DM%j1Fc]cc,H.y{[[˥33g9s̞ۆ#+n[Aָhߟ'O|w:yEx3[Tp"#K˃8 NDvFl~d[L7n?t {'FИ&XV{g!8ѣŴoɤUeMd-_ wܧTR![ JYx1FA QNs$_fx x5|y ꀹkio>'=*e>u EֲL53UǴ i$+5N?,g̚WwB.ox̭ߟV3 ƇWzxx~֭[ի[FCID4&|9J +jB$+8 y#m|O_xݻwY:}hHJA 7JܭVnO@ U`PhAǁ 7PbӲ:p2bӰ4SEVEuƭ?x>+F,DX0*7Aݗy/FA}L/ިMSA;5'H~B1Eʭ'hS0X2=mgd nx߹AN ?N=HLLO~$rpv}LTKף"Ϩnpj=_חٳg3w\BCC7n\lrT D=0J6w< C 4DC>ߎ)-amZd{Wr`gW^thl@f#`j93{W&]g4Rs%oGO3si-p#_ GOS~_I|0!hRtn='gJ٠M5/}#35E{$!Bpl%D=TNcV;F6:B x@`8́W ;\G26 &dqh@&STz& iOJyeSDphcӐS>nߝM6?%(A ?gT`nѡQ zQמG1=ϟa\VXWCbďaXtko%4$,݁.UE0.p3S~CQ؎d" hѢ/0{6AZPT\3„o?̆9K}Kh3`N}Nm0yDe#ycP */x.3>Dm=[@kx}˘NB"RuդAYp0v…HQ&ԔOo@5[HVã$оbc ~$EvvﳣjժL*9 V *ϚtYvǪޚĿ֍R,ҟDqk1Yf}9ܹC B/żVwP[<y7ټ_mJ{syC׮]y%vvv>}* ahݗAV,@[!u|b8̭6xd`S\'B߿GH9?i8z+P;e 
'Qu2|f}z[ =l]ST~Q9Y (`DJ/{!Ogc"h=.fĚzY) D^ o ?F^Ո4LEQ =Uw&_WbTtOlU“[M?9P=n/ͱCmwawdDW,`02Qu]ih͂JFjر4U +-ML^SNͭ4 C? .GoR cIքe!.:xX߀1dWѭ.PLtMfnOԻ2S7'3@tZM۔ If~jtuah:ZO1uxmMɈw'y SU%5jQ*vCCt4Qѡe^}CKZ錙diFdf ˦Qnk=ZtAAA؝`j"kRd]a&ZK0#X1,?xzqFN}(!ᅻ=6ccRUVsB}cM3j^س#f2=Y|dW+R [At5cMpvˇʥJכ7} )750e{t\&>O 5h Nu9H-'s{0窭[rz /0,L9qW&|l:at{uj )!X+dvBlƉÑ+IreD!ظq#gΤfRkuXRN7[ĝ>;WiL'q ?1~xZT:U$^;w+} ,P ݝ1JVpX//ȫr IDAT} _XGyyrYJ:ޑ+`f n.ЪGrJ#+dќ]{.b'p1:?{EF8A>-fo"1tӱzo//CFF7oVP]G5--88!6|YW^W +YZ߾@:,R B+S?;׷?$f]֜5kL&Ç̨UC N.:ng|x)mYvЦٸy3%ˑ9:Կ,G^eVhrN( ^qX\64mNU݃GhBѸ!+!9Z^L,LrI y,/.HƇ@:( >(ޜw*F\8ӊ\.Wٳ"YU XB=|#S%ѷF>1)U$I;!a@)'[JM\U$?SDGձ-W[:TE8BӋLJ-4T`QwO9. Bb  b^u]ˈؕk`D2@IRR*{\EܟTo "?! 1h5pw6>.^ /@Z;$m2-mF!tqV5VCl<<E+Y)#Kז~NAW ZTbP=٢}.E{:$^yU̝Jq1 TjM Ǟw (!aDGG}o&=-ǎVV %Qfb0nN||<陸9j0tѰw>Arm:CBе=$Us*HriӰ2#rCabDd$QZLhVgglDG0O0QB7K1C軌 ϟ?ɮ@BGQ`7gY;?uCĭO/_. Pڭp tXTk`hGӲDOdWQPKY&66OOOΜ9~xx8{(" p &P?w b}ʬN c Eϟc,Ѣ/Qvf2Gc l٣(jHF]  <|ݞLn12Q_2ҟu/)&<4Q;g#J9Eަ#2GC^>ӠGOh,$ w!RzȁE0̾ %1%^r$?s6rܦґs1OS HKѐxOâhgjݒ:!ShCr 'W7r-A J ?傥hUǫ4R[OaGJYjZ2Xfė8d n,K枸}ZGSߓ#wP΍Sul+g+h5O@RT:ƈIz}LIA4Be&t{.Hdff(ڴDw&BGD8yQy)}ZiGS߆#^x@Q2nʋSdȕ2F&KodoYS231%NwgeoC~g? 
6OoGt@*;#_QnC`x gC( ÃL=c(0+zUTIz㊕ɑ`jӡS%9n,,,%&);dtB̾ jH7$&aRH]Q%ixXE)%y"CEEfaHJ&cODJ &v /Sȶ4;Untwx0A@/)wΖ=>UfHkd,KG&Zug&Xzԙew 12cYJAR ࿂ fe|ȋt04%fU~h%/H{@f}3:ݮ8Xt{QvN"4fĤ"2^V,pdjAY3?ÊNqSA;[yFCFOtrȄJ^o>$%=j%*ݷ e 4߷ʀ$ ljuԊ T $}>$|}|YedH!q>3F^,@h cdQߪ ɱj,\rNUDz>B`{ rJE5.*?Q~Y6ܷZfABVPB,C JDbd"q y~(݌s8C|ș^oF?%1V]!c+}RO ^^^l4j8Q-)Bv^gw*A2Ɉ)tj-7^ Tь`?4͗Z7v%Qn"'kIJZ0P "f n_סV Zܸc.c  ݸ%z\$M5k>8(ԭᥜüR2q.QO1RULYbTT5ZCZ`=zyH+*OrseƇnv.7ƥh062Jw`].ITY1 Ϲ0 ײT?4@ 6#o4vJzIү:= N)ie+K˝1223@j׮5i!ZתP.;)^.p0OA#0LOE鸜]-p_hRnќ>W.6GP4ˉ"22 OEqzc6GCRh i¯M9@$P*P_ 7AQc@k/AluPgv"Q^x%oFl8ܽyh9 Cr*])Szhk* ؑ+rJsw9SQ#-j]3 \ )A +O+J:~ {NCiUK 0bw;C42Apjtgnk=;&L:OՋZҺX>5 5;ѱ*rLdұm2>o1m࣏"K"_AAH>YpӜsOlvbl?+H2PB}I֭[cdbBҺhbI=x&V`\ 9,XFnX|$zqpNye^EΝg!,HIuLQ/ 􇡰n303,ЩTĝΖ- lOoСYQ Dm9aU}-g1rŦiA zyYZv?,p0(0 5ع7$/ R>!Y֞L tlW-5)*n&1]9xUMSrX ر@D/o.oJ9~Ha;&SS5j׶>Xo'ɵ&a.D1q|NPG۩S'N Afiܭ1X9@r!^ ~iM΄>|ܶWRQcluvvzZݸUbJMEѰ =%cbI`7h'@ JbZ: BhCgrio Y:J9-QDQ2=Vi76 !.̸4 ,[*q/}(5,ݮ2'?a- IVMB&c?qaLr+\[ۇ]&hG4Yڃp.Wc:QigZ-W[3!(^dniua/=vFv6ug09sMW&[>z"֜ 5(~N/AErz7 ' rۗ͛7wmCl|̣ѯa6pz=bBeTkG zۇ_&h{4/#06;wLjj*Ǐ-WG\h\^ AƖR#DĈHY W۷ӣG"}ڵ|>h=q{@Q&_~Z]S2yPkPԫI9ԛSJ1$,(85J6#-y0r"d,A J*sMq34+} `ݷԚי߶,|q2tkKL{ /c4;m}5i.ܺuCC+9sÄ .I"% 䣚\8xժau0VD5޿egs4(<^ UiAè---ˣ1kS(|qHS%Ujsϋ2ېN$5YN9+8{9ХK.u&Iw;`a_8iɕ78̬Y7n\m۶{رcizO IDATѪQ,iAoVǥSLJAQֲUd}}gcb+ >Wy Faɜ#ah-vAP`#;s^oS{UѱM%(A vp5(aׅҼ ^< ܚx-݅:ScvV+kɦQӯ&Ge˖|gLk`,fe||T^ yh5Ȗ~R3h ~?'3g ~&N^y6Ac,_QFQN-R_|AAcdfdƤt r1O&l!< "3]`bbuIaw_6l3>^xA0Fla QcƌѻgHOϣMoݺ5Dt8267/VǙqZԗ^ f *T_"x,Bf#5F=yFjA$ n#+X7{1ٓ)1it(qvv#i}9*` \<ʕkJ-3@X|9Z/нؙ(8yիUT_Eo?m[s 6xZ|&=EVl޴ev AjY,Q͏?z؃T*ٶe3i1(w\k~ͪUعs'>>>8aLGUDtCw>$QMYxҰP(ظq JZ> Nؼy?ǯޭA v(3 $i49F%Xs-JfQFL3ga:d;HIE،!'O^|.;Wٿ.#9r$N J_ׯ˗/G/ȧ4.C9;ƹӧpwwؑTqh~bg6f͚E@@[nƎ05>Fԃ۽p:SzVVV8qRֵ귚b{f2}\&Msx_xyyq ,b>v _;پm 4 <<MС3gdԨQ<,BV_7ĂǏB K#ZmD]{hTh{D¿_eoSLaر<{},Ŋ#h0J9q8½EF!** '''ڵkǶm۸ #*uO~IpBc Iǝ9yr(ׅ+;5p{Lw}VO:~ sP) !@z=YTqtcq9 ȟo(^d0}{>fРA 6s碞EyB;'QK 9w+VE1Xv}l~/X'NdуիW#@.߄6xR [[8w :t59[šվZ~2iE2d,X-)A J:uZ jբ^zضd(Ed( dkFŽkHG{&!.y, ` 
gFRoGe|IoooZhkĤc[ƅ'/;.^U{9}|ʳg̞SZKɨ.+4HW.V33֬YLj#>;Krѽ.^uYgÎȗ uyۗe˲`6l؀T7!={@VsǟI{L0tAVD> mR d22;w.w3>}:ӦMc\~2X*{eEwgΞd)7B&Mdj /^Cch5ZL=Q>nuYj^(y8֭[sEzEНؕsb$;chYa)0 8''Ȏv3A¿ 2-Z%kyZ}/~+3h@oێ[rH,X_~޴lْso.dXIծ^?.2Ą%KIF |ڧ7/q{t"{y"b[a1|q_~%sFt52w7df%̓hz@3| Y3xoۖlllݫ<`={(WNbnnN>} ƭӑ90JEyB\~XĄnyF4jԈ'G:HYϋ]@2q -[6Cؘ^z駹x8sp0D,Z/U?/aʔ)?@[ J(HLLcƌVL(ݪ Z~.E^S: eiU}֭[N+3gΈM@2NE]Eyl}704FF_ٳgE&&M$J @DEuD5Dͮ.{%7rJ"|||HNN.픔aee%ʔ)-Q@ 0' ybWJ[S.J055QQQAՊ+V e\ zOMق[FLCj]Se}2ĉk\|Y͛zU* JCCB! jC>?NsfA+a`[JAFF:NܹST#a[TNRQ ^TI .ZvjZ \&,,7|#l366V >\ B! kT ßf vA{j~~b޽}#GH}nUZjt l`*sCagg'Ν;'J9stb5Wf&FB +saC60CjA!" QP5"Ď%؃"AT;~ u]g93o#BʄңP ;kݻiܸqԄyM[4'9lllhPrY s|:}4M>vB|yXXV&L KKJ}}ogO4R'cӶ4~xڻw/WzH4vXj֌8<)R-~ Zj={ B{.-]llSfIEEQmitС*dffM4L۷'UuuRRQ::d3`yxxн{܏?+V=5iNJ*NMhgϞ*NӦM:)**6YYEѭ[[kZv- :ttHUUT!Ў;())J"V40 ݹsnJDDtE@ R%k.TRSSKoӦMQzԱcG6lX>P-2|ѡ*MVVVΝKuooo@WS"=zHjǏz* {nr 2~JԵkWr ۗz*Qۛ8͜9-[F\.n޼IC-[2֭ݻwϧl3f ׏5kFYjkܘ^oѷo_rrrRPP //:kΝP͟M2Fszzzv#3ghʐK***IDD .$33:>:/60=z 99)… W5M6A(bRSRRB~~>ѼysAXݬau+|۷ImddܹDNVGؾ};/^+V@OO޽{ѥ_fexhݺ5^~YfߙpssÓ'O0h dgg… x1J~W <}ܹsd0`6n܈7/_7fApp0,,,,&<߾}χP(ą /bnnH4nF¾}PRRR !! ǐiiiiHKK}zzzh׮k4ez*rrr\ZY<557yYXXX#i`rrrbrrrxFv͛7[npvvFZZ믿MfEEEUVPTTķoU-x pԩ*r1o<<}:::۷/.\:X?tR >sfffرcq)ҥK;w. K M>240k,̛7 . fΜ)S &M#.\ O>prrBܺu l4jԈYׯ_`ll_BGG7oW GRRfϞX\vNJLL^ 򰳳C```kaT:uĬ ,,,eXҀ?~/+@ѪUr<|}}cڵPUU(U%%% >_e ?Á Ξ=jallPlٲCΝqZ^?˞={0o<,Z-:v숝;w{ׯajj*fjjXƎd*** »w0}t̙3Hq7 8ann'b„ ؾ};~ >>>xolT?4$, Ѱ@f8~8>~Xـ>uݻw\" He!gY+@`%///_ƨQ6VaHʕ+r .@R7~;8qW^Ç|r쯨X*S__qqq޽{vOruuׯ_T1W"mmm/Pؿ?fϞ `ƍ7nrXq!7Pܺu 666p8عs':uꄑ#G"55UloߢGx1nܸ555;cǎf̘ܹs())ի1i$t֍#88 ѣI HAAv'O@CCo޼AFm۶ӆ>6mHp8̞=׮]k}Z@Ba- =E>}XnXwX߆ ?P*@D'@Be~жm[B !..z۷]tfffr;&&&s믿K.U3|V-{iii((( VmXeagg۷oIWnn.1Bdee,i "'' 999˓ B111厵|r͛pqqAQQԾ" hmmmp8zGFvv6\Rkcݻذaoߎnݺ6~mrQL:?#~Wb6l<<<*5(޽{077nO>#Zh;vL8Xn֯_3f`ƌ@XX1|p?~iiipvv… |1233"p\ݻwӧ߿cԩ_Sŷv5sLӵ60߲{ZYPPPP'kܯDV8VWaH! }:tHJJ1"ʮR֭[OOO4ogΜADDVX!999$%%cǎ:M6ڵka>,--bŊ -N$Ǐɓ1uTٳ1bqJW%`ɒ%ر# KĄ pmI|V`\~NNN2dN< %~~~ D@@Ϡ`0(??Be벱AXXLB ##ڋ@Dؽ{wT&$$biӦ h zzzuΝ;K_aaa ! 
f~ 4jHaٲe֭F ի6o ooo\pAx=S- @i0z@@233k}#<<k֬7wG̺0k߿?`ii .cƌ֭[) ###:@gÇjBBB\.ttt*}Om`˄p`oo_D~U o ]p8ȴ\t ظq 0bL4I"+9//FFFx=ϟ?36FB~~>.^X'|\<ǃ@ իM/ƍq\.ѣGq dddXr%]^V(ͭmOEE#GΜ߾}#/^4mgΜ<Ξ= oool޼VVVJ3w~;vؘ",,qJ?Y@LMMWAKK oFns̝;?~u YCl :{쩕ѼysxJS[e˖5yuj~duuVRTTpX1cW^cSv$ %S֭sΔ˴P@@M>:uD?&t̙Z|ڧw4hР:֬YC<:uD?9Orrr4vXA|>fϞ-󾴴4Zj͙3I(&mذRRR]pBBB~ƍ"""_~JDD7o$EEEԩ{ׯQF|&HKKN*uw!t}ɓ'"##/_ #GRϞ=Ϗʕ+iСdffF%%%y m۶"2m^^^@)))5$tOvh޼y5̌Ν[{~J\.|||je-DDGΝ;Kٳx1׿|BٳD$)@D=ŤK;w&?0sQ^^͙3|>eff2̛75jD_|֭#uuu1FիWL[AA^ff&)**.ݽ{W+\YB iӦӈ#tO۶mi5}%999ZrrrHYYeYn<,,,, # -~H!abb|i+((O?GGGXXXȜSNسg|||pQŀHIIiq NNN 8{?2_.],[ ֭?^g]t F#;~~~g000]0ga֭binEj4k bq PRR“'OC^^ ϟ"l555|db7^~ݻwcŊbm}BNNi`^~ 0`+++lִܸ t99917ƍ{eTjJݰP\\\\zW[ 7nDZ& >>7ydL23gϙj٢LńN}Yf4!//u޽{(,,DnݰaÆZД% NNNppp'UV!$$gΜa6 ;w. q!#..7nD&M$~5䘿p8055eʇлwo| @i^?D=pEFݻiiiPSSêUw-[bRݻw?ƀ`hll ===ܼy֭^wBCC+x;(++}AgƇj:!!Jک.M6v;;;Jm___W"",,,uXR @۶ma`` ^fgg&L@۶m+5]`ll#G"++  ܺu֭>|E1l_|Y+c_v #Fq)&ŋqFlܸֈÌ3`dd'ObʕúuDEEub֓#2@Lܻw]vűc{{{ҥKPVVaѢEpwwq-Z W^իW͈޿ kp8͛7ѹsg4n.]=:vM6U$,9kjY@jSD2bnnMMgÒV|,+@1BW\!C$%''KuSQQQcٱcӱf͚Jϫ|ӦMШQ#4i߿!tuuu=4E,ǃ?>~g#'']vW!ׯ_agg3g0ݻw0a)SΝúu+WV(ϟ?G~ЦMDFF}?~<.];;;t/_fFB޽i&X[[cӦMشi~2 AƍѾ}{,(**Jd^255Ň <{ 5j233q-xxx 00? Kbfµk]qEEjMRT ''[[ ܯBaaa/ zÇ,~ȶo޼AZZ1sLlٲJsȑ#P(d\Dx9NdjN[޼y"fڵiB5G[qS]ҤI8$22Fҥ Zn-:kaaa zL@@444гgO @M6˗/#G… ɜXYYͭ...@ll쿶EEExyyΝ;HOOG.]e˖J`ذaׯ7%"¨Qkݻ_ 2ևʒx5(M>YˣG hkkK"""~~~b_ÇPWWNJ+$Nˮ+22R"( QQQhѢpM4n[FXX\./^ ??j74ڶm %%% 7,%%%L:ֿTAjWp8A("((Z*>XVaH=& vvv%iii  ]v]f%%%;wbO>(..FDDD'6lq̙{)ē'O0g,Y}644C A޽LR@sθr ~DGGcL"Z4 HZZp5C^^ D lmmѦMl۶ ̆۷oǶm$5kPRRK.!66?ԬTwAqqD ͛3&Q̜98x &Mƍc˖-UyU nݺI~>c dddTHcBB٫M<ڶmk׮U+W~VaH=xL+`@II 5ZG֭Q\\qơUVHJJBvv6ڶm --zCnXeQRR–-[t ۷oj s KKK\xѽ{w<{I-? 
,ͺȗ,-BJJ ܹ#GQQQxlmmadd@@MM ǎëW0ebΜ9bz {ŪU`eeCɓصk!!!h޼uɲecca޼yΝ; ȑ#x iЪU+ 8wrz*_/@ڴiS#ZAϯ\4lt6\z* gggx-5_;wbbbrѻwza=}ZuE^SL>ׯݻw1p@&1`XZZݻwD^:vEܹ3R+Ƀ`cc}}}\~PRR3~w8::>>>bL" 0o<1 ,޽+`_FUDF\?rMx<~9s&s@ @\\RRR$͞=?ƽ{4fuRu#@^ !11^}uX߆ XXXHPٿ?AII Xf <bq (,,ꊁB]]XAD(++cǎu>};_ݻwaoo] B߾}ӧOCWW5g""_ӦMxzzBGG۸qcܿq455kƍC\\pYXW"00[ll޼=z􀳳3|HOOǣG_[@޾}hhh[nAD􄆆fΜ{"##annR]ۣUVؽ{wƬNBnPLX@baU~%@: ,,,eXR),,č7_DQQQѣGCS+QTTD~~>V\(˓ȨSPPP#N>]/+_[YYٳg:u*͛+++ ??CEvv6.\ǏÇ X@+//cǎիtR 'OB(ܹs){O^ "ppp PP(Tl),,d 88D!C@YYǎ,XՐ044T7,.3gח9 5qJ5 JJJ۷oHEWkaaaaXR/ CVVViӦRO /_Ar8q|>|||PTT.]@EEǁnXo߾5TTT0qD(**B(ݻwHIIϟ7aeeU۷hѢqbѢEb޾}3(цJĭ[l2UكwaR?͛7/ܹe˖!88-[uWd Ǘ/_4o&MŽ;Wjp8q 0ydp\:tR 99Z.XY (-H[LX@۷+YT̲`H=$ -Z`| nB`` <==affV`" PzjgiidxxxҲAāؠq (݄܏aaa!C 66۶mÇɓ'cԨQX`A!66EEEرcqm WwLҲq5=z4 ##/_fb͚5бcGkӧe?^( UUUfzㆵn:ի/FJJ =*s߿/" ۷RE @JU am?3 KC >x,eRRRBϞ=Ү];4mW\nNNNQ@ @jj*^={6PX E4/P꾧Wi7,___tFFFck;}VP*@s˗/#""7na6lQ\MQQQ 1b&O CCCŋ5quuǏ0vX|&&&àA~x5k/_@SSF}AD? p4+W2`Y3yz*RRR ۷W\p89p!deeI헝];K5PF`;L݇K"&&!# Dս{wJW!Pww蠁0x+"''Z?Vw@^^6662"v޿+Vmabdd^͚5|~իtuu57B8p&&&SJ}ǏDx >@i޿p!::ӦMÄ 0c 7ƠAk.ܸq[n˦U!!!o???رCzvv6Thѩ{PSScܰ444`jjp3ၧOV9 RCC"QFA[[` u,Pjr~z*~`H"++ _";; Xz5Nă@__rrrx=:t耽{"99ׯ_o'ƮHNNƭ[|\l߾Zb|̙֭3ѴiSI g5 {ƍ8t3oG{)-$$cƌn*mPRR}}}ݻWFr̘1x,--1tJӧO5FEabBWU+C߾}sAII h@ M*ކ@ ÇeqEEEL:GANNqb``W?MMMrhNNN V+PTTTU.  >>}MLLje˖L1‰'?j<_]ӭ[7թVVV```%KQQQ8~8x<7oJ(**¨Qrqx< ‹/)S`Ȑ!Sv5Bdd$,-- 'X[[.\:P(Į]χ~>|P]H qFGf.UҺukx<8w23f1˖-Chh/T#77܊3f@ff&N:%Ouu#@x<jAAA2Eڕ+WW)BDHMMeaH=" &&&VA1amVo6m͛[%nJݰD+Ҵi}%i={|< IDAT:vFF<==a``UVaĈx> cccDGGY~ҥD8z(.]G]v8v̿7aǎ:t(?-[W*]SSS|(((@HHk׮AUUR`Æ h׮v1pip\8;;R>### 999o߾ׯ_Æ )*F׮]rˍ144{n- VuB"j#/P*@}&U*Wg- ,,,yXRO "\rC )_YsNc͚5R ##T%%%iA۶m7 WWWW]Y~իWe˖Ę1c{۷X[[CEE2ř3g}vl۶ ={pC˗("-- [R/x!r>|ЧOHMMϟ//ݾ}Fk;}4LۛTTTH(2m#@۷oќ۷'WWLJF>}KdllL2{%ػ,ϟM8qB!Q~Ç7w\266mR۷̵@.\ "xjܸ1}T\\LSLݻK3rm6 dddD'Os:+ www@Abmdnn.֦JM4ah„ ZwCdԩԹsr3FڧO]xZkjjҦMuoy8p\.x,'''ѣD/WjÇk& +@ ֭#555*,,,ߦMHSS/^L***$o^^q8.^H:wϟ?3m~~~f̘A<[lذ);;~HYYTUUC&8.. 
Ȉ>~(š 211WkM"RRRŅ-6ʼo߾F:::%1H@ }}}JMM%R1*&B!P6m֬YCS3>|8}UuPHcƌ!@wQ@YYYLҷoߘ6{{9Ӷ}vxW"#99 .[l!>/YMyyyUuW^x,rG'''֭[ $Ç! z@ '' QVHQQV^]aCCCZxq&Gӧܹô}Љ'wޤ[Itt43gTx3g)((:ZߪU+244xB!1͛7UzYZ8/"_jj*|RVVY++@f̘At}˗ ˗/3m߿g>1~xڵk}E{=M4+˻w >i"---7o^nZ|9y{{Wh].x@?#1nJ٤T%͉'@D8 2$%%á#GTwܸqCڔQa{{{6lXwm v2Eѣb}۴iCf͢jҤ 0k4]bnn.ٔEll,IKK֯_/vr.222-[Vxz]QUٽ{7Q LKK:知% #t"!wu""ϧ֭[/Fֻwo/wBtuuݽ\d  GGr]222?~\UV4g͛XO?DJJJRH۶m)Sᆪ"Zb_F*wAT{В%KjUo=֖lmm%,#//Oã&`ee%1aڶmԵW9x~̙ԥKj /STTrxѿ*Q&&&(..Flll(HJ- ˗/=ƌ?V{ u.%|Fv_ ,W$YYY )6@aa!!//SNI ѰDdd$1}t>͚5ٳgニ/ɓ'Xbrrr+P(ĬY&QRR:)zjp\&СC|[CDvv65j...HHH(weYYi v777deeǧu54޿Ϥ-Yf!((HwJeSRZ@R߉V;;;U-BR +@eOdffVJ,_0`J/!hY: ]lzzz}6Z ǎ2F-/ƍƎ[qMP)7,ggg4nXBeֵ111:߿*TaahhXX,RX/tڵ~nb a5o޼BYYFʳ0 (D6,] """pYB}C0mضmvj}6 KKK| ;v &N>_`kk$Դϟ?? &`ƌZII 1m4L6 W^999}ׯ...r044İa0i$#..cǎ^z3gά|ĉeaaa_~wDv$$$ݻwL˅bbb̴mP(/b[_, HubdaI^V **B˗/#""'OP5<"  VVV(..FDDXٳ1zhL2FBDy&뇾}"%%~~~x9peJLL͛7Ѷmrgdd`Ĉ066޽{̄v؁_~{)wӟ!CѣGv횘%&&عs'9ϣm۶0`qرr10١* Ǐϟ%\aiiYx@"b8q"~G17͛QFVVVxk駟_޼y#%sssDFFe%jjVYZj999~J][UaȿH@@|~1%%%X|9}}}U &&&5s\KX@4iUUU bffmmm&DÁZh'''V{MUpU ŋc899bᐗoMOOg޼y۷/P(ĉsUz]8 aaaܹs1yyy6l߿WJXDրmbҤIx%;J_~Օ.Ȣ{055s*))Ahhh?I P9۳gڴi#F ==i733XcF $H>}???=zo߾شiS6 @d&-sy LmY@gT FSSSdhh,QXX _% VVVf9y$^|)))x֟ >jUQQQp`dd>}ŁhԨYfpE 4pDFFbذabuuu 4ɆUCll,бc ŋqrc?@ @nn.aoo_n|888 <<W\A޽%DEEAMM :::JOt0i$} 9GRR֯_YfCp8?~<Ν;lӧO۷J- g"-- &L`62anX!!!cÇbĭ[*9u\T?wCBQE;VLZ[sDDWPqMD10; }^>ޙ99O@?Ct aF Vzz: <<<<T\rEUVVfϞ;f͚ #!QCt>6OXr֭[*UB`` vڅ۷zL\( >|СLLLƍhٲ%޽;s'''Ex.^|`̙9s&ڴiS9ٳ7FqΝ|U222бcG]6_\qGx9q2k׮%@@DT V߾}#o߾Bɉf̘>i$_n޼!CH"PdddǕLF *jҤ ]zUdbbB~~~eN:daaAzK666ԬY3dzH.Ӵiߟ233󭓑AZ"CCC?t}RBB999'edd -_ ]xQI К5k(00BFhԨy{{QVI&zU(0 ,7k,b.\@ǎ#Q&##hҥZK,I FEDD=_Ν;SÆ -צM|z~kqF>ܹSX۫T^Ϝ9C۷Ꟈ _"$$eʔ֒ O>n HAQ!: Hll,RFLMM@rzjTP>>>[&JsNθu.^ȹφ1ڷoϩrc .F czTHMM,Ye˖a۶mVGtN8oooβD'O\rEJJ !HeA߾}(wț6m#F7nʗ/\ RnPz ׮]+UZZ(߀sAfУGXXX%HP~}-;@733SKc½{xzC Px;sαGEEqFu"U!!!:UqV^$̝;WVXqOXFFFZ6 RxFH$B:utG"88 ߿Ayf+WGŊq=VZjKE(Ti>Ձcܸqx9R۷oQ^=\x'Nĉ]IRt ϟѬY3wk1|_v킋 ^+W=p$6BΝahh˗#%%@*.,B탩)&MPiru-)Sa,]T#]$ap_ >C.,}7ª>|NNNya%$$D֜3/C{˗/Xt)B ? 
K PځM-]4vލcǎaժUz%##ׯ OOO<|ǎC;͛7)HZZڶm{ٳz 7v¦Mn:NKTܽ{^^^HLLč7жm|HR!!!8zh@)C6bܻwO|2$$D177G SS';> X|^:iMA\X*k^ =#AyxxxrrGN@THAOTp,Y"UiР}uvaɘs8x h߾=ܹv}:mۆ*UULO>! 5j`ݺuHJJbUkB%hbعs't ߿gM@a=)IpHH2220h V/^WmÓ3kצ;ő!͞=[#=33Ў; ɓ'e˖c.S M0A+=== FʺJԠA*^8}Q#/99,YB$ٳBC.^HԬY3222˗/]?11J*EժU4( 3g ___JOO׫T*ݻH$cǎ=T&XL_~Y?Ne& IDATdjjJcǎ%"֭K رc_~TZ5ADB~w5@O>-P=www@ԩխ[W+].999i%%%9M4@U^žFW>|`mcɒ%4k׮Z}\./P;;w5jY~|h޼y/|[2۷oqqqBBB oLL K.~NEy͛7۳իWtW߿E7d2+͟?)BbJ^* L&#;;;rvv&CC3\N[&+++z^u jy[5N___  ۗ.r "EЗ/_4ƱrJ244$244,O-Reee]/$$P\\unJYcDDׯ'HD߿ת׼ysN!~ԩdjjJ˯Ȇ H$q g]9Y[2sL~ w244T{H;wkNÆ s9:0^n={m۶a077GAWQT)DBsـܱ@ȵkRǁ~ Xp!z/^`ӦMpvv. L&1^~'N3… q۷5s^ѰaC:u >}^j(r qtY1n޼v킅E_|۷c(R:](bxlllC 333$&&… zG Hn_~E^4z{{C&zo7o 0vXHRlܸ@cL&ӊcذaHII}Tl{P~@ZjhhU.?;^'^ Aj`oo7{lcĈZy?*b.] u4tEE G\~ ϟX,Ѹqcz k׮Eɒ% 5nݺ\;ws9sвe|GFF߿GXXBBw^ݻ]t{wŘ1c0x`|UJn 777 :5\r8y$A7/_FƍQreٳGMLL' <Ν y(QH͚5aeebŊ[U\2 8@\֨Qm۶pVPP<== FFF{wARRg;Ó/d2;wH8tΝ˺U GhթS@g}Ҫ;@͚5cm{90zh*[,k^͚5<<M?7 Nٳ' B:zV9WWWׯxB-[pGw/^(**ԩC88̙CfffFG&e&$$Pʕtq-C:tMrߵk?~޽{Gj/X;ﻒE~~~$B tM^: eիWو#]1EJuu}244SLL I$] 4믿4[hA+Wۛٯkޅ 4ҥR) BСCe~J4|̤/jkVdժU\ Տ@ XB XzgϞ@3f sssg{{{ZjުG uFbXF }P(hԨQ$ ӧcǒD"!@=Rݻw$ 2DÇH;;|c?j6mҫL&S@Ծ}{Լys:t(RZӧOdaa~^gP(Ǐx&Æ -Zh,>|H˗z/СC7H$NFFFԺuk~vMTfMruucǎ͛77DJ5k0`nݚn޼I;w$&͛7Z҈a"sssrՉfa]f( z S q4n8"qZ`F=z 333*VZ.7^bm' oނRؘjժs&(((9\~NVvEn*)4>|T2eҪVJϟתvs#? 
ŋ \wÆ $9vJdbb D"V h5APرc255H111TN3f ".yzzj,& -Xu:ukW\Þ200W5ٳgkկWZ,5sLÆ gFxٳgYϫs# BwnݺZW׽`w4m4twwwm6rrrҹU(TVVU`{+WLDDwa*ׯ_SNNIF˗/Y'hiA9vk_lBz4`$mٲ S؀<<<<+ODP#bGi̘1Zr:ꃧ'߿oN4 Fƍ8u-Z@ٓaXU;P(:Ŋ8INNZ ҥKT*͛yzz:xUƍuk(#MFӜ5jĚgu.eD]x^mRtʻ+dmsd>5kn@2ӧOG׸~%z5?@@)))tT5NTFpUMDkڵ|YbS{.k=z:%}8N4=Z.Wx7?hjgeei`ƌiɬe  P-L,k`;X9KF@@rTR~WZŋkz-o٩SbŊb7o: mڴ*K 6 .\5jsYf:XSLack׮˗իk^ѴiSRJQbb"( ر#gGѪ3o߱wU9ЪU+ <333-n$ zW@-UT)jذV*Jnz|||ӧ~ Ø1cX˽3qD2y aGe˖J b ^I"w988p$nܸNnW%%%Ν;5j$ FRR.9={L]Ϗs@S6ʕ+~)S>}m.fյnݺ|wN*Uc+3zh#a'pgΜLuLWzHPО={XwfϞ$"tki?yW)))dmmL!CX쬕֤IdkkUV(R\\=y}}]M.3<<ؽ_J UlqTWϞ=yxxxK6 ?ӧOUnΜ9?%Kd5pkUVUmB(O>x Y~uVnaa`Vd ==K,̙3Yؽ{Nc4m&&&Zh~7ܹsӞPڑ :39rα%$$LJQ99{n;ܸa֬YHNNʫ^Nx)Ñ׸qc0 ^z!:::?~D5FWksL2ywcƌ֭[goef2@ύD"r9lڀJ76TpyF+m۶sف={& rΝ˷+Ǚ&X[[X=aݼy-[N:ob ³gϰk.+W...xgׯ_/=<<~zּ? 6ĴiXΝ;YpQP(Rg;w.T\cƌُ\.G=[ݻwk,zRa``[z҅+"ɓ' eӸqc/0_}`@bX}b1X?kDsE+… Fȑ#!vZo%9`*Ce}6o*4ڈa3>]vɒ%Yڋ/ Z ?;w˗/H$g@lllX=B',"•+W퍺u%4b1ϟ?ǖ-[Ptiu ޾}sqQ~} oAa[xx8g۷oG޽yB `ժU X,,{n 0sxٲe?~|͞=kԩSѾ}{4۷ڵ+ .X==}U144DZ4b1 P~_zwYaXy=)ׯ_YS\hY[[cȐ!Xn _ *h 8::z|wz3ޯN7Vm@~ӧOWgZjj@\|$^zzثm۶]n}LL gUVFtJ+W{[n-P2Ɔ5x̌BBBms޽:P 6rӮ];\ޞ$tAl@Zlզ I$]5x``w`& )33Sl  4Uj[Ŝ'~Ew)6.\Zz麾_~*U>xٳg~?:_?刎ٳgq)}8O>0ydܹs8u^_yh}7n`\1Y&N'''<}`;]HHH`ooülٲ+Wfer9ݫu T UH޽OFHHacc^zaʕz x{{?EDDz\m?ƪUX~aÆ` H!ʢ jܴ)x@X5jv7oөL)mo,κmݺUݧ.r9=zU.9͘1>LDJnY2b1]T*%3337n99g~kԀ߯>Abn߾MDD`r+[ąH1{{{-/e9ʢC >Du׬'boa@Bf9d29s>p ߟ?$^^B H$ WG݉뾫̙3P@@ߍԩsgx"wdaIJJf͚DB)d1*/M6Ug@!ѲUK~:? ǖ,''OБ#!.uj[@P2 h(A/HLȾշn{&;S=kT5l Ӂd\`ine1h Ni\`ƍØ1c`ee'f >+Zn͹(# 9se{ IDAT: kK!|QQ22; "8}K +jv;a͜0B$'8sDe`_EZA "#)~Bwx cѢE 0 q`1ШVpY>^D@y0DݰyfV; @o^TNr԰Y!]  R2 6o߾5k֬^ :B: . 
@8=#۹s竄Z%9}4F Q(бP(k,G`W %hۺ6nfE *TЫ3P Z )(8ޟK+@رc5jnի2h=*Enec,}S9< 5lq0 2`P[QTnhjf*f" !.#u3BE8qs h",_L>Bo,{u3||OxPBF)Saƌ000h/)) ȐJD=BX*&&@FOc8rY­reܺP^Tr`֜9HȀUsAPC g( y-ȂR)ʗ+KExx8:tl>1Cfp`rJ_ɸv ӫ:oۉJ*z~<<<HJ ?9LҀ/fM`zd2< r&Lȑ#9=a:[l X'OrYhϣo^ Y20-r.-]7U֭&ϒ#cN4(d mU)MF} 88 0B,W>u2p50)ǏGN (,}O@ ,q}-u 6 ;wBK'{*I8xG lu͛7ѽ$cT`POTW;)7j(aܹ@B,Np!W]G',_;wV0 cv)S+n}&,}Sk«dɒ'stDTT4FMax~>n +VGQlYa}D#$Aؤ1*mDōېMG1}:͛W`lqqqܵ+޾ AO}1u(Cd9J99hPuu޼y[RfZ4W(.&bŋ`„ ?W@$==ڶ7!Z A_1z'`ǎ۷/.]6[T=poM͹wJkaaa={`ѢE:0rShQL4 Æ cw;;;?-\]|Ym3ѠAcihQm*(Z-P" K\,#`  :%\g% !Cq FL<:?H8 ʭyP  kעmVpLVFZvRnQs ma׮`C]ӽKb0:(Ghh(ڴijػV_z眆 == jV.=On[(cbٲe7L nHGyxr3 z<v5s?ӧOѨQ}Y|CT+̣H9!9/_fΜ B-- իMF:O@m$ J`%=xnx={6nwC|Ŗaڵ5jTOo8p dY}̙N`TtH7ML+S,A"!= 'o$#C14mS D͕Weʛ(oT =.'''ڸq4%%EJ&MYF"6..Ljj*Kf(,4*TZ3]qq"+dmjX tzi92׀;hMW_3ӨTݎDF"26"N4 !J J#&NP4du~6'̊&LXzK=Ƶ@4,R>JE4x PP =b$ bPK *0Y"' !(d(иq4zPPڨ3 @ AE@!Խ (2_>ā!#C9ÀWe峕iwπ=) *m2H@zks|>-,aP:Pyϻ Dd`$#ÀvGɛFm.OXeQKdhH  ѵ4yS`Lmhzz5dÀ* 41ETD"Uyb1nd4ID5$l՜Tt#9$1 fL&PHeر$04$[z}_K֭$A)K:KG2F&oޞla4~}m3&oۯ41 ȩFzxoT͘4py:MfIٳg?12r)))>jĝA9gW=&Z:'3CN 5 'O!ݸ10:qL:ȼn'X'B_<;-j㵖%0F{..>}:z X[nPu^<+^./L]3 \o`'`P[gf`^M`X{ = yh372(Z$bŢXOܑrӈ U5Y)Y#>A ζTa3d5F*EJe*hMoZ}+ծ/<oBdb:1"P\ >`"j 0z[#4 iy)B!P4ax) Rw X$*S)P`ӧV2"%iii9|V<ر2%zEwRxTi: Z 0e0f0vtqbl﹪w: XNi ЯPH/ޗ߁Քu=_QϵZ2X<#a-e"JCWRP:J/q~{N@@JRX;n-իsEhh(݇;%("S `QM?c6* _9<%p( & hLME氱xp?0߾,?ȯ\ s0dFDp*ݿk֬x|\/şQrM+0VPzَ=H;{a \;Dpp0t"!Xg%e hkuRZٮ[R&ax=H#Ƿ_apFz9d2ЗDuGHng7n^Ngʸ iDb@l;V@!f] wfz1c t]#>*g $ddd, 4h@+?##6ðry&Tdr fޮp?'m@kDE1bŢdpTsݏʵ׎ V,Qkn> JO1:1(_ˀ YH&[]0)wR#[@bJ#[+=˲cSb|K蚭'p%(+r!!֭[m۶w HHLİJ=Щ4gqq0:nSŧ`ARؘT3oX`ipfJ@``c Sؙ+<0:L),m}Tn`49:__N\_#W}h3$Nڵks'7z*T"F>?s&f2c!0:*/Iy=U2Hc鬁{ B@UC$,'Gv Bxaaa_vZJ:@P&>[Kex@Ipp+LuY/&BscTP> | |XiD-Ac +0S*R rthG ݰWkM|)\`,:TT BTTz!|@"E`ccoB6FFFxFGE—$ 神~p 06"<Ā1`bk[&Eu  J,r]A BV*bꐪJzv`k#dR#@69{]C*&D\ޡ갶_[!C=[dMCE4 ŷoߴ+di:v>{w8/N1$j<ꟓO|O8%(_gd ?2p0U>*tw,={#=0h=ݾ`aOݞ첽M'.2h1<_L\\N][8}D_q[;$]ݛX@ԩFMF,>T۴(a-6L.ƺBPA=. 
7nѣGشi#jQ׾OP <99J7RUd+;C;>:$kOقur@FVܹZ۷ow:ʑ6_,F['}>>)5BVSurQiq OFHL}Ҥ@B:O[`rJ@1s.llcX&U ,ZQYA)>e{E дΑf];dffU#)]ЦsΤr´љ=X eh$ǟv~ߛ\OZ麷Cq2k۷o#+#_k%O!ƀwe3 p^ڝ!>ʔ poO]Py,}x,ܿ $ݺ8:ZGPN/#uu򛷕I/kFiShe ؈`k]fM#[R f?xsaյ]erPw(.7 栶J׵Cci6JžF1d|̀OymUF6F`g=/y_AD>M7\qqH%piKT> MX`A(f 9EX]TP-.10fc J['3@yK +rԯt ,4bsh֬VH~߳@+'N?nvK cT0}A:f+_#Tsk5n{2긤x eiyw^2*At 8&iiVGV}MW>l=2->y<7LeP(T)fwv:xVM{{!}$ôGZ6|ǻVǵtE(t >^}y&YFFF X=sҥaU=uj>ZM(3wt+ W_U(ORZ%xyyƃ,joLjj9FݳQR2F2jj [ #Bfг GH Tj-On#Q$^"O,bvxvywϕW٫.eimeFr[+6.k7-u=E#`HzWBI3:U0k{!}4 *|š[5k ܥ5󬤊ۙNt'1%cdHp3!{u^qSQ. W  >)&AۚO2r8+J[ N4P0F_s ###w#+ fprrB 4ĭr֬n,ĝt"[hߺE(+Pb IDATVon $ၲ\Y2kFagG&9*;HL8 9-$kat|& ΂u1 |1OE_ ? ߎbRu$?z o;OIWt WAѡ3F6SK%GCա`"mdRo> 2ZJ&r4NP.8Fb\ \#rLͤflapLQ<$d}yŃ )7MD |)kC }qngh-NXF J%W\12EQ@[ץ;%qa̷:z3Ԕ$*40LK;g;pLtvwӱu5= qEVZ6nd^k@x-?EE.@|ԉ3z诅F 6oJ\rsl\#tɨI_Wem\ܺ)PϸW?Bݚ8 qF\}Q\ $Ȇ8hqpz~h A 2GSg 3___V{wz=|4FGc7Űq|0\E}d4!޳wSw,BٸWJϡ2Եl؀mNɓ<*"ƥ y9rUzcb~LQୱd搑Z'l&[ R 7.e`ooGd㐑  fWJsx Mkp9LFQq^zP .9l9:!e_p;sYjfO˧I|nszKSZ53aY*Uŋ̄ըq!r;22@X+-l: CՂ$!"ҿ~.+WxpWvYQZor*4qnN>aRmJw7ҡ^WB$> O7{AZI)M{0$DagOx`G|2B*اI Ǘg*I6t%aqE}4PMLpD }֭J$sQQټ:X+*i^L9)|zꉤTJ_I.JƯN:IZ[Vlc#2.FA*.x(kZI~(Wq/yh?wwJ45jQ+t:HK7RBtgΡvZg;JlxM:u14ȋ hz}RhX6,v+Z&}Nn:NFFF1-۴`CaӥQR)'M7 ԓ?B5BhsXۓEKٓK͊C9;ʄYsSCRB{б t{ zwLy47oLaP(Mod£i-fآ-ϻ\[?z,nP "LhllqkFrNرoۀi;ma4;:v3/a"eͷgzI>pF& Usqi(JwSg[_H}QݟFuB.40ꫯZ$O6c͛LE1'hoDQmAt?o^ɐo.]FUD¨~]K'g6P*Q5n톴v t={(^*0XNJY x ^`]·Imdհ_(o.ݻע* -:P;KoD9 轘aP[ngք?O}#8(ubۇ֟dz7c@M:̩Ez*7w/(WEwfYˣo c,Sf`4  ܎JVV^y9MaR=iOkmA9ǟ y:&\$$S 0fG~8=C\'HH=R@1I]'=;{>0T VavXeHFH.L06U4ˣ,dvд,m 7,^Bjs보]%A%JG~y@vogeɡTRh aQ?Q݁U(Y^zßׯg 0 -$JwvFNؚl'[7s?9?M+Qh4hon00HNFܹ Oݢlu0bC:::2d`dŌ㼌.;F_I6nvx!zkԥ UVeĈ,Y[ HȺ{ n,###?BsdZ-^5krtIna|Yl,[-~t!uʔ}Ŋ=ߺ.['ZdL> 6p-5Ku}DSAa(U/N]V ^~B (2K}޶"WDj'WPZ5>|Hٲeٹs'ݺu###Yfp<ꗆ?{5 [võ V*{/ˇ&,BrU~_vhFM||<.]Ep%6llTdVж.:tO_GN}f,_bӮA >T-x;nժ]p%3zЫhc"5`5kFtt4k֬?pX]3ml{؅cYx׆{/qF+ݻ',\Hesit?D$%[mzU>j{{׳ =f4fw_kƏ@˖-i޼9s-b ~Jzx 1:騥^zl\ChFM("~w7lbeTZرckН>[y7vMʕ?~s,_=G6gk6BzW8Wd $UdBfϞͰaPc?." 
M=s ???!پhrcO7` w\R%+f`m*^SxzzoAo;|1V,Pk*h"fϞaC=|m=ǃKkIݳ6mz L} "ZHv,@1+ڵc??'aQ)n?=J~b[vx b[ɍ76r'Ɂt*4i]P=fȸq"$$m۶3g|p&,n9W'R3J-n<Yc 3>#J?. U[ҡC86ÆMpjwR<+]v6sBi qy"Uk+-hT !رIKVZ*VЭ[7֭]Kϛux͠h.f…ZOR{ԩs/Ym ݏm' =xWWW6'((b^H'Ye~tW͗]/sp=V^M׮Ew(###?lXB`қ4G9QzL-jnpT864| Q:{iS*=v<;vDJKˢLX %1 nnnk?sPW[C*/fOkQT(J^!=tU ~"bc$:][94%UPB[ǎӪחS'hR2|K?[TӚX;cFkQ(̚5)S&jmQ:, ٦䎭CB wwwݷpamΝU߮)S#Gf &:w!'tt0]5o#([V4`CɈ(4[YUQ6mdP,ŅC3wPkor+]Dd@F9y83-["p6Yi&<-**&ބgt7ߨ1ѼyުYYYԫW2ӈv*:UѨv^%N%)8pl6|_~%FkBb ~!Z!TR!Hu\pTi5m5Xx1:<[o>ƍիW>|8cǎ|r1fԇ<Bb6CR.[%d-^w}ǬY#7QҢt)'Hu>ƭ6KP2Zli4@||<Ǝa_)t}^秓O-C g֬Y,|}}0a[n"]^iXZ]y %!^,[:v4N ŋԭ[7v۶mcڴI\O:v4k|EIe}pOp㡂<<0wx)))L:֮EP|e(k#c?wq  ۅ XD)+ؿ?c&L 6* ;/OmZjPEJ(E*̙ 8bUL8-`砤aǒx6vJ-gtGdrL:~OF0J2Π&###G(1~xѠIako/x˗ov$VX!-OV7ݻ*nR)EZbСԩSBu|g|;qDDDEvIt٢Ν:uҥXl뷵cƌ.]2Ꟑ ʕ+'ڶm+Zxdd?~hԤ3feEN/"!*Tv3RuaÆSN;رc8s(^2d6mhִptx9*Y\ѾXhHII)p}|PTbʕArqɻFR GƊ.]@/ !HJJعsgVZ%lٲ<ǣĄ { B܀U)DvF̜9Ӣ{VŦMDϞ=D*y ­R91tPqIqqqbkMC1mvm_ .EJĉVvoQ5*h޼P(Oш*U[t=:N۷O_8Gpvqb߾}BY4fbb;wkV8/gSgRIppݣ|{zzT*}/_[}%&&k1RlYJ*ELei)>łSE;991fVZEbbEDFFPvm7YfT*lm22222 ...TXLX–dSH֭IOOhspe>>|E})9ҥK3g5j`[{%22S" 44Ԑ*##~}LӦMQ)P*U C#p;vB+\ !p_~/"ܿߪ1CPCteM*W7oAٲe-6>CT*/_ADDxxx Ԭ,iܸq+dddddd^D6@d)iQ+ իWGPXl4n{{{+Ǐٳ'XP5P.222~~~95kҹs"#&&]v1ydJ%:P4 k6Сai *'NƢ~PD<}||BZf۶my$w̢V@,U 65vi[ɓ~_Ҩ]ᘣ%Dũ_.####c"HX T@r3똊}6.ڵ?xɓ'CV^R$**ݻw`TT9s&?ܹs|DEEaooo <7ojq[n𜻹U,jclDjհ/T';;`УGϟoU%7ƫAgq 222222" 2鲵( Hjj*)))wvvaÆFoTj5{ɉ 6Xws{,X@Xpss_~E2ǝ;wX~=ƍܹs4mڔ3gR?H19suV.]juߔJ,I =z`)$!!ti JU?߷;m۶*eڴi\z;vXeXbTZp)K6mopՓ)Ɔ5jXXkxY7 .L2e, 2 H,6l`>ސUhڵXKҸqc4i)))Zʰqoqrr>(9߿ŋ&440._lmC~x뭷Ϭ^[nQ CL^Xn]kZVs=EbxNNN4k*gϞV^ gϞj!"""O+ eʔvڲ//]U@J,Iɒ%2@rߨ;v .0|p{= bcʕ|7T{FiU)޽ZjV4h 2 .a(@ʲXIMM[nT\u*V%,, xV%E|}}@ll,O6;vFF@qqqrE tG =z`+4jԈ9sX¢뉊2J^Xl͛f k>tԪUҳgO|}}Yt鿺 H!Cо}}vQ$q6Gͨ9|}}6زe~]O (^xgŠCV[ܧvڔ-[4Ve:t(ŊcѢEVe :ht_ɍgeddddydD&RR% 4@FMdd$۷oTRnݚϓf| :Ν;3p<=ӧ T?l߾5k2sLFIll,62&ϟO:uرcYYYC|rujU ;իWݰddddd D6@dMrr2>4y0 Rìj*V\iР UDVw1f\͛GݺutGDDЮ];zE͚5`&^|{2ej& \)UVl2,YBV,悈h1 +@N8a2E ӧy}*VHʆ3XlU """ _Ĕ 6)####" 2y((Va0 ٳ| :A{zzR\fΝlذ ,A/^d߾}&ѣGS~}سg{1=|Tomt.446ϓa^M9q#F`С 
>ܚ.;wPL۽+T*Enpqq6:oAAA/EAdd$K\rFsŋVC6@dB 0 o6m ,,"####C6@dH*U5@^FqFMNdddm6)w[jũS eU=Fc͛lڴX8z( 6dȑ[2i$XS,ZWWW}siii9s(n. ;;=z`ccCppEs[Í7BPNڛ ..(TA @RA:dU'''zm2Gyw믋J~r ^:nnnrYdDsrk\jԨFΝ;c3glܸ1 ֭[h8}UUqF,YBŊ/Zŋx틟NNNzj?zQF|1t:x!>|8/_fǎ/_ފ X-Z9uuTR(59;;"r zު~DEEYru㞦Nʭ[شiULhr-TBla.VÚԤ`wϞ=̜9/ۯN:+@1bݻw_~F=zʕ+5j /?κu (%|wzFe|HH+W6U@Xv-?#7j VYחx Je˖<9OO"U@|}}V5CBBצMݭFS;wf޼yVK/rUrrr d<;wΐMFFFFFEdDooo]fjjժP(~:qqq 0]2}tT*-[Wā5 ^ϊ+L/[ ^O*U嫯bر\r/^{g2m[@0##ZͤI8q" j R4 "e*?- T~71s HE1˗/Oڵ56ol?m4"##ٳgU^$7V~9$#JYUQFFFF"cxsk {{{݉W^.]uY)oժaaaX5gQuVlҥKM/eeeh"J.͐!C_>̝;77f8q\p!DΟ?O۶m;wna)7oޤDWS/(fLJ&MQ) aQQjU raHIIڐhٲ%-Z`Μ9/ ,22 *Ptis oom:"c7I7\0l۶+W}vJ(aQ֭[ŋ(HLLdȑdGѡCRSSqpp`߾}ڵW^ys_ӧO|J=z!DHFF.\ޞM6accSX£GT/ŋSbEB`` {5,D{nT*x{{JU֭[VɈjܸQ%L>?Ǐ[7sP Vn:9DFFFF&?dDwwwL U@Z-ԭ[~7y*`ȑ.]'aaa4hЀh-%887n0eʔ|ۄAժUM{РAdggӽ{wJ*k2Gjj*Z5kZկ w0W[.S/hӦ *j7,T{[4?:uDڵ_JM`tfE6@dP*xzz/ɓ'V^G@6oLpp0}]84h1c_>BVZeN!7oڵaÆˍ0ٳ t& & b6ʔ)CN nX) ]Qǁf|Xo!`ĈT*.]Jff&| >>>:u 6pqC 9W)k7o 4?6?::н{w>Jϟ0q57oۦK.(Q5kx=LX  =*ȩSM666L4͛7gUHt(JZj%ȘD6@dLŝ;wsZO?%$$M6ѼyspkJ647ndΝ|w:t,XI_~( ϟOFusq!Nj6Thh(yRަЭ[7C?V) W^1 {{{~m6nPu&,/// El֬rڵ+ŋ)]4_U"""pwwxQTѺukO _z5qk]vW_1gڶmK)]t ggg5jāܻwQFѡCׯM4!**Yf={Çh,XW^fۅ~4%%%sN\\\ aܿSR%\\\ T{.PpuZ1իW/@˖- e888зo_֯_o cǎeO{ix=͕@? 
&&(L6'I6m(\0k׮d(S \|X4>P|||9s&.]ʅ+}KJJ {ȑ#j-Zĕ+WhѢQ{cƍFɕϟ?o!iӦ.퍷7ZJ*gרQ#GlT]=w0'37oN\\k֬ageժU ,[l?^/8;; 3f|XzVct:`+޽/^.\()R$K.* <8ƍ.\U #Fȶ߬YY7WOEAAAobQ0"((@t:]zݻwyEbb"̨Q .'5^iӦZ*SӧMVIˀ V˸\V*((((Q#  t}sDEEjnBeʔ|HHH|]v%))]vI*B[nj*m搓 R?yy2s3RR 5\O?}f릀1Bٳ%oܸ9"'N:+nX P# YJ˕+W$ϟ??]qxĝԬ$&&rEY ƍP ,`ĉ;v͛7ӧOڵkgv|ll,?3VYիe̘1EV~Ξ=ˎ;(Qd_}+ O<%˔)Vbanee K\\\(PV!@VV,lh=z4 |G׍ڃdqeaÆJ B&D) HrIѣÇO]&MH[ŋ4jԈ޽{ӰaCn߾͔)Sɓ' .Vիy G}^s0|wNɒ%Gɒ%Yf K.q&bmmmqmKDEEQT˰lٲZ HvX~پyOx^R2-4k Ϭܴ}lAT/`Db$c 丳8::cI?r)+\ΎuYMNNf޼y/_ݻwtR.]^/$$1cп֭[y,eA 퍗UVͶlj#9 !Y*FX 'b6kk\ 88⢀Rq 1115 ={4jOLL'񮮮|F:y~MbFQPPPPHGQ@PՒq r*K_d )DTՃRZ5ƏO޽{.Æ Kw#A=^ۛ6m%Ύr Əm߄޽{eUƾw !39W09TX) kkk>s6mdRh4/_>- 2eDJnX{lb6:)իWgqj:rX@lmmW(((((( Haeqjĉtq)$...āܽ{mEѢEr K.5 _8z(Wt  b„ *oooի'XeD “'O^:y[@]J2#C&`}2Ztóg5+3- `y&F̈\rԪU˨ĉ<~8%K4iEYd -[[[[Ysɓ's-)?EQDʊayL>ݬw Doذd@ɓ'e„ T\7nñc$]?~رc4hsϞ=Ns ƏoT#+ .dÆ Ӿ}{_X@>|H޼yNל"cLKݺuPY+@n)Q˴tQ;sl+`7oʶtRbccqEq{HiݺQ{pp0dL8Q}ܹ<" .XԩSGQPPPPDɳ7o^ի'ob$}Z@@:SNx!ƍcȐ!lRn߾͞={o$ݿrܹs)Tz2'66:–-[8~8 6r%@vd5M65 ˠvHNթSGRᕫ`ts2R~}5kfn*oDJLaiӦ8qBQPPPPPi(TPTՒr$U׳s 0WWW̙#| * 6,XBXXׯgԨQ& ^O߾}y1{!o޼8q>Ȣso a7; zTXw!UפO>;v'O-W++^ze8VKӦM%}J̖-[MatҲdVFKџ?ν{dQPPPPw( &)RH},Z o>TO?$Y00;d?~5k^?{ 60j(IMNXh666 2d]vqF*U˗(wAJJK uMt邍 7o6)37] r KsqYڵQ{DD%UVԨQCeҋZިQ#j↥( ҄sΝl%'' n2888///ڵkGRz*;w6#BCC%eܿ_ŗb…ښU,!::˗3tP?ԩSӃ}}}qpp,hms666ci!eHJJB]#;vd=+VU HrPչaʆ%&J2i5kl(bё5k* `Ljj*Ņ%Vь;*U̮]8r+W6U=hVXСCqrr=V"!!#GJ W^tԉI&ѸqcY t:ޫ$,, 暼"E(i]޽{$,ԙƆҥKRreIFKƍfԾw^^~-KFΝ%@s1`Yb]O(((((( '-,[dW׳fʗ/ϊ+:u*7oޤcǎ)p$4tRN<ڵk-ZX$F%{9X`{L[k:vHRޓ9u$&&CRXVZQ`AI+@ŊtwSJ|nٳgdj`dm&KVo<6k,Y2rb1$$$GY4NAAAA߅(dbΝY @\Ŝr<==4h-[Ν;|wFnnn(QHFV׎w2qD.III,\>}ȊæM \,N޽;@||N1T,7OSLl, Rףjѣ7o6 ` K(SnX*gT6,nX}5i7↥FQ@ _~d.YoժU4j6nhJܼy3=;N)RD 6θq,g ^7:tHsȷ~˟ɶmی4___YE4粔Mg[ Yc ӧ/^ȑ#]]]ɟ?gzŋoȝ8K>sr=ZٳS HZ↥GQ@qשS%XbxF=b\p f+TӧXx1]̙3;Z{̄ mڴs2w\ZhatϏM-X(?OO\ALLLzm , Zb*WlTCRLX JҐRJI;v̢:t(VVVF;wLp"'ix 4mT((((QA`РAܼy3G- r*V]\ukȑרسgwa3 ̞=ƍU1 dРA[2$113gX~޿$((Z- .ʄDRѧOvMttܶHQ.RVPTu&DŋKSٜߟo޼)sjQUe\]]OٳgtؑUrJIgϒdq HHHιV@ҥ2XQzIRR>>> ^+stvvPBb mڴ1jgΜ%CjF֯_ӧOM5eb`ȑ?;w~ȨQI- ۗnuz 
Ϟ=˵G)[l4qssߟE1x`ϵkװΝ;߻woN8lݔ*U ˗3yd2NI!%K$ K2vX7 (@jՌ}}}-zNmڴ٨7k]([Q߬Ǎ9.\Pț7p=Z~Z$gСronrȑ#3UՒӧO7occcoС{avF̙3]ߝݻwKǏSSPPP[X@L0Ap)Shݺ5 K8::u}ZlINxdD~ br)QdD@@,\0S[LL &M2.7;v~ӧO3|p _|!9> bhC*VF K ˵Z)F1eA*ʊrKKFyJcKeF,VPPPz=֒}-ZdL\ƂX[[h\xd۶m&4iR*˛gpʓ'OQ|ȟ?QK1cƘt}DZqFt .k׮m\m _) @Νiذғ???6Z-;!<#yTrd9AFRq-*dLDV%#9Z*> T_+܁x)w5jo:dիW믙ڤb* ՓTvDAAA῅Ǹy& <Ά z~{n&d׮]1&R1ݔ ?C~Bj՘?>666l߾d NFj"_| ;2aJKYW^?|Т51Xjɛ7d SNYaJJMMl)tW\'Jcܹ2IY@I^d,¿ECN3:fkkˎ;$-9-W^=Ο?ڵk)\pcR ȃdz*\ kԨ!(;R͛۷o{nɺ ^JTTQ%ixU [-PtiI@VL) &dDs]brgV||<ΝH)kFmYݯ T\dǏgRfbr2( A0` ˗/f͚DzsJ…8sJR H\\1b/^0\OsstiɅ3YZja*;%\)+++ʕ+m?s K5D^nŁ.]Z2%u@@ѼLeR *ʤd֬Ywt:0*((((Pcǎdž B~$%$$϶bܹs>}M@tnݚ\tN<ڵk0DEE1~xw )DJ..MOȕgNҥ܎yeח&X , *U]vu܁ޝ IDATw4lFcԮā((((wPǏ7Yn]-Zd.>>>xxx|lѶm[={( /_dذaq!~Ucޒcǎ7n~vJEeUdOIItH'[@3w?G@ҥ666րK>GstURڸqc-) Hcj5&L<ǬYwɓG2éSr^ESu&i pqqhr5>#vǏ=G֭ٻw,9Q@+>R ׯfՕ<9L;;;}vެ\xQriIo- ,Y2Wea2aVjjHδoި=$$$]  Hʕ3޽{w*>}y ( ȿd>3x Z֭[3- #""ꫯYd6&S$ k"E5j7l۶۷K[`"Δ0bɅUvƵw^٫e𿵀TPAre jhժ 0'888ǻWƁi7,C0/0:^JL[YY1n8IYf͒t (nX _̸q8sӧӢE @q)_<˖-xajҥKK) &]:tQѱSNeʪ–-[W_Y2t,X >M64lP4h [Frrd eq-;ʊ=z퓐@HHEs3PxqIe5'5jHfˉҺukfqqq1j߿aQF@t%&&Gc]M6dɱ۷O VZ >Ň)7;wd'Æ ܹ͟??+V@RIҿ|?0A` A$[YM籱DFFj*~jKf+O3n߾ӧ7h@rޗ$?@`)(Tr>}dsjIڵkDGG[,KuU.^ȓ'OdǰLvMjppp믿/&hE|$7 x^-KAAAA 55Uطoн{w[IHQTBB^#GW\31)SFxCsΒ}-ZT4h:q[+Ъe aѢEׯe]OrrЦMԙ*RrBɓ'ז-[ر#]Ad XiF24#|B=nnFc :;KΫG&̙3fMvڴiG^/ }ʺJPzua׮]BJJg)̟?_hРf}6Eɒg :NQ{ahkРpAAәJ*=L[q? ʚ^7onrJv"ܹS=_lYoAEP߳0a޽{feG J^+!;jZ2j^0j(ƍȑ#>JP|_ ^mիW qqqdϒ 0|Ebk%1x???Ϝ9suf<CGh ݠv"KGBƆ_cڴi888H^ի2yϞvQhVjiC!)@ݗhՐesSNRxÆ ={6-x[p2]*={qF9n߾04*VuTWbpE~z#0˗/FDD0oذa*=T; -e=CТEs6lDB$;z$RS]J8_gpKO/Z3~o߾/L4_~YIjj ~j${[@=FEmYh nnns&(-AP$̀ qwc\t")oL> xz@` p\ iHHڥ ,H"2># LiGh]BJ/K8bbE 1Ǚ_yÇaoAP0䱆d] Qzڶb%RJ4$PX5TPR%_WpF(w'3b1cư`TT2 R3NS^ދn #:<?j%?Kӣgwn"O[-N.(Z"!*'W"yx~a8:eqL0kkkI%ϟ?gl߶ ; >q\GJTrNMR7åD+YW7&""A{|ZHDw`N x{,1l0Ew4262?>cۏ8uxyygף( S?ϊ :~AaMH!)BXXXc=<Ga&$jS)mlLjHIԱweι_ bŊ@ٸa#'WFGr)}y4ÇP^=Xx1#GqB|}: ʕ+ԭ[qUutx;.MTI߁Ɗz^Δ:t )  T{p"lm 5M!GʇAGcMrJ2Ÿw!%^FmmGj͹a`$.|I|w̝3sCj{S[;;>)̀ǭHHLh[ha>9W܋;Xᜀ@69T> ;ΪIYkޱ,KtR+a/lL0p:&z|8ݲ%D'3S^F!!!TR2vC)z;\yn種? 
E@z]NwSz(m7q3Wb!20lғb*4nܘŋSfM@ϮSяn0Zfrx|0 ?۸=v-tis3͊2!]p9tj5Q>v΃pDF{I 66pzZhTQFh.m.@AS P17L':2"@d1}F-Pi l=t?xщP26 eX+.= ?U->Kd9XɆ=vId.HJjzq_Rؾ1Q PﭩPIMbG5qW3gΛ]]zZj2S1ˆ T x+/W`o%-jE 7CW S~by0=H Q719=ZԁӡbOKYʊ̸_gXyS Z 7gx}}ϝ{9&B<bM%?f KA:Xi*aZW 4X3ޓ9\JymLNJ-2dׇ/R 0DŽ^d+AJyhZC62.y\N3!/^M}Ѷ]_>uȓGl%+;Ԧh1+Uyuaㆍ&*((([x7fyͣaLTvL.y4p/&}t04_p& ~"2xVx̙3nݺeZ$;vWT u f_ O{~b 4}S >)/* #B,e2k. 2n`+ $ 4%g @Lq0` 4_p&~ Nvظ4\%Z.K-8#f *fڜp.ڨ  plUpK螒ӴdV䳁ϫAUP4/jLpLN+19]Fp8_3V\[S甆WDV3n5yT<ӷW*ƫ'gK*Gs,О2X᳏qƂ)OÄR9rڭ`GEӘcWoMI+a2p7BT>\vv'[y'+,{S?zLHQy 2?yV› ™pQ7d*{9haY{h8/5P+=$VC}m>K @;nÑpyW޵[l5xoM*}\;NJ:Q ^cG{D=}[8p{g:T믙x-+5e³WiΛ×_~?ܺu0vCLǫ'l[=vq݀XJ\؆Sef4+Z8 toDxx8;vL*q:sz]h8SgXJX4w +o3yd˗-j.bB&EW!!.wiq< B}.>,Go]޺I5)$Vm*.w( Cт'pBzDk8ԯuR%D%d~ؾWn=$۬H辶E8`^V @d2eɑgsRa~̐E9AM|QS iᠺˢE$>柆>5a] ]`>wVׇ{tq; .|8g6ZZIgS д2Xg Hdd$[laꇩ$@Ю"4yj]oWO 5ZCOČV}~vG2{4n i:7B`y,M; _ed>]C,Yrz濇y2PBbڛYoAml͠ҀGx]!j^,lBd:Ͳg?aRQ{iVuãUQ 1CTmW> y.sqT)OW.O_~d 4J#|}}IIMgQ&Z,fXe]aJY01maz;m[;SӦM%NÇY&5]YYkpںU>`8*αkշAj"*`:6CA7 U0*ackAr*|%@m킀0ߦd+/[B)P.E+#Co] fɄZ8ZgCgyW۶im''JHZE*:)ǏBEEh%K]2^엑Cw]cj(x 3LdqՄiVЭz*~x GV P|K0Phfh6=}*X3s j٪5ˤD9J PHMAgARt\ cex,MľHZ$ťHgw:$k^Ch+|83;^r Ƅ$͜q)*~wZ<;p:7"00Ry(` FB){ :ykvl!00PrLpp0 ;ӯ`>3< PG"ib Pp渄;i݆*B""?*]i+^i2YAa\b N3?T"f#R0,EyzqzDGP p5 Bk+ {eer)=k#aD%B2RGAD/<޺ce$&^wo'(߇/1\f{9VHB$(w3#5pLRw06 Ш,3i_m`GxMɠTP-/ܼySܷnFC IDATP5KirW[}UhtbA")Cd"i6ZRʃ>4Yg@2OP5mic ]ڂGФvU{389BJ*T1V5tM&h$I@)~vUI%_ 7)Y u0#wTP]:mI`Z¹bs?Hq7p߬DQߎdNX$(X6SjHNRUqŽ"eTJK#v-J6vϹuU λFTfbCT"l#Ek֫b\\/QiYt>*t'@մS 0&]%; *6O1n\]xU+# &2ޏIMrOmV:|h#{f x5ZAZ $k3Պ{T*OSp\?w/#evzqYPν&LJԐNiwajvqoZ)XޑwcЧYїFqvaq|-)a 81vt*bO|d}zbwyP[E'Ido-rozF]2ٹWCRR2>|y %)(((Pqo(x :1|aEc!H`e(L+ee| ~,$ZP2ґ ynC{+hPX1MTHmo62b#aCM;o| I3jPRIYQ\U[ 6)= ?2[B6P9|$-gb1},-_%_0WUE-T4ɣ/oN\0n4 V Xd| .b,eP4$sSz_ Y\tԭ3'%%Y෕Ltٷza;Գl^ ][%E}Z|Pm juqj@4II'TiS;GkRRעVHzJr|*|Y =ѱ$zN aJ(ےR/&x}/¦>GycR A6.FmL}; 6$Lإ۽H>i񊜋2{|W (R$^ړLfqR~]^F6n$2]D~63ɌRc;#-=7 Cz[+w* hXA kJ+Z?^ !%ɔ# đZ(jmZ%~i uNзL8 iɄvQB b,A͢oB%vϼ]\PR4lᣏ 
m'xB`&DF|ll3{Q,qؔs!b]wp.&kJwK[wϲ>"Mfvcrl`pϛ9{NxH}r;˸vWHDAtj5bzO~&#fH2OD2TP`1>w<&5IgO֐CnP>R6k;m6+wSb w:%IqtYw %Io~dQP;;")) r}LraiRԴ2Gjuk qT4GD8AEApǁlT |<9>Ϲu>!#oA /֪EXiN\" Dfo5[XTD KiJ^}Bլ)\zPt, L)ُ\lw7UzK\Fzc[լslocǥK<R {ârl@x8W.I{Of͚\˹ss Vt; ɅKKҶr |𰠝{(/0'ljX]"U5x( L^A8<$ˍ./O|Ci\5G$$R\WP@s%wlȏm5 t:2R ]Z{3[cwWVd鈿DeYISdb&qbQ~})~=r~K!J*{ޡ'ՈALD1ids/2ܯ,lh,8xR * =rE /7&,)$gy;_;9o<&LN$@364Ъp'%ƍ I ԫW#SC)HlU1ns'Ҹ -:3 TSgow=Lj8 IK)h{,)%nPXWsY j*7^4 9g9['}L)"K'L'+Dլ- { efK]{p{i cnnkVh&Mp=.TF ļm7TW%?vN=Y8c?紷ݛwrbw7NnF8Y1pj}9y.'Z,r{sJ-hN\+A<;GI^SE)8*V*p$]S˼+wS ?MCogHX &y*9n}kxG`VULu0Dz)A#OQ-_42烝^ʩGe #/Q '65o";v {58:u03+:̿Y@i!`S Y_/KԲR6j{c2JUdml#U~|6F@< kH0[.oÕDSr"+`.l s~ hIۜ[mtΰ}T<֯KvSg\7IJi_؂@. CK%I+"٫*Xg37g?g_G%(`yh|R"^.hXG۷=ZY<5˲127ڨD+!O;yG>tEkժ oN-;%9 ,n&JYrK-:m]aa][96糹Mm~~_ǂ}9h lm)7G[S Tw~+!韇~u!ҽ~LN=7M&@@1J1@u fzl<[oQc~B9n6lq_WwteTFJjnކΜ~;P {WhŰ-r"7Ҷm5EomDZqrML&[`lʄt{~xjL!@ԨQ۳ 5:9Š()V p0c`| j`HȆ/\a[7KuB5{޾C Jvp,$|LoRzѶۤ3aYIW3HWwIɏ}ۏlF>_H*G|}{@ʂb+U~k9ΕXp~2,Y#|L/0 Ilۛ@ְ0,PV=tʄ!9w륺^ 3[%M,Ձ=0Z ,A}%l/'4@6K[ c}xAʖR%aRMjҨR\+Ⱥ\哷łм)Y"t^oAtU$&8]5nu[(tNYUU0j6<lW}D]Xpr7!~:5M+8x֏35l8kaRE'%A]G^{JBbPWXCZ}k C}@|^ U p=V7c9'ˣmI`m .Nږ4lT4;t(`V6T9-৒*ȄnJ)l)fic,Ձy8w/&pdeL,^p%:SMuanl>sZ[q]V+{.&?N=*4ރD0nUYyG#uvxQ8+Uۻ6l8ki޳`-snЬG}FSфm*w,3tвﴌ BU-L\عs']v7n(.U8QF%HbhaE=Xr ^Çɫ5\HHp5Im`gaR/b<>GJA]a{ytR߮&Lj58x][ڸ~_6``XހdI{&V;"nA*GawwGG=kC]6K.U. )h|Jru{S@}%QK٭eHqh{)$׫ij8gRxJ`NWx:LGJq5*JA_|Z4!z_:O3'mt.IAVwe~ewN. |G1xdNm`=f4ptRЮ&agDnIL׊\ؿ?۷g +%+ 0TqnפdqORƋkt +wJW.UafM?]9"ᆲlh4c!=_[V)Uh|L,T5}z3I !`LK}ICI^/=/'#.pCH:of+Eْ8.bs7sڝCLɁYܻDzRVvLj@VylF'MM{Oٿz5Vg4 3u0qDW5 I\Zӡۯ*V̙ U--d3qt#G,OO?m:VQZbSj,?V)+`ܸԪUλ,${ViIʄnUdYܟ?'t駋b%CaxY3U$SrKevkCN^=M]Yn(&E Q"NU">6l_Oi]ylm{o3c,Y|gPM3g09azڸ *=uFiG={rM&}h40m :~mr->Y4G`oC#<%!wNZZS%U@|A%%`gaØ9s&=zbůDMT-\רd}KKK~;ͷTz`ɼ,>(///,Y?]vC|NgIu=(x-ѳgO80S' -+]΃?Уg/Vҳh*q*=jƞիL\RQ*nRУGOV\7leOj門:%Ǖjlۆ̟;pk ^cyl߈7c#Tr=sv-֭[GΝleLV˂P JUǓcոkt/q=*˂,@^Pѳ'CA}+0/ڭ ^P?Y(T*>>}ϔAQn+p(l=lvA}:Z۷og֬?ʌ}1R W}T5(n'J? 
k^P(tރl8®@*XXa~GL-6ljժeֽd )Sy s{B*:8BٽVZ( ڷo%5E}B1@جW0yd~1B->}gan">M*TgU@}PbQمaC)7ɉ93wau6աF-ʢT&O^f1jԧ,^ʕ+ӳWo6۝g+R⸗,%'Qk]vڵk[oakkw?g%5t(:}V~0a"gBƆ>}aO!".<*Jő?B@%fvصǐy游0s~9 խԮT U,:!',[Z9ljӧ5"+ő,`>ЫHbm +Xxzz˼8( K{_A.Qc9o ccc}]YN&P!(uX}%% ݻ( vB@;m+Aqt3ٴX! N{ z38q,>dddS1 /8|7̚9^mEhl-eB  6łȘ&L/\I*SW7[)<)ޗj"zSAT22fԨQ̘1RIժUeժU8q cSOOK)p~3AkkfjBzF&L`ʔ)xq>h .S>ݲi uk $p:7o4cѢ|g9s \]LMƌABϛ h PU*##Oرc ϟw@NSH>Jaz~&lEq]_av=z(OF}6@ƕ*#%2rc96ӷ]6˓SN1w B/\ך}+Q5-06S'|2WN%qx}QWiڬ ˗^z}! IDATRp=Q~}aif*J@>*R4/̙#⧟~J* kKsѴiSadd$&M$ѻwo-LLLBP(bҥ"**JL2EV[ږߞDCZJB޽{WoakkUh*cǎ ^/>3RD```6cccŌ3Gݺ_sR4WO̞=[ܿő#G{ʶ#^/fBBEF Z*x Qbڴi"::D{Bq)+^yJ῵޽رc.'f͚%<y{wfqqq%ڻtDos3BS/_ppoV4_O( g'GQF ѤIR;88X|G¡jHݺv۶m+5OII+V-7&FF|re1~xQ>^xQ|'ZuB홙v6l%ׯmj#LL YI|"<}|D6r#G"$`ggĉO %>>BAʕ_s;ydfΜIjj*GCdeeannάY`Æ TV)S`nݺE`` G͛ӨQ#N>ϗ_~BÇ2p@jժbΝ?~WҢE  ,]A7r RN ǭ^>y1rǟJhh(GPPR%<<< !uᤧcllLڵqqqyjWtΝ;Gll,z+ၥSpwwՕCbddD͚5Yf<ǯ9`Oxgfn߾͛7Otz=mڴ!::CQF Ə'=z`̟֭?-[F.]ꫯ]6ބРARݻ8q޽{3aT*ӦM#99Sݕ #++ .p]t: 0ooo~,erryQT^^{őMXXn";;+++ׯOJJ>DADD/_6 rssIvye_r~QfM7n kkkĀ8tDjĵkׄpuuh׮ѣN3iҤ:tB bʔ)֭[BR ={@Zr abb">C-d1t:ajj*~S,lllرcEBB/v/_.~1w\VݻwĉE*U^YfB!ڴi#DVVpppC ys @lܸQ @;vE߾}śoYfdddddd9 K?咦ի1...yk4|2~~~ܸqdw۷Yv-6m"** OOOO߾}3g$++oaBA&M ZjVKxw oşhhР .F͛7IOO^+<III :`Bc;#661cпڶmVۛUF)pi4P :5k֐ؘFӧ~222222O,@^ؾ};>>>rM6ajjJ\\իW/?7H:((f͚ѰaCZ-˖-ؘoGGG-[ٳgiժ6l`ذa_޽{f$N.\̙3,'GFFz01y*r^J"`t3!g&00 Fí[ᣏ>"33իW?X,sʥK̦LiKIrrr ڶmKZZZ *ꫯBϏ;vԩS>`|>|;dƍǁر#:`S۷y뭷psscTZ˗sRA̩Sx(U055-Tt8qI7nb<ȪUCN˖-E s:88лwoZ3zeEFP(ejSFFFFF%wwry}?N*UL_~X[[xbFM9x 6mo[n;v?s% +!O Ç' w2tP5kưa,8.\ȲeXh͛7 ,,ڵk?u`qyjquu(r(2226l-Zח۷oeL Lʕ m O=FCxx8IIIOmqS,@dddddYdddur[ؼy3jhҴiSΜ9CVV 8˗ͪU裏HKKcڵ8;;sZnرc7ol޼W舃a`ddIJeP*,^pfϞ]/F'|‡~XFWOXX,3|pJ%͛4mڴ6f̘kXx1JKbffFI8;;ZBPдiSýۺukԩV}15m!!,x222222$yٷoIII~uyLhHOO >8iӦ ݻw'>>cbee֭[ILL$33FÈ#pqqATҢE BBBŋ'}| ׯ_/вeRG+V@Pfqq[wwwXd  `8{0%n޼eJFFFٔ) y wwݛw6aÆT*͍v.\###Z-gΜՕ=Jڵy׸~:mڴziӆʕ+s)z= (mHI1bDKKK6lUK$&&rݗNt:-Z;CʕI,T\ggMzĉ) Ctt4Ç7#88DGdd$ ԔK>ոT*7.sٳg̦Li Lff&7oǧ\n߾^O5 ؊ww<$???;FHHL<!>:VZb3|||066f޽,_FիILLի8ۼys`޽?#O͛ Qfڳg7n0E-׳o>,X`XjlْҵINN.QƆdɒiKKY{xx`dd$a$$$믿N`` 
@h4s8w-QF1n8PՋ%Kйsg@Ϗ}&ݻwwތ5*O3fo/YWEXXk.<ZF=O>{… _yɓ'Q(4nܸH[vvvT^=0|p޽˖-[jlHݻTOO<}' ?5k, trr2{ˋ RiQ .2Yb[v- \Pƌݻw5kڵ#""֭[MnXx1,_~!}rO?W_}]7nǏO> ZjXZZwWJ͍7عs!SH1@qqq cǎ%==s-\{{{ ۂpss3ăE OOOZhWUʂ^U) yAU^Wv"##޽{FժUQ*]4 :ö)oӹsg222OIq!>]tCTVc2feʕPZ5NVߟ~ѥKNο1}Я_?ö,YoP#Ç^cllfiF!888O}#Fp*BJ\2)#####SyA9tqqqZAۓH J<~ +xyy)Ķ`J%7nd݀VZ1avō7&..9s0{l ĠAHJJ~3?~ߟUvLфT$==˗EСCiҤ!`ڵ0döLΞ=[jlp$(2_h4eWFFFFF$ ??/222ؾ};{fǎtԩ㌌hРA Bl?B!C .[&&&tЁ}fvMtt4өS',X`5J222ի666eyISdggsʕ?qqq 6,ҬV̙3/xbCE!Z=zdhjH]σ }V"99eA:uݰdddddQd(7Cغu+@c) {C۪U+֭}ر888Ŵi ۫UƼy3gϟgƍ 6>}m6$$$@;i$9Bnݘ?>7o|k_ƍdeeT+ Z: %ˍ`Ɍ9F=zs >ILRVy2dɬ[3h-[RѨQ#YѣĔkZjΉ'055jժ:Vp +fff?^gƌ\pϏaÆ1|ppvv̙3 4;o>ܹ1gߟiӦw߱f9rd^/[ ސ;V@,"?J*7٧jUڵ˳=((cjjZ&ȝ֭[" ?ժU3'tlٲŐ'22WWR;A:uTbի۷GTW(Oh///6m3k֬O>y9rSSS ;×_~ c˖-ONXX8::wWJ… Vݺu˳=wR]c@@;vʰ=&&^ h4Μ9Cfff~~~rرRZ*ժU+sAlllٔ)Y`z6n܈wQ9r8zmpyK}VVV&H 0 O!6B5G֭[̟?VqwwgӦMy*?:$$$vZZγ/((ڵkckk[ฤ$FI=իW}˗/GV3p? Ot\QԨQ+nX22222yW^7-`ӦMR޽{h߾) of'NRJ.)u5N2>ɓ'3qD<Ȝ9sʢFqʤI7ogΜyyYjf.ə3g  ̟?)SPz}:fРA̜9###/^ɓ'Yx˔KN˖- BRH`BBBXxq!qUSɵ$Wh4.\@JJJ}~~~DFFk׮R-Y$&&۷oM ?UT)tBBqFzZѣ!x뭷ؖ3+W.tTT!6B3:W?9Nѐh>vX/^L߾}Yl,] 2a ƌ믿7nw} CPꫯwWl޼??B僂W^of„ 6f͚8fѢETX}ϵkמZzBBB פI,X6G %####O ݯr1\t˗/ܯr >jB(v[mڵy׫j;\b HZvvvۗtV^MΝIOOgѢEddd0uT͛ǬY4hӦMĄ>_#,, 'vYt)fff߿V5 KKKXb *tFcjjZݻvZm(888 Y pƍrsڴiÇR )-Ħj h4XYYaddB 0d+ׯ;v 8Jņ hҤ ̜9#Gf֬Y>}:;{yWxгXd  ںT.\G,l߾̙ShZ0lذBS Ѱa"'*T`ѢEhHOOOܟlDFFFF@ /TT֭[ҥիWqtt|`xF(tn!;vONrr2gݺu,X+WRvmd͚57`jjʾ}o1LuVʕ+iݺ5Ç'55_ ȋ͛f AHJJ #Fcǎ;jԩ5k,tPPM4y硨ɽ9~!+V --T6lJ*s7SNYz_ ?^zadd?""ӧOܯbbbHMM5dyJ)/۠Ah42b>SFE zŷ~˔)SذadggӦM^ @Νٷoϟ'&&(M7_ Z-[R~BajjJz)}{jEƋ|iܯrh4\zذadzaÆR377ݽHbbّy^ܹs\zoor9M066K.l۶ ]>M{{{ WMT2|HLLޞYfhyqׯ$88{UM }x >LRR̜9 .<\v NB\x(4hL8Ս Rzu;wpΝg PtZjѱc'^.S#LaڵkW.ߴi;v t4AQT,_>www*VBΝ;8qB`ԫW={DѡCXWGbŊ!0`j!… 7Fj^gСg 6۳Zjamm]ɓ')h4?\ +VH͚5e"9 IDAT####ܑ ?={?wLL GwކmcggL5 EYX"+PMAHOOܜ*U0l0UFJLؼy3BzEb$$$o 75j $$q=makk̿"99UV1x"\Faҥ?~E_~A[yZjOwRYbnݺQz5rBѠ8sS0yx".]*W[lAPУGt:QQQԪUmk4 o Ϗ7osNöӧaVZOll,/^dΜ9Nزe Ν믿ˋ??7oҥKC;;;N:EժU1ck֬yq[ @/,NE`ڵ0dȐ"2vX Td]^VO>Ŋg.%MU*Ceݺu\Ĥ@BBB.3222222H9:t(кuk*W ٳglq@>M4A|###ꫯ0a>>> >cooϔ)S mҤ 
k׮Ɔpbccٽ{7ΝW^`mm͉'022_~y {aЅhZz聓S킂FբV1cFm˵k׊'BpԩgrEUlAL___t:֭+ў 4(s&HRɪUpssC3w\@vZ~7BT*YhIIItԉ3f ^ X?`&**pbŒ%K+AAA\BQ*AޢE ֭[*(Wr F 刿?]v-ʮ]_(Ӡhܹsy=z=zдiSq/Ԏ#_細`혚O?ҥKYx1_|B3f `̙"NDD\t JE5ʻKX|9j.}ϯX!e, O侸6BA+֞VVVer9C̔L#dʅW @lذ\οi&pöӧyRRRJ .4lu떰kkkQJ"meddڵk+ @8qs^Zm۶Bϝ;Wbԩ>vډ۷ 333ѪU+066>s-nիW *^BRbۮ[Nŋ%B̘1~f͚nǎ׮]+]ddO6۴i#JlWZ ;Vf6edddddG^)'6n܈;w.Nڵ ۶l`H[S^=tnըjbcc,fllO?D||< 2,]jԮ]\FԩS8q"VIHH`޽ҦM"933tVZի˹Wعs'7o,1|ժUDFFҸqWZ-m۶N:Ŷt)))e%W- wB,e7,,@ :w\ 33m۶&}1jub}dȐ!NlciiYl!~.]Rʂ =ɓٵkaW_}Ř1cOYb:uo߾=ur!bbbhѢE_6t:/_.Ej4mԐҹ0111)QЇrȑ H"AP{'J*8;;zrȦMmhuVeccc<==e"####ܐH9IPPPe:x ywQT_~7Mz5t)RCQ^ DAФI$*U&Z]Z(!vlv6لP~>>;{r[|}}9wfbղm:wLR^:֭fǛ?>*bŊ_pm{ꅧ'ʕ\pgΜ?#F~cܹ2ajժÇQT4oޜӧO?"ܼy4WEv;vSYO ---ӊ RdIwdF9^zVƃOU∮/:>Ljժ[JZ+V|ԨQ;vٶ >XJ*_Jb[ 0'O2}t,Y¡CСC,YVZqg[W C䫜*.\H"Eׯ6gŊ=\n||oZ-aaa(L֭[0`7Ȏ 6m5m>}Xpl{.^^^emGIbb" ..~e%$bŊt҅޽{ワ/dddɾ}W۷uW$%%˗3tP>ӬY3\\\([,f\z51"҈|n HRRNƍ)QDfͯBB 﫠`@Q@^0QQQ>|_9rر Do%11}֩T*֭[g=00Wg6o!ԯ_{$ٶʕk׮o~zFiѢ=z`̘1?~N:ѳgOVZex%18gGRQr 16lmwq-Zıcr=BD=(UT>}6hJe`mm͈#Xzu53o~I1RPPPPx.( &44FKD4mԨԩS899QXBK1h Ξ=+[?m4Y3͛SV\mիZz<|,s99}4,\ vڌ77dcƍ 2[ .roཽ_J^ѱcG*V([5N4㹞V߿[|F12S,,\\\xs?rHRRRXf6pBQVӠAEQPPPPx.( &88BF=͓.tӛiӦe%I~0alJ"00-[p-6ԨQ5jp}Zh9sm߾}{*VHPPÇgܸq;{fqttd۶m+W;v{wVYVd&NG}Ą  @Ny̯"""0,! 
x|W\t\ V믿n kֶ@א1*]4ݺu#((8+((((P9pK3gϞFGxsf 6m⫯S˗/7Aɀpttdɒ%fre>jߟWַm6˩jԨA֭;UDDD[oѲeK^Fa}Fͭ[ؾ};UT~cϞ=|'Fm+VK,᯿2k׮޽SNѺuBFyN… ۷l8#GxbN*===p[l!**bsUV ;;;j׮h4ϲ$&&=/#( `ԣG2XXFw!99z='Nd׮]uCeԨQܽ{wZO+\pAV5G@@wJWWWVZ ÇFSPPPPP(x{}iWwѣWLΓpq6%K$$$[[[ʔ)g6H#GM˕+W'Oܾ}ӧy7qwwgѢEYe*yѼyszer1rHvʹsӧ}9x %JUV<_< ыP@W}@v˗M|}}9uqD.]ZQ@ EyӲeKJ(R M6&Qӹs*T0JLh)Ӈ۷o/^ƍrر|ٔ.]ݻ&bZ*F!CdEU~3*`˖-ܼytf|ᇲʎ;ر#ݻwg_ҥK4hNNNFulٲylTd6\J||̐!CYtYg̘ٳo5jT%+(ry"*VhEp8`,8qy_S\9پԫW$\nPP˗C%**pUsB tԉ (m2հaC@qDWPPPP(<9Ν;_UBBv2~\\\[66mhѢQ ҠAd;U)V'Of͚5rJdjQFn:?~l2JbѢE4hЀ=z|GTZ .QLFg?dʕ,_~:fC .\Hɒ%NZ-FFfʝVİad^RR% <r ;wB͍ʕ++ B( ϑm۶F޽_۷o'==]Vy!Or.J(a-ZDMJJ2Q} `LSN1?hVD6mɓ'-)'ZV׮] H!r^jR޹sg^l޼^zOd7iD֨Q3og7oCKh\9ZYYzjߎ^dڷoOllQ+;vGf7n_~emz8/B9bgWK( sΔ-[66>֭[f'+ Y,:ٲe oߞʈ#dC80d:v-Z k!.]2)WUγRXxq&m=z5/0GqDWPPPPxUPB@ښnݺP9BCCqvvm۶Fqqq?ޤjڵfM6m t.^^^4Z?vmjGNd%X~E1cƐ@Ϟ=Yr%+V>=cfO._L-~}]YӺmndǨQ1Fɹ_U@Wg˗/WDAAAA0Qg8q¤m۶ ѵkWlmmm4i±c>|l!!!&$[^$??,34shZ.]jՕ-[8** 1o<&M$۾E߿%Je˖f2Уٸqcm/η~k{k#GZ^-йa 4vbdsDAAAA@ 9ƍmXO9 ;h'V0,]w5 6K|GZbE܅ZAXq׮]t&V?MީSgmP QT)?:o-+E\EfMرcŶmDffjZvN+%텳(_ի1cyB{9JscƌM7N hԠ !!!"===Kk׮+AFה!ѣEF Sk+Ç?.E@@h[_z.Dw]Lku֙yM1c ѫW/]pͺ/P"**ʬb֭b̘1YƢV~3!t:E}= ///q::8ɓ'K.Y-[̒Jyf-Zk׮)))fHOO%J]#kUԱ/ӧ-wަ2Z!\]E>bbٲe"!!17Q Qٳ\?ZykVɤzyd8s> :ݣx/Lٳg3qD*W(υW(mgM#M&ulE .p>ԀYӢUV1p"Çyot 'OE⬁ƞ%k$➌yƏMVHq_ӧsǹuUKihX>FZsweJ{1nܸ|93ܻje44Πz)T8s =mocǎ,3g矱u65V4*-hZ{ p6iKJEղ`|7nݡ_ j'kH}6|$>C~W^?'$4kbP T*x ±Gjji߮ L3kb&`Ŋ|xbbP:%VK1e:<1?>LdҤIYYTwݢ3V*ITPͣLZ4kʤo&Ӯ];2cbU C W9uCCgҤI% UPPPPCQ@rرc 8+W0 '4ykwCƍXj5UTyr޽{Z5jGwcm 8+)&9i~.gJJ _|s̡~1+>O4Xu 朳Q53g6rAZȶjO8z 5iƬX UTA'111|TOZ[uL4X ?Vs㱊I_۷MB0 + s v:n*Wƪ5k[.O<[n߿l2{CZ F!mq_i-VWQnVZM͚5w0;qAp%Z?5 :)ٳgX"|L2E`|Lު Nf,`]$|w=Cd#ŋ .G^M5MeřGt]RQjUV^Ku:g§ie|LV732f0笚Ȕ)S ܺuaCg cBKoXtWiVZMV̝;?-|T@1{2&9kš:Fo7::BCTWŸQ[1d/߭K^A.]+((((Ϣ( fزe }fy+?ӚU<8t|kMtM>79/]DVxͲ7a=ؗ]v5>>.:r,6~-P[联6Ç c%XYYyfC /-t4xq" ]j${M}vvJ/^ !5fPѽ[7QԂyT< r5aСߟM6RAf44q ,F-[믛mm6zIu/+Gi0/û f 6%KƛuV&|l-\Lܯk׮lm#:t;P&2i*B#G0/5 shZF Ί+y&h d1+4mƖmqvvӴo:#7*[.G04T[+W; O?*cspuKXr>GMeܽbŊquڵmM 
'wW)#baV~RǏ?رc-ﬠE?Stme$m'ߧVnρ-ʼ_n߾MF=doi%-B'`XXr4pBVh"==my;:iiR@˴_.}*GgϞt؁n{OX(d'>ζ"=?7_Omt(Ֆ W+tшGE֗~KQ;N={iѢ5˗gRAZd_Ӡ+^}ѨQ#6Ctem2 Nq`_>m[N ȵZg_|ֲ SEj&߫;jv_eɒ%n#x;Bv݇;4l҂ Ѫes=a -E 0XuREHH('OdkcjL3v5+dcH(mۼ&>{&fRh>]maÆL0w( H?~LU^ۿaSMdh9Z֦2ǎ*hRBh׎stfcf˹ =3I>m 6Z РL"?ѡyÚh񍚇Ew?rٙ%,YE=P:=dL'aW3/ &0k,Vа 6mі888dQUc@ Th9YMtwE^C5P'%#iqzg"M [q:مq,i#QwV^JI? |_>w܈(^ )OIgx!,Ul8u 2 _q$\բn8B4CTg"W"4/ UΥK @w@0[ !!F!IѰTHO|DY;kGSN kZCw?6K#t]JQ}}"OGGqB8"jA$>? Fx!l5RE VJu?"U-o>!#t #y!\l# tĐҘ4DM~E45bT': to VGsFX#<4KB8ih}7o&4^=ZPd'ٻ-Ku~@X>haÆ g51 ѭ>^}ɫΡ[X8 QbpFV ,)R@Dj §br;D7O3kk_Ij3K[v O->n[5#b|mD0.$cwo^nވkd_Dc 7 4"w zZy}IU}$F K^8b^ק=5}((jЎB+'`Y1OD5wIM\t5M˞ #GX%?{I+E&V-- ]lܼy3}AC#@N|I8#0i$0T>a!,dݻ< Vw9%D1m#c :o7'@;R[y ayLn'S~):ϙ~Rٺ+cDDÏg໦aJ ka[wР<}):Ҍ3mM) ЦcWº[p?AFMPΌ> 3CתR2` !oC޳`-~'+Yp5Y/0_ZL`eD&@H]Yu6%K)Sɽ3i]G]ˠ'MwGRaV(G´۷./AGɷc~?_zփ!%V +MZc/o E-i=@}?:‡-!>͂NwڧGSз"-K'lmJ.0$|} ~ ׀"p>ݼ!~m^{1$H(G{F-TZVw/РYKrH:5sp0l8 z_ty6~-a!hP ެN)0C&0$e0ӵj ^a׼MPħ{]g<מ3~:޽l- ?Β%Kpvbp'-NФqӐ 0 ?׽fOe͚5<;v(~F$Gɐ,`Ӳ@wV})$oN:A{k,(0Q&-eə0 ]QRd*IK9J;!9C2{ ?l-aeRƬ#j?nm*xCc@ݧeNJay7gRU\aA/,>/s{@ujëI/I '@i'zB2mtM(~><]!Q_00yCߧD0|yԸJ׆‰?_ El5 |ϴMۚP5AA DAAAAAɄА {]R>X[޴n")+\8rR_;uOBzZKGnX`c&at[֭[JJe弪_JHBuh^Tc :)%JoV3zj.fdԯeeP,!Ɛ8a~㛔)msboro8[K9Bz7BC~gCE~X/{6$t"Q:]ۚB>z1V_Gha mg6bo#ct=htJ%9LLצuR969ﵿJg}p#AC/af!סk@P ~lHUALLHJ7w !ŧkPd>{R}f`q7Vӯ"hQB+y$P`HLmFz)DOBB._4мye%d'"( Ml %@s{(['ܦ˗]PDFQoנy (U[$tXi#:0\iR_kaHRY@rP6[ Io 7Ň0z3$lN O)Ny/sy֘`g ̿7Rb"> Z-mSW8w">sf6BѼ*Yba䰞 pFy*C3d[ {-M+k ^L|$)@R.0l};6\51a 4,&d6yKQ5~Uou2nzd@rL)1,),I?#x9/Rõ8 \9ru)jnW~hZ38{l;+((((B1sy5ƛpcBиܐ1?W?N **ҥKXNV˹a&,D€\F=}ӧOSAY26{z2QJ:Heh%Zk`xCY? qN Ӈ/C?o\:q)ҿC0 ?4XzB21_IRoyI.+?+ҧO6J򨠠𿇢艏uFrh[^io7!!Ajj*Z-fLSŃ ag ]~2~YaMRNzm60WWQX -Op!J QKBw3EȇWa&)9$=0*f;M@^0W(߆ɕl#AЧgœhf(黙0IP9ߒ˙K a܇ :͔{p'aZt22Zo<#4lÁp Ҟi֋0mw2Ht*}ݾrZ|Iɧ\x $ӣLD~65v&L;%)lJ6D TvzT߻0t*7XjCn> o OMZzGyX|^R*J(gc!4ǸrtZ?_B|v[UmQPPPPo`1d8'&æСg'PJ";ZyULKDlJUP`QЭ̀M7Cܳ5\G̝-aJbފ,S. 
l'7X[=mdLMCec%5C2\UzKݫ n NJX|d#qRйt"ҫ8 .|!]g fXϥKfTt:l(9t6qmll&r% {]ڐe`z+E 5B ޼g Xo.u2LR[1~^/cQ t^9tӸ\< 2g15 =-ߐ|t==B:n~OMř'f:ƭS$ݸ_)p.FKuWK/ܐ|*d&dT*7lHepn0q}°nа!cT_{m[9~ m +ظ7[;uw E(LKNٞp ùR.##2P42sʝ=Ǡg+˱I¿5,=Y]Zkh]()[}R,93aOtgU%^ y7΃-Z8E?L=QгSx9_>;%vSVԌJzJퟬBŽ"mwf3'y{J6z^| ' c20e3,? )_Yih{ sv@=Sm]8+?(uCH8e8yρV + ͖jaAoÝh( Ɇͻ?߂gJfn-Nh /ƥ+Wtz*V(G}'jXyw-j2Z;ìʐNPsh4St * e7zKʤI#*K~vJI<=>jyAT+K;*GQfL}$Ci׼/œ(x,[.*qs+}|uDyV62< Cp?PrW^tAO@~VeB(%*OIr>ʦ{3"yAuܾ)BAZGY9Pt5&>PQǙ! dWdh}^K><<˵P)6jׯ?6tYM)bZЧo?Ѱzץ]tXKΝs'f$A}j8aa75kTG/wTVJp߰0ږރXR*}I$I Xg\\k+-~6ӳ#&Mʰ|}}i׮ 0aÆ10GA:.[.]pӱcG@Ywxyy1框tND`=:p˗/ ԭ[76na];nbuc`P7 p!jԨ=;wGKτO)t#>Rݻ/kF͛7g>Iy:660AO@:<ͮwӨQ#mzӳF~B}7n߁۷cg7JiwࣹfƸj:uꊯ&-[6Em>^!h7SK%ǽX~VVVxxxs\N/ߛ\֝R]ၕ6'tحẑo>5tjȖ-qOniQG^/!`5Ɋi/;wFѰdό5_&˵\0J Ww9r${#.&-N1 }XuŚe˖"&Mbʔ)L٢8A*K>z߮Zc5.Y?-}H$IAz^XBΝCX[kDW𝌸 ?){DzZ 'lْL2EdϞ]<{,EۡCDҥ ЉK#* DE+b{yVZakc-f̘!3ZCl:F 6nhP ooo'wNaӈ.5bӗ)_+s"ڹhV ~~~};v(JL6=1cGq=>DyVJizĕL9zĦf.e}:>RDV?'O#G6:Fa%[ VJ9rJ3Hh"QRd։|٬Dk݋cʕsFzER^ާrO?$"""̺ϣGٳErS}͝UK,1ѣO?v6Z-1믹OѣG/**J,YD8U"{ȗJ}Z/[Z̞=[O:%  rdW^_ho%[ +FJ|Թ8p@ υpwsyO6ȗJ2Æ (P8|j3f:NW8zlJ:Mkm۴;w +|||DZ+P|Kܼy3Q$I`@;w8tfbذa~ݻev q%J`ڵt-W뇗'NgϞXYYvZ jϟ?ӜƆҥKSfMN>M)]4(r{ORT)ڷoҥKIHHҥK?~;w@ܹqrrzdϞʖ<~'NpYx⸸PtL7P$IzwLgϞjժ9rЮ];]PΝȑ#yrW_}Ŗ-[8~8Ppaիlj'^VҒFży8|#6lHXXti;Io,!fbȑoߞUV#Gsg͚ŨQtJeϒ$I $t3%%LF%I_j4WV%0Ќ%$ƌ#Ft()8::{nۖI2ALL }oaԨQlڴ`3OΧ~*I$&3% 3(P%KfZr}:Q"1J7{TT'Sʑ#3gdӦMٳPP!K||5c?)SN VZi^d>|w}J$IRL+ DGGs ODD{1prr.EB|||)Aj4h=w޵t8UƮ]8y$X:$)~:ЬY3/{4{9ӧO?I$ 3%8;;cmm@vAll,:t0cǎ6^[Z50L2aeȑ#-Alݺ?]oĖRW^5y~H$Io )yΎիg<___\\\(^Q?~ .H@Zj2$yaڴi^ԨQ#~7mF޽_$Rֻw7fÆ ]':Uϟ?gڴiٓefr$I1dbP>Y ٱcIïussH1 aXr)ӫW/|hpRշo_fϞә:uy !:u*nݚCQX1Yt)ݓI$#3U@ʖ-cߤ&}]relmm0L6eoÇ3~x;,X`pY|nj=qƱ~z'T$Ix[U@4M9x &% ׮] ȫkkkW.'g3qDJqqqtH"22!C3gN>SKN C:uӥK_ I$$+ fR2 رcKG%pvv6ձj ѳʀZ*~Wh4̘1ӧO6md'NՕ7orСt%IՏݻS| R$IL@̤V% ʕ+f3E␚cǎQtio0$\xfeeł IѰh"tBnصkCzmڴzQP!;JدݻweC$IzkL* IՇ ݻ& J.*1IAAAA)~tޝQFcK&N7-Zcǎ矖$`ĉtԉmrA->cbb:u*ݺu3iq I$IzL* /_>]}}})Pu18R,ihV Ȟ=Ef̘g-Qٸq#kצM6}bϟӽ{wƎ˄ XnY͓_s玬~H$Io5)^BС:N>Mttt Hxt:jԨ!,H"E;v, 
,ٳ(vvvlٲʕ+Ӽy&nKs|[l///RTڵ+*TȀH%I$2db&CC@I@NC}G}ĤI:u*+WLuF[r%׮]I$#3UeȱcR= ,,= M=!XFW_1e]fpҹsg~~gF%!nݢ~ڵ ???F)_ɓԩUV{I$IRV Ҫ2$22 .֭[o8(]4WmO+e<``;3f 1ҡw̝;3gu & WWW8|YXj׮]cرYr?I$IJ2114MðSY;v,z#GfΜ/{t8f:t(&MˋsZ:t[v-|Kرc899e}4i;vI$$3%=ЧV5ȕ+*UJ5y ;&GPPP 1U 9vJ :t(lGfȑ >e˖Y:z;zA.]ؿ? ̲^Y$IY2IF&ܹӧOmp7hѢ.\XnAse͛gp̦h6m lܸ!ɓ'tԉS2}tVXm?eI$IjVmjӬ0ԪU ooo>}/իWt&? kkk cLrC7Aj4hǏ{)R!EO?zA9hժJӍ7h׮W\aʹk.cXf W^eӦMY~oI$I*V@8~xon.899 nYǏ'[lt(jY|9[?)UG͍o$L4:d憒$Idi2Ic* +W&{ðGTTYï6 46pqq!<<b2FGݻr֙-Ѿ}TTJ$Iһ@& h0:ƍt҅"EMbL:_ HZ.\#GK3(Pŋy oVرc9u7 2$͟*{l߾GGG>""" iӦ>>?^V?$I,ϓ3v' yL2^gjrG7ٳU+f͚5(mۖUV|ӇM6իԮ]ÇsNFM6UREF--I$I"18S/V...F'..Ǐ8f)d:uJuj2ʔ)ȑ#U۾"##8խ[7,Yڦ֭vJvA܈ѣ4k,CM~s$I&t0rASرIٳgyy|jbcc9sJgԨQ/^|8}g׮]8::fqI$IR$Ҫz6oެ{}CЍYAǜޞʕ+ ԱcG<<Gu@{Zqqq!!!SNt5ƎKBRqӦM@DOӱj*V~56m82qYY$ID2IԆ`9sW8޺ukԩCPPQܚZZ*666r*W\̘1CmキM||<#F̙3-ZT .мys?~'ѤIիgp$I$ tHbhUǎqww'&&ӧOy'''b4w-ժU`=znݺ)UV,\EqyIɓ'iժO<(ӶyfΜ9#$I Cj֖-[RzuӜ"P=YuWlC- IDAT}$j*#MѰ`ڔܹQߥKUwf̙;vPZ5k9Bh KZqԯ_H$IC& `_ɑ#vvv8;;9ܻ͛w/qS_79ɲ^: Pm6l111YQ۷www4 4nE#w\rڵ+qqqYnl©SdC$I H:Z şӬd0/quuEAPPJYgĉ͛7˗/3{l DdzŪUXp!͛7͍G& d޽ٲe zJs( hԨ 4H,$I H: tmݹ|2.]b۶mܼy35ـ0Iz6Tvvv=z >lv_Rqttdʔ)m'N|#>7ۛ~2h  ɓ'k/ξ}(Xjڵk8pE6eӧ̞='O$I$֭+ʖ-+lllD\Dɒ%BܺuK)4i"bΜ9M6O)S^͛W4cܵk(U(\DEٲeE``Qߣ8pȑ#jk6UR||Y{k׮5NŋOԩSMA{_mJFx@˗Oe%I$m!+ fz*ɵk^Eyfk:v(Çm۶ړW;TwKwss3j8Փ'O y76zʕ+Yp!OF,Y$b$IL@̠HahGȓ'*THў|B]ӻsA\]]SܹsF]/eZjѫW/ն!CX4yB0uT<==y9W^5֭[Smb޼y&k˗/f͚mllhݺu_$I&21C^',,LZjZq%_=#'E\6m4rʕٳgYp"RO>ѣGLK.ѣGMߔaÆoXYY6˗ܯ)W?׏bŊe%I$m#3lݺmIïJ";靀nAX3`L0Amر:gP5jի @J̾O֭Yf&(&88`cԨQrOI$Iz * v?u]P*"j^M:^MF+WGGGbLSDe7h Txdd$~mqKҿvM|}>Kzz;}&OC߾} .,I$Ie21C|\\wNqjժ-[6űlٲ87A-22ϧh7gLD rns+++ϟڶ|47ԭ[WuyiPsaɒ%d}cp8>C>L=<}(^J$IһN& fP"=xzz8f=zT5qvv3bC+ ɗ_~IrR 3*((WWT5 0yf=hXp!ݻwWmw穤իx{{8.$I6R~ʧӅ NqСCS_eDð_7-sUm[x1'OLW۪:pBu/st:VXAvToܸ~[u8I$I^R߹KR޽{y!V ?.\8դA͛_9==ۘ9sŊ\:@)Tћ6FhBrQ&4iFpelْvq^_|7|?A? CPpjtݝF !8q"ƍ3xܹsCӦM3256lu߿?EKh֬v?S'GQx J*Ŋ+Rcee%$Id !),,L3F̟WѴWbG[ 4{jW^BU/h[ѧ5ӖNv}FϞ=3؟^ׯ lQ3oD\ V+<;tN5;w.]_|:D޹/Z`(Z?~052ו+W gK|c%TЈ5k"Պ<ŤIDTTBgϞ.]կrʉ .X~]TTUʼn'g}& J "E|Z NdҼտK$IVa|޿/O"l*CeZ*!cXr~969Yl9~kz-Z5m>mx! 
PTiVxN1ƍۗ{$A6*"`m$g 2S={Wի:x3v BX/T_RJcǎeĉ4AW(25C`)- a>}z4iϋj›$</OߢP.9zRdɒ$I$dbc2i$ZԲӾ |qH#1cXz쁏o BΔ:炞%K~_~jVfͰyiø÷)_Spa\:tZ0f%>HáC[n:mٲeۗcg ?r|||hҤIĚUqvv' -ߗ^'g5>WK$I H2T>ugRއX( [caEA^'4ަ]NУL\!2{SdU ex C&96E:hxDhe~Gqk ^Y-X/`cXb^ N0 T"m T*Ho(+տa1|PzwR׃P-NnuK$I\75E}~i(bë- B9 <82rr nPo ?\Y+g*Ԯ_xB [y0"9@;P ;~ : {G6>CNFÃ~;Li_me;Kr+W2`$%lR/>| CkÓX8yWJ:Cy0dȐ2~8JkXPnżަ0ʦ$Ilk͝hv+XZ!&f׮]j*sI$IzK ,^eyb`{+O^+/BD 䶅[gCp}>x[o/ӡb>*6O^ *o(YXITON#VbCR?#EnlTW 4 u+rqs}[hΟn&R=yOS>>XQo_{7uIlxǫ_gرk$I$;d@l>=='!U^&(iy*Ok(}`y6[`h ıv6p%CP ޅSp36D*{r uTkD&(ê\`}} r^2=4hC)ggzu\`phn6A.00иLPwʖq -[R F'@Pf&y%_mHrac(u>xA}w? >xUeձ$I$-&`ɓTͧZHv8Z^3&L0ij"Bh 8~OaBrPvPV79D)ڮT;8%`dDdFYEQPtƭ=t8Ow H@xtD6adAf KH}ǩFF.y?$֭o;Hsx~T!;yp6Ԉa|\ sMhԀrA O^"#=n1~u_@ ^ a{Q̾m[@vAddd4/2cxh]w[sSQwUTsxĆlXx{W(hr3A$CsqfV1d/[@R4fb1e1aq7o}L(Js^GpBx%ڻ _lBJ4 {o+ S3T-vnȚ81 z5~L (hn\f%f<┫oV迿<%3|o_{x>j'CVfNءW{yb6f/$`q9]sg."""vP p8AR*-( 㻙/#= <1muL{6^mzC̒V5aRS}fz^0mg+7`ㅋHkhT[Puu=-!9?,pKpm7P|8ɆE5:q8߭ >Fznm@h3$gpP䅕Q6X0 tT 22 jW>Jȸ6!3| Y>ksu?} JU wbL@پ醊ZEӑiVs64SkdKfn;پ#u} %-5GczzM"CM}DQ|uw3.PG(N-AfV dxuHXx)4z>!?W60T6# JPa6_ }ao?qP( yWZ1Ȅs^Y6R/О 'J*k@fbI=ih ZZ+0=$%d bcc_CI-ʓC+7ȼ\P 9pC@`?\e^_."""vP HrXu=V3ShdU*7} js#'YpSly!Я l+2ip4̔C*9Q Y&hYogFm/嘯>=7.sZILLB^%_^ޖ]۾Ap=Ì& yf*9Ef۶7_{؝pS<f .qF7W՜V]Q0y||YϚw/^fpĚ끞 Q¿ `n6.R n [U͝CŐ@‚!! _`) :‹9 YR*00Q=iJ vMLl[~W^v9KqsqKDDy漽b%.Y|weY0+ҳ-~9b_UTc/;q[п_ g9_:sff;I+kgYNz Y۶mo+tM6|t҅? 
e[WV9ܒ)|\_|zMTT:u֓U wu6EW tϟϬYgS [5ТuT>3͑3Xn=FM6V|!)"!mIDAT>qPE>ӳ`ȑ|a# :PnƶRC-5cO19fWd&MԸÆ #((>.no=$YfY7[Ä p0vXrse/H-p2EX#0: o07-""ybc:%8yQкy'NӛW$>>N7o晧Cwxj$@Zmfֽp=cZNttt 5k/{ 7?mUɄVT3TZeȃ \:ט1cFwǧp)kϴhY'(k ;GB\W &&J/dǎL}b dr?x&bHWy Ow>;{-1ﬠwįSOpOv2="*-9𬋽^Fu'K.oaAINNfOKGxQ-LA'__2g2e ͛7oͼ<.]ʢ9S\F'g.6q%`uB{2<֮]CyE%nͱ(%^Ø3$%% 3ywyopW8ޡAwLsȴ34ij>"/_81NDd˂yNNۇgӦ3q&],YE sL&nanDLRiӧ3n8|\+tQGZZ999XEDD <}^È#==T222rѱcGHLLk׮WtRSS9~8億C\\ h,СC߿t<NHbccILLGWt`YtҾ}ݻ~d^/iiirq\DEEObbzIENDB`././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/_images/gptree.png0000644000076500000240000003634714456461441016055 0ustar00runnerstaffPNG  IHDRZ=sBIT|d pHYsaa?i IDATxg\Wڇ/fUaATbnD$[w7ɛ5M$5hTDQc(QXha`tc83L99o<9c#I@ ̝@ 0>B .Iwz&N@`yHġCHJJ"--Tr^EYSL&Ņ:IDDC i 1'Cii)֭caL Oޞ0pOOZdkZ#rG;3gi2@]$I|̞5[x"0!!G.<}\*.f,^???e/!t+ƍL6[l˖K܏Za 9t\W11<!k[1NPGFչς*φgyO-1g:uQQذsH]\ ү#//9,X@[!yyy<6`>Crt4xYa/Rl,~~~!1tByiq'il+ymu0;Fjj*aaaF%!t+/< ϳ-[=RO?!-#{{{܏XgETUU̙jޜ1-Z$\Ϊ^:u$1#nE|HNuR(ײ% VMW?Э11 jڔV&=3$/o!t_6KH}|XtY[;^RRRy:|ϻw9[\L7Ύ$\DFo/__Xa!U*! Nx84cΝZXѧoϞ幽{:aM$IW71n%NV&ZEX߿?JKyn^ڽ^^l&\`?s kڔ): ܻ1MJ+[^̔fmKt@ w`͈+v $JF aCr+*:o^w7.\`ŋiߞYK#IDQKO?NN,ՋQv1Y3eeȧݺP%-M|GJetPgKJh|Wɹ}''p7fOn.**n@}OoݚgA#I$폙 r9LPT(ស(}TTHĈ;)_tw֖mт5OLJ>>ΎJC&!t+oWa]C).HA[ bxfw_P{oyM RR I;v4lq"n%^P kr9qx:!F?)bjb"c[pYۯIׯaogGW׬x 񧞢Yrxvyxp8=,qEӓ?/]bرfo[ L}%V9CyM8u I&<@ݪ6m%,;uʤqkj9u1cƈ̄y3=%%&Fj*J%&)!t+?חIIhL0Kn.1'NG[W uBˀʿv5#KJڶ;/1CN}B_|ǎiiF9!|I vLJn(DnfZw+eܹ(J׿rz@;rqe޽_ff͜ICлR:Ē,7}'Dn! tSLa= nڔY!! i'jy9NbIv6 |)ӦMu B]~e̚12yx0, pooZ(ޡ#Mv$1hbcc 4w9? 
X))+O>!;;01:wWWbӹ}uu5M4gG/Y bDҥKeʔ)zwpp`ڴi\.h.\ /@+ӧOիW0;!BqF]ٳOƍyꩧX`@ GУGسgOڿ?={$..!C ;!Br҈d<uO$"##fǎP`ʹc 6 0g9}A!t+͛|̚5onƌ7111SP7ЭZj!6CJ1VwUVo[)j#6 @̺[)jCXmbhK6f9bhK6f9[ƲjCXma,K6f[ƶjCXmG݊0Vj3?b݊0Vj3/BV,VyCw+TZmj6K֌`jK6XmfZBLmՆ̇z=\Zm<seՆ̃uR a!z-VCz-VBXVj3-BKjCXmEbiZmtC,R qVEEERptt;;{cZmv-JJJ֪P(7S6N@8qM6v0s{~`oOhDtB^hڴ)`fX7wNxx8_}AAAlܸ4SSͽvvv@xd$QQQ='''3eh!JbӦM,ᗤ$89EBA'//:8 J\I iyyUPvӲeKsPT** ccHD*{y OGG66TVkz~>iϧNeƌhܥX4BѣG4aGһqcfd` ZXdobqV+NR׿[[>nߞAA8hQ빒<Ɋ3g(7ׂR׿Ep,^}UTGҹS'֬[G۶m h4ϷߦˣѨ^}UT|t9BvXv-;v4pƏ>B@uu5ύ˖-[xcG/,L8|&R]Ͷ8zelFMM Əc^h(hGf~>9_Q~bȶ nfjjjx0Y3_T2rn瓰{7={4hVlG׏͛ ѻw숏_~QF̚5St4C65JJ!;wṛG 2J_ 4Jjv Znm8Bf$!!hb{dfvFUTi-:wf޽d]+H>}[7^ 5j:mތO6$&'[ZS#?|s'a08:sРލ/F{M[''GE!3rr9==?AݍQ@,51yy,; ॐ׿A?ܫWYٻ7r$z1];zM LӒB7,[)[foQYYڵkM 2eKZyx$J%A,lܸfn@`陭[{:3BB89tPbc!t3r ܠ^"[T3BB8yֻmYbPpdFáyl9j{5$I"nbq!͚6j1=)t\NRSrV]VZ:B&ZSOGG<7(CV{_F24(U*$IZj31^^^T(w?FewD#IWU5øQYk MLNOB:GD5ƝZ3]kAktMLHHN_` DߢE Jџ`dDv¾?\ uXa!E&}ZB7Ͽ/_&,de4 (z~8rr8gcdeLJ% ny\Ye؅UU|sC. !t3Ym E'''&Oʲӧ^Qawq*U*^п n&z-${{^NI1Y̌|>:z蹕>82=%$/,L͛G@@IbZ2b83ay~8;ZMĖ-HMK3A6mbѬ׏[2j,FC[hؐLuXn\_Wt32vXF=$H3ZF}..f՚5f9dԨQ<7v,RR8ph$8RPիC݌ذzچO͛Q0_ |we_MXd$Cv$[m5^LLdӬY.]<ƣ[7n MgOic]a/29)Yncƌ1@uÆz uʴly)$\«g}flnf$I7ޠ~ѼȐ;KҬ,oIbΝ!rwwwvɓc;vp$IΦMWڭK.ѣG|ᇬZ+V=.۷sV}HySbǏvF%%qz-qrrb$$$p3_׺'Na&& #y'O{nZnͰa֌g},YE\|   :)4V&Rߙ~FÓ#G2g\|+,]Ep//z({{>[YVkJ#F0g\s\v튷7jAEFA7nhJ%m߾]4iڮ$%> 4d`ߗ^jJ Z)SH:tlmmXS-RNNN=zTruu.T*VbsI BիTUUu )==]>}"%''K/_QRȐΝ+IҥKt5..Nd+bL MLaaԦMUVR~~^}|'3L,X 988>66Vf!2!JѣG2wJ3grY̙CPPC5wJfA̺I6m)))l޼VF^*| Æ c̘1Vk ;6իӱ*r96ljM|w̟?qƙ;Ņ[0l0̴釹B72g„ 7~X5l߾3gsϡqG!t#ryx t_m낺ۙ7o1BFCҰaC~G̝7 ‚ /5w:&AkF@hnn``M6ڣ5nBDhb a=X&nSm7!:"lC}݄VV$IL>]hh ɇ~ʕ+VSl7!t6Pl7!t6Q_l7!t-6Rl7ai3g̙3&Ah;|駏&&ãl FGvBa jQ݄Q݄(n^Fʣb+oM/&Fˣ`=C (--EVHfhܸ֓hVYYIVVΝCRBӦM wUUUdeeq4 4i҄MZdwl]2l0quuժmuu5YYYRpppqVή555͛ &H킃%L&Q(!K}tZKIIqIƄ<J%m۶M4iԶC[I.?V/o/iA/]ziZv!M2Ej߱dkkZz5~Lzws}G\]]ÇK*QRBB4uTC4`iŋ딗$Ia2Kaa!-bɢE\J7 "ICd66T՜).&=?~E%I=BnyyvJHH 3^\\ŋYx!/^F^wo :y *5%JO'?-kR3ɑ2]C)--eɒ%.xxv <ws*U 1sҷo_sr;v0l0̙󟻯l2brY/  .]-bY/b4ԔeQ6HĩOZ*^ Xz-7pSYY+ҥK @p 
tӫ/I84^9WfРAX?^}UK>>۶۟^{R/^I8,g΅_`ZX38:3YYtƸV 21qqu}=|Md[72en ^*%irn:ƌclFII CL 1HeWHWYbǏ7@uV[QPT@ϺnV;lduZI/%qye,Y‹/6)bÕg5hthRfܾ}lx͛73l0 eee؟g3(~tmѨ4$NN|=ڠBEE z_3M ڿF!yz2˳Y~=cǎ5hPUULJBhI#~NĞ`ŊL4OoqB$a?΁}i8*gaGn.m(q IhPõwZþqRԩQ<[62d|%HȹuHIIK.F0&O̺ s0~C$g$,_~O'_-N+V`ʔ)l<Ǜ65jJ͛q bښvY x@nnXj-[QM61zhG;'Qi}+ p$FGxuo5Fa{8pףϟ?ߨ@NNO ؠ ұdDxy޾}899Ԏ~:C Ƅnx2 $FfR;*??C;З#FfO|te5uуIOV(_%% <ﳨ%M7__^ЁwyB}P(d1J}IDATa :ё>7o,G}DYu=4٪6tN|\r$1"F^8_}ϟ{,FEEElXih+ EUSիMUkVfFVvx`&WYY+qnah?=N,[$J%K-Ėz[vF[<Xdn1B_z5A:;TPbch4F~z*+y)#6g&ᆪM?inOZxbjjjoӦM̧ jlKIX|UUUbnF6kFZ&ʹ`Μ;Çk4}N%m {^Ojj*WhWȴ\Brrmeu4y -ܵncZo^޽{n/֯ÿ? j:е,BUUUWoCLܷ_x`w__r ]R__jw諭\t3pyefkQ/Ŋ.FUѐz8F}u Yg'N ^$IJ=dZik#jB_QTZ_Á7|"w=jj03Cp{ :ӕ,*Pȿ?f#ev GIHZiݬ9\4sЅsQZ\jZmll 2zW^%fYk "-Z-BGA.☦lx'vjy9[/_֭xۣ|g'F@9rEgVu ~l[b==I4Qa+fffQkFf}[ ptI+]\Xٷ/cZ`On.?^ٮr:9b+(({w:D jϵC~|(**ҫb#Y0hƯY0d oE]Tb0s˙/|wy2(;wI4z,P*:R^wc0ǿ:έSQAf/FiܙhR~V+ykQTG!tz.2Y3 DcF4kƾa(FЫjo>ZW[R/o7_Fg{O~3ZqީUUֆUS1I="Å"í*JuhԈbg;wN•r|| ·vJ鳪2;x!eWru$I"W{е*|'!Izo_zH/*",2N}<0ujw}u}znu=' ]>Bg_Qz]dםZtjgZ ֈ^]`۶myyuTTgv$CҲeK\\K# :@vz4E tہs;蔃Z&h0M&MTxO@VZC~F?x Y$I+N97$ѵkWƑdtڍk ׌ϸtR}FƆݺ֛o,WVz+W9 COeQ?!tIS_EXNt۷>L4+?_(˸Omd-"m0FM۵^;p# A E֢,ZI'w$T݆"kq%bƒ,ǾPRB|eZ1!iJ4^}Y|Ǹk4Y]yh@Da}t N{tٳgӲeK&&Q֛791s>=3n۶m >}0M*Ï?ұgOřLۓv35XɦMtnљd({ӧ>FkFUS^N &$rW"áCѣ ?:5RŖ-4snơjТhݺ59/m񡹑<55 ٹ q;v4-Z7P89*UKiuf͚QZZ6IFV022v0RwAp<Ѱe#nGWs@ 2C݄+4Jű!$;aРA9rahC1J'PZjg 8'O84im)a v (>Zmׯ2F K-(o)̮+`eHE4O=4j._7~zNT^JNUl3A֖F?\.K.KcΙSUH{;_&Bp`a"y'qrr {3EUeo>6'n[IhL&cĈ܆s4Psj|t}I&li{֪M= &ϯ'NϏ۵cd`VdIV9xyWM>A-ǎc</g4*HG[oE֢,ά:B;/Ж'O2~xÏ49rZ|YYyz-` mfF]ݮ:=oSr%YY~jo#!t/ȆuPi44pr"R ˋN^^4tp@ncCJu|" pvpŅt%RYCD=QD({>VRu| )*WC^ӧOdj駟Y{~Zý셃ZGQZ'iـ2}tZ~}Pl߾؅ڹ W*8x (ݕ)0c ZnsGF֭[ӥKx ~GH?|7y:v$<2޽{P(0`s8qM6Njz*׮ܻnЎtBTTO>ɏ 2Nbƍwkz=?#c(ٓѣGdlƙ3gHKK#5=+=dގv%Z]<2!o߾aÆf(--ER䄷=UI ߟ۷:uRTTDYY* GGG vvvN(ܺuRjjjprrbu}–+W2ydN>MV=O 4,j lm:u;w2gW;BAll,GB1113݇#/+VԴ^\\̪U6mZ7n1c&;LQ ,^Vӧ׹FELLIS,Fa<9s搝MBB,,z,5}OV@--Zj!6aCwCXj!6aB7VjX)tCZj!65aB7VjX'tcXj!6`qưjCXmkn,K6&,jnLK6&,JƴjCXmkbn K6&XMaՆ)-V>cjCXmEԖZmMP_1ZmMP_1aՆ ݜZmMP1iՆ 
,VaYwKjCXmلn)ZmMP0ݒ,V>a[Vj'L.tKjCXmoݺEFFW^[[[<<< yw_~%ҥK7 y;ɓ'n/INNZ E{;"b$=h4Rbb4a)EϽ@iɒ%RPP4vX}Ú$ H&MjyI}.\(;}@$IVYr%_|'~=AV x<EE 7r$DUAq#Wv]<5) k4V^ͬ9,A _-Z]XM~f>HAN|L?7x&Mг8y""@Bfиcld UK/4Nc/b9=w'O$917кֲ2N-;El/>)SXdByLhHUU| מfCYv 6Ի?Cl22_GOXɁ"Ǣc ( f+< ]$z->ڽ܎w /mDD7nΞ=ߺ~BY7\ oN| P'a76H6fthS\6swmZ=CwI(-.Q΂_ fs{)-^܉_RRW{mK4_|;@>=챑Vus]$4 2[v |"jLϲ=ѫTaHFIe)PJx4j݆666FU`ܣhL*~h=5גKcQW4G'>]}Lh%^uCHUFU`wn߈ z826_r'Sdz'2;%u^w<'.](L/ԻÜ98* |"P> ӻmѵNn]v$A磇mHImneV-/8N^U j}hCxx8N Ho<;xbdK~r^wG4ڒ_El!'(Kt >''qM iBw;!I#q5*#zлm{d7tw8Ѐ[m%t^N$quUO<$I iF/\IB\tӨ$Ic玔njA\K[ٽ{7w-0k,.l@ '0ѣc̗"eˌd-̢UV/q pvr&4&s-\d&bÝԿ$n4)?

Sw׮UtэI&$&+ \ gIbT$MLSNL>$1M{IĠ8x O}NKE!.?˱hݺQcIcё8wW ڷo@ Nذ+9d'R6UЅD >3n׺tRm9x'շ~dԤIv&PqvPUPe$A&r7|Yf]cOj./ʛqc쟳_^{ ՑL<61JXq\".oELJ?~)鹴'͆7s7+I{"jXZÔSHؕ@m|?Rb55ά?CkڻtRFs?$77_zq~5FFw8!qdzaz!ŗqvqf 1cƨvZbp)]ޮ([oȸ1sL:w6qV6%;HDrr2)))(r< %2<nOwY ${jͽ{VWw׻3tP4h`B cjIXSG] X?4.+>4%IENDB`././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/_images/gptypederrtree.png0000644000076500000240000002605214456461441017624 0ustar00runnerstaffPNG  IHDR=jsBIT|d pHYsaa?i IDATxyXLbB""@cբ=zl_zU\~*Vcն.Ci bJT-uy%P,؅ "$hr d0Ds]3<ߙLɝ!x}(++SPPP>ZH$uFDDDa~~'Iuy=j_^~ݛtbpssbM*jSiii,:vGx&˗}pذatJJ ƚz5k+b[7VWWp <^*+++F.Wǒ˗/ҥKӓ֭[}&2 Cp<^HSof*rZ `||)9+eZ[[7 EQLjj* dtd>W__ux'"Ȑ //=<<{͛7]Vq/B 0DGrJ̙3s<^(T(.]AZZ;vl+_TrFhy|NOuDD9r$믿wwwotNYDu<WPPcǎ1:A}vm۶퐝fwR~駟ȎԩSԩS*@ `:z6u/U\\o>loP,cϞ=q…ڊ999H=zcccQ*/~mx*ѣGXX#eWxxxZơC >lkO6m5k3~x6cbNF]]gUUwxxC444"vwwwҥ ՙy{{1m4MٝfWOOχ l{cc#hZFVoq0 fw2hHwe2Ͷ7mږamAQY<΀sX hlq`ʔ)0ydaٲeAAA 999dx~{–-[ڍøc>>pAP*]mNDFF)J=1(J&22AN}NСCOKKK ^}Ү:xY[nI<==y䂍7"EQtUUUO[J H$3f|sNí[:t߈[lcccٳݹp}Meee;JɈZɓюˑ<^+99yI#|2d2C7^/wE"=ѿF{{{Ν\OZVԳg*G%''H$jkkq<^iSKJJ8r?D".:g[ztbϯѣs~wS6^~xB@ ۷:w[ğٝ/Ǯ_[XXQ >cǎ=R;"G/d|<={8qb(8c!32sǏyҥ'Ba/"E1 COϿSRŸ...;$IŠW\G}~g/eK:oO4hR$!H P(dۺw^7nܸ4M(`0o>3((7@ヒrdq֭R@TDp RT}( &L`6l؀N–X0 x!LJJBBAիb͚5Zx8GE*ϰaN2YYYfnFܴi$IVX$w `ƍ]\\4UPV[< `^^&$$ EQLHHHRT(hΝ]\\4zggg;mjqʕHQZ\ZZڗZ6􉉉lxbXXMᣏ>Z0 aklkKgΜiMaa!}||j/\uիe=zV˖-Cym3kÆ /_n%`xx8]ۙ"or^R>iiipl1<h"HMM _?`5$al}ii155ƯRp>PΨi޽DGG≲ .P(d{U\ mݺu6:F&Mtڵڵk@9,XA"}̹svDrLvk֬YR[[>>>1c|oMҏ$IիB=#UsolltiʕHR'qǎHSVV֛\UccL&iiӦ}ii3I }}}k̙cͼL}}=vBDě7oY~&z͉$I2(JqРAT*M91 k֬z t邽{uݗ{FFX,RDwwwLmVZbXki7L2%;wq111{}Q9Z#F=t+jڴi兡_#G044x iϟ...?(p֭ $Icǎ}EDMpS,s&]obؠ#xكuuu]%bpwwoNII1Ŗ/^hږAӧQR@ ۷{&L@6{E ̎اOO!8䫔(J22<<(ʴ̙30fLw^sssA @VVz5k_|zT /"TDEE@AA)ZLYYYF1#~~~Oowee%dee@ll,4mRLL TWWCEE?^}U~nj8Jr%1f_~ENݽʕ+0d/ 33Ə3g΄ݻwvΜ9п8x pq333a0a}1D"a.\4-_~ 7aHp @DHd/ A^]] ݻw7b}qcA߄HSSٶ#"DGGCEEԩSM$1b9r8#GxaǎK/Akk+|fahnnvuHpx >ӧO ___1[ss334삈D{E hH4V__ ,\n:S}PZZj_6O? 
`0bccڌ?qP4Msz2 ⡯ZP(I&!339x( `]/gv+ŭl۴i̙3rJ 777زe ](BSS@4x{{y`׶h4 JM:?S0o<2e ,^ zͅ}8p ?f͚W\> b1h4  iK .=#Gu`޼yv3ܽt([V׶jkk ` 4%3AqFׯArr2d2߿po߇8 ]vݬ?J-7DEEKOOBV ÀP(4innÇ[/IP( ##z-׶K-9EDDD!AT* ٿyxx0f͛gvrxfu?#4}bRi:cI5DFF*^*www8p |'ᥗ^6΍fٲepI1c?~>c[FJ%H?z\]]WT*bb"| 7oɓ'BC@a"##- ˅ gSEE4-jT*ATr?3srrP -#"7_xmiiiOD"^xϝ;wߢRtt4K/eq=f&Q64iE"vN555fmC,s6=BY0plbΝ;7wx".. Ҫ%ٔ ٳg6={6t:!QF9***$If͉`9h&:JɈZ)%%eYGit`cRccr>X(c郾t:* zv"XohhpH&##æ:TY5PRRb$JRRR,3XsDׯ7=f̘jii_ӗ[gΜύ?[6jq׮]Ew֭~߾}FeeeJTJ[o޼ixT*A?~oj=qq>0Ψ&WBvogee22Rx 444@QQT*ZMd2ԩS;w\6.]z2--mg߸qãk׮B( An@(Bkk+RI$&&N81d{n LǏ9|ܶڗ>uĉJed~~+W꺊D"g}HHHaddd~ddrĈnnnMJ*yXDEvh4bOO;ɋ2dowΤ ~F7L+VH:Π;v4 }v Yv&Lu<<ɋ-tҥ曛6x󟗧Mӧxxl7{Ge9max?JwG'REs}fjgȑ===233_J7~1:zF?xyyr;f N'PT}O<9W^=x߁aӿ>;;ۮjrJ( -.--u~HV{ lR!$M >h=W^  ,ѣ{7Z-[rN4Kz%3gδB ^p!<-իW{.AQl;M~5y6jxj2\WTvOHKKC ~6lX|r.Y,ioo?)Hl IDATmKj;w29EU,n1ް!0DLLL|gϞ7G*77wEbt#555ؿ~h4b~!b^x!ۇ2Qw o7,nuǏgifҤIL׮]]kmRVKKwWi؁@ .P(d{U\ `5 nnV{âF/_d[fbmۣ}||cƌޚ:B , H ~̹svDrDrkV}(J?WEΝGOzz:v #IҰzjGo^ǐz;w7v\Xv466d[IIIN%4r\?mڴ/ۋrR)>"; 7ثF7Dr+HF 6oޜH$SQQYȪUP,kkkk鄾5s̱8Ç#Ax%O]]b-͛A/Yd5cFoSaQ0 =p.ZU(,F F3zhnhjܽ{buN81[ lM}HLL޽{_z,F 3 kM(6Υhx R5dš/555\. 
&m3gxyyAuu}@>}J2ҚA~~~X,f""",~@ ӧݻa;wB\\Yó> ]z!˘h7~s`!#ZҾ: "fgg@ ={ "ѣG okĉ}ɓQ!g-F{`ﯚ7oUw N>$Im̶_E"*J2 (իW/[ڷoD?ӪСC+VRbd[ԿO<\@wq'265k6\iڮI,<}bu*** `onUÇGXx-G6K/Q9YfmSreٳn_ Yv;wn o$J\\[gZ=z1BA;`a!^av[GLP~{â F*{@$͛7'ZgG(33g::={6t:!Q^$AX ٳ R2bVjllt4!Gi200RRRuD6ZoXXohh`sJFFt:Tcu;wtP(څ&ciE:`[ y -0FD~:z{{cƌ9Y7J$CIIC1))Se,K?pH_@oǼް*]vM\l  Sjii_ӗ;d,mۆ+ι-@V'eQŋ#׬UUU+))ŤY}ﯿx"cqF:R&o{l IRյް)׿8|pZzz:zyy~~~/^|뉳TUUU=~JMX9jgyOF!,=H6'#+R)n:yMRL;~oj7f\̙?4Zwڅ^^^tn7љnTVVVϯRJ:iU V& Ã~yFQFw~Y'I~'>sW_}ea,//+V`=[f Im,z^p@py@D;pҥ'fo߾}7IB hU@ *>MEܺ lbV9cҤIXv-߿o5 .y-[6m瞳ox!K6!tMsmرcǎ!B\\}9ut:"##,H>}999UCE lFOOO4jd#FѣGMV!2N>m K#.iVmsN(̟?Z([m۶#4Q@ƍװaCڵ }7 <<nnnF_4J%qu1PW (}Lt:lĝ;wjtBܺ lcFܹsmuŻkׯh߾uFV6ڵkѭ[7{j6, uYUq ϯb!tMӦMmWذaZ+.i$I¸qs*y}vܺu cǎocߣ l{߰h"tر#wYͯ).y\]]1uT|wѢEHNNƜ9sTDFZ- ''III&3_!!!0a>C2Bٳg/Ba۷o[neprrs"j m۶ƍ1h 8::"..SKٳ'ܹǏۄ!tA cLj#p-Yf޽{8x Zha@t jC ݻ?m۶زeK?Ǵi3'N@vPu_}U_n޼٠Tܾ}|T*kskq I4m̄Z]l}tYq=@ԩSo 2HbXn]U j4;wDooo\G/RuApp0K>@zzٞB(1rÇHIIŋ;;;ԩSm۶EVgee駟oaKC]Pcy?RWFvv6Z-T*`DFXׯ㫯?F .ۯP(ЦMj<3xWQ#' _#11W\)V^T"((j>,^yԭ[dB@ XlvRP!44ׇRDAA^ FFӧO]tÇ{,L> .DΝAV#$$^^^C~~>Ӌ.puuň#0n8hxLFFmڴ᧟~ʻwT͛;w.F2nu&--;v$vЁׯÇ {%_8uT_]`V6nHwwwr۶me$;Z+W y3Gjt:-ZDJf͚1!!d[yyyRd۶mhp]!tYeӧO'b>dYرcP(l6~ԩ$[nFEEɉBTt:;;sԨQAjAwI* Ν;>|6m0,,eB,޽;6ll|2]\\ra1\jՊ:t(WxԩS^XkÇg`````` ^adΜ9kkR$޹s~+b F)̙3lpB* o͔)S7n_qEEwAAA}6:DI8c :u.]*ӧS$8pNӧIOmڴCۛ믿igg7xڵkA iӦ={6> &P$8q" CCCyatpp+<)ٳgŅ˗/cǸm63n"I3,,5ڵky:J#QYpGI|&-Z^+sիWeYgϞ]bz θ,J\g˗|nݺtpp9sH>:5jTܷo )Skܸ= uV_4hPBݻ7 P?Jŋ$yͷmFIᅲŋ$v*}ɒ%tttdAAA^|EFEE.ӣG  ʕ+8p Zmѯ[nP(HLL,Ֆ֭[ڵkq!@qYܿZݻw͛7qRjc駟 r@PP7o^"""O?SRRRtCRR} 6,lZZUVŶj HOO/NR8BHTz<==}:uTN $?ƛo;w߿hYk;88`̙9s&.]5k`֬YhҤ z]vύI&]$۷999HMMŜ9sÇheB,#.(B?uQnnAثVZ/^رc-$jńnM4ܹsrʢ䈈ٳ 40¤oV[>;u: 88;vJglmgzI `oo_L1ڴiQV۷/ԩ'>sŋ1x`W^[.~wٳCOy7oǏ… qy$$$`ǎׯuoAVV^xaðrJwAq]$''+3&}ܼyw=AAAP*tRiii--- hҤIR/B5IJ\ɍ!88fիpB4lذD|nVR[lsbƍ~_MXZƾd~VZae|gW_aѢEjhժ6mT$t|w5kΝ7n  S:u e I&hѢ+DEEmߺu+"""3h4EàwtԉC 0::Jc#2}||8}yoCh"֯_E9̜9G1cP_>NZbر#vNg$CFrrr ^H;:5uNuV,X,q=z4y睢,r} Ç`,XZŽ;Js͛СCɝ lYYXXXXN8booׯݿl޼x*o^z-O0sc3|t e޽gzzzq>e[껲LJ&L9W^KqnܸQ~W6ef駟f ₟ 
9p5>/uʙE'~R;&SXXN:ptt;wm۶ԩvY ݻ D`` kΟ?cK/dŻ A ̙3iӬ/::Ȱ?S'^⾴Z-tf͚;R$n޼٢~ϟOܸqE#GRRѣ!2GMBǏ[V]Pit:iggM6Y… }s^xnذ,-AGGG~'tfHJUVTG]P)"N+Z,00ШJc 38j(Jd3b ]v L%27nHpB"`$W^oTD~~>7mg}˗/Efiܸ1J% cǎb k׎mvB(Kz f6l^{/ѣGƋ/_`0h.J裏آE `-8rH~gy9^xgϞ~ٳgO>tuuB`߾}y!| "?,SĜ9s0}tޫpP(Xr%`CVeH!tATWb!tATwb/!tA؊TlMzKG]P[! a"#^!tA5Ez@!G j zjv bjf RjV BjF 2j6 "ȋS.^K"/"v!Zy p ]#Dn5YB5!rӨbBW(v!yibBA$v!e)bB[ v!tG:غ؅m!rbbBQȫ[ "D^آ؅m !ꁭ]݂Ȳ .֭[χ<==ѪU+5AZiiiHIIO(,,Df͠P(8:0믿";;ZO? IlU/^8{,'OnݺٙJT*;uĉ'ɓeBX+e'Oĉ٩S'TRŅݺuɓy٪(t:GEIa7o̮]w^lf-GaƍT*Ǐt'Gʕ+ٮ];Az%3xJJLLd`` J%ccclNJ;p̸83EiY{^^ǏO֭/\`mYqFzxxLJw6 ȲiӦGli(ImJ䅅:t(%IiӘg5j38 V%z``` dt:`dd$%IU #n3f .\"7mD;;;$:C=7ol .$Μ9"MNN#GݾN+S+,/n$7n$M6Q$豱$b"׳`ƍ-9*GY9zh* ?~ܲBFAwww<*NJRY:qisڴiVMfddXş)W^mq_Z]taf͘Sf9!teQQQ۷3// aAAU|CAACBBhg}6}}}٧Ojŧz/;wU*'MTf!t۶mDJ5kXկ!^$UwXmFy,ӦM3\bU~!%IŋK/n  `6m$Ջ*ɲ̐ݻJ|nݚϧ'L`u999twwɓK/n׮]?*O:U%Kɓ*dU46oLF ݽ{7%I*~=33*˗/7oz177>!t\ IDATXx1U*޽kq_Z%pĉo('NdÆ fn޽KJŋW҈d׮]w}KN:FHKK#ر>\ 77mCF0pbenj///ܼy9r$QNtǏ/V><<QQQX~=Zl JUe;;;t ?H~Gt vvv&1b=jR]777T6!FS(xױaȲ\?/ WWWml/M @ ,,6WWW[۷oƍ{ŪUb +pB|pvvƋ/";$!11-œ9sw^AV̙3j=Xj8s juܹ>VyϟoA%~:nݺeRƍطo %%Ɉ1ږZ.MϨܿnݺRO4LNNf k$wEI/,,dF+m{k׮ѣG f8?IA]#FO>|j}я֭#޿8ݻ ={_&jڴIvf̘Aۅ+ʕ+}?77[#y(o׷{:t0ױcL<;wd_~% 4i7ndc޽W_tL~:8g|g/]ܺW@AA8T*}Lt:lU6),,4=ǏG3N~~>Jei+^zP(zj}:t(y İaàеkWܻw,*jcC,Z7j Ǽypiٳ{?O 0<RdG$nZ-fJU( W[*|||`R;wwƧ~1cƘ,ypv#GZwyhZ6nܘÆ שּׁ,:999#h4ʕ+w?Vk_5Tz\cYbPXկ!,\J#LB2 ALL ]\\[ݻwj-Yٽ{wm&܊|29j(3HɰRo-SP(8w2ݻwψCggjAzzՄa ) I'' M_~cZ$ϟ?h[#.Xu:njCׯ[\vvv/n?77QQQT*ܿЍDLR~P(a[۷o3::h\^,-lmr/_̈* -uO>,yĈ$oV1Ù3gB'''ݻנ:BFZh?]\\b _ɲm۶חB6/mfJrŊtqq)-Gh"T*6k֌ &cll,J%۶mkBR vݢu6lȹs<ݻ\d [nMӧO^P222Elݺ5,Yb|{7o9sذaâ힬kkb''йsg`ǎn:G]tSL8uT!'%%>æMjѾ}{j߿h}?HNNVE1n8fGӧO/?m"^>B!DnGf ^:B"i ]f"^Z-t!B"?V ]v!^ .D^;bUB"f +Z!t!rFx JF ]\PI5VBC-bB"Cm{jkЅ&]\`jkЅ&.D.5M6-t!r%IbY AMM ]\`MjmNB䂪nSB"T%,v:`b (j/t!rAu^.D.ؒ&w")) ׯ_G~~>vڡI&$({5AnBRRΞ= ؿ?rrr???!ׯ#99'Nl߾m۶Ehh(|||8B㩬IҥKHIIAvv6Z-P'PSVeǎٴiS(ݻsժU|Amua{Vx8嶉###yfWu#??7ofdd$}||m8jUQs>xVb^n4mڔÇ)˲-tV˸87oηz_|Reܻ͛w/cccٳgOJDWWW曼~zmQ乹\p!7nL ɓuVoϧ,,((+WsNN>]v%rڴiʪC1YYY6mZvӧsΝr 
(2oq֭v5kFJťK\B˲̩S'LJ8w獵'4h`S"/,,!C(IΝBؕeWJby}ص /@JիWWyq 9w\JġC-^ 4'wm6999?~<pԩuB5k>0KOrYz{{A̴s8bq˖-񑐐@ggg˳sݻٙG-[ΎMd;wA6>ٳ+,[зoN7oق+gӓ{6[&-$I/-ȑ#ttt ,?>ݺu7٤Igvvv6W^͛SR}o,;sL֭[T%i&իW 96dvv66l>zh֫W={2""¨:dܹtqqÇk׮ڵcPPIeYf-oT5-[4.CߟnK.e:u :<|#d ݻwK/Tbƍ9en߾GҥK=zkoT(1c8f*JN#GdHHMpp0GitӧϯX]hh( bpF׳,A_qqq$$آE kŶuܙz*NFvœ_i4yL8'NӧcFRBzzu-FZ69?CUHMME~~lJ^^RSSM:Obbbp ۷dmKVC]\t w5M<==eڻp^}b ÇzeI $$Ġ`dhРy>3Ɏ3gLoNq\kiiij&IF/b͚5?M4AxxѶBCC#;;&%%mN<$%%G^аaR˦ZjUl{VPPPPf )q $Q^rlT*틂 <nnn&wI͉$8czo/IvlM5j:u ,, زeKeRիW _ ˲ {{&IIIŋ%K]E↓kjvvvUGMj}ˆ#Lckmo>Czz:"##RNbۋ ]PyyyF;(((СC3~@`` fҁT*} Ul\mbooÇѣLo'''C$00:uBLL vڅcǎaǎg'>$77%Jb]?ܲ>ԏ>|x/S$^tɨz$$QPPPۘc߿OWW*\f͢$SRF4|K.QjYټy_-X!!!tuu38sb31 5kְytttd_'ǎ+eYf˖-F8pĉU4c[[t=cvW=&Mէveڵc߾}[*//^^^VPdYfPPgu/sREX4'9vXrMϹshgg9sXƍݗkz:DVIbcciggW-gяaL2rH6@3̬Zw^8CtR-g!}+T*bR 4Ȫ39sJJ6LD͚5cŞ={YeYf=W\Tb~.},6,7n%IE?-^PP(:99111">ҥK-:M:I޻w:tweT8{RRءCgY6mYm[{RT2**RJCrPe=ڦ3f>|H*J=J's5СܘTaydJJJ7ӧ߯vo>:99M6bܹsҥ  ׮]k?B`.]xyZlqa&}bĉ'?*##ڵkZ0 Z->SN!11IIIu aooOOϢbK/Uo-Inn/lSIDAT.ߏ~̄VRBCCN:{&O~b+dggc޽HLLFAJJ 舧zإK<쳕RB/ +1_DRڱAB\NZ@P ^IENDB`././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/_images/gptypedtrees.png0000644000076500000240000005236014456461441017277 0ustar00runnerstaffPNG  IHDR&; CiCCPICC profilexڝSwX>eVBl"#Ya@Ņ VHUĂ H(gAZU\8ܧ}zy&j9R<:OHɽH gyx~t?op.$P&W " R.TSd ly|B" I>ةآ(G$@`UR,@".Y2GvX@`B, 8C L0ҿ_pH˕͗K3w!lBa)f "#HL 8?flŢko">!N_puk[Vh]3 Z zy8@P< %b0>3o~@zq@qanvRB1n#Dž)4\,XP"MyRD!ɕ2 w ONl~Xv@~- g42y@+͗\LD*A aD@ $<B AT:18 \p` Aa!:b""aH4 Q"rBj]H#-r9\@ 2G1Qu@Ơst4]k=Kut}c1fa\E`X&cX5V5cX7va$^lGXLXC%#W 1'"O%zxb:XF&!!%^'_H$ɒN !%2I IkHH-S>iL&m O:ňL $RJ5e?2BQͩ:ZImvP/S4u%͛Cˤ-Кigih/t ݃EЗkw Hb(k{/LӗT02goUX**|:V~TUsU?y TU^V}FUP թU6RwRPQ__c FHTc!2eXBrV,kMb[Lvv/{LSCsfffqƱ9ٜJ! 
{--?-jf~7zھbrup@,:m:u 6Qu>cy Gm7046l18c̐ckihhI'&g5x>fob4ekVyVV׬I\,mWlPW :˶vm))Sn1 9a%m;t;|rtuvlp4éĩWggs5KvSmnz˕ҵܭm=}M.]=AXq㝧/^v^Y^O&0m[{`:>=e>>z"=#~~~;yN`k5/ >B Yroc3g,Z0&L~oL̶Gli})*2.QStqt,֬Yg񏩌;jrvgjlRlc웸xEt$ =sl3Ttcܢ˞w|/9%bKGD pHYsaa?itIMEv6 IDATxy\T_gAY$Ds T\Jbv]nz=ļZff5K("̼~$|x^'$$ݻw~L^/& DǍ7ؼyssٲe4 nѣG;v,0,,RHa"r@1,7ocǎƾ}иq2m CHHkqc"+$`)dddm۶w=_|\ٱcaÆaɒ%RHa"2Dc"X 'Oƅ {rKr EtRDGGKA_@JF %;v_;w3g >>NNN2c"+s$`XEѣGaeeU!^~͛7ǐ!C'Ha" #=111LJ~XaIkƸqj*<|P +)LXx15kΝ;Wxc[N +)LNJJ lقQFU#k׮P,_\ +)LǡRiSN!==] +)LL\\Qn1d:t`0ԩSr@A_Ia"Tebcc`?[@޶=‡~NgT[͛7  D$''^zFoWNl۶ z7w!((5j0Zkƍ7 *%j9Us&33Z<==j*_ ¥Kj*~"33S R"3&YV 6 6mB.]ЧObҥ0 FF#D&BUF{]|ضm<==ѳgO޽{FEwޅAW-T̙-[bϞ=Fo׾}gU^mݸq)))hٲAWDfL& .\@ZZZXj:uTsn"J *UG;<== D4k ͛7NJ+*,Y_a!/A|% EQ0zhlݺR~Ν;5j AWe'E0g+.]ܹs.ZjhժBCC1l0,ZsERRR]D A_Ia"Tч?yǏq\t Bjмys4i$񩩩x;30YJ] Ν;iӦ(D A_Ia"&:( > DW\XHmۆSNaڴieҞƏŋ`AW5 Ȍ 1dQEF! b %#AF? A_ 2c"裊BA_@JF >,"3& 2_Ɍ *2 A|e)2Q̘\|%3&>(D9JfL}TQ̘\|%3&>(D9JfL}TQ̘\|%3&>(D9JfL}TQ̘\|%3&>(D9JfL}TQ̘\|%3&>(D9JfL}TQ̘\|%3&>(D9JfL}TQ̘\|%3&>(D9JfL}TQ̘\|%3&>(D9JfL}TQ̘\|%3&>(D9JfL}TQ̘\|%3&>(D9JfL}TQ̘\|%3&>(D9JfL}TQ̘\|Ih=zTdgg5kքm{sG!sſo?NZZҠjkkk9YA|U|%3&Bs |駟/^RYf@vo"FB.^oիWh닀Kx7Pzu1_ +$PN &&/Ɩ-[h@VZh4Ƶk8:u j:tΝ;[DLL9s}=??? YYYHJJʓ@||<0d=Rb} BP`f駟֭['p֔IHH`6m[_~ǏkD? L@A|e龒R(K֮]KGGGs4 %jGqҥgڵw^^yjٰaCĔLFFFRѰy挍DW+95`0pʔ)0%%Lڽr RrJINN BEQ8~xIOmmmk.)LA|eYST( &OL={vTF"Iv^ρʊ֭+322سgOj4޽[ A_YJ s-ΨQRb4""pӦMGvv6CCCikkx)LA|eT( III+fڵMcǎQR1**z15k@Ha"+BiF]ta:uV!}^|"&MuMEE A|%2_j9 <>>>yw^ƆEnIN6홟Zݻ7PSK,!=wiӦήBmΜ9h4ENU5'N5o޼) _$=,K._~oOOOуG-2)Ѕ- bǎ+tSËLt{{ ݯz߮𘤦֖R+YJ.lAԯ_>>>xCrr2x tF=(Ν;=N+++1ׯGjjjbϞ=HJJz&&;wDΝV ':uJBlllmҴiS\~X~@>}pY<|:]t[pڵ @tt4f̘!{/5jo?}:qD^둙'N>@QNMHH4i$&M ;;III"+9J-P\wTV {EQIܶrh4ź*/?Q3}yyy=ݤIR_bƌpuuŻヒS{Ih4gETɍIARn]$''qBͽI8999ݻWvkA_WRFT8;;>3mm<<<ʬggg(?n݊ڵk}]v֘6mMD\ӧOG1p@ԬY-Z_|Q$\vލtc̙ `eeUfANNH;> _0%dddk?ߺФIԮ].]¨Qu?U8ԯ_QQQXti4dpp0v ŔNWh4m۶hݺ5Zj-[o߾Ϭ;HKK3#\>Mvv6j%J|eDȣYfy^zZj-p ̟? 
@zz:wիʕ+عs'f͚F=ޣ٨Q#3gɓ_p`˖-n׻woUV^:;W^Xt) 5qI`֬Y[;=/4  >ZmBBB%$$/p۷o*A_WRXjܪU+L>+V9sPN6iZڵk/"pwwϷ ̙3ѯ_?١I&ׯ_mK1o}:pM߿3㦲$ǑSh֯_|ӦM.r\\\޾J|e %m۶8p`_D< 8eʔgӧg͚yXV-j*YT>hڴi}j&MdItt4pŊޗNcذa|kK9 2[_ɩ*H* 6k?~!pڵ&pjZ:t0 1bU*9u0JNSzjՊVVV\~}1g`>}"&|WhggǃKGM\jUKa"+񕜦BY@UVT,8lذp͚5f288666OˤdPQ.[XHa"+񕜦BY$ydd$z=###hRgzyyΎ˗/^é(${ff&njCر#/^X ׮]K'''qV A_T"ӧGEQؽ{wر#ACE^"yߛcY^=j4ߟ`(Hfɒ%lѢ0,,)))F/ _t2sʕ+OSz-Ο?bBB/]_ٳٷo__~e_`#.X@ooo>?ҥK<{,w3fgϞtppJb^- _:ɟ;~8ǎvQ3{{{vܙ&L(#5sc~;F)0&|W9}t^rT}Ja"Wf+9mJa||<,X@ܴiϟ?_9Lff&OG:cGUYJҴt$%%W^!vؑ6l`VVVtfݩ( xiOSNUVT=z`tt4u:] رcG`.]J]}Bj066Tݻ^^^2333;uċ/jvZ:99͍۷oD_WVVbl1>{zմ_W% IÇgZZZI&QQjʬ<'' (4i333ˤݴ4>8yd*Mv񡍍 .\H^_&'''3$$pٲeR⫒j@*',eQI8=RJxNJ3gN~zZYYUVevr=h jnذ\3gpڴifqӓ>>+yIvv6SfEJJ ٳgO~^xZj9n8)L|բ}2O @w=CBWJRx|͛7WhT+W4X7Ʋyf(keҤIիW+ߏ>ҥKR|7~6?#2Wţo߾l֬YTݻwIU~~~ѣGݴiSϤΑ,q̘1wzz:9a)LUAjт=T -JZ~}+Iᢹ~:駟VJ;v ?~dbr1`tttpBj޸qdba0۷o(>c/@su2={䫯 g}feիW>|X1پ};nݺ_;4hPvNJ777)LW⫧}Uȥq'˷=)Riue+׋ijɯsWEJ.Ajj*VZ~?{lxxx]vPGA5ꫯ歫VѣhѢ<=={UzLr<OOOZ _qlٲ;vѣ,uLw;uaС%j$A|er*<ٓG\yOSmYTn>_UѾ¤Z-zl 0 njj*\]]Y#1pB$$$qΝgֿ}6j֬o(>rǔc<._Pl۶ ٳ'bbbp=%)$''D۫j <GA׮]FVV4 T*Im*_L ,d3;@ͧ)؏G|U^EPfMT*\v4hڵk>>>xѱcG|8AAA -rΝ;LL:uT鏄!CL&&^^^>b m۶2eJ7 trr3W9+՟}ա=+~ P O0^ PWq|TռuVIWWDń ʬJ֭[TF=ׯ[je;Bj٧O>~w-J|jfU#oT+4.wRr֬Y Ν3={aÆJ?**w$::xرJ]vD|%7TRaF|U\_ICVڻa^z%& ߵP例#<<ܤX^=wjj*mmmYR:vdG+ /Jtj1.qqqKVh{!o!|O,YBm*̙3Ÿx9qDZ[[͛R %O*2Wgذa+W*ˋF_ tB///*Y._L;;;>$ϑ 6mڔǏSR1**u0_ RVAEevVV+c}%k\ծ]";;;^|dcTa^gpppB%رcE&^Yc6k֬HHa"_=-W@QRC|e$}cϞ=TT5jT ̙C\bd9shgԨQTT~$DDDPQnܸfHHmmmD|%zWXzňbJRxzM5jTD>Cɓ&&'O&Ξ=\fJFIݻYCsРAuʼ RpϞ=E/J|UidJW%qDFFp*e6u°0iL:f4NJ cJJJ{eSR ;M2pLOO/vO>M???r׮]F _) S`*E_W'yw^֮]\dIg0yfɩ\*֊bڵtrr;7o\bYt:.Yʛ}Hd޼yjlذ!cbbJVff&###hؼys{[)LWB|@w\)O-Yr}>XNFEE… ٴiS`Ϟ=lJNNfhh(iӦ\p!߿_moݺř3gN:Ç?%;c۷oOlӦ W^]&&&&rĉtqq#""iTRWUJBxLujUJ$i3gĔ)Sɓ'Ή'gathٲ%ڵk999}6'OBӡO>=z4:w\W? 
8t/^o~~~ D@@ݡhׯ#66qqq8}44  ѣGBSC#::/ƞ={`ggԬYjHJJʋٳgCbԨQ6oBN,\}g PGwlXqj5NX[c2@J8(w?!CCJE<*Ͱ0Ο?7nܰ޸qgXXJ/ ?؏m6ǑH..\Y믳nݺښ/=*U2c"_W&MW+I=?~~qF>|Çܸq#~(Ջ9ٟ&z*/]29&+l|%\'Pbb"'8phRRD|%_ j"k2hʔ)[WD\]WJ IrA]W+& WRH WRHKK +A|%$ .+A|%$ .&$ &$ WRH WRH J_Ia"I.H J_Ia"I.H R*+$ .ʼn WRH WU0$$ D\d D\dWJ IrA]WJ IrA]_ eJ-I.R+A|%$ .HQ"+K)L$IvA|%+$ Hq"&$ E ̹0$$ XԒ ʼn WRH % 2D\d(+$ Hq"&lj'p deeAVFhѢׯEQ\߹s'Nٳg{Azz:itLqN̆ RrѢERW+Uq*ˆ1c^fپ};aVIÁRQFEE1''L5 \bZ-t‡M?| Z-WXQO:**pРAeJv:;;sevzz:}]`DDDb]U J|e*jӧΝ[.9{,]]]{cȐ!ƍ˥W_eff$33]t:T.}lܸVVV:tYD޽K55l s%Θ1C J|e9zޗ~-p֬Ygٙ=z(ʵ7oEuʵƆcƌ1wߥ O*z;SRRla"_,W}Fggg޼y4p@:::ӼDӱ}lԨQ^~z;vd<ѣGW;uTaӕlذ!۷oo~:kԨAUXtrrbXXX-LW+O&^Bt]3uV(kqEj4.Xb`j4^xB!n۶b{ť?XjU0_W櫂1/+

kԨarw׮]ٶmJ6z5zn[iӆݺu3#""*lzxxpȑU0_W櫧ܿիW+%X/_(\| .zŋ9blْVVV)Q&߿dbo>0۷o(όXݻGV?m^XlU*3#fNJ֯_?Vd+V`FjٲeKرuMիخ&+}]V?0--ua߾}ԬYݺucpp(Fmm۶)hт%`0ۛdlܸQ"^ڵkϷ|ѢEVZN\?~L;;r1t҅3׮]ˉ'oCh"k׮moÆ TT:u*cbb8rHj4;v]FܰaC)LW+?Xn]*_}IrΝT_mEFFd×_~D K<!pIݪU+ݔ)S38趂믛D< Э˗SQ#ͷz+߲۳{nSn]2J|%<_N8-_~cǎرcq) 6  @~n+ HJJ2 ! 5GVVV#33%:O 7ob3gɓ6lXbgHbb"߿_8;; mŋx7-߿?8IEJ|%,_=S9s~~~%cgϞ kj}Yӕ~pǤCNN~JIBBt:]bRn]Xr%/@dt[r *=&gΜw^Gff&N8>ݻwG:u 14i$&M ;;* _,WI5kjի1`ԨQDHtpqq0ŘTRSMbV1x`9r]vgbbkkk1A۶m1l0l۶ Ɩ- IDAT[ \7wh"wd{i222`ccS *+2}=&/Bn:u*zjh4Y9sm_,qqqFL)&P^X[[x2c FϽV(>!!֨_~1yPU_Wg 9?࣏>¼yP^1k,8q¨rhmm ___ƖҌb쌺uVzL^|E899]vz*qu$\-nL?B~_oM\勒"\_W+uA`0@Rk۷owό7ƍ+IݰaߌV;vedd ::޼y3H"((Ȩr)L+R⟾?ϟǧ~cBєJ~۷7s 4@\\_[/lmmqi̝;-[ 6 k֬wyx뭷РAaӦMq`0ĉ4iR*LW+ W֫si0x`y8ϗ_~IEQhvIIITPRQRc^C:88Tc btpp0!AAAjӧO3TŤ^;?x`6j7Ξ=~~~tpp}}}9m4>|0o!CPR=ʕ+٨Q#ذe˖.W>|<`M|%_Y z"\ƍ׿Rw^ѣGM>z5jJҥTTrTT\lY?qD֨Q^^9Bܷo_omK-LW+ >cjQݛ-Z07Sq tp-ثW/={d˖-+XeffŅǏ7x wwrr2j5?D|%_Y ZJ{{{6Bw&\Ο?O+++Μ9B]vɽ3׭[WFFFʊϟ7S˜tpp`jjj+LW+ -[FܵkWhzz:6l:Pyhx̙ *FStvvӧhatl߾=6lXaӶ;w$iɅJ|%0_=ok׮dJJJKVkUe.l֬Q}zͭB_R~w1$$䙛ʚ y>Em j3fLBOOOvSԖ^W+ z*]]]ٺuk>xvvܹEԉ-{r`0pT۷o7l߾w- -cccM>&-"Ν;x[nMWWW^zZza"_,WE5v ֨Q[.J`00**8i$ vFahhh~BVڈ# / S#G,Ǐ3$$¦˂`TTT 0%%[f5xĉ"ׯ J|%_'NՕedٓ]E{nڲYf駟ʤϳCTT\j͍/*:t ʤǏYf={*3f ի̮kܹtuu-VWD|%_YիWٵkW`xxxw:;;Vݹe+ Jĉ ø(GQղaÆ~tYr6lؐZ}QoJMMĉRxɖ-[F'''ZS7odxx8[nENVD|%_/_Nj曌)MD׮]iA 3雤KNNhmmjժqግ-ֈ*!!HEQ8n8zOIIOOq( :bcc9|pښQQQ10,,iӦڵkEnyATtppˍWD|%_HXf /^~~~@z`kkܽ{'OD\\\իcРA5jZhaQuVXKpvvF@@Pvm ;;oF\\p-ԬYÆ È# }A%K`ʕwjժwwwX[[#++ ׯ_ϋɽ( EQ Z)(`i'h"و7\Xi!& ـE 1,Qa̜o 7yzrhp8X,j&2LNg* iRA{aq:Vcv|%O*&+*ym*"%˅D"A6Va&ed2Y, 5mn9\W$\f^G:L `>l/yaa ejvn fض8n7RRvNB\LW+՛+>FQDLri~T3^uvTLW:U?7?Q1`VIENDB`././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/_images/more.png0000644000076500000240000000273614456461441015524 0ustar00runnerstaffPNG  IHDR szzsBIT|dtEXtSoftwarewww.inkscape.org<pIDATXŖ[lUߙٝݶR)޸GBT MVcFYD|GQLLKb$5ATB@*ZJwv/s;nn|9̜93Aqx|ڗvܳB~PpUB)O+yX<sY:h9]@ڪze/ *Fn,Rǧ5" <_Ϫ7r5.mmǧh WYf. 
لrbgO[MfWkO(<9|RMH6a ȃ{f(p%69mOsg- _vW-D B0L)08 lf1;XTrYEoe^=tVN,{lC5:&$[H։'EMҵt\{WO0ԩV/|tGWkTB0nm6OԔr$d '4-X§Uŵݠ> ?${0w$(q@ qnoѿHd ۣX Rf@!Gy0dP98rJmglij@tHMZ|G;X99'@Dw s4ɍKSFPHiQl:ysKoOm:H)ʁƑΎ`aq(|Dpr&wN AMȤ>ND$aJ(i[XtlCN @id6aO$pj{EU_c΋MW0%3(4vz9s;7N tYf<ȔK63ۓ>H2܆D$3!QPuQ݊JW'Dҥ,f@e։U?V!R%lf'$ˤ eS|LI8gMۻ{PPPF\<#p:}nWbb"p'D]]v 0Xd F#t:.\ri5k )) V`JYd }X ئW_}SN^GZZ,YF=w o{{?cƌ^e]>pa%""n6c_O]Ux#ėthz 3f}p-[oJ ..JKK3o{3fgKbظq#n Á V<XnkL&\jo[8p?˗rrr~z@yy9?vzg}֭͛Q^^{w@Lx|4p{p!p"tb믾d*$裏/R +v3<"z!鹣FBii)֯__kjjo~ L6 ;wb9r$>C|yL:vZL0{Ŕ)SO-g̙3Fx@JJ9֗Gvv6g?|~;KDDD>[3z8]n&dK~߹zc'N(}-RSS)j7E]Tz<裨sO^>|ׯGrr2JKK .@znJJ PZZ*- Ǐ`yN_޽_~9F |;^o폍.9yKDDD>j: gכLPmپu:;J{Av[lҥKC\\/K;`HLLh" v9=8҂/Ǻu됒r,\P*p^"""1"^7 Y AnQl۶ yyy0m4ɓVVVݱcnK7@CC0n8inǾ}>V;:m߿?`{JKKԄ'xsAaaaWҞm;^"""qՔPe:ѫ@!)#Ѭ>+((@ee%֯_r<ذaCP1vX,\˗/ǶmP\\%K`ԨQ˥oǮ]gtM={To;vuPVV|G l(J}:l6/^piiixobܸq>#</~wص`5ՊJ@V{;6d%hpՔ&%k֬}݇3g΄)CZ^DNt'""J6p˷ cIE4^"""Yf DDDDx(1QDc%"""KDDDD/8aCi40ѐl2,Z(W^ 2 #Fc444|EEE_,?lĈKDDDtCMM ^|E|XtigۃغO~: wP`%""nl6[B{{;nz駟۞{t QTT$=n2pu!33Z&Lo4dddK/ʕ+i&ttt~_ajC=!裏ǚ5kZ pxǑFI&_ĉK˖-{ʕHIIZٳ{sVEZZ?#F`Xd bccqq%@ )) v,ޒ%Kp5'@ZZrJx٨s0`/x0aTWW,Ƅ k J;~_bg{yΊ+}v[8p~`(++XVL:7nġCp-`ҥصk׀ڦhvPg0+/gYxwbø;dl޼YYYxwGE]]?^;xWw^c0:'|SNž} Grr2vލz ~)V\j_Ƌ/_acըCmm/"EDVU,((bUU(XUU%Ăj7(^y啢(bkk#_^zd2F;DQœ'Or\w w}ܹsmvZ1..NرcѣiӦ\穧N*}#JRlhhYVQՊ۶mYwu]'(?MMMmmmR׭['-bFFOlܹsER)t:]w%=)^s5>E"-D\.666(/sssE%=xbmO 6Q\O,J/_|1***p_ҥKQQQ\|PT!mGyy9v;.BiYBB <˅ѣGkِp\Xz5֯_Zvl6h>ٳpݰZ={6֬Y#=oCyy9t:볍l$''K?~w}g1ym)//YeJӧOGiiiDZxb׿7>O6RL<FZ6k,\.;v IIIC&p ==]e xGVV V(**BVVV[ة r!}z b„ t/~  Lt}v,^Vz[ͱN~ظq#233} 8|k{KT|/nw DDDMVV^u^_}n^^J%v܉#Gp1̝;0yd\.444`Μ9~uV\yXd ΛĎ;q=2,`Pܶm}zNO%%%O~ł˗ؖ.] 
Be˖/pw`ٲeRHo5 7oFmm-L&SP DDD䣺Z*g֭[+V}ꩧ0g\q7ofϞSSz=>Scԩk1|>t F;mmmƻヒ{?qARDDDD ,ڵkp87x#AO L& k֬رcV1f^ۘ;w.j5֭[1gh4deeaʕhoo5jV^n #Gw6CBBt:M;wJ2e j5rssj*8ϕB@ZZFXx16l =yfL>* |ٵ\ǑjUdQs/_.r->˶l"d2fggK,wbJJϋ(/h0DWY믋Δ(>#I#&%%\s(t;O_D^/\.QEqܹw!=~}I1L"(`[{+(XKDDD7.;;{"o<SBK.?<3@Pꫯ/..a0|ֳZRnW(//DZt:iӦ|_\\H "n7*++1vXĉA@ZZ:{ߏɓ'-..֭[}zt].V+, Z<^ݎ 3fΜ:k,#G͞ß,[ w]̛7^{- DDDM vy@\^0keHv鐟x1i$KX|9SPd\/ /yL.wuoV\xCcכAJ4vycժUꪫ=VWXX 6@P ##çz:@֮]+WO>o|9f̘1 /j7=9<87$d2<뮻pC`ʔ)x^m'55ŋԆ)SD 1qDYfo/)Sp>#&&&:cǎ;Q^ޭ[`0`Ĉ}?}jɘn7CZZ-Z[oxkaժU8|0JKK[o>Q]]8r><#뮻 /@O>A}}=Ξ=Jؾ};N<> eeeRPKDDD&]Ȕi/t.tݠ5EP`Ŋx'V #GUW]cbZ=|Xf ֮] &`ܹxW8q"6oތcǎaΜ9\3fgAvvXdff7vڅI&n {9 W^ V#Gꫯѣq-g?n~'Q0fZQYYk@{t܎7: `"ukx1Yk1h(c%""")!h0"/E4^""""h DDD1p+/Qj޳X,Oo (( hZ466BT{f- ", `47lĉ'"nGee%nwB+Fiii3 /Qr,kaATg׃"z(1QDc%"""KDDDD"/E4^""""h DDDDx(1QDc%"""KDDDD"/E4^""""h DDDDx(1QDc%"""KDDDD"/E4^""""h DDDDx(1QDc%"""KDDDDM(=~(p@.CP@P@&cQ4D+QR-X>{=Ay?jA R`Q`%"^K`A40 DĒR IDAT@or\GZZZV2B@o"pߴ6k郧vffIYTJ?wx@o|{sÕH=>4E7,׵{_m牨?xz7kxFZ% par3sc२p֯z+Qϵ@3VVO`5 >ωA^GJJ 1m4477d2رcl0Rt|?nhh@uu5&O,_XKKD[pE ׯf+O}$TVVvI*.ab(ëP($bldBEEJt[|| D$&&`0놰'h/m0% kY{;7zRjLJ[/k4;Wdl0L0ͨ 5P,z\ßO^,Ҏbaֻ5PhUToǬ??J dddv& (-->KLLD\\7= X INL=an1Fc=׈#x{QCPxL&>ͅnGSSL&> ˅x)k4u(kOnOanxs:e빆3̕]CJN X7byyy}Z(y- cbb|>L|Ѐ2h48N477CՆl賮];K ^m=ek_{XNhUNӔBp:hjj¡CPUUr g oo?)z0R7kV=(bǎp\&wULLu DP 992 'N\.n~ g] ޞrb?~CZF;qjͻ?lS677c̘1u#j} GdgYYYp\8{,L&4Yll3YWåw <sg]8xnviF^+N_cP \{{@SzF ŔhhcjZZZ`6QSS# } 4Y_ Ca=o pis}t:Q__o┭Dcd00gffg=pS̻b^GO7]h֮_yz?~zъ|gTl4ICL&رch4Rh k~7oß鏇!^&/fCLLǭV+Je 4ekKK :::PPPQ[#NVb̜9*[O/+볈pC{QFICL&=zvFVz~hKrl!$^V$V+HLLqQˍD'p\qjW#L&<8D.5vN7RcE+w8NG? qc%H3Yrr2DQblƉ'pII P(B QN,bY> &Ј-v!)8q'j _׫Aϴ KhļyQyu܁=O`d:qR\6rT4iҀ! sCAd2!)) F3'c@PsM @VY;xG^ ZVG*& 4e뎎ڵ sΕsڊP"Iu-h2qaF"/($Q82 mG]E N/h gcdnAPG}i} 'ZDX`Bf,>L??d5{%@d2$&&"11Z'OL&j]taxer2 2|UDͷh(a4vrcaMf]nLʌC4݆g=~466U}}f(΋F8/?9nڴ HL@msrFkPWw Vz5 UYHALqmVܹ1/1lC땉ۓCl6g҇> Mk~kCO>(~ibf=0!/F+:EJU۷o=FuI q Qjd5Ę/^w?. 
%?g JGB\,TTmga9*ʌ8󺭻m6h50n.:D䂀d~Zh4̙3xC`K1L8{얖'C_畈zg() w( 9ς 66g/9(`VsNx{nwрg o>*.F#H Z t:&k>3̟?fPr gIFAPtrdsz U(t9Q8wlt]hX™8t{C6r *ڍPѣ-!`vSL鶞J@sYYP]pa/ '2tUo7{x{P(i3ɄrH.+곽%t{zyxg7SՄ 0a\#p5 ?EE={; 0EyEX5ə8Djb 477c̘1pE8"roaǎ̜92 (=|&`E|!IP:۰:H0]YMFB7BDOngqt+N72LȌ^_c"Pvz}6rH\.477Kb &&111HLL^:Tzxr1 }aP([;rҿ 3v>w]"RSp l6|TH\pzS'27sϚ5+z[lh1 43L8n]t G@ǹ:%JBu2vm(PTEqrc| ,d<Z,Wmo؈+Zf r\ JKKaZ" }=E0> d(뾆n u=f=113YiNCjlhZ\LfijC>lZ nG7N6 h:A*Tի477gFg˯GI}lN7Fpkص?OQXXp{P F Z@_)e/% z'S.CףngϞlFUU4'd@۟5F/4~-հ+ HUC [CFȡfj82tjEh7LNՁ:,:/:]C3nn >>WikkCŊm̀R.ÿԂ>g/<Q/}HKxÇGx#e 6TIpJ&U{>T袋lSа=(epjh?u Z|'mݮǚw sOF w#}rJ:+4Z \FX^3f\_ǽKk JKKj^A,X=DCWߘ#==]d2GVn~s*׿40/u3dL-"0Zߊ={:&KZZZ0'MqP+eazl<5hiiY'69FdZ[[ @j#??fh2Ш:;rD ?l4Z=Cp & p8F9|*P5)j0R7NL&Cz7L:,\qqq֫E||kjjlFyy9:::'`φc( 3H݄cX'[RHMIB&TbH6466ULzћ (FGcG!Vþ#aVp|ٳg/Cbș4ll;IJV(OKPn]&l 6\$$%%:::`6a2p r$$$H3K5K E8//^{;}Uy!W(>ʈy7`!ۿcը9݀9Gbubt1 c۸+pןs(R&@=p1:/ǶC-'ફ~{{;^~ef$$$?^шىXn)ÒEh i-\B}ӰlسgTk0>Y }ES  0Yr4 HK,;`Tz"NT0ۑ@\E 81d =hknG6[}p ƍ… a4q3Pqˍx Zd>W\p}VQF~h4,Xx1x 3(Ȣ>^T*ٌ@EiҋDTȧ3;{x GONFBz=N^W]/:CO"7{${W;;I&5ܛἼY]Q t!---L-hkOor Ki賲2XViDtO|a%;^tEC^^Ov"F%jq&EEEXhQX[ 1Fd&UYh- Mӑ`dX~h?YYYȝs%v}}ZK#6tTh ٻ@a-7cFcOk8wס, f3fgu}=瞒^g+1iԦ!/̙3ش}p+8*fw㼼l+vVuo%׿u6mᅬw:6w@R`։W4o{^\n'd3eg山deԁN/޵춼2,```/ DpwohZhZ1Bd2ɓ8|0bccۯ$O/oZ8^+Z[  Q1} ֒ _~LmvXN$T<^{m. _5,n—0dCTp| V=4۷c>CG\tw w(8p~!O1SghCN:!TrK,ˠBʼn'Boܴ,Cf&~q#/+ _ZOA1?b&x~D} 񿰭?qWrw}~jy9:KI9OR&q`۾(p\hkk|Xi_*\.DcbSt:Q]ulaшh87jim0R|>kiilFCCN'v%g. r<^K~헫/& !)Bc _GYp녤M.S'`ksc8 jcJJᾼǿzja\T͂fEIy5-8{9P>M<-NL IDATl-:řڠvЮvܸ>Fw(#p!.. 8x a6QRR˅x){{/ /^Y. (]Bh,:! 
?;pQ#qaiyn(498nv***|6(:\1; sXn:c3k}Ϝ4jL'ڐaTw[h( g4LTJCL&i3ZDCEް$<\~qV`1нnTw>daWd} pX`Czr:o>ƔtT&+N ?i%Fnnn]^\nR݋ٸk^ZS2uXdI c07lWK#:~4\h9 'ԩSAQ$h %T*L>OW+As"L2* xWϡ=@P c Z̐ &șp&@V@ԧB̘`0[׭Ď (r8m6Xϸ &O ɱӄ 0~xx8鱇~8oA֡6*9BXK4ԅNp½o:@FihooǤIpM7᪫:+++pBvmXn |HOO/5XY2&0Ah8ch.^s) j.Qi@#[1qD\}@ΞXJ%.r Y񓃲?S hx"\/yt#wҾ˲dyo۽NH: °$L`x%̼ðaf2dX2Ö{I;/mYd˒TnK^%lcT[t[WwbӒtMtMsOcc#wX~=/"B42(0۾Y*w-pFEQf00~%I:;;޾RўrHo w޽\wuSxݻ`}W##*q!0OFQLN<^HL6%]duEihkk_2u7-@(Ի8/h9qo|Zji̊73 j^x>~'h30m1Pgm6cxG[[UUUX,)?F" F>BSv'АBq#LOpBə0?I^m5zg7/.$eʀCv[-$$T6h_FdE@%Fη?9_Wqi~_ਨ8}:^3c=q7tC9}ߵ:KC8%Cx_"?஻?!G>O?ͯk~XXweN ,Ӭm\O_Z„cj,-_ovm;"9GC6z?Q8N&''x߂drp8 <*r/-vXA)j}voH|۷s}CGGG󍍍<<lݺ|;'?Y)@jd~Ņ$x"&>ڻ$aƎ=M1yCbx1[h_фY~[x奓8:)6ΘEM[aQH$M&>[ПgC.bcx7h/2UQ{ӛfROyC1JEx8AQΟ??M$|F aNN# ɓ')++Ke -J ; ûfQ##&x9֛nN.Roʙ`56NpK/q7yuHIsSaendelxh/Cш#/uEAلkJhnSīfvO?N7$ êUFAz{{9}4χD퐆Rj\@)aTH9VA-5qlݺ[bdz26iX6wYDV~i.2"2GO+7a2LJ\ī,Ox/)jX^׋zH$K/ݻ66B}}=q #"V|C;7 ]im9 ^m r̈́5g &&=pӟtƅkh#Ϡ[l[׈v y讚;!2drN^R,u:4hd2a3 WPkh4R]]Muu5dcǎ1>>ΡCtx^|>n;PyxK9w>h! CE:tn=i6s.l6ٲe @8N͈?>v18q9Yx!8>.]KDA*jg)e "J֯_ɮAw&c}@ӝ;.ișؕ$i^ RR!WfehW^ye^s'wD\..&?f\.W:BJZMRXhiPbfQ##% v<7-`ݫ2ZUUŇ?y:l0<<U4 z5x5mg||dZHϟiDhEAB> j]ȣAk頺+Wyf=Fj\eMCX֔bsQ2j?y{Bē]}|5,f^Y BsiԠjECҐ;4eፏ1:gcxc lQ& `\1KK299qt|[.X Zلɓ'yx~`4&O?J(x[nI T!L^S`qb;.[Y0n_ן ۗ]b-J 8+WeVk^QhBm-ZSMjddlQGN΀*o1ۂR a to0qDts}HUӋ EQUull EQhii.,uK_aZٳgϔ޽殻*J/McHDbQ[tb6FQ۫T;vL+slXq,s\}~oڢܡ͢FFBHнKCF1A2!Ezӗ%hG.t@lE 譃0c:H2A1h|'ؽ{wƒlճ..Di`DSSӜ 466f^UUwߝy PWW~/`***Тh `sZj4zhW##E/xe b#(kҋɇ<4[_ֈtͽSG8u׃[#F&o 7fw$Fkf(Ȳs3*^/o욇7whW##(MGAQ$ɲH`SoJ Mjddhr~M-;ʚ#)$Me)I]/,rÖih]йԟ֣^]Tj5֭[RQQulnZc$$^]o⡇j;2*Kcq[.T8eK=W&x58pH$B{{5@Kgc58)!ج(e :T" P= gRvh}s~0pqV^ͧ?7_x<-aqq^xA ыzkFccc{t)w]\渻ie^o^YJW]fL.D0<ש BP(4%-Vүz~a G{FQ, !p4´>lwKJ"0s%}.Z \Kx?ee (O~N:ž~+sn0Q/Rcp?ݾ/`4Q n/)eO$IMm52R0O{xQĮ}(:JM7ٙ'`4.! 
e.'I$.eܯnؿ?=a`,X݈^?)wy'N[ IH&&^~y/x4&mqر#wX!kRxf9]j3@$-7GhwqZП~B´wKQVx&z] uu{㊎'ƬUĒ2FQ!z +_ʬZ@ts=XD"/.nK'mٲ'pKF][րm!&e˖<\c- Bm_2ǭ?~<]N!C8B< R e)4"y`quzpcgEِ<.q悊 n{m9,cr95W;>cu/_B0cu}5!ںzwѹy0 W]e$H?G ^kE&bF io<ZH$2kcN-x盖L i ,jd7->Siw IDATocفe NDu C/'acuZRz|>+6|LO"p0}X+63,8'( N› r醭->^:@E4`KyJUt:6kb0$5|\|s 9v\(jhR7whV##&x/gMl۶?t-1Z? ?뮻.㾥t^Sa;p?>}ɴf^A;9p]vᘄQ'`28I}}=6 YVutF̷, Fy\ ]QcLm]\8Le^ytܯ* .|J MjdQ^"#CkZ~Ν~[o꯲VZ+.Q/$FlnnٵkWcvӦMEQXerPPM9˹ O)#j\Y<+s:mqa&}w\T1TE`08̱磼<zhW##&xsn1tU0QtSH[oGYyMnۛt^FGG#vBeqXf,JKf+sֆjwb8Ґ;4R9"g*P"bs^L>Ŝ:uX,?^G%82Ρ`xxǓ,JPYlޙEbε}EQ, 9D)5 d&U̺L֧ p^,ꫯ&Hpa cїqؽJRee-7hW##&xg2@BCl@BC([z^Gނduo9_EbAEZ^yp 6HUZ(lP\FQ;yxF2---b)eA!Lj19Be@$ A8FWNl9n,UCؘ ҥGlyH5"|EH '0'*/aNiD)5oY9v8(B4oLAE'pxa`r@3bH NM y"hQݟ+lظHRRh(bԋAΟ?֭[tbh4ƙ >ߍĨ0:TwŅegSBF2݌+`ӡ6`sUMjdQ>hщl TQ+`NN0 ijFz{駰,Ak7u.|K_ ??}yWmt1d/=G<2:=n@OO`'Ob\wM ZfJ+刺sa2a.seiPMjdC!fqOySR!dt[I*FƃjG\4'Tg!Mى_hvZ$ZDϟoZv75TlZcMT㪪*$IJg|8z( zVx!ִܠ͢FF72|`P!xfKB8,Aty͠7kz Ƃ]c]Ph!Ѱoݳ*hvI0o#"z1zkWj떾_N,wZjlmteI&9-s,,k7GhbF&2/:^FHM#o H^60>YP0FZ[ZH$ q\H= eeeEEV)4cҋԗlLde{z\5BNXgk;e"xdCYY679@<:\ B xHWIWyuF?w}7™vu.~_z:(jҋ]С0OHQAvv/_ MEQ1]( |mB!wh:_E=^q!hC>3 G%}C>#e뷑@,G >_o˚5kr4Ql^kgIoSI40sn+0a\_ uX'B퐆\9< ^,+;pf"x[[N#ooxbLjs%^7cq\~"իr&~-&r2c='o}[6bKR(hvڟ\7 ⦛nG?.=^>hW##jQ:iRv-(;u\ڱ#s_VX/y(bJD)fiPnW\>|{ 9B42R_]&X! 
APBtyu(RߌxQ3# ]/b{'K|ls(6?3P sSη}Y;?3 fvk6vZ, uuu|ӟf|||W##E'Q\!Q\ކEPP㈇D8(m(+Dtkւp3׀1~q 7r*·ƑdJ}w_jרBU[·\կ~g>~v}nF|/| <\q>};Aw&x52RlրL nAlznb@6֧Ͼ⪛XBl} Y0`CT8E!`٦_~g?o\[0dp  [l)RjRPj̈́$I~}c| _´Ͽ\y~444W_}uQ(B96\E9lHqΗ'R.L7댈/7( ׾5Nuб Ow#zy>ϲo߾)obFNQËGbAZjYE+vfIقRb>dQ<_xpuץu]޽{3sWptC[[<oy[ԇbBj̈PM7ğ>-hia E1#Pcǎ6n?zqxSzR-[yWعsgZ\np` HH2]=rJD<O%)S\.X<|ah ׶ڶ~$IO9uT}nvꪫPd2]wŗXFVJI-m`v!SGPk K|΂#,w="xCPpNdUѷM鋰֣}mO UFc ;35UԢp8U^:U`0`4שP^}Uv;~]ծm"zx ]Vg_:/®]hii{{ ڗ\SwDYr"p)(m- Ⱦu3 /!2 cA@A^q Me>===<}=0Kkk7oŏUcAux=xt0fˀ[˱1._i r2&n((aЋ$sϱsμ( PXEh>VxNO!w1 -3l_OfZ)ۆe׮]y9jbӧx~ǓןKëvH3 Їعs']vKgm;7|3wپ}{:{o.xxEFVJIF2l> ?MpբjLϺAc ]coM]7NOsQ8\B=gEj6z %tPg[([UUU$ZX`wCA` (׋3s=@hxhrMf4ty.j"BCC]]]455166ƙ3gH&ivnxx OI=w}m6}B)WA+_ |>nf-ł&x5RJWG!629_P^%d};Ѡ7iNRgTo-$,H$_m|؈4^YP" νx ^wu\.$6H̉'rNFz=6nbP6S7gQ`Zdʕ8q8%>SjY`iM&|SNyWWE[lhW#+$xA%ClцE1%(MYcxLnң(N0LXKbv'{_y tl۶Nuc#=q6o(_qwrJFcdGΑ aF.*Att:ijjbhh@ Çik,ԸVKë&j4h\@IQ@nn09Qkfz3r'J/E$e""!7`6ı.cjF&@?6̶^R&>R 2ŸL&v{:믿>}6:kƞ]ر}۬p`o!:r`0HgO/^^9+j۝l vZ ߿BEE~A9~ëG4 ^7'c1B׫QЛ˛w~|Sy[F{:_n@yMEQ0@:,2L&#Ntt__Xl 2 dz)lقd7ih7I84D,cÆ f+=$Ig> TUxPLNuS1MqXGuAB~־(x^^/$O ٳ~4q<,x盇WeÛCȊ7%O*P<G=wH^5z2k $kZAQQ<+s&eY&<2'y=_c8`s| {TIRrY }?T\V+v3nSco~45e|4#u%13CU>6M?:.-pČ9~Q*'@;W]*hW#+w #vb_*m(M-/!y(Q*6.@o~[oϮ].zw-k`ӦMsV僅.8r(p\!Qq?*I$:׃QlsQYYv UUU$%ѱV ~jjjTbll9~~?^wҒdi}:\.\E7HQHs!bPXBd(MV^9 kV!2OڊU](y.-Lj0@3gpIA |ܥҠueiPm&,-\*7X(4 b#!ԉYu!v,`CLtr꬙mdߘ)Uja2uIqit"z]V]n6fø0-QdDYV,mv;v+W222ѣG7!9~K0,<7whWcF6N$=t2,!WmCn;,H;?nO4!  Dm( ."U %RH$d UٌL2Ra L&:78ۮYN, WF6]EAի'ez]L! 
\., 555ƌ9~v{&r2f IDAT/4ZH<JyxugQnADl} ҵR51$Ͼ0t,euQ$b D$ID"F:Yf*\1;===(v-h{+h,HdEA 7M + IY0;bwRSr!mS-JUƮ ܢ ^U݉O]Ϫt9_Z0ЂxH[f:䭷ϩB.رclٲeqP}' e^︇kmNoaO LaFVvnZ,?v!%zG3@^Oee%$ v|sη|U)-W=4R(\dEFeYF!n o A)S5W~{f^ϣָ"qz#bŊ)54 555twwLyyy:\mÛ[r$>000$I8**gdTAD@@l^®u 'MupAYP$P;#(Vr1f` je8@Qm10&v3PhL&D!szcD .}1u QC~8 3Z5ǐb(&'dU(2#=Pvg߽y -NSvh-o N0&F|7] +4FȲ˘wvK;, KFV țnGww~ظf54 Obğ J}r2=l M.M-IcȊjaxt gG΍㴘?L};xa@pQD^dp p:@J0q~m ۦkʹZ<gT*fMK{R( 555\qLNNѣy)y9^cZ0AAI2F|x]Kً̄P] T{^.Y_yZZZx T3(9(svq:XNt]ۜyޢԟ^cZ0AA*AMSn7׹#]c`xA!MFQ9_۾}*`-Q)s^)y&x`ttX,QU5r.{^K(jFwJg9q\gY?`0NZNepZL>+//aWHi(KiWQ@M$ۋ墦P(-f+Qxtנ &xWlB9FG=oa~o~Oݛ> X\fP-twwvM xYa2%~Mb{r7Si Lmd2yߞ<A,sZq& RIV#μ;j` fq U+JOѷ܏ZUKod[`}rS:[MtΩl# w7.LIytR>+k6t:M,#p1~?PhNR{Fib^T]IP =; bLysQIjZD3l''HaXD9suo)2BjXhll)bgΜjjjj R ޥ`4CVJVD^)|zB#!%A$DA1kdlU >OGylN4͉>&TUUy*[)k9[NKK ---szVqBb^ Z%VF'v‘W]x!EګZN@"@ȪV+~:t<)SS]ři6 NlV/M2Y8Obrx +uպsyf2<.bXN!x!x b4,#ν8 :X VlCpJ054I0ЮEpח/Dd?f$P:Я 0/zȠtT98- OFQ]~ҊN25DsTW@XoІRwj\ O3=~#mmm]l-.5(H %5G}/Pξ Z$Vw_@a!Ю2 qGx n'z]Ȇ/00sZ#.j3@Vw "w.Ft$=C^&?iL`|Xw=8镦RRJI)S)^/ yݻvY--ƙ4(H% ^XN(!]5`qrQC U ﬘;.kAȦ.iT3STΧs } q5͌b äh݅*V&F{زyvR OTJҶǼߚV늴4M3"E$jH''vsgFN{j~[b⺚" 3V"K,Tcd˿$rHS . T{ #{x+8;:xkfL&;͵FTYԢTJم|^7_rْRf "b^y$Ѱd9+Kcv"NkEn߁tH; ^ l*m+d2(=z?kjjح342's"H$.??djjmؽPXOO_?HGE E鳺zsiXŌyo*"ߚR K*m4LĈF:`C:1ka$- G92=/bWtVS8'Q t"eGVpݕJK?S~ϡe2LDNbW3*+ٱ9‘#GزFFu# ^M3Ĺ'W]V;oxE@cU'^q!4PʱVbwb1?jޱKq& R17=l.o7\`̻v̓oģ -H ym~';/ 0uh<vvT#[n5M(**?ȹ`(D4rWIUϼ&dk&=4FlA>;GڃdBg֭\yRiQJsJbp\\.' jk9'a0?5(HEpc0=i)ďP~\5y DDZ !7$+1+έWrï^ϫSa&MdFA.|vx}Em۠xlqMNbG 7Hgq0>6Ɖ'L4N-$vW =byaFFFb8pt}[\3iPJrn/{`xAˠA%t(&d!Z24R1KY'H@Mv]z>h$ؿ[BsS# ZRU3z&́#G/:Αc Ni)TJTJ3ۊ ٶm7|3͌:̙3RcbI{!3 ek%zz:b*%F$ǐviat#^ƒGQ`2j_L ι$P,jkky9q}3L㱙ѓ?~IL@͕[6pXÓX#=5*Vļ$ (:ҰژL&jkkG?P| 5\!x e] ^ՊjCf.JO }.%CUD d`q ;ж8sEAj=E@]J ( Nde"1E6%v1)7|37|3. -RjQNcK+AI^|yeGJˡ{w>jkkimm%J룫jB`0??bIW1p%1f5H e}}E\MyDSzH#N>IU-<=Ρo>;>%  Y$W_w3+4B@7y-&SY77EPûjDSSD"_ָH$(ldPR? 
zݐ_:EħV/ڎ_ERwoב60rUHՂʽV;Б`=WqVٟS_U˺~X+;nۅ~Y^kKVBRP) "xgp8hmm{>'7o`veAQRfIӤiRE_=9 sz9ҋR4Ŝs!n~__λnTUW_a0AA*BGZ= zDo]YQјlhR!< -[H)BUuΝ;7K͓jbX,8V+&d2I,.jkk %s%Yvg]aO>O/~'/AA kPJ4V(|kثr97#ς3r{i011ힵ=͒L&~)%4" rST^l6^,ak2%"qkk+DسgNp8LMMMզ,Y) s1L&e~O}SDQoc=u]W|#477O{wY5(!S(ZFL IDATg_aW7\7C8 IЮ|wa<|`wAƦ2LY/_'䪊TB [M:03:oMuuǎC͛Wd 3dYbH$o4FJLo.g׿u>/|믿Os=tuu f?Ns]w /9u>o}(' k0/$VR̵!ԗJ@xʝ-;Fg^EL#ad ?c_~am %d4===lݺuL&S0z鿥(2KL%tRc%6!'(멯's)4M㥗^ʧ<G%ExKAK{1kk| _'fpa^|Ζe_nנ QbES/? s%u 1z nAnz> g'43grlOg21I6@: ٴyiڢl&r7יbjbgXVTU]9墱a6l@$_/H--\͇BS+x4{Ea׮]Kson[`__GY kP!@F!Z/x +*ׂ}d^𿵴4X!;l~bv00zĬMB\egVjZPiL `0H0$JFرcqEP-UrSDӴ|^4555tvvιω'я~{z)-2 ~]'و] gΘbx\M 9'z##'M&󥚅ÉfaZ/>rH&LL&{\U+2Je9@aD"eE*keK뗕PUX,vX,FmmafE [l!N/0AA*I7d5mBȥ52ѓLYWc-u'WQ:?oE酣F‹J)d2 N{Tt. t:Mg'mdQF v4G0B,xy $ (]]]H) ݜNToi!#bsN~r9zh}n& ]sұck^!x RoYGxDD~RHzۛ9V&ʼnv(b 63(cgg1 YՂ8J&rfFi:T鿋J|?p$Ū$09xg;v\v[pFkk+D"vލ墮P(tYD'NiXJ]Bʯ \s ]w̻6/2_g>~a>rq>;;ՏrE Q yRLv[h)2tE]Ⱦo}{! YO^^R|T*E2d2cttDƍidY,huju:TUU^ >@+!Ʀ2-:Oi︁۷z;˙TWWS]]M&!DZcBa|>_tĈ};gF4媫駟/d;}E.5663}+z~aye0AA*IDn4a!F{Q_z|yfL/TZkMWҲX,h)$lsa2E2!|՛ הz 򗿜)mG~~Ya<3mLCC LLLD8tf9mXH-ZޥiZQz衂) >m7p/"`^y(W\3oÌ*F w#_G־aI,en+AԁŦXVnTv6@ v!5`FDC+SūBu)vY/v H'ORUUE8pT=X4fl kPrEbx S 6S7ͳ,S .Еr^DˈAx_@+&+Gyk罈7l,;+}Jx'5N3-S<7Mƃ>H?CCC,䬗.6 3kdZٛIĩ p}(ǟA2} YafX-a$v*7xcAobRsxK)xKyQ\iPrk JGDox}y&[cgAfBo5Azr"gF׿g&5(Ʀ8o?8V,KYEnD:\;B gRGr .Rdz~5LQWW$hGKu8Ek]JR#+H0 NX0Q4+=V1ԍG`4B6ArRHxa㪟L5B\w~_Afu )R(8|:rW󭡂R{B H)KoI{{E޾ ^p8L(Rpo4C,H%i=VMHRXqat!HQcc6"+&i+Ӄ&, KagNyi I6=Yr:Azs2 0ψrM #-Yj EQ4XӧO}x< ^mJ-:ג5C~JYXdyf.4 W'er Y(̓kȽIN?tW?Ƌ}8b/tZR]Jr|x [\ kPJIiuL&By (2gY9ۭ`ܢd8d[^9^=`2EirCQD $}[sѷ!Ĺ=}WzRRBxx'uNTӎUW|qP(Xk:|d?UDt?I7vrV֟ wAz0WnPq?yRUZjkk"rq:;; \~ӕk+->5rsWKr.Z,<ϜkW z.#jNYE3Ei&2nOqF%:,Tv㋑|#)J{E!ģ4i!P2ozׂE#.3g{g< gF\V6@QUvÅ_".o}[<3<໙JkMer1h޲г(GEHg1 8XW\tww裏[ !@uqq+'xJGl'x;vL< NÇFڵkI@4,ߏ'ňD"?~`0H8οo)uJC)Y->5XLlvN;N%0LcV%,yǚI`#zвDOw?Z kq1.َ,΢|En0a~CJ~I_/mogǘ̂I\N'#u7ƒq33700mmm$3ɌâsDm73ِRbJ2q|7UjF6o\f%#&zD"9owjj }҂ˆ.#[|i tx\'N"XTv:l6}c~=X;sLٯ05X\I:'Q~ ѷ)@>W]a!ξ܊zxҭcFVIN%/>6>buR[ݟlSNNVI5,&Yd2]M[wy 
gz8dVMؐ+!Ib6Id$Qp%7tSG\R*4551>>N$^f}-yR5rxK!x ?vVDQYeVl\$fv*',EvwXArLi~#$P;ѯy|\g};`s/_j0(͋YQp:쌍H$x`fR?0ȡ#d]aƓ̪#;N~v@D:tdpp0kmb c&̐zPY. ͍yEhۻ/zٰaD"zzza~:lln7w}w'r%[- U_6W>Ls8Bjd}ͤ33" bU`:_@ޟox LQRLz{{y}&nMLjt`uHGz'e7G/$u2e8rBAcc#{Os8B8fs[g#tQc3:RR'J*iƥ972bFӭdY-K2v2:e8!4՝YuRhPUp8L8&HFBJu:ֈRbubE̕$x:!FN"z~HMim\ C@tI9S1ԅ9\H!ر~>I; "?]ν9D{ 9$nEoFTa:Kfl#ѥ];|AbΒa&Iރ=th&P,f4I2;KcŢh#p80Hݻwv ÄB%G!+R7"8Riwޱld$"95֜)5Uy!"j+rh6|3^YϮ[ok'}Hi#=E6\[C&Ә^+:u jj#3̥.dY^ guUNB:z59z~BcYv9e$Ζ0sXjG-[TWWdbq1BpϷ1+هHi(=4XROɂcYnth(\־~FD܃l6Rb"vks.[:a ЖY7|ݤԑCCC|_aU;Uܱc/?u&"=l}9u:TEA"c۫lM9l6@CCD":l/t^j4) Riފ'ؼ]#C[B94bjir@z,N7B%bUA u#!Țۊ3, l޳Ql>9gz MiGM(w:u1^R9 mWӷǜ;-;t(S6n7nD"Z.5CKEx+ek/-ً2b"vQVLZo_./@gN#~nŚ^˟S)%gϞƦ x+# d0[8܊ rh, <_zζTU%*xhm9XVZZZhnnfllsαo>v;vdRg`Dxq6 楒D`%Eg^S=re+|pՃL6D=-Kf 7 νylCLo|!YW!7|3OWk9rr9f3C65yo#%rY ֖{|χ#ߟ/*rἷj]RNo1TGVK']1 Zj@zar1Y73ַ2`v!Ao4}[0"zM"Zsd`o5PUu\%0.z(&oo}+0ZO޹0LՑffqQce EkDxq6 D ݈e3]0CʹSHA,ZJ7PNtJw 1~>f;fdAK*W|3Dqglb}QlwU2)y3ttt֖}WzyoߕlԔպ !x 楒"2K)kt{Յ7&Fhbٟ+a=5<,"p>_tNSAQ|> y[Nl&Ϙa?o3[xjZ)T@ @  N8}t۷S<E#[gsR_JTX-wt}c9m{ZJ`,h bw ^# k Wp&U;T_p嶽ZJBb"o`X޾,.e6- c^y$[ꟻBf I@r4畻\<h}oE9 LcIV IDAT 7}ך"7͒>BE91a,)[l]6_w뇐R=r΅ǃ㡣"'Np8Luu`9Dx->4u!ɚ?r'HՊr)Sϓmo*XRYOۑ6$9jiR(kϱHEkjs)Jmm-LMMF9~E޾.ҜPJ-xbMyY"p*'E}骹PVJ_$tÂrZ}d,+$, ͘34Zdm fuHo\550 *5w)vZ[[iiiaddH$ž={p\n 5|xKq6 楒oG<Bz{ t CȆkb1~~z LA[_yǗçf'FtI͇l NP-s߅p""&ѫڑ k 0[.ڮ:w{Rg)ڮ+d2D8~8`p8LUU՜mz[N-.5rG+͚k6ŜnH=gU|_{m̉"K6Ndzsqw/1zj[FQ$ѯ| |g122?0="j-$M~;8d޽|c[MFiol6S__O}}=xH$‘#GPU5mfڋ50ΦT]^Y{%ܩaz@]}C5]pRjʢIfqoΆŋ]@c;Fـk:B&Qμz++4c|(1 Nn53-~{.zkٌ9;2l&N淫 Yȹq yb36,VWOped$@KmwP)) hTcZinnq^u"ccce|pi(=4\ esә#(H#D"C}qmh|m"=ԁb랲y,&ҙE5-+i6,2]'&" S; K:r@*v V@vS[[f#C  W|RF`^*MX?ҷ ٖ0/";!S|#)qN㳛edsftL&VOI-Ȃ. 
ă⎽Ґ"5&ڦͻ <68'56 :*fs{cHiXmJ/p8L" Յ2t:Wm#[zi0/"Wu H=sG5b,j.$x"g8_?lw#UE1/%??]qY}=AI9ø(L @6hXwJvSS%PL$x"MA;]ꢨr*-ZmᠭVD"޽۝-\5"RI"ܢKFOW+W>Hf;>"wn ?&k2!=GBsT1ގvŻWw0#0<lT`:Ʈ;\Ro)jd2b1Ξ=˱cBz/Fg`A*S^BH2S^l/4Xv666s]*v ٫c o9vB Eڍʽ]e b& |M,ԧ>/<10bh2Mmg'-oy˒n&VJNLCC LLLD8x f9f[De?Çg`^AVFd (=?@,RRRmn"ٗ25LP=W !J[g熖L&r컈NгρɊWwJ}g?I%(,庺nn7 D8y$~p8L XB7Çg`^օpX$+&3S*;[Dʹ4@Ȗߝ7!/XB$')o"zk쫘,)JY TK嶭( PP(D2$Cggg~۽DxRIbUA} / E|8CdGĩQr>KZyQL(Ǿt@DLƐM7]dSaABnSØ:uo5X3"PI'JE1vFKK ͌q9݋ᠮf98RI#0|dW9cĹ}dsk?jF ME%z1Йi19 5'Z&DdSHH4QR:]j{JCX޹B|lܸ~"yl-=]G8B+&wyH #]5yQ&-nD<Ⱦ񢴳\Jr1؅8brU5ާMF="zl }[BD<碗Dj T엘JXVI'VmD]]uuuLNND8z(BB7ñcMYae Nŕ+Z(]Or ThcE{o#n, A:۵8odwń @z 99\D7B6&K˲Bu4TL}}[)w餣+%N/ZK)f# !x 楒R;\=Yp8yFzQ57Ɯ`1ڋKdCH&Cs  @nGOOCݹm?zhf8u`>LfΛd#DӴyEl*"NNjl63>>ѣGQUzpYJ(tX,Fww7:Äa<ϼ1R!x oQjź/D#D o{iPkF$ǐb71|هl}Uڈ~ 1E׊ઙ"[nAنȬ޽~{bu\Xfl7 O]]a.\@YY`37Ӈ7{l4ŰlTUUEUU?~6`gmg<4Y|+~W}"=B??_I:n_9nHDjbcۼP== RO#ZPc{Aǚ,B:ܽt WY )#E\x~iڢnekf>h4Jyy9v,ˊEQ(++q0Gv "fAJW~ˏ& ===\pOEE~꼘WS,H'd%Ž,߉ou!@bTGadվiOgVKMj[ ru:bCˢ_"j# EuW!x|:# 2bx߇}UbWJI:^ԭ LΰN;J;ehhh4ʞ={0$h͢0>>Ç)(L&I$xY理dvhhh8NsoɅ1_K y6}UU qz{{9R)l6۬u4݌oD CzC`uCr ݌,۾"UDr l ՅВ+bVjXFд===Sƫ*t؅6ex溮3111;QRWWGww7(26 ⵒO\, !O}Sf߾}<<<|󟧺zz11ɂ͗N_*4 W:Wŷ?̎;f`bWQg|s-wX5W~13V2dQ]^3V((G _@f|1D#SyZp{0fX\4SNM-KҌc\02:9;]ܮIۏůPu"'8?GۉTvY+8 }d\ K7m68O?4}}}7cD1 -:ĦMQSS p/RZZ:.WsNܾ/g%%%LLLGydIM&"GNկ~y1t !TtCf|v~bа<݇Rp Cz̟aTYnN,ݖ݁@A|dQg.?ƶD4pk:S,h눅^ObxxAׂt: 0N(ʔn4?QjQ(ahxXkᘆb{pWvl!%+ooLoo/vk_{]t<-o'?5a-a ^Y;AKIQ$Hb6b ؝b捶e`ϥiqPNpql1rc(9 J*UX)=Ÿ _-(~Nǰ WCηEE 7[K8HhI O28-)vϴVWxd8۬a*pɔ[~EEu wQK9:x'p8~,n~v{K%HvK`v WpW-g^믟&9NbquuTq':O\$e~Q 234YxsIrڟ|f7OOϹ]OOy"5Y|2qVZ)"(@QUH a9Qu3ˏ =g}`bpȂ"ʔ[M;;kZQڞGM!d/}ct8s;fGbimIbim)dSGQΞbk!.BcK&8vr "PZ;{(tz{{Eť<4xJąC(\[[Kuu5CCCSAn@w+xL ^gˮu*2EQy饗^25Y|k>TY8grǦtE!baN8 , [\Oo{3111~7`/| 8 IDATϗ~)xM$_D l ˵`jA8T tAlyۡ6bbb @! 
wmuY'zy: T` aRhwrb(g^x'k]N' q0VP(DyyyV|9͠mV w>ݻ>ѱab ^h"p1X5M[[0¤@ ˨bA( BD*V/5{BV\f1t٢Y}(C-Q 5jCnzc{uN<#<m^rsNpJHpPnmꫯrOߢu0NJX /|o7ɳ>` Ϝ>< a?CsvNFlKD4G }(6x}Lz<<Tۅ 8w**o'+8Rֲ&scQ' K,5V4\)]o!Țo|?i/M2 zO@|!Sވ]Y܀Q{J˓ md4Fӛ;;:Yw=mnmA`?Ybɼ\I*yaZ3-NwዶqE,V !Jp*{e_jR]]MUUÄa^z%~?Pe|Ґgޕћ &O'h+PXX)D+X8"1AJO ‡]EEV^)#v>}"M/ `&+>,Y{Ǽfv7D#c<?{uWY\G q)D4,9[U tO-+`фf_-w~Bq9uV`0HyyaXxҐOA㫁)xMd[x+0LDf Y=7,矜D}dnaŔ8v"ڃrƜ*3:AfێBj6XEr(p#Gv P!mf,j[x*G;]Εrkd+M81ĕc5.16x^y!fZ]Hw x+U^t-=%xv;TtZCWR\RbO2$ܙ<"҅8;ޑqa gQ&$?}=o(DSpᮝu˽-[LBEE`ә|iwz7dAVBNZ+n_؊}8;-ŖH0<˺ K9Wr?;ZˁaCiInFʲl"¯B:6g[##¯/xxX,;5~z]v͖X,TUU !sAܷV\ `%5Y)J)Z|Y"vΔ[B0⧟@D{2 HD2t\puV!F..[YbMR)m7($ұnr_DTbƊ 8bkIWQto~{!wuuu֗l=B)..&L$I*++'W:hm7;dW܊D"Ι3gf RnUfxMu#1jIEV߂Q=5@`߅Cmsn&ƙ!^n$:2LK7{2&(rY)-soPSSbUE kF^FZsZ2˽>Ĝh[>\(ׁUnS__>!.^Hii)PǓMiڒL)x]-G;d͆b,<,W\YcE&F /ti %7^4cޏV/S[WZ]+0>>K/֭[s9U!g}c 1އϣkIZ|B̬+%(~MΟE@-a2tIbu7Z"/  9ڑI1fq2)!h_uxPP #᪰g pgBݐFUU䦛nbݤi^}U=J+fsm1-kdIl$9?DWUg: #n(X8{?tPZ~/h۟u]7_O|{wƒeFu_2@2ǨZΫ/.PV g85=FKK D"v6V颼VM TEaOɧxwgiXsm !zx^Rݴpy*++nܥ!Y5YPNuUihc0(V1~Y\?v;w|[Oss(@ Ak'8|Nk\N#3hV@q='`lL7,FD" G{!prǯoڇc|Nm[Ž;f'L/{W[dKl6jkkahh.TTTRfGdAmtֺozD9]#Նt@O=t_tm.?ǸkB0)v.>kt OP^Xzoqx0 θ28|믂D!C ^? #3U'^"rd2<}0{x3cz2_$pA9ʧwII %%%b10N`0n= ^0L o0ɂs+JlcHr1+H[b 3!]P-Bh|!^UnKԏ3eݥo̤V?z[~ \o,Zr5E9LeSQ'Jj^G.q[" [۸-/?v;^lťt\lPsoDp\lڴ*8@__mmm )(~a\[xayfКɂZ89H̴OetI$uun#D+gUdAK2cMH'J\pIQ.>71ցr9Ӌb.~gIii[STm?M ]\k1)Jh'Nƭ@V{p7p_=s!9B__߬ $/3<& Oq(5bM̚j8Z Hg T:j!vBr fpIMDzhaj*ZtX߿]TV*ܻچMu ǏfԜ5M2 )du!<{ !Ak6=۶mcӦM:#XܰsҰL ov0-&K"i]Ydv'YI.%ȒF< V[ƗSii˖پk\ZUm+m&5@<|_ՠs--|#IRKڥfc߾}lݺ-[ప:,,ngXhYx͠bGCX"b"F)0t[0ZtT^c ԡ(͎J1϶sO5YT [ƌ 6V'|;XX]@[WT_,V^EO_x|N+\3(l۶mr]KގۈaY)//!eSզ'O0^Nrq4{֣ȑ#||h#,W"twws)VT%7zM훂<lkZ q!F;YI+ȵ!_ JI݋fWc1G9'cm* C?&.I1mhX{ -S\^dIXgHoqnYqX#1e$&m)@nuzZT*Źc/j ⊍=mfc9BjQ;1|ߠ_:̿c|F[ 0޼]]Q$("C=Ȋ; w3FAD"apwv1,AACbk KՕ#2:Mq8͎z(vιaK>0NRbI$twwsTU% RQQ1#-j\2yDMD' _=,|ZdN /s2p$W#DF'Fsӯ,#{AK]8!ED{fm#ϱ}3USmgeWi_󃃃~b%N[QKAI[U zҟ30<ʉϷ sc1M3L p8[n^|EΜ9T۹.]ׯ9+lL ɢ;շ we?XuwdΊ_R3vL. 
Aaʶ摗Ͼ|sJr<iU??%%WWfx%q ]YdX{nhD( oՊi ۨLMELPVVFYYa=qkqA14 Lk("xet1N:7 #-6T Ղd,vm D AO!Fڑi%XBb>'8v|_!j{\t7 łb:r`1@);_?~@s֭;J$߸n2&RPP@SS F:`0i ƥ+`(ۚ/W=1:?wK?f߾}tyZo||_{#^r ~U!fh;B(,_6J=>h;L / RQ>pe՜wTeZI_h'I;F,7* IDATo7(/u|cs1Z,Buuun ZL60-&/B0(CTz܂tW=C&I>5.U"!QMgc~ミ]4]PTQ2em\9Y}IE4a04dAj~y $$ǐdَL. tq2[8v&V  ohm6۷o'LMss3LUokȬ<5Y|~XCciuD?oԪvɿsOdJK@ܔ!~[ֱؕ( ԏAO#-vDqouo+(^HqDA 1RŁ,߁-s!*dnd.,+Ê϶X~yܕ:X+;]pvꨩapp..^Hii)`Ǔ,/oY{"MlBpy"Н,pBA9HAO oH(n}A;Wƛߕ=ɤ]εUe-#EQNp2K㲩9x'k׮kҰ3&L˹ub $y|B!~UXM` ^E|FOٓHH!ՙi+W1ty+B u<}j3bWO:M'#`s!jsճkfddtt#b7g9\6;%"RJRdD"1xJ,;f>+_ܶ|>|>6msL -}0L o0ɢlt$}!dhZ"В`s?@lJ&oaâ(,fHF6f<&{gP 3Z1D|אD}rz RaFdKDI? \H$>dgCMfعDՊnnp8x<v;qTUUI)yꩧ %;_GDSQdiԞv:;;ٵk^]INfQ[[Kuu5CCCAn<]M7 GdQ6drBڰ o~ rq6Q{;AXnя~/}K}!) M9>Q>OI;ɂ׬?CB;͍忠ylWuę( gLLd|- 7\|dƪZ{0miƼ"VJɡCHR躎(8)ڤmR:4M^::zzzhmm****ƕ׍w] gRIip67}T!Wn 3~?"`u5)qjo`d {*2 2"\{^0}\ZKK Ÿ\mj"ZQ Յr~ 2ld,)HQ(--%K/"ã F7q67gzǹdYr՞[›9]UU*++`llp8̫! QRR2u-YryDMD ޵{ ?mZut3~e;Z㲐տGSnm"g;;ǝ"zct@1jF()'IG=oi5 c +Nw/+{{n_q ba&}CBaMcT{Ci%wzR,СC288Huu5~?sAvtjBזb-`syx^^/¹shf4Lk(b](Oώ8 5rIEzQO!݁Rc g7#Nzˏ>Os A)zi=/ɽXfX~ 7Vaݸ(] e4\ ,KNra۩!H܌bHEi͛ :Jvcfb٨6<A’Dchi' vv\6 uE7tHkU#+vsWR a~Mh(N BI|8vwO}s GbZc ""o{/=ASv έ!,"qvI?pV S. &/w=Xx#v!zg H֌5Ȳ w- eM80{cHW1:hՃu}+`ҽ Hw܋(-@Tp@KdJ7 ٟqg(# TWlcd,Ɓn7fR>|6NcUĚ1 ]NN"–-[g?n'r~V[~}-ע3mO]UU***f:tBBT^hSfS,)x[?Aiy*X16 @Y-ǸLm1bbgͣ{OUioՒZjuҳcc0&B 'I.$K8C08%!\B% %x***8v)=X |5tB[NĝNz칾}- w6줹p8㡪jYg7cj.sjjfZ[[ӧjŔP" YTN-_tTR ߾#Q8by1PdԚ(VQv7! A1/a,ܱYĦN{ "`JGPwd?Vi*O#FP%-crh4ߟjwlE`@[hx[fo 툑AگH$kT"z]Y~3>?mL?8FHCo o`۶mS6[SWWGmm-===҂vf^ ) kz(}o̵Z-^ǃ,|̙3ןD8ȀzԽ_߯3_d_^ :v 'v[N!lcYQXW$byTIj&_@ |hEa3E@ћ 7L_H̞->ڂ$ TÚp;"97I|駤, }H !,_{l\$Ipvx=*F>a>1D:ݸQ}|*vs!zvllp8ѣG)//z-ķm?Bnަ(x,fnNU/?*A{66-ϑOEd[;::>Or36dUca~(vm@9ZR9ůA58;]3L!DGP˦*j)ݰL[^CJE9܊%L?xkw r;u3IQ mZٱc@p8ӧ)))RQQ1;&yfEjg^}{߲|×enYw8w.2^) FZ oAUU *"[1yÿ cQsn#~ d0Ҁv]|.Y".ZMgAA@U4ϭ@܏i"H$6j)EUKԕGZQ N:]Ɩz6 444ꢩf<իRZs34,u+m<~}k,*f_`ކbL!W݉ƾŃRZDCC 0#Ht8KX,L&8V! 
ILb;CAat2^vWEdttP(]"~W,Ռ&E& N<'?yAE9eD"$چJQY&x7η٢v/PH<c z+ʾҹkYQXl-D"VҢ,XRo#ŐZe[Pׇ^0FV+!kM}` ^B+JiE[0v_[,΄`LD,@*Ao(!S2 ?wn  vv;=$qIl6 wS`i;ûe>UUU\tiQ'>{pjPEd3\6"2mhd'sPe˛NT]}ss5 $ $ &[O xzQbMދ (bx$# HZTk J=v/dYf J\ ]4IOthEo|^?Ph_زe \t FtZכ՞EYҘ+Rm>{1~ /`oEb=^e9 VtUdܿA"}g'MTR:HM6tZ UԦ=~O>~s~p;ޱO"tK['qB[EI7D@#/'\7EUٶP3JuZZ'^xrUVC85"_Zx--//G$z{{<ۋ97 w^2E[dA6Sw-k 8y${)vt;[ٖ-6tg+z|PBBwU_7rF0#J q ꥟?Wy d?BYTPW.z^s6',rcZAVyR Drk^j76bT+DQrQ]]PPRR]OVSt4t:8>v yg?9/~}sWePEd- ^pHg*`ȉ0ދx((-5oy[طonHD*=ݏY+`0\Ե#{ݶ^O5D Djf0wN=s8\($I$I _Yh?`HQ㮩\kM===fyں3ȂisV* ^^D&P/:#hRQJnb7])rY>;QUe`c ^Uҧ}S1Nk$Qu Цdz5{LASS^7[83=oC퍜 v ?ᴗ77YRQdY԰g***W^vhc)`X,f8 Pm3:FNqD,(x^( X,^oByĞ3>-… }89ͨu[|/ǣ>&Nk-E<v(;ޅZkn ra bcji=H)N9Tދ8xaUkLGvmԏg#œng޽LLLѣG)//ax;O:KdRdbbQVPNu缍2ZXX,F$n@_̅^^M*^۫K>zmY***`llP(+כ\6cw-T[<䟢`bBEqM |OzjfA)i*%o"6j-(u />8~Ԅ>FQTx&X7x_oӛQ<70oZuDv]Za;_L?, c6TEA$TdZ\So Wlfǎ9y$VGJk&j(>p(###FlemGF'rzt:FRT:,i0b>13Q;]ЖjeΝDQ:::8}4fCEE&av1»6(hEq=NRL/ 'E"^"w e*̅քvh|78|+$f,CN&tZR*`|#g֭u>0J _l}3M#(;]G+fgs:ȼ&$hHK_bHB nJ]]={єht:-,g_EbgΜ2Xge Ea,g0dz.y] Aع"`c4ihhESS---FZû0&E#ZdA `c+/藑s:-A" 6PNZQ6R)T5E"i+jY ?aTG~G/ zj_\3+|v\g͖b 5!QJ,4FGq8.NڙJ$^b1dYF,Цb L"*( 0>1Ao7㎆emKA(Ј"JH:Γ8n!FvחmdQZZ*;].bJC( " 2DQb3qM~7b鶱/_[mPUUi$Tt4Dm0}._0p%-,SEĶ x$H$… `0s=.0L`I*}u3ۿ;7FqF! 2K-q'DGvDQb2 Et l"bS[jWޑ(8NX,ƅ x<9xb2. Zx#SEf%7 DhiiZ߾|}/T j > e3 ԎXT V1uHLFLԥE$kzDjVo`0ȑ#Gp\|Μk\or˾U GSS$qIl6^҂xb׽ޥz+Y݄,5 sE1 ȉy Bj J jdU,2EyPu<R1ԲYeʕezb6-d 9|^AUU?z.5HRv"I0$I32Et~.˿C75Cx. 
XG,C$cM" *gJ011Aiiil#I[nɅ tx< 2o֔Dx8d2x^ 0ٳgwlLj͍iTcfuzc#)O1""ZA$J1q?y􉧸ks㠤Z"`p@*0ޅRRANi޲S Y0ڵI8z(9ۂʲяfe?s&&&1l)x<wPZZ:Ía]SYY-taJp=.w~t~^/l#tSH1aadYPDQn0Z[[ioo{zF6CJbPvԽWr8j6oZz衇xg߯/%Zȹ1-w[Woy3Wu¯KƑBҶd%yQEyxT6b[We?lfΝB'Nn|>7-"066Dg[K+R ՚W$I\.Jwkootxv`X5(FxQ`lٲ.s3D=s > G:DHFAԠly>Z/W_F.}&ڿjѪb'OB2Cu!\}߬QY} [F{&Z#JN^7UU}(H` Hىh [L")@\q7r}s;o(Ӟј}9{,Oee咄P$'dxd.Q Ñ$R#Jxon YT[lQVVFYYa?Nii)ݾm](j{)9!-f=RBL(}e|=ٰF:*Je*ך+?0ލj*%hD0 '3'^I!D"?ϗ-Rh3x72{+[?+UDڡBk23F8Pp8>Fs%VԲ- H `C}099SO=Żߣgp [&IdEokJw6z=@Zzzzhmm6p\ڢsbIRQEdÉH*ґL1"F$Jӝ[޸e$gYt+ZfXؽ{7㴶K/!T27PU=jw?_̣(h4,˔|YV[ГNM]XJG_?Zݎ`G@r+"o~9>f2Jf΅LJh=7RE1:00@0$ vzs nNPU!dl|!`IXJp ezDQDdE94Mp8L0rv[<է8B4l$86@:Y[T URKwqK?'kDŽQ͞ 4jal|,|NwT%5݈Wq؉Fb1" L&\V: @ee%9z(xlZb-Dx9u6 .Y{vfCO18(C&fJ%uUtGpZsFes7aTa4Z-Id)!srѬhM%0 n'$Ȟ[М>$&ufl6D-J5/SUUzmz+}6 ȱn+^Á`||v>Lee%>/+<#$UPTUQ99j<^bCݜ fgɔ=E;;|?͆f#yW0x^mr=YOéTjm\ o@ZdQlk1l@LGxgj(*p L1XfLTUk$8Vu&`|YX);)yAk!JLı$IVzlц:w;Hbi9UE~σҭg[ZZ|\E"<5©( *z!Svioo_h4b69I"LH66+^"q+/o ;[H(noCn[>qm4ٲe ~.\Bss3gJAf]ʺ3GEOqT,F;zf5xoFl{1]%@M@Oh&&Mϑu; \`6+5YL&o5T)++K?ۃUAjDEU7U8FGn_p& lPK(ی墷7Ӛ rYHƣ "TR)F1(Zvwv17e% +'7X(RjRk\n3"tttpw,i{r r] ·VC__PVjjjV-_9zm(x,zeJ bw<ѡ^OOZ[[immvS!jbacd|(6<}{'ٺMN?rSVf;R={] %ثXK+(5Ш}Yeob!| 餪B$ X(SJChpGȂl&@suO_[hN1H:HmF ZZZ")^x>MY>gWxGPLZF3#_fM#3R T3?EDyHm{;JfCP7oؿ?###SM@{.`G 9o {pE**++`0Hmm-n{ʘ(/x$d%%lȜ?qDmKx/*Ŧ\l?G.h[曳f]Wmt !+s.!+ 2'NfznbZ( " o>u%b6Z Tk ;> w|rkf={0::Jss3zgDnVQQ/h1t \[oѣllv\S_浪9hN7]x#u#H-/΁P(&2l4WAGj-Jדq1(//ghhVڦ(|_o|# ޱ8mײ9H\ [/LY?mmmA<^7)1.ṃ vvX 'I޽N.\Nt: .K\1pGȂl& OPsE\~-Վk1޲fqijjJfB$ z/ޢǠ7x-y)c1)AP50دBsI{^+pp 7044x5qB0#uA5!^Aiˋ͐vvhh`0H{{;Zz=wfsEzTU[n!7kAA58)݅w1%V;3o /".ڼN!|aŹ\t~^/S|9WDx' ELUU>%{wsZJiooǓ׋bo( "`#޹$Itvvb6'ٌBT6Cnn|6SbI1gUZ $&/oKŀ0 f'Jv~ӟb2{uLڊVettAf3TVVN9c^F=yw18P-عKW3d|)QI®]YK3Áp\gՌΆ S^^8PcǎQVVnh̖!#k-(֪]L2Kd&[ 9qN@ 5_ TUR0WdVeDQh6g4JX+. 
Igg'/^zJKWRXerd J厂7WN9j.>?N^ y'};x7է>$IۿV5]:W9cLLL  B8N|>߂-g: /6_#P1D03T> B!^}UJJJ|TTTDf.~rvX gXرcaΞ=hRQQ&xQ FZ +<Ԩ^g۶mx^9|pֽ ߓP&6_zAvVѥp0/g%"˕=q[Vly_bP8 vDYӢ7Ex^>XgReee B[[Djx6~v:+c邮<$7@J[_{ ,sԩ챕~dXf`/2ڵI8z(nE/:#;tb۶1,nNGGG.]Z,'7k5x^ώY.SZ\|x䟢- Hi(Tߗdb݌r:;;f|IsEЦqMʮ P$~?n6N8Ayy9@IK _@{## 鑷->]ru0:~jjj*b1"L}Q+| ɰ[GNDM "X|܈dr:;vPWWG[[ǎߟmѻ^4hD|OO/2VN& YS IDATM[[---RSS 2sf9cǃP(D0rxum=Fx|) " fn&B>9S b IҌ+Z- x^Z[[9vUUUݾƻ`R% TNϑ>T̳^.4 PYYIEEوoF.t2©'/'J2,j$ɳ ~ ]%;N I"eWӈ[Jit3l߾O{{;Ǐߏn_xk2Ѿ]#ÎIwOݣQ,J] KZ(r' ZK3H h4J$!188Nj%N Wee%B!9BEE^wދՎhڡ(x,B \拾 `flݾ.\@F#& d۸z=۷oV>LMM ~Q]2=h9$F\/? yZ · n6׬n]CtȆ $QD҉ >O{۝Fǟ~ FBN%<$eHro֭[ԩSl6~^ ,|يd+zdj3x=zhcexx6^|Ejjj]qzcހ)V%;;l6v"yW0x^gKk!»98Eʪ, d2dZʤ,d5[NtP(D{{;%~ Ν;^ziyQt?cZ#  ?A#񶯃vOsu1H&鈧b`0L9ͭe S' p+evSݬ/7jh0zT*EJKMnot: |l>lDQh4bc3it;D/aO`4zV$AڼK/uEqQb67%*2v,|.|{Flق匿˗/g|g\bo( N> uuu>}r0 k( DbSٺ~,ۡMKK GvSWW:q]o, {eddf0~^.YāK5XI*j:!vD:5_$V)ǐ`dFJoo/S^^>`I#*n~]f^XLNj%H16!uOg.BҊ 9rz-.Q`\t??ԧǭ4`0sN</_N璖9ѳ̉!Wg5, gpp+W_l$rj31 p(rX]u 3}zmCFA*zzzhmm% oE컀j cAd]e[\>֭[lѶ>Uf| @uuuHDmm-n.hmm-9Z(V"fsW444… hڂ|׫on$`3gz{x1a ]X$ʹs>ƙ3gg?<+FUU顩)[bٲ)LO1.hs=[+d577زe˪7iX (Bww7h4)//G:UԂ.'46 D͓]t1;۱q2~,M\w"K/qwMMoyQIbL+zx qЖ Jj@>=lKf\2br"~int::IĠxWPU=*7Xr ,twwގ(YKB\ᛑ Q^~eE4ȑ#|>:;;tx^˅Tww7===۷oQp8wo(x7(8qnFQD"ٜ0`Jm!R , hkknаn(tttڊd>?"] (2ADO{>kl>![hD"-Zz&0jEx?-v cAT 'zEQ"JIjӁûal K0De O>3 1lBDmmuKAUUb1<'fpM)wǎ#=z(wu,C8&Jv)hEWW>(}}}<ۦUQy׻}?6ַ裏;/1LǞ Էe2Mjjj^%Yvm7HRTTT022SJ Lab=o$8Et;=ԑ!Zh$Q 7G(ycHc!%"7GjCSst""nEWWZ[юPWRYYrܼ̾l H&Ay GfD? ~W%xWE[ȝcw}kTdhh6mir[z4̶ $e=Aj<OAv9kbwH$=No׼5oFU)9sEfJ1ˤR)lٲ*WIu?M(HN }nWN``}f˙F3>>>efi̇x N4UDk%o7b2f}}}8N|>_A*Mwv ߣGJ$Ǐ絯} 룬 ׋fwAٳGb|_\c}q˽3< 8Nn77tX,.ַ{n C^"Ʌ 0lݺŒ=ZgFFFr %6e >R*=$yLs@\.XV=餮.Q|sc UkDKdbtz(̘WxjI*R/Ix'?鋢rŋimmnC\9UUUx<===D"\.עL*tM,J*+xsP]/Vݻw399I{{;ǎ+HW:;flmbaǎ:::8s & CEEu/t,w/ɄML&|_峟,/_c˖ n˸\.@ArvolↆZ -s>涱͈ ш"@4 b3_6 8?"i-'u˟ϻMF|>--->|ۍ_nj*HD2yIPT$8ރ8BY@.RSSjmmBE1ZimmСCs W3۬ϥRRRBcc#@P(ĩSX,|3KqvX͔Ŭ`0P__磻ZZZF˽#R^) \"Vя~￟Gy[n??ļ=ɗVe˖-p^z%xV֙hɿc6ihhfhY'e8]+8iО2|MiI4>I?q&]v1>>Nss3׻NjHjH"9h$VovC7 TQ]z:׶+syF#@v(_s9eS.! 
r!nw,_ t ,3>aB7f_:$cif:ϗwQt3/!,5hxgg۷dXRR¾}nٲW5DQR]]M[['NY'/lGVV>1S-6 &IԐ댌 ϟOGG555vz=z~c$@ a@˦MZ7H:g5Cѐi<6P!^ +v"%%VϧX!==ٳgׇjeǎ!ݻw[G"{F GKioo1Ym:V󭭭l6, zN7Оsr,N94 ddd>tzVX@qiPG嗿%W&//^^BCCZSK֑B`F ?C~~>jv 8` DAAAXjZq\|{)))* ^ݻ+V;pwpWDex*z{{)--8&]8۱ ¥ʅsxU(--%???vO/ /] ?R)9D2Ed3w}7}v g&nF*A@%xw/ w\DυjO"PI%LVf #QN˅fFcDg+~6===(sL^9 'fCOe2An7DtHtww_GT(v VZ,<3g+)//(WaD$I_n ..uq 'D\tID[[{EVSQQqx؆3`dRB(//'333G!|َ~`,܈m"p]D|\~9TP꜈mOp6ګhRϾ/ݷgyL`ʬncIOOl6GԚj:vpDŶ+1#tu)>>χ"%% SWMEE\vuu7pGG"I]]]vGדBmm->;l\K.a֬YZ*j{^1x<7h4F|;(b۱X,dggSVVP1ZHT[QVFKN =RvdoKo'~Ā9}$4[Rj`B$A=u>Etsl* |>uuuv2220LFܽ q1I]7MN7 TN,[lRmP^:1cF]EHXcxHBnݎ ==57#IUUGuTDXQ__OKK j-x.];U7~\wuxQ8x<Ԅ^h4Ƭz^A'Env2ш*:jмv oŝ Ǒfs]U;JF#'Y9>rss1͓:?Z]Nmg@"߿ဉى*2220͓&}V"MƉ$ԓZO.b.Io+hRp Woj3h4},>zv; Q4I 0uvvw^-Z]v;j:ؖ6ֱ\x,^[neA B$viiig?YT>N'UUU PZZJnnmhã +"~p~iVX͛y衇8 _/y<A@v 5 _QK7AHKKcΜ9o jұ(P~V8s{8Xt)+W2p&;f ; ddddElkk+SVV_7*V5*veT_8'~_kJgQ?|˯+WFBvZy)2Z&gЋW<{z{{/~N'c֬YW\󉏏"*'vƌt:ٱcG9ᢊ(++XNSw7$$"}nZ4\+{TGROo&bmw< m'g|&N pxz>i#tlAA x< ??V;߄F%;k IDATAgbѼIہ&!! 
V>fs"IVr<@gpه$$YPPB___5@e*))i$UA!''``Y.@Wxusc6/@RaXQ?TQzx Á^G }IV8묳8묳Ϗ( KYYYl{N1>͆n'??FQ_V8MGn76rss1LLpSQ'DԤ3.8c;µf 7~ Cģy3}}}LNN!jqvhmm De_o!!!id/r[o5\C Of1r^"DAA>w.׳n:~qF>aJ&99C<и\.jkkiiiAa4Ǭ<ſ7ڷ cnƿhYzu۾h" JA bWVqJ% NF]OT L;|:ĸdJKKSnvcZill$//46[n@ ?"9Ap:xh=?mj6p# uHDkk+lPr> cӧ,١zϟ폇ݻw rAgg'W^y%_=EEES?Uuuur-tM,[,*}~F]]FԪkt:шN)I,o󏨺3M.E4; ~L@, &ӊYԅljP5~2( sGݼ;nFPZZz-b ˅j|Fߦ&nӹ{~/{K/8`4H ʫLYbbb,Zw$gV7 }פ`0r W_}5f~kSN9%;y^"I3ΈJSb޽ttt`2JH+xp8zl6NraXhnn&??P,Ycc#@0+ \pHDgg'#H4ا#@@aaa~^x[[L~WQRR2?9gBTN8>;yf$%%1{`ᠼ<*^ y$33Ξ={ V3f:e|_k3q9%jpM$_$dX|{ID$UPau<%7{2̤T飃9pQGaZٱ} )5iii /7; !!!h`0 dggMWWw}VKII!ڤVh},ʹZ+íB.RSS|rv$I=ׯٳgqQ-nʪUlvZ8IC,Txcmoogʕ<\qvmQl$jjjHKKOGFCs%11|fNWK70|xjya>*>O8Ɛ0L }᫐|bZq8a6s=$w<ܲ~>,I?c>뮻.(<<>J5fX\\AwCݍjxWftc0=##\hDŮ]@_R1]w9眳_۲e K, 6uVw˚5k83ڵk8#kbE  +I9˗/g޽s=Q|X,EEEi81Z 7*f= ?>,o{Gv{++WrGmD "a?Hixe;Х"FA/\; a1ї'!^rKu!A0Bh8NV+b4r WĻ$==}+$de{*ъ./qn/Z￟:kWYYɂ شi0x:e˖qm/^s߯}U*'(V+S:w{].׈s$>drrrXF2 ~Ν;?>?0g9`Krr2/tRپ}>x,_<իyٹs!Qzxc ˧~|NRq'}ύ78N;_~9:YT*>?Yf ~:?ϹLΝK{{;{ Dt;.fÉlhl RRR3g]]]l6STTtPTϟ'5)\c>55ٳgCMM .%%%#NJ$)#?r `o饻$g6 !De9,Gjj*Gy$l6>rss1ӶG|VG =ג|\.v;> CD.| Y7"1SGv>|5 onnޯ}ebZ!ȡx¦p%b ΝmUW]p8ުL1;233Yp!---U 9gnDxR-g̘y줦zF㔬h =4[~S 'h JPTH%4fϞMoo/{V`8䇷`d2a裏d2ŔH+Z hLG;)) F#ڵDFc}ea7A+7ʻ3/ngys'FNSPP@mm-|Z\+bvyyyymu&U-D?*˿P׽Ȩ u`\?.dee`ڨLaa~'h\.T?;-H|md(L4ƴommmе@$%%1sLF#uuu|dffb2o$Wfx1cF0;b0ah4f  ݻJJJ(**Hł La@ ߿999jZZZ2Z RYY… ٸq#0xzxW}̚5+Css3~;[laٲetMىTflZ*trQSSCkk܊).yH?|'E1;00@kk+ qwc@E.jjj(,y/aWs?(w_S+۷Cmm-t:JJJncIMMh4i'Lt-ͬV+hQ-&K$>SZR$Im۶ܲe K.'`…lذ^x={ϒ%K(..f͚5qǃ>ȏ~#yxuy Av($l2Kʢ\V̆s12+"vBvv6劍0N'{4uHSǂ +O/'_pq1Ruu4ptuuhtI}}=۶mcٲeTyGaaaD;y睤K?ĩܹsX,tww)))Fn5里j5@ 0a8ˮ&f'JWW6N())Wv8;>O>^Rt8$a2xGM6}̙ã>Jee%<gy&[neʕAuŤdQop8hmmEV@AA: nᆈx<jjjhnn  WZEo'!!+jn1"#;v$q1F*EaٰdggSZZ_'n4653[iK-c@W]d/ʴt:v %\VFt^X, :`0Dm@`(?`0;$(E*,[n VVEӑGoo/~;."VZEvvvTtRUUeee=t""֒Dyyyn$NC]4Wㄤ,ܗ ԚC9pG&xZ444w_;&N'~7OsN~H^^VW+;V@ @cc#6 Jh fcE]] hrW# ߏ>(3$Q\\̻ٳ߇2U޽˗駟j*.䒨T>$Ij4 S2I=l6h},yIN\^^ ?m;2Emm----c4{t8o'$e9EsaQ󩣳Zbx$W0Qj "MMMXV CT\ 3^N}}= 
>j~!fy\$QXXG};IUֈ+Š+HIIas1QpEf#??RTZ(..d2Mh(ie_ǃ5@YݟwJ# O[ILN>jjj=\ >A@'?=WO $ىbt:]Ԅx[ T*ըb611q\ DQՊ`0P\\|]b OCCuuuj Q9.|ap?z$IsN;lU8(ph4a BcЇwvR8ȩmN^<>;Ї5|>%%ABY^ omm- t:݄~׆ύ@7{BBBLhHDkk+V5(AK39 @Z1sLے$Ş={(--( ͞=78yf{9+͛<0 hb۹[yWYb]w]\R^^f$GӉ Fwߙ'$q>`Eh<옜0)HItttPSSd2Q\\υsܽ%&"x$$ :D*S .>JHDGG\!Wn5;;W{VF3!RłBףUt$6l6t:zmB].רÇMl6&"X,Fc$[!Eblٲ%KSYYɆ غu+UUUamS./^GMbb"k׮e۶m|#IxY|9===Ah\$Ij42G5c``F)//';EkQwNH{=atttpreq5M<7ڵ'|rR^rQ1L@ wɶww>2Rg۟ v_AꎘQ [ \.Qc@׷ߢ`FYWWEEE `aDIpO@É8;~Q[FcvC, c0aۑ4jmlbb"* IWWv#9;S\\Lkkl&((Dv>ٳg\[oO>9yǸY|9s{ .xU(99ٳgIUU rrr"9ۍC}lZZԢBDKDvORV@P욒/ =z}OLƣ|?JBSTTngϞ=ӳŋ#ѐϻf}>_؛@^^bXZƌ3;w.X,{= 1 QK&ëlFhE699!7R#Ν;III FDQr 90mVjʻ0n|AyO ̝;_ r-Ah";;{HVTo :JE 'IH)#8^^7j}mDwyZ:N7}Q. ^L&:'|BNNfyRbV}[ dQ!?@ˣłfh4*!1{llc4=QFʢVj4!Vdn5  {졦0y1Wnp`۱vȢ4BLᡇ'ϟݍ*}}}<<#}s=I?|>, #M+IҨmĢ>q~|]+ҎxfҥL&.Yt|6^z)*}o_p u_|R ,6_8' jHAA&iB.$m5M)@!V$I477cX (N IDATZ477h$---*:nU Blz=a[ߦ!BߝN'B^...&)) *U(CJ.\ƍ_h^ϵ^;ںu=әZn&z-n6IU ip"n\\ܐ$*&xz @OZmHToyG:'ũ@{}˹#QF/^/^:x$Q1k6܄t'TJdRxrl466dӌ|#5"$jcsQyW;933gA!p8hhh PPPrrh$##AbXf{e.]O<… ٰa/{!??%KP\\̚5kXv-wu FeI@ I?X|9(n:N>FH9 PWW TTT+`Dp:y材8@ŏ  O]]]0{mMJ@R@%#u/*b橧SJF%Sw~վPkо^zzz*EE, "|GvSWW 33DFF>DH=Åm$E6*mgg'eee\.SNa֬YN7=W{iӦ`Ĝ9sxG%=1 <3`V{]]>c}SGݸq#3g 555q嗓1GP cn"|$%%a2QšVDZs"?p8/~9]FN'iq63<0{?OZ@~h4N8P@^Z0 =\.tttߏJ H o.fcFJnm-rx:+T*>s֭[/+©zSa pHK/q}Q__O{{;B|>3f@ղpB~ӟ#hzNh){ؽ{7?яHϖ^pI&k[ٰa\rIDN!}{7欈lo8aoHDQv:>nHɉd555x^L&2QilljV1LǬX ë᪴#Ugq:d";;;':vL---ǣjtCzh^'%%eBXSSCQQѴr8TQ!MUUw~w$Gqp8{5KUUfcJss3guΣE\z("!_+8餓"zs>uSUšRU 0Lm|^4[SS$Ifl6OxVQԎVSWWGbb"&)*V]#!544YV Vgʬ,j ԂQ!P(s=ߎd⡇b֬YQJuu5qqqTTTD{i3!1^ @&=gOxё HT*rqD2<#l IWy bGiiiT;(Ѐj%>>wlhvG$[͆Fh4N2.ˏ!S-)) >dSM0zzz{yǸ 뮻ɉʶvJNNeee*No+@b* ̞m曜{$}qХKC׶{;V,TV+)))ԁ"Fl6G94 g;؈fCRj)(([,IRphpnV]-..9Oq"x&={Xj]vYĎ㡦fJJJ015thґpAdl袋0\.N7t?>klijj v|燭r(W!*L U8,XM6K_:e˖8cKxwy+S__m۹X|yzn6 F~~>D{y$~+adzpr!HD[[MdtZjI :H6]᪳O[[)))昴GknnFRQ\\h„QBDz$''/rYg_t)l߾=V^͗_~ɶm۸⋧wy믿.֬YÏU].մc0())jҶmHOONfvQb(?\yajzAHwb!99Zez{{+8(x^ 'lG[Ў$tl6RSS1deEy㔇Fr:6]-**R Q)..fǎuQo~m>}^r_3-穧bʕ̚5u1s̨]LQ^^U;lµ$$$5jj?~l6vJKKꢦ~F#Z6fUg 
pp6q=|Aڴ`+d/TfC HAAAlII !ȡZCupE]͛f5qUWիWK/;o/33JٳgTTTW̭ zʭEV!B"seZVKmm-!|?dff2|۩nJRB]Fr7xCH\`%VN <\juVFzkllwpOff&Zvl||<<='t/pQBTشiS0xbΜ9ٺ`nJJJGR)VAAaB(WA!En`0~z̙ 륦&z=F1&c!Q%I Ύ$h $s6:3G]n1hnnrV8JMMUĬBQ´ :A̞=7p;3^zNJJJذagqq:w}lڴs=իWRUUE?)c~?zF} B#UhcXvqhjjժ}fZ- Sضmېpo믿FӱrJ`Q´d˖-,YJ6l֭["//oz,^<###ٳg#?UUUxر;3/s֯_?ǩI{1o<>1j#ફ.7dN=?Q´ i&`bXl>in?8ׯgϞ=18$I^cŊh4֯_ϱ` ncZɡl@0R,hUgõ^c.Xjmm ["))i!:h41]VPjASz嫯 >wM0 z$''/Yt)l߾}לqdee[o a2^<<<'|2?%%%Q۞҂``05Ru6PT#iuvttw^~DKժpzժKS=cφ =,_M@LQv|~~>{ [o\믿NMM \s >իWOnGn6.Bnvϟ 7ˣⰐGNʢ.F#j%G-k4!6++kgHCvv6-j;X'H!TԶhjA=! ɊUP8477^9N\.Az(W@Ex'Q̛7֯_?V+#k֬'?ITzFg̘ hnnz***Ft(_$I:=P PTTD~~>v;w"I3f`޼yC0wVS~t:-Z/~񋠸-((@V+VAAAa0AV2 ¾!3gΤ;.sXC9>#~s׳yf֯_̙3#.~AL|XQ@ifyP΁/vs{u={8MiϞ=ydڳg؋/Zwh8 |\ҥKڱc222Զm[G t 7ݸqcuM6M:p.[/|uܹs.AV:tPddA ;vTӦM M*..Ç5c FiJOO~iPxx~Wm߾]cǎuh`aJJJdY1c8W-_jʔ):z&LvJkڴt=zT'OTnԦMgY_:u婨HwyV>}hĈ٠ Ԟt??t;}Y͜9#"ݻK LXBТEԵkZA 'OԺu딚 E>HԩS,KAAA. BBBg;v쨖-[J ҴpB;PNNN+11@B%$$hݺuzg5}̲,ӧOO?$///磊˷رڷo/,Pq P߾}xbgQQQQ&D /@iF+WTll^xi޼y=zt6e˕Vu)??_ pY4hsk˗5|effj֬Yܩ w QIDAT#Uܹs5kl٢gϪcǎZl Ʃk&/^L=á6x0WK}W_}M>]x vm.SSaa|||\5p@&M\wuv׮]tY7^-6mҴi\* RiEWqq|AhͲlߜviӦ)::Z}qv_pA&Lkчd94$&&jFVEThƍfbcc-[ZÆ nmذ:~sN+((Ȋw5wykĈV-GyĚ4idXN(eeens|!TvժU0`ZjVZiUKS뫞={Zl͛gϞt1/Auh4f}JOOnC=|7O~sF%Kh UVV5m'OLGKP|WZx>ӧOk֭7|TZZMQv] ?~{FEDD%վn?_iJKKժU+-_\ƍq(u ~J?~~a^xAO?vY˓&h+]xQ%%%1X4uСC5t*ʕԩ.]*Iѣ߯7xCC 1QCJKK_k۶9Rcj׮KhT +@=~M2dHW Q,ZH7n֭[q!pt钇BeG{%KhѢEڵkEZV>rϟ;vO>D W [h&MxhN$}'޽|}}իW/m߾MZ飕^{MZfp8p8TTTKz C.{=xƌ'*33S={1nUvŊ*..ȑ#|-YS=@PTTIRXX%???uA3fP~~>#IՒSqqq޽{5e閆vю5J.\<ַo_jʕnP |&鿯4{lIӧ|Nݻw+$$DK._nMhi u<}ٲv ̬ũntVFp8jmNyXܢ&]Qܨ+xEMhkn;w\OahѢeu+11QYYYZz5kq)##yfSBB6m*???-_\6mիܴmFB͞=[Ct6j/맔%$$h̙ڵmۦ={z=0{x`4/F0F#h^ x`4/F0F#h^ x`4/F0F#h^ x`4/F0F#h^ x`4/F0F#h^ x`4/F0F#h^ x`4/F0F#h^ x`4/F0F#h^ x`4/F0F#h^ x`4/F0F#h^ t"R\]IENDB`././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/_images/twin_logbook.png0000644000076500000240000011501114456461441017246 0ustar00runnerstaffPNG  IHDR XvpsBIT|d pHYsaa?i IDATxy cF IdO5)%2li$e 
PYRIb,ELEHRȞ:Ƙ1g}ι_<}L9\INNNAp~CoH@ !7$ ߐ~CoH@ !7$ ߐ~CoH@ !7$ ߐ~CoH@ !7$ ߐ~CoH@ !7$ ߐ~CoH@ !7$ ߐ~CoH@G2e??v 6 ¤ ӿ#3H\"EFJRVҥ}/K?.]wT}X6%'K'JjI KŋKMH|sg祎^-9ی~4jcTxҿo׮ҠAR:ҸqK6HKӷ@{S/H5jر>,>tRٲҐ!ҁҍ7J|ߘINNNv:YV<ƌcGJ^*,q?҉RT4{tm_Kߑ#RRvo߼YXQ cǤCʕRݺR|ԩSH\ }]ʱCxf҇5l'jdl)W.GcyJݺmHK>%W.)OZ/`F,hgϹ~*Wd"uzF\#X֭ҏ?֯E&?p l*HNGvJ>y'$cǔbEco͟?_2 @ILL͛ղeK/^{w+BW/;VW.$1Ѷ\.""v_xyi0{kg6mŋb|աC3sLywߵ-1jEڇǶm6$_>+:?ݑ#)g)ǯm*T AWZ5H>}4n8*?gv~e?-k֭[:ܖeGXVlW6EYMU Hoڱî˔k,_.?.nxJRժҒ%?.eUVZEe?-~nYs;?mY~gϙ48\"5mǡCi/[fޖq]ܮٮO- /ڷ=xPj¶xv'&0a #3>hJP!顇 ys+psɒ!C3}/Ε6Lc jǓzʔ{ɒʕ3?#W_-=!3;?ܲu?7;Vڲžx>߾#hےӯnW>8@<jZJkʕ+)@|^ WՀp !7$ ߐ~AӖ-cIǏK/ߙʔrgNoFR۴IjԒU?M{9r$][3H A~5r/.MJ$d.믷$`Ap$ HүG|W_I11?,L*P.鉍dyl["taԸ%_}#3jՒ>TZJso R&R–|)J)!szt ,(ZԶ]EG57>PZ@N ߿;|,)}TTνV˖һZ"ҥtdν8vH./ٺR\!M$y<}dmepa*RL"˚5RRr"X1mY1zmkx%!KHIKJQQҡC,~r- 3@$ 7YJjBP!:Yr=6dzvIO>)T4}W)Jgmۤm SdVV ;Ի%!Jȣfq+=쳖l!eu%#-\hIR\#ʜ0Nh!}.Rs]`5!zI?('ӑ2 >]UjξҴi[@[ɕK;V2c~ )Yj@ᥗlR.VGlɇWR6|ֹƍR:NG [r4r$[ou:"x]s]vyVFHm[;mb ~wޑUs:rk?!j飏 ;Oɒ o@7ސf̰nݒz\$\xt%NG\u lNwԪ'͟/5jtDȌ / ~Yl{JE;ʕ AS'I Di˜*}%G0xZlH.]}5bn]ixb EGK7h۰mj KY3[xՂLbH[߷ɖd><3EEвc8?-:6bE#BN^]{ҷtҟ:#AHȕZt¤ !"f|n%%tT|u ѣRR26~};;mRӦR)1iHJ[])lv_؇ѣ%Ni\6鮻<sgVڱ Vת?nzi(i;#u 7XҪ+ң.~w朊۰ wԫԠg{-jM>@r\5dr1clkdȥZQR3K?oZf-֭ M`g}VdH@NVԱԸmIxq)Op\2{ r,o^кtiۣ"#-8$kc~͖|8!ocGmkY_IÆI#ըt4$*秞- W]e*ljddu?ۦue(`w_g;A6md r疆w:pi ;kx 6|@AU ȎRg۾=q]{a4u v:f+CnҮ]ܹNG K.#l?p6uHVYaK IL-WH='zΑ͚I*٪g|>}u^8e?8^V&O-YɓRϞvF{wA0(P@zeҵm=2ZNhJ@6;r$v_d[RdIƏ7njժ@\{wl~UٲNG^݊Η,m6@fpm/VB~Xzivy2|m3$]6l_n[hVC`.6;L쿆9J:%"r^t$i^]ҕW: QɒҜ96pxQ#imM~%:u*m[6&LʗpWԬi0Ae W.NQkviKB6sMRm b[yH<顇;xuk#[)cK]oӽ4}Ͷiƿ1W},kζOrQ[_?eΝĉF) %uEd^W_9 ZZ8ZFG; BWXF[ .gOr,#Y3g^Rv6qU6pd#d+4pY?8d~+O۞'?߉_H3gJ/}|=ą0k,͚5+ͱhѢEbm:uJ9x%w)u"K*ŋ}f`}Jɓ c: *ʒ㥗^m6e9o_u!l 7Z'{iΜ9i.?|;BE ؿ˭.$đ u.m֯^I9 z.1m[>܏ۺՊ9/euZBǕ*ٖ믷 Aph^Cɒ(J'AȻZFH1:m?drR~NG7[WZʊ3o=>޴fxI'g8vyX{knI3/2e5˗vVD"A tfͲZA#it4p(il5q"vn)W.i4鯿lcۼ 'o?O9Y3#u;uj}גHLAq_o{'y<҃JW^iIG*vF\\ʀțMn l%0aշyg9̙cɆd[ y#r*dɓ6H@ 56,!F u8vmےۖ\vY3""e7-ZرҖ-c߾ɒ%y~Vw~M $ pnݬu6ml}J]t4@Z K=z9>VAn SChlw R&[5 pnB~ַM}1#'~㏭%K$ 
pyms41\dۆ0ft4/͝kmΆ.GWM|ӑ.q^6nl_7*P@WZd0\rW0di_^)}t%6+dJ#C]??+R6/v Z1K-ZH?tD#H@:n[r㝎Q6H@ӥ'p:" p*dg*Uk8w$ p- yufϖԱڏeˤ< [kS׮u:"W]2E ۊ ZfV :>FlbR)-*qT #|u Ӗ/ƏzȌE?zʆu_/-\(Q+TDIժi.~GU$ p%VA ŋ;>\hyŊIƝy,^vLƍ7ÇEX?H:؇{br圎 @VKٶo.,oig׺kq`eԷ%>k*M&Utt! H/9`~;;e4kԾ()..۶YC|#u$utNG3~;f̐ zHՋ~@XO!uBSL }Q-ymQ6'*7d[rV޾j|<ٷkKl>#GJT?"TlNN;rPBm Փ{N3Ƕ"ԻԶ%#\q[S.m,QH-o^KF""R.}Dn TTTwd "Tlu[/;ܱc_?#}aeX;K. r .YC'YKbٿOLD#1ўHX}u9c H@ kכ7 |}#2ela~ʕLd@bX+'5im[RtDIH35k: @jX8v , T¶L',䘄U+,H_LTyaĘo֢W# rLB'uH@ F 5nL_|ϑA; =9HRGk_r bcNGنH@ z3 RRJNGA$ @#%'K| ǐA,*恐gVo' >`H!rNXO$$H^k3@$ @c>{lۯ9u ѣRR2RRq\v 6 aa…~Rɒv~ @m~jH!u HK;J/ KZI~֯Fv쐪WcϹ_oPd@VJ+ӑ %$HuHNGaJ@/~[zgݥ/=>cԑD䡇2z?$Mh+t IDAT.FN/|qi|_rٳ\=R+u&-]*mvFFZӬ[f g/o$ Ȗ%KH@9.?pȴֵի߼ֻZR~yN =EX]- RRRNGqZٱC*]c۷u~}r|xk{#GRnϮ͛1c_6Y5n8ժU+Ubcp^ܹ-Zk/]:mV;vu2CqcKF6o"k3ʗ!!e*t$p H͚_KYѮײev]F_?_*V<;ߊVŊ9 W_I#G: %\kgۣ&M~؎=*Mjѽv$R%kۛÇK{=v4dۚd$,dɗ_/BU HzҭJv /Mn$kit0vv=}h}=x]7hpzW;֕Z[$M@袊,IH_+; %\H4 "͘!']~ul0>]N7tON)SRM@&|hQoC',dIr% /)߸]6FY1zbwg6~:>#)Ɏ~&M>7ӷҟ |aǥ뮳3aa&#ǏKժ}ǎOu ,KH 5r:~w<_~Ilt_Fo!BDleQ $$pzZYSu2ڴI=ߵr>C @){T_o#,i_gR*ҝwl\uE@7S CY[VNG|uF[#q+ @H݊8sm:k2NG,)o/]q8 D/oכ7Kj9 ݉ҳӑ@5kf͚؁O?I￟ R$ @(^\ʟBtdҥ@تUT;;؏>*d3EVȤD n]#w3fv[?{XL;S5 @!A$$H_o-K4r}n1+ @(жnC8 O^R۶it=R.R6)]eB Bʗfδ.HHåku:=&X9vGfMݺp%R~ I$ @"Ei\aC)*H  >]ybb_ 1 ĥ֭~[*XhHE@*UJʛU{j>^~YQh8+V@&+G6IݺIb0 D1 %ڷ&O؂XiJ@4HZJ[)*h8'V@K +=T)$ @r:?KF顇L#BTv*H:qB)_>)>@P!B@BOHKHfIŊ9 YB:Jr& 9.! 
.5lt4d+W@ʔ׷s4`Դ  .<~6k5 jՒ&N|~6,`;k_F!W& ;K?/u(.je],3~4jcTKooIzm?lMj*T_o8+f'-3, n oKcH}ڱK/}4$Ni^k?{ti+]Z'jՔcwmN .w 85k>3җ_J HJ9 ufϖrzH97%KJ۶UX릛z /V@BСc+~*Wd"uz{;x{ X¥Ç2pT%!9% ;vymߞ3{4nTbJ4o+^ у_KYܹls] Hbm:]DD9uO>ɸvO>:mW\\r&0Գ@Us2dvUt-NGO.ɗȑ}mhidkuwܸqU+1 $}l g9 Dn Voڱî˔  s.]p HP:q~i\sԼ3K@j֔6lJ{|2QwGRs⥗|@V +H͘a6~H)% L˥.ˑ!6V;(%/l{Ep!K@$K YB0w԰a}v=eJ ȦMv>$ XKWʟhpNH#Gtʕse74j]fԴ+"^II~&M2w?ʗ-[U0vMd DpԳ@Kz9w4B ʔr" ÇC\lȖ-NG ꫖|)t4hzr:r+)_f!@Y#+*a[޳G>ix^=K\1;R' U8 Rcii;S^eH;wJ%KJ+WJuyy~)Ǻu?ԏ?.}ԼBv é ('OZGf58 CΝ6&^>gb 0.+T, H@y 知)S\Νv]q + K +=js?nm(paԁDl!L(4oӑb֬Y5kVc i"*@$ KZ3v҅تUTvm߽oKCHݻK pl\"6VڱC:rH\CaÜ>:unV]pzVgp]c=+-ڶ:wq]+vw[ż@(DwЈy@#8e:sm6PDٲv!7[aRѢNG &HK۷sl=ڮliyQHZJ 1$ K,G=TԻӑrرҖ-#}uNRRg>sg%66"h7' p:@NٴIJ85 / #M?8]: #\iHNI%J8 #\$6u:+iF{@/@hDjUꫝ@7Ov(kO &e`@dܩӑ0H@ɓG!'W_⤨( `.C',?7ϊm(> eH@dDV-N# .C[H eʗY ǎ9I$G%vkE6Mh?6mڸq* 6˥߶do_;ֱt饶Zgl:;*ʶz/]z$+&}u X>Z;Ώ7ٹ$ǖT{rv:8Ȼ1sLUZhdźuԡCV0U R\ikD敺ua6)&&zs9xPsKpR?S'[uy8/ul5d}ӑ @TZUjr: X*BBuz2Q]^XV:qVLR˝[QbVFm6U+\9 (*ٱC*]c۷5R?gjѾy åI@m ;pNG@p;xg%&5ι^O>Js,..NqqqHV>0q|\wӑ4"ٲ:I5lrljiX1}{ms?O|vtGܞ]8ϟǍa H6o=fKJ S"|}_Rf|DFZq;Hܒ.(ﶩ2ew9O_ ү:E>]:~ܺXL  >]_[پfn{YSڰA:ڲev]FcRi;f5_`$mU۶)-@EwTTsJO۶Һu~v'IR=jOis mE%uc jǓzʔr5^HXԽY@NI=R%gc *vY3Ϥ)r҉'kĈgܾqF-\N;}t%E>8] !I&jҤǽ7~i 8PeRz5yC駭v_?)OXn;rDzm;2\yjGMj%%e> o#9O󮻜xJRҥ5uT= ?cɓ%Ix_% ŊSb|\I&:tYoViumi_-Xҷڶ+WrO8R' Ȥ$Wn0w߭;wjܹin;~ՠAUV-ǧW_+,,LOtz ȰaT3ӦMKm˻]|{H҉'/~*T (Zj饗^J7ٙ3g7oҥK+""B111jҤ^y4գGUTIWbTzuw}ڻw/\ȆX:m>$Ag) 뢢lE$ >Lw SԷo_M?qgժUZvJ7LWJ4vXϟ_V_,I1bj֬)S'P%2:_}OޥK/_^RڭV7Vll$[Y1b-X@={ԸqN%=IIIѣLvکu֒W_}Uy՚5kTx4zUcڷoƏ={_bb"u,$('ʕK~Z~Ytҋ/FJdگJlp#Z^]rT^믿)ShȐ!ڲe>3w}8lذaԵkW=a\(PFwܡ.]СC?~˗ך5k+ 2T!I V֭5uT=#Iʓ'nv 6LׯRp3Z{<5kTr~dGRR^|E.]Z?| ,,LcƌԩSoJ@$)<<\r57/_LGP$ I7ߜRҢER̘Ad+ t-//ut41UX2Kzteiʔ)UHI:6+V;nʪRFV(-]V .믿^&MRΝwS-S WZ裏j֬Y>(?nE*Q:3)Q’xPP$ Y}֭?מ=6=ULN]=ڷSPxjg];oKީSuQ0&V[l󅇇gϞڱcz<;vꫯ}KvnVҁθ?=t6ϼ+l ܹ#rb .K=z4*&l <}~Iz^T~H$ ZK%$$Xǘ1R:Ǔ?s>#=#裏4aU\YGV-M@2|ov3f衇ҼyNmg*W\/W\5sL+!!AJ, *h;Ogj*}'PllF6;CǎӒ%KrJ%&&lٲ;Nu )[ٴ)`.zi>lh;!>LjjlZ\j(ߪUTvm\RaKgΔRw?½a2@yv/X1Rɒ6Y3;xe 7dHPԀq  
:tω6#~BTÇIuW_cK=&]zuyqgc_wT&M}׭,ho:uRΔ*Uo_4zRB콧5 A\cRH@$YKNG6wKO=e-[kԐ:?5'O?-:d5kܹ3\bۮayIIRŊ+E56@cDRǎR˖ң: )cPK.u#D۾SWϊ㥻mjEڥ^+wo)Mm3o۹ӆ \8!q+4}-Uy$ysvͱ\F~n A\c<' O=%-^,͚K mmժSzQsߧ} >]6 $klm+0/d0qc;t3o+]j9ypg lenęÂD  ymW]cN~ҬM= Yfu=ib]{m)l!#=򈭨~]^+,$hjl]<)u`_Ϝ)7E8ť9DxGϼȑɊMm ava륗&%@Mr}l$ۗ6,:q#/>,@NnnJmk뚕6-+-Z$%u&Uu"YBfk ],\h5~Y%& ǿۖ/!ّ'4qt6"w*GҞ=V@m \m-wygr4wn3_|!m(zk')Kk!ً5 ȏ?Zt$@hrM+ޞ=cǤ7ߤ{&Hۀ[I3J5P!k|VѻMB=ZvXK $&V mĉ.ڵNpBرҖ-#} xN,)[ֶ+ `nQG. HҘ16paBOtt 5vtt$1BC $_^+Wv8pIM2E3gڵk?H"VzԺuk/siSW4o^֞,HJWz:(C.ʥ{o_h} Y;wJV/5yS߿k=z'OoUHx*[;~Io~4 H۶muW*i挿"AVdCϟ5jh…*XwQmǿ@ @Yh$^UR?A@ņ4PxA@hT@ҤWi'@r?M&dk{'d{pS|Txq/0ś L93S|wsgn"5fy#gĉп?,iw4"~gߟcǎ1gcZndÆ c̙8N ~ {9n)BHHwy'}]Oq:L:%KТE J(әQhܹՋ5kLpp0>Jmҥ %K$88f͚h"Ki 0URpaʔ)Cǎ?7Umݖ9p۷/N3۵^### !((o7x/f8~ɢfM8v̽M ߗ_6k :uR#5H?r<>ΞD9rSfM6mJb;v,&M{ۗoiӦ1f̘T3uT}Ml۳g-Z`Ϟ=z뭴iӆgϲ`ZnG}C=깢Xd mڴgk0sQ@4iBJ8u˖-cРAYiӦ=ז-[hڴ)111k׎0o:uD6mRXv-w}7'OutڕG2w\7oΜ9s{xn[h֭W<61uԉU:fÆ ̞=E?|TRnݺʕ+y饗Xl}Te0̟UW|zȬO4 ՃELٱc7-VpX֌i?iYN `EGG- ,_$]hY+[V>vG"~ ;gY޿;;'"zr8[oVn]tZ;vHlBBBPnq!@Vo6@̙3cbbp+((:rHb)S,a(Po<ƻsTm o9kժUnlr8և~־xbpXÚ:ujbKjժYAAA֏?VJ *XqqqKnݺuV``t:޽{[g϶vޝn9H'*Ud)R=wŊu+Xz/*UW^qo[a{5[&ϔdѣ&=Hrشimhw$xtN}7 Vjլ|p8Ç߿pX .tk=zp8> m᰺wΝk9kĉmϝ;w{'ݻr8V4>u]>6l>cǎhѢ _Z*THLvUT)SN_$ O¬ Xfr{,<< N:˗2eX 6P&$ [' f>,Y>~e*2!OLJϩS>ϊHP䳅FA۶pMvG#믇h{^7}ܹ֭[S1;={駟O?eȑ"}eL:5qWLʕ+W^IG௿JXÆ ӌ=EsNΧXw^~=M4\͚5cҥnmw1۷'ƝiXݺuSN,__~u?3w\ΝK>}O<.tޝ72zh:w?([,Ϲ Ow)bր$hn>gW_574^--O>靸D#3nFd lڔ~<_Pԭkw9nҤI6+Uڵc̛7.]|͚5?>111vZ6oLN(UTs?~s0Ν;=1114hЀݻwӨQ#KRɓ%)ʗ/<ꫯ̜9Gy$ZS$Ŀ7d*> lj##Fp{lʕnmrG|]qϟ?ve*r:t֍72rH/_dԨQL46m07֭j]^~|YjK}9pލ/|6iyQhޘD|^ +VY> H~6uT.]D O9-_cҥ޽k-w٧OFiׯ-[mۺuM{3eǎn ?N:IN,Jϩ$;`4'ų>Kxx83gSV-6mɓ')]Ф ;vZjL0ŋ{cʕ\pFFFtR Ç_[}+Wһwo*W… SMnС\x|0q Zr'OdݺuW]iSXl1Ӎ31n!j2'G@N5_Ze榧(>}r7.,4R̛oٳFLE/u+V`턅Q?已׏#G2e^}$gҥ֬Yî]8|0AAAƻzj}BCCi޼y(Ү]Xp!{Gtׯqqq4l0U"PdI <@tt4'NZjjՊ*Up vO?ă>ĉt[3Ôuv82vl/&~o8 #ɦG-#?Ǎr:nesrw[NӚ;wnb%JXN K3g7|ӪWlYUVڵkgM<:lO?r:閠?:Xʕ-jկ_Oݻw[zRٲeչsg+$$*ZմiSkѢE͛?c=M7d)R j֬iu͚1cFP<ٷo5aSNu]g/^ *Vhm֚aOkN[)k6,Xj׮U\9+00ЪPըQ#륗^nz}Q)ÛO8,+lng`FEҋ6#;0}:@X$UJ110lYs4lcvO.6l:9}:K/z]zM|X 
E|W_Ap$M9W_myZ'$w}֭[ͧ^2ǧ|.)W).xZbc3W:ػ6kD'(]:9AA oJIg|O'wfdheر*&ɓ Ȓ%fDDDD2g}s9+T;v5{Ld63jh8f{ꦪoT̞ ]cۛ i!|dYvI~y$ߧHVѣ&.Ph2`hQ,!H&ֻ*ilsfjf{$ga4ؽʴuj?4 'N4#"ԷZ5> wei})q- ėBBI˗;+رf͂?#ْwŊW^6G""Y=W ⥗`t8yn,0u8gs8hc8SaC|5j;ugf[n1{= &@&XS/& 2HDDD$ N@.^w߅=J,P""n|6IH6uyhDDD$L@,v>mtH)7`2jل'"hDDD$P8#'ϚD|hDDD$nGsr> jw4"""Wb4+?]axp8lHDDDrq ȺuСrYt^HNR""nʔdvhf6۳HDD(ٔ<w %K¢EP AHn ;=r$j/?wH*^O@Ο?UWyEDDě4KDRjrtn/Z7;(T$$@~fyF^xQ`H*˛IJrq ˂>?7?DDD$ .ٳ"o |`6DDR?{^~x"^DDD~:vhQͷzHSD$ H9#㏛DDD$?ظ77P[ sP""HL <t/ x.LCʴip(ao>BSD$%P\H@^{ CO`\/}ro^`cQJ@D$#JnjO|Д탙3\u IDATaE4KD<dPR '+/@6PNR/H x fYpzRIdd[۩SpsɓtiXxÌc"% "Qh(YOth:uʁ'}DDD]zyplۖ4{"!"TrldDرz%""?W)9o僔Gf49!!Or <0|sN&""{.]x/{7DD<*_;zD6d,3,-""խ k푑 oX=q oFXL`#3ʖDDD|3Ϙb+ %f&hDD_=r9^Dr[h(?.]/_5 TBDDDw5TbH2T ]5K_Bmk6w6 JE ՓtrIX XNJr] ^ u{d""""dj9ƌCM[pM/7*$QQйijք_3}=G|mwCO35d(cx-.ɻj VTҿR[BЯI.қe>2eL2o^x}s\pX$/JxwI@U]~[gF,40ק߷n c6j6NѢnVȑ!5e˂ÑN2|8(ad_% A ]mf5 kd$6Z,hF<& ?l+DDD$o5 .)W).xZbc3٤ͷliӧ:&$$ĭ-"";"xLO[ɯ S7ؤdzu߬*UysS4=cǎ^"6Ljʿ 8j UDDDɯ>9TyաCbuݻʗ&WKD$/I?=={BӦ%"""y_% ugθZekך S-Z,n`A4IH^▀8wi`qHW H׮&%řE'M:|l1%t=rfNj;v ڷ7Imژ)[&_f]w +ؿ*W;4Ƀj HÆЭ=?PʾwIB\}Mݻ᪫L[׮&IySj׵eΛo²efqL5!U+&4Ξ$(o/,_nvJ@$/d*Q|F@DRt cwo#jr1o}{WF`ر;2D$1y zO4"""ӧaCT4% ".k@p",[jwD>'4,…FDDć|"7ΙNxHIgD[nJM%v@?r8DDD|ҥP8]ŊAgwt>E Ǡ_?3l& \ a};\ k =|>x|`:tl,0i<<% ""/"##tk;uTΞcǚ{5_v~TJ@?\ ]?QP4(DDD򿈈"""֮]KzL Gx!9Ea*+""A~~}ٲލLJ)p~;L 'B`QIJ@DDD<{CТCUZ"{P/1gaz+Xa6%|yͺHF o'O@Æg[h(,_nw""">᪫̌9skLȨ8>ܬ*R|tiĘeBp0l ]OrfYُ_2H=rMhوвDDDkйu.{zqLI6m_mBdF1vH߈p႙p;4iG 53vG""""y_% W̙o'}1~0lX}RSᥗʹ=ٴɌ ofJWv6=;|5t落qHW HT?P!uJ8p йsR[2ypR>o%ރC{xŵךCDDD&J@֭540ק߷n m{W_f(~:ʕ!$6l;ɋ*9t*THjsݾ.OС䢓' S""""YWex/\0SR*\8f sf <"""2d݅ Сl٢=,, -; ɋ* 2RMz<}w1cEd>ƱcR\/qgz|/ ?sp)X*xfSb*лw1?kMH6Y<$̟_~aa&Ԏ"""Y~5R){ +ԾjOox8|?UhQ`>/Hժ}L /||>y2kgw4~V-{a(|k;iRR[\Yܸ3be \=;1S}{(Xд s^<6|9+S_S/kWb`VM EDD$jaC {LVl,w\0m.bյIRxJ6<, ^}5of_5ѠY3-Yp!<0 Z;IGX)K}ݑH^D|{;M[ħr۶J@w| jv|I P%,,% ->}7F2dIRE d_t4t-[kPrPցȕiD3 ^{´\F@$k;{%B+S"wmF@-ӤٳvG""""L dΙ3к5K7BMCDDD|ɸsm[/ۤO" 7@%""""tɘ CX$…M4% """% reqqХ \ K@&vG$>*,L 
OS$}.ApvG$>̕XݑR"i|-ٳM]tSoݑR"/M#}#""~'>ެ}a3/YR> bf֮嗡P!|*, #G 4hDDDa^{#YJ@˗w`ZV4Erdž J@DD$o$22ҭԩSw:~|1J6g*V"EqcX4c}cb([,m֭s?w(^,Ckla UցH^n^^|ʔіW H߾lfϽM%$@۶ ¨Q?ТؑtSXwks+>ޔ 'M {EJ+};Llans5{c>MZf4χ5m{M7KB (ٴu5k%3f `&ᆤ? )fjrmɰ~ #!((^L$maaMDD/8`8RZ<}K@ Lr)T$?oJ|e>lQi߽d˖\H@Μ1Y /@ʦ[9""S&^kkÜ9%w-L:{?[.YΌX7h`ׯO;Yv If!jk2e2G{_b4YHHքKJ"""Bбcv׺5 )R)̝/hVMJv2-!֬1P^|< ;wQ!> y%,ph#n_g_vج0 -2 t2x`BBBoS`,iV?n %J"6)YT1 HDшdr#9~2)&=}G?6[N?cRbEXLZd-իOq:@ff! O*x*u萹X1g~)<,HDDD(\ΝvG""""/׀*amV*.]o4@jw"""yJ@D$W+˛K=ׂн)a""S'(Qž8DDD2% "²_ kZ4{I5˔~Qhz4.lOL"""yukܹYs_~ E\l䓰o'2Ҍ/n#"MaLE$ׅ$̙=~܌0/ۛ|TO? ѦOpP2 dȹHjJ@D$׹oڔ~бIB/es>zxUضL 7?RDDw(\w Tfց$$@>f͇y_p@fֲefʄ f7p>(EDDDDr]puK@ (s3(`ɓakMBտ?}Wd;رm^VF6g| LjFIDDD WrsECމ[|~ٌԫ+V) ӧa޴7SʶK={yS|yY3jY""OW*am;}{320}u<*[,28wDDD#_R2x^r(s. ǮۦdfvG%""W8?:Y$ğt WŋРIFDDD3% "5)x˴-XUn$!-ZihbAɏ֭ׄfs2̓/0%`l9^{$"'OHS""^fOǸq0~-f /h+W,E"""yZZ#MnmJ鑑vG$""s@DGp0Tfv7ߴ;"U*+<i60lY3]#""P-{IDAT)Z2%iӠQ#xi8\9X*U;J@Dī̽aoyayа)7hwD""b7% "UJ<& !* nT=?DDn!""qcSl>hDDnJ@DD$=?I :ìzE,3% ""AA0kI>z2RQ"""^S*̘ )ED(xs[hDD۔׽IDv;&% ""uN'|,i_`wD""-J@DD%Kٰe <X7(|3|LjED$ ;o}U0p ԩ&{3AhDD$7)BT\\lwD""[OT f΄^;-J@DDgho FYvG#""8>*V"EqcX4c}cb([eKX ͛CѢP $ve[e  ݺA߾D?ҵy:gY֬V-AꫡGؾ|U|6dԨS@6|94kvh6laàti8|[ ի'~=q{1chQnCIDDa):gYyY9g|򉩆վ=kK]A`{9۰r%, n]7P?K@V6nj1߰ 7d_~IoTsgֽ;Ԭ / 3f$&AY$\? }wݕND$(VlRpꔙ V{{Ll""<4h>^k ӧ$*\'*2I@2& 3S`A8}L:4)S~K% ""qf:], 6m9s`\9"0$#:@'">ImիÍ7|4üEL fTz}Mޠ?۶7n4#w?`AO{͈|q9b] wGf̨vG+">ɲ$C\m߷E֪eKޞ\h(珍௿J;(&&kFs5:o-[L)b)3P;Ѳq8:yYesڅ 2a !q\*3*…OKll:6صkzJ;ISz!9ss9|rrYeݻi^"0Ӯx6N`y_% AA oJ>-RjՊ>k[Ʋk.Zj%K͇*P4+״5՞ش^L2w}i """"kڴi:eJ> ?dZ^Y,~{U><l]S1+s,hJ0%? ΞM:vtznDDDD&^UbFvG;GS}V NeˠyssL߾0m)xU-!b NիvΝ_u֔*U ԩS=_Ѻuk+Fҥӧǎrľ!o߾nf @ZꫯGl߾=ձ֒dZKyfuFj(Z(KiӦ̘1#ձ֒dZKovکK~4a f7UrW52e'ە4kۗYf1djԨ)ShӦ ˗/ry7?7h vWjlw=z_pVXCs뭷RdIz-Μ9Ø1cظq#Wk^y(T|[[%OyYr%ݺu#,,C1~x֭oFZ]k)e5{rYKŊ9u뒐ŋ8q"+V@vh3fpAF Z˨ ty2tP&[K? 
q%"еZ?d޽|=J@lt *p‰KjM4I&?k׎]s=b:"׵uOpoܽ{wj֬ /@TT=z)2mٲ'xMr_t]<2dݻw̘1߯k-7е1b#Ft=e R&>.SZ5:v˯8_-]9cȐ!8N-[fw(9|0m۶dɒDEE%Εֵ[ZZhٲ%zbq }|5\c9pXNr:gϞ6olj*ZUT)w?cc?kYrFEѕ.[im,fY$yYAeFPD+be+ȍؠ+fZiw?z-O;/8y<y}ukjj2s1Æ 3n۸\.l LKK|>_=9ku^ VVVfLbL=L\\6moCr3׍{s|>INN)~g`ր m lC`@؆KC."@vvRo;v߆ ˲l U =zTzlp~ӡCt]|Y|]~Rc9N93?;輋/*++KԷoߠ7vf1/KϞ=_E6,#v>$tk*}gS^4vXg9U||Ɛ9~RSSh}_ҥK6m-I:uf̘riРAÇ/**1F#h ș3gG)&&FG'OӧX3 F=aÆiرjyM8QT^^n߯ |ŋ+..NOֶmtU8p@pBթBp|Ύ?^ʕ+|>}7n\ eY&333ۦ!=xċ~hRRRǏO8 ><{ncYIKK jk"""̝;w1&66,X ]}}5͝;XeVZ=8S[[([d,aYgddejjjeuuu&::L4ct0qIRTTTHS>}[QQnݺ'OjƌjnnVccc`KKKSuu3}׫V]rETQQfeeephܸq:ydH-ZRr߻wO0a1:{lMkkN8 2$PYf~ݻw4V@,GBHKҎ;t]]~]gϖ$]pA^ZW9Ʋ,ݸqC 4(͛o)I}$Ztǘ=zGiW[[5kȑ#jjj knnA>S|eeW_fkw=WddBꪪp80b EFFj޼yqFH_TRRׯmhhO:UZ~ZZZBmG}ٙcnvKzcYNSiii:|pz^ x0;勤T3gTRR5zhcTSSR9۷*99Y_|U__~I׮]oǣb͙3G[oV|^mh ȑ#5tP}嗺v<<Dƌ#IZlt:a֭[ y^-^XNS%%%z 4N@!@OUYY͛7ĉڵk,Ґ!C'h…JNN/|ٳG7oT߾}{iڵA}NϗϜ9SWAA6mڤGiJMMռy*//ײe˴a\.effjɒ%JII jK,[ / FҩS 6MǏWiiij+ճ ؄5 lC`@؆6! m lC`@؆6! m lC`@O*yD2IENDB`././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.6553335 deap-1.4.1/doc/_static/0000755000076500000240000000000014456461475014075 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/_static/DEAP.pdf0000644000076500000240000000420314456461441015271 0ustar00runnerstaff%PDF-1.3 % 4 0 obj << /Length 5 0 R /Filter /FlateDecode >> stream x[1 D{YAH^{/Nֲ?> 1IܚO̙v|yS~ꃸo`r!CRUFv_+d_ZR)<5s%>dRR||QB{kT3?xo%_əy> m=`^@Xڪ5;QJMiRB D;8ː3(hN@l0# U/UAB"tUG >'RlHNX@j4dNz7(ORm4uoLr tuh"D&7ttLGrE=MF)m:'6rј4275(fJw$z!N3|u{Y+LI_ " 6y0A _ϨYס\8-tcn'Z+E|UүB}6E|_" WvDBf:Kx+Yo$,)I6\=Vsʀ6XO*s6 ;④KKG GW DSTx"e@Ψi*КV3uy-n1)}6ŋ4Emsr򰐂47u]S[>s`Na*ya6P*7s T+ZrdWzDjN_Xtz1ٮ*zb8'Lw(zHbk3,PP(M +n[PYX )/\in m(8u8-uG9 YCR|yg@/p؜JVH` h|="!73ʾ2ݻ_t endstream endobj 5 0 obj 966 endobj 2 0 obj << /Type /Page /Parent 3 0 R /Resources 6 0 R /Contents 4 0 R /MediaBox [0 0 792 612] /CropBox [95.51011 155.4157 634.1732 416.6958] /BleedBox [0 0 792 612] /TrimBox 
[0 0 792 612] /ArtBox [0 0 792 612] /Rotate 0 >> endobj 6 0 obj << /ProcSet [ /PDF ] /ExtGState << /Gs2 7 0 R /Gs1 8 0 R >> >> endobj 7 0 obj << /Type /ExtGState /OPM 1 >> endobj 8 0 obj << /Type /ExtGState /SM 0.02 >> endobj 3 0 obj << /Type /Pages /MediaBox [0 0 612 792] /Count 1 /Kids [ 2 0 R ] >> endobj 9 0 obj << /Type /Catalog /Pages 3 0 R >> endobj 10 0 obj (Mac OS X 10.13.3 Quartz PDFContext) endobj 11 0 obj (D:20180404131408Z00'00') endobj 1 0 obj << /Producer 10 0 R /CreationDate 11 0 R /ModDate 11 0 R >> endobj xref 0 12 0000000000 65535 f 0000001708 00000 n 0000001081 00000 n 0000001481 00000 n 0000000022 00000 n 0000001062 00000 n 0000001311 00000 n 0000001389 00000 n 0000001434 00000 n 0000001564 00000 n 0000001613 00000 n 0000001666 00000 n trailer << /Size 12 /Root 9 0 R /Info 1 0 R /ID [ <0032731bbe3e9c376961207318ddc94c> <0032731bbe3e9c376961207318ddc94c> ] >> startxref 1783 %%EOF ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/_static/copybutton.js0000644000076500000240000000467714456461441016650 0ustar00runnerstaff$(document).ready(function() { /* Add a [>>>] button on the top-right corner of code samples to hide * the >>> and ... prompts and the output and thus make the code * copyable. 
*/ var div = $('.highlight-python .highlight,' + '.highlight-python3 .highlight') var pre = div.find('pre'); // get the styles from the current theme pre.parent().parent().css('position', 'relative'); var hide_text = 'Hide the prompts and output'; var show_text = 'Show the prompts and output'; var border_width = pre.css('border-top-width'); var border_style = pre.css('border-top-style'); var border_color = pre.css('border-top-color'); var button_styles = { 'cursor':'pointer', 'position': 'absolute', 'top': '0', 'right': '0', 'border-color': border_color, 'border-style': border_style, 'border-width': border_width, 'color': border_color, 'text-size': '75%', 'font-family': 'monospace', 'padding-left': '0.2em', 'padding-right': '0.2em', 'border-radius': '0 3px 0 0' } // create and add the button to all the code blocks that contain >>> div.each(function(index) { var jthis = $(this); if (jthis.find('.gp').length > 0) { var button = $('>>>'); button.css(button_styles) button.attr('title', hide_text); jthis.prepend(button); } // tracebacks (.gt) contain bare text elements that need to be // wrapped in a span to work with .nextUntil() (see later) jthis.find('pre:has(.gt)').contents().filter(function() { return ((this.nodeType == 3) && (this.data.trim().length > 0)); }).wrap(''); }); // define the behavior of the button when it's clicked $('.copybutton').toggle( function() { var button = $(this); button.parent().find('.go, .gp, .gt').hide(); button.next('pre').find('.gt').nextUntil('.gp, .go').css('visibility', 'hidden'); button.css('text-decoration', 'line-through'); button.attr('title', show_text); }, function() { var button = $(this); button.parent().find('.go, .gp, .gt').show(); button.next('pre').find('.gt').nextUntil('.gp, .go').css('visibility', 'visible'); button.css('text-decoration', 'none'); button.attr('title', hide_text); }); }); ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 
deap-1.4.1/doc/_static/deap_icon-39x55.png0000644000076500000240000002726114456461441017320 0ustar00runnerstaffPNG  IHDR'7uS `iCCPICC profilexH HLinomntrRGB XYZ  1acspMSFTIEC sRGB-HP cprtP3desclwtptbkptrXYZgXYZ,bXYZ@dmndTpdmddvuedLview$lumimeas $tech0 rTRC< gTRC< bTRC< textCopyright (c) 1998 Hewlett-Packard CompanydescsRGB IEC61966-2.1sRGB IEC61966-2.1XYZ QXYZ XYZ o8XYZ bXYZ $descIEC http://www.iec.chIEC http://www.iec.chdesc.IEC 61966-2.1 Default RGB colour space - sRGB.IEC 61966-2.1 Default RGB colour space - sRGBdesc,Reference Viewing Condition in IEC61966-2.1,Reference Viewing Condition in IEC61966-2.1view_. \XYZ L VPWmeassig CRT curv #(-27;@EJOTY^chmrw| %+28>ELRY`gnu| &/8AKT]gqz !-8COZfr~ -;HUcq~ +:IXgw'7HYj{+=Oat 2FZn  % : O d y  ' = T j " 9 Q i  * C \ u & @ Z t .Id %A^z &Ca~1Om&Ed#Cc'Ij4Vx&IlAe@e Ek*Qw;c*R{Gp@j>i  A l !!H!u!!!"'"U"""# #8#f###$$M$|$$% %8%h%%%&'&W&&&''I'z''( (?(q(())8)k))**5*h**++6+i++,,9,n,,- -A-v--..L.../$/Z///050l0011J1112*2c223 3F3334+4e4455M555676r667$7`7788P8899B999:6:t::;-;k;;<' >`>>?!?a??@#@d@@A)AjAAB0BrBBC:C}CDDGDDEEUEEF"FgFFG5G{GHHKHHIIcIIJ7J}JK KSKKL*LrLMMJMMN%NnNOOIOOP'PqPQQPQQR1R|RSS_SSTBTTU(UuUVV\VVWDWWX/X}XYYiYZZVZZ[E[[\5\\]']x]^^l^__a_``W``aOaabIbbcCccd@dde=eef=ffg=ggh?hhiCiijHjjkOkklWlmm`mnnknooxop+ppq:qqrKrss]sttptu(uuv>vvwVwxxnxy*yyzFz{{c{|!||}A}~~b~#G k͂0WGrׇ;iΉ3dʋ0cʍ1fΏ6n֑?zM _ɖ4 uL$h՛BdҞ@iءG&vVǥ8nRĩ7u\ЭD-u`ֲK³8%yhYѹJº;.! zpg_XQKFAǿ=ȼ:ɹ8ʷ6˶5̵5͵6ζ7ϸ9к<Ѿ?DINU\dlvۀ܊ݖޢ)߯6DScs 2F[p(@Xr4Pm8Ww)Km]<bKGD pHYs/tIME #/i IDATh!D _^8(ufce䂏cHp-d OLe0o6hb!ː bXm1c"_ LPe b,#&Ra [ّ ah[ j5c`HTeb)"[ a\ ad\ g7caEVeb&"_  be;C-(*) -#}.~.(*  -#{/ ~/ *) 䖖LLd$ j6~.  !!X@_9%U!~.ssw ۬'%*( XX ++^o<<H% . 
֎**7~z/bb #".VV ,,zF *( fjt5  Ow \:#昘QQ4ccq"M&  + ` (uurrff隚d}}kk iiUVj{{||{{'UVp xx xx tt@UVv{{uu{{ yyCD( c##}^ h{{{{1yy RSWX/|zz~~{{ wwQRVW#v{{xxzzRS-IDATWX pyyzz RSWXl{{uuRSXYZ vvzz&!.SIENDB`././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/_static/deap_icon_16x16.ico0000644000076500000240000000217614456461441017356 0ustar00runnerstaff h(  S؝ά턄]̺̟~~~~~~~~ww``oޛ`Dj>YYYYYYYYYYaẋ.z|YYYYYYYYYZYYYY@@ V\ YYXX@Na YY/yy@! Vj1??././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/_static/deap_long.png0000644000076500000240000010142214456461441016524 0ustar00runnerstaffPNG  IHDRu?iCCPICC Profile(UoT?o\?US[IB*unS6mUo xB ISA$=t@hpS]Ƹ9w>5@WI`]5;V! A'@{N\..ƅG_!7suV$BlW=}i; F)A<.&Xax,38S(b׵*%31l #O-zQvaXOP5o6Zz&⻏^wkI/#&\%x/@{7S މjPh͔&mry>k7=ߪB#@fs_{덱п0-LZ~%Gpˈ{YXf^+_s-T>D@קƸ-9!r[2]3BcnCs?>*ԮeD|%4` :X2 pQSLPRaeyqĘ י5Fit )CdL$o$rpӶb>4+̹F_{Яki+x.+B.{L<۩ =UHcnf<>F ^e||pyv%b:iX'%8Iߔ? rw[vITVQN^dpYI"|#\cz[2M^S0[zIJ/HHȟ- Ic ߀e$'"%ZJq<1f O@: 4@@: 4@@: 4@@: 4@@: 4@@: 4@@: @>ĻzuV)܇@:P.)W|30/`z uk^BhK?TPi>Bh>C0PZΜu@X,9; 'ԁ%1g`:4,Pɜr BX}chP֡Mх1i&9;M9; `@:9;UOPxӎ >BeЇn@.9;u9T@0g`fB9;1g`BLL˜ uC:::P>&!e(:P9;c2g` B`|ЇnhPF ;'tɄ:Ժ`4B`DǜbB`n50g CÜW u"P.I;6:@Fmz:@:vB&bRB=+$Ze*Be!ڷ}> 1 ueHadB`IsvcH,MK#ɜ`a:r,PX:svEk`< b(F]xvl@K:z4D>ڱ-ڱ v u&xʜRB*$xaΎBuڱC>ĻX7@vs3gP4Ü`bBsLHpsvIu.g0:P׎ P`X`P`x]xvl:c1gP`LϜbBi\D0%sv3 ufp<svT:cJ u}> nB3g8FPsvW ub"@ʜ @BBuvl^BtX'@{>&`]:2gVE2sv`5:3gV@ ud!}:KN;6X2h!X.C/e,9;B50g'XsvaBu1g%X#sv9B2g"X.I;6P`O>ڱ@:< vNB3g*$e@U:ΜPsvBJ3Pn!}FBNN;6Pshp.C/iu9;0 3gF'`(: ɜPᙳ0sv`PBԅ1i0>svbBvlp>S;pS3g `I:ǜ(&`n9;v(5c:c!}P#uIagL@g:ʜxB@PBa&%ZBڳvlPVuD@˺>&X3g pBĜKҘ" uX&svXe "`svT8ki@:>C0B%9;H@s:9;4E@#:` 9;TMߙ@:>ڱPP cҎ u ǜ* 033gu40 c!}PΕN;6"hȄ:p.C/`LB9;HC2gu`x08Ü%1@:06svPх1i:0%sv8Pvl*>%7',_}@ !:P+&ty`.I/!>=~l$n驪8Ew5;S>ߧ1cB_k%kZ&)j\ȮseSX0ѥf,uH `npv!]ݦo4'^ݾmh3']nSunjZB>'.Pj`CK{S揰;R6ޚb1{@yBu`k!|I9u&}K3yb:P.I;6av:_ո|'[= 7[%C>D0 *–1(<|'Y= SքuV)c`0CkbMiWx> Mbzx!]H?vV&.2Gl.>}0/=<ƫ7avq{Gz{ 
~뛻Gݐ5MIs']i x}s7uE81vaa훗^_S[z}mzU6->i-Rׅ}sz.Ε;< ا%_?|f/I?OUrpebL%@+߾|a/aTWʦǏK6s|X]Q&g5ay:sv8 0痩)܌~}c8wӞ u؇w!]oobXN<` u`]:V&7i %W>6<%,^e:_ly\Nڔ:0dyb:9;0p1U;$MQ褸痹aN77]k!ԅM.}7aj_fsmo_hP(х1MOmRPi_6koXSý~yuf*t=h3gF?~NoC:5֡jxuWDUl=*uڱTR;%ۗ[W@M)e7=X+"aLlvl|BR8]޻d+Z3;?~ 8aΎvl0o-(E#bΆ躤ONZ36~`8B2dCw[@:sv\ IoԆ;% a_R:u0Sa&ˆC{!8a1\> z-nrm͝V}uS ӎ ߖVn sOLu_ $ل:MHt,I6lPAd?f%ƑΜΡeRHDOYPaΎvl00\J(}ZTK͜iBfKA=7?(C:4GT6EVXjЍ$,. u阳]oY:S f@M Z7ў| %)٨J u90u8& uCTd:kZ9Ol/szvlLo>UJ6n߄d%.Џ)+CNzyc:˼"$jЅ1M&@]ocswqM!e3.Xt.n7:T븘f͢+-0g2fp1WLŜm 6ikdK 8OlPQ'`Y_oZ,jmP5PÜR]0\΄Tpa'Mm!p2eĶiWVah|uxAFsD6Sj+Qݵlş%]*cE!wq@'.Ph9;뛻ݦ/!6gV4`4бY>uχ95iO ͝5'./sUv@7n~WaSt?sbMԴA>ݿۛ.ݦ.7[] (5=Lhwac77ws>ִz_CH)msX cMR8|XӮnƩKܝZ9N,a.жo6(+]gC71BL!nN5 ?~ۛhxxO^Yzp}-o{mo/B.P9Sk9O(0geW%=~<}˟z:f"f > Gn ը`4B`i>&svEJ?Smb~Jl~RapMv *Z('0gGXCE^lb txB!b_F$kb;k"VuJlb Ko3iC=?~ۛܛkZ)-_^﬉,qMhs>)SJsћ` n߄xd#M[n߄;=́Y:P/zl>4Da7s@ nЇnӎ P0gG;6RBﺰ`P9sv u^bP!8Μ B32gP4]xvlԄ:0gP\kLEpNLAp9sv ub0"F"aΎ`@BqtڱCi!}@:cKagp)4."9;ل:S3g8P`'ǜP`^E:5؇^;6@=:@]>&:52gxFP+sv':u3g!uZ` h9;jB%haΎX@:`]:-ۇ>|ws0>@Rؙ'Xsv`:a,P`iE,9;8B2gDlBu`zmB;.t}Lڱ@{:kd4GV@S:f4B9;Їn^&vڱ@:%7'm؄.>}@͡/.ɏaM!>ЇBtnc8׷-sXa=] )s훰!]Pj%%(n߄q/+?8xͷʫwc>c8Q})[~fםfx ~ IDATMX4~ZXz0QVg)|L6Q{>\tǏ篱eme`c&?机A5]a/oPH]1f?աi6g1BL!no|8dz}) :]qk |j~MXBůfMߝ3,mՁߋF\#vᢖoO]}3XR6mzLҙ(sr#_Z)no}ZuSԅ;?\i k5*qvfiw=}*xo uR+@ B  Bū7?BVc50^6ynqOuSULY/)278ow|B`3$yӹZ|;ڦ`8W_vZ HTdz<2Tp~+`9;6aJBZ}ΝIڊv:<1q AVZ&ý9Læz/dbs:ɜXZyud!R7[|֤ m!DƩMPs~k!lB~O_}v͐n!bb:vnԿ7;~̈́>C&mWbT_7.XChSX} Z=e ޺)/'!uMSk!hx'wy!34ۓb~QU:!u_!\u~qc꽲iժsnf U3mtn0/?nÆDNA?d7ǯol -=ָ^%moJ+YCko;礇w%mlY:'߅pFz}s7Ic1ژ ՗@gy2حPt^B$mEz9=͇ܿTM 06G!![N8m Wy3=~>ݿK–i6VɚPSx.`4! r+/gX65[,Gt슢5pCfܹ||S]f.&TvP=ut_[-yecѕOE#Buy #?r-dj!|\  qnPMU:!uvkOU͘msm+/)oTP0l507V5F~ӽC?)dA/~\%\eU:!u?֞h$0iǏ;Ӂː[{.T*?Be\0ZGEl&dj ۈ uįՁ0Ө0ͦ0̅EUVn7BXUr!j՟>l@!TpT 4p u8s109ZC^.俖}6)e֜qjv V! 
r)5=&O u8˫s/`^ٶCoVM3TґK:fl5| fQWtI/*K/L]HɎy֫C6ܤ˶r XZ3jm(}^ "t8߄ֽg{U\%s|~r|smͪ6 b74iiZiyE -wvsiJ'LUo1q<%1cղ֍#k,9PR-spZJ@֯V6?OFvZ74e)n5WXfב: U<} u8vk?dv ק `!*o:-~rlP U2jg_z!Evk3 \֧|k&\mg TYx3*ꆙXrO UB~u~%^v*ܒi34wfRVa5j5Z(!orgJùT s*iq_l%f cƔsL RVXdσs{XRBNB8~3s_m 4 ܶ2+9f?ǎT V67w9\.d~>;P<bޝ{s۬O4h ۤ^EG>U=T8}-<-ss5 WΞn}5VgtTUԚ#WMZЙnJ'2?b`p􍪂 ݩs0_U#5AR\U_ue|1ʜUX@B 8?\eM,*fT{۩cgW6[ $]*'>YG`u_>5FZ .nQ~wEGv&|Ϲ`{Uq%n_}훒LUC؇^2|4R`Eᅪx0WN[h؊[ vj6@ }aeUWe؎oK[>E 8YKyJXhldXx}sTvL쫪*W]=BjP5{+N3?~n)G7}WP62zkm6Pntn3>d/8ģd總_KkW~V~U`_L#߂̗[k,+Ir3s U&/5ؠm_`>UZUVB~3 5_}mf}+I&JB*ejtU6V XI<pK6B̬Ã9RP:nIޘ_mS :ߜ5 _ۭ͡]MP΅)Cג+suuM>Z:o:{NRX9֯tNSCrb T ݱ+F炐ڲq} !xoB0E3nhR]!GG[kʴ!J{P]n{I:3ǿzXC6k꙼%^96/rt-odMɇOsTU؝,{L E0<ȫڜ44Bg9 ݐ_GZVl7ނ->uqP4Y! !w{~`0כ[̱qXU/g|Ev-rZs}idu%9h9)N i'3U%w~!|29-9em `rU:mm |n"Ea*3WgR8?͜cN]nSf+TSnߔ:aԫZ'CS^.J竖&}_GއsՄ)!^F%3r١i=ouNq]);7enlBH]ub_>?~7_nækZg6ÙZ}gf@H ܇L ڭ  uǿ}H?} τ:ivws23uE2?LZ@Tc~^z uE1?tfB`1[P ܇Ly #ݿn\W7Fg7O7R" ѱt-N{&):p1pҁ>@A1EҒ̪jnUdծ[<{{ f*u3tBZsO5J%:P::@U[Gxq{ v*ujtF2n*u*h6" *u tƥAP,֦JjR(@g")r/'r_''0[ڭM'kQ@gZZ@MZZ@U:@v߾"|W1?IDdZ>e5da~Nf).s/؍J`r@5P9ez #&Z)T@:[+L KvSJ/ѓh@<<^;:h[+kP $)kP-:[+\RR FSt2ZR\^?::z R؛vkuz &/|{Jj'vZRX^p3u. tj^=˽0*u^ϩkiM[iրRZ`p/sڑ.^\^p8:Ӑ C|hw[kkZT@Kr/KӰV 'r_''0:ږc5Q3%ik3u`f[ 9*u`F:3T@kTLh6/e5R3 Й O4Lz $F}/_^hm@[3U:*3u!:*qĔR5Kvb~!c50oaw^=jMS侇;ԐJRܽŰz55BB(P؁9U:?{{p'zuZxr=oÉ]i  qY]?[^=yݜdFaA)-}C^SJxk\KF{M؏~e]~ *ִ^~ѓW'ukBiWw}ߍ$}T471-~>`|nZtma>Z GpB,{rvm><ћ.^\g˳g<~^҂k+Q^\״aBc@ϡOQeͮNjlLgwٳ>N!WH J9 IDATm 6K +Ɲ=v\f:PsӇYazCg\W\H/.;Ì<\NC~eA՞qWEM u 簳mr:u&-U6ΐWlBZY}+{3t8Q)lO Cڶ{#gyB(9%MsTcaخ'{lBQv%r!]&v}^#Rf u 簿Op>jSSaCFt~sdj^\wJ#WLcsq___wWl]5A'nۭ t؋kЦmwZmlPUU>Z wҵ6[i7Eu2~&'!m]*v̩sn=f0#N| ͠?VoP˞t~0{ggKaCˬVa, Tlm@~=SӴACu@_zˠr6f=]wiխjudђgI ZߧpmzY8iGqR\qqu0C5a8o\zjfD奟Z/f@ ^ r_G>^=[WT<{^=`~UL1?AukUz~4Q^!ʹP&`~Cz غiT?9ک|K;J{:4c-؄:02ss0d=99نga okk 6F7LZ>kM 0^jUy5 i-؄:0s Цt*٤aM_~=LlQ^^m!' 
WaT%T aN[RX^˜:7VFlvs[{} CxkPsZdL#4é9GN*iԧEUdkPp>vkL\4zZw:j&Ȣg Z:Ӳ wPt;?>4jy:[ZPxFuMh w jZzON14|Z u0N8 N&&vfW JZX!-;ŒZ u`O09gM]-!dmC,><sтk8E<9I޷9`!myQ:7Y6rK:ڟxYg7a:[zھE;62l^-tI/.ɖ/!ǧOC̽ uhW(_u).|^aS%L :T:?{=h ).:.QZGѓ]u'Ȫ>a'['nW7JTA跑WltQNb fCn硏bZƓEK(?J)3{(O6t?W4=9J&d\#=ZZhN6>9RnRa tnqw@jQ"ݼREIaȮ+&om^=jǿnv}pb-ӧ-߾"|WZAsUՁvڢ*Yhٳxrw:oBs;MJK66Bvkmy:w_qeNӧoD=_ڮAP#=\JޫE8J艪Km'6S>`~%jvFŋf`myW[¨P:[*= 5O3WA51V(1jijViMn=۽"5-Oڱ[ u Cvnc@ |{͘ `VY|d[K6ظG-ش_`~@Cl@:[rzR\}Td~ݵd>ZWrkk^-؄:⏏`z]t O6G agv.uswjI}T=SyF%sB#O{tk\NY<9I[z }-ښG:?{=9G׫^ēӟ#j;>zr\k>:ׯtCcOSuIdw^=KqF^ joM!PZ6I/.cscm{*'RY֨F'Mbuz?V/U *nOzys0j}ﹴ^ON|\WTqWn[U|0 ڭQנ]b̊[M,]''AR\n>5pCZ%! GiuC0n4؂M5'([UjٌMC>l6k݊-{.b 6M3?Z9Q-،qWc>#uoP U,sЪ26Tmp@{nD2W>#^:?vМBڱ5М! tЮB6Tkچr6FQV ^+= gM]t:7B/[zo*[pivle6}j!'<{6g&ytދҦ|{0(߶qSNLLY<..^\Ɠk3pk}csC˳gdBR\iM+:T}Pi[njta#0[6BPUpVnR^Nރi@.އoА'40-I2C-:Tapiި[#-؄:TZԽTcթyBhdFs#eC\ѓi<9IbZȽFfn!ʚ^:TW|{0N uDNuo}k"u 2gƐ.^\n-eEOUxt[&'ܪhC*?&+T@-l(@-؄:TZWGXdCwzz65;nPX-:Tnom '_kPGqSL׸`㴒Bg~pom su uo2ߌ[ک&74{8}Pb]!>kUܫ5sv&^k5XPA2_@n2WPGnzMju?oګMV` \▽mzP>>f鬞HBGKzhLkkP|vkOj0:e;6lZbi@@%:7׷q{آUB`~s6 %#تOO2j)s뼿os׷>5o3:ߋ ufmnJ&kks+aq<9jB^Nf_MB+@y}{m׹Y*m* +shZw[$머gx^7<^œӟl~ǧOOzUߝBF_6-RLfi׶J7V]3>˽)ac}LqbQLxr|}XoRuIpV!EoLqҪ]=-[Z7q^=|^uO*x|Ǔ-ߤ)0_ŋxr|Bosv{k:ت :LN5%lArr2շcG;m|ݪlat2>z3Ժq}]ժͫu\7D󳇽+(oWαO~ I}  mk:%[ I.o!̵&5i}pj< `2_zˀbDFV<'H.ݴN\E)EE70[Hxq9M.e0&\ iUt~pͯaix&*ZoX@M}@BFu;?G$Vfp]6R\XJu}XE^$m qH^e:?{8룅@C>Ahρ-*; (M[nw_).^e9W`v榪I$<{a}V:?U 1{ }=9vk]:?L#>zrbZ!E!p 6qX aI S\Q}v#=߮ ?;`j}^Cl{B(PJ]?[ s=9εqz[cNP9@L' uTB* ~îY,vk;R^:R؏J!!B0E5_xq{ @:3_>Gae<?X@2zi6gG7=߾OaRXRL!N@$C?>!^SJ@o?BZ|/_''0?c5~m~QBWW!]o_ CڵBXB0' EpY~]Wݾh "?VAջx@nڞI{ ?BhFFڭ^RgFstzǛuPU_ʽ hk:3t>·_gCBx6s`dIp83uwr`K fDρ (Q\::2)B!6W³l4>Gg&2J`TڭAô^&R'l[WWڰѬhkTd_>M޾ _ss>hSLKN.Gz|b~Dr8J f=Gguhvk0ٳkE|*f~̍kTLסB idRgJM@oL%:TLC[J/.s:@gGWW!]U.އ|,ЁJ*o<:#^Qwܫh؎ R>7!>}:0Sgcst&fn ӧ!E8ZOCzyleiON!UK= "cZvrRXW&2{eUP! 
tœ>@l憐',G|8Ĵ1-C20:'ǷZZ''!6}[poK01Zo:?Zu^]Z!0zC^r=`HگCx7s~|pe<9Iq%]mNmfѓxrۿPZq-ؓV{BSu؃9n* zwCzt6ȵۤpt+\ j HbX^F'st `޾~ |W$yj[3TS m~txjg=P ξ*[m:n ؕ >i1nk趖hC3`25йULuJu`NasnMt4;tfHN_;G>e PΎ r|9V4mӧZgʁ=q.^\ >zrSp2L1p}zAg FYf2:ݯ!|yYu=<:Ņ%߇U@:[ӈw^#1?Mצ#@%UtT O2ئm>t?ҹիZy5އMxR u6X:6*ɽ f~0&'tjy6X@׬(L~CĴ|(hPaJF\7?`սu2& IDAT ^zU$-`'4|ݶ[24`:[u}&E5ද[֭kiLL#9M3_Z߾B5`[Nj`s_J^b^:c_:!l9hSVS|0_J_zfB;ܾou_E&ܯ:!hun3#T`r6UBI:U:d<5Ղ stf|♟ж>;#(-1(2.-k:3VRR V'ܱYnnlQ^شYi`"ust-uc~>`r+sȭZޢZ0P'%DaBWW!]U jvkkL!wuIіhZ0G u!9Aݯdf~P׀V+ oDƄiwf꘣FL}GրtSUtUzZof꘣C'uFw;?$,:S5bhSJ$U:U`XZ-ل:cstH7ШueK=U,KZ|Zy9kkK YK%@C>˽e)*Ż_C×Ws=9SS\U@˭ݿw *-e<9YuL,]''JU0J2G\]7Wsi&>>}W=n**OTΞ|_>^a;9@>''|B[uLkUT~M2?|{%n BPnLzoNư;ĶK{V qK옖ѓU82F 6h<1GA! t:ho~_|Uoi{q\GPti`mf1_vkRU:?{{lVrhH 6fPgNo~a~P٩$ Ns}m>[@Ӛkf1_g#s*m9 МUllj|lj||tܶLjK ɱZJcz ۇ. tzC\EA~kl>D[h[S:9:0 ?̽nTIkiqRXή:jW__3̈́:??5GI\]w!^I6߾"|WЮ=&BOgɲ߭J&H M3t`eM:7st5?|ր uCNJw݂M@ژc9l⏏:@[JϰƩ|Z>ͽlx_KFL9:dy;MIdtfzrՂ (>1Gj]]wW;f(i baފlEѡ9:TݯE׹x㿙̏k@1J=l9[ZuV*@%u:4mmXU%1-q ~iR (6YсFdc~0k]A#|hJΆj@- u~jM4_F+`Vn9Ԗpr_`Ņ:㐜A1?`V"@c*nҭC0ZкBsthDuPeVaaF{``]E:0 #1?l}BΖi}u-i|Pfj`}G>= l>U:-^= -+"1Gٹt>Ў>C<9=;kv{Aڔ=ԉ^,}3Hh+xH}^kZoZP'yez_6iИ^nN|x}-W[5ĿM4l:tA(]xq9[g}E<9I߻ sWZPg u,7iuyЃ@%˳gt+ib ˱8*Ĕ{0,:'z1?'LZ5Wٳ_><с똯c~@>5I/.bWߧ *h䡎9::JP:!9:clqu?on `WIWN;C3 t n3Ycu<UTg.=60m*\-т |6wuswI+n퓖WWϠSG*ӧ!NOR\WVLH/Ϟǧ=nXY)_81&PBܫDZ27.=9qsb7i`zl`͛< Au:Uyf(7M2İsR؁TC5`TRV79`V-~)Ƚ(JsB($ A#f/MkJaq{@֭:(R疊R%9SsKQZخJ[*v=:legWP*unUtz(R疊fKвINfJ4)O>՚@`OUTRìJ[*v&w9TUsKRY eeP'3 VkҊ n&I:T,:@5U[s:,s/)ns:R'_81rR(5 ءABHien@_eHq{Ьu5@&TRC3Tj TRCI@ԹbRP*un}(U:TP-:bX/s/5 ءRB`2iaU]>՚@ԹbFe:T]u@2Թb*"=~{gN* VoV>k&hl+un᧐":wTIKss2J[O< 1rqW#h+unء8*uhP"zK+ss7kҊ [ tf@=TP:VZ̍J{\iuS#:[!+:3Nu;ienگuЊ ֭:KS3SӓV79>RgG*vJ!9lRgG*vٓFR!Ez22(PF%ԁƥUW@=_;uX^k&`'*uqi{4H4(-`_*ueHq{:GT DSma?̽ '`A urienC~m@ZA5R3; BT(-`,*uFb`ns:FΈTp:P 9LC32{@ȴbhк՚@IԙvR ԙMu@lTLLԁB B ;"ԁJZZnjM@QTd뇟BJP*u (Jҟyb\^iuS#X*u b{ԁ*u b 9ENAT:0]u #PFV^Ch0VkRP*vPCIKsshJB8]u@&)Sa?̽ PZZbrjM@TTD̨ԁ@NET|:G,ԩP[`~:̀PV0Wگ:,s/`RVkfKN_81rԁZ@*ueHq{H:MP(:̚P u27Z#bn& 
T4FNCT0Z@*ubGZTt:R:4MuJNTeRR8;SCkbX/s/j%ԙN:4#w=̽ k3]5 @ΌةJT̈`|w9JRS :&Ez22UBO!EuPj90>f*雇!Uu[ZT̜ԡhiinLK̩vV79JB*vR:FC1O5hκ՚@ RQS:dFb,ns:P:lb'3:LNuLV:L&iVZA֭:P:b':JuF؁VJvbgB*u\Z{{~T;PT BabX^C {8ĴʽiAhk%ː":OݵZ@cTp;#RhJbJpW#a*vFR>bX/s/Pv&a27E5LdjM3RJ~'-RT:G3RѨ9JBs[BF%9PgCگ1*`Vk#*u=ԙ!T0 ;MZTtT0_OC먂JP#ar~)Ƚ uV~ɥ?}0ĸʽfjMD٨RAs!;GZRTlRiYUSCv*vh]u@Jb*u"=~{@[:EN`,گQب]50:INPS47JbUtIԡhQSECfu VhFbVk :Tcv;*u %PC5ҟQ%:G 1r/֩ʣՉxk9@_:\Ac֭:@TP+vTLD5m#0ȥ8N܀T@ `u73Gsm߾x>1(uK}xs :@JkQHcԡ M;Fѻ9@Tί8ԚAKCS*v:QmPQDC(v:w> ))uhbW:Ǡ4GCB;J i7}Qv:ʣws8FbkԚARn ߾I)KŔ:zԡ4 c:ǠtGCwkyU;RP}3ХÎQ p~Nx;fHJ:Wuu)]S&:ǠpBbRG7F8nfɣwss~ b+ԚAJ8QMt:pB3<:Ns:Jxbg*)(ubN ΐVyٗ uEУNvP+Z\ ?=(.r<0JUX3R.??SV>:Ǡ0#o;*o6ҟ`aQ'Xkp~OOtԁ;ZTWwsQ(vuA L`bRGP Lda訓G5H3Nt*ԁMV,^8P3L,^CcRfrwH¨3kؙuɣwsbq~ fT)Sk`:/)U4y@dJX@9 X诏:ǠR4^SΫ~xRG ,(?=>,SsZԁ.*vn-uʛ/VF(sǼ>aT5(dSlSk)uwJn@:Pmα1tBy*o6 u"vu~9SlSk)uB5J~@JFu!~v)%@Q @F:u0`Q @F:u0`Q @F:u0`Q @F:u0`Q @F: XJK\IENDB`././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/_static/deap_orange_icon_16x16.ico0000644000076500000240000000217614456461441020711 0ustar00runnerstaff h(   Ls;s M@@@:@O@@@U@'@ 9k?././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/_static/deap_orange_icon_32.ico0000644000076500000240000001027614456461441020350 0ustar00runnerstaff  ( @ p4UYyY2E7>Nj#I{]E}ryT ZKwnCMo/K_8Ay'? @? ???/??././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/_static/lvsn.png0000644000076500000240000030660614456461441015571 0ustar00runnerstaffPNG  IHDR"#gAMA asRGB cHRMz&u0`:pQ<bKGD pHYsgRIDATx}w|y3{{S"EQz&ıcljql;[\e+.KV(Iba$Dow;3G? ݝٙٹ=oy^90000000000000008au 2C .o`````````````p| \ 0$! 70000000000000@`H``p!J1Bs.\z.!y5δ[L</{"sƅqb is<*(D1g y]8 :;u -S_O_qCW C KTRTrTT X/8% \6(%ځa@{$i#v%'9``BQ@ii2@F&ѽ OУ7²xfddG?5000000 78QRq38]y5hH6mv#nov0"N $L7H=Y4:r p}?i_}9$. Ls0iZc~djq 3X6ztC0w? 
3 2M?-J !T` )rT y8{$"*넚LCG_G|DCc*_k/4XCRL3ƤqB?1o@Tk\V,=(C~Nv끡}c!7p!c|Ex^ӫ!]sqt >eqFDClz!O> irrin=c\##aAB屨g^8Sn2zC݌!C"jYDOsVs%i5u8Tr+vnGnܫFqbdM9IIw !sq5UpK"NE89%\ux?|G%PhH=PJ   ϫ 5U~ m cƤ0q8ݮ}e``Y ѻ!4C :Δ"/+AYUE@ꊄzA" t_qKzO֥w]\O<2_K؇ ceTdaEy04h HB]  c A*yX~ :PVgnRhpt1.ƻ;6±m  a1np70b,]=D&`HAFQPEq$ːtSŠ8 lT(5;-".;kcCs{XzCt,sBU:1*HJp~hFcĔrޓ^ymb~< q%B>N QCWy>AVkGYQϕkͣϏ5y}"T#S|՞ĭ\y yj@L$ɓsits}CF(@ $ 8x[FӚ300p\4RM^e;+ۙtSHn0DPV]5vbw8N=TR&I`h~f'Bc*rQy۲Zm}(j0ed~М!DqE.l9JOTU8@AN.b 9rr7Hضfdh{/8pRȺ @uq ҡ5vsdcFbJ+Q^2k(.Bu}Rkl Z)N%ѣmz,y /<ӣ,ܑ_'uEyE 켦(O"qj 2 V._GtOǹNcPVFByU{V|0 TDP ɜ ?c@ (.3ᅢ)CG31u}t=j/⭭^s d~'Ξ3sѯ{8 S/`xvrl8.o4_\EXu=lqSpWct%'[ݶ07#c0wDL2Cֈs76o O廭:dakS%[7 f΢Yt/lXl\=e6d!ۦe~&~;䱢¡gAv.` 1bl,>8Y~M cQj6 Dz31apL>C{kט\ßy7cǿlH~ }{At <î#XsہSeH&S~ynS̄G8gAwE>p|bw8-^R/G8H7Džk[A>R='#hDԁgtkɑ mYPOʱBe eU?mM5łM@$WR'0\`Xsʪ+ѯ{O =R'[Q0eh ;&Lò]Q^[{͉q܅2WKOphL%q N{{VtU&p';O6JPZ2^qt1ܲ;ƄA=u>ݺcΨ xy{Ngy ߅=zaPK20ip'ءk0s8ޫg3l ek7ݢH=ںg+tbcxi*VU`QNnN< ƞMdן:= p)xmV{JʱAuVފ^ާmJqwkw?0 +voiu[s66N};xgFTcX~mJ, sFO38P|"#nسw C?};l&*)Bmc } i&srҎxzw *o,hc!5R 4hymٷYw# uLue?Y&"°C%,,3sQPFAT?L F(>%[c-!5|q|Ms $a-BeetUv8Ү~R'-0eH޳~+O<#s|#kV#'+hUy8Xr"i)NWU`O6bqi mǙJڳ5y۶1ܔu$^NFǗǭ4yMe] ?ѕoQޓo`7㹵b_t16']~vJ*bcٚ*sNWU` ͆+8|T׮I>;n\&o~*j>urT` amJֻ^ܰ*#c2$?&\C'aDZ(8h8wWz~>7 rt*󚐞$^i`:G1VvKr)i.H)!<%HGu%傊Z QBfjZjX,71a=ɠϟ( W8`5!ESm(%E88y).?7LĄy\@(`tԌoƈx~l}DϼxÑŨilM2&ަV;?l;v1M6 c lm&m¯='⛷܏,_%g{ sѻ[!rQ]_i^SI|۴NP\Qe;7բyphOF5T/T+c1@-쁜D꓍(,ޓǰ𾈆@Yu%ɇoG/ñȹ#5vƈqxOAmc)I[MQ؀f GFDz][<Ǯ,L2M\ur,ٶGpj?.(ęʮ^ _O6bӡ=wxP77- KI()t޹s# $2$4yF/^hW.H7m/O$ObL$RUNQ !rUj&ob\Ecy{l~[_[[Ucϯ_G[NMayu}ہ'^l_" a!2d$@ANn$ZKqT;=7^uK߆T#ʪ+ɾ !|멇P⦅5r{%.=IOv}ooO$6/wo:|vM-^A=ĒN޻nU7wچz,ݱxU4wvlĄAp%M7Až:ԏ)G>I΢#M:zħ$l ؁C+ sp4$(sHZ T{}wsXSKg!WA7Ym4RmDZ3XpTu:o0$*|pp)y"Ce M4t/Z !UwE~'^o%}d$t]?Uף5CӿjFwzhhxD2$ yDVܓ%}~"Y_bP$/?l43sLY_?h΃OUKbx#kEێĶcѯ{O\=u6zq :lՄ i4),۹7ϼ$#,̍/3 B_1{x|Gk)qӌx8z$r.InH5[񒱓ڦ[ϣ>&|t sb;V[n{%No={;5Wq魊(H3ՕvTݐȊ7KSGpW7+Ndnr>x-Xu=~ʓ2tT5O66\OǛ?c&h}rYy%flix=j1 rrzӽ7S{v} G>eqγkXxt[ᢹf~~n5_pm=z?y홴QF~QG-M<\z oo߀W>xu>ϭ[5Uh1я{ۖ|ͶMy.^}:e Gt^]Nӧx~2.=O8qͣ+r)D8p@ D(JnK 
1=´O-]FHFq{JOYC}F#Q/8]O>?2xN/׌<J WU{=?Ξ)\]'iatL$"{؏YugZ:2e,`PSH[HG-O}i؈_|՞ut,w̹ }, C/9qϮ~oo]Ҋr$KxzF99N<>Rckrf%BDN}%̚zʼn)rUsR Mg0Ҟsn*\o&FW]浇+!IP#4pJUl~'InMCŹQ_859GH #O4$Z1J xG~/}ga``pn[+̑hȵ[tp$pcmc+ߊJ+3) }ozNʹaR}zwW\{n횟!#Ѣ)(<h36cKOĆZO<?ߤHWLX×h"m ڣp/,5ܚ Xv#ڋL eenO?iӝ^ذJFcڬ_rÐ;mvoEU]#`qi,qQ(7r5_B=UKDՁv"8'h@($i'rEBJpsR گH7^ū0z5~=_I?HM*f@[X_z:PJ%l_FJ|/k)$ʨ#ϡBg}C*;7%~8R~5]/cXCZu;.;9xe]--(w(֓n3 ~g3Eާ?{\r 8~M;׵WM<棧5=zcwF~Et^߼&֓޻3tT|nͱj%/6G:{e揟(Ymn?XhgTӇ>i^ k'e Ðax%X nK4r{DGN *4?v>`I{*[yjuE8: cwH[M]vF}DP#x&"}s9I/?dwݓ۫rt&'z( PUhCFi|qxj!c^jOk/_& rjtoM~F|^ybBb KB!WO= {*+$Oiiu|X|Wg̑pϼ+c9]7еꖓa3!$WơEEQTvCV4{7xkz(.j}Wq=Dz1vf!;`}'b4e5ۇYŎ!<Ö=O6z%U&ÚwR4Hs.$|Qq)X"W\J %FJHt. _7:0he:gڞD;ΟUi24BBsei4B"/nO۵GA"8z39UzUA"F+u#'fgфHis{xh/o?<8TrOCM>q ޣ3[ߧ[w|ڂO_qz!\ܬ7cKd@(uI(EiAdP!xaX:o̤I!`7kxl^3xݖaiol^Mt< ?Oa<]l> 1a A-xJO0WdXzP٧2 Ǧjw?iyU ~RRY-c"ޤTI VсF$`@[Z}t5-nC !" -㊄ ou{V27]6SQӐ)d$tXZ$>g.c=֓O^}E!EdE ^1qzԹ?r8?mبf>;Kܬl|l5J*b՞Z@c*l?vHho?goL s_<'/; \5yFlW/M,Sȱ>O&|nͱ zxj`q[.kySlXL=]6*:^\A^isDj'I^G]"e@0x"kN"׉QAKm0VVgc\t.rTOk¡IPnV=Dm^:YfR$]b! f"%@ٌw"M}k€^֞kWH=C%dqQLO9\αn|oM 9'tӑF7:ƻhnԡUBk݁]S)CFb@^zbƈ܄۵ sw{jibWbX7sJ^(9r+^`۱]=- ?GL%r=TH &ji C핒[zLY˻HRB74'=E l]Hn+!}noFf-j-O kA>#OF<[3m4w>R:Jĸѱ+O#/n8?N@0/mD^7c+_ٷ²]cWrulCmC}Lu%^߼_?_|,3j~P@ss 딹\4|t~q* E]XuR,NS!q{kw|]`IT7Dt¶1N옴pW.6VXQKboƘc>q$s=/e/D8 vv;juqAӨAT}2uvvD'3 aVC)+HIZ-DiByDkD_L-WF9quX0~Z uXxaO@.0-"~T_a.SE\wϽ"m42uis)#xz=Zb z|bn[5]!̊b͸evH)ҭEbI~ehbG<8~Km[[ǖ̻gշvZÅ^;+ C;ۏ<-O8J92E`!$M9F\՜~,Nmy^H> g0?uGžأsrp,,(DςB<g+PTVSgϠ,ʪ+PYSD;p cLxFǞ֛k% T9:-ݩql,E;kuf~LF*"ASL W*95 }p C k , FBW)IUWKLA|f$ٜQ=/v&ϊ|m^Bmc~E޷yS.z}ӹ_zw=^ʞ|w#vDZL<_vL}rl:O$`BKMA@C,.޺I~]B}uf噤w{[H+7 |0{T^MGK+2xI8S]lCU]mD)@$Yyԉ8 $UA9@<4/PqOq4\S2Ba^>1ws̷eoaO- P]ű38\z'JQZq5Zo/S{~5L( (])?;&յDE@{"6WR{!{ե>C9g9qWb}zؼ!TIKcas {u u|fTSZ>z)aCc_{.eT ?gݿv"aۚ@I`T2s> r-'y98GM}>aͨzo:_ŧljc'ze'#6|_aO|phoܕٗc26;6jGFmG[&8Hcq uy^}LuI$b;vy}#掙.sFMhw4^}1(q5(*+ELxfNјJMVp=‘R{g"A{_<#TL8ED՜6EP=1w? 
yYo lƈ~0RolľSp8Δte\UGh#ba B@#(BZ迖2 ^ !F,hAƬ8 NRv?=YDL@Zԩ"FUX0Fh }kRgWv+߅:G|GIeyD+mPI;u~ٵxk:ףFo;O.\{.8P\ԩkYKDRsm?珞._~{^:' pdĭh@zin20E>hxyM4m g]-,: taqۜ+7߃k1s.x7y~xa}}fˌ!p=nDzQlBQ< мT\ q%D8!j5.or-8zwä!#q›q+1T τmc␑+w7ߺ4kF tMQN)W yauxy-t#B8@AE$!7h/0pz",JrItPhC';me!^}]_Efɋ8Rz*#^AxY)v鐾[A=ؙN5 As٘<0Ya``` *"[V]19ᝁ1c =nC?gzԙ$dRyoeO mo#r1^xvpGSJo4$:_|`߭<m[K<:g@6f;&'?C<ғ2';SPQ;e+ۋZLj-摈@ 'z؁C0iA敍a-f`aD؋?v )/Ɖ'VѲ[1eh3)BEch~2j7=ڨjkеmFM_x$'ŘuPop,Kɔ ך Az@2+K0x-(!;I-ǟмHʍxH^+j/ǟ#?ړg`p ,6qp޳#iK=C={~vionb'/.6]tU~ӭo;'6z` a"َ.*[(͞}ёH)cccٸrbۍ)s.äє:狏-49eՕx# ɿnڜ&ۚUX*?7 -Yi f9c%Y۽'L/WMծ/9_,wGbuOo$B䩋_Ix*/Pn&D+d[DDTy膪RٞLk)4L_~xl2Q H4JGK8Nyݞ £xt00(vtp T wm֎]N0L(bP]_{<\αm2s~GOw<n858gP 7+vbWފ6nUa=)CGwcэ0sĸo])e*i#åDz;>TGnYGƓ/WSJoaf?*v, o%6ދ"J \W>@x+R9ϡ!F΋@Vu͊OUXB$S)cC$Kն̗^x+_xӭL5掝2z 5pr|l[Y(QS龢*xb* m7]/C0R-s]|{J9(.lI`еp>~AX|(^[P?:T1E?ێcgJ7 ܄K8,٦b`5[È"PXOkn1oUB|b*/|.c|w&ٿ3r|afjB)>2o!f 'kQ e;7GD.o~b͖ZA=!vK=~5VG6R yMᚩqm^u=?x1-x,:zsc 3v?0$M֠JŒ@@*eNjhN0 3cwns9n9e08Gg/Y\?c> sUI.C¡Ku8Y./3QDZbH:W8E}byOxțײ֞5 @ UEJ \=b5Q՜vj۳/ga” 4w_g!<&LK^]_u/gdCzŸQ5'pyLzz?>\b_Ǝ[ a)=ǔ- ߸fJkBnc |=/?rnɣxh͎}i(2c[##0x_03^wW(_;MU<}-Z^\YN\+&@a4H:rקqч|YBuj{1ޒ4 @uIf?Lp)9}2IqI@"cOuM*G]+OD> $UJh5'P|m狏!k`Z'Gu5(P*}'7K^Ľ._hrV+wo dM[B(Ze;7Er ;ɺ9,|ONW_+o)?<k(U:Rւ%]Pc]3ՕrD'OQcQG 8#yXkn=\|멇v}÷oD F֭ DJ#f\mAC?{/ZE HFoȖc~DW5N.Pxv۩JOy\:|Kb_ԳICFb܀&ޠU, sNܱ>q5J+HK`?o9ꪽL'zEN紺oA @ʣ47X qlqҏ *^d߁AdD8>!4__ݟ@ ЌJh@c]cwtƼSNUW5y@UAv. P|t^ 'm<ı3%Yt {^>fonY>` 'csjXo[[WN[z|Vo>{E7z)& nĞk{87Bml{qE/kCK_-cZW:U{{/;i{r7w_풗Ч[Xͅj6&ܜASqHC[16c|EY.2+mhi ?[;l'Ɔf"捙ch >} ~)'~aYhh;y9/ϔ`Cxov|p(}Ug_nx"T{K*[G|"RIS}D="-0O(6dŮ-8U^=z5پ4f RIV+:˱l^zKcj~};0kx\9i:&"IsʺZ9} ]V|#LK#;ܫcTcH~i۝NS${2? 
Ԣ_?aTP4K?yk}i洇FD^~d~R*~'3~I?sAOΐZ2>/_{g {X{K޳2dd}'-/|P#w8ز7N{~ɣ-GMC}3J|`!}\} |QAJD^rs]usX!ׂwcD뒍'~oI\2n2Z-7gR=>ݺG=z =M۶ 1>w晗klm_$ y5sͰm\E[N}1ol ־4,{N̑xE9ѺԷu_Ky.7ދ[yk+G4a`|=)c.ݓpUG8\zUm\k& 2r?LwEGv Fϗ(mAx@SZZm ڕƼqt*u;|M54rW8 +!(/CJʼz!`2jOSg~8.rګ력Ҩ ߞzcO9ru\Hز{pA dmy~&p䙱^Z|?t)B7C%'cϽ a aO}_6|KN#_l8˽wF=sS'Ɩl 7Θ]zu칤+C)NwVQ|xqCuM?4$/}ᷟ{8Em"ӗM(Mo.[BS z'={5<KolU-A^V6|;7:cٖk}[owwnjxkS0 u "Z hzɲxf9 L6M=+20h8CGa8YvUCFD9 _bȺR|8^y#0NCjV_)2 zmQJyR:2/8^c3`k Il:/[IAI}HrH$P1bs(B}-$(3s^_<=@F,N;\C$OOg@$ց&FL=^};iO(8Ndᣗ\󸇿[K( rpK )KVk-f9 -'j϶V+'eeoa ݿ͞Xmۉ'5ڻƤ#u4؀pEEBYa\iтb;ؓ&t<`!dϯ[|C2.8 `ɶ ph.nf yc&isڝ8S]u1@SExooV pα1;u5UOGOڋW6&?rп{O {XVtr7wtCKڌZ䜔 ^EH:Cש/zƜhU3.ؼwlDRtSRy Γ <`J?*Gb #qa \\n>S!N}}(rNA s)2jLrw|S?|8P؀qUukl@ ?;{ƈ>:`pGOӨiGC2,tÐ^}1O)h``pnCIbf76V `zj*(Ifa^.3 MXy*<-=q$\@cr- {LcAxU{17""LE؄ \{3C #ЇƯZUSCb 'c;fR',ƇW`Hq^ {kWeaڰ12|;en yݳMM r,Ñ!ͅ>=gQ2f4E1& 4!]~}$2tU)tN "g*f$]76l894a Ð}1gD$S@mC^[Sēx6|ɮ}sApN6MC,w/bi8Q/}< UH9Jߏ~A$\s;nѾL>4$kPb,44 {x=c&fL\cFEm9,^("JpD}xIރik9aMCXhgKh>0T~d`d=W7y ;iWt4>$,!awLR|g 9)$H5.<Y`Y,! p`;/QhL%wv=*Χ衐tq`D1>6<T}>7*&tL{\{2^H@Si =Ha=%j-qcϋ?IPsj}ƮnOO= v!}־P.o@ᒱSP2)ENؾ8%E%(9qFJ0X`b)z.GFA}0h ;C F^6`Cx|囨!G'Xy].J_hi|:ޤm"\{/\?DiƯy3,f}s9ꖎB bὲٴ{6ѽ"/ c %chqQqU1f(h4Ncòi6cуz8 jHꌼF7AD_2:96zp{e@}®ku ƨ!B=&5 ip}8 G|u›`wd ,?^VM~*/7?'n{#La{q6lyo: 8q3lELxc\>g<ߗ/Ô0ɘ2wr+wmƋkMD?D*#~!b溷;.]&QZ(;C`=p)jZ3&ym &1?"̻lNIO=Q_je%_; 1cЁH%SغzV/^ 61`,ԩ&<›gAYLC~N- AqFc}tiv8v@8=.ڟ[T]?Q\}* '>S㤣7Ġ⿸*m?GCDN#8d!>cr K\p$,6 =ւn.8$pWسcA88>^_X<@Ls" An>w ,q%B='4g.L_0S< cBJz\q$M/*lhNq=>eNG4JAcUV4׌AJ?nqi9!%~W(7t>200000000,.(kT_p=OzKaKC{1SAېJzzR~0^!~N L_cAl|nY3[q֊>Fp[\v<\uο~IIPQc~i ߀/c````````Y\0$lMs[0yH8baVasKPVR!I6#ę&|!pcE}nY:&?weȸ2 a1s/ؘ{L\ ~B ՑEger`1 t}r;yOr$]7ohݺc),ALxPU^-2q5H(D>mPz%O g^*_y}ob ޏ MP* RIs\Nmwĺ};Ԫ%hH5C"@{/sI<!ig4?J~ Ϛ@йPmD)Sq81@Q bzOcJ$[@H{3jB&A&T voFc*BP"pޱbҐ]= bòOFɑ9& IoK3Ηj`gZ\+Ⱦhg<N"\Ag10 H%s|9ܔ p W߻wr:9(+ů+ʫ+c\ы2uz8U4)u5J<n/18#CVpI! $>(Ή&`x (`? 
& !{ 38oIyXg j帒]нa0k ٻa!/Ql{GҹԶ^,QyWyrY~;˒}1 bman\#=ٴ<oENgRP_W*9!I0 [tgZt-g<.=FdS+8:" B}$Zp!= ÏP)rNUWƛ+@7z|Z19'!e~d-_o;kU!QX0~*sz zxxW/16_zUڎ5Iw|ȗW3;=fYw^|˶}/g~_o_g{PJH{{Ṙ:w+JUoa{HLD]??V%1%WGKP;~/3={Cklf]1C;7ːr]|PtY?+)wgÓa/:5rGZBb}ĵ{zHd<) tNv9i?8!8HGqHWQoTH{h~1b\W CxӏDC]v\^uq 8̩"W@,:cP[A_=<,Kӱ-cx}O ْ ×~{9\p/w}Oܔ0cśg0e@x9?0 'B;fOEb1=q݋T0yrVK)<5m!ژbzyc'LAp^姱~ oؤ#0fAP_[f*˓_WIlC,łm!Omc]vg6C"߳@]_D V0 Ss𔚗]IR}T%/{xbA~g5Uy^ey<!z"sk ˉ_8'?,'F}0OZ{cO(?6U@Yp}$IDt9{t'myC+jz4vMC9j0{ď+ɩp]cNEx=%j-qcK]A> ?^sn m8/H~2=rض,3=u 27=j`X=LZ"|᭗?f+Q|a,ۖ(o|bxe_]ς|l[[]pXO'< `0\.Cn*OSTcJz]3?xmA4[#ur _$Iz4GDžDŽ}:.ދi/cxO^潇g1/5@6b[6~c2G چRyWL䆿&  o<6В*I| ɏ+<_O3= Ce/g2Z?X`b{[ɷ-d_L~|x= σ}O|x{r$)xT҅5a8>{y]V٦σf#=y#BF=u=b````````z$=(,RId; \6"d']=tvsG^H~yW a=]I v#:yՋI-Z !# ~O} YQ70000000J$` ;y\{0Ԃ\\:~+agD,v`Y Y_b|'[~i<'_([6 O0|{܀zsSnJ%]x ׿.rd rw?Cf{+[?-RsbrI{u|:%aL \IiƯ#VN{ V1\@do13߀/&|C0xfqWlƶmh~m[m]`{[fd,*J3ODzBi?L|vRP~1EiLpY$f/TUuUJi6%2I_`(2.Shbd-͸2^hZ,oa}-95RY"G_시w">׶g v@_ϱX6`96wlmO8vŷae%dY=f1O>|}x@m3eҘ+<{.++w}ˑ̲ṁ'$-_2,ğ)g6tr[/!..,b}#㵧^wICZ{T+O g`㊜$2'u+U$_͟{όD=0Ҟr(!q#zdt4s>qo D==a)_w{ko[wk%ɱ3 W}ICc~ <10i/rm0ۆ;7 8 ~UbfA@\k9~?Gx_K,82l[  ㆨ`[kx+2&#DP~q:Z2ȈsrH%vykM\_ Ft.z~\C%'WF]c=h$>F8Uy^ AB>-tЯ lj?m'#2=Exp͙߁#{t0 st WtY?gH~2ʽ[r0}MZy|&V>";!"DzˊE vZ_PW('d{%{-z>VוyG^څu"-$>()kUPO[,@԰냒 KEo[37 qΰG"zJ؉^Yƀ|C/(l[?1n*H_]!7/z ~dEAؾ"',?d/gLk**g9 AT`dL7B`m!H)S ,AYr|`;m=zC~?l*SVʓ0`!D(SQQ"] }ϔpb" @7]=#;ˎ IB!!s?tMjK2BU?bOt^~gk*;dDqNx"{ enV6.7 OkZNr&&X^(D߿m~3B~c~{۶,;tY l_dI$$l D' dpYٳD``ȞHA`܊+P>u<$G?rX76<464ML^2w}~]m=ܔ?s#^3C 'zCЃbJ+m-A_3={I | rxLˡNi!Y>96T}r ˂O;peO9uȞwzH8 GVx91' [KLD & {YPz呀y5*.<촽&bjBTP0 3sJ)?4{@<ěO=hZe@K?.JNt000000000PRA4IY P LDNVVW ذl?nԫ<˙~5#?ϤWI엾}LIG?+÷}/=w H '$l'ǖX@Un=G2`YAɻ exKD ؤ<"R1?r^ߏX`DV"xȁ8!9` mro9A ?˂00[ =ma6p1'8m9P˟O߲z'ֿДaEOrY yrץr" A̍Q}!J"@_hx 8r^B ."aPGKB®^# =겪fJqa3$2g}-NI}ȷ+;9??rG9vOE-QA#˜~ǂ _; QmVq 0$ $`; ``~4 'aɲm?3>{|jM@ '=nl}F:`P{N:9udR+AĽv#z k Ul|kۃ700000000h]BwC69'{{^}z} 2Z˷51k_f;BBHtj#R<pxX~AP [ ` VJضO}d:!a6x D@Y`746&ׇ^tiW"rDPN3TNCkOA_S:COu@ˑi՜]Z:f@I3{ S/8Y/4SVt:/FqY< 1~а^ oG"W< 
$FK/ VY ݶw,XYY/XIԀ8 "|/'x-! vl~8}v`[BOU 8|ᾄ;K #,0'd$,$86v}ѡspIUVi? 3yF]+BD8"'SZD~>il"bTO|J=9}_ȈEޥ^_Sgo```````!Ty=RZg` ].Ɠ?e- 늅-H׭d&2%_z|~KHp5Z`D XLS*q,ے MB8c@C]C-%㧒0vyC<=ɁW}p|?'&p~ iUr{@ AH B)iQ$@͟71u`Ԙ!pW6/dJw< OX6.5 8'`}«(Bm -!`ynd>$$eeLzA$,ؖsu~"D܏(p^B1o;G=֓Ϥm1_!?a+elWӗ⁁Q@Xdx>J +~XvD#d9)ŒgWtS›1yHV6eEBT{J !tY9G>i 2N#8Y~FߩŗBY#!8]&ߕ=h቗ ޒ%\R.aI=bzoqoDDm1/Ⱦ̓$@D8\ i ` -'툐U"},i5N*=fYrýw/ 4D?֢[U pC(+;M.TrL7PT!" ZQu}- {9xC$u@ j»OQ#?q z6tv P٪q{^AWA~&l_<4LZǞ\/紼fYbo[DR&II>yMŽT:2B䰈<8Si tmRg&l%HsMx_=?Uԡ{ÄNq;SdbD [z008xW/ ^j'ѧ9Ѻyp>mO-ɓab+.TOaBsmٶ*w~Z: c$,|+No'z*-I<σ=X<|RJ6hg8ϿeOJRX7BOMdѳ-8/0S.8źY-wS4b@\|}I3"?^ Z{K$ ^xǞ1_Y^Dxx4o^Ȇeg&?@ҋ]K/- LM t}e*r <i*#rOwF  8UQ⊳iCC2 rMdwBzBv"k``IpR8PR*Et:t,݀{tf|oM]ҟ;,=5ݧb|`{Qk)XD'O7KJS%4x8cۨO [/Wu,0'mx_/JyןX}c0gouw⧯>jC\ײ<\MACq.0P/6 tW!j%@ݺ򯏟r/NUH?08Je̕#vCf cb }^Xw̹GGe-]PGuqLv׽gAP\QU{au*jkZb C{C0}X5zt:,:tìz :s<q x%3Hei(OYE8ȋ,Q.(g %{x|>`%PvyX$Q;$'+ 9qx<q%X@nY g y`fA> cRABo x9#\v9ہ/a=8~~␑eC1F-=CpzC*=U $4%瑽z4cG߈JgȄkD^\NoVmC=?TE߾d{{ MK%e``2x{{㹵pv[n^mߋ5Xw`7؍߿ w?<]==@jrN98&:tAbm("fGF|>+d<RLByϫ"u,ȷ@\x}!_O=Hؾ!wUy<1pmn& hXg,_1s`]a;ϯ'#/4'kWwP /6s{ifz&p{ & sd,=77N)ЌP/i9~q2 g@0OqIVL\{uu[{w~SGGoa? uM^[lq$ld,f7K^l&L능200h!v/x{Nx5|NPEW\qp/=zw :< d]b%f~eB Rl}( ]4? 
B p`gF"; NVIB'aix+ŗc6A')0mX;plSJ{3".x"QO.M}<Ѯy <JN)?ܔs3Gyt } cJ{~hH6vuWMec``^o]ߢkƄA0@.(Dv"FV`I;u,yopacx꽥 _/7݋/C38O!߲=#$B;yY(lm7@;\F(*ʤHÿ5B LC؃`2Y'A > 6o6X,$l8Yl'~z 0uع`̆,],,lr"_"EMU-6܊uW_N쵧zDTJ_ t@Ğ !p&/۫ #I4^+oGRSU-2сPxx)1n 0gD\;m6iϽ+1cX?b7)|ph/換ҡhL%ce200axnfܳ>\>"䦽kVc]==\?|ID(M/?='Ƕz2NS),?@ '#9t _u5&M5kRKg2+Ԟ Lk2!g1?fl{iaHv(V ,+ _\/oZ *Hpv^ʚN%0uh9J<M^/?*_tU~DdEY9!<=7sFOu`ƈgֵ`W$tUE&ҵ100"ki{^/\}+yWk5SgcgavKv`WOՀ`M8T†_\i*(G8up9 aƇăaW Ļx|Jcx9iLx9ԂXa9`{ċrφ<؎9<ύ5H`]So[`)Lsc9㾪o=,z1$ ny):M*}H(Q=&mA2O9eAC7^y^cqX4y挚Pms~)m{{!uۢ/mMe;7l||5›#*^`~ &SgcqX{+܄Gh7;/pAfQ_]_U Rjr]=_rܴcK6f◈>t*:B}J]?r ,a_./ճ:5gdKeDJFWZPMWut3ǩ2mL~'‹R$ mBI؋/<"ݣ| Z{=?>VJb*><|ykfқ#yv/̑p;z҇S.(Č=+j>5C_Tvu#~&\k4y~ a̕71>20~ᮞA^}>{Ta-/W |{qy]=\%O*%H07zt0{=$˫%|'oxnzdj8 ̶c [eoԂ\%$?;#D}!"V'8'SF՞l n/$Hmz.2=B%=V!Ȑ חgƌcpL;vzw\9izZvn0Ƌo`p!馰wϽ7gďRWO =i-xwOէ1|;MAɯFuC)D9ôݿKZVx^pb= ¢}FdU.Isl$]yL lG4__ V /8y-NA F,cHl/peD6Egb)E `T ]g+~㽰; b݁]N6M5yM{;pHWO۠f-m_s3ow=&O 2F:kȾL5Uؽe_+,bx'GօS 9":46O5Ѝq3Z?ݲ33h~>J{m|0mXxex 0Tl?vvo!䘤I@Ԣ#ǥ7]4B )/I7!@܇2 is daڰѸrt\1"8v=3 -GĞ֣0#_듍Xw`wC1308PV]y۲ڇa¿f,m6"#lM/s4l]Hu|>kɒyo!\A T-Dy~W4[\M~pyjYMF'{meZR<ϟ' Gh8c۶;gތ%E(:SrI(<@!H>/'^͊ښ&{A*|q5 Cv" = п{/;# Ll8pס#=uxuѐJp)(.©2Tס.DzBn1{O7C{nFHSk}0mNAaӪmoZp&r ɣ<nqFKhL44`#,c?gnЧ -Ocjܰ oǶw7܋<0RykkEQ<'}S$4&r 1!8Hh?G`ʰQ|4,8YgO_\+lpWrmb+r]} m~6$|u=6ZԮG^f\f%l{xP܄7ny9m꟢1ye5M{ouiƾ=y ϭ]7'{uǢOmZ} ZAC2r8c.nsiO>R ݯCA斵XPI$Я'捝}RI< V7yݽg=Xڽ Uh lS%c'΋/>k*쇾ws]u5(7nuT׵O1cgJ{?g2^Cc鴐;f_)CGft ^߼5]_?xEgO{,c(s.ǂS[6Xk ݹ k5!?;3GUfvZ\I '#yzB^Vغ~ ߣPm8Y;8&!F38AE Z& '^+'@^vv= uԡ֦.c2Fjzߎ)CF_m}ƗS;Sej_&=kocQZUg'p'ml'|m"_Ϭ~g!q(ƫVM1u(} _Y^r/QTV{͗#qbI fiRP>8*p,ccOv/kU{Ww\v1UK"Y |;wi߿c c[6>8ڋ[f]lG}^Ta[x sq%?i9p2dnq9N[qfNؽ%=z㛷ٖ/—yҰ-3wwl-{ |N6zܽGNwo`q2q\[ /--Gɉ[ր/z/ɐ|*Fތ' zYLd\: B2]V/ y'@s uRn@I4A{m&-U=a Dsi*>RI | #:ar]+&14-;͉=tu}^o*B*6ku$q LnF Ǿ``z*mS݊}_ r&ϯصeݎP)M nM{.Ƥ#Ҟ?>;էD[lnnE>6އrOu?OC}/ Qn$71z/b~ D8O"_(3 n/Y"?DZu&R|f -ŲP}+&No1F` cj\ ,#Dvsii&\1V9ǟ~랗`ڰ?phԭ_߼i0O\:.><'[ F9'F##ڃm"ffmaڝo;Of5mp=[Sy}V4/_{{w^A;of׵h^䟿\lDE¶1W_7lwŔbу-H6& R;WNonkSYt8c;X|"mԆ~-31Gd I7o={,۹rYֻFxd+\@I ?+zyt" 
qyKhJmqx=pX<{v-nY?wq08ci+qzmm~94+иVVLTK]Tʅ<8\0߈uG L'1 <7y #-JYa!d\=e6w~ѷ&Ǟ/,ǮG5 XgcZDFưTQܘ=3m_ W#gk,޺Fk=zdX[O.nq9. \Yu%ۉ6NO=O(-EF2EQХy,PTg 3-))H @3tTQTZ#WM̜L38h/l.̡6ô # 5&eU5V68yͶ7D^y'޽\o{մD׈bb{ l-c DaPx s󔛢3 t ]1L;.Ğptly6I' Pt9^݅҃e(.*A~a^h'5?,%.E{!L\tz.N_s9c[a_Qtj #>paTϣ@ ٯ폪kz/Q/z=a=Knj| ? =k‰}ax/jdd"=56H4/œ+|x:F25iS )a? DUGnY/ZŁL.=32}A*Bg r#8P)g_y-2Rpڀ8@tm&_>yͣcWS7݉gLoV?4*`_Y1va?[Bƅ?xd7FXwS*ӾE~x;b0gɦE7߶&6XE'jSe!KlUZ,|0< 8tD<.'zo75*GXo 05t)Z=yu4'E^Ժ"DÎ;9dog@R Zen[U9yGvjwD&؛E~4sg lTδ'MgoJod. +8}mK:J+/f@f񗪑hػ I;-C3{>s[M %n庛o{MJ)ȻufyCvhE0|bn]7vt! iЃ!hE5ͰSϴ H@h=:wb€]V6(:_i8unKog4ky{NnqJ|>"vsjR_u'.=$4k$AD'KjUP@ 7vL_- BکtT&yǯQ zr0opG=<̚mR<3s_qC"sc~C~2>wm**n>Lc̍cU(Ne%^$hA7 }:al<(k$mK&j}a Y2~H\ Έ w0S8覭M;Uuy͟-AsMa:LʵD;Ft]lci*,7azpFPnܰ+ǢLt{lMWmN@U*n:l8V>0IOG#zN( }[⌁#Uw#ǡU -{vao?FK*+pWmxH^mz|m1cw ˳FSWf?6i(t㫃ҹ5 ߾36Ga(/EHד< ř,̋K蒀]AdC"Iػso|}pGwNMvVqC $V< 7t;eU򌘰v:CauhBUB0Bq]3t B^n@ [aLOUVV"XQ꠆#d=`Cfy-<)~ XÉ404} pf{z )hS URwGdz0ޓOЬQcvp(.:tY\9E<S*KqTξNxɭ7uGȎ1 'Q7 ˰@_3ޱ8E6nK W>j &$Me&!~iޭЦiZݟϝYb`@a zGX/>fP0Sn\:D SY㏝[t> ލ5s>u?%[z<^<훰^| s'Wb 1;y^czQ|~I| 6!TGૼ45 rIQ]}#.>05EA@QI(-F;1pdA | SN *2OBHcelo<:!=hB_k5G OLk_uZujׂ *TUTըB0Bߤl>4AtˋOޯkֵah&L0 0t0teŇ&*; pF|!=s'3)i c݀ze$l\naynl-s{ #5#!3-G!*>}uَד?# ѭO٣i *ve #CuC%-?=lhشw|y<.xͨXkGM"mK۽Q ~7`͵Ñ GlMƕ_V/qܓ  Ga>0'5/C^$f_RUͰs%l_MվZI'8w^۞x9o{>`~-CthA `z0` `5`Z0P0#h 4̀iZ]:4L{#N[ ð+k`*y Axo>W$Y؞bJ)_'h_gbLxk@O@cݘ{O;0{v淞UiD50j`y=dz_!hhٸw|șDEj o} ^zSկ<1>4##Sޘ[BTzYi zL(ܑ 7nި~Q^cʅZs,P}N59<0v܏[z:!1u!6a ԔT4oK/z fogD 9=&5"N=+AGUiXLغ/aE5̀ah& -AVdf=#iP Bը **PUU e!Aԋtgχ5[O Ɍ6v5N4Xߩ@V/R-pNI9 "xeพG^W7==ZO$!$ReJ8;=#,obW~xsw ,3Pv׮sBCXcK<а) ٮ y紋 }Hb_Y %LZ[q1jK`~!ogF aJ{N;׀C]Uw}FjFt#ͯx1sψ%Ts?|)]Y\X13=- 47_*K07A1P*e=w&!P@U*T0T(%J\X-g؞WbgM@1M;_\BLvpmU!5@Uh~ T` Ht&BBA*̀]/@ (Pu~ҡ; qw*:U7aNE8}L`Xôa"2Vȿ,=*&9>!1RzZ+JOy홓|:73 wȞ0H4\i]=;|߳;Uˆ//-70M?{*~X:׌>'6ƺv6_qœLȯҔuwkwoըݻO+oU}g(i8v,/Gj2*!T=ʪ*1ojaO'W[OދzΈ}jMxkqǯ"wlTU)_g⊑pR5H_5ߋo*Ln)QcDژ\^8/o[yؽou`„BVXn{i:SV0nBL3o vyPfmUUB **,Fey!TQŶjTUQUe thHa?-g : t#!4z( [ ФVڂIXM@7@ {ZGQ_}^|#.pxpa@nKč!B2k F: 
_tCL?_^G!s2\Ho81B01kWf`8hc7j#\g ]Z'&}wu `E|?|OJC1hhݤ/^}':P} o?/|>Ce#ls?k?._0 V/(dd%ŇdT̀].9'&}+_~܊I|Rè˺>a7|#]^kVOcb8jT"abs3OKQnFixpH6]7mU} q q[ЀQm@_A7D7z&FvhfXz2nBuf uKIA'2  7 .SrYS( rPpB_%M-h;Fl+-m=GEk opӱE^W>&{n\bǫ&6OC]ug"5axOk1G-qԨyM™ 8uxѰG"i41rsŕQ#lX (U;6'e̒"رy>{})'K^HIMrΉ^8S7 K%`EqZ:peu ۛOX9LZ uY0k"#AJ5(KYF0?#d427 *a h;b$ g0IX Zb)snV ϓDPˆ= sjٸ]y;=<x:\xswQ=E5EPdg+RRٗ KUQqĬ7jw7=j _iUwk61][WYX]_xOqrK /+h፝>&9oă^ft͚][q[?Wuq&|Xp5r֯܈ܞ!FDJj@}soڦDZ:!&|wPPa P SB`h aBM5 @H(f1jRT[|PSUzXZ8S-z-㆕z@ua6VN=3L0*趁CL4]$!qatx{^ {lO&9п}EcL|O6%7ox|9kpBǃpՌEtj 7pllܻ6j^m1긓{*ZZ]E{0qdASVx;qG/)3y\ #~:4mS ǤEo(<)!sӶ#^L_o__ymDCoe_x?~Y W,Qۻݢ{H~m|a[h#9}"ySQan=%лMݛTG$coi1o^/%&eitޭY0%UѫC}`6mZ.VƮx I~=cm$Z!z`ڭЭ]Bl2+ 7X@C*]ӀB DQ,!=V (vS7@VbG(PYj&݄Z@ X9Q>քbzsʠX AɽM`L 1,'lj맦&#qx0 ][oC:Ə@~A~Ugh_}}Stw=%ʧژ^ְ~v] +=Ww2|>}&7+*^k+nzi<{mQ ă0Z<9 !UXis.=1Sdy^tK{˶8ј$^G2 r䥷7/X3O>\3j1Qqry(=sET=d">бYKӭ!םɓX\Bl>.6> zk.mJ"ۺ/nyi+qaƷӤ -#&'t%ַ~µcNǷ5Wx_i1+(C+r@`W񁄄NJp[&(˙-'72qk[Pa44ŸN0yU56z5i{gctd^)jotꅾei a 1phbQCkt@ȋ3 R~!Ú][}By񫎲c>'P=SET QO#65&%ÖR޷utߙa#dafY!ʿnXzaB7M9iU*b{Y0]eDZ{.҂'Y84PY^ ]בbjU'/OUy^9,]'wo2i$lHt [vNm| ,=xOmѽe]+EX5ӏ>.~BD}07F,v-&Iğ;~wTUT9eb = y{PQvZsyx7ONM&!,fN>̜r Ӱ U 0,u{]#4fy5MG(A m~u Bjhj ԠW0u˻*\􃚎PHG(H7 `BA T usfjG֠k!h `Z0dӬHC-CAR75b4t2+G ]h,IydegA(°1=zAf-Ѯy˶n2~ /~TAV " (b oobjNϒđ )mT@PVR_LD 7 5EbQuA#ȅRK2 QV |,T`h =mA|y0+l?^.|DRАлMGx]ѧ.a++= r`HURfXnvlݿW8>6B>EF޼|!blx["o[" I~ Pro?n9O?I8{)P14*>FBΎ߶~~vh 4 X\!ލ(u|pnuKe,:;L*Dž0l> ׭st#th]נM5f4j$YH+&tl:ka`t!FȀ0B:PGD"0B١۪ĄA@׉U2.'H0xxolOZFIOYAf{'{Xhy?q93WyC4iZ/B>u]{1=>Ú8ZanZS0X1/Lc uN3ٗ0Wi!<2=fݔ-8-b ~ -k@\{G֥~Xwge(J]ZF'W -ih۶^ jX2VH#J ֳ[fLe 壶ʶV68l 8b \7ZNQ"iBNpTRrG^x=42$wo贾9t@7̀A|xMסɈf؄^ ~H5hA;?dҠWk0X_Ȁ AB0B>-ð7-{v8Ҡ2$h! 
FHI]ӠѨ݄lMC,4A Сk&X0m}Ԟi,ZB$GQ؆.B"~(Ajj]='{hN[E,d:VU`X{*+.6&lH9 N&!Rc_6qٱ%ع.|䡷+l'=Z>C6 [VԍpB/Lws<•[r0N?++I.=F,ǯ|;ti(9PYDS={ص]uuw*כ&ˡ߬Uވp(X)7.R_8+FL뇟#g7Y N B ןq +]E5;-=l/}g 't+qg{ +\;b*` ;z"KJKXm D+(lڻ+hb 'Iwkr-GFfЭe[%[cʲyWZ,?>EӰegP|*k_߆>KQ·1SzD99`ʍ}foċmTL$YI*h'09ÌαR im;&uK$ǟ8: ~Xz0*x#ܦs7 5R7f Yj$& E$,93M|yhj$uӊ 0zİ=L< 7s .Nj˳ύ U`ۛ1o["SCN1h&gĚ]>[{BZKKԣ{&M'{lw2}bPIG[˶xq<2:3ڱs֮{GvIJKZ'p|^8~BSֶJ y΋M?\mmn$_ qEoWo}]Ev"(wP@LOd/CVN&PkM;&۶x>695:16MةʾԮIC-RLL+߰쭼x |-§rx*d"T-!X!Ԭ{$Vz4iV4 hAn;"}-ڧfB7vYs€D7b9K @Ѯk 9V/Y&Ma CktFpa}KWZe% wt B;(i\)eY b/N WiUc;)ȞD4<ѭm.소j-v=4bڊ'0?\+/ގxucNMc7bcu^W0L!]'Qf6 APT[>/{"5ʪ+Zud08="'܀O_{cd^H{p|bŠ=JN@fZg$.IL2m"ENz&Xy]5L.w'M8IiipKcLlAWy밷 ;۷F3_q;WF8K E!o=>މpD0phdbrGiRrLXd%`GCp 1w;t^D3Q>UמW׭|RǧdܰCM[<`yX :Оi{iGD8w9 XSDm!+'Gf}?M6fψKt [>Tѹyktiѫ.rh}9Ͼ ַfQ U޲I ukw>-h{lՎ͸"h8P^;{+o޲-n=鬘 Aj_.?+qi۩-&3nxacE]^-p{wɌQ~p ~hU-VhxyL_][(hw"^61 7^>}wDvBi>q~j&lqe[^vvo .el+'pmYTWl)[#rsY|_8d/7z !0-N.G/$+I҄GWU>b=ω DT(35aZa &n**A* (H;SZU=`TrGTk=M@i7 QRrN#IeǷ [ 0BQ>tߥr&3kas'&}ӹcT1A2[AGNVhޘ9 SÕǝ1MiW ?e=Z7Ǟz`A-r0 n9bNcŘ~.>f,<9;^0Oҳ_Uuqpj έh9Q]4CǼpZUǝ,PHC=SR1{߸, (CPא{M;UEO"+oą1mBA#au阾r!y2@lY7 <|9oX^f6?*^V$~~Ė=^t+ !)s=%lΫ+6aq}^qu\V ?(Dl'P҃xpݗ%uݻRBc $.6mQ (6)fb :3g`EbԘ\@% R<5i3{(PM( J71I"&Yu4FgCզO@q!B֝H$aYh\I* *1c=C'L΃jm jr>0PF˫ޣU;fm^<2=>[7v- VcEx)T}&:#[rc9^K<eIoE.p㦱g`R'amƃb `Uz)Yh<6GAN#dg@7 //ņ=;n6ףU>Gi`EY\6ٺ]J9f@NF&Z ;=i))(ĎP4}K%>G}a.G5Txѯ0{K ĂkaKB^պCs .AnoAS&9(Ga<4CNFTEAIe;UQ0koU;6{HƙGߦ{"'2;7ۍa̓a]^7ε3r47yYH P^]a-(v=#;AWZ'oN;< =ZBz)eo^(YJ'>MkMxޟnSF&済Y=/?a lųyYQ(a;^jKUSdfgm6zk*R&, D[U[u+0uza_0Lki7*0a( sa}y*`@{\p7%@֋#ن)M(KbbI$m|^ϑP:+/g+x/Y?jǦ? A]zo,>GPVݰD]@,}쫉bq`G[ e0a `qQ(pϽ0G.7byMj3vB)yJXdeX3];b}g+D!k6ޑ4lr4)~tEY%F2vڥ.~>mqUQ"*oAEWyH~歑yUXmcXsFvdsׯV9o̘/΂&Y7 ٵ-.@ 8d\2ℸV'.O>/¡SV|ź"OMDz~hN,n\1&8{_ 8‹vlH9‰}+-a55c&pEōchnaz n{FƻVh,NSYi _^HkǜXVgֈꆁ. 
<3V KosF TX}3.뻩h0{ ~Ynxs XbMB(NBvNN`#O dո=Kf)"M,tT>4L'|1Q#zz%8O M6N@3Pʸzta`Oj 'PwBxc"i_=Y(Cj 7x^6R`b޺~\vq|47*fY;zynX0/Yt$fg~ކ?0 S .i :xsaٵ !=5Am5w՘W~[V2&~_P '< /L{'ͨh`an=bcUz7݅Lk:_7D=û7_hibo{aͲ0tK`,dysO(^', Ya⿙ןV!M_X /w48o>@0Ѹsr4MU݂`U qj O_ٽH)*m^}0ΨÝZRThR9G8P=_Bבʈ dS׾2 gKO X}tV]:F,&LӀNL[1!=;bː}ge81HOYnצO=": qtK?N,x^-p{K1s"X(rzispP )_bȞ-ϧ_\cBv6|W\8yY8 ~1hߴE:#,ٲ>ڥ`pĥ}5g0+>cwPs, hڰ=}Qճ.7.*!ر;=oeUxObۦ)Z`_1ɷ4)Ő=q Es‹S'nu{L Xn>,hgL´ n;WcŘb!V@U1ko\0lLL!G_ |?)giLϷ HCԨBW'\x Ҏ=OvE *Oeٜ}"yMhMIDATʌoC}'~f,{, qzokg@EgE \)F>`b[)bu(~*Ėٷ槄5/EI po9;|NďKB[Q4ijH>R ΍,ѰQ,ߕҤ  aY5IXy"GcRoUWbMXkvǮQB0 din>:4m>m:bP5kO要5;bɖزo7v,Byu4guдtn-Y[I`ޝuF%bCIeV.ƮEUOѣU{k t[' бf6ڱ;΃Q\YBHב&ٹhӤ)znzY^d0Q ;/TX}ع ;aYU%t@fj: r]AsnC:LCZ_3+P0ۻIHW|_SÀu> \@,|VO ^8/X{PTO( b5燹}/si [dKҨv?n,Rl,>d?V(@ۦ#I5xE[В{Þ=mϿy.@&__ϻ74b?Qzlm^}* 3a`;cD/S`NE) W??!dG0IL̴=z# TBB)JOGAn^y%$$F׮\/B*CEOZo1<M˫n3!M@0,(^p/HC| sEΈP: \aK[kO1iȾT>fX:pOlAywI]Negُ"#+C,@ %Ǟp# {K Dݠ^](،K@UEk5D:uz8WCiryJwlE$t?ۣEj?  D#}mMI`GD !g[+9PW{7Jѣc>Jc!@!n g/AEYλݹ֜{˹ӝz.zPdWs}Gsrb9r=.׀|]2."p^E.)'&-`ߞ"(PM{u\EȖymZrAЎ #ʑVgIS+N:;uolyc/2|;<SMO'zhCwyO}28 $Qޑ 3{4t*KX8k9,~n ,_Qt}'įoE!>rA!tL|N > t"] az֐BLnuOݕ8`~9M m}階aN%$$$$$$$kԊUO_l7{ӐMN?L לhj"92s 0A;Wq}z. 
ƶ,0!'\D$\ ԛ+_h0t8οjs‰ƗO"ğ|yOқ4oֆ H{ɇ)4 1D(si.SH4 Lx h- ڷL%$$$$$$$kԸ_Snԯf`m7'N@Vas2^iPwcϼ!`׃oE f8z]y:'vwgj; A GYqo,(.*s|M1a0A`*6 L,!|,} -gbXb0ta0MK޴7L g}HYa> 5>nc# b&6W&20,J/^l+jFЯJ9]KHHHHHHH5jL鍔+Qbctbѩ= n/۳N;5,vl ~;_ g#'ru$ _4ʅvǍ3OWwSw;/"§ \ V!攼%1cIO}nP &N: 2)uD~ܺ J찰^d7An^N)!!!!!!!qأ$?%%+ny5n.3׾@s1oL%ӓ n+h'S|g w~z<ɴQ?މy^MLł_=gS|6 ϳ|<l|\5XDxSOILiyMSM\Lإ=)z8NJLČ!XЁ0jdSBBBBBBB@I>uũh֪)'ۡ0Vs9=wO5̸0vRe3zb#;Rp=}]y S5}>^̲w 8{-`aʀy[7lmrF`.%Bl5H2 ^ vD.pSs- -& =N^6timLJE0]p!Pmyg!9nn&͗4 ÄnN~, NĪB關&K=߆{{FG'}|)!!!!!!!qD$|fddYot^y^$VPÕ".xyVG(B+"U `€OO>wzecd paC9(|J*'GDmq9u'5s:|HXj@B\#F8Sw%啘bg vD#Ӵe/!!!!!!!!QoHWUyn`drp>ufiܼ½/\υ;VN/ xɿh.{.@D\|9΂JS+ ݯ1X8; Z=K`~$vHlص8O# (ؙ!3;37P"јho_cC=L # !oHo 37#ͮ(rކs=ej|kFT\QM#< N5NI pe"|μxÇwcpq&{z |פοLk\F|Q/G~]Wx/yUN1M\"•;$ܻ#;CnIО TWT3F%{G$FO܎t'R>dn3r3χ}xv'QfI zN=7'0b×sryةlvD͵wY?j44ډ) W2p˷`"|ޤD> rIأp?Gc/p|?b\&zN16B9W*nUPՄ~H$39Yw3e{4BdUBBBBBBBDB} hri7*t `$Tb~{-+\_q"FoD ^u{su{󩲿,i8{|c ,+Xb x%}M>w75կ~$:l38+|^70Ab/zۻ#.VZΪHn؎ %5]CAuUcN;.C8Q'qynd7|}]B;.8鄑xO;z>sFfl`'pyK?w-xpNvN\\q#q{K~_V[Fmw~Bnz uQ wYLnO=<}Ƨ@ bmMHHKHُy 3+ԠKMٍrPؼ Õ8Q'$_UU<;g,o)#/^ '.QRc+D'+33Ж +Vޙo8Mr kf,ErD[. PcocͲuuh 55wff:Oj^/z$gn$)4l[7}:&tCEYOu@q{S'ΘQ劖47BUU_Ʀ[hݱ%,^W-XWe2B$v~Lm K-L;H#\V@1Ź)ա 0b(&z펳95AwN"C׸'چ%APQZq瞐JHHHHHHHHI>\Ѳm3׀wEOC{b-zE>W8pQD>*Ys.#oNp> 9Nf|`sS(vP.k; ͍[غa{'7ψ3c"}kc% 3a љ&N"iK4tPL A~|[a4ջߢqa [$Ch(C@|xBUU''3O:s< G/M[RY0>#iTc;s4ω]ͼp^w?˓s~&G#h8oUNqDXTEzΈ \=ڻeSJ\n~ZcZԚy Aaو#+ n/Y;5,>7|aO>̋8~{=!q_MHQWJq:qK3uBp3qF 16Z *.ޜZ.πY nޯK' |hX\TS/>9ÕP/qc!B|\;!Ƞǫ΅yV(j\ z;Nwx8yH =߭;&ǯ*7kU<2R]s)я7ZDD߮<_rc9 WL)Lĥb񃑒H%$$$$$$$$8(/np[Q\To섖L%DŽv\ /(aw yV˞y]&:5Ðhh۹ t׫ vӖ㡛>Q54{?>:fXWJp~' IK?;W9zN`³ T]Ѫ"_c9pͽX*#~q"וZnuX"|X<xN[,ڧb! 
aޙ4/IGKOI%$$$$$$$$ k]w7~Rorյgz4םwa N]9v8:ߕ-OX} /XyN{g^{'3 0/!` yr(A9sh6<&AieKW]e$ӇГ[:4ŀX| !޷ ]HHK%/wgȓG 5-5Öz[v(#NH:@/A>7ܹӝYEOȬGpYB8_TӿmfSvEi 㧂,ƌt,ހkŴ x cNkNCZz*GKQp HĆXp/l QA>CzgDFVFg#|Wi XZ4Ѫ]Kl`qӷ#-]zq:toN@(" s«_( !/!!!!!!!4n==4Ë],M~v~DzEE}' @wyŵKN{CzύEw{`ؘAX7c!WyT#^{DXf {`3G9*;"I#bbuyJI%$$$$$$$$b@RI>̘3^x5,z{.ìBJ *lo>mRUF|W@d]0pY?WiK1x䀄?+[3y[nIL;T|5'nDD7ɧ de.gKbwO~鶧^?DUUn+NK%$$$$$$$$jCέpDžM']oNOz"83K< (HKvmP*] C#M' Ǡ.XkQGkϪS-w &.W O#H|ӣo*XȎ:GG]3[w~[mlN)-.Å7)HHHHHHHHHsWxsܳc4w+i EvԈ=TO ;}hእKZĞ_{Ή/}O ,|ꃝ3Q'GRkеmulX /?6 S!_&^mr3T"WM>5%|Ǒ bOqЃ[  p ưl{O"."{Tg(>P ?))dOCBBBBBBBB8$I>Lb:^x=$EέN-Z w]d\'sݺs{8.AXuX'LqQ >|s̜瘩Xd^%3.um(T$@BWw)z鑸1H$S'@R%Y,gBA3i𑐐h8I>w0PU=C,mJϤa4{ﹾ(AW! O:1>\?Ι'E"<^x'g9x@!/U^x8ٸz3>~K,3X ݷ‽$?>BOH6?7Pbó,O߬uS\pÙu X—o}mzk08T]]gY.QBBBBBBBp!O੿ߦ-gvx<,>LȺًxᮼp*x9n;)v }r|b8HS hyݥ= !Cz K~[wh?$Xt=>{q &O= PR.._t;'J XxUQ0 7ן1gDJjݭDc-eeF<ރ/~q_{w(BнodOEBBBBBBBB"Ah$q\!S-y.^+ {.ퟟ O|wQÓsaEx#+?j2IXl\ޝ_4<רIm=q0$WJ)'WFPU]y2,a}!--y@U95ni{&{* D!p[żw!59fܝv1wծcv^w1oI#pzO>7kX<"v;kNI_XJq5D a+N}}]31_ppp('J,b Hq{ G '0lC QH Z SR0x{*ሪ*|Ddeg kbv6JpZ(:= A|bۆ./ .LJs(aܲbO}W" d{' {J)s1=#N=:\_n`K0_eu-cR*]'Xx܉ӤmM B}6}>X4j['X,ANn6q;˘ahݾ5::ӑ3~^$y5/iu:w6VM S/D*KVJHHHH$ ]7p<|xEUe++ wO=1X '\Ǒnb?|KSw8Z~9}6p>‚$lp֚t!t~ׇ Sc^7ڕ_>8/Jz(|}xh`訁k VK,DzWaז=.9x$O" ٍkP75/Wַ?Bu6̘ rEb.*]3PТ# O$$ǿS͋xNzJ*޺~nRXc!/ƬKÞӮ9^ndg$e$b|LrYa#<%$$$%4Xw^x?vneM77^ 5"k*hݦ?E=cpA _<.oU@)T|H$7^>Ww"\T%N{Kng>8SıqYY[Qtl$!!! 䛦/E2mי8F,z]DW tJ}4v'=zMy`ǛΉk< ^y{^C=G`8` ^\_ ؽ?SSz)[QHMߋ6tOLqp.W30E-EwLMSWtdfg"#+iHIM? $|exa&RRSlE_& u[)"~B, bBxxM#kM8%Ÿg} >ڱ 㨢 !!! $|@+F%%ԛnb‘R1`*k0aiD ìk7:<%'8s`ΐ ep&s`\y?F *"DKϑ7|گT̘8 ݹ- :xُv:df9G<C笪B!}== zAANQ^kvm?M4/eZW"nY^/*OQ޵7reOIM Cu_qn+vpTrY]7x{!-:Gi:jWİ]~! 
EYw&Ǐ ?=.?Luω)r3v,pX0{1]vM'!3Y%.CNlgDQ!3UVRɞDBQ1c:ѯǛ7W'SԀa#kxy(*uq=#7S{ ew!lz_/' PcW8sMˑƹΛ.yK@(7zpxq4m"b?6ߤ\9XZ#YnZ{h^#1nC^}l7cl۸?N̬ 4p:wb+}ءg\srr=M 1=%H핆x?z!F,;BBB$pҹǣy=πD GXP;-F8}8%by]o O8ϴ4MtY_4=@1>e󐚖JH$ )9b֥N;~(!P؂G/h(HHHH4T6{~?/#HH)4CEBa>aN;I,h _*THw #ݹBbsW`d\-6;5;AhZ@JW^TWD̨Ɨo}/ oϗd6ş8>-wi]Fi.I%$|p:o]| Cgd㸞%O!(*+ƽ;rf۵CHד=#&1ƪf6lٷeՕDS /Nx w_W[;m1.szԿϜڜ#ϝ/Y* a^i<+O+({nZX) H'IW%'+riD0t!;Ϋϋ1p.p}!/p=u{YH~i&6EXDA3Ϣ`p60 ᤳOt%$ y{i+F;?`5}<{mP>tc:MhG5Sr{Jtn81’ l)v>(/xӶ14r@y)X11!-%sдQ~6zKy^c4kR50%cކXe=6ۍj-9OѧMGݡ2kV;tn^5!37rоo 5Uhm:}IL,ۺ?^U;6c]YiԬjݐ*B$rrħk¶ A" %ΏhŃ(TL:vBm8\C|iD4VL?jd x'<O{Ro>^~LӀ [&voE*ᙿm/CA1a`ڄضq;3=JaڢiH{ UW0_,!q0ojڱ%9˷mć'%e)jȄFx,n= S'?='#5 MY阼d.&/N9g<6/S0u+n {<(,C_ Hs3aᦵ5j?;=}v.=qrȊax7P {ph\UjϘ7D=`w.>i+?È}p1'Gqcž^]##I@QWb_h>{|n\$_7 |W|0{*h` b&ؾ _Λ&9]'dE|0ك}yVPrx+![^qVvѯ5 Y m;Duh90{rMs+nfob5ah0 =7۽ZwAJ ]z׵+pkǜ 4Bf0uQ)5W߅f>( bh^:6Cq/`ڊὡ DrqX]zy9:#x["SLs'%h./ gI3}.:=Sw#':3$1>ʉ"#]"K$TlߴqĻV PB:ߛwE3=h  \}:tlKHrޣhݤ,jOxV?kH3St47$챓F^ kWD4ċ ݵ-9GuG@S2r3pBkNyu^-eWWE=>{Cg8=;qkOPC({v|qLZ[\TO(8cj5AuǎM函Qc|9<^8qD1أ=u˿4NކϫY|]*9Eu@wNHh//;}apX,Q+0;~V- wQ7Mڧ0#Q'H aFop9̌Ϛ-!q`_1~9 e%e@Vv&"_A~>ܟ_F4.~ ;.qY IEN0 [k‡8/L vq-G-?9hݤ)v9ַmZQ09=ZCzJlфt㼡k=^}JQ3/Ltj voObA9 sж s󐕖*޹%,Y[y-[x⒛"Ft޲mF-ms4m<wС+:4mٹHKIAE{J⏝[ڕa!]Ó};7S.D ƿq}ϣCgԶKQզ_U1Ă?vnPr0G?n -%URK6Âk"F8I4\$:舗y_ Ԫ1Oo`9Dfנjtع)))XƇѡ[\|yh\GR`ӤٖR~Z 233ϤP?Oi>{0 VСk;8iX/!qiaՎѾ[;u酑='e- jv'yҹ #W.zN8V |IyCFCӖ(mt UMAQƶ0w*]:AGO8=lXԯfVϱE9?No|V/N=YGYGuJd߽ }l^'}UzP^Rv&{ $$ ee/g\{?z9y zܼu%o:¬SROL.^npÞXkwWqe9nѼџbO c6?-bN=pXk;I_@"ۯ]gxx1sbO~3ApKj9ҘUEűbxxcw0^ON$&Qp'?~V23^n {RX g m___X+^qBQUcޝ Y?C}[qR,Q4LqM^. g)N8' t>mcy=yYx:;Bu<'bԴ`ARoR2طh/ \LpUẅ́ Bxx9sYl}/-N3 %KrBgiݳ>,5rwO\# g/k~ޙʲ dfYw|lH&ǢDY\zq? 
z!ٹٸK IxύN8xMkܟ}ujXH@{J O;;9mB ko8{LuѵeNL 1aunI\)J*+|uN0wzA +* +ʴob(Ǔ6А#WYmq|5O>FJz$CVMʤgoHo&'(WKѯs O]!eVB9dAF>p ~r塈װ5wNAfcSaC EQa~ws#=-))_x~h&ɷOc;dxs AEY%:^4.K"!qX#=%uET3ņ=GEÑxwMprjYu%Ԣ؞X"1'Bɯh0KOdEI=\ш'+ݦ<ˠN=p3|ig'1bIZŕZ5 qC|]Oe|;,qpxC<P'7]'!fc&0aN]{x/3|' .!yLvG ex~pHYጏ.#3P3h{voۍ~Q41Hڨ(_`e2l={/)sDž; 0<'IAJ`P%^;8 ڠKָ"Ա&ֆ2\kzxAao:<0\3 9}; zg9 \jk_V:~:a*yv9 fgkSϯ?+Gnߋw!ػ(ُ*J+0ixcHHEJj 'HIXpBv{V'I%-g[^Z;#O>&$!qDBQ*iS|5g]:V_qB ,w?Ɔ=bN63.l{M, Ao¦;aOzEZ  Sgsܼ5F{lMظG!+-_dQ"ɐ$?\qxտP^Þs[!x:%B-4DSN9g2(Yv8{8~&'F ^'Vj0u%k[SWox 󿏰knH$6noL^7nGzz*R\Dް"==}8O5i:23qWEkoDY| >G׻U݂{ ss_>gX t&2iGZ}9q"UrQ-YexE@Ѕ$1sώxgoh_-pGB xKXD<^<{'zN=Ev;^IZDpN4tFnrU;6cK9G)jsr=wXQOnx<7j xp[}`n5h1~oشwʪ*e { @&Mv} ض)CH$ɯ ׿{F ˫dS_2%Đ^> Gxe|mr p~sNXQG)_(oCX!XR-xHi(8H%B\ >{ccpAU׮ġ ˢNVBt$`w|Ӷ#V-Ae(uW=ZC&[Zѫ <:}"K8fΨaG6CVzFB Hkwo1$ŕ(@tvsv؇{x}K#JH}G&TU kWn?J!t[atH2@z=ܼ=@HWߐ# 59Bϗ˃ b?!^'ۏXnP4:ϋjv)i=0r9MNi> oOL4k}:c1Ѳ푛m&֯܀5cזݨ(@JZ Ri驂x)y>TDq|~[zΆ? EUh` pMIr/!Я]g\2b,>csG~/]}R-)vECI¾j-V.)El$&fr<%= ?xBj-nݚxu S f{5|-hVuʯt_]Wa8PX ׿_oMQKn=bnsX޸zx=kr,#q1c吨cH_Kt|?Ļ3HO @7\;D{*{:v{&&L8aOg-PesZш=.ỷ/- SϟiJ~(PV%X"蚎4(@nh (eVپUHIQ-{{mӹjO$ )Ӥ=΃zfAq)(lCuGǂxH$᭟ 'ʪ+=f>$~</ڴ*{ VxB/ظ/L }nw.T1-绿0Qr+nY[Ρ7+; >mY8 zԐH \+q%a^A.>$s&sDS |<Br^H@Y.|yt,\N:늸4ę٧Q~f&?\Lԏ#м߯0?T\ X8h<}zYƭ?6}_p=p̴mrsc"pBTNc. vZϧ.Pリ\5lΪf)ϦW vmم[waќ% 0  -= YٙhԸ# M6Fa(Djjb?n`ؽmv@ JPUQ`U@ X9 5#UBϏsp^ f;.AFơ#SBB">+l[O:OYs}u}Ȋ 7 Qt|dn臐K}'9BX(HWר VWǾbUi㊑|BUwFR}պT'm;)_bQ/ċS'ES/Dv=B:bccoa^o?T,Ou‘qNo@09;||qs12h r>߉N/D4c=<.M@8AfT`Qn~jqȹ?[?m*owRw;3~CJJ 0 e(ptkMLӄi: vUΨc$u"%`F A@jZQ/G &N?!ēgD@ ,_|Ngb¸NDm !!qx#ۺmʰ+-}{U[(Sۣr3=)1"Q2wJT5zm/\1ɿh H 8"WZe[7i R~VzN07qٱ'Kt82Դ8[2RS}gb5YENF&{OwYUϗ9!$@ 0 Q(Rhj[k[oZ{[{8+2a9 $2}ӹ;{>C ӧ$;{<'&kwN"P!U.OyEXW-)>h%O)?̹?]7/<#D5_\OK)4W8DW3.m`t wIEP}en@VŮDyԝsd>% W%Z(p:iJս#&_g}e_p&64YU 48}?B"Qm2~vK? 
|Ĥ'%7z]"lΆnvOIo9c7^wj׵Zܝ\d[3RҌuGD[o^nnXὸXuNjh$'>P6`Nw&|>2}f}Zow܏-p*ڂxp3B%L+a# 1qxj+ዊCUJqkұ@4JUl~' џ9ΨSDIdV)5Eƥ Zryĸ،n8yd~I#N6s=f=% Q~u7Od-u'sG' TC5?̤tBr7ךчVhwW߱ n1̗)Crq%"wtۍf`x>mŮ|Ywn?'bJNgIqF w[ȝE]S 1X˱zv[C/Eg4/;\ ˲pQ89W#%aex:z+Vaw .F2eu|@jV۪U$η"pmɊ` +mo ɕ[-N<Jv L^?Iͅ)Cp@PEwqeT;olU5-קrzfwrZK>QfY2B@R҆w8 cLe!#aε^3w0]hlm詶w3C0r|~QV7E+&粯gp6QQu=6w&ǁҒ3P8쩆c'Q>8 lWWf<Ƒ9w`#|7Ꮿ#Cx]."TNЏE )9"("E>?5`(S!=G.F6}frٵ@r[ǣ&~n t&HJ$G\,-/#hfpp+QGO+CQkGhkuT]!',ګlÅs܄0Xʛ-HVe2:Z!PM1/5Ѹw6_WTƆ痌hߦ oo{b{v;@:#?PZlgl:*j қ:vTm!GmO7übЬe a?q'g\k#)mU5}(E9)0_^ϻf7xxZ<)mFeMuR7AV-!6γ1|N4_>QQk:F/dQzQ$$fGS(phM4e΄ y]qR9֥WzVqkk#sFYDzT< bЈl)㗡\Wc`/ uM¨J4] Ǜzi$l=0Jˑw^Gs5&t^[kȽ4E-^{*ƥ6(-;  Z=?956&/,Jwrp؄|8; tTi`jﬡΏINގyc0 |_GR#K}cSLj6ķF{=8>O'oHC8XZm+/9If 3]Y0! q]"@ %7"u͇OÖ\a{vVM^FGcWs6FYH]}(F*Z;̪خH//%DO}U"y2F-HO~teTDm3# `YiMVqrKe&sU o:H{l]:AD""R-_ŸT6ySЍ'{DI/ oCO]JW VƧ_MIjNn,؀.ட,¨93 {g_n{9 JS5-rF~S1QS7We&btAM+x#$[rvwx֫7ׯʊv[ +?4^lgD_| UHql#EdF `2g/ltHmiryjn: vpzJ[/j ?U P$КkG*rе;m1v"DwIm$EyT .T VCApC_lk`;1ؕH>TFs?4^H|4TTV_5#Kuˆ{WE  ۘ>B0ll91k+ѓ03iq{Z-j{xo{1{YA=s:&>vHd$?תO}]?`O}]S|m^;=џ1w`󁽭ޒ#޳ԛD$'4--9`# .!~pΓ{e`tC2nR" *h޼R]M'\ĽJ$lmatJDR @dʁ:6;_g,9g'\ڻj[o\X%TwEJJV)6Vk84 E"/I4o2 '9i dQ|XI#61_r~߹`^E;˽B=ڬ9|hYwq;dtHCQb(/x < HJl߈C[R 7_43faYxۆVTm56oS)O̞ 61<Z;|됞* Z805/9uƕFOJ1QhTWȥD^=ω |NĠ#PkS#VW8N{؊,yd_c3h?%Ɨύ:MtÙ_~/3ryE!"P@C(/BK'IꩣFE(_)d(0-coƍߺ]3O0ُ?4F+r5ˈ׫u"ua}X;YcM9>OgG>ݺ#'o%XxH~SeOaqbgnq_Ž#dlo~1>| <>~V>ߵ'Nl]|Myowpql>>£?6HKOo2^hĸvYsf`#ysO?!#P.Zffʭ#RR?g;oگEa@8!܋๓gғ{WXꝠj7&' (p4@2 Ȫb,:o' e^ /hMpZTyN֪V"D)Ak/DU_L;~ Ar R&6 7 ~N,a.b9a:#mQ! k&h>ӒxFS-þ]\SCy)ZKY&"f?D|HOJv]H;V2F[cE?~Xq;oz ]dp O!Pa+ ʀ>]3y_kR?y9#k~Z4eZ_[;zη; ˲p 7沯>غu=39Ӈmu0g6q}z ̾'#FsZ4RtĘ֏iQtKH{T W/KXJ4Ǣ.wk''N U Bj+#d*{|e=ߝ+=*F$e@W tj|8C{Vk9P,ݲ:$$%`׿o͚ a:7ELg/9{n>{T.:?,~2|i ֮]RdL0ʀIL9ٍ6>?or DHYCVОSvݳ0~OU4kMQ\7b|Z|'8v=q.q xۍLdaG-kO5wmC{"oKCx१gM GKqߞ}EOO^+w0: Xx,yvlޅW1 X>iQIhs˂e0Bޑt8:F%-sa;ֹe C ! 
m>Rc" p*;-5|M9Ng #F2-xX\zgbo OW-u_"u\zRElԻm&8n%|B`Y>e<>@T i7s'Մ3<ǰ9!b7S\.5/KK\/܍=G7: G0Mko6h8]}vzj']9Y}=k4DzR׵ڀ++<%5})iLNCFr*2S"&!51 QQ8][eDZ~|k U89+&Nú|VgO}1QѸr*gZU[.h˲pq E~|un15Sx}rk'Ɵ]RwF& 2c.VaLd9n> {YQuaE5x{{O虚 񎥊ӓ 6:Uu8Z^G`|.:$_;'!m7ƼqSqS /׏6O>Ԭ} >f(|xqiQrZ%_-&XҘkos pj :V1=˦uZ#Ω- ]2 N׫;i+{Ѿ|9q¨ OZf͇ذmDn:Sqm)1wxA!?mmi L\F?Q[GzffΛ>mIc QzgfO|+׶XǗ߈ow⶧ycjH隉$Ba(փl&GnuQa}6ۧI!{/SQ|pC(לOuho,L0+n󿆝GԐk.l¸'лkFE݁sُ^zg`jH Yݐ0J+p{k!+[~hV_\w1^Xa>oOֵ`L4tO@U] /ڂXk+RؼGʎHf!o|k~33聉s0g_. (8- Ѷ (;]icX~тZtr%C/@]1qtxLG0_y4 ]7vǬQQx/z\?p_ن/lkR9Y}Ƨu}C!W5cHHMLjѺ9?zi{L+vo5?h#7$ dC$$?~}FcK_ 1s>BvXSbM36:6<=sR*bq3fY:+8x'x{&wY%h* ymvب~i-Yiݐݽ{j:&%LKK*0fb|̙s$t|k6/. E4 "Vrz.`6ԈKZ QQ89NEƐG5sRq*rIѹzlPD1XTEwOYAFV4OSGT m/kzӕ 맅 EEgp@e?^;| `i ~__^>s=E5j"~~խHi]iCGod٫r?k4(?ݱs<$FMߏ?EG)>>ٶh/ݼMs2jJm=T&GO`C3U ƒWބ[yY,ݴiK7iVθeYƅԭkp 7cVo_.yg;6܊j[)do)>fnr='ꡇz'YDޥ1kݛY WIDAT>8Q\J1s@5(9"j8U$E5w@-;p 2/R li|R#PC[؋ r嘻>W/yo")}F_|Oh=A>PF0D!0}4 hηgt`/x&AbW>1πveYS5az=wfWfGzg9WaE^(Զi^?SlfNJdNJqڳ3z6)7ߋ=E|+؉G7,gD]0h.-n|(a;{wذ!a 0i0\:jX]Q +#39 C(VTr)+ڂz;@qEBAlT4Ź[T97`#1w{!>xcߋښ:J{jt># m># }Y@V@UjbJwG h:z+/ ,g&|P, :?cc0hx6f̝y2 ÜmUOoz#RCQy)*ka6⑕֭ŹAs((GEiTk$tOMG\b.e -lwt%JN_h_R+siB0U:B!ĢkdH8 LTK>ņQZ\ _OOg_;P jQm&0kM'v%n^uD᳨ZTΎ_^6IM;)sI)y#wOǴyH떆Ga/a:1J5˰`B*6|B;/Moʐ\<Ho *g͆Uؿj$W#l]@N5oCrzvKLF{}x?q dzʃ)N *WsUI+ڛR}Rd3V0/ʇ}7k2zo Waa:/= es6V Xl;>A^h͸T#LJ"Gchxi~3=(nOj|=Ou9h=&_+bDC!z遱G!wp3 0 %5X!!&ߙ{ .=0$l3mΏƕ[PtU5E \~e7[oٝOsaDѕgxq7MA"+tea|a) Hj!ѣFNqycӺ#qa8^.~Nk w\ϴ;{`ػmN/ FE^qq(Ehs+"'G L1IT|)㈰ޖkJi=GIkPs5Kqҧ^ p)@ I ? 
'b蘜~a9]m\>!L)i=m9'`#9a}.9PʲJB!X>*V)޹ċ+wMeϚ/闶uZ{y> +'7F̪Yx6'4R9A"{7 qSG#[yWaܼe-ޫ8][}Ѿ(13sclH3Y YÞ}_x TWUEVΗߓ=-ݞJe9[s XFǃ ݰ7>@CZlgZPp +bԤHNai?NWK_ŧ;65~ zuEzR2n6 SsFv欂|_Ǟ}سGEiI*"HsٻHܹK^چ$[ lc{wu}72 %=4/ `[@bDt}A7oG?naaĬ+؉M-> 1+1Ԏ6ÜuϜT`=8TpNJNx9NWFmu- ,H?c`7hJE[n 4g?U-BaCaDEG!K"R+HHG0 0 XoYblf.n]R:z sVF>өCmMu~ " "_kb{\y">}TQBtl bbX!>!qqqKCLlLGo;0 0L CB] X@bl0 0 0 tuVQPf%tEXtdate:create2013-05-13T18:16:51-04:00o%tEXtdate:modify2013-05-13T18:09:43-04:00u8IENDB`././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/_static/sidebar.js0000644000076500000240000001206014456461441016034 0ustar00runnerstaff/* * sidebar.js * ~~~~~~~~~~ * * This script makes the Sphinx sidebar collapsible. * * .sphinxsidebar contains .sphinxsidebarwrapper. This script adds in * .sphixsidebar, after .sphinxsidebarwrapper, the #sidebarbutton used to * collapse and expand the sidebar. * * When the sidebar is collapsed the .sphinxsidebarwrapper is hidden and the * width of the sidebar and the margin-left of the document are decreased. * When the sidebar is expanded the opposite happens. This script saves a * per-browser/per-session cookie used to remember the position of the sidebar * among the pages. Once the browser is closed the cookie is deleted and the * position reset to the default (expanded). * * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. * :license: BSD, see LICENSE for details. * */ $(function() { // global elements used by the functions. 
// the 'sidebarbutton' element is defined as global after its // creation, in the add_sidebar_button function var bodywrapper = $('.bodywrapper'); var sidebar = $('.sphinxsidebar'); var sidebarwrapper = $('.sphinxsidebarwrapper'); // original margin-left of the bodywrapper and width of the sidebar // with the sidebar expanded var bw_margin_expanded = bodywrapper.css('margin-left'); var ssb_width_expanded = sidebar.width(); // margin-left of the bodywrapper and width of the sidebar // with the sidebar collapsed var bw_margin_collapsed = '.8em'; var ssb_width_collapsed = '.8em'; // colors used by the current theme var dark_color = '#AAAAAA'; var light_color = '#CCCCCC'; function sidebar_is_collapsed() { return sidebarwrapper.is(':not(:visible)'); } function toggle_sidebar() { if (sidebar_is_collapsed()) expand_sidebar(); else collapse_sidebar(); } function collapse_sidebar() { sidebarwrapper.hide(); sidebar.css('width', ssb_width_collapsed); bodywrapper.css('margin-left', bw_margin_collapsed); sidebarbutton.css({ 'margin-left': '0', //'height': bodywrapper.height(), 'height': sidebar.height(), 'border-radius': '5px' }); sidebarbutton.find('span').text('»'); sidebarbutton.attr('title', _('Expand sidebar')); document.cookie = 'sidebar=collapsed'; } function expand_sidebar() { bodywrapper.css('margin-left', bw_margin_expanded); sidebar.css('width', ssb_width_expanded); sidebarwrapper.show(); sidebarbutton.css({ 'margin-left': ssb_width_expanded-12, //'height': bodywrapper.height(), 'height': sidebar.height(), 'border-radius': '0 5px 5px 0' }); sidebarbutton.find('span').text('«'); sidebarbutton.attr('title', _('Collapse sidebar')); //sidebarwrapper.css({'padding-top': // Math.max(window.pageYOffset - sidebarwrapper.offset().top, 10)}); document.cookie = 'sidebar=expanded'; } function add_sidebar_button() { sidebarwrapper.css({ 'float': 'left', 'margin-right': '0', 'width': ssb_width_expanded - 28 }); // create the button sidebar.append( '

«
' ); var sidebarbutton = $('#sidebarbutton'); // find the height of the viewport to center the '<<' in the page var viewport_height; if (window.innerHeight) viewport_height = window.innerHeight; else viewport_height = $(window).height(); var sidebar_offset = sidebar.offset().top; var sidebar_height = sidebar.height(); //var sidebar_height = Math.max(bodywrapper.height(), sidebar.height()); sidebarbutton.find('span').css({ 'display': 'block', 'margin-top': sidebar_height/2 - 10 //'margin-top': (viewport_height - sidebar.position().top - 20) / 2 //'position': 'fixed', //'top': Math.min(viewport_height/2, sidebar_height/2 + sidebar_offset) - 10 }); sidebarbutton.click(toggle_sidebar); sidebarbutton.attr('title', _('Collapse sidebar')); sidebarbutton.css({ 'border-radius': '0 5px 5px 0', 'color': '#444444', 'background-color': '#CCCCCC', 'font-size': '1.2em', 'cursor': 'pointer', 'height': sidebar_height, 'padding-top': '1px', 'padding-left': '1px', 'margin-left': ssb_width_expanded - 12 }); sidebarbutton.hover( function () { $(this).css('background-color', dark_color); }, function () { $(this).css('background-color', light_color); } ); } function set_position_from_cookie() { if (!document.cookie) return; var items = document.cookie.split(';'); for(var k=0; k-ByK9x ` RjpNW _ [(t[j$uf+9W .dPNb0р"H NO!"ܾ=8Ǝ!(@9"2BD] d tH@ 6]q++sŏGn y8Ar0@@g:|^y yp9&_ W2h % |'"@/R1XW,H=l`F+"c38 H8#pZN(!8OFsU"Hu:^)$hs ZXZxPl:YYZb4%3GZ\@ni6z,"Y`@P)LPJر\r7z~ iUqW|^D Ҥĝi=aC!D_!DZZ&䩰5ɞ!+Pn ;/,»0;pB!"| 0rA (2ƒ$Ä|0!"g qκ`9M }uzl0+z,006R)` H] +bu<] !_"[Z5,z TP*c WJ mГǟr x~]hA -{P^d؃sDe !' .Ԅ_k 5!F!![~J [JM2/v7:=8f}dq gPD\QUkOpOU#5͈ZCPq;DLD`ZKw@8! P/0HW0&rĤRb'C}P&Q OAJy ShA &ZS`(̰MLz$j_lKAܕxZ*( @!G$p&]L1*OqȈBə@,0v0'Ad!m40-@Z@!@HQ g3E-ah2 C%Z^ul @Rp-DA"M" ֜3E⸹|lsZ8<,5 ju%#.x q!6yN+S ȠԌDh _@< B9љOFԤ6uRRWC )P! ZYGز v"^]r҅vy ,L6 @@La %$! 
D(HGJws"l N HA]gH>D(x@YE"TT.kZ $u>&spIH2W/`Bg<tD W`DLMv:F2w_nY񌏈?,fukZ*b"Cz<]@U_OԈٛ>nS^yTؼve#۽~o>L1i3R^"((tezQ!Y 4їn~2%;Wz]gyS&6܇wix7'U"!)","!TtRB1`gQ%屁G yDDuwі|e3}n|rAO SFw6fs)`1H(1`.% 3s#"L" PH.,c!F`.7r-FAa^7;sU}P9e %K\œawEz# 3&P$29=s4_Q43#42243g-Hm ,^vgvH6$;~XS a7sFHwiA{t:Lr8)%"S:y7sEp Pij6e0gPU"s0r0@i$#`<1P!ӏncc=%@uB$`;!/L%=S) pSr 3|1>עqC(?![ H`aL;BqC)ꈓ\i ~j4p4)גIa)IP!p9:ؕx p3c*Aj!  ~~ 9hRwk) lI pSaٚT暶 I)x …Ĺʙ[9}ՙI[`܆ 9){8 9 ੋZCU'%(`0o `y_ *) ":iA H w U k JWX("jK> ʚ Z 9ʤp7j 9p a.Cp_>v9D;0QpI)djh$J ]Zp ) WV\̧ %z `*K4NƣH4 XDL Z*IJzy: xPVʢP[p襕ШnR$U基*Uz* U 0P $ WF]zСQYC@NZ< UJRZS?  Jy SJ;Z 9:p ڊB*GD:DE*A@_jXbRI: KJ|}P*]`T 6Ц$X-p g?P1Q4fDn%I-cFM* YZe-z # x $w4 *ڲZC<IJpXF!DeUiMnIGJ @7m@tt v=etecp UUG+XUp{ꪅP G1rе+k`W-PkBBF!;_%I+o:UcUkp+ฏѷ wMk S닐X[_[k9pFKE˷ 1:Yvh[ + 9:]*L{ھC54-0*TkL@IW'.P+JM0 zO[e\Z{ ִ;^ď f;Q>HL`KZb\L5PDV@VZ^ܽзaZ{rž_g$\6?pz'H Wmi~|$J ȋ*":ʤ p - $JJT+ ˾<̾'_@ȜA Cڦ | ;ڞ!ʢ_ڙ9ɘ*w,j ]iɄ@T,[yB ]zpąW&̈́aL}Lϭ|z K]΄@\ע"vEbma@f;ИYh-:`uԌڙ C!E@jYxS=\b L ۤ͆^;n! s{fe #x|=̘p'0 h-mjpclA¦lfy1١V|lT0 h0܂Х]Lk o@pH'n2#qvqٍqp C= j<ל& m͙s R! uxH? tEuI  >|ӕХA]D HwI' =U$=4>xY{.n #0"oL؏ ͋L=zضHב #{%Y6-##/{:b{흴 ~,،pm- 95~"q&7Λ)׳W~-r~bۄd}|x~ڸ -P<`QB{B-)BD ߅#믖6: :N ](+Є7`ר.bn+ Y8C) {Ҏ,MX>l- ! ,@Qˮ'*4^;.36#M8s2X~h(1 2/s(xnҋDUК^豌h hXJ 9%z"(:с~=gը ($/Сv#>Ʀ#96>#ĒII )il 'Ak܃OMͤjA/MMt'E'2HDD?cx( ܵ[L Ҝ,_>i)ITֲpY8IK*_qB*=ObM0jcإ _aMO)@jPp}F_(".88.)3% "ö̵/ΰ  *><4K ޱnc%bq% dlbZvZ 5|~$DN4O$I+MW=Vp؈a0Ug+)+3LĄQ x-q(ֳjYK7FɊj r]#?]|HW_[2rDj5vZ|økAE!0yClS9gqt";et&?9feFsM01ge̅VG2<6 mSRѣ 2I(!c0BH }ž{$}@/# BuKi@ZS u5^UBx#sN)& 0~rH(@"12Qu -(.Hɂ;]APF)%%ák+Qe/' x84S eHA l (l @$矆 r="U RS!؉' zgp 0YbvzI&Zh aIZFj0A2 `A&|@B 9AD(RDBB {lꠂnµM :v͕Jb.TITd˘0`DXzn)m$Dc@(|ܢȄB&/Docs for other versions

Other resources

././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/_templates/layout.html0000644000076500000240000000246614456461441017010 0ustar00runnerstaff{% extends "!layout.html" %} {%- block extrahead %} {{ super() }} {% if not embedded %}{% endif %} {% endblock %} {% block rootrellink %}
  • Project Homepage{{ reldelim1 }}
  • {{ shorttitle }}{{ reldelim1 }}
  • {% endblock %} {% block footer %} {% endblock %} ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.6124585 deap-1.4.1/doc/_themes/0000755000076500000240000000000014456461475014073 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.6568913 deap-1.4.1/doc/_themes/pydoctheme/0000755000076500000240000000000014456461475016234 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1689936700.657385 deap-1.4.1/doc/_themes/pydoctheme/static/0000755000076500000240000000000014456461475017523 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/_themes/pydoctheme/static/pydoctheme.css0000644000076500000240000000541714456461441022376 0ustar00runnerstaff@import url("default.css"); body { background-color: white; margin-left: 1em; margin-right: 1em; } div.related { margin-bottom: 1.2em; padding: 0.5em 0; border-top: 1px solid #ccc; margin-top: 0.5em; } div.related a:hover { color: #41cdce; } div.related:first-child { border-top: 0; border-bottom: 1px solid #ccc; } div.sidebar { background-color: #eeeeee; } div.sphinxsidebar { background-color: #eeeeee; border-radius: 5px; line-height: 130%; font-size: smaller; } div.sphinxsidebar h3, div.sphinxsidebar h4 { margin-top: 1.5em; } div.sphinxsidebarwrapper > h3:first-child { margin-top: 0.2em; } div.sphinxsidebarwrapper > ul > li > ul > li { margin-bottom: 0.4em; } div.sphinxsidebar a:hover { color: #41cdce; } div.sphinxsidebar input { font-family: 'Lucida Grande','Lucida Sans','DejaVu Sans',Arial,sans-serif; border: 1px solid #999999; font-size: smaller; border-radius: 3px; } div.sphinxsidebar input[type=text] { max-width: 150px; } div.body { padding: 0 0 0 1.2em; } div.body p { line-height: 140%; } div.body h1, div.body h2, div.body h3, div.body h4, div.body h5, div.body h6 { margin: 0; border: 0; padding: 
0.3em 0; } div.body hr { border: 0; background-color: #ccc; height: 1px; } div.body pre { border-radius: 3px; border: 1px solid #ac9; } div.body div.admonition, div.body div.impl-detail { border-radius: 3px; } div.body div.impl-detail > p { margin: 0; } div.body div.seealso { border: 1px solid #dddd66; } a.reference em { font-style: normal; } div.body a { color: #167171; } div.body a:visited { color: #f38b28; } div.body a:hover { color: #41cdce; } tt, pre { font-family: monospace, sans-serif; font-size: 96.5%; } div.body tt { border-radius: 3px; } div.body tt.descname { font-size: 120%; } div.body tt.xref, div.body a tt { font-weight: normal; } .deprecated, .deprecated-removed { background-color: #ffe4e4; border: 1px solid #f66; padding: 7px } .deprecated { border-radius: 3px; } table.docutils { border: 1px solid #ddd; min-width: 20%; border-radius: 3px; margin-top: 10px; margin-bottom: 10px; } table.docutils td, table.docutils th { border: 1px solid #ddd !important; border-radius: 3px; } table p, table li { text-align: left !important; } table.docutils th { background-color: #eee; padding: 0.3em 0.5em; } table.docutils td { background-color: white; padding: 0.3em 0.5em; } table.footnote, table.footnote td { border: 0 !important; } div.footer { line-height: 150%; margin-top: -2em; text-align: right; width: auto; margin-right: 10px; } div.footer a:hover { color: #41cdce; } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/_themes/pydoctheme/theme.conf0000644000076500000240000000111714456461441020176 0ustar00runnerstaff[theme] inherit = default stylesheet = pydoctheme.css pygments_style = sphinx [options] bodyfont = 'Lucida Grande', 'Lucida Sans', 'DejaVu Sans', Arial, sans-serif headfont = 'Lucida Grande', 'Lucida Sans', 'DejaVu Sans', Arial, sans-serif footerbgcolor = white footertextcolor = #555555 relbarbgcolor = white relbartextcolor = #666666 relbarlinkcolor = #444444 sidebarbgcolor = white 
sidebartextcolor = #444444 sidebarlinkcolor = #444444 bgcolor = white textcolor = #222222 linkcolor = #0090c0 visitedlinkcolor = #00608f headtextcolor = #1a1a1a headbgcolor = white headlinkcolor = #aaaaaa ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/about.rst0000644000076500000240000000375214456461441014313 0ustar00runnerstaff.. image:: _static/deap_long.png :width: 300 px :align: right :target: index.html .. image:: _static/lvsn.png :width: 175 px :align: right :target: http://vision.gel.ulaval.ca/ .. image:: _static/ul.gif :width: 175 px :align: right :target: http://www.ulaval.ca/ About DEAP ========== Main Contributors ----------------- In alphabetical order - `François-Michel De Rainville `_ - `Félix-Antoine Fortin `_ - `Christian Gagné `_ - Olivier Gagnon - Marc-André Gardner - Simon Grenier - Yannick Hold-Geoffroy - Marc Parizeau DEAP is developed at the `Computer Vision and Systems Laboratory (CVSL) `_ at `Université Laval `_, in Quebec city, Canada. Publications on DEAP -------------------- - Félix-Antoine Fortin, François-Michel De Rainville, Marc-André Gardner, Marc Parizeau and Christian Gagné, "DEAP: Evolutionary Algorithms Made Easy", Journal of Machine Learning Research, pp. 2171-2175, no 13, jul 2012. - François-Michel De Rainville, Félix-Antoine Fortin, Marc-André Gardner, Marc Parizeau and Christian Gagné, "DEAP: A Python Framework for Evolutionary Algorithms", in EvoSoft Workshop, Companion proc. of the Genetic and Evolutionary Computation Conference (GECCO 2012), July 07-11 2012. Citation -------- Authors of scientific papers including results generated using DEAP are encouraged to cite the following paper. .. 
code-block:: latex @article{DEAP_JMLR2012, author = " F\'elix-Antoine Fortin and Fran\c{c}ois-Michel {De Rainville} and Marc-Andr\'e Gardner and Marc Parizeau and Christian Gagn\'e ", title = { {DEAP}: Evolutionary Algorithms Made Easy }, pages = { 2171--2175 }, volume = { 13 }, month = { jul }, year = { 2012 }, journal = { Journal of Machine Learning Research } } ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.6608706 deap-1.4.1/doc/api/0000755000076500000240000000000014456461475013220 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/api/algo.rst0000644000076500000240000000314014456461441014663 0ustar00runnerstaffAlgorithms ========== .. automodule:: deap.algorithms Complete Algorithms ------------------- These are complete boxed algorithms that are somewhat limited to the very basic evolutionary computation concepts. All algorithms accept, in addition to their arguments, an initialized :class:`~deap.tools.Statistics` object to maintain stats of the evolution, an initialized :class:`~deap.tools.HallOfFame` to hold the best individual(s) to appear in the population, and a boolean `verbose` to specify whether to log what is happening during the evolution or not. .. autofunction:: deap.algorithms.eaSimple(population, toolbox, cxpb, mutpb, ngen[, stats, halloffame, verbose]) .. autofunction:: deap.algorithms.eaMuPlusLambda(population, toolbox, mu, lambda_, cxpb, mutpb, ngen[, stats, halloffame, verbose]) .. autofunction:: deap.algorithms.eaMuCommaLambda(population, toolbox, mu, lambda_, cxpb, mutpb, ngen[, stats, halloffame, verbose]) .. autofunction:: deap.algorithms.eaGenerateUpdate(toolbox, ngen[, stats, halloffame, verbose]) Variations ---------- Variations are smaller parts of the algorithms that can be used separately to build more complex algorithms. .. autofunction:: deap.algorithms.varAnd .. 
autofunction:: deap.algorithms.varOr Covariance Matrix Adaptation Evolution Strategy =============================================== .. automodule:: deap.cma .. autoclass:: deap.cma.Strategy(centroid, sigma[, **kargs]) :members: .. autoclass:: deap.cma.StrategyOnePlusLambda(parent, sigma[, **kargs]) :members: .. autoclass:: deap.cma.StrategyMultiObjective(population, sigma[, **kargs]) :members: ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/api/base.rst0000644000076500000240000000056714456461441014665 0ustar00runnerstaffBase ==== .. automodule:: deap.base Toolbox ------- .. autoclass:: deap.base.Toolbox .. automethod:: deap.base.Toolbox.register(alias, method[, argument[, ...]]) .. automethod:: deap.base.Toolbox.unregister(alias) .. automethod:: deap.base.Toolbox.decorate(alias, decorator[, decorator[, ...]]) Fitness ------- .. autoclass:: deap.base.Fitness([values]) :members:././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/api/benchmarks.rst0000644000076500000240000001226614456461441016067 0ustar00runnerstaff========== Benchmarks ========== .. automodule:: deap.benchmarks ============================= ============================= ============================ ===================== Single Objective Continuous Multi Objective Continuous Binary Symbolic Regression ============================= ============================= ============================ ===================== :func:`cigar` :func:`fonseca` :func:`~binary.chuang_f1` :func:`~gp.kotanchek` :func:`plane` :func:`kursawe` :func:`~binary.chuang_f2` :func:`~gp.salustowicz_1d` :func:`sphere` :func:`schaffer_mo` :func:`~binary.chuang_f3` :func:`~gp.salustowicz_2d` :func:`rand` :func:`dtlz1` :func:`~binary.royal_road1` :func:`~gp.unwrapped_ball` :func:`ackley` :func:`dtlz2` :func:`~binary.royal_road2` :func:`~gp.rational_polynomial` :func:`bohachevsky` :func:`dtlz3` .. 
:func:`~gp.rational_polynomial2` :func:`griewank` :func:`dtlz4` .. :func:`~gp.sin_cos` :func:`h1` :func:`zdt1` .. :func:`~gp.ripple` :func:`himmelblau` :func:`zdt2` .. .. :func:`rastrigin` :func:`zdt3` .. .. :func:`rastrigin_scaled` :func:`zdt4` .. .. :func:`rastrigin_skew` :func:`zdt6` .. .. :func:`rosenbrock` .. .. .. :func:`schaffer` .. .. .. :func:`schwefel` .. .. .. :func:`shekel` .. .. .. ============================= ============================= ============================ ===================== Continuous Optimization ======================= .. autofunction:: deap.benchmarks.cigar .. autofunction:: deap.benchmarks.plane .. autofunction:: deap.benchmarks.sphere .. autofunction:: deap.benchmarks.rand .. autofunction:: deap.benchmarks.ackley .. autofunction:: deap.benchmarks.bohachevsky .. autofunction:: deap.benchmarks.griewank .. autofunction:: deap.benchmarks.h1 .. autofunction:: deap.benchmarks.himmelblau .. autofunction:: deap.benchmarks.rastrigin .. autofunction:: deap.benchmarks.rastrigin_scaled .. autofunction:: deap.benchmarks.rastrigin_skew .. autofunction:: deap.benchmarks.rosenbrock .. autofunction:: deap.benchmarks.schaffer .. autofunction:: deap.benchmarks.schwefel .. autofunction:: deap.benchmarks.shekel Multi-objective --------------- .. autofunction:: deap.benchmarks.fonseca .. autofunction:: deap.benchmarks.kursawe .. autofunction:: deap.benchmarks.schaffer_mo .. autofunction:: deap.benchmarks.dtlz1 .. autofunction:: deap.benchmarks.dtlz2 .. autofunction:: deap.benchmarks.dtlz3 .. autofunction:: deap.benchmarks.dtlz4 .. autofunction:: deap.benchmarks.zdt1 .. autofunction:: deap.benchmarks.zdt2 .. autofunction:: deap.benchmarks.zdt3 .. autofunction:: deap.benchmarks.zdt4 .. autofunction:: deap.benchmarks.zdt6 Binary Optimization =================== .. automodule:: deap.benchmarks.binary .. autofunction:: deap.benchmarks.binary.chuang_f1 .. autofunction:: deap.benchmarks.binary.chuang_f2 .. autofunction:: deap.benchmarks.binary.chuang_f3 .. 
autofunction:: deap.benchmarks.binary.royal_road1 .. autofunction:: deap.benchmarks.binary.royal_road2 .. autofunction:: deap.benchmarks.binary.bin2float Symbolic Regression =================== .. automodule:: deap.benchmarks.gp .. autofunction:: deap.benchmarks.gp.kotanchek .. autofunction:: deap.benchmarks.gp.salustowicz_1d .. autofunction:: deap.benchmarks.gp.salustowicz_2d .. autofunction:: deap.benchmarks.gp.unwrapped_ball .. autofunction:: deap.benchmarks.gp.rational_polynomial .. autofunction:: deap.benchmarks.gp.rational_polynomial2 .. autofunction:: deap.benchmarks.gp.sin_cos .. autofunction:: deap.benchmarks.gp.ripple Moving Peaks Benchmark ====================== .. automodule:: deap.benchmarks.movingpeaks .. autoclass:: deap.benchmarks.movingpeaks.MovingPeaks(self, dim[, pfunc][, npeaks][, bfunc][, random][, ...]) :members: .. automethod:: deap.benchmarks.movingpeaks.MovingPeaks.__call__(self, individual[, count]) .. autofunction:: deap.benchmarks.movingpeaks.cone .. autofunction:: deap.benchmarks.movingpeaks.function1 Benchmarks tools ================ .. automodule:: deap.benchmarks.tools :members: convergence, diversity .. autofunction:: deap.benchmarks.tools.noise .. automethod:: deap.benchmarks.tools.noise.noise .. autofunction:: deap.benchmarks.tools.rotate .. automethod:: deap.benchmarks.tools.rotate.rotate .. autofunction:: deap.benchmarks.tools.scale .. automethod:: deap.benchmarks.tools.scale.scale .. autofunction:: deap.benchmarks.tools.translate .. automethod:: deap.benchmarks.tools.translate.translate././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/api/creator.rst0000644000076500000240000000023714456461441015404 0ustar00runnerstaffCreator ------- .. automodule:: deap.creator .. autofunction:: deap.creator.create(name, base[, attribute[, ...]]) .. 
autodata:: deap.creator.class_replacers././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/api/gp.rst0000644000076500000240000000070614456461441014354 0ustar00runnerstaffGenetic Programming =================== .. automodule:: deap.gp .. autoclass:: deap.gp.PrimitiveTree :members: .. autoclass:: deap.gp.PrimitiveSet :members: .. autoclass:: deap.gp.Primitive :members: .. autoclass:: deap.gp.Terminal :members: .. autoclass:: deap.gp.Ephemeral :members: .. autofunction:: deap.gp.compile .. autofunction:: deap.gp.compileADF .. autoclass:: deap.gp.PrimitiveSetTyped :members: .. autofunction:: deap.gp.graph ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/api/index.rst0000644000076500000240000000027314456461441015054 0ustar00runnerstaffLibrary Reference ================= Description of the functions, classes and modules contained within DEAP. .. toctree:: :maxdepth: 2 creator base tools algo gp benchmarks ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/api/tools.rst0000644000076500000240000002073114456461441015106 0ustar00runnerstaffEvolutionary Tools ================== .. automodule:: deap.tools .. _operators: Operators --------- The operator set does the minimum job for transforming or selecting individuals. This means, for example, that providing two individuals to the crossover will transform those individuals in-place. The responsibility of making offspring(s) independent of their parent(s) and invalidating the fitness is left to the user and is generally fulfilled in the algorithms by calling :func:`toolbox.clone` on an individual to duplicate it and ``del`` on the :attr:`values` attribute of the individual's fitness to invalidate it. 
Here is a list of the implemented operators in DEAP, ============================ =========================================== ========================================= ========================================= ================ Initialization Crossover Mutation Selection Migration ============================ =========================================== ========================================= ========================================= ================ :func:`initRepeat` :func:`cxOnePoint` :func:`mutGaussian` :func:`selTournament` :func:`migRing` :func:`initIterate` :func:`cxTwoPoint` :func:`mutShuffleIndexes` :func:`selRoulette` .. :func:`initCycle` :func:`cxUniform` :func:`mutFlipBit` :func:`selNSGA2` .. .. :func:`cxPartialyMatched` :func:`mutPolynomialBounded` :func:`selNSGA3` .. .. :func:`cxUniformPartialyMatched` :func:`mutUniformInt` :func:`selSPEA2` .. .. :func:`cxOrdered` :func:`mutESLogNormal` :func:`selRandom` .. .. :func:`cxBlend` .. :func:`selBest` .. .. :func:`cxESBlend` .. :func:`selWorst` .. .. :func:`cxESTwoPoint` .. :func:`selTournamentDCD` .. .. :func:`cxSimulatedBinary` .. :func:`selDoubleTournament` .. .. :func:`cxSimulatedBinaryBounded` .. :func:`selStochasticUniversalSampling` .. .. :func:`cxMessyOnePoint` .. :func:`selLexicase` .. .. .. .. :func:`selEpsilonLexicase` .. .. .. .. :func:`selAutomaticEpsilonLexicase` .. ============================ =========================================== ========================================= ========================================= ================ and genetic programming specific operators. 
================================ =========================================== ========================================= ================================ Initialization Crossover Mutation Bloat control ================================ =========================================== ========================================= ================================ :func:`~deap.gp.genFull` :func:`~deap.gp.cxOnePoint` :func:`~deap.gp.mutShrink` :func:`~deap.gp.staticLimit` :func:`~deap.gp.genGrow` :func:`~deap.gp.cxOnePointLeafBiased` :func:`~deap.gp.mutUniform` :func:`selDoubleTournament` :func:`~deap.gp.genHalfAndHalf` :func:`~deap.gp.cxSemantic` :func:`~deap.gp.mutNodeReplacement` .. .. .. :func:`~deap.gp.mutEphemeral` .. .. .. :func:`~deap.gp.mutInsert` .. .. .. :func:`~deap.gp.mutSemantic` .. ================================ =========================================== ========================================= ================================ Initialization ++++++++++++++ .. autofunction:: deap.tools.initRepeat .. autofunction:: deap.tools.initIterate .. autofunction:: deap.tools.initCycle .. autofunction:: deap.gp.genFull .. autofunction:: deap.gp.genGrow .. autofunction:: deap.gp.genHalfAndHalf .. autofunction:: deap.gp.genRamped Crossover +++++++++ .. autofunction:: deap.tools.cxOnePoint .. autofunction:: deap.tools.cxTwoPoint .. autofunction:: deap.tools.cxTwoPoints .. autofunction:: deap.tools.cxUniform .. autofunction:: deap.tools.cxPartialyMatched .. autofunction:: deap.tools.cxUniformPartialyMatched .. autofunction:: deap.tools.cxOrdered .. autofunction:: deap.tools.cxBlend .. autofunction:: deap.tools.cxESBlend .. autofunction:: deap.tools.cxESTwoPoint .. autofunction:: deap.tools.cxESTwoPoints .. autofunction:: deap.tools.cxSimulatedBinary .. autofunction:: deap.tools.cxSimulatedBinaryBounded .. autofunction:: deap.tools.cxMessyOnePoint .. autofunction:: deap.gp.cxOnePoint .. autofunction:: deap.gp.cxOnePointLeafBiased .. 
autofunction:: deap.gp.cxSemantic Mutation ++++++++ .. autofunction:: deap.tools.mutGaussian .. autofunction:: deap.tools.mutShuffleIndexes .. autofunction:: deap.tools.mutFlipBit .. autofunction:: deap.tools.mutUniformInt .. autofunction:: deap.tools.mutPolynomialBounded .. autofunction:: deap.tools.mutESLogNormal .. autofunction:: deap.gp.mutShrink .. autofunction:: deap.gp.mutUniform .. autofunction:: deap.gp.mutNodeReplacement .. autofunction:: deap.gp.mutEphemeral .. autofunction:: deap.gp.mutInsert .. autofunction:: deap.gp.mutSemantic Selection +++++++++ .. autofunction:: deap.tools.selTournament .. autofunction:: deap.tools.selRoulette .. autofunction:: deap.tools.selNSGA2 .. autofunction:: deap.tools.selNSGA3 .. autofunction:: deap.tools.selNSGA3WithMemory .. autofunction:: deap.tools.uniform_reference_points .. autofunction:: deap.tools.selSPEA2 .. autofunction:: deap.tools.selRandom .. autofunction:: deap.tools.selBest .. autofunction:: deap.tools.selWorst .. autofunction:: deap.tools.selDoubleTournament .. autofunction:: deap.tools.selStochasticUniversalSampling .. autofunction:: deap.tools.selTournamentDCD .. autofunction:: deap.tools.selLexicase .. autofunction:: deap.tools.selEpsilonLexicase .. autofunction:: deap.tools.selAutomaticEpsilonLexicase .. autofunction:: deap.tools.sortNondominated .. autofunction:: deap.tools.sortLogNondominated Bloat control +++++++++++++ .. autofunction:: deap.gp.staticLimit Migration +++++++++ .. autofunction:: deap.tools.migRing(populations, k, selection[, replacement, migarray]) Statistics ---------- .. autoclass:: deap.tools.Statistics([key]) :members: .. autoclass:: deap.tools.MultiStatistics(**kargs) :members: Logbook ------- .. autoclass:: deap.tools.Logbook :members: Hall-Of-Fame ------------ .. autoclass:: deap.tools.HallOfFame .. automethod:: deap.tools.HallOfFame.update .. automethod:: deap.tools.HallOfFame.insert .. automethod:: deap.tools.HallOfFame.remove .. automethod:: deap.tools.HallOfFame.clear .. 
autoclass:: deap.tools.ParetoFront([similar]) .. automethod:: deap.tools.ParetoFront.update History ------- .. autoclass:: deap.tools.History .. automethod:: deap.tools.History.update .. autoattribute:: deap.tools.History.decorator .. automethod:: deap.tools.History.getGenealogy(individual[, max_depth]) Constraints ----------- .. autoclass:: deap.tools.DeltaPenalty(feasibility, delta[, distance]) .. autoclass:: deap.tools.ClosestValidPenalty(feasibility, feasible, alpha[, distance]) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.6138449 deap-1.4.1/doc/code/0000755000076500000240000000000014456461475013361 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1689936700.666996 deap-1.4.1/doc/code/benchmarks/0000755000076500000240000000000014456461475015476 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/benchmarks/ackley.py0000644000076500000240000000125614456461441017315 0ustar00runnerstafffrom mpl_toolkits.mplot3d import Axes3D from matplotlib import cm from matplotlib.colors import LogNorm import matplotlib.pyplot as plt try: import numpy as np except: exit() from deap import benchmarks def ackley_arg0(sol): return benchmarks.ackley(sol)[0] fig = plt.figure() # ax = Axes3D(fig, azim = -29, elev = 50) ax = Axes3D(fig) X = np.arange(-30, 30, 0.5) Y = np.arange(-30, 30, 0.5) X, Y = np.meshgrid(X, Y) Z = np.fromiter(map(ackley_arg0, zip(X.flat,Y.flat)), dtype=np.float, count=X.shape[0]*X.shape[1]).reshape(X.shape) ax.plot_surface(X, Y, Z, rstride=1, cstride=1, norm=LogNorm(), cmap=cm.jet, linewidth=0.2) plt.xlabel("x") plt.ylabel("y") plt.show()././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/benchmarks/bohachevsky.py0000644000076500000240000000127614456461441020355 0ustar00runnerstafffrom mpl_toolkits.mplot3d import Axes3D from 
matplotlib import cm from matplotlib.colors import LogNorm import matplotlib.pyplot as plt try: import numpy as np except: exit() from deap import benchmarks def bohachevsky_arg0(sol): return benchmarks.bohachevsky(sol)[0] fig = plt.figure() ax = Axes3D(fig, azim = -29, elev = 50) # ax = Axes3D(fig) X = np.arange(-15, 15, 0.5) Y = np.arange(-15, 15, 0.5) X, Y = np.meshgrid(X, Y) Z = np.fromiter(map(bohachevsky_arg0, zip(X.flat,Y.flat)), dtype=np.float, count=X.shape[0]*X.shape[1]).reshape(X.shape) ax.plot_surface(X, Y, Z, rstride=1, cstride=1, norm=LogNorm(), cmap=cm.jet, linewidth=0.2) plt.xlabel("x") plt.ylabel("y") plt.show()././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/benchmarks/griewank.py0000644000076500000240000000117514456461441017654 0ustar00runnerstafffrom mpl_toolkits.mplot3d import Axes3D from matplotlib import cm import matplotlib.pyplot as plt try: import numpy as np except: exit() from deap import benchmarks def griewank_arg0(sol): return benchmarks.griewank(sol)[0] fig = plt.figure() ax = Axes3D(fig, azim = -29, elev = 40) # ax = Axes3D(fig) X = np.arange(-50, 50, 0.5) Y = np.arange(-50, 50, 0.5) X, Y = np.meshgrid(X, Y) Z = np.fromiter(map(griewank_arg0, zip(X.flat,Y.flat)), dtype=np.float, count=X.shape[0]*X.shape[1]).reshape(X.shape) ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap=cm.jet, linewidth=0.2) plt.xlabel("x") plt.ylabel("y") plt.show()././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/benchmarks/h1.py0000644000076500000240000000124214456461441016350 0ustar00runnerstafffrom mpl_toolkits.mplot3d import Axes3D from matplotlib import cm from matplotlib.colors import LogNorm import matplotlib.pyplot as plt try: import numpy as np except: exit() from deap import benchmarks def h1_arg0(sol): return benchmarks.h1(sol)[0] fig = plt.figure() # ax = Axes3D(fig, azim = -29, elev = 50) ax = Axes3D(fig) X = 
np.arange(-25, 25, 0.5) Y = np.arange(-25, 25, 0.5) X, Y = np.meshgrid(X, Y) Z = np.fromiter(map(h1_arg0, zip(X.flat,Y.flat)), dtype=np.float, count=X.shape[0]*X.shape[1]).reshape(X.shape) ax.plot_surface(X, Y, Z, rstride=1, cstride=1, norm=LogNorm(), cmap=cm.jet, linewidth=0.2) plt.xlabel("x") plt.ylabel("y") plt.show()././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/benchmarks/himmelblau.py0000644000076500000240000000124214456461441020157 0ustar00runnerstafffrom mpl_toolkits.mplot3d import Axes3D from matplotlib import cm from matplotlib.colors import LogNorm import matplotlib.pyplot as plt try: import numpy as np except: exit() from deap import benchmarks def himmelblau_arg0(sol): return benchmarks.himmelblau(sol)[0] fig = plt.figure() ax = Axes3D(fig, azim = -29, elev = 49) X = np.arange(-6, 6, 0.1) Y = np.arange(-6, 6, 0.1) X, Y = np.meshgrid(X, Y) Z = np.fromiter(map(himmelblau_arg0, zip(X.flat,Y.flat)), dtype=np.float, count=X.shape[0]*X.shape[1]).reshape(X.shape) ax.plot_surface(X, Y, Z, rstride=1, cstride=1, norm=LogNorm(), cmap=cm.jet, linewidth=0.2) plt.xlabel("x") plt.ylabel("y") plt.show()././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/benchmarks/kursawe.py0000644000076500000240000000152714456461441017527 0ustar00runnerstafffrom mpl_toolkits.mplot3d import Axes3D from matplotlib import cm import matplotlib.pyplot as plt try: import numpy as np except: exit() from deap import benchmarks X = np.arange(-5, 5, 0.1) Y = np.arange(-5, 5, 0.1) X, Y = np.meshgrid(X, Y) Z1 = np.zeros(X.shape) Z2 = np.zeros(X.shape) for i in range(X.shape[0]): for j in range(X.shape[1]): Z1[i,j], Z2[i,j] = benchmarks.kursawe((X[i,j],Y[i,j])) fig = plt.figure(figsize=(12,5)) ax = fig.add_subplot(1, 2, 1, projection='3d') ax.plot_surface(X, Y, Z1, rstride=1, cstride=1, cmap=cm.jet, linewidth=0.2) plt.xlabel("x") plt.ylabel("y") ax = fig.add_subplot(1, 
2, 2, projection='3d') ax.plot_surface(X, Y, Z2, rstride=1, cstride=1, cmap=cm.jet, linewidth=0.2) plt.xlabel("x") plt.ylabel("y") plt.subplots_adjust(left=0, right=1, bottom=0, top=1, wspace=0, hspace=0) plt.show()././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/benchmarks/movingsc1.py0000644000076500000240000000133214456461441017746 0ustar00runnerstafffrom mpl_toolkits.mplot3d import Axes3D from matplotlib import cm import matplotlib.pyplot as plt try: import numpy as np except: exit() import random rnd = random.Random() rnd.seed(128) from deap.benchmarks import movingpeaks sc = movingpeaks.SCENARIO_1 sc["uniform_height"] = 0 sc["uniform_width"] = 0 mp = movingpeaks.MovingPeaks(dim=2, random=rnd, **sc) fig = plt.figure() ax = Axes3D(fig) X = np.arange(0, 100, 1.0) Y = np.arange(0, 100, 1.0) X, Y = np.meshgrid(X, Y) Z = np.fromiter(map(lambda x: mp(x)[0], zip(X.flat,Y.flat)), dtype=np.float, count=X.shape[0]*X.shape[1]).reshape(X.shape) ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap=cm.jet, linewidth=0.2) plt.xlabel("x") plt.ylabel("y") plt.show()././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/benchmarks/rastrigin.py0000644000076500000240000000115114456461441020041 0ustar00runnerstafffrom mpl_toolkits.mplot3d import Axes3D from matplotlib import cm import matplotlib.pyplot as plt try: import numpy as np except: exit() from deap import benchmarks def rastrigin_arg0(sol): return benchmarks.rastrigin(sol)[0] fig = plt.figure() ax = Axes3D(fig, azim = -29, elev = 50) X = np.arange(-5, 5, 0.1) Y = np.arange(-5, 5, 0.1) X, Y = np.meshgrid(X, Y) Z = np.fromiter(map(rastrigin_arg0, zip(X.flat,Y.flat)), dtype=np.float, count=X.shape[0]*X.shape[1]).reshape(X.shape) ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap=cm.jet, linewidth=0.2) plt.xlabel("x") plt.ylabel("y") 
plt.show()././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/benchmarks/rosenbrock.py0000644000076500000240000000126614456461441020215 0ustar00runnerstafffrom mpl_toolkits.mplot3d import Axes3D from matplotlib import cm from matplotlib.colors import LogNorm import matplotlib.pyplot as plt try: import numpy as np except: exit() from deap import benchmarks def rosenbrock_arg0(sol): return benchmarks.rosenbrock(sol)[0] fig = plt.figure() # ax = Axes3D(fig, azim = -29, elev = 50) ax = Axes3D(fig) X = np.arange(-2, 2, 0.1) Y = np.arange(-1, 3, 0.1) X, Y = np.meshgrid(X, Y) Z = np.fromiter(map(rosenbrock_arg0, zip(X.flat,Y.flat)), dtype=np.float, count=X.shape[0]*X.shape[1]).reshape(X.shape) ax.plot_surface(X, Y, Z, rstride=1, cstride=1, norm=LogNorm(), cmap=cm.jet, linewidth=0.2) plt.xlabel("x") plt.ylabel("y") plt.show()././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/benchmarks/schaffer.py0000644000076500000240000000117714456461441017630 0ustar00runnerstafffrom mpl_toolkits.mplot3d import Axes3D from matplotlib import cm import matplotlib.pyplot as plt try: import numpy as np except: exit() from deap import benchmarks def schaffer_arg0(sol): return benchmarks.schaffer(sol)[0] fig = plt.figure() ax = Axes3D(fig, azim = -29, elev = 60) # ax = Axes3D(fig) X = np.arange(-25, 25, 0.25) Y = np.arange(-25, 25, 0.25) X, Y = np.meshgrid(X, Y) Z = np.fromiter(map(schaffer_arg0, zip(X.flat,Y.flat)), dtype=np.float, count=X.shape[0]*X.shape[1]).reshape(X.shape) ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap=cm.jet, linewidth=0.2) plt.xlabel("x") plt.ylabel("y") plt.show()././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/benchmarks/schwefel.py0000644000076500000240000000117714456461441017647 0ustar00runnerstafffrom mpl_toolkits.mplot3d import Axes3D from matplotlib import cm import 
matplotlib.pyplot as plt try: import numpy as np except: exit() from deap import benchmarks def schwefel_arg0(sol): return benchmarks.schwefel(sol)[0] fig = plt.figure() # ax = Axes3D(fig, azim = -29, elev = 50) ax = Axes3D(fig) X = np.arange(-500, 500, 10) Y = np.arange(-500, 500, 10) X, Y = np.meshgrid(X, Y) Z = np.fromiter(map(schwefel_arg0, zip(X.flat,Y.flat)), dtype=np.float, count=X.shape[0]*X.shape[1]).reshape(X.shape) ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap=cm.jet, linewidth=0.2) plt.xlabel("x") plt.ylabel("y") plt.show()././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/benchmarks/shekel.py0000644000076500000240000000155714456461441017324 0ustar00runnerstafffrom mpl_toolkits.mplot3d import Axes3D from matplotlib import cm from matplotlib.colors import LogNorm import matplotlib.pyplot as plt try: import numpy as np except: exit() from deap import benchmarks #NUMMAX = 5 #A = 10 * np.random.rand(NUMMAX, 2) #C = np.random.rand(NUMMAX) A = [[0.5, 0.5], [0.25, 0.25], [0.25, 0.75], [0.75, 0.25], [0.75, 0.75]] C = [0.002, 0.005, 0.005, 0.005, 0.005] def shekel_arg0(sol): return benchmarks.shekel(sol, A, C)[0] fig = plt.figure() # ax = Axes3D(fig, azim = -29, elev = 50) ax = Axes3D(fig) X = np.arange(0, 1, 0.01) Y = np.arange(0, 1, 0.01) X, Y = np.meshgrid(X, Y) Z = np.fromiter(map(shekel_arg0, zip(X.flat,Y.flat)), dtype=np.float, count=X.shape[0]*X.shape[1]).reshape(X.shape) ax.plot_surface(X, Y, Z, rstride=1, cstride=1, norm=LogNorm(), cmap=cm.jet, linewidth=0.2) plt.xlabel("x") plt.ylabel("y") plt.show()././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.6683269 deap-1.4.1/doc/code/examples/0000755000076500000240000000000014456461475015177 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/examples/nsga3_ref_points.py0000644000076500000240000000155314456461441021011 
0ustar00runnerstaffimport matplotlib.pyplot as plt from mpl_toolkits.mplot3d import Axes3D import numpy from deap import tools NOBJ = 3 P = [12] SCALES = [1] fig = plt.figure(figsize=(7, 7)) ax = fig.add_subplot(111, projection="3d") # the coordinate origin ax.scatter(0, 0, 0, c="k", marker="+", s=100) # reference points ref_points = [tools.uniform_reference_points(NOBJ, p, s) for p, s in zip(P, SCALES)] ref_points = numpy.concatenate(ref_points) _, uniques = numpy.unique(ref_points, axis=0, return_index=True) ref_points = ref_points[uniques] ax.scatter(ref_points[:, 0], ref_points[:, 1], ref_points[:, 2], marker="o", s=48) # final figure details ax.set_xlabel("$f_1(\mathbf{x})$", fontsize=15) ax.set_ylabel("$f_2(\mathbf{x})$", fontsize=15) ax.set_zlabel("$f_3(\mathbf{x})$", fontsize=15) ax.view_init(elev=11, azim=-25) ax.autoscale(tight=True) plt.tight_layout() plt.show() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/examples/nsga3_ref_points_combined.py0000644000076500000240000000207014456461441022644 0ustar00runnerstaffimport matplotlib.pyplot as plt from mpl_toolkits.mplot3d import Axes3D import numpy from deap import tools NOBJ = 3 P = [2, 1] SCALES = [1, 0.5] fig = plt.figure(figsize=(7, 7)) ax = fig.add_subplot(111, projection="3d") # the coordinate origin ax.scatter(0, 0, 0, c="k", marker="+", s=100) # reference points # Parameters NOBJ = 3 P = [2, 1] SCALES = [1, 0.5] # Create, combine and removed duplicates ref_points = [tools.uniform_reference_points(NOBJ, p, s) for p, s in zip(P, SCALES)] ref_points = numpy.concatenate(ref_points, axis=0) _, uniques = numpy.unique(ref_points, axis=0, return_index=True) ref_points = ref_points[uniques] ## for subset, p, s in zip(ref_points, P, SCALES): ax.scatter(subset[:, 0], subset[:, 1], subset[:, 2], marker="o", s=48, label="p = {}, scale = {}".format(p, s)) # final figure details ax.set_xlabel("$f_1(\mathbf{x})$", fontsize=15) 
ax.set_ylabel("$f_2(\mathbf{x})$", fontsize=15) ax.set_zlabel("$f_3(\mathbf{x})$", fontsize=15) ax.view_init(elev=11, azim=-25) ax.autoscale(tight=True) plt.legend() plt.tight_layout() plt.show() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/examples/nsga3_ref_points_combined_plot.py0000644000076500000240000000207614456461441023710 0ustar00runnerstaffimport matplotlib.pyplot as plt from mpl_toolkits.mplot3d import Axes3D import numpy from deap import tools NOBJ = 3 P = [2, 1] SCALES = [1, 0.5] fig = plt.figure(figsize=(7, 7)) ax = fig.add_subplot(111, projection="3d") # the coordinate origin ax.scatter(0, 0, 0, c="k", marker="+", s=100) # reference points # Parameters NOBJ = 3 P = [2, 1] SCALES = [1, 0.5] # Create, combine and removed duplicates ref_points = [tools.uniform_reference_points(NOBJ, p, s) for p, s in zip(P, SCALES)] # ref_points = numpy.concatenate(ref_points, axis=0) # _, uniques = numpy.unique(ref_points, axis=0, return_index=True) # ref_points = ref_points[uniques] ## for subset, p, s in zip(ref_points, P, SCALES): ax.scatter(subset[:, 0], subset[:, 1], subset[:, 2], marker="o", s=48, label="p = {}, scale = {}".format(p, s)) # final figure details ax.set_xlabel("$f_1(\mathbf{x})$", fontsize=15) ax.set_ylabel("$f_2(\mathbf{x})$", fontsize=15) ax.set_zlabel("$f_3(\mathbf{x})$", fontsize=15) ax.view_init(elev=11, azim=-25) ax.autoscale(tight=True) plt.legend() plt.tight_layout() plt.show() ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.6146853 deap-1.4.1/doc/code/tutorials/0000755000076500000240000000000014456461475015407 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.6688406 deap-1.4.1/doc/code/tutorials/part_1/0000755000076500000240000000000014456461475016575 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 
deap-1.4.1/doc/code/tutorials/part_1/1_where_to_start.py0000644000076500000240000000413514456461441022414 0ustar00runnerstaff## 1.1 Types from deap import base, creator creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", list, fitness=creator.FitnessMin) ## 1.2 Initialization import random from deap import tools IND_SIZE = 10 toolbox = base.Toolbox() toolbox.register("attribute", random.random) toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attribute, n=IND_SIZE) toolbox.register("population", tools.initRepeat, list, toolbox.individual) ## 1.3 Operators def evaluate(individual): return sum(individual), toolbox.register("mate", tools.cxTwoPoint) toolbox.register("mutate", tools.mutGaussian, mu=0, sigma=1, indpb=0.1) toolbox.register("select", tools.selTournament, tournsize=3) toolbox.register("evaluate", evaluate) ## 1.4 Algorithms def main(): pop = toolbox.population(n=50) CXPB, MUTPB, NGEN = 0.5, 0.2, 40 # Evaluate the entire population fitnesses = map(toolbox.evaluate, pop) for ind, fit in zip(pop, fitnesses): ind.fitness.values = fit for g in range(NGEN): # Select the next generation individuals offspring = toolbox.select(pop, len(pop)) # Clone the selected individuals offspring = map(toolbox.clone, offspring) # Apply crossover and mutation on the offspring for child1, child2 in zip(offspring[::2], offspring[1::2]): if random.random() < CXPB: toolbox.mate(child1, child2) del child1.fitness.values del child2.fitness.values for mutant in offspring: if random.random() < MUTPB: toolbox.mutate(mutant) del mutant.fitness.values # Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in offspring if not ind.fitness.valid] fitnesses = map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit # The population is entirely replaced by the offspring pop[:] = offspring return pop if __name__ == "__main__": main() 
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.6751404 deap-1.4.1/doc/code/tutorials/part_2/0000755000076500000240000000000014456461475016576 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_2/2_1_fitness.py0000644000076500000240000000033414456461441021255 0ustar00runnerstaff## 2.1 Fitness from deap import base from deap import creator ## FitnessMin creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) ## FitnessMulti creator.create("FitnessMulti", base.Fitness, weights=(-1.0, 1.0)) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_2/2_2_1_list_of_floats.py0000644000076500000240000000115114456461441023030 0ustar00runnerstaff## 2.2.1 List of floats import random import array import numpy from deap import base from deap import creator from deap import tools creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", list, fitness=creator.FitnessMax) IND_SIZE=10 toolbox = base.Toolbox() toolbox.register("attr_float", random.random) toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_float, n=IND_SIZE) creator.create("Individual", array.array, typecode="d", fitness=creator.FitnessMax) creator.create("Individual", numpy.ndarray, fitness=creator.FitnessMax)././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_2/2_2_2_permutation.py0000644000076500000240000000067314456461441022401 0ustar00runnerstaff## 2.2.2 Permutation import random from deap import base from deap import creator from deap import tools creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", list, fitness=creator.FitnessMin) IND_SIZE=10 toolbox = base.Toolbox() toolbox.register("indices", random.sample, 
range(IND_SIZE), IND_SIZE) toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.indices) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_2/2_2_3_arithmetic_expression.py0000644000076500000240000000120414456461441024432 0ustar00runnerstaff## 2.2.3 Arithmetic expression import operator from deap import base from deap import creator from deap import gp from deap import tools pset = gp.PrimitiveSet("MAIN", arity=1) pset.addPrimitive(operator.add, 2) pset.addPrimitive(operator.sub, 2) pset.addPrimitive(operator.mul, 2) creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", gp.PrimitiveTree, fitness=creator.FitnessMin, pset=pset) toolbox = base.Toolbox() toolbox.register("expr", gp.genHalfAndHalf, pset=pset, min_=1, max_=2) toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.expr) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_2/2_2_4_evolution_strategy.py0000644000076500000240000000150014456461441023770 0ustar00runnerstaff## 2.2.4 Evolution Strategy import array import random from deap import base from deap import creator from deap import tools creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", array.array, typecode="d", fitness=creator.FitnessMin, strategy=None) creator.create("Strategy", array.array, typecode="d") def initES(icls, scls, size, imin, imax, smin, smax): ind = icls(random.uniform(imin, imax) for _ in range(size)) ind.strategy = scls(random.uniform(smin, smax) for _ in range(size)) return ind IND_SIZE = 10 MIN_VALUE, MAX_VALUE = -5., 5. MIN_STRAT, MAX_STRAT = -1., 1. 
toolbox = base.Toolbox() toolbox.register("individual", initES, creator.Individual, creator.Strategy, IND_SIZE, MIN_VALUE, MAX_VALUE, MIN_STRAT, MAX_STRAT) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_2/2_2_5_particle.py0000644000076500000240000000124714456461441021636 0ustar00runnerstaff## 2.2.6 Particle import random from deap import base from deap import creator from deap import tools creator.create("FitnessMax", base.Fitness, weights=(1.0, 1.0)) creator.create("Particle", list, fitness=creator.FitnessMax, speed=None, smin=None, smax=None, best=None) def initParticle(pcls, size, pmin, pmax, smin, smax): part = pcls(random.uniform(pmin, pmax) for _ in range(size)) part.speed = [random.uniform(smin, smax) for _ in range(size)] part.smin = smin part.smax = smax return part toolbox = base.Toolbox() toolbox.register("particle", initParticle, creator.Particle, size=2, pmin=-6, pmax=6, smin=-3, smax=3) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_2/2_2_6_funky_one.py0000644000076500000240000000111314456461441022021 0ustar00runnerstaff## 2.2.6 Funky one import random from deap import base from deap import creator from deap import tools creator.create("FitnessMax", base.Fitness, weights=(1.0, 1.0)) creator.create("Individual", list, fitness=creator.FitnessMax) toolbox = base.Toolbox() INT_MIN, INT_MAX = 5, 10 FLT_MIN, FLT_MAX = -0.2, 0.8 N_CYCLES = 4 toolbox.register("attr_int", random.randint, INT_MIN, INT_MAX) toolbox.register("attr_flt", random.uniform, FLT_MIN, FLT_MAX) toolbox.register("individual", tools.initCycle, creator.Individual, (toolbox.attr_int, toolbox.attr_flt), n=N_CYCLES) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_2/2_3_1_bag.py0000644000076500000240000000102714456461441020555 0ustar00runnerstaff## 2.3.1 
Bag import random from deap import base from deap import creator from deap import tools creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", list, fitness=creator.FitnessMin) IND_SIZE = 20 toolbox = base.Toolbox() toolbox.register("attr_int", random.randint, -20, 20) toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_int, n=IND_SIZE) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.population(n=100) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_2/2_3_2_grid.py0000644000076500000240000000122214456461441020747 0ustar00runnerstaff## 2.3.2 Grid import random from deap import base from deap import creator from deap import tools creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", list, fitness=creator.FitnessMin) IND_SIZE = 20 toolbox = base.Toolbox() toolbox.register("attr_float", random.random) toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_float, n=IND_SIZE) N_ROW, N_COL = 20, 10 toolbox.register("row", tools.initRepeat, list, toolbox.individual, n=N_COL) toolbox.register("population", tools.initRepeat, list, toolbox.row, n=N_ROW) population = toolbox.population() population[9][9] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_2/2_3_3_swarm.py0000644000076500000240000000147314456461441021164 0ustar00runnerstaff## 2.2.6 Particle import random from deap import base from deap import creator from deap import tools creator.create("FitnessMax", base.Fitness, weights=(1.0, 1.0)) creator.create("Particle", list, fitness=creator.FitnessMax, speed=None, smin=None, smax=None, best=None) creator.create("Swarm", list, gbest=None, gbestfit=creator.FitnessMax) def initParticle(pcls, size, pmin, pmax, smin, smax): part = pcls(random.uniform(pmin, pmax) 
for _ in range(size)) part.speed = [random.uniform(smin, smax) for _ in range(size)] part.smin = smin part.smax = smax return part toolbox = base.Toolbox() toolbox.register("particle", initParticle, creator.Particle, size=2, pmin=-6, pmax=6, smin=-3, smax=3) toolbox.register("swarm", tools.initRepeat, creator.Swarm, toolbox.particle) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_2/2_3_4_demes.py0000644000076500000240000000111114456461441021116 0ustar00runnerstaff## 2.3.4 Demes import random from deap import base from deap import creator from deap import tools creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", list, fitness=creator.FitnessMin) IND_SIZE=10 toolbox = base.Toolbox() toolbox.register("indices", random.sample, range(IND_SIZE), IND_SIZE) toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.indices) toolbox.register("deme", tools.initRepeat, list, toolbox.individual) DEME_SIZES = 10, 50, 100 population = [toolbox.deme(n=i) for i in DEME_SIZES] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_2/2_3_5_seeding_a_population.py0000644000076500000240000000126714456461441024226 0ustar00runnerstaff# 2.3.5 Seeding a population import json from deap import base from deap import creator creator.create("FitnessMax", base.Fitness, weights=(1.0, 1.0)) creator.create("Individual", list, fitness=creator.FitnessMax) def initIndividual(icls, content): return icls(content) def initPopulation(pcls, ind_init, filename): with open(filename, "r") as pop_file: contents = json.load(pop_file) return pcls(ind_init(c) for c in contents) toolbox = base.Toolbox() toolbox.register("individual_guess", initIndividual, creator.Individual) toolbox.register("population_guess", initPopulation, list, toolbox.individual_guess, "my_guess.json") population = 
toolbox.population_guess() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_2/my_guess.json0000644000076500000240000000006314456461441021314 0ustar00runnerstaff[ [1,2,3,4,5,6], [0,0,0,0,1,1], [1,1,1,1,1,1] ] ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1689936700.679048 deap-1.4.1/doc/code/tutorials/part_3/0000755000076500000240000000000014456461475016577 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_3/3_6_2_tool_decoration.py0000644000076500000240000000140314456461441023214 0ustar00runnerstafffrom deap import base from deap import creator from deap import tools toolbox = base.Toolbox() MIN, MAX = -5, 5 def checkBounds(min, max): def decorator(func): def wrapper(*args, **kargs): offspring = func(*args, **kargs) for child in offspring: for i in range(len(child)): if child[i] > max: child[i] = max elif child[i] < min: child[i] = min return offspring return wrapper return decorator toolbox.register("mate", tools.cxBlend, alpha=0.2) toolbox.register("mutate", tools.mutGaussian, mu=0, sigma=2) toolbox.decorate("mate", checkBounds(MIN, MAX)) toolbox.decorate("mutate", checkBounds(MIN, MAX)) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_3/3_6_using_the_toolbox.py0000644000076500000240000000356314456461441023353 0ustar00runnerstaff## 3.6 Using the Toolbox from deap import base from deap import tools toolbox = base.Toolbox() def evaluateInd(individual): # Do some computation result = sum(individual) return result, toolbox.register("mate", tools.cxTwoPoint) toolbox.register("mutate", tools.mutGaussian, mu=0, sigma=1, indpb=0.2) toolbox.register("select", tools.selTournament, tournsize=3) toolbox.register("evaluate", evaluateInd) ## Data structure and initializer creation import 
random from deap import creator creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", list, fitness=creator.FitnessMax) toolbox.register("attr_float", random.random) toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_float, 10) toolbox.register("population", tools.initRepeat, list, toolbox.individual) pop = toolbox.population(n=100) CXPB, MUTPB, NGEN= 0.7, 0.3, 25 ## 3.6.1 Using the Tools for g in range(NGEN): # Select the next generation individuals offspring = toolbox.select(pop, len(pop)) # Clone the selected individuals offspring = map(toolbox.clone, offspring) # Apply crossover on the offspring for child1, child2 in zip(offspring[::2], offspring[1::2]): if random.random() < CXPB: toolbox.mate(child1, child2) del child1.fitness.values del child2.fitness.values # Apply mutation on the offspring for mutant in offspring: if random.random() < MUTPB: toolbox.mutate(mutant) del mutant.fitness.values # Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in offspring if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit # The population is entirely replaced by the offspring pop[:] = offspring././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_3/3_7_variations.py0000644000076500000240000000307614456461441021777 0ustar00runnerstaff## 3.7 Variations import random from deap import base from deap import creator from deap import tools ## Data structure and initializer creation creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", list, fitness=creator.FitnessMax) toolbox = base.Toolbox() toolbox.register("attr_float", random.random) toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_float, 10) toolbox.register("population", tools.initRepeat, list, 
toolbox.individual) def onemax(individual): return sum(individual), toolbox.register("mate", tools.cxTwoPoint) toolbox.register("mutate", tools.mutGaussian, mu=0, sigma=1, indpb=0.2) toolbox.register("select", tools.selTournament, tournsize=3) toolbox.register("evaluate", onemax) pop = toolbox.population(n=100) CXPB, MUTPB, NGEN= 0.7, 0.3, 25 fitnesses = toolbox.map(toolbox.evaluate, pop) for ind, fit in zip(pop, fitnesses): ind.fitness.values = fit from deap import algorithms for g in range(NGEN): # Select and clone the next generation individuals offspring = map(toolbox.clone, toolbox.select(pop, len(pop))) # Apply crossover and mutation on the offspring offspring = algorithms.varAnd(offspring, toolbox, CXPB, MUTPB) # Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in offspring if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit # The population is entirely replaced by the offspring pop[:] = offspring././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_3/3_8_algorithms.py0000644000076500000240000000203214456461441021761 0ustar00runnerstaff## 3.7 Variations import random from deap import base from deap import creator from deap import tools ## Data structure and initializer creation creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", list, fitness=creator.FitnessMax) toolbox = base.Toolbox() toolbox.register("attr_float", random.random) toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_float, 10) toolbox.register("population", tools.initRepeat, list, toolbox.individual) def onemax(individual): return sum(individual), toolbox.register("mate", tools.cxTwoPoint) toolbox.register("mutate", tools.mutGaussian, mu=0, sigma=1, indpb=0.2) toolbox.register("select", tools.selTournament, tournsize=3) 
toolbox.register("evaluate", onemax) pop = toolbox.population(n=100) CXPB, MUTPB, NGEN= 0.7, 0.3, 25 fitnesses = toolbox.map(toolbox.evaluate, pop) for ind, fit in zip(pop, fitnesses): ind.fitness.values = fit from deap import algorithms algorithms.eaSimple(pop, toolbox, cxpb=0.5, mutpb=0.2, ngen=50) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_3/3_next_step.py0000644000076500000240000000276314456461441021405 0ustar00runnerstaff## 3.1 A First Individual import random from deap import base from deap import creator from deap import tools IND_SIZE = 5 creator.create("FitnessMin", base.Fitness, weights=(-1.0, -1.0)) creator.create("Individual", list, fitness=creator.FitnessMin) toolbox = base.Toolbox() toolbox.register("attr_float", random.random) toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_float, n=IND_SIZE) ind1 = toolbox.individual() print(ind1) # [0.86..., 0.27..., 0.70..., 0.03..., 0.87...] print(ind1.fitness.valid) # False ## 3.2 Evaluation def evaluate(individual): # Do some hard computing on the individual a = sum(individual) b = len(individual) return a, 1. 
/ b ind1.fitness.values = evaluate(ind1) print(ind1.fitness.valid) # True print(ind1.fitness) # (2.73, 0.2) ## 3.3 Mutation mutant = toolbox.clone(ind1) ind2, = tools.mutGaussian(mutant, mu=0.0, sigma=0.2, indpb=0.2) del mutant.fitness.values print(ind2 is mutant) # True print(mutant is ind1) # False ## 3.4 Crossover child1, child2 = [toolbox.clone(ind) for ind in (ind1, ind2)] tools.cxBlend(child1, child2, 0.5) del child1.fitness.values del child2.fitness.values ## 3.5 Selection selected = tools.selBest([child1, child2], 2) print(child1 in selected) # True ## 3.5 Note LAMBDA = 10 toolbox.register("select", tools.selRandom) population = [ind1, ind2]*10 selected = toolbox.select(population, LAMBDA) offspring = [toolbox.clone(ind) for ind in selected] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_3/logbook.py0000644000076500000240000000257714456461441020611 0ustar00runnerstaffimport pickle from deap import tools from stats import record logbook = tools.Logbook() logbook.record(gen=0, evals=30, **record) print(logbook) gen, avg = logbook.select("gen", "avg") with open("logbook.pkl", "w") as lb_file: pickle.dump(logbook, lb_file) # Cleaning the pickle file ... 
import os os.remove("logbook.pkl") logbook.header = "gen", "avg", "spam" print(logbook) print(logbook.stream) logbook.record(gen=1, evals=15, **record) print(logbook.stream) from multistats import record logbook = tools.Logbook() logbook.record(gen=0, evals=30, **record) logbook.header = "gen", "evals", "fitness", "size" logbook.chapters["fitness"].header = "min", "avg", "max" logbook.chapters["size"].header = "min", "avg", "max" print(logbook) gen = logbook.select("gen") fit_mins = logbook.chapters["fitness"].select("min") size_avgs = logbook.chapters["size"].select("avg") import matplotlib.pyplot as plt fig, ax1 = plt.subplots() line1 = ax1.plot(gen, fit_mins, "b-", label="Minimum Fitness") ax1.set_xlabel("Generation") ax1.set_ylabel("Fitness", color="b") for tl in ax1.get_yticklabels(): tl.set_color("b") ax2 = ax1.twinx() line2 = ax2.plot(gen, size_avgs, "r-", label="Average Size") ax2.set_ylabel("Size", color="r") for tl in ax2.get_yticklabels(): tl.set_color("r") lns = line1 + line2 labs = [l.get_label() for l in lns] ax1.legend(lns, labs, loc="center right") plt.show()././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_3/multistats.py0000644000076500000240000000340014456461441021350 0ustar00runnerstaffimport operator import random import numpy from deap import algorithms from deap import base from deap import creator from deap import gp from deap import tools random.seed(0) stats_fit = tools.Statistics(key=lambda ind: ind.fitness.values) stats_size = tools.Statistics(key=len) mstats = tools.MultiStatistics(fitness=stats_fit, size=stats_size) mstats.register("avg", numpy.mean) mstats.register("std", numpy.std) mstats.register("min", numpy.min) mstats.register("max", numpy.max) creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", gp.PrimitiveTree, fitness=creator.FitnessMin) pset = gp.PrimitiveSet("MAIN", 1) pset.addPrimitive(operator.add, 2) 
pset.addPrimitive(operator.mul, 2) pset.addEphemeralConstant("rand101", lambda: random.randint(-1,1)) pset.renameArguments(ARG0='x') def evalSymbReg(individual, points): # Transform the tree expression in a callable function func = toolbox.compile(expr=individual) # Evaluate the mean squared error between the expression # and the real function : x**4 + x**3 + x**2 + x sqerrors = ((func(x) - x**4 - x**3 - x**2 - x)**2 for x in points) return sum(sqerrors) / len(points), toolbox = base.Toolbox() toolbox.register("expr", gp.genHalfAndHalf, pset=pset, min_=1, max_=2) toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.expr) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("evaluate", evalSymbReg, points=[x/10. for x in range(-10,10)]) toolbox.register("compile", gp.compile, pset=pset) pop = toolbox.population(n=100) # Evaluate the individuals fitnesses = map(toolbox.evaluate, pop) for ind, fit in zip(pop, fitnesses): ind.fitness.values = fit record = mstats.compile(pop) print(record) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_3/stats.py0000644000076500000240000000262214456461441020302 0ustar00runnerstaffimport random import numpy from deap import algorithms from deap import base from deap import creator from deap import tools random.seed(0) stats = tools.Statistics(key=lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) def evalOneMax(individual): return sum(individual), creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", list, fitness=creator.FitnessMax) toolbox = base.Toolbox() toolbox.register("attr_bool", random.randint, 0, 1) toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_bool, 100) toolbox.register("population", tools.initRepeat, 
list, toolbox.individual) toolbox.register("evaluate", evalOneMax) pop = toolbox.population(n=100) # Evaluate the individuals fitnesses = map(toolbox.evaluate, pop) for ind, fit in zip(pop, fitnesses): ind.fitness.values = fit record = stats.compile(pop) print(record) stats = tools.Statistics(key=lambda ind: ind.fitness.values) stats.register("avg", numpy.mean, axis=0) stats.register("std", numpy.std, axis=0) stats.register("min", numpy.min, axis=0) stats.register("max", numpy.max, axis=0) record = stats.compile(pop) print(record) pop, logbook = algorithms.eaSimple(pop, toolbox, cxpb=0.5, mutpb=0.2, ngen=0, stats=stats, verbose=True)././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.6816125 deap-1.4.1/doc/code/tutorials/part_4/0000755000076500000240000000000014456461475016600 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_4/4_4_Using_Cpp_NSGA.py0000644000076500000240000001150414456461441022311 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . 
import random from deap import algorithms from deap import base from deap import creator from deap import tools from deap import cTools import sortingnetwork as sn INPUTS = 6 def evalEvoSN(individual, dimension): network = sn.SortingNetwork(dimension, individual) return network.assess(), network.length, network.depth def genWire(dimension): return (random.randrange(dimension), random.randrange(dimension)) def genNetwork(dimension, min_size, max_size): size = random.randint(min_size, max_size) return [genWire(dimension) for i in range(size)] def mutWire(individual, dimension, indpb): for index, elem in enumerate(individual): if random.random() < indpb: individual[index] = genWire(dimension) def mutAddWire(individual, dimension): index = random.randint(0, len(individual)) individual.insert(index, genWire(dimension)) def mutDelWire(individual): index = random.randrange(len(individual)) del individual[index] creator.create("FitnessMin", base.Fitness, weights=(-1.0, -1.0, -1.0)) creator.create("Individual", list, fitness=creator.FitnessMin) toolbox = base.Toolbox() # Gene initializer toolbox.register("network", genNetwork, dimension=INPUTS, min_size=9, max_size=12) # Structure initializers toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.network) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("evaluate", evalEvoSN, dimension=INPUTS) toolbox.register("mate", tools.cxTwoPoint) toolbox.register("mutate", mutWire, dimension=INPUTS, indpb=0.05) toolbox.register("addwire", mutAddWire, dimension=INPUTS) toolbox.register("delwire", mutDelWire) toolbox.register("select", cTools.selNSGA2) def main(): random.seed(64) population = toolbox.population(n=300) hof = tools.ParetoFront() stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("Avg", tools.mean) stats.register("Std", tools.std) stats.register("Min", min) stats.register("Max", max) CXPB, MUTPB, ADDPB, DELPB, NGEN = 0.5, 0.2, 0.01, 
0.01, 40 # Evaluate every individuals fitnesses = toolbox.map(toolbox.evaluate, population) for ind, fit in zip(population, fitnesses): ind.fitness.values = fit hof.update(population) stats.update(population) # Begin the evolution for g in range(NGEN): print("-- Generation %i --" % g) offspring = [toolbox.clone(ind) for ind in population] # Apply crossover and mutation for ind1, ind2 in zip(offspring[::2], offspring[1::2]): if random.random() < CXPB: toolbox.mate(ind1, ind2) del ind1.fitness.values del ind2.fitness.values # Note here that we have a different scheme of mutation than in the # original algorithm, we use 3 different mutations subsequently. for ind in offspring: if random.random() < MUTPB: toolbox.mutate(ind) del ind.fitness.values if random.random() < ADDPB: toolbox.addwire(ind) del ind.fitness.values if random.random() < DELPB: toolbox.delwire(ind) del ind.fitness.values # Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in offspring if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit print(" Evaluated %i individuals" % len(invalid_ind)) population = toolbox.select(population+offspring, len(offspring)) hof.update(population) stats.update(population) print(" Min %s" % stats.Min[0][-1][0]) print(" Max %s" % stats.Max[0][-1][0]) print(" Avg %s" % stats.Avg[0][-1][0]) print(" Std %s" % stats.Std[0][-1][0]) best_network = sn.SortingNetwork(INPUTS, hof[0]) print(best_network) print(best_network.draw()) print("%i errors, length %i, depth %i" % hof[0].fitness.values) return population, stats, hof if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_4/4_5_home_made_eval_func.py0000644000076500000240000001263114456461441023555 0ustar00runnerstaff# This file is part of DEAP. 
# # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . import sys import random import logging import time logging.basicConfig(level=logging.DEBUG, stream=sys.stdout) from deap import algorithms from deap import base from deap import creator from deap import tools import SNC as snc # ... import sortingnetwork as sn INPUTS = 11 def evalEvoSN(individual, dimension): fit,depth,length= snc.evalNetwork(dimension, individual) return fit, length, depth def genWire(dimension): return (random.randrange(dimension), random.randrange(dimension)) def genNetwork(dimension, min_size, max_size): size = random.randint(min_size, max_size) return [genWire(dimension) for i in range(size)] def mutWire(individual, dimension, indpb): for index, elem in enumerate(individual): if random.random() < indpb: individual[index] = genWire(dimension) def mutAddWire(individual, dimension): index = random.randint(0, len(individual)) individual.insert(index, genWire(dimension)) def mutDelWire(individual): index = random.randrange(len(individual)) del individual[index] creator.create("FitnessMin", base.Fitness, weights=(-1.0, -1.0, -1.0)) creator.create("Individual", list, fitness=creator.FitnessMin) toolbox = base.Toolbox() # Gene initializer toolbox.register("network", genNetwork, dimension=INPUTS, min_size=9, max_size=12) # Structure initializers toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.network) toolbox.register("population", 
tools.initRepeat, list, toolbox.individual) # ... toolbox.register("evaluate", evalEvoSN, dimension=INPUTS) toolbox.register("mate", tools.cxTwoPoint) toolbox.register("mutate", mutWire, dimension=INPUTS, indpb=0.05) toolbox.register("addwire", mutAddWire, dimension=INPUTS) toolbox.register("delwire", mutDelWire) toolbox.register("select", tools.selNSGA2) def main(): random.seed(64) population = toolbox.population(n=500) hof = tools.ParetoFront() stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("Avg", tools.mean) stats.register("Std", tools.std) stats.register("Min", min) stats.register("Max", max) CXPB, MUTPB, ADDPB, DELPB, NGEN = 0.5, 0.2, 0.01, 0.01, 10 # Evaluate every individuals fitnesses = toolbox.map(toolbox.evaluate, population) for ind, fit in zip(population, fitnesses): ind.fitness.values = fit hof.update(population) stats.update(population) # Begin the evolution for g in range(NGEN): t1 = time.time() print("-- Generation %i --" % g) offspring = [toolbox.clone(ind) for ind in population] t2 = time.time() # Apply crossover and mutation for ind1, ind2 in zip(offspring[::2], offspring[1::2]): if random.random() < CXPB: toolbox.mate(ind1, ind2) del ind1.fitness.values del ind2.fitness.values t3 = time.time() # Note here that we have a different scheme of mutation than in the # original algorithm, we use 3 different mutations subsequently. 
for ind in offspring: if random.random() < MUTPB: toolbox.mutate(ind) del ind.fitness.values if random.random() < ADDPB: toolbox.addwire(ind) del ind.fitness.values if random.random() < DELPB: toolbox.delwire(ind) del ind.fitness.values t4 = time.time() # Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in offspring if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit print(" Evaluated %i individuals" % len(invalid_ind)) t5 = time.time() population = toolbox.select(population+offspring, len(offspring)) t6 = time.time() #hof.update(population) stats.update(population) t7 = time.time() print(stats) print("Times :") print("Clone : " + str(t2-t1) + " (" + str((t2-t1)/(t7-t1)) +"%)") print("Cx : " + str(t3-t2) + " (" + str((t3-t2)/(t7-t1)) +"%)") print("Mut : " + str(t4-t3) + " (" + str((t4-t3)/(t7-t1)) +"%)") print("Eval : " + str(t5-t4) + " (" + str((t5-t4)/(t7-t1)) +"%)") print("Select : " + str(t6-t5) + " (" + str((t6-t5)/(t7-t1)) +"%)") print("HOF + stats : " + str(t7-t6) + " (" + str((t7-t6)/(t7-t1)) +"%)") print("TOTAL : " + str(t7-t1)) best_network = sn.SortingNetwork(INPUTS, hof[0]) print(best_network) print(best_network.draw()) print("%i errors, length %i, depth %i" % hof[0].fitness.values) return population, stats, hof if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/code/tutorials/part_4/SNC.cpp0000644000076500000240000001371114456461441017723 0ustar00runnerstaff/* This file is part of DEAP. DEAP is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. 
DEAP is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with DEAP. If not, see . */ /* This file is an example of a C evaluation function interfaced with DEAP; * It implements the evaluator of the sorting network problems, making it * about five times faster (up to 16x if used in combination with the * optimized cTools.nsga2 selection algorithm). * * To compile it, you may want to use distutils to simplify the linking, like * the installSN.py script provided with this package. * You may then import it like any other Python module : import SNC SNC.evalNetwork(dimension, individual) * See 4_5_home_made_eval_func.py for a comprehensive example. */ #include #include #include #include // Set of connectors that can be applied in parallel. typedef std::map Level; static PyObject* evalNetwork(PyObject *self, PyObject *args){ // Retrieve arguments (first : network dimensions; second : list of connectors [tuples]) PyObject *inDimensions = PyTuple_GetItem(args, 0); PyObject *listNetwork = PyTuple_GetItem(args, 1); std::vector mNetwork; const unsigned int inputs_size = (unsigned int)PyInt_AS_LONG(inDimensions); const unsigned int lNbTests = (1u << inputs_size); unsigned long lCountMisses = 0; unsigned int inWire1, inWire2; const unsigned long lgth = PyList_Size(listNetwork); // Network creation for(unsigned int k = 0; k < lgth; k++){ // Retrieve endpoint values inWire1 = (unsigned int)PyInt_AS_LONG(PyTuple_GetItem(PyList_GetItem(listNetwork,k),0)); inWire2 = (unsigned int)PyInt_AS_LONG(PyTuple_GetItem(PyList_GetItem(listNetwork,k),1)); // Check values of inWire1 and inWire2 if(inWire1 == inWire2) continue; if(inWire1 > inWire2) { const unsigned int lTmp = inWire1; inWire1 = inWire2; inWire2 = lTmp; } // Nothing in network, 
create new level and connector if(mNetwork.empty()) { Level lLevel; lLevel[inWire1] = inWire2; mNetwork.push_back(lLevel); continue; } // Iterator to the connector at current level, after mWire1 bool lConflict = false; Level::const_iterator lIterConnNext = mNetwork.back().begin(); for(; (lIterConnNext != mNetwork.back().end()) && (inWire1 > lIterConnNext->first); ++lIterConnNext); if(lIterConnNext != mNetwork.back().end()) { // Check if conflict with next connector and inWire2 if(inWire2 >= lIterConnNext->first) lConflict = true; } if(lIterConnNext != mNetwork.back().begin()) { // Iterator to the connector at current level, before mWire1 Level::const_iterator lIterConnPrev = lIterConnNext; --lIterConnPrev; // Check if conflict with previous connector and inWire1 if(inWire1 <= lIterConnPrev->second) lConflict = true; } // Add connector if(lConflict) { // Add new level of connectors Level lNextLevel; lNextLevel[inWire1] = inWire2; mNetwork.push_back(lNextLevel); } else { // Add connector to current level mNetwork.back()[inWire1] = inWire2; } } // Network test for(unsigned int i=0; i lSeqIOrig(inputs_size, 0.0); for(unsigned int j=0; j lSeqISorted(lSeqIOrig); for(unsigned int z=0; zfirst; const unsigned int lId2 = lIter->second; if(lSeqISorted[lId1] > lSeqISorted[lId2]) { const double lTmp = lSeqISorted[lId1]; lSeqISorted[lId1] = lSeqISorted[lId2]; lSeqISorted[lId2] = lTmp; } } } bool lIsSorted = true; bool lLastWasOne = false; for(unsigned int w=0; w. try: from itertools import product except ImportError: def product(*args, **kwds): # product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy # product(range(2), repeat=3) --> 000 001 010 011 100 101 110 111 pools = map(tuple, args) * kwds.get('repeat', 1) result = [[]] for pool in pools: result = [x+[y] for x in result for y in pool] for prod in result: yield tuple(prod) class SortingNetwork(list): """Sorting network class. 
From Wikipedia : A sorting network is an abstract mathematical model of a network of wires and comparator modules that is used to sort a sequence of numbers. Each comparator connects two wires and sort the values by outputting the smaller value to one wire, and a larger value to the other. """ def __init__(self, dimension, connectors = []): self.dimension = dimension for wire1, wire2 in connectors: self.addConnector(wire1, wire2) def addConnector(self, wire1, wire2): """Add a connector between wire1 and wire2 in the network.""" if wire1 == wire2: return if wire1 > wire2: wire1, wire2 = wire2, wire1 try: last_level = self[-1] except IndexError: # Empty network, create new level and connector self.append({wire1: wire2}) return for wires in last_level.items(): if wires[1] >= wire1 and wires[0] <= wire2: self.append({wire1: wire2}) return last_level[wire1] = wire2 def sort(self, values): """Sort the values in-place based on the connectors in the network.""" for level in self: for wire1, wire2 in level.items(): if values[wire1] > values[wire2]: values[wire1], values[wire2] = values[wire2], values[wire1] def assess(self, cases=None): """Try to sort the **cases** using the network, return the number of misses. If **cases** is None, test all possible cases according to the network dimensionality. 
""" if cases is None: cases = product(range(2), repeat=self.dimension) misses = 0 ordered = [[0]*(self.dimension-i) + [1]*i for i in range(self.dimension+1)] for sequence in cases: sequence = list(sequence) self.sort(sequence) misses += (sequence != ordered[sum(sequence)]) return misses def draw(self): """Return an ASCII representation of the network.""" str_wires = [["-"]*7 * self.depth] str_wires[0][0] = "0" str_wires[0][1] = " o" str_spaces = [] for i in range(1, self.dimension): str_wires.append(["-"]*7 * self.depth) str_spaces.append([" "]*7 * self.depth) str_wires[i][0] = str(i) str_wires[i][1] = " o" for index, level in enumerate(self): for wire1, wire2 in level.items(): str_wires[wire1][(index+1)*6] = "x" str_wires[wire2][(index+1)*6] = "x" for i in range(wire1, wire2): str_spaces[i][(index+1)*6+1] = "|" for i in range(wire1+1, wire2): str_wires[i][(index+1)*6] = "|" network_draw = "".join(str_wires[0]) for line, space in zip(str_wires[1:], str_spaces): network_draw += "\n" network_draw += "".join(space) network_draw += "\n" network_draw += "".join(line) return network_draw @property def depth(self): """Return the number of parallel steps that it takes to sort any input. """ return len(self) @property def length(self): """Return the number of comparison-swap used.""" return sum(len(level) for level in self) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/conf.py0000644000076500000240000001772714456461441013755 0ustar00runnerstaff# -*- coding: utf-8 -*- # # DEAP documentation build configuration file, created by # sphinx-quickstart on Sat Jan 30 13:21:43 2010. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. 
import sys, time # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.append("..") #sys.path.append(os.path.abspath('_ext/')) import deap # -- General configuration ----------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo', 'sphinx.ext.intersphinx', 'sphinx.ext.extlinks', 'sphinx.ext.viewcode'] try: import matplotlib except ImportError: pass else: extensions += ['matplotlib.sphinxext.only_directives', 'matplotlib.sphinxext.plot_directive'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. source_encoding = 'utf-8' # The master toctree document. master_doc = 'index' # General information about the project. project = u'DEAP' copyright = u'2009-%s, DEAP Project' % time.strftime('%Y') # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = deap.__version__ # The full version, including alpha/beta/rc tags. release = deap.__revision__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. 
#unused_docs = [] # List of directories, relative to source directory, that shouldn't be searched # for source files. exclude_trees = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'default' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, the todo will be printed in the documentation todo_include_todos = True # Search in python documentation intersphinx_mapping = {'python' : ('http://docs.python.org/', None), 'numpy' : ('http://docs.scipy.org/doc/numpy', None)} # Reload the cached values every 5 days intersphinx_cache_limit = 5 # -- Options for pyplot extension ---------------------------------------------- # Default value for the include-source option plot_include_source = False # Code that should be executed before each plot. #plot_pre_code # Base directory, to which ``plot::`` file names are relative # to. (If None or empty, file names are relative to the # directory where the file containing the directive is.) #plot_basedir # Whether to show links to the files in HTML. 
plot_html_show_formats = True

# -- Options for extlinks extension ----------------------------------------------

# Build the :example:`...` role so that example links point at the exact git
# revision the documentation was built from.
import subprocess
try:
    # check_output returns *bytes*; decode so the commit hash does not appear
    # as "b'...'" inside the formatted URL below (Python 3 fix).
    tree = subprocess.check_output(["git", "rev-parse", "HEAD"]).strip().decode("ascii")
except (OSError, subprocess.CalledProcessError):
    # OSError: git is not installed.
    # CalledProcessError: git exists but this is not a git checkout
    # (e.g. building from an sdist) -- previously uncaught and fatal.
    import warnings
    warnings.warn("Cannot link examples because we cannot retrieve the git version", Warning)
else:
    extlinks = {'example': ('https://github.com/DEAP/deap/blob/{tree}/examples/%s.py'.format(tree=tree),
                            "examples/")}

# -- Options for HTML output ---------------------------------------------------

# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ["_themes"]

# The theme to use for HTML and HTML Help pages.  Major themes that come with
# Sphinx are currently 'classic' and 'alabaster'.
html_theme = 'pydoctheme'
#RTFD.org does not support sphinx 1.3.1 yet.
#html_theme = 'alabaster'

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#html_theme_options = {'collapsiblesidebar': True}

# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = ""

# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = "deap_orange_icon_32.ico"

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. html_use_smartypants = True # Custom sidebar templates, maps document names to template names. html_sidebars = { 'index': ['indexsidebar.html'], } # Additional templates that should be rendered to pages, maps page names to # template names. html_additional_pages = {} # If false, no module index is generated. #html_use_modindex = True # If false, no index is generated. html_use_index = True # If true, the index is split into individual pages for each letter. html_split_index = True # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = '' # Output file base name for HTML help builder. htmlhelp_basename = 'DEAP-doc' # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). #latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('contents', 'DEAP.tex', u'DEAP Documentation', u'DEAP Project', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # Additional stuff for the LaTeX preamble. latex_preamble = r'\usepackage{amsmath,amssymb}' # Documents to append as an appendix to all manuals. 
#latex_appendices = [] # If false, no module index is generated. #latex_use_modindex = True ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/contributing.rst0000644000076500000240000000465514456461441015713 0ustar00runnerstaffContributing ============ Reporting a bug --------------- You can report a bug on deap Github issues page. ``_ Retrieving the latest code -------------------------- You can check the latest sources with the command:: git clone https://github.com/DEAP/deap.git Contributing code ----------------- The preferred way to contribute to deap is to fork the `main repository `__ on GitHub, then submit a "pull request" (PR): 1. Fork the `project repository `__: click on the 'Fork' button near the top of the page. This creates a copy of the code under your account on the GitHub server. 2. Clone your fork locally:: $ git clone git@github.com:YourLogin/deap.git 3. Create a branch to hold your changes:: $ git checkout -b my-feature and start making changes. Never work in the ``master`` branch! 4. When you're done editing, do:: $ git add modified_files $ git commit to record your changes in Git, then push them to GitHub with:: $ git push -u origin my-feature Finally, go to the web page of your fork of the deap repository, and click 'Pull request' to send your changes for review. You can also contact us on the deap users list at ``_. Coding guidelines ----------------- Most of those conventions are base on Python PEP8. *A style guide is about consistency. Consistency with this style guide is important. Consistency within a project is more important. Consistency within one module or function is most important.* Code layout +++++++++++ Same as PEP8. Imports +++++++ First imports in a file are the standard library module, then come the imports of deap module, and finally the custom module for a problem. Each block of imports should be separated by a new line. 
:: import system from deap import base import mymodule Whitespace in Expressions and Statements ++++++++++++++++++++++++++++++++++++++++ Same as PEP8. Comments ++++++++ Same as PEP8 Documentation Strings +++++++++++++++++++++ Same as PEP8 Naming Conventions ++++++++++++++++++ - **Module** : use the lowercase convention. - **Class** : class names use the CapWords? convention. - **Function** / Procedure : use the mixedCase convention. First word should be an action verb. - **Variable** : use the lower_case_with_underscores convention. ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.6912563 deap-1.4.1/doc/examples/0000755000076500000240000000000014456461475014265 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/examples/bipop_cmaes.rst0000644000076500000240000000411714456461441017274 0ustar00runnerstaff.. _bipopcma-es: ================================================ Controlling the Stopping Criteria: BI-POP CMA-ES ================================================ A variant of the Covariance Matrix Adaptation Evolution Strategy (CMA-ES) [Hansen2001]_ implies to control very specifically the termination criteria in the generational loop. This can be achieved by writing the algorithm partially invoking manually the :meth:`generate` and :meth:`update` inside a loop with specific stop criteria. In fact, the BI-POP CMA-ES [Hansen2009]_ has 9 different stop criteria, which are used to control the independent restarts, with different population sizes, of a standard CMA-ES. As usual, the first thing to do is to create the types and as usual, we'll need a minimizing fitness and an individual that is a :class:`list`. .. literalinclude:: /../examples/es/cma_bipop.py :lines: 34-35 The main function includes the setting of some parameters, namely the number of increasing population restarts and the initial sigma value. 
Then, the instantiation of the :class:`~deap.base.Toolbox` is done in the main function because it will change with the restarts. Next are initialized the :class:`~deap.tools.HallOfFame`, The :class:`~deap.tools.statistics` and the list of :class:`~deap.tools.Logbook` objects, one for each restart. .. literalinclude:: /../examples/es/cma_bipop.py :lines: 37-53 Then the first loop controlling the restart is set up. It encapsulates the generational loop with its many stop criteria. The content of this last loop is simply the generate-update loop as presented in the :func:`deap.algorithms.eaGenerateUpdate` function. .. literalinclude:: /../examples/es/cma_bipop.py :lines: 62,101-110,114-130,151-188,192-194 Some variables have been omitted for clarity, refer to the complete example for more details :example:`es/cma_bipop`. .. [Hansen2001] Hansen and Ostermeier, 2001. Completely Derandomized Self-Adaptation in Evolution Strategies. *Evolutionary Computation* .. [Hansen2009] Hansen, 2009. Benchmarking a BI-Population CMA-ES on the BBOB-2009 Function Testbed. *GECCO'09*././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/examples/cmaes.rst0000644000076500000240000000270714456461441016106 0ustar00runnerstaff.. _cma-es: =============================================== Covariance Matrix Adaptation Evolution Strategy =============================================== The Covariance Matrix Adaptation Evolution Strategy (CMA-ES) [Hansen2001]_ implemented in the :mod:`~deap.cma` module makes use of the generate-update paradigm where a population is generated from a strategy and the strategy is updated from the population. It is then straight forward to use it for continuous problem optimization. As usual the first thing to do is to create the types and as usual we'll need a minimizing fitness and an individual that is a :class:`list`. A toolbox is then created with the desired evaluation function. .. 
literalinclude:: /../examples/es/cma_minfct.py :lines: 28-32 Then, it does not get any harder. Once a :class:`~deap.cma.Strategy` is instantiated, its :meth:`~deap.cma.Strategy.generate` and :meth:`~deap.cma.Strategy.update` methods are registered in the toolbox for uses in the :func:`~deap.algorithms.eaGenerateUpdate` algorithm. The :meth:`~deap.cma.Strategy.generate` method is set to produce the created :class:`Individual` class. The random number generator from numpy is seeded because the :mod:`~deap.cma` module draws all its number from it. .. literalinclude:: /../examples/es/cma_minfct.py :lines: 34,36,37,41-50,52,54 .. [Hansen2001] Hansen and Ostermeier, 2001. Completely Derandomized Self-Adaptation in Evolution Strategies. *Evolutionary Computation* ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/examples/cmaes_plotting.rst0000644000076500000240000000502314456461441020020 0ustar00runnerstaff===================================================== Plotting Important Data: Visualizing the CMA Strategy ===================================================== With this example we show one technique for plotting the data of an evolution. As developers of DEAP we cannot make a choice on what data is important to plot and this part is left to the user. Although, plotting would all occur the same way. First the data is gathered during the evolution and at the end the figures are created from the data. This model is the simplest possible. One could also write all data to a file and read those file again to plot the figures. This later model would be more fault tolerant as if the evolution does not terminate normally, the figures could still be plotted. But, we want to keep this example as simple as possible and thus we will present the former model. Evolution Loop ============== The beginning of this example is exactly the same as the :ref:`CMA-ES ` example. 
The general evolution loop of function :func:`~deap.algorithms.eaGenerateUpdate` is somewhat insufficient for our purpose. We need to gather the required data on each generation. So instead of using the :func:`~deap.algorithms.eaGenerateUpdate` function, we'll develop it to get a grip on what is recorded. First, we'll create objects to record our data. Here we want to plot, in addition to what the :class:`~deap.tools.Logbook` and :class:`~deap.tools.HallOfFame` objects contain, the step size, the axis ratio and the major axis of the covariace matrix, the best value so far, the best coordinates so far and the standard deviation of the all coordinates at each generation. .. literalinclude:: /../examples/es/cma_plotting.py :lines: 59-64 Once the objects are created, the evolution loop, based on a generational stopping criterion, calls repeatedly the :meth:`generate`, :meth:`evaluate` and :meth:`update` methods registered in the toolbox. .. literalinclude:: /../examples/es/cma_plotting.py :lines: 66-75 Then, the previoulsy created objects start to play their role. The data is recorded in each object on each generation. .. literalinclude:: /../examples/es/cma_plotting.py :lines: 88-93 Now that the data is recorded the only thing left to do is to plot it. We'll use `matplotlib `_ to generate the graphics from the recorded data. .. literalinclude:: /../examples/es/cma_plotting.py :lines: 95-124 Which gives the following result. .. plot:: ../examples/es/cma_plotting.py :width: 67% The complete example : :example:`es/cma_plotting`. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/examples/coev_coop.rst0000644000076500000240000000556314456461441016775 0ustar00runnerstaff======================= Cooperative Coevolution ======================= This example explores cooperative coevolution using DEAP. This tutorial is not as complete as previous examples concerning type creation and other basic stuff. 
Instead, we cover the concepts of coevolution as they would be applied in DEAP. Assume that if a function from the toolbox is used, it has been properly registered. This example makes a great template for implementing your own coevolutionary algorithm, it is based on the description of cooperative coevolution by [Potter2001]_. Coevolution is, in fact, just an extension of how algorithms works in deap. Multiple populations are evolved in turn (or simultaneously on multiple processors) just like in traditional genetic algorithms. The implementation of the coevolution is thus straightforward. A first loop acts for iterating over the populations and a second loop iterates over the individuals of these population. The first step is to create a bunch of species that will evolve in our population. .. literalinclude:: /../examples/coev/coop_evol.py :lines: 72 Cooperative coevolution works by sending the best individual of each species (called representative) to help in the evaluation of the individuals of the other species. Since the individuals are not yet evaluated we select randomly the individuals that will be in the set of representatives. .. literalinclude:: /../examples/coev/coop_evol.py :lines: 77 The evaluation function takes a list of individuals to be evaluated including the representatives of the other species and possibly some other arguments. It is not presented in detail for scope reasons, the structure would be, as usual, something like this :: def evaluate(individuals): # Compute the collaboration fitness return fitness, The evolution can now begin. .. literalinclude:: /../examples/coev/coop_evol.py :lines: 85-96,103-106,113-114 The last lines evolve each species once before sharing their representatives. The common parts of an evolutionary algorithm are all present, variation, evaluation and selection occurs for each species. 
The species index is simply a unique number identifying each species, it can be used to keep independent statistics on each new species added. After evolving each species, steps described in [Potter2001]_ are achieved to add a species and remove useless species on stagnation. These steps are not covered in this example but are present in the complete source code of the coevolution examples. - :example:`Coevolution Base ` - :example:`Coevolution Niching ` - :example:`Coevolution Generalization ` - :example:`Coevolution Adaptation ` - :example:`Coevolution Final ` .. [Potter2001] Potter, M. and De Jong, K., 2001, Cooperative Coevolution: An Architecture for Evolving Co-adapted Subcomponents. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/examples/eda.rst0000644000076500000240000000437114456461441015546 0ustar00runnerstaff======================================= Making Your Own Strategy : A Simple EDA ======================================= As seen in the :ref:`cma-es` example, the :func:`~deap.algorithms.eaGenerateUpdate` algorithm is suitable for algorithms learning the problem distribution from the population. Here we'll cover how to implement a strategy that generates individuals based on an updated sampling function learnt from the sampled population. Estimation of distribution ========================== The basic concept concept behind EDA is to sample :math:`\lambda` individuals with a certain distribution and estimate the problem distribution from the :math:`\mu` best individuals. This really simple concept adhere to the generate-update logic. The strategy contains a random number generator which is adapted from the population. The following :class:`EDA` class do just that. .. literalinclude:: /../examples/eda/fctmin.py :pyobject: EDA A normal random number generator is initialized with a certain mean (*centroid*) and standard deviation (*sigma*) for each dimension. 
The :meth:`generate` method uses numpy to generate *lambda_* sequences in *dim* dimensions, then the sequences are used to initialize individuals of class given in the *ind_init* argument. Finally, the :meth:`update` computes the average (centre) of the `mu` best individuals and estimates the variance over all attributes of each individual. Once :meth:`update` is called the distributions parameters are changed and a new population can be generated. Objects Needed ============== Two classes are needed, a minimization fitness and a individual that will combine the fitness and the real values. Moreover, we will use :class:`numpy.ndarray` as base class for our individuals. .. literalinclude:: /../examples/eda/fctmin.py :lines: 28-29 Operators ========= The :func:`~deap.algorithms.eaGenerateUpdate` algorithm requires to set in a toolbox an evaluation function, an generation method and an update method. We will use the method of an initialized :class:`EDA`. For the generate method, we set the class that the individuals are transferred in to our :class:`Individual` class containing a fitness. .. literalinclude:: /../examples/eda/fctmin.py :pyobject: main The complete :example:`eda/fctmin`. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/examples/es_fctmin.rst0000644000076500000240000000457114456461441016766 0ustar00runnerstaff=========================== Evolution Strategies Basics =========================== Evolution strategies are special types of evolutionary computation algorithms where the mutation strength is learnt during the evolution. A first type of strategy (endogenous) includes directly the mutation strength for each attribute of an individual inside the individual. This mutation strength is subject to evolution similarly to the individual in a classic genetic algorithm. For more details, [Beyer2002]_ presents a very good introduction to evolution strategies. 
In order to have this kind of evolution we'll need a type of individual that contains a :attr:`strategy` attribute. We'll also minimize the objective function, which gives the following classes creation. .. literalinclude:: /../examples/es/fctmin.py :lines: 33-35 The initialization function for an evolution strategy is not defined by DEAP. The following generation function takes as argument the class of individual to instantiate, *icls*. It also takes the class of strategy to use as strategy, *scls*. The next arguments are the minimum and maximum values for the individual and strategy attributes. The strategy is added in the :attr:`strategy` member of the returned individual. .. literalinclude:: /../examples/es/fctmin.py :pyobject: generateES This generation function is registered in the toolbox like any other initializer. .. literalinclude:: /../examples/es/fctmin.py :lines: 56-57 The strategy controls the standard deviation of the mutation. It is common to have a lower bound on the values so that the algorithm don't fall in exploitation only. This lower bound is added to the variation operator by the following decorator. .. literalinclude:: /../examples/es/fctmin.py :pyobject: checkStrategy The variation operators are decorated via the :meth:`~deap.base.Toolbox.decorate` method of the toolbox and the evaluation function is taken from the :mod:`~deap.benchmarks` module. .. literalinclude:: /../examples/es/fctmin.py :lines: 59,60,63-65 From here, everything left to do is either write the algorithm or use one provided in :mod:`~deap.algorithms`. Here we will use the :func:`~deap.algorithms.eaMuCommaLambda` algorithm. .. literalinclude:: /../examples/es/fctmin.py :lines: 67,69-81 The complete :example:`es/fctmin`. .. 
[Beyer2002] Beyer and Schwefel, 2002, Evolution strategies - A Comprehensive Introduction ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/examples/es_onefifth.rst0000644000076500000240000000021214456461441017274 0ustar00runnerstaff.. _one-fifth: ============== One Fifth Rule ============== Soon! .. The one fifth rule consists in changing the mutation strength when ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/examples/ga_knapsack.rst0000644000076500000240000000574314456461441017263 0ustar00runnerstaff===================================== Knapsack Problem: Inheriting from Set ===================================== Again for this example we will use a very simple problem, the 0-1 Knapsack. The purpose of this example is to show the simplicity of DEAP and the ease to inherit from anything else than a simple list or array. Many evolutionary algorithm textbooks mention that the best way to have an efficient algorithm is to have a representation close the problem. Here, what can be closer to a bag than a set? Lets make our individuals inherit from the :class:`set` class. .. literalinclude:: /../examples/ga/knapsack.py :lines: 41-42 That's it. You now have individuals that are, in fact sets, they have the usual attribute :attr:`fitness`. The fitness is a minimization of the first objective (the weight of the bag) and a maximization of the second objective (the value of the bag). We will now create a dictionary of 100 random items to map the values and weights. .. literalinclude:: /../examples/ga/knapsack.py :lines: 34-39 We now need to initialize a population and the individuals therein. For this, we will need a :class:`~deap.base.Toolbox` to register our generators since sets can also be created with an input iterable. .. literalinclude:: /../examples/ga/knapsack.py :lines: 44,47,50-53 Voilà! 
The *last* thing to do is to define our evaluation function. .. literalinclude:: /../examples/ga/knapsack.py :pyobject: evalKnapsack Everything is ready for evolution. Ho no wait, since DEAP's developers are lazy, there is no crossover and mutation operators that can be applied directly on sets. Lets define some. For example, a crossover, producing two children from two parents, could be that the first child is the intersection of the two sets and the second child their absolute difference. .. literalinclude:: /../examples/ga/knapsack.py :pyobject: cxSet A mutation operator could randomly add or remove an element from the set input individual. .. literalinclude:: /../examples/ga/knapsack.py :pyobject: mutSet We then register these operators in the toolbox. Since it is a multi-objective problem, we have selected the NSGA-II selection scheme : :func:`~deap.tools.selNSGA2` .. literalinclude:: /../examples/ga/knapsack.py :lines: 83-86 From here, all that is left to do is either write the algorithm or use provided in :mod:`~deap.algorithms`. Here we will use the :func:`~deap.algorithms.eaMuPlusLambda` algorithm. .. literalinclude:: /../examples/ga/knapsack.py :lines: 88,90-107 Finally, a :class:`~deap.tools.ParetoFront` may be used to retrieve the best non dominated individuals of the evolution and a :class:`~deap.tools.Statistics` object is created for compiling four different statistics over the generations. The Numpy functions are registered in the statistics object with the ``axis=0`` argument to compute the statistics on each objective independently. Otherwise, Numpy would compute a single mean for both objectives. The complete :example:`ga/knapsack`. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/examples/ga_onemax.rst0000644000076500000240000002406514456461441016755 0ustar00runnerstaff.. _ga-onemax: =============== One Max Problem =============== This is the first complete example built with DEAP. 
It will help new users to overview some of the framework's possibilities and illustrate the potential of evolutionary algorithms in general. The problem itself is both very simple and widely used in the evolutionary computational community. We will create a population of individuals consisting of integer vectors randomly filled with ``0`` and ``1``. Then we let our population evolve until one of its members contains only ``1`` and no ``0`` anymore. Setting Things Up ================= In order to solve the One Max problem, we need a bunch of ingredients. First we have to define our individuals, which will be lists of integer values, and to generate a population using them. Then we will add some functions and operators taking care of the evaluation and evolution of our population and finally put everything together in script. But first of all, we need to import some modules. .. literalinclude:: /../examples/ga/onemax.py :lines: 20-24 ------- Creator ------- Since the actual structure of the required individuals in genetic algorithms does strongly depend on the task at hand, DEAP does not contain any explicit structure. It will rather provide a convenient method for creating containers of attributes, associated with fitnesses, called the :mod:`deap.creator`. Using this method we can create custom individuals in a very simple way. The ``creator`` is a class factory that can build new classes at run-time. It will be called with first the desired *name* of the new class, second the *base class* it will inherit, and in addition any subsequent arguments you want to become attributes of your class. This allows us to build new and complex structures of any type of container from lists to n-ary trees. .. literalinclude:: /../examples/ga/onemax.py :lines: 26-27 First we will define the class :class:`FitnessMax`. It will inherit the :class:`Fitness` class of the :mod:`deap.base` module and contain an additional attribute called *weights*. 
Please mind the value of *weights* to be the tuple ``(1.0,)``. This way we will be maximizing a single objective fitness. We can't repeat it enough, in DEAP single objectives is a special case of multi objectives. Next we will create the class :class:`Individual`, which will inherit the class :class:`list` and contain our previously defined :class:`FitnessMax` class in its *fitness* attribute. Note that upon creation all our defined classes will be part of the ``creator`` container and can be called directly. ------- Toolbox ------- Now we will use our custom classes to create types representing our individuals as well as our whole population. All the objects we will use on our way, an individual, the population, as well as all functions, operators, and arguments will be stored in a DEAP container called :class:`~deap.base.Toolbox`. It contains two methods for adding and removing content, :meth:`~deap.base.Toolbox.register` and :meth:`~deap.base.Toolbox.unregister`. .. literalinclude:: /../examples/ga/onemax.py :lines: 29,31,36,38,41-42,45 In this code block we register a generation function :meth:`toolbox.attr_bool()` and two initialization ones :meth:`individual` and :meth:`population`. :meth:`toolbox.attr_bool`, when called, will draw a random integer between 0 and 1. The two initializers, on the other hand, will instantiate an individual or population. The registration of the tools to the toolbox only associates *aliases* to the already existing functions and freezes part of their arguments. This allows us to fix an arbitrary amount of argument at certain values so we only have to specify the remaining ones when calling the method. For example, the :meth:`attr_bool` generator is made from the :func:`~random.randint` function that takes two arguments *a* and *b*, with ``a <= n <= b``, where *n* is the returned integer. Here, we fix ``a = 0`` and ``b = 1``. Our individuals will be generated using the function :func:`~deap.tools.initRepeat`. 
Its first argument is a container class, in our example the :class:`Individual` one we defined in the previous section. This container will be filled using the method :meth:`attr_bool`, provided as second argument, and will contain 100 integers, as specified using the third argument. When called, the :meth:`individual` method will thus return an individual initialized with what would be returned by calling the :meth:`attr_bool` method 100 times. Finally, the :meth:`population` method uses the same paradigm, but we don't fix the number of individuals that it should contain. The Evaluation Function ======================= The evaluation function is pretty simple in our example. We just need to count the number of ones in an individual. .. literalinclude:: /../examples/ga/onemax.py :lines: 48-49 The returned value must be an iterable of a length equal to the number of objectives (weights). The Genetic Operators ===================== Within DEAP there are two ways of using operators. We can either simply call a function from the :mod:`~deap.tools` module or register it with its arguments in a toolbox, as we have already seen for our initialization methods. The most convenient way, however, is to register them in the toolbox, because this allows us to easily switch between the operators if desired. The toolbox method is also used when working with the :mod:`algorithms` module. See the :ref:`short-ga-onemax` for an example. Registering the genetic operators required for the evolution in our One Max problem and their default arguments in the toolbox is done as follows. .. literalinclude:: /../examples/ga/onemax.py :lines: 55,58,62,68 The evaluation will be performed by calling the alias *evaluate*. It is important to not fix its argument in here. We will need it later on to apply the function to each separate individual in our population. The mutation, on the other hand, needs an argument to be fixed (the independent probability of each attribute to be mutated *indpb*). 
Evolving the Population ======================= Once the representation and the genetic operators are chosen, we will define an algorithm combining all the individual parts and performing the evolution of our population until the One Max problem is solved. It is good style in programming to do so within a function, generally named ``main()``. ----------------------- Creating the Population ----------------------- First of all, we need to actually instantiate our population. But this step is effortlessly done using the :meth:`population` method we registered in our toolbox earlier on. .. literalinclude:: /../examples/ga/onemax.py :lines: 72,77 ``pop`` will be a :class:`list` composed of 300 individuals. Since we left the parameter *n* open during the registration of the :meth:`population` method in our toolbox, we are free to create populations of arbitrary size. The next thing to do is to evaluate our brand new population. .. literalinclude:: /../examples/ga/onemax.py :lines: 87-90 We :func:`map` the evaluation function to every individual and then assign their respective fitness. Note that the order in ``fitnesses`` and ``population`` is the same. Before we go on, this is the time to define some constants we will use later on. .. literalinclude:: /../examples/ga/onemax.py :lines: 79-83 ------------------------ Performing the Evolution ------------------------ The evolution of the population is the final step we have to accomplish. Recall, our individuals consist of 100 integer numbers and we want to evolve our population until we got at least one individual consisting of only ``1`` and no ``0``. So all we have to do is to obtain the fitness values of the individuals .. literalinclude:: /../examples/ga/onemax.py :lines: 94-95 and evolve our population until one of them reaches ``100`` or the number of generations reaches ``1000``. .. 
literalinclude:: /../examples/ga/onemax.py :lines: 97-99,100-104 The evolution itself will be performed by selecting, mating, and mutating the individuals in our population. In our simple example of a genetic algorithm, the first step is to select the next generation. .. literalinclude:: /../examples/ga/onemax.py :lines: 106-109 This will creates an *offspring* list, which is an exact copy of the selected individuals. The :meth:`toolbox.clone` method ensure that we don't use a reference to the individuals but an completely independent instance. This is of utter importance since the genetic operators in toolbox will modify the provided objects in-place. Next, we will perform both the crossover (mating) and the mutation of the produced children with a certain probability of :data:`CXPB` and :data:`MUTPB`. The ``del`` statement will invalidate the fitness of the modified offspring. .. literalinclude:: /../examples/ga/onemax.py :lines: 111-112,115-116,120-123,126-128 The crossover (or mating) and mutation operators, provided within DEAP, usually take respectively 2 or 1 individual(s) as input and return 2 or 1 modified individual(s). In addition they modify those individuals within the toolbox container and we do not need to reassign their results. Since the content of some of our offspring changed during the last step, we now need to re-evaluate their fitnesses. To save time and resources, we just map those offspring which fitnesses were marked invalid. .. literalinclude:: /../examples/ga/onemax.py :lines: 130-134 And last but not least, we replace the old population by the offspring. .. literalinclude:: /../examples/ga/onemax.py :lines: 139 To check the performance of the evolution, we will calculate and print the minimal, maximal, and mean values of the fitnesses of all individuals in our population as well as their standard deviations. .. 
literalinclude:: /../examples/ga/onemax.py :lines: 141-152 This evolution will now run until at least one of the individuals will be filled with ``1`` exclusively. A :class:`~deap.tools.Statistics` object is available within DEAP to facilitate the gathering of the evolution's statistics. See the :ref:`short-ga-onemax` for an example. The complete source code of this example: :example:`ga/onemax`. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/examples/ga_onemax_numpy.rst0000644000076500000240000000201414456461441020173 0ustar00runnerstaff============================ One Max Problem: Using Numpy ============================ The numpy version one max genetic algorithm example is very similar to one max short example. The individual class is inherited from the :class:`numpy.ndarray`. .. literalinclude:: /../examples/ga/onemax_numpy.py :lines: 18,26 The first major difference is the crossover function that implements the copying mechanism mentioned in the :doc:`/tutorials/advanced/numpy` tutorial. .. literalinclude:: /../examples/ga/onemax_numpy.py :pyobject: cxTwoPointCopy This crossover function is added to the toolbox instead of the original :func:`deap.tools.cxTwoPoint` crossover. .. literalinclude:: /../examples/ga/onemax_numpy.py :lines: 67 The second major difference is the use of the *similar* function in the :class:`~deap.tools.HallOfFame` that has to be set to a :func:`numpy.array_equal` or :func:`numpy.allclose` .. literalinclude:: /../examples/ga/onemax_numpy.py :lines: 80 The complete source code: :example:`ga/onemax_numpy`.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/examples/ga_onemax_short.rst0000644000076500000240000000302714456461441020167 0ustar00runnerstaff.. 
_short-ga-onemax: =============================== One Max Problem: Short Version =============================== The short One Max genetic algorithm example is very similar to the full one :ref:`-ga-onemax`. The only difference is that it makes use of the :mod:`~deap.algorithms` module which implements some basic evolutionary algorithms. The initializations are almost the same. We only have to import some additional packages and modules. .. literalinclude:: /../examples/ga/onemax_short.py :lines: 16,19-21 In order to use the evolution functions implemented in :mod:`~deap.algorithms`, we have to register some functions from the :mod:`~deap.tools` module: :func:`evaluate`, :func:`mate`, :func:`mutate`, and :func:`~deap.Toolbox.select`. .. literalinclude:: /../examples/ga/onemax_short.py :lines: 41-44 The toolbox is then passed to the algorithm and via ``stats`` it uses the registered functions. .. literalinclude:: /../examples/ga/onemax_short.py :lines: 46, 49-58 The short GA One max example makes use of a :class:`~deap.tools.HallOfFame` in order to keep track of the best individual to appear in the evolution (it keeps it even in the case of extinction), and a :class:`~deap.tools.Statistics` object to compile the population statistics during the evolution. Every algorithm in the :mod:`~deap.algorithms` module can handle these objects. Finally, the *verbose* keyword indicates whether we want the algorithm to output the results after each generation or not. The complete source code: :example:`ga/onemax_short`. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/examples/gp_ant.rst0000644000076500000240000000606714456461441016271 0ustar00runnerstaff.. 
_artificial-ant: ====================== Artificial Ant Problem ====================== The Artificial Ant problem is a more sophisticated yet classical GP problem, in which the evolved individuals have to control an artificial ant so that it can eat all the food located in a given environment. This example shows how DEAP can easily deal with more complex problems, including an intricate system of functions and resources (including a small simulator). For more information about this problem, see :ref:`refPapersAnt`. Primitives set used =================== We use the standard primitives set for the Artificial Ant problem : .. literalinclude:: /../examples/gp/ant.py :lines: 150-156 - ``if_food_ahead`` is a primitive which executes its first argument if there is food in front of the ant; else, it executes its second argument. - :func:`prog2` and :func:`prog3` are the equivalent of the lisp PROGN2 and PROGN3 functions. They execute their children in order, from the first to the last. For instance, prog2 will first execute its first argument, then its second. - :func:`move_forward` makes the artificial ant move one front. This is a terminal. - :func:`turn_right` and :func:`turn_left` makes the artificial ant turning clockwise and counter-clockwise, without changing its position. Those are also terminals. .. note:: There is no external input as in symbolic regression or parity. Although those functions are obviously not already built-in in Python, it is very easy to define them : .. literalinclude:: /../examples/gp/ant.py :lines: 62-75, 122-123 Partial functions are a powerful feature of Python which allow to create functions on the fly. For more detailed information, please refer to the Python documentation of :func:`functools.partial`. Evaluation function =================== The evaluation function use an instance of a simulator class to evaluate the individual. Each individual is given 600 moves on the simulator map (obtained from an external file). 
The fitness of each individual corresponds to the number of pieces of food picked up. In this example, we are using a classical trail, the *Santa Fe trail*, in which there is 89 pieces of food. Therefore, a perfect individual would achieve a fitness of 89. .. literalinclude:: /../examples/gp/ant.py :pyobject: evalArtificialAnt Where `ant` is the instance of the simulator used. The :func:`~deap.gp.evaluate` function is a convenience one provided by DEAP and returning an executable Python program from a GP individual and its primitives function set. Complete example ================ Except for the simulator code (about 75 lines), the code does not fundamentally differ from the :ref:`Symbolic Regression example `. Note that as the problem is harder, improving the selection pressure by increasing the size of the tournament to 7 allows to achieve better performance. The complete :example:`gp/ant` .. _refPapersAnt: Reference ========= *John R. Koza, "Genetic Programming I: On the Programming of Computers by Means of Natural Selection", MIT Press, 1992, pages 147-161.* ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/examples/gp_multiplexer.rst0000644000076500000240000000344514456461441020056 0ustar00runnerstaff.. _mux: ======================= Multiplexer 3-8 Problem ======================= The multiplexer problem is another extensively used GP problem. Basically, it trains a program to reproduce the behavior of an electronic `multiplexer `_ (mux). Usually, a 3-8 multiplexer is used (3 address entries, from A0 to A2, and 8 data entries, from D0 to D7), but virtually any size of multiplexer can be used. This problem was first defined by Koza (see :ref:`refPapersMux`). Primitives set used =================== The primitive set is almost the same as the set used in :ref:`Parity `. 
Three Boolean operators (and, or and not), imported from :mod:`operator`, and a specific if-then-else primitive, which return either its second or third argument depending on the value of the first one. .. literalinclude:: /../examples/gp/multiplexer.py :lines: 56-62 As usual, we also add two terminals, a Boolean true and a Boolean false. Evaluation function =================== To speed up the evaluation, the computation of the input/output pairs is done at start up, instead of at each evaluation call. This pre-computation also allows to easily tune the multiplexer size, by changing the value of *MUX_SELECT_LINES*. .. literalinclude:: /../examples/gp/multiplexer.py :lines: 32-54 After that, the evaluation function is trivial, as we have both inputs and output values. The fitness is then the number of well predicted outputs over the 2048 cases (for a 3-8 multiplexer). .. literalinclude:: /../examples/gp/multiplexer.py :pyobject: evalMultiplexer The complete :example:`gp/multiplexer`. .. _refPapersMux: Reference ========= *John R. Koza, "Genetic Programming I: On the Programming of Computers by Means of Natural Selection", MIT Press, 1992, pages 170-187.* ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/examples/gp_parity.rst0000644000076500000240000000377714456461441017024 0ustar00runnerstaff.. _parity: =================== Even-Parity Problem =================== Parity is one of the classical GP problems. The goal is to find a program that produces the value of the Boolean even parity given n independent Boolean inputs. Usually, 6 Boolean inputs are used (Parity-6), and the goal is to match the good parity bit value for each of the :math:`2^6 = 64` possible entries. The problem can be made harder by increasing the number of inputs (in the DEAP implementation, this number can easily be tuned, as it is fixed by a constant named PARITY_FANIN_M). 
For more information about this problem, see :ref:`refPapersParity`. Primitives set used =================== Parity uses standard Boolean operators as primitives, available in the Python operator module : .. literalinclude:: /../examples/gp/parity.py :lines: 49-55 In addition to the *n* inputs, we add two constant terminals, one at 0, one at 1. .. note:: As Python is a dynamic typed language, you can mix Boolean operators and integers without any issue. Evaluation function =================== In this implementation, the fitness of a Parity individual is simply the number of successful cases. Thus, the fitness is maximized, and the maximum value is 64 in the case of a 6 inputs problems. .. literalinclude:: /../examples/gp/parity.py :pyobject: evalParity `inputs` and `outputs` are two pre-generated lists, to speedup the evaluation, mapping a given input vector to the good output bit. The Python :func:`sum` function works on booleans (false is interpreted as 0 and true as 1), so the evaluation function boils down to sum the number of successful tests : the higher this sum, the better the individual. Conclusion ========== The other parts of the program are mostly the same as the :ref:`Symbolic Regression algorithm `. The complete :example:`gp/parity`. .. _refPapersParity: Reference ========= *John R. Koza, "Genetic Programming II: Automatic Discovery of Reusable Programs", MIT Press, 1994, pages 157-199.* ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/examples/gp_spambase.rst0000644000076500000240000000656314456461441017303 0ustar00runnerstaff.. _spambase: =================================== Spambase Problem: Strongly Typed GP =================================== This problem is a classification example using STGP (Strongly Typed Genetic Programming). The evolved programs work on floating-point values AND Booleans values. 
The programs must return a Boolean value which must be true if e-mail is spam, and false otherwise. It uses a base of emails (saved in *spambase.csv*, see :ref:`refPapersSpam`), from which it randomly chooses 400 emails to evaluate each individual. Primitives set ============== Strongly-typed GP is a more generic GP where each primitive, in addition to have an arity and a corresponding function, has also a specific return type and specific parameter(s) type. In this way, each primitive is someway describe as a pure C function, where each parameter has to be one of the good type, and where the return value type is specified before run time. .. note:: Actually, when the user does not specify return or parameters type, a default type is selected by DEAP. On standard GP, because all the primitives use this default type, this behaves as there was no type requirement. We define a typed primitive set almost the same way than a normal one, but we have to specify the types used. .. literalinclude:: /../examples/gp/spambase.py :lines: 37-68 On the first line, we see the declaration of a typed primitive set with :class:`~deap.gp.PrimitiveSetTyped`. The first argument remains the set name, but the next ones are the type of the entries (in this case, we have 57 float entries and one Boolean output; we could have written `float` 57 times, but it is fairly quicker and more understandable to use the :func:`itertools.repeat` function). The last argument remains the entries prefix. After that, we define the primitives themselves. The definition of a typed primitive has two additional parameters : a list containing the parameters type, in order, and the return type. The terminals set is then filled, with at least one terminal of each type, and that is for the primitive set declaration. 
Evaluation function =================== The evaluation function is very simple : it picks 400 mails at random in the spam database, and then checks if the prediction made by the individual matches the expected Boolean output. The count of well predicted emails is returned as the fitness of the individual (which is so, at most, 400). .. literalinclude:: /../examples/gp/spambase.py :pyobject: evalSpambase Toolbox ======= The toolbox used is very similar to the one presented in the symbolic regression example, but notice that we now use specific STGP operators for crossovers and mutations : .. literalinclude:: /../examples/gp/spambase.py :lines: 88-92 Conclusion ================ Although it does not really differ from the other problems, it is interesting to note how Python can decrease the programming time. Indeed, the spam database is in csv form : with many frameworks, you would have to manually read it, or use a non-standard library, but with Python, you can use the built-in module :mod:`csv` and, within 2 lines, it is done! The data is now in the matrix *spam* and can easily be used through all the program : The complete :example:`gp/spambase` .. _refPapersSpam: Reference ========= Data are from the Machine learning repository, http://www.ics.uci.edu/~mlearn/MLRepository.html ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/examples/gp_symbreg.rst0000644000076500000240000001557114456461441017157 0ustar00runnerstaff.. _symbreg: =============================================== Symbolic Regression Problem: Introduction to GP =============================================== Symbolic regression is one of the best known problems in GP (see :ref:`refPapersSymbreg`). It is commonly used as a tuning problem for new algorithms, but is also widely used with real-life distributions, where other regression methods may not work. 
It is conceptually a simple problem, and therefore makes a good introductory example for the GP framework in DEAP. All symbolic regression problems use an arbitrary data distribution, and try to fit the data with the most accurate symbolic formula available. Usually, measures like the RMSE (Root Mean Square Error) or MSE (Mean Squared Error) are used to measure an individual's fitness. In this example, we use a classical distribution, the quartic polynomial :math:`(x^4 + x^3 + x^2 + x)`, a one-dimension distribution. *20* equidistant points are generated in the range [-1, 1], and are used to evaluate the fitness. Creating the primitives set =========================== One of the most crucial aspect of a GP program is the choice of the primitives set. They should make good building blocks for the individuals so the evolution can succeed. In this problem, we use a classical set of primitives, which are basic arithmetic functions : .. literalinclude:: /../examples/gp/symbreg.py :lines: 29-43 The redefinition of the division is made to protect it against a zero division error (which would crash the program). The other functions are simply a mapping from the Python :mod:`operator` module. The number following the function is the *arity* of the primitive, that is the number of entries it takes. On the last line, we declare an :class:`~deap.gp.Ephemeral` constant. This is a special terminal type, which does not have a fixed value. When the program appends an ephemeral constant terminal to a tree, the function it contains is executed, and its result is inserted as a constant terminal. In this case, those constant terminals can take the values -1, 0 or 1. The second argument of :class:`~deap.gp.PrimitiveSet` is the number of inputs. Here, as we have only a one dimension regression problem, there is only one input, but it could have as many as you want. By default, those inputs are named "ARGx", where "x" is a number, but you can easily rename them : .. 
literalinclude:: /../examples/gp/symbreg.py :lines: 44 Creator ======= As any evolutionary program, symbolic regression needs (at least) two object types : an individual containing the genotype and a fitness. We can easily create them with the creator : .. literalinclude:: /../examples/gp/symbreg.py :lines: 46-47 The first line creates the fitness object (this is a minimization problem, so the weight is negative). The `weights` argument must be an iterable of weights, even if there is only one fitness measure. The second line creates the individual object itself. Very straightforward, we can see that it will be based upon a tree, to which we add a fitness. If, for any reason, the user would want to add any other attribute (for instance, a file in which the individual will be saved), it would be as easy as adding this attribute of any type to this line. After this declaration, any individual produced will contain those wanted attributes. Toolbox ======= Now, we want to register some parameters specific to the evolution process. In DEAP, this is done through the toolbox : .. literalinclude:: /../examples/gp/symbreg.py :lines: 49-70 First, a toolbox instance is created (in some problem types like coevolution, you may consider creating more than one toolbox). Then, we can register any parameters. The first lines register how to create an individual (by calling gp.genHalfAndHalf with the previously defined primitive set), and how to create the population (by repeating the individual initialization). We may now introduce the evaluation function, which will receive an individual as input, and return the corresponding fitness. This function uses the `compile` function previously defined to transform the individual into its executable form -- that is, a program. 
After that, the evaluation is only simple maths, where the difference between the values produced by the evaluated individual and the real values are squared and summed to compute the MSE (Mean Squared Error), which is returned as the fitness of the individual. .. warning:: Even if the fitness only contains one measure, keep in mind that DEAP stores it as an iterable. Knowing that, you can understand why the evaluation function must return a tuple value (even if it is a 1-tuple) : .. literalinclude:: /../examples/gp/symbreg.py :pyobject: evalSymbReg :emphasize-lines: 9 Returning only the value would produce strange behaviors and errors, as the selection and stats functions relies on the fact that the fitness is always an iterable. Afterwards, we register the evaluation function. We also choose the selection method (a tournament of size 3), the mate method (one point crossover with uniform probability over all the nodes), and the mutation method (a uniform probability mutation which may append a new full sub-tree to a node). Then, we decorate the mate and mutate method to limit the height of generated individuals. This is done to avoid an important draw back of genetic programming : bloat. Koza in his book on genetic programming suggest to use a max depth of 17. At this point, any structure with an access to the toolbox instance will also have access to all of those registered parameters. Of course, the user could register other parameters basing on their needs. Statistics ========== Although optional, statistics are often useful in evolutionary programming. DEAP offers a simple class which can handle most of the "boring work". In this case, we want to compute the mean, standard deviation, minimum, and maximum of both the individuals fitness and size. For that we'll use a :class:`~deap.tools.MultiStatistics` object. .. 
literalinclude:: /../examples/gp/symbreg.py :lines: 78-84 Note that a simple :class:`~deap.tools.Statistics` object can be used, as in previous examples when statistics over a single key are desired. Launching the evolution ======================= At this point, DEAP has all the information needed to begin the evolutionary process, but nothing has been initialized. We can start the evolution by creating the population and then calling a complete algorithm. In this case, we'll use :func:`~deap.algorithms.eaSimple`. .. literalinclude:: /../examples/gp/symbreg.py :lines: 75,76,86-87 The hall of fame is a specific structure which contains the *n* best individuals (here, the best one only). The complete :example:`gp/symbreg`. .. _refPapersSymbreg: Reference ========= *John R. Koza, "Genetic Programming: On the Programming of Computers by Means of Natural Selection", MIT Press, 1992, pages 162-169.* ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/examples/index.rst0000644000076500000240000000202614456461441016117 0ustar00runnerstaffExamples ======== This section contains some documented examples of common toy problems often encountered in the evolutionary computation community. Note that there are several other examples in the ``deap/examples`` sub-directory of the framework. These can be used as ground work for implementing your own flavour of evolutionary algorithms. Genetic Algorithm (GA) ---------------------- .. toctree:: :maxdepth: 1 ga_onemax ga_onemax_short ga_onemax_numpy ga_knapsack coev_coop nsga3 .. _gpexamples: Genetic Programming (GP) ------------------------ .. toctree:: :maxdepth: 1 gp_symbreg gp_parity gp_multiplexer gp_ant gp_spambase Evolution Strategy (ES) ----------------------- .. toctree:: :maxdepth: 1 es_fctmin es_onefifth cmaes bipop_cmaes cmaes_plotting Particle Swarm Optimization (PSO) --------------------------------- .. 
toctree:: :maxdepth: 1 pso_basic pso_multiswarm Estimation of Distribution Algorithms (EDA) ------------------------------------------- .. toctree:: :maxdepth: 1 eda ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/examples/nsga3.rst0000644000076500000240000001044714456461441016031 0ustar00runnerstaff.. _nsga-3: ====================================================== Non-dominated Sorting Genetic Algorithm III (NSGA-III) ====================================================== The Non-dominated Sorting Genetic Algorithm III (NSGA-III) [Deb2014]_ is implemented in the :func:`deap.tools.selNSGA3` function. This example shows how it can be used in DEAP for many objective optimization. Problem Definition ------------------ First we need to define the problem we want to work on. We will use the first problem tested in the paper, 3 objectives DTLZ2 with ``k = 10`` and ``p = 12``. We will use `pymop `_ for problem implementation as it provides the exact Pareto front that we will use later for computing the performance of the algorithm. .. literalinclude:: /../examples/ga/nsga3.py :start-after: # Problem definition :end-before: ## Algorithm Parameters -------------------- Then we define the various parameters for the algorithm, including the population size set to the first multiple of 4 greater than H, the number of generations and variation probabilities. .. literalinclude:: /../examples/ga/nsga3.py :start-after: # Algorithm parameters :end-before: ## Classes and Tools ----------------- Next, NSGA-III selection requires a reference point set. The reference point set serves to guide the evolution into creating a uniform Pareto front in the objective space. .. literalinclude:: /../examples/ga/nsga3.py :start-after: # Create uniform reference point :lines: 1 The next figure shows an example of reference point set with ``p = 12``. The cross represents the the utopian point (0, 0, 0). .. 
plot:: code/examples/nsga3_ref_points.py As in any DEAP program, we need to populate the creator with the type of individual we require for our optimization. In this case we will use a basic list genotype and minimization fitness. .. literalinclude:: /../examples/ga/nsga3.py :start-after: # Create classes :end-before: ## Moreover, we need to populate the evolutionary toolbox with initialization, variation and selection operators. Note how we provide the reference point set to the NSGA-III selection scheme. .. literalinclude:: /../examples/ga/nsga3.py :start-after: # Toolbox initialization :end-before: ## Evolution --------- The main part of the evolution is mostly similar to any other DEAP example. The algorithm used is close to the :func:`~deap.algorithms.eaSimple` algorithm as crossover and mutation are applied to every individual (see variation probabilities above). However, the selection is made from the parent and offspring populations instead of completely replacing the parents with the offspring. .. literalinclude:: /../examples/ga/nsga3.py :pyobject: main Finally, we can have a look at the final population .. image:: /_images/nsga3.png :align: center Higher Dimensional Objective Space ---------------------------------- NSGA-III requires a reference point set that depends on the number of objective. This point set can become quite big for even relatively low dimensional objective space. For example, a 15 dimensional objective space with a uniform reference point set with ``p = 4`` would have 3060 points. To avoid this situation and reduce the algorithms runtime [Deb2014]_ suggest to combine reference point set with lower p value. To do this in DEAP, we can combine multiple uniform point set using: .. literalinclude:: ../code/examples/nsga3_ref_points_combined.py :start-after: # reference points :end-before: ## This would give the following reference point set with two underlying uniform distribution: one at full scale, and the other at half scale. .. 
plot:: code/examples/nsga3_ref_points_combined_plot.py Conclusion ---------- That's it for the NSGA-III algorithm using DEAP, now you can leverage the power of many-objective optimization with DEAP. If you're interested, you can copy the `example `_ change the evaluation function and try applying it to your own problem. .. [Deb2014] Deb, K., & Jain, H. (2014). An Evolutionary Many-Objective Optimization Algorithm Using Reference-Point-Based Nondominated Sorting Approach, Part I: Solving Problems With Box Constraints. IEEE Transactions on Evolutionary Computation, 18(4), 577-601. doi:10.1109/TEVC.2013.2281535. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/examples/pso_basic.rst0000644000076500000240000001135714456461441016761 0ustar00runnerstaff================================== Particle Swarm Optimization Basics ================================== The implementation presented here is the original PSO algorithm as presented in [Poli2007]_. From Wikipedia definition of PSO `PSO optimizes a problem by having a population of candidate solutions, here dubbed particles, and moving these particles around in the search-space according to simple mathematical formulae. The movements of the particles are guided by the best found positions in the search-space which are updated as better positions are found by the particles.` Modules ======= Before writing functions and algorithms, we need to import some module from the standard library and from DEAP. .. literalinclude:: /../examples/pso/basic.py :lines: 16-24 Representation ============== The particle's goal is to maximize the return value of the function at its position. PSO particles are essentially described as positions in a search-space of D dimensions. Each particle also has a vector representing the speed of the particle in each dimension. Finally, each particle keeps a reference to the best state in which it has been so far. 
This translates in DEAP by the following two lines of code : .. literalinclude:: /../examples/pso/basic.py :lines: 27-29 Here we create two new objects in the :mod:`~deap.creator` space. First, we create a :class:`FitnessMax` object, and we specify the :attr:`~deap.base.Fitness.weights` to be ``(1.0,)``, this means we want to maximise the value of the fitness of our particles. The second object we create represent our particle. We defined it as a :class:`list` to which we add five attributes. The first attribute is the fitness of the particle, the second is the speed of the particle which is also going to be a list, the third and fourth are the limit of the speed value, and the fifth attribute will be a reference to a copy of the best state the particle has been so far. Since the particle has no final state until it has been evaluated, the reference is set to ``None``. The speed limits are also set to ``None`` to allow configuration via the function :func:`generate` presented in the next section. Operators ========= PSO original algorithm uses three operators : initializer, updater and evaluator. The initialization consist in generating a random position and a random speed for a particle. The next function creates a particle and initializes its attributes, except for the attribute :attr:`best`, which will be set only after evaluation : .. literalinclude:: /../examples/pso/basic.py :pyobject: generate The function :func:`updateParticle` first computes the speed, then limits the speed values between ``smin`` and ``smax``, and finally computes the new particle position. .. literalinclude:: /../examples/pso/basic.py :pyobject: updateParticle The operators are registered in the toolbox with their parameters. The particle value at the beginning are in the range ``[-100, 100]`` (:attr:`pmin` and :attr:`pmax`), and the speed is limited in the range ``[-50, 50]`` through all the evolution. The evaluation function :func:`~deap.benchmarks.h1` is from [Knoek2003]_. 
The function is already defined in the benchmarks module, so we can register it directly. .. literalinclude:: /../examples/pso/basic.py :lines: 51-55 Algorithm ========= Once the operators are registered in the toolbox, we can fire up the algorithm by firstly creating a new population, and then apply the original PSO algorithm. The variable `best` contains the best particle ever found (it is known as gbest in the original algorithm). .. literalinclude:: /../examples/pso/basic.py :pyobject: main Conclusion ========== The full PSO basic example can be found here : :example:`pso/basic`. This is a video of the algorithm in action, plotted with matplotlib_. The red dot represents the best solution found so far. .. _matplotlib: http://matplotlib.org/ .. raw:: html
    References ========== .. [Poli2007] Ricardo Poli, James Kennedy and Tim Blackwell, "Particle swarm optimization an overview". Swarm Intelligence. 2007; 1: 33–57 .. [Knoek2003] Arthur J. Knoek van Soest and L. J. R. Richard Casius, "The merits of a parallel genetic algorithm in solving hard optimization problems". Journal of Biomechanical Engineering. 2003; 125: 141–146 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/examples/pso_multiswarm.rst0000644000076500000240000000602514456461441020100 0ustar00runnerstaffMoving Peaks Benchmark with Multiswarm PSO ========================================== In this example we show how to use the :class:`~deap.benchmarks.movingpeaks.MovingPeaks` benchmark. A popular algorithm on this benchmark is the Multiswarm PSO (MPSO) [Blackwell2004]_ which achieve a great offline error and is able to follow multiple peaks at the same time. Choosing the Scenario --------------------- The moving peak benchmark allows to choose from the 3 original scenarios proposed in the `original studies `_. This is done by retrieving one of the constants defined in the :mod:`~deap.benchmarks.movingpeaks` module. Here we will use Scenario 2. .. literalinclude:: /../examples/pso/multiswarm.py :lines: 37,41 Once the scenario is retrieved, we need to set a few more constants and instantiate the benchmark, here the number of dimensions and the bounds of the problem. .. literalinclude:: /./examples/pso/multiswarm.py :lines: 43-46 For a list of all the variables defined in the ``SENARIO_X`` dictionaries see :class:`~deap.benchmarks.movingpeaks.MovingPeaks` class documentation. Initialization -------------- As in every DEAP example we are required to create the objects. The moving peak benchmark is a max problem, thus we need a maximizing fitness. And, we associate that fitness to a particle as in the :doc:`pso_basic` example. .. 
literalinclude:: /../examples/pso/multiswarm.py :lines: 48-51 Then, the particle generator is defined. It takes the particle class object :data:`pclass` into which to put the data. Remember that :class:`creator.Particle`, which is gonna be give to this argument in the toolbox, inherits from :class:`list` and can be initialized with an iterable. The position (elements of the list) and the speed (attribute) of the particle is set to randomly generated numbers between the given bounds. .. literalinclude:: /../examples/pso/multiswarm.py :pyobject: generate The next function update the particle position and speed. .. literalinclude:: /../examples/pso/multiswarm.py :pyobject: updateParticle Thereafter, a function "converting" a particle to a quantum particle with different possible distributions is defined. .. literalinclude:: /../examples/pso/multiswarm.py :pyobject: convertQuantum Finally, all the functions are registered in the toolbox for further use in the algorithm. .. literalinclude:: /../examples/pso/multiswarm.py :lines: 97-104 Moving Peaks ------------ The registered evaluation function in the toolbox refers directly to the instance of the :class:`~deap.benchmarks.movingpeaks.MovingPeaks` benchmark object :data:`mpb`. The call to :func:`mpb` evaluates the given individuals as any other evaluation function. Algorithm --------- The algorithm is fully detailed in the file :example:`pso/multiswarm`, it reflects what is described in [Blackwell2004]_. .. [Blackwell2004] Blackwell, T., & Branke, J. (2004). Multi-swarm optimization in dynamic environments. In *Applications of Evolutionary Computing* (pp. 489-500). Springer Berlin Heidelberg.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/index.rst0000644000076500000240000000370214456461441014303 0ustar00runnerstaff.. 
image:: _static/deap_long.png :width: 300 px :align: right DEAP documentation ================== DEAP is a novel evolutionary computation framework for rapid prototyping and testing of ideas. It seeks to make algorithms explicit and data structures transparent. It works in perfect harmony with parallelisation mechanism such as multiprocessing and `SCOOP `_. The following documentation presents the key concepts and many features to build your own evolutions. .. sidebar:: Getting Help Having trouble? We’d like to help! * Search for information in the archives of the `deap-users mailing list `_, or post a question. * Report bugs with DEAP in our `issue tracker `_. * **First steps:** * :doc:`Overview (Start Here!) ` * :doc:`Installation ` * :doc:`Porting Guide ` * **Basic tutorials:** * :doc:`Part 1: creating types ` * :doc:`Part 2: operators and algorithms ` * :doc:`Part 3: logging statistics ` * :doc:`Part 4: using multiple processors ` * **Advanced tutorials:** * :doc:`tutorials/advanced/gp` * :doc:`tutorials/advanced/checkpoint` * :doc:`tutorials/advanced/constraints` * :doc:`tutorials/advanced/benchmarking` * :doc:`tutorials/advanced/numpy` * :doc:`examples/index` * :doc:`api/index` * :doc:`releases` * :doc:`contributing` * :doc:`about` .. toctree:: :hidden: overview installation porting tutorials/basic/part1 tutorials/basic/part2 tutorials/basic/part3 tutorials/basic/part4 tutorials/advanced/gp tutorials/advanced/checkpoint tutorials/advanced/constraints tutorials/advanced/benchmarking tutorials/advanced/numpy examples/index api/index releases contributing about ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/installation.rst0000644000076500000240000000170214456461441015673 0ustar00runnerstaffInstallation ============ Requirements ------------ DEAP is compatible with Python 2.7 and 3.4 or higher. The computation distribution requires SCOOP_. 
CMA-ES requires Numpy_, and we recommend matplotlib_ for visualization of results as it is fully compatible with DEAP's API. .. _SCOOP: http://www.pyscoop.org/ .. _Numpy: http://www.numpy.org/ .. _matplotlib: http://www.matplotlib.org/ Install DEAP ------------ We encourage you to use easy_install_ or pip_ to install DEAP on your system. Linux package managers like apt-get, yum, etc. usually provide an outdated version. :: easy_install deap or :: pip install deap If you wish to build from sources, download_ or clone_ the repository and type:: python setup.py install .. _download: https://pypi.python.org/pypi/deap/ .. _clone: https://github.com/DEAP/deap.git .. _easy_install: http://pythonhosted.org/distribute/easy_install.html .. _pip: http://www.pip-installer.org/en/latest/ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/overview.rst0000644000076500000240000000677514456461441015057 0ustar00runnerstaffOverview ======== If you are used to any other evolutionary algorithm framework, you'll notice we do things differently with DEAP. Instead of limiting you with predefined types, we provide ways of creating the appropriate ones. Instead of providing closed initializers, we enable you to customize them as you wish. Instead of suggesting unfit operators, we explicitly ask you to choose them wisely. Instead of implementing many sealed algorithms, we allow you to write the ones that fit all your needs. This tutorial will present a quick overview of what DEAP is all about along with what every DEAP program is made of. Types ----- The first thing to do is to think of the appropriate type for your problem. Then, instead of looking in the list of available types, DEAP enables you to build your own. This is done with the :mod:`~deap.creator` module. Creating an appropriate type might seem overwhelming but the creator makes it very easy. In fact, this is usually done in a single line. 
For example, the following creates a :class:`FitnessMin` class for a minimization problem and an :class:`Individual` class that is derived from a list with a fitness attribute set to the just created fitness. .. literalinclude:: /code/tutorials/part_1/1_where_to_start.py :lines: 2-4 That's it. More on creating types can be found in the :doc:`tutorials/basic/part1` tutorial. Initialization -------------- Once the types are created you need to fill them with sometimes random values or sometime guessed ones. Again, DEAP provides an easy mechanism to do just that. The :class:`~deap.base.Toolbox` is a container for tools of all sorts including initializers that can do what is needed of them. The following takes on the last lines of code to create the initializers for individuals containing random floating point numbers and for a population that contains them. .. literalinclude:: /code/tutorials/part_1/1_where_to_start.py :lines: 7-16 This creates functions to initialize populations from individuals that are themselves initialized with random float numbers. The functions are registered in the toolbox with their default arguments under the given name. For example, it will be possible to call the function :func:`toolbox.population` to instantly create a population. More initialization methods are found in the :doc:`tutorials/basic/part1` tutorial and the various :doc:`examples/index`. Operators --------- Operators are just like initializers, except that some are already implemented in the :mod:`~deap.tools` module. Once you've chosen the perfect ones, simply register them in the toolbox. In addition you must create your evaluation function. This is how it is done in DEAP. .. literalinclude:: /code/tutorials/part_1/1_where_to_start.py :lines: 19-25 The registered functions are renamed by the toolbox, allowing generic algorithms that do not depend on operator names. Note also that fitness values must be iterable, that is why we return a tuple in the evaluate function. 
More on this in the :doc:`tutorials/basic/part2` tutorial and :doc:`examples/index`. Algorithms ---------- Now that everything is ready, we can start to write our own algorithm. It is usually done in a main function. For the purpose of completeness we will develop the complete generational algorithm. .. literalinclude:: /code/tutorials/part_1/1_where_to_start.py :lines: 28-64 It is also possible to use one of the four algorithms readily available in the :mod:`~deap.algorithms` module, or build from some building blocks called variations also available in this module. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/pip_req.txt0000644000076500000240000000004014456461441014632 0ustar00runnerstaffdeap numpy>=1.5 matplotlib>=1.0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/porting.rst0000644000076500000240000000657514456461441014671 0ustar00runnerstaff============= Porting Guide ============= DEAP development's high velocity and our refusal to be at the mercy of backward compatibility can sometime induce minor headaches to our users. This concise guide should help you port your code from the latest version minus 0.1 to the current version. General ======= #. The algorithms from the :mod:`~deap.gp.algorithms` module now return a tuple of 2-elements : the population and a :class:`~deap.tools.Logbook`. #. Replace every call to DTM by calls to `SCOOP `_. #. Statistics and logging of data are accomplished by two distinct objects: :class:`~deap.tools.Statistics` and :class:`~deap.tools.Logbook`. Read the tutorial on :doc:`logging statistics `. #. Replace :class:`~deap.tools.EvolutionLogger` by :class:`~deap.tools.Logbook`. #. Replace usage of :func:`tools.mean`, :func:`tools.var`, :func:`tools.std`, and :func:`tools.median` by their Numpy equivalent. #. 
If the fitness has multiple objectives, add the keyword argument ``axis=0`` when registering statistical function. Genetic Algorithms (GA) ======================= #. Replace every call to the function :func:`~deap.tools.cxTwoPoints` by a call to :func:`~deap.tools.cxTwoPoint`. #. Remove any import of cTools. If you need a faster implementation of the non-dominated sort, use :func:`~deap.tools.sortLogNondominated`. #. When inheriting from Numpy, you must manually copy the slices and compare individuals with numpy comparators. See the :doc:`tutorials/advanced/numpy` tutorial. Genetic Programming (GP) ======================== #. Specify a ``name`` as the first argument of every call to :func:`~deap.gp.PrimitiveSet.addEphemeralConstant`. #. Replace every call to :func:`~deap.gp.lambdify` and :func:`~deap.gp.evaluate` by a call to :func:`~deap.gp.compile`. #. Remove the pset attribute from every :func:`~deap.creator.create` call when creating a primitive tree class. #. In the toolbox, register the primitive set as the ``pset`` argument of the following mutation operator: :func:`~deap.gp.mutUniform`, :func:`~deap.gp.mutNodeReplacement` and :func:`~deap.gp.mutInsert`. #. Replace every call to the function :func:`~deap.gp.genRamped` by a call to :func:`~deap.gp.genHalfAndHalf`. #. Replace every call to :func:`~deap.gp.stringify` by a call to :func:`str` or remove the call completely. #. Replace every call to :func:`~deap.gp.lambdifyADF` by a call to :func:`~deap.gp.compileADF`. #. Replace the decorators :func:`~deap.gp.staticDepthLimit` and :func:`~deap.gp.staticSizeLimit` by :func:`~deap.gp.staticLimit`. To specify a limit on either depth, size or any other attribute, it is now required to specify a `key` function. See :func:`~deap.gp.staticLimit` documentation for more information. Strongly Typed Genetic Programming (STGP) ----------------------------------------- #. 
:class:`~deap.gp.PrimitiveSetTyped` method now requires type arguments to be defined as classes instead of string, for example ``float`` instead of ``"float"``. Evolution Strategy (ES) ======================= #. Replace every call to the function :func:`~deap.tools.cxESTwoPoints` by a call to :func:`~deap.tools.cxESTwoPoint`. Still having problem? ===================== We have overlooked something and your code is still not working? No problem, contact us on the deap users list at ``_ and we will get you out of trouble in no time.././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/releases.rst0000644000076500000240000001263614456461441015005 0ustar00runnerstaff================== Release Highlights ================== Here is the list of changes made to DEAP for the current release. API enhancements ++++++++++++++++ - algorithms: Every algorithm now return the final population and a logbook containing the evolution statistical data. - base: Fitness objects are now hashable. - base: Added a ``dominates`` function to the Fitness, which can be replaced. This function is now used in most multi-objective specific selection methods instead of ``isDominated``. - base: Fitness - implementation of a ``__repr__`` method. (issue 20) - examples: Removed prefix (ga, gp, pso, etc.) from examples filename. - gp: Added ``pset`` to mutation operators that require it. - gp: Replaced the :func:`~deap.gp.stringify` function by :func:`PrimitiveTree.__str__`. Use ``str`` or ``print`` on trees to read their code. - gp: Added an explicit description of the error when there are no available primitive/terminal of a certain type. - gp: Added symbolic regression benchmarks in ``benchmarks.gp``. - gp: Removed the ephemeral generator. - gp: Added a :func:`~deap.gp.PrimitiveTree.from_string` function to :class:`~deap.gp.PrimitiveTree`. 
- gp: Added the possibility to name primitives added to a PrimitiveSet in :func:`~deap.gp.PrimitiveSet.addPrimitive`. - gp: Added object oriented inheritance to strongly typed genetic programming. - gp: :class:`~deap.gp.PrimitiveSetTyped` now requires real classes as type instead of string. See the :doc:`Spambase example `. - gp: Replaced :func:`~deap.gp.evaluate` and :func:`~deap.gp.lambdify` by a single function :func:`~deap.gp.compile`. - gp: Replaced :func:`~deap.gp.lambdifyADF` by :func:`~deap.gp.compileADF`. - gp: New :func:`~deap.gp.graph` function that returns a list of nodes, edges and a labels dictionary that can then be fed directly to networkx to draw the tree. - gp: Renamed :func:`deap.gp.genRamped` as :func:`deap.gp.genHalfAndHalf`. - gp: Merged :func:`~deap.gp.staticDepthLimit` and :func:`~deap.gp.staticSizeLimit` in a single function :func:`~deap.gp.staticLimit` which takes a key function in argument than can be return the height, the size or whatever attribute the tree should be limited on. - tools: Revised the :class:`~deap.tools.HallOfFame` to include only unique individuals. - tools: Changed the way statistics are computed. See the :class:`~deap.tools.Statistics` and :class:`~deap.tools.MultiStatistics` documentation for more details and the tutorial :doc:`logging statistics ` (issue 19). - tools: Replaced the :class:`EvolutionLogger` by :class:`~deap.tools.Logbook`. - tools: Removed :class:`~deap.tools.Checkpoint` class since it was more trivial to do simple checkpointing than using the class. The documentation now includes an example on how to do checkpointing without Checkpoint. - tools: Reorganize the operators in submodule, tools now being a package. - tools: Implementation of the logarithmic non-dominated sort by Fortin et al. (2013), available under the name :func:`~deap.tools.sortLogNondominated`. 
- tools: Mutation operators can now take either a value or a sequence of values as long as the individual has parameters (low, up, sigma, etc.).
This removes the obligation to define the type_ argument for the individual generation function when doing STGP. - gp: fix a bug with OOGP when calling addPrimitive and addTerminal in arbitrary order. - gp: fix Ephemeral regeneration with mutEphemeral. rev: `ae46705`. - gp: fix issue #35 - from_string had issues with OOGP. - Fix issue #26: in four examples, files are opened but never closed. ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1689936700.615518 deap-1.4.1/doc/tutorials/0000755000076500000240000000000014456461475014475 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.6936178 deap-1.4.1/doc/tutorials/advanced/0000755000076500000240000000000014456461475016242 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/tutorials/advanced/benchmarking.rst0000644000076500000240000000556614456461441021431 0ustar00runnerstaffBenchmarking Against the Bests (BBOB) ===================================== Once you've created your own algorithm, the structure of DEAP allows you to benchmark it against the best algorithms very easily. The interface of the `Black-Box Optimization Benchmark `_ (BBOB) is compatible with the toolbox. In fact, once your new algorithm is encapsulated in a main function, there is almost nothing else to do on DEAP's side. This tutorial will review the essential steps to bring everything to work with the very basic :ref:`one-fifth`. Preparing the Algorithm ----------------------- The BBOB makes use of many continuous functions on which the algorithm will be tested. These functions are given as an argument to the algorithm. The toolbox shall thus register the evaluation in the main function. The evaluation functions provided by BBOB return a fitness as a single value. 
The first step is to put each fitness in its own tuple, as required by DEAP's philosophy on single objective optimization. We will use a decorator for this. .. literalinclude:: ../../../examples/bbob.py :pyobject: tupleize The algorithm is encapsulated in a main function that receives four arguments: the evaluation function, the dimensionality of the problem, the maximum number of evaluations and the target value to reach. As stated earlier, the toolbox is initialized in the main function with the :func:`update` function (described in the example) and the evaluation function received, which is decorated by our tuple-izer. Then, the target fitness value is encapsulated in a :class:`FitnessMin` object so that we can easily compare the individuals with it. The last step is to define the algorithm, which is explained in the :ref:`one-fifth` example. .. literalinclude:: ../../../examples/bbob.py :pyobject: main Running the Benchmark --------------------- Now that the algorithm is ready, it is time to run it under the BBOB. The following code is taken from the BBOB example with added comments. The :mod:`fgeneric` module provides a :class:`LoggingFunction`, which take care of outputting all necessary data to compare the tested algorithm with the other ones published and to be published. This logger contains the current problem instance and provides the problem target. Since it is responsible of logging each evaluation function call, it is not even needed to save the best individual found by our algorithm (call to the :func:`main` function). The single line that is related to the provided algorithm in the call to the :func:`main` function. .. literalinclude:: ../../../examples/bbob.py :lines: 26,27,28,90-137 Once these experiments are done, the data contained in the :file:`ouput` directory can be used to build the results document. See the `BBOB `_ web site on how to build the document. The complete example is available in the file :example:`bbob`. 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/tutorials/advanced/checkpoint.rst0000644000076500000240000000647514456461441021130 0ustar00runnerstaff============= Checkpointing ============= In this tutorial, we will present how persistence can be achieved in your evolutions. The only required tools are a simple :class:`dict` and a serialization method. Important data will be inserted in the dictionary and serialized to a file so that if something goes wrong, the evolution can be restored from the last saved checkpoint. It can also serve to continue an evolution beyond the pre-fixed termination criterion. Checkpointing is not offered in standard algorithms such as eaSimple, eaMuPlus/CommaLambda and eaGenerateUpdate. You must create your own algorithm (or copy an existing one) and introduce this feature yourself. Starting with a very basic example, we will cover the necessary stuff to checkpoint everything needed to restore an evolution. We skip the class definition and registration of tools in the toolbox to go directly to the algorithm and the main function. Our main function receives an optional string argument containing the path of the checkpoint file to restore. 
:: import pickle def main(checkpoint=None): if checkpoint: # A file name has been given, then load the data from the file with open(checkpoint, "rb") as cp_file: cp = pickle.load(cp_file) population = cp["population"] start_gen = cp["generation"] halloffame = cp["halloffame"] logbook = cp["logbook"] random.setstate(cp["rndstate"]) else: # Start a new evolution population = toolbox.population(n=300) start_gen = 0 halloffame = tools.HallOfFame(maxsize=1) logbook = tools.Logbook() stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("max", numpy.max) for gen in range(start_gen, NGEN): population = algorithms.varAnd(population, toolbox, cxpb=CXPB, mutpb=MUTPB) # Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in population if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit halloffame.update(population) record = stats.compile(population) logbook.record(gen=gen, evals=len(invalid_ind), **record) population = toolbox.select(population, k=len(population)) if gen % FREQ == 0: # Fill the dictionary using the dict(key=value[, ...]) constructor cp = dict(population=population, generation=gen, halloffame=halloffame, logbook=logbook, rndstate=random.getstate()) with open("checkpoint_name.pkl", "wb") as cp_file: pickle.dump(cp, cp_file) Now, the whole data will be written in a pickled dictionary every *FREQ* generations. Loading the checkpoint is done if the main function is given a path to a checkpoint file. In that case, the evolution continues from where it was in the last checkpoint. It will produce the exact same results as if it was not stopped and reloaded because we also restored the random module state. If you use numpy's random numbers, don't forget to save and reload their state too. 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/tutorials/advanced/constraints.rst0000644000076500000240000000655314456461441021345 0ustar00runnerstaffConstraint Handling =================== Evolutionary algorithms are usually unconstrained optimization procedures. In this tutorial, we present several ways of adding different types of constraints to your evolutions. This tutorial is based on the paper by Coello Coello [CoelloCoello2002]_. Penalty Function ----------------- Penalty functions are the most basic way of handling constrains for individuals that cannot be evaluated or are forbidden for problem specific reasons, when falling in a given region. The penalty function gives a fitness disadvantage to these individuals based on the amount of constraint violation in the solution. For example, instead of evaluating an individual violating a constraint, one can assign a desired value to its fitness. The assigned value can be constant or increasing (decreasing for maximization) as the distance to a valid solution increases. The following figure shows the fitness function :math:`g(x)` (in green) and the penalty function :math:`h(x)` (in red) of a one attribute individual, subject to the constraint :math:`3 < x < 7`. The continuous line represent the fitness that is actually assigned to the individual :math:`f(x) = \left\lbrace \begin{array}{cl}g(x) &\mathrm{if}~3 < x < 7\\h(x)&\mathrm{otherwise}\end{array} \right.`. .. image:: /_images/constraints.png :width: 75% :align: center The figure on the left uses a constant offset :math:`h(x) = \Delta` when a constraint is not respected. The center plot uses the euclidean distance in addition to the offset to create a bowl like fitness function :math:`h(x) = \Delta + \sqrt{(x-x_0)^2}`. 
Finally, the right plot uses a quadratic distance function to increase the attraction of the bowl :math:`h(x) = \Delta + (x-x_0)^2`, where :math:`x_0` is the approximate center of the valid zone. In DEAP, a penalty function can be added to any evaluation function using the :class:`~deap.tools.DeltaPenalty` decorator provided in the :mod:`~deap.tools` module. :: from math import sin from deap import base from deap import tools def evalFct(individual): """Evaluation function for the individual.""" x = individual[0] return (x - 5)**2 * sin(x) * (x/3), def feasible(individual): """Feasibility function for the individual. Returns True if feasible False otherwise.""" if 3 < individual[0] < 7: return True return False def distance(individual): """A distance function to the feasibility region.""" return (individual[0] - 5.0)**2 toolbox = base.Toolbox() toolbox.register("evaluate", evalFct) toolbox.decorate("evaluate", tools.DeltaPenalty(feasible, 7.0, distance)) The penalty decorator takes 2 mandatory arguments and an optional one. The first argument is a function returning the validity of an individual according to user defined constraints. The second argument is a constant value (:math:`\Delta`) returned when an individual is not valid. The optional argument is a distance function between an invalid individual and the valid region. This last argument takes on the default value of 0. The last example shows how the right plot of the top image was obtained. References ---------- .. [CoelloCoello2002] Coelle Coello, C. A. *Theoretical and numerical constraint-handling techniques used with evolutionary algorithms: a survey of the state of the art*. Computer Methods in Applied Mechanics and Engineering 191, 1245–1287, 2002. ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/tutorials/advanced/gp.rst0000644000076500000240000002742514456461441017405 0ustar00runnerstaff.. 
_genprogtut: Genetic Programming =================== To see a given function, one can simply print out an individual candidate solution to see the string representation. However, it is possible to generate a figure. Genetic programming is a special field of evolutionary computation that aims at building programs automatically to solve problems independently of their domain. Although there exist diverse representations used to evolve programs, the most common is the syntax tree. .. image:: /_images/gptree.png :align: center For example, the above figure presents the program :math:`\max(x + 3 * y, x + x)`. For this tree and further examples, the leaves of the tree, in green, are called terminals, while the internal nodes, in red, are called primitives. The terminals are divided in two subtypes: the constants and the arguments. The constants remain the same for the entire evolution while the arguments are the program inputs. For the last presented tree, the arguments are the variables :math:`x` and :math:`y`, and the constant is the number :math:`3`. In DEAP, user defined primitives and terminals are contained in a primitive set. For now, two kinds of primitive sets exist: the loosely and the strongly typed. Loosely Typed GP ---------------- Loosely typed GP does not enforce a specific type between the nodes. More specifically, primitives' arguments can be any primitives or terminals present in the primitive set. The following code defines a loosely typed :class:`~deap.gp.PrimitiveSet` for the previous tree :: pset = PrimitiveSet("main", 2) pset.addPrimitive(max, 2) pset.addPrimitive(operator.add, 2) pset.addPrimitive(operator.mul, 2) pset.addTerminal(3) The first line creates a primitive set. Its arguments are the name of the procedure it will generate (``"main"``) and its number of inputs, 2. The next three lines add functions as primitives. The first argument is the function to add and the second argument the function arity_. The last line adds a constant terminal. 
Currently, the default names for the arguments are ``"ARG0"`` and ``"ARG1"``. To change it to ``"x"`` and ``"y"``, simply call :: pset.renameArguments(ARG0="x") pset.renameArguments(ARG1="y") .. _arity: http://en.wikipedia.org/wiki/Arity In this case, all functions take two arguments. Having a 1 argument negation function, for example, could be done with :: pset.addPrimitive(operator.neg, 1) Our primitive set is now ready to generate some trees. The :mod:`~deap.gp` module contains three prefix expression generation functions :func:`~deap.gp.genFull`, :func:`~deap.gp.genGrow`, and :func:`~deap.gp.genHalfAndHalf`. Their first argument is a primitive set. They return a valid prefix expression in the form of a list of primitives. The content of this list can be read by the :class:`~deap.gp.PrimitiveTree` class to create a prefix tree. :: expr = genFull(pset, min_=1, max_=3) tree = PrimitiveTree(expr) The last code produces a valid full tree with height randomly chosen between 1 and 3. Strongly Typed GP ----------------- In strongly typed GP, every primitive and terminal is assigned a specific type. The output type of a primitive must match the input type of another one for them to be connected. For example, if a primitive returns a boolean, it is guaranteed that this value will not be multiplied with a float if the multiplication operator operates only on floats. :: def if_then_else(input, output1, output2): return output1 if input else output2 pset = PrimitiveSetTyped("main", [bool, float], float) pset.addPrimitive(operator.xor, [bool, bool], bool) pset.addPrimitive(operator.mul, [float, float], float) pset.addPrimitive(if_then_else, [bool, float, float], float) pset.addTerminal(3.0, float) pset.addTerminal(1, bool) pset.renameArguments(ARG0="x") pset.renameArguments(ARG1="y") In the last code sample, we first define an *if then else* function that returns the second argument if the first argument is true and the third one otherwise. 
Then, we define our :class:`~deap.gp.PrimitiveSetTyped`. Again, the procedure is named ``"main"``. The second argument defines the input types of the program. Here, ``"x"`` is a :class:`bool` and ``"y"`` is a :class:`float`. The third argument defines the output type of the program as a :class:`float`. Adding primitives to this primitive now requires to set the input and output types of the primitives and terminal. For example, we define our ``"if_then_else"`` function first argument as a boolean, the second and third argument have to be floats. The function is defined as returning a float. We now understand that the multiplication primitive can only have the terminal ``3.0``, the ``if_then_else`` function or the ``"y"`` as input, which are the only floats defined. The previous code can produce the tree on the left but not the one on the right because the type restrictions. .. image:: /_images/gptypedtrees.png :align: center .. note:: The generation of trees is done randomly while making sure type constraints are respected. If any primitive has an input type that no primitive and terminal can provide, chances are that this primitive will be picked and placed in the tree, resulting in the impossibility to complete the tree within the limit fixed by the generator. For example, when generating a full tree of height 2, suppose ``"op"`` takes a boolean and a float, ``"and"`` takes 2 boolean and ``"neg"`` takes a float, no terminal is defined and the arguments are booleans. The following situation will occur where no terminal can be placed to complete the tree. | .. image:: /_images/gptypederrtree.png :align: center In this case, DEAP raises an :class:`IndexError` with the message ``"The gp.generate function tried to add a terminal of type float, but there is none available."`` Ephemeral Constants ------------------- An ephemeral constant is a terminal encapsulating a value that is generated from a given function at run time. 
Ephemeral constants allow to have terminals that don't have all the same values. For example, to create an ephemeral constant that takes its value in :math:`[-1, 1)` we use :: pset.addEphemeralConstant(lambda: random.uniform(-1, 1)) The ephemeral constant value is determined when it is inserted in the tree and never changes unless it is replaced by another ephemeral constant. Since it is a terminal, ephemeral constant can also be typed. :: pset.addEphemeralConstant(lambda: random.randint(-10, 10), int) Generation of Tree Individuals ------------------------------ The code presented in the last two sections produces valid trees. However, as in the :ref:`next-step` tutorial, these trees are not yet valid individuals for evolution. One must combine the creator and the toolbox to produce valid individuals. We need to create the :class:`Fitness` and the :class:`Individual` classes. We add a reference to the primitive set to the :class:`Individual` in addition to the fitness. This is used by some of the gp operators to modify the individuals. :: creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", gp.PrimitiveTree, fitness=creator.FitnessMin, pset=pset) We then register the generation functions into a :class:`~deap.base.Toolbox`. :: toolbox = base.Toolbox() toolbox.register("expr", gp.genFull, pset=pset, min_=1, max_=3) toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.expr) Calling :func:`toolbox.individual` readily returns an individual of type :class:`~deap.gp.PrimitiveTree`. Evaluation of Trees ------------------- In DEAP, trees can be translated to readable Python code and compiled to Python code objects using functions provided by the :py:mod:`~deap.gp` module. The first function, :func:`str` takes an expression or a PrimitiveTree and translates it into readable Python code. For example, the following lines generate a tree and output the code from the first example primitive set. 
:: >>> expr = genFull(pset, min_=1, max_=3) >>> tree = PrimitiveTree(expr) >>> str(tree) 'mul(add(x, x), max(y, x))' Now, this string represents the program we just generated, but it cannot yet be executed. To make it executable, we have to compile the expression into a Python code object. Since this function has two inputs, we wish to compile the code into a callable object. This is possible with :func:`~deap.gp.compile`. The function takes two arguments: the expression to compile and the associated primitive set. The following example compiles the previous tree and evaluates the resulting function for :math:`x=1` and :math:`y=2`. :: >>> function = compile(tree, pset) >>> function(1, 2) 4 When the generated program has no input argument, the expression can be compiled to byte code using the same :func:`~deap.gp.compile` function. An example of this sort of problem is the :ref:`artificial-ant`. Tree Size Limit and Bloat Control --------------------------------- Since DEAP uses the Python parser to compile the code represented by the trees, it inherits its limitations. The most commonly encountered restriction is the parsing stack limit. The Python interpreter parser stack limit is usually fixed between 92 and 99. This means that an expression can at most be composed of 91 succeeding primitives. In other words, a tree can have a maximum depth of 91. When the limit is exceeded, Python raises the following error :: s_push: parser stack overflow Traceback (most recent call last): [...] MemoryError Since this limit is hard-coded in the interpreter, there exists no easy way to increase it. Furthermore, this error commonly stems from a phenomenon known in GP as bloat. That is, the produced individuals have reached a point where they contain too many primitives to effectively solve the problem. This problem leads to evolution stagnation. 
To counteract this, DEAP provides different functions that can effectively restrain the size and height of the trees under an acceptable limit. These operators are listed in the GP section of :ref:`operators`. Plotting Trees -------------- The function :func:`deap.gp.graph` returns the necessary elements to plot tree graphs using `NetworkX `_ or `pygraphviz `_. The graph function takes a valid :class:`~deap.gp.PrimitiveTree` object and returns a node list, an edge list and a dictionary associating a label to each node. It can be used as follows with pygraphviz. :: from deap import base, creator, gp pset = gp.PrimitiveSet("MAIN", 1) pset.addPrimitive(operator.add, 2) pset.addPrimitive(operator.sub, 2) pset.addPrimitive(operator.mul, 2) pset.renameArguments(ARG0='x') creator.create("Individual", gp.PrimitiveTree) toolbox = base.Toolbox() toolbox.register("expr", gp.genHalfAndHalf, pset=pset, min_=1, max_=2) toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.expr) expr = toolbox.individual() nodes, edges, labels = gp.graph(expr) ### Graphviz Section ### import pygraphviz as pgv g = pgv.AGraph() g.add_nodes_from(nodes) g.add_edges_from(edges) g.layout(prog="dot") for i in nodes: n = g.get_node(i) n.attr["label"] = labels[i] g.draw("tree.pdf") Using NetworkX, the last section becomes: :: import matplotlib.pyplot as plt import networkx as nx g = nx.Graph() g.add_nodes_from(nodes) g.add_edges_from(edges) pos = nx.graphviz_layout(g, prog="dot") nx.draw_networkx_nodes(g, pos) nx.draw_networkx_edges(g, pos) nx.draw_networkx_labels(g, pos, labels) plt.show() Depending on the version of graphviz, the nodes may appear in an unpredictable order. Two plots of the same tree may have sibling nodes swapped. This does not affect the primitive tree representation nor the numerical results. How to Evolve Programs ---------------------- The different ways to evolve program trees are presented through the :ref:`gpexamples` examples. 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/tutorials/advanced/numpy.rst0000644000076500000240000001007114456461441020134 0ustar00runnerstaff===================== Inheriting from Numpy ===================== DEAP's :class:`~deap.creator` allows to inherit from :class:`numpy.ndarray` so that individuals can have the properties of the powerful `Numpy `_ library. As with any other base class, inheriting from a :class:`numpy.ndarray` is no more complicated than putting it as a base class. :: import numpy from deap import base, creator creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", numpy.ndarray, fitness=creator.FitnessMax) What You Should be Concerned With! ================================== Inheriting from :class:`numpy.ndarray` is an appealing feature, but some care must be taken regarding validity of the data and performance of the system. Copy and Slicing ---------------- Slicing a :class:`numpy.ndarray` should be done with care. The returned element is a :func:`numpy.ndarray.view` of the original object. This leads to bug prone code when swapping data from one array to another. For example, the two points crossover use the following for swapping data between two lists. :: >>> a = [1,2,3,4] >>> b = [5,6,7,8] >>> a[1:3], b[1:3] = b[1:3], a[1:3] >>> print(a) [1, 6, 7, 4] >>> print(b) [5, 2, 3, 8] With :class:`numpy.array`, the same operation leads to a single resulting individual being changed. :: >>> import numpy >>> a = numpy.array([1,2,3,4]) >>> b = numpy.array([5,6,7,8]) >>> a[1:3], b[1:3] = b[1:3], a[1:3] >>> print(a) [1 6 7 4] >>> print(b) [5 6 7 8] The problem is that, first, the elements in ``a`` are replaced by the elements of the view returned by ``b`` and the element of ``b`` are replaced by the element in the view of ``a`` which are now the one initially in ``b`` leading to the wrong final result. 
One way to circumvent this problem is to explicitly copy the view returned by the ``__getitem__``. :: >>> import numpy >>> a = numpy.array([1,2,3,4]) >>> b = numpy.array([5,6,7,8]) >>> a[1:3], b[1:3] = b[1:3].copy(), a[1:3].copy() >>> print(a) [1 6 7 4] >>> print(b) [5 2 3 8] Thus, care must be taken when inheriting from :class:`numpy.ndarray`; **none** of the operators in the :mod:`~deap.tools` module implement such copying. See the One Max with Numpy example for the complete two points crossover. Comparing Individuals --------------------- When one wants to use a :class:`~deap.tools.HallOfFame` or :class:`~deap.tools.ParetoFront` hall-of-fame, the *similar* function should be changed to a compare all function. Using the regular :func:`operator.eq` function will result in a vector of comparisons :: >>> a = numpy.array([1, 2, 3]) >>> b = numpy.array([1, 2, 3]) >>> operator.eq(a, b) array([ True, True, True], dtype=bool) This cannot be used as a condition :: >>> if operator.eq(a, b): ... print "Gosh!" ... Traceback (most recent call last): File "", line 1, in ValueError: The truth value of an array with more than one element is ambiguous. Use a.any() or a.all() One must replace the *similar* operator by a numpy function like :func:`numpy.array_equal` or :func:`numpy.allclose`. :: hof = tools.HallOfFame(1, similar=numpy.array_equal) Now the condition can be computed and the hall-of-fame will be happy. :: >>> if numpy.array_equal(a, b): ... print "Yeah!" "Yeah!" Performance ----------- If your intent is performance, `DEAP Speed `_ reveals that using an :class:`array.array` should be preferred to :class:`numpy.ndarray`. This is mostly because the creation (also required by the deep copy) of new arrays is longer for the :class:`numpy.array` than for :class:`array.array`. 
What You Don't Need to Know =========================== The creator replaces systematically several functions of the basic :class:`numpy.ndarray` so that - array instances can be created from an iterable; - it deep copies the attributes added in the ``__dict__`` of the object; - pickling includes the dictionary of attributes. See the implementation of :class:`~deap.creator._numpy_array` in the :mod:`~deap.creator` module for more details. ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.6953928 deap-1.4.1/doc/tutorials/basic/0000755000076500000240000000000014456461475015556 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/tutorials/basic/part1.rst0000644000076500000240000003156214456461441017337 0ustar00runnerstaff.. _creating-types: Creating Types ============== This tutorial shows how types are created using the creator and initialized using the toolbox. Fitness ------- The provided :class:`~deap.base.Fitness` class is an abstract class that needs a :attr:`~deap.base.Fitness.weights` attribute in order to be functional. A minimizing fitness is built using negatives weights, while a maximizing fitness has positive weights. For example, the following line creates, in the :mod:`~deap.creator`, a ready to use single objective minimizing fitness named :class:`FitnessMin`. .. literalinclude:: /code/tutorials/part_2/2_1_fitness.py :lines: 6 The :func:`~deap.creator.create` function takes at least two arguments, a name for the newly created class and a base class. Any subsequent argument becomes an attribute of the class. As specified in the :class:`~deap.base.Fitness` documentation, the :attr:`~deap.base.Fitness.weights` attribute must be a tuple so that multi-objective and single objective fitnesses can be treated the same way. A :class:`FitnessMulti` would be created the same way but using: .. 
literalinclude:: /code/tutorials/part_2/2_1_fitness.py :lines: 9 This code produces a fitness that minimizes the first objective and maximizes the second one. The weights can also be used to vary the importance of each objective one against another. This means that the weights can be any real number and only the sign is used to determine if a maximization or minimization is done. An example of where the weights can be useful is in the crowding distance sort made in the NSGA-II selection algorithm. Individual ---------- Simply by thinking about the different flavors of evolutionary algorithms (GA, GP, ES, PSO, DE, ...), we notice that an extremely large variety of individuals are possible, reinforcing the assumption that all types cannot be made available by developers. Here is a guide on how to create some of those individuals using the :mod:`~deap.creator` and initializing them using a :class:`~deap.base.Toolbox`. .. warning:: Before inheriting from :class:`numpy.ndarray` you should **absolutely** read the :doc:`/tutorials/advanced/numpy` tutorial and have a look at the :doc:`/examples/ga_onemax_numpy` example! .. _list-of-floats: List of Floats ++++++++++++++ The first individual created will be a simple list containing floats. In order to produce this kind of individual, we need to create an :class:`Individual` class, using the creator, that will inherit from the standard :class:`list` type and have a :attr:`fitness` attribute. .. Then, we will initialize this list using .. the :func:`~deap.tools.initRepeat` helper function that will repeat ``n`` times .. the float generator that has been registered under the :func:`attr_float` alias .. of the toolbox. Note that the :func:`attr_float` is a direct reference to the .. :func:`~random.random` function. .. 
literalinclude:: /code/tutorials/part_2/2_2_1_list_of_floats.py :lines: 2,5-18 The newly introduced :meth:`~deap.base.Toolbox.register` method takes at least two arguments; an alias and a function assigned to this alias. Any subsequent argument is passed to the function when called (à la :func:`functools.partial`). Thus, the preceding code creates two aliases in the toolbox; ``attr_float`` and ``individual``. The first one redirects to the :func:`random.random` function. The second one is a shortcut to the :func:`~deap.tools.initRepeat` function, fixing its :data:`container` argument to the :class:`creator.Individual` class, its :data:`func` argument to the :func:`toolbox.attr_float` function, and its number of repetitions argument to ``IND_SIZE``. Now, calling :func:`toolbox.individual` will call :func:`~deap.tools.initRepeat` with the fixed arguments and return a complete :class:`creator.Individual` composed of ``IND_SIZE`` floating point numbers with a maximizing single objective :attr:`fitness` attribute. Variations of this type are possible by inheriting from :class:`array.array` or :class:`numpy.ndarray` as following. .. literalinclude:: /code/tutorials/part_2/2_2_1_list_of_floats.py :lines: 20,21 Type inheriting from arrays needs a *typecode* on initialization, just as the original class. .. _permutation: Permutation +++++++++++ An individual for the permutation representation is almost similar to the general list individual. In fact they both inherit from the basic :class:`list` type. The only difference is that instead of filling the list with a series of floats, we need to generate a random permutation and provide that permutation to the individual. .. First, the individual class is created the exact same way as the .. previous one. Then, an :func:`indices` function is added to the toolbox .. referring to the :func:`~random.sample` function. Sample is used instead of .. :func:`~random.shuffle` because the latter does not return the shuffled list. .. 
The indices function returns a complete permutation of the numbers between ``0`` .. and ``IND_SIZE - 1``. Finally, the individual is initialized with the .. :func:`~deap.tools.initIterate` function which gives to the individual an .. iterable of what is produced by the call to the indices function. .. literalinclude:: /code/tutorials/part_2/2_2_2_permutation.py :lines: 2- The first registered function ``indices`` redirects to the :func:`random.sample` function with its arguments fixed to sample ``IND_SIZE`` numbers from the given range. The second registered function ``individual`` is a shortcut to the :func:`~deap.tools.initIterate` function, with its :data:`container` argument set to the :class:`creator.Individual` class and its :data:`generator` argument to the :func:`toolbox.indices` alias. Calling :func:`toolbox.individual` will call :func:`~deap.tools.initIterate` with the fixed arguments and return a complete :class:`creator.Individual` composed of a permutation with a minimizing single objective :attr:`fitness` attribute. .. _arithmetic-expr: Arithmetic Expression +++++++++++++++++++++ The next individual that is commonly used is a prefix tree of mathematical expressions. This time, a :class:`~deap.gp.PrimitiveSet` must be defined containing all possible mathematical operators that our individual can use. Here, the set is called ``MAIN`` and has a single variable defined by the arity_. Operators :func:`~operator.add`, :func:`~operator.sub`, and :func:`~operator.mul` are added to the primitive set with each an arity of 2. Next, the :class:`Individual` class is created as before with the addition of a static attribute :attr:`pset` to remember the global primitive set. This time, the content of the individuals will be generated by the :func:`~deap.gp.genHalfAndHalf` function that generates trees in a list format based on a ramped procedure. 
Once again, the individual is initialized using the :func:`~deap.tools.initIterate` function to give the complete generated iterable to the individual class. .. literalinclude:: /code/tutorials/part_2/2_2_3_arithmetic_expression.py :lines: 2- .. _arity: http://en.wikipedia.org/wiki/Arity Calling :func:`toolbox.individual` will readily return a complete individual that is an arithmetic expression in the form of a prefix tree with a minimizing single objective fitness attribute. Evolution Strategy ++++++++++++++++++ Evolution strategies individuals are slightly different as they contain generally two lists, one for the actual individual and one for its mutation parameters. This time, instead of using the list base class, we will inherit from an :class:`array.array` for both the individual and the strategy. Since there is no helper function to generate two different vectors in a single object, we must define this function ourselves. The :func:`initES` function receives two classes and instantiates them generating itself the random numbers in the ranges provided for individuals of a given size. .. literalinclude:: /code/tutorials/part_2/2_2_4_evolution_strategy.py :lines: 2- Calling :func:`toolbox.individual` will readily return a complete evolution strategy with a strategy vector and a minimizing single objective fitness attribute. Particle ++++++++ A particle is another special type of individual as it usually has a speed and generally remembers its best position. This type of individual is created (once again) the same way as inheriting from a list. This time, :attr:`speed`, :attr:`best` and speed limits (:attr:`smin`, :attr:`smax`) attributes are added to the object. Again, an initialization function :func:`initParticle` is also registered to produce the individual receiving the particle class, size, domain, and speed limits as arguments. .. 
literalinclude:: /code/tutorials/part_2/2_2_5_particle.py :lines: 2- Calling :func:`toolbox.particle` will readily return a complete particle with a speed vector and a fitness attribute for maximizing two objectives. .. _funky: A Funky One +++++++++++ Supposing your problem has very specific needs, it is also possible to build custom individuals very easily. The next individual created is a list of alternating integers and floating point numbers, using the :func:`~deap.tools.initCycle` function. .. literalinclude:: /code/tutorials/part_2/2_2_6_funky_one.py :lines: 2- Calling :func:`toolbox.individual` will readily return a complete individual of the form ``[int float int float ... int float]`` with a maximizing two objectives fitness attribute. .. _population: Population ---------- Populations are much like individuals. Instead of being initialized with attributes, they are filled with individuals, strategies or particles. Bag +++ A bag population is the most commonly used type. It has no particular ordering although it is generally implemented using a list. Since the bag has no particular attribute, it does not need any special class. The population is initialized using the toolbox and the :func:`~deap.tools.initRepeat` function directly. .. literalinclude:: /code/tutorials/part_2/2_3_1_bag.py :lines: 17 Calling :func:`toolbox.population` will readily return a complete population in a list, providing a number of times the repeat helper must be repeated as an argument of the population function. The following example produces a population with 100 individuals. .. literalinclude:: /code/tutorials/part_2/2_3_1_bag.py :lines: 19 Grid ++++ A grid population is a special case of structured population where neighbouring individuals have a direct effect on each other. The individuals are distributed in the grid where each cell contains a single individual. 
However, its implementation only differs from the list of the bag population, in that it is composed of lists of individuals. .. literalinclude:: /code/tutorials/part_2/2_3_2_grid.py :lines: 20-21 Calling :func:`toolbox.population` will readily return a complete population where the individuals are accessible using two indices, for example ``pop[r][c]``. For the moment, there is no algorithm specialized for structured populations, we are awaiting your submissions. Swarm +++++ A swarm is used in particle swarm optimization. It is different in the sense that it contains a communication network. The simplest network is the completely connected one, where each particle knows the best position that has ever been visited by any particle. This is generally implemented by copying that global best position to a :attr:`gbest` attribute and the global best fitness to a :attr:`gbestfit` attribute. .. literalinclude:: /code/tutorials/part_2/2_3_3_swarm.py :lines: 11,23 Calling :func:`toolbox.swarm` will readily return a complete swarm. After each evaluation the :attr:`gbest` and :attr:`gbestfit` should be set by the algorithm to reflect the best found position and fitness. Demes +++++ A deme is a sub-population that is contained in a population. It is similar to an island in the island model. Demes, being only sub-populations, are in fact not different from populations, aside from their names. Here, we create a population containing 3 demes, each having a different number of individuals using the *n* argument of the :func:`~deap.tools.initRepeat` function. .. literalinclude:: /code/tutorials/part_2/2_3_4_demes.py :lines: 17-20 Seeding a Population ++++++++++++++++++++ Sometimes, a first guess population can be used to initialize an evolutionary algorithm. The key idea to initialize a population with non-random individuals is to have an individual initializer that takes a content as argument. .. 
literalinclude:: /code/tutorials/part_2/2_3_5_seeding_a_population.py :lines: 2- The population will be initialized from the file ``my_guess.json`` that shall contain a list of first guess individuals. This initialization can be combined with a regular initialization to have part random and part non-random individuals. Note that the definition of :func:`initIndividual` and the registration of :func:`individual_guess` are optional as the default constructor of a list is similar. Removing those lines leads to the following: :: toolbox.register("population_guess", initPopulation, list, creator.Individual, "my_guess.json") ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/tutorials/basic/part2.rst0000644000076500000240000002535214456461441017340 0ustar00runnerstaff.. _next-step: Operators and Algorithms ======================== Before starting with complex algorithms, we will see some basics of DEAP. First, we will start by creating simple individuals (as seen in the :ref:`creating-types` tutorial) and make them interact with each other using different operators. Afterwards, we will learn how to use the algorithms and other tools. A First Individual ------------------ First import the required modules and register the different functions required to create individuals that are lists of floats with a minimizing two objectives fitness. .. literalinclude:: /code/tutorials/part_3/3_next_step.py :lines: 2-16 The first individual can now be built by adding the appropriate line to the script. .. literalinclude:: /code/tutorials/part_3/3_next_step.py :lines: 18 Printing the individual ``ind1`` and checking if its fitness is valid will give something like this .. literalinclude:: /code/tutorials/part_3/3_next_step.py :lines: 20-21 The individual is printed as its base class representation (here a list) and the fitness is invalid because it contains no values. 
Evaluation ---------- The evaluation is the most personal part of an evolutionary algorithm, it is the only part of the library that you must write yourself. A typical evaluation function takes one individual as argument and returns its fitness as a :class:`tuple`. As shown in the :ref:`core` section, a fitness is a list of floating point values and has a property :attr:`~deap.base.Fitness.valid` to know if this individual shall be re-evaluated. The fitness is set by setting the :attr:`~deap.base.Fitness.values` to the associated :class:`tuple`. For example, the following evaluates the previously created individual ``ind1`` and assigns its fitness to the corresponding values. .. literalinclude:: /code/tutorials/part_3/3_next_step.py :lines: 24-32 Dealing with single objective fitness is not different, the evaluation function **must** return a tuple because single-objective is treated as a special case of multi-objective. Mutation -------- The next kind of operator that we will present is the mutation operator. There is a variety of mutation operators in the :mod:`deap.tools` module. Each mutation has its own characteristics and may be applied to different types of individuals. Be careful to read the documentation of the selected operator in order to avoid undesirable behaviour. The general rule for mutation operators is that they **only** mutate, this means that an independent copy must be made prior to mutating the individual if the original individual has to be kept or is a *reference* to another individual (see the selection operator). In order to apply a mutation (here a gaussian mutation) on the individual ``ind1``, simply apply the desired function. .. literalinclude:: /code/tutorials/part_3/3_next_step.py :lines: 35-37 The fitness' values are deleted because they're not related to the individual anymore. As stated above, the mutation does mutate and only mutate an individual. It is neither responsible of invalidating the fitness nor anything else. 
The following shows that ``ind2`` and ``mutant`` are in fact the same individual. .. literalinclude:: /code/tutorials/part_3/3_next_step.py :lines: 39-40 Crossover --------- The second kind of operator that we will present is the crossover operator. There is a variety of crossover operators in the :mod:`deap.tools` module. Each crossover has its own characteristics and may be applied to different types of individuals. Be careful to read the documentation of the selected operator in order to avoid undesirable behaviour. The general rule for crossover operators is that they **only** mate individuals, this means that an independent copies must be made prior to mating the individuals if the original individuals have to be kept or are *references* to other individuals (see the selection operator). Lets apply a crossover operation to produce the two children that are cloned beforehand. .. literalinclude:: /code/tutorials/part_3/3_next_step.py :lines: 43-46 .. note:: Just as a remark on the language, the form ``toolbox.clone([ind1, ind2])`` cannot be used because if ``ind1`` and ``ind2`` are referring to the same location in memory (the same individual) there will be a single independent copy of the individual and the second one will be a reference to this same independent copy. This is caused by the mechanism that prevents recursive loops. The first time the individual is seen, it is put in the "memo" dictionary, the next time it is seen the deep copy stops for that object and puts a reference to that previously created deep copy. Care should be taken when deep copying containers. Selection --------- Selection is made among a population by the selection operators that are available in the :mod:`deap.tools` module. The selection operator usually takes as first argument an iterable container of individuals and the number of individuals to select. It returns a list containing the references to the selected individuals. The selection is made as follow. .. 
literalinclude:: /code/tutorials/part_3/3_next_step.py :lines: 49-50 .. warning:: It is **very** important here to note that the selection operators does not duplicate any individual during the selection process. If an individual is selected twice and one of either object is modified, the other will also be modified. Only a reference to the individual is copied. Just like every other operator it selects and only selects. Usually duplication of the entire population will be made after selection or before variation. .. literalinclude:: /code/tutorials/part_3/3_next_step.py :lines: 56-57 Using the Toolbox ----------------- The toolbox is intended to contain all the evolutionary tools, from the object initializers to the evaluation operator. It allows easy configuration of each algorithm. The toolbox has basically two methods, :meth:`~deap.toolbox.Toolbox.register` and :meth:`~deap.toolbox.Toolbox.unregister`, that are used to add or remove tools from the toolbox. This part of the tutorial will focus on registration of the evolutionary tools in the toolbox rather than the initialization tools. The usual names for the evolutionary tools are :func:`~deap.mate`, :func:`~deap.mutate`, :func:`~deap.evaluate` and :func:`~deap.select`, however, any name can be registered as long as it is unique. Here is how they are registered in the toolbox. .. literalinclude:: /code/tutorials/part_3/3_6_using_the_toolbox.py :lines: 2-8,10-15 Using the toolbox for registering tools helps with keeping the rest of the algorithms independent from the operator set. Using this scheme makes it very easy to locate and change any tool in the toolbox if needed. .. _using-tools: Using the Tools +++++++++++++++ When building evolutionary algorithms, the toolbox is used to contain the operators, which are called using their generic name. For example, here is a very simple generational evolutionary algorithm. .. 
literalinclude:: /code/tutorials/part_3/3_6_using_the_toolbox.py :lines: 30- This is a complete algorithm. It is generic enough to accept any kind of individual and any operator, as long as the operators are suitable for the chosen individual type. As shown in the last example, the usage of the toolbox allows to write algorithms that are as close as possible to pseudo code. Now it is up to you to write and experiment on your own. Tool Decoration +++++++++++++++ Tool decoration is a very powerful feature that helps to control very precise things during an evolution without changing anything in the algorithm or operators. A decorator is a wrapper that is called instead of a function. It is asked to make some initialization and termination work before and after the actual function is called. For example, in the case of a constrained domain, one can apply a decorator to the mutation and crossover in order to keep any individual from being out-of-bound. The following defines a decorator that checks if any attribute in the list is out-of-bound and clips it if this is the case. The decorator is defined using three functions in order to receive the *min* and *max* arguments. Whenever the mutation or crossover is called, bounds will be checked on the resulting individuals. .. literalinclude:: /code/tutorials/part_3/3_6_2_tool_decoration.py :lines: 8- This will work on crossover and mutation because both return a tuple of individuals. The mutation is often considered to return a single individual but again like for the evaluation, the single individual case is a special case of the multiple individual case. |more| For more information on decorators, see `Introduction to Python Decorators `_ and `Python Decorator Library `_. Variations ---------- Variations allow to build simple algorithms using predefined small building blocks. In order to use a variation, the toolbox must be set to contain the required operators. 
For example in the lastly presented complete algorithm, the crossover and mutation are regrouped in the :func:`~deap.algorithms.varAnd` function, this function requires the toolbox to contain the :func:`~deap.mate` and :func:`~deap.mutate` functions. This variation can be used to simplify the writing of an algorithm as follows. .. literalinclude:: /code/tutorials/part_3/3_7_variations.py :lines: 33- This last example shows that using the variations makes it straight forward to build algorithms that are very close to pseudo code. Algorithms ---------- There are several algorithms implemented in the :mod:`~deap.algorithms` module. They are very simple and reflect the basic types of evolutionary algorithms present in the literature. The algorithms use a :class:`~deap.base.Toolbox` as defined in the last sections. In order to setup a toolbox for an algorithm, you must register the desired operators under the specified names, refer to the documentation of the selected algorithm for more details. Once the toolbox is ready, it is time to launch the algorithm. The simple evolutionary algorithm takes 5 arguments, a *population*, a *toolbox*, a probability of mating each individual at each generation (*cxpb*), a probability of mutating each individual at each generation (*mutpb*) and a number of generations to accomplish (*ngen*). .. literalinclude:: /code/tutorials/part_3/3_8_algorithms.py :lines: 33- The best way to understand what the simple evolutionary algorithm does, is to take a look at the documentation or the source code. Now that you built your own evolutionary algorithm in Python, you are welcome to gives us feedback and appreciation. We would also really like to hear about your project and success stories with DEAP. .. 
|more| image:: /_images/more.png :align: middle :alt: more info ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/tutorials/basic/part3.rst0000644000076500000240000002264314456461441017341 0ustar00runnerstaffComputing Statistics ==================== Often, one wants to compile statistics on what is going on in the optimization. The :class:`~deap.tools.Statistics` are able to compile such data on arbitrary attributes of any designated object. To do that, one needs to register the desired statistic functions inside the stats object using the exact same syntax as in the toolbox. .. literalinclude:: /code/tutorials/part_3/stats.py :lines: 12 The statistics object is created using a key as first argument. This key must be supplied a function that will later be applied to the data on which the statistics are computed. The previous code sample uses the :attr:`fitness.values` attribute of each element. .. literalinclude:: /code/tutorials/part_3/stats.py :lines: 13-16 The statistical functions are now registered. The ``register`` function expects an alias as first argument and a function operating on vectors as second argument. Any subsequent argument is passed to the function when called. The creation of the statistics object is now complete. Predefined Algorithms --------------------- When using a predefined algorithm such as :func:`~deap.algorithms.eaSimple`, :func:`~deap.algorithms.eaMuPlusLambda`, :func:`~deap.algorithms.eaMuCommaLambda`, or :func:`~deap.algorithms.eaGenerateUpdate`, the statistics object previously created can be given as argument to the algorithm. .. literalinclude:: /code/tutorials/part_3/stats.py :lines: 50-51 Statistics will automatically be computed on the population every generation. The verbose argument prints the statistics on screen while the optimization takes place. Once the algorithm returns, the final population and a :class:`~deap.tools.Logbook` are returned. 
See the :ref:`next section ` or the :class:`~deap.tools.Logbook` documentation for more information. Writing Your Own Algorithm -------------------------- When writing your own algorithm, including statistics is very simple. One only needs to compile the statistics on the desired object. For example, compiling the statistics on a given population is done by calling the :meth:`~deap.tools.Statistics.compile` method. .. literalinclude:: /code/tutorials/part_3/stats.py :lines: 38 The argument to the compile function must be an iterable of elements on which the key will be called. Here, our population (``pop``) contains individuals. The statistics object will call the key function on every individual to retrieve their :attr:`fitness.values` attribute. The resulting array of values is finally given to each statistic function and the result is put into the ``record`` dictionary under the key associated with the function. Printing the record reveals its nature. >>> print(record) {'std': 4.96, 'max': 63.0, 'avg': 50.2, 'min': 39.0} How to save and pretty print the statistics is shown in the :ref:`next section `. Multi-objective Statistics -------------------------- As statistics are computed directly on the values with numpy function, all the objectives are combined together by the default behaviour of numpy. Thus, one needs to specify the axis on which to operate. This is achieved by giving the axis as an additional argument to the register function. .. literalinclude:: /code/tutorials/part_3/stats.py :lines: 41-45 One can always specify the axis even in the case of single objective. The only effect is to produce a different output, as the objects are numpy arrays. >>> print(record) {'std': array([ 4.96]), 'max': array([ 63.]), 'avg': array([ 50.2]), 'min': array([ 39.])} Multiple Statistics ------------------- It is also possible to compute statistics on different attributes of the population individuals. 
For instance, it is quite common in genetic programming to have statistics on the height of the trees in addition to their fitness. One can combine multiple :class:`~deap.tools.Statistics` objects in a :class:`~deap.tools.MultiStatistics`. .. literalinclude:: /code/tutorials/part_3/multistats.py :lines: 14-16 Two statistics objects are created in the same way as before. The second object will retrieve the size of the individuals by calling :func:`len` on each of them. Once created, the statistics objects are given to a MultiStatistics one, where the arguments are given using keywords. These keywords will serve to identify the different statistics. The statistical functions can be registered only once in the multi-statistics, as shown below, or individually in each statistics. .. literalinclude:: /code/tutorials/part_3/multistats.py :lines: 17-20 The multi-statistics object can be given to an algorithm or they can be compiled using the exact same procedure as the simple statistics. .. literalinclude:: /code/tutorials/part_3/multistats.py :lines: 54 This time the ``record`` is a dictionary of dictionaries. The first level contains the keywords under which the statistics objects have been registered and the second level is similar to the previous simple statistics object. >>> print(record) {'fitness': {'std': 1.64, 'max': 6.86, 'avg': 1.71, 'min': 0.166}, 'size': {'std': 1.89, 'max': 7, 'avg': 4.54, 'min': 3}} .. _logging: Logging Data ============ Once the data is produced by the statistics (or multi-statistics), one can save it for further use in a :class:`~deap.tools.Logbook`. The logbook is intended to be a chronological sequence of entries (as dictionaries). It is directly compliant with the type of data returned by the statistics objects, but not limited to this data. In fact, anything can be incorporated in an entry of the logbook. .. 
literalinclude:: /code/tutorials/part_3/logbook.py :lines: 7-8 The :meth:`~deap.tools.Logbook.record` method takes a variable number of arguments, each of which is data to be recorded. In the last example, we saved the generation, the number of evaluations and everything contained in the ``record`` produced by a statistics object using the star magic. All records will be kept in the logbook until its destruction. After a number of records, one may want to retrieve the information contained in the logbook. .. literalinclude:: /code/tutorials/part_3/logbook.py :lines: 12 The :meth:`~deap.tools.Logbook.select` method provides a way to retrieve all the information associated with a keyword in all records. This method takes a variable number of string arguments, which are the keywords used in the record or statistics object. Here, we retrieved the generation and the average fitness using a single call to select. A logbook is a picklable object (as long as all inserted objects are picklable) providing a very nice way to save the statistics of an evolution on disk. .. literalinclude:: /code/tutorials/part_3/logbook.py :lines: 1,15 .. note:: Every algorithm returns a logbook containing the statistics for every generation and the number of evaluations for the whole evolution. Printing to Screen ------------------ A logbook can be printed to screen or file. Its :meth:`~deap.tools.Logbook.__str__` method returns a header of each key inserted in the first record and the complete logbook for each of these keys. The rows are in chronological order of insertion while the columns are in an undefined order. The easiest way to specify an order is to set the :attr:`~deap.tools.Logbook.header` attribute to a list of strings specifying the order of the columns. .. 
literalinclude:: /code/tutorials/part_3/logbook.py :lines: 21 The result is:: >>> print(logbook) gen avg spam 0 [ 50.2] A column name containing no entry in a specific record will be left blank as for the ``spam`` column in the last example. A logbook also contains a stream property returning only the yet unprinted entries. :: >>> print(logbook.stream) gen avg spam 0 [ 50.2] >>> logbook.record(gen=1, evals=15, **record) >>> print(logbook.stream) 1 [ 50.2] Dealing with Multi-statistics ----------------------------- The logbook is able to cope with the dictionary of dictionaries return by the :class:`~deap.tools.MultiStatistics` object. In fact, it will log the data in :attr:`~deap.tools.Logbook.chapters` for each sub dictionary contained in the record. Thus, a *multi* record can be used exactly as a record. .. literalinclude:: /code/tutorials/part_3/logbook.py :lines: 30-31 One difference is the column ordering, where we can specify an order for the chapters and their content as follows: .. literalinclude:: /code/tutorials/part_3/logbook.py :lines: 33-35 The resulting output is:: >>> print(logbook) fitness size ------------------------- --------------- gen evals min avg max min avg max 0 30 0.165572 1.71136 6.85956 3 4.54 7 Retrieving the data is also done through the chapters. .. literalinclude:: /code/tutorials/part_3/logbook.py :lines: 39-41 The generations, minimum fitness and average size are obtained, chronologically ordered. If some data is not available, a :data:`None` appears in the vector. Some Plotting Sugar ------------------- One of the most common operations when an optimization is finished is to plot the data during the evolution. The :class:`~deap.tools.Logbook` allows to do this very efficiently. Using the select method, one can retrieve the desired data and plot it using matplotlib. .. literalinclude:: /code/tutorials/part_3/logbook.py :lines: 39-62 When added to the symbolic regression example, it gives the following graphic: .. 
image:: /_images/twin_logbook.png :width: 50% ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/doc/tutorials/basic/part4.rst0000644000076500000240000001236514456461441017342 0ustar00runnerstaff.. _distribution-deap: Using Multiple Processors ========================= This section of the tutorial shows all the work that is needed to distribute operations in DEAP. Distribution relies on serialization of objects which is usually done by pickling, thus all objects that are distributed (functions and arguments, e.g. individuals and parameters) must be pickleable. This means that modifications made to an object on a distant processing unit will not be made available to the other processing units (including the master one) if it is not explicitly communicated through function arguments and return values. Scalable Concurrent Operations in Python (SCOOP) ----------------------------------------------- SCOOP_ is a distributed task module allowing concurrent parallel programming on various environments, from heterogeneous grids to supercomputers. It has an interface similar to the :mod:`concurrent.futures` module introduced in Python 3.2. Its two simple functions :func:`~scoop.futures.submit` and :func:`~scoop.futures.map` allow to distribute computation efficiently and easily over a grid of computers. In the :ref:`second part `, a complete algorithm was exposed with the :func:`toolbox.map` left to the default :func:`map`. In order to distribute the evaluations, we will replace this map by the one from SCOOP. :: from scoop import futures toolbox.register("map", futures.map) Once this line is added, your program absolutely needs to be run from a :func:`main` function as mentioned in the `scoop documentation `_. To run your program, use scoop as the main module. .. code-block:: bash $ python -m scoop your_program.py That is it, your program has been run in parallel on all available processors on your computer. .. 
_SCOOP: http://pyscoop.org/ Multiprocessing Module ---------------------- Using the :mod:`multiprocessing` module is similar to using SCOOP. It can be done by replacing the appropriate function by the distributed one in the toolbox. :: import multiprocessing pool = multiprocessing.Pool() toolbox.register("map", pool.map) # Continue on with the evolutionary algorithm .. warning:: As stated in the :mod:`multiprocessing` guidelines, under Windows, a process pool must be protected in a ``if __name__ == "__main__"`` section because of the way processes are initialized. .. note:: While Python 2.6 is required for the multiprocessing module, the pickling of partial function is possible only since Python 2.7 (or 3.1), earlier version of Python may throw some strange errors when using partial function in the multiprocessing :func:`multiprocessing.Pool.map`. This may be avoided by creating local function outside of the toolbox (in Python version 2.6). .. note:: The pickling of lambda function is not yet available in Python. .. Parallel Evaluation .. ------------------- .. The multiprocessing example shows how to use the :mod:`multiprocessing` module .. in order to enhance the computing power during the evaluations. First the .. toolbox contains a method named :func:`~deap.map`, this method has the same .. function as the built-in :func:`map` function. In order to use the .. multiprocessing module into the built-in :mod:`~deap.algorithms`, the only .. thing to do is to replace the map operation by a parallel one. Then the .. difference between the `Multiprocessing One Max Example .. `_ and the `Regular One .. Max Example `_ is the .. addition of these two lines .. :: .. .. # Process Pool of 4 workers .. pool = multiprocessing.Pool(processes=4) .. tools.register("map", pool.map) .. .. Parallel Variation .. ------------------ .. .. The parallelization of the variation operators is not directly supported in .. the algorithms, although it is still possible. 
What one needs is to create its .. own algorithm (from one in the algorithm module for example) and change the .. desired lines in order to use the :meth:`~deap.toolbox.map` method from the .. toolbox. This may be achieved for example, for the crossover operation from .. the :func:`~deap.algorithms.eaSimple` algorithm by replacing the crossover part .. of the algorithms by .. :: .. .. parents1 = list() .. parents2 = list() .. to_replace = list() .. for i in range(1, len(offspring), 2): .. if random.random() < cxpb: .. parents1.append(offspring[i - 1]) .. parents2.append(offspring[i]) .. to_replace.append(i - 1) .. to_replace.append(i) .. .. children = tools.map(tools.mate, (parents1, parents2)) .. .. for i, child in zip(to_replace, children): .. del child.fitness.values .. offspring[i] = child .. .. Since the multiprocessing map does take a single iterable we must .. bundle/unbundle the parents, respectively by creating a tuple in the .. :func:`tools.map` function of the preceding code example and the following .. decorator on the crossover function. .. :: .. .. def unbundle(func): .. def wrapUnbundle(bundled): .. return func(*bundled) .. return wrapUnbundle .. .. tools.decorate("mate", unbundle) ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1689936700.696318 deap-1.4.1/examples/0000755000076500000240000000000014456461475013520 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/bbob.py0000644000076500000240000001166114456461441014774 0ustar00runnerstaff # This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. 
# # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . import array import math import random import time from itertools import chain from deap import base from deap import creator from deap import benchmarks import fgeneric import bbobbenchmarks as bn creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", array.array, typecode="d", fitness=creator.FitnessMin) def update(individual, mu, sigma): """Update the current *individual* with values from a gaussian centered on *mu* and standard deviation *sigma*. """ for i, mu_i in enumerate(mu): individual[i] = random.gauss(mu_i, sigma) def tupleize(func): """A decorator that tuple-ize the result of a function. This is useful when the evaluation function returns a single value. 
""" def wrapper(*args, **kargs): return func(*args, **kargs), return wrapper def main(func, dim, maxfuncevals, ftarget=None): toolbox = base.Toolbox() toolbox.register("update", update) toolbox.register("evaluate", func) toolbox.decorate("evaluate", tupleize) # Create the desired optimal function value as a Fitness object # for later comparison opt = creator.FitnessMin((ftarget,)) # Interval in which to initialize the optimizer interval = -5, 5 sigma = (interval[1] - interval[0])/2.0 alpha = 2.0**(1.0/dim) # Initialize best randomly and worst as a place holder best = creator.Individual(random.uniform(interval[0], interval[1]) for _ in range(dim)) worst = creator.Individual([0.0] * dim) # Evaluate the first individual best.fitness.values = toolbox.evaluate(best) # Evolve until ftarget is reached or the number of evaluation # is exhausted (maxfuncevals) for g in range(1, maxfuncevals): toolbox.update(worst, best, sigma) worst.fitness.values = toolbox.evaluate(worst) if best.fitness <= worst.fitness: # Increase mutation strength and swap the individual sigma = sigma * alpha best, worst = worst, best else: # Decrease mutation strength sigma = sigma * alpha**(-0.25) # Test if we reached the optimum of the function # Remember that ">" for fitness means better (not greater) if best.fitness > opt: return best return best if __name__ == "__main__": # Maximum number of restart for an algorithm that detects stagnation maxrestarts = 1000 # Create a COCO experiment that will log the results under the # ./output directory e = fgeneric.LoggingFunction("output") # Iterate over all desired test dimensions for dim in (2, 3, 5, 10, 20, 40): # Set the maximum number function evaluation granted to the algorithm # This is usually function of the dimensionality of the problem maxfuncevals = 100 * dim**2 minfuncevals = dim + 2 # Iterate over a set of benchmarks (noise free benchmarks here) for f_name in bn.nfreeIDs: # Iterate over all the instance of a single problem # Rotation, 
translation, etc. for instance in chain(range(1, 6), range(21, 31)): # Set the function to be used (problem) in the logger e.setfun(*bn.instantiate(f_name, iinstance=instance)) # Independent restarts until maxfunevals or ftarget is reached for restarts in range(0, maxrestarts + 1): if restarts > 0: # Signal the experiment that the algorithm restarted e.restart('independent restart') # additional info # Run the algorithm with the remaining number of evaluations revals = int(math.ceil(maxfuncevals - e.evaluations)) main(e.evalfun, dim, revals, e.ftarget) # Stop if ftarget is reached if e.fbest < e.ftarget or e.evaluations + minfuncevals > maxfuncevals: break e.finalizerun() print('f%d in %d-D, instance %d: FEs=%d with %d restarts, ' 'fbest-ftarget=%.4e' % (f_name, dim, instance, e.evaluations, restarts, e.fbest - e.ftarget)) print('date and time: %s' % time.asctime()) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.6997433 deap-1.4.1/examples/coev/0000755000076500000240000000000014456461475014454 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/coev/coop_adapt.py0000644000076500000240000001153014456461441017130 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . """This example contains the adaptation test from *Potter, M. 
and De Jong, K., 2001, Cooperative Coevolution: An Architecture for Evolving Co-adapted Subcomponents.* section 4.2.3. A species is added each 100 generations. """ import random try: import matplotlib.pyplot as plt except ImportError: plt = False import numpy from deap import algorithms from deap import tools import coop_base IND_SIZE = coop_base.IND_SIZE SPECIES_SIZE = coop_base.SPECIES_SIZE TARGET_SIZE = 30 NUM_SPECIES = 1 noise = "*##*###*###*****##*##****#*##*###*#****##******##*#**#*#**######" schematas = ("1##1###1###11111##1##1111#1##1###1#1111##111111##1#11#1#11######", "1##1###1###11111##1##1000#0##0###0#0000##000000##0#00#0#00######", "0##0###0###00000##0##0000#0##0###0#0000##001111##1#11#1#11######") toolbox = coop_base.toolbox if plt: toolbox.register("evaluate_nonoise", coop_base.matchSetStrengthNoNoise) def main(extended=True, verbose=True): target_set = [] stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) logbook = tools.Logbook() logbook.header = "gen", "species", "evals", "std", "min", "avg", "max" ngen = 300 adapt_length = 100 g = 0 add_next = [adapt_length] for i in range(len(schematas)): target_set.extend(toolbox.target_set(schematas[i], int(TARGET_SIZE/len(schematas)))) species = [toolbox.species() for _ in range(NUM_SPECIES)] # Init with random a representative for each species representatives = [random.choice(s) for s in species] if plt and extended: # We must save the match strength to plot them t1, t2, t3 = list(), list(), list() while g < ngen: # Initialize a container for the next generation representatives next_repr = [None] * len(species) for i, s in enumerate(species): # Vary the species individuals s = algorithms.varAnd(s, toolbox, 0.6, 1.0) r = representatives[:i] + representatives[i+1:] for ind in s: ind.fitness.values = toolbox.evaluate([ind] + r, target_set) record = stats.compile(s) 
logbook.record(gen=g, species=i, evals=len(s), **record) if verbose: print(logbook.stream) # Select the individuals species[i] = toolbox.select(s, len(s)) # Tournament selection next_repr[i] = toolbox.get_best(s)[0] # Best selection g += 1 if plt and extended: # Compute the match strength without noise for the # representatives on the three schematas t1.append(toolbox.evaluate_nonoise(representatives, toolbox.target_set(schematas[0], 1), noise)[0]) t2.append(toolbox.evaluate_nonoise(representatives, toolbox.target_set(schematas[1], 1), noise)[0]) t3.append(toolbox.evaluate_nonoise(representatives, toolbox.target_set(schematas[2], 1), noise)[0]) representatives = next_repr # Add a species at every *adapt_length* generation if add_next[-1] <= g < ngen: species.append(toolbox.species()) representatives.append(random.choice(species[-1])) add_next.append(add_next[-1] + adapt_length) if extended: for r in representatives: # print individuals without noise print("".join(str(x) for x, y in zip(r, noise) if y == "*")) if plt and extended: # Do the final plotting plt.plot(t1, '-', color="k", label="Target 1") plt.plot(t2, '--', color="k", label="Target 2") plt.plot(t3, ':', color="k", label="Target 3") max_t = max(max(t1), max(t2), max(t3)) for n in add_next: plt.plot([n, n], [0, max_t + 1], "--", color="k") plt.legend(loc="lower right") plt.axis([0, ngen, 0, max_t + 1]) plt.xlabel("Generations") plt.ylabel("Number of matched bits") plt.show() if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/coev/coop_base.py0000644000076500000240000000735614456461441016764 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. 
# # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . """This is the base code for all four coevolution examples from *Potter, M. and De Jong, K., 2001, Cooperative Coevolution: An Architecture for Evolving Co-adapted Subcomponents.* section 4.2. It shows in a concrete manner how to re-use initialization code in some other examples. """ import random from deap import base from deap import creator from deap import tools IND_SIZE = 64 SPECIES_SIZE = 50 def initTargetSet(schemata, size): """Initialize a target set with noisy string to match based on the schematas provided. """ test_set = [] for _ in range(size): test = list(random.randint(0, 1) for _ in range(len(schemata))) for i, x in enumerate(schemata): if x == "0": test[i] = 0 elif x == "1": test[i] = 1 test_set.append(test) return test_set def matchStrength(x, y): """Compute the match strength for the individual *x* on the string *y*. """ return sum(xi == yi for xi, yi in zip(x, y)) def matchStrengthNoNoise(x, y, n): """Compute the match strength for the individual *x* on the string *y* excluding noise *n*. """ return sum(xi == yi for xi, yi, ni in zip(x, y, n) if ni != "#") def matchSetStrength(match_set, target_set): """Compute the match strength of a set of strings on the target set of strings. The strength is the maximum of all match string on each target. """ sum = 0.0 for t in target_set: sum += max(matchStrength(m, t) for m in match_set) return sum / len(target_set), def matchSetStrengthNoNoise(match_set, target_set, noise): """Compute the match strength of a set of strings on the target set of strings. The strength is the maximum of all match string on each target excluding noise. 
""" sum = 0.0 for t in target_set: sum += max(matchStrengthNoNoise(m, t, noise) for m in match_set) return sum / len(target_set), def matchSetContribution(match_set, target_set, index): """Compute the contribution of the string at *index* in the match set. """ contribution = 0.0 for t in target_set: match = -float("inf") id = -1 for i, m in enumerate(match_set): v = matchStrength(m, t) if v > match: match = v id = i if id == index: contribution += match return contribution / len(target_set), creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", list, fitness=creator.FitnessMax) toolbox = base.Toolbox() toolbox.register("bit", random.randint, 0, 1) toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.bit, IND_SIZE) toolbox.register("species", tools.initRepeat, list, toolbox.individual, SPECIES_SIZE) toolbox.register("target_set", initTargetSet) toolbox.register("mate", tools.cxTwoPoint) toolbox.register("mutate", tools.mutFlipBit, indpb=1./IND_SIZE) toolbox.register("select", tools.selTournament, tournsize=3) toolbox.register("get_best", tools.selBest, k=1) toolbox.register("evaluate", matchSetStrength) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/coev/coop_evol.py0000644000076500000240000001400614456461441017005 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. 
If not, see . """This example contains the evolving test from *Potter, M. and De Jong, K., 2001, Cooperative Coevolution: An Architecture for Evolving Co-adapted Subcomponents.* section 4.2.4. The number of species is evolved by adding and removing species as stagnation occurs. """ import random try: import matplotlib.pyplot as plt plt.figure() except: plt = False import numpy from deap import algorithms from deap import tools import coop_base IND_SIZE = coop_base.IND_SIZE SPECIES_SIZE = coop_base.SPECIES_SIZE NUM_SPECIES = 1 TARGET_SIZE = 30 IMPROVMENT_TRESHOLD = 0.5 IMPROVMENT_LENGTH = 5 EXTINCTION_TRESHOLD = 5.0 noise = "*##*###*###*****##*##****#*##*###*#****##******##*#**#*#**######" schematas = ("1##1###1###11111##1##1111#1##1###1#1111##111111##1#11#1#11######", "1##1###1###11111##1##1000#0##0###0#0000##000000##0#00#0#00######", "0##0###0###00000##0##0000#0##0###0#0000##001111##1#11#1#11######") toolbox = coop_base.toolbox toolbox.register("evaluateContribution", coop_base.matchSetContribution) def main(extended=True, verbose=True): target_set = [] species = [] stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) logbook = tools.Logbook() logbook.header = "gen", "species", "evals", "std", "min", "avg", "max" ngen = 300 g = 0 for i in range(len(schematas)): size = int(TARGET_SIZE/len(schematas)) target_set.extend(toolbox.target_set(schematas[i], size)) species = [toolbox.species() for _ in range(NUM_SPECIES)] species_index = list(range(NUM_SPECIES)) last_index_added = species_index[-1] # Init with random a representative for each species representatives = [random.choice(species[i]) for i in range(NUM_SPECIES)] best_fitness_history = [None] * IMPROVMENT_LENGTH if plt and extended: contribs = [[]] stag_gen = [] collab = [] while g < ngen: # Initialize a container for the next generation representatives next_repr = [None] * 
len(species) for (i, s), j in zip(enumerate(species), species_index): # Vary the species individuals s = algorithms.varAnd(s, toolbox, 0.6, 1.0) # Get the representatives excluding the current species r = representatives[:i] + representatives[i+1:] for ind in s: # Evaluate and set the individual fitness ind.fitness.values = toolbox.evaluate([ind] + r, target_set) record = stats.compile(s) logbook.record(gen=g, species=j, evals=len(s), **record) if verbose: print(logbook.stream) # Select the individuals species[i] = toolbox.select(s, len(s)) # Tournament selection next_repr[i] = toolbox.get_best(s)[0] # Best selection if plt and extended: # Book keeping of the collaborative fitness collab.append(next_repr[i].fitness.values[0]) g += 1 representatives = next_repr # Keep representatives fitness for stagnation detection best_fitness_history.pop(0) best_fitness_history.append(representatives[0].fitness.values[0]) try: diff = best_fitness_history[-1] - best_fitness_history[0] except TypeError: diff = float("inf") if plt and extended: for (i, rep), j in zip(enumerate(representatives), species_index): contribs[j].append((toolbox.evaluateContribution(representatives, target_set, i)[0], g-1)) if diff < IMPROVMENT_TRESHOLD: if len(species) > 1: contributions = [] for i in range(len(species)): contributions.append(toolbox.evaluateContribution(representatives, target_set, i)[0]) for i in reversed(range(len(species))): if contributions[i] < EXTINCTION_TRESHOLD: species.pop(i) species_index.pop(i) representatives.pop(i) last_index_added += 1 best_fitness_history = [None] * IMPROVMENT_LENGTH species.append(toolbox.species()) species_index.append(last_index_added) representatives.append(random.choice(species[-1])) if extended and plt: stag_gen.append(g-1) contribs.append([]) if extended: for r in representatives: # print final representatives without noise print("".join(str(x) for x, y in zip(r, noise) if y == "*")) if extended and plt: # Plotting of the evolution line1, = 
plt.plot(collab, "--", color="k") for con in contribs: try: con, g = zip(*con) line2, = plt.plot(g, con, "-", color="k") except ValueError: pass axis = plt.axis("tight") for s in stag_gen: plt.plot([s, s], [0, axis[-1]], "--", color="k") plt.legend((line1, line2), ("Collaboration", "Contribution"), loc="center right") plt.xlabel("Generations") plt.ylabel("Fitness") plt.show() if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/coev/coop_gen.py0000644000076500000240000001123214456461441016607 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . """This example contains the generalizing test from *Potter, M. and De Jong, K., 2001, Cooperative Coevolution: An Architecture for Evolving Co-adapted Subcomponents.* section 4.2.2. Varying the *NUM_SPECIES* in :math:`[1, \ldots, 4]` will produce the results for one to four species respectively. 
""" import random try: import matplotlib.pyplot as plt except ImportError: plt = False import numpy from deap import algorithms from deap import tools import coop_base IND_SIZE = coop_base.IND_SIZE SPECIES_SIZE = coop_base.SPECIES_SIZE NUM_SPECIES = 4 TARGET_SIZE = 30 noise = "*##*###*###*****##*##****#*##*###*#****##******##*#**#*#**######" schematas = ("1##1###1###11111##1##1111#1##1###1#1111##111111##1#11#1#11######", "1##1###1###11111##1##1000#0##0###0#0000##000000##0#00#0#00######", "0##0###0###00000##0##0000#0##0###0#0000##001111##1#11#1#11######") toolbox = coop_base.toolbox if plt: # This will allow to plot the match strength of every target schemata toolbox.register("evaluate_nonoise", coop_base.matchSetStrengthNoNoise) def main(extended=True, verbose=True): target_set = [] stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) logbook = tools.Logbook() logbook.header = "gen", "species", "evals", "std", "min", "avg", "max" ngen = 150 g = 0 for i in range(len(schematas)): size = int(TARGET_SIZE/len(schematas)) target_set.extend(toolbox.target_set(schematas[i], size)) species = [toolbox.species() for _ in range(NUM_SPECIES)] # Init with random a representative for each species representatives = [random.choice(s) for s in species] if plt and extended: # We must save the match strength to plot them t1, t2, t3 = list(), list(), list() while g < ngen: # Initialize a container for the next generation representatives next_repr = [None] * len(species) for i, s in enumerate(species): # Vary the species individuals s = algorithms.varAnd(s, toolbox, 0.6, 1.0) # Get the representatives excluding the current species r = representatives[:i] + representatives[i+1:] for ind in s: ind.fitness.values = toolbox.evaluate([ind] + r, target_set) record = stats.compile(s) logbook.record(gen=g, species=i, evals=len(s), **record) if verbose: 
print(logbook.stream) # Select the individuals species[i] = toolbox.select(s, len(s)) # Tournament selection next_repr[i] = toolbox.get_best(s)[0] # Best selection g += 1 if plt and extended: # Compute the match strength without noise for the # representatives on the three schematas t1.append(toolbox.evaluate_nonoise(representatives, toolbox.target_set(schematas[0], 1), noise)[0]) t2.append(toolbox.evaluate_nonoise(representatives, toolbox.target_set(schematas[1], 1), noise)[0]) t3.append(toolbox.evaluate_nonoise(representatives, toolbox.target_set(schematas[2], 1), noise)[0]) representatives = next_repr if extended: for r in representatives: # print individuals without noise print("".join(str(x) for x, y in zip(r, noise) if y == "*")) if plt and extended: # Do the final plotting plt.plot(t1, '-', color="k", label="Target 1") plt.plot(t2, '--', color="k", label="Target 2") plt.plot(t3, ':', color="k", label="Target 3") plt.legend(loc="lower right") plt.axis([0, ngen, 0, max(max(t1), max(t2), max(t3)) + 1]) plt.xlabel("Generations") plt.ylabel("Number of matched bits") plt.show() if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/coev/coop_niche.py0000644000076500000240000000644514456461441017136 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . 
"""This example contains the niching test from *Potter, M. and De Jong, K., 2001, Cooperative Coevolution: An Architecture for Evolving Co-adapted Subcomponents.* section 4.2.1. Varying the *TARGET_TYPE* in :math:`\\lbrace 2, 4, 8 \\rbrace` will produce the results for the half-, quarter- and eight-length schematas. """ import random import numpy from deap import algorithms from deap import tools import coop_base IND_SIZE = coop_base.IND_SIZE SPECIES_SIZE = coop_base.SPECIES_SIZE TARGET_SIZE = 200 TARGET_TYPE = 2 def nicheSchematas(type, size): """Produce the desired schemata based on the type required, 2 for half length, 4 for quarter length and 8 for eight length. """ rept = int(size/type) return ["#" * (i*rept) + "1" * rept + "#" * ((type-i-1)*rept) for i in range(type)] toolbox = coop_base.toolbox def main(extended=True, verbose=True): target_set = [] species = [] stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) logbook = tools.Logbook() logbook.header = "gen", "species", "evals", "std", "min", "avg", "max" ngen = 200 g = 0 schematas = nicheSchematas(TARGET_TYPE, IND_SIZE) for i in range(TARGET_TYPE): size = int(TARGET_SIZE/TARGET_TYPE) target_set.extend(toolbox.target_set(schematas[i], size)) species.append(toolbox.species()) # Init with a random representative for each species representatives = [random.choice(s) for s in species] while g < ngen: # Initialize a container for the next generation representatives next_repr = [None] * len(species) for i, s in enumerate(species): # Vary the species individuals s = algorithms.varAnd(s, toolbox, 0.6, 1.0) # Get the representatives excluding the current species r = representatives[:i] + representatives[i+1:] for ind in s: ind.fitness.values = toolbox.evaluate([ind] + r, target_set) record = stats.compile(s) logbook.record(gen=g, species=i, evals=len(s), **record) if verbose: 
print(logbook.stream) # Select the individuals species[i] = toolbox.select(s, len(s)) # Tournament selection next_repr[i] = toolbox.get_best(s)[0] # Best selection g += 1 representatives = next_repr if extended: for r in representatives: print("".join(str(x) for x in r)) if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/coev/hillis.py0000644000076500000240000001305714456461441016311 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . 
import random import sys import numpy from deap import algorithms from deap import base from deap import creator from deap import tools sys.path.append("../ga") import sortingnetwork as sn INPUTS = 12 def evalNetwork(host, parasite, dimension): network = sn.SortingNetwork(dimension, host) return network.assess(parasite), def genWire(dimension): return (random.randrange(dimension), random.randrange(dimension)) def genNetwork(dimension, min_size, max_size): size = random.randint(min_size, max_size) return [genWire(dimension) for i in range(size)] def getParasite(dimension): return [random.choice((0, 1)) for i in range(dimension)] def mutNetwork(individual, dimension, mutpb, addpb, delpb, indpb): if random.random() < mutpb: for index, elem in enumerate(individual): if random.random() < indpb: individual[index] = genWire(dimension) if random.random() < addpb: index = random.randint(0, len(individual)) individual.insert(index, genWire(dimension)) if random.random() < delpb: index = random.randrange(len(individual)) del individual[index] return individual, def mutParasite(individual, indmut, indpb): for i in individual: if random.random() < indpb: indmut(i) return individual, creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Host", list, fitness=creator.FitnessMin) creator.create("Parasite", list, fitness=creator.FitnessMax) htoolbox = base.Toolbox() ptoolbox = base.Toolbox() htoolbox.register("network", genNetwork, dimension=INPUTS, min_size=9, max_size=12) htoolbox.register("individual", tools.initIterate, creator.Host, htoolbox.network) htoolbox.register("population", tools.initRepeat, list, htoolbox.individual) ptoolbox.register("parasite", getParasite, dimension=INPUTS) ptoolbox.register("individual", tools.initRepeat, creator.Parasite, ptoolbox.parasite, 20) ptoolbox.register("population", tools.initRepeat, list, ptoolbox.individual) htoolbox.register("evaluate", evalNetwork, 
dimension=INPUTS) htoolbox.register("mate", tools.cxTwoPoint) htoolbox.register("mutate", mutNetwork, dimension=INPUTS, mutpb=0.2, addpb=0.01, delpb=0.01, indpb=0.05) htoolbox.register("select", tools.selTournament, tournsize=3) ptoolbox.register("mate", tools.cxTwoPoint) ptoolbox.register("indMutate", tools.mutFlipBit, indpb=0.05) ptoolbox.register("mutate", mutParasite, indmut=ptoolbox.indMutate, indpb=0.05) ptoolbox.register("select", tools.selTournament, tournsize=3) def cloneHost(individual): """Specialized copy function that will work only on a list of tuples with no other member than a fitness. """ clone = individual.__class__(individual) clone.fitness.values = individual.fitness.values return clone def cloneParasite(individual): """Specialized copy function that will work only on a list of lists with no other member than a fitness. """ clone = individual.__class__(list(seq) for seq in individual) clone.fitness.values = individual.fitness.values return clone htoolbox.register("clone", cloneHost) ptoolbox.register("clone", cloneParasite) def main(): random.seed(64) hosts = htoolbox.population(n=300) parasites = ptoolbox.population(n=300) hof = tools.HallOfFame(1) hstats = tools.Statistics(lambda ind: ind.fitness.values) hstats.register("avg", numpy.mean) hstats.register("std", numpy.std) hstats.register("min", numpy.min) hstats.register("max", numpy.max) logbook = tools.Logbook() logbook.header = "gen", "evals", "std", "min", "avg", "max" MAXGEN = 50 H_CXPB, H_MUTPB = 0.5, 0.3 P_CXPB, P_MUTPB = 0.5, 0.3 fits = htoolbox.map(htoolbox.evaluate, hosts, parasites) for host, parasite, fit in zip(hosts, parasites, fits): host.fitness.values = parasite.fitness.values = fit hof.update(hosts) record = hstats.compile(hosts) logbook.record(gen=0, evals=len(hosts), **record) print(logbook.stream) for g in range(1, MAXGEN): hosts = htoolbox.select(hosts, len(hosts)) parasites = ptoolbox.select(parasites, len(parasites)) hosts = algorithms.varAnd(hosts, htoolbox, H_CXPB, 
H_MUTPB) parasites = algorithms.varAnd(parasites, ptoolbox, P_CXPB, P_MUTPB) fits = htoolbox.map(htoolbox.evaluate, hosts, parasites) for host, parasite, fit in zip(hosts, parasites, fits): host.fitness.values = parasite.fitness.values = fit hof.update(hosts) record = hstats.compile(hosts) logbook.record(gen=g, evals=len(hosts), **record) print(logbook.stream) best_network = sn.SortingNetwork(INPUTS, hof[0]) print(best_network) print(best_network.draw()) print("%i errors" % best_network.assess()) return hosts, logbook, hof if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/coev/symbreg.py0000644000076500000240000001067614456461441016501 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . 
import random import sys import numpy from deap import base from deap import creator from deap import tools # GP example "symbreg.py" already defines some useful structures sys.path.append("..") import gp.symbreg as symbreg creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("IndGA", list, fitness=creator.FitnessMax) toolbox_ga = base.Toolbox() toolbox_ga.register("float", random.uniform, -1, 1) toolbox_ga.register("individual", tools.initRepeat, creator.IndGA, toolbox_ga.float, 10) toolbox_ga.register("population", tools.initRepeat, list, toolbox_ga.individual) toolbox_ga.register("select", tools.selTournament, tournsize=3) toolbox_ga.register("mate", tools.cxTwoPoint) toolbox_ga.register("mutate", tools.mutGaussian, mu=0, sigma=0.01, indpb=0.05) toolbox_gp = symbreg.toolbox def main(): pop_ga = toolbox_ga.population(n=200) pop_gp = toolbox_gp.population(n=200) stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) logbook = tools.Logbook() logbook.header = "gen", "type", "evals", "std", "min", "avg", "max" best_ga = tools.selRandom(pop_ga, 1)[0] best_gp = tools.selRandom(pop_gp, 1)[0] for ind in pop_gp: ind.fitness.values = toolbox_gp.evaluate(ind, points=best_ga) for ind in pop_ga: ind.fitness.values = toolbox_gp.evaluate(best_gp, points=ind) record = stats.compile(pop_ga) logbook.record(gen=0, type='ga', evals=len(pop_ga), **record) record = stats.compile(pop_gp) logbook.record(gen=0, type='gp', evals=len(pop_gp), **record) print(logbook.stream) CXPB, MUTPB, NGEN = 0.5, 0.2, 50 # Begin the evolution for g in range(1, NGEN): # Select and clone the offspring off_ga = toolbox_ga.select(pop_ga, len(pop_ga)) off_gp = toolbox_gp.select(pop_gp, len(pop_gp)) off_ga = [toolbox_ga.clone(ind) for ind in off_ga] off_gp = [toolbox_gp.clone(ind) for ind in off_gp] # Apply crossover and mutation for ind1, ind2 in 
zip(off_ga[::2], off_ga[1::2]): if random.random() < CXPB: toolbox_ga.mate(ind1, ind2) del ind1.fitness.values del ind2.fitness.values for ind1, ind2 in zip(off_gp[::2], off_gp[1::2]): if random.random() < CXPB: toolbox_gp.mate(ind1, ind2) del ind1.fitness.values del ind2.fitness.values for ind in off_ga: if random.random() < MUTPB: toolbox_ga.mutate(ind) del ind.fitness.values for ind in off_gp: if random.random() < MUTPB: toolbox_gp.mutate(ind) del ind.fitness.values # Evaluate the individuals with an invalid fitness for ind in off_ga: ind.fitness.values = toolbox_gp.evaluate(best_gp, points=ind) for ind in off_gp: ind.fitness.values = toolbox_gp.evaluate(ind, points=best_ga) # Replace the old population by the offspring pop_ga = off_ga pop_gp = off_gp record = stats.compile(pop_ga) logbook.record(gen=g, type='ga', evals=len(pop_ga), **record) record = stats.compile(pop_gp) logbook.record(gen=g, type='gp', evals=len(pop_gp), **record) print(logbook.stream) best_ga = tools.selBest(pop_ga, 1)[0] best_gp = tools.selBest(pop_gp, 1)[0] print("Best individual GA is %s, %s" % (best_ga, best_ga.fitness.values)) print("Best individual GP is %s, %s" % (best_gp, best_gp.fitness.values)) return pop_ga, pop_gp, best_ga, best_gp, logbook if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.7014453 deap-1.4.1/examples/de/0000755000076500000240000000000014456461475014110 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/de/basic.py0000644000076500000240000000540014456461441015533 0ustar00runnerstaff# This file is part of EAP. # # EAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. 
# # EAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with EAP. If not, see . import random import array import numpy from deap import base from deap import benchmarks from deap import creator from deap import tools # Problem dimension NDIM = 10 creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", array.array, typecode='d', fitness=creator.FitnessMin) toolbox = base.Toolbox() toolbox.register("attr_float", random.uniform, -3, 3) toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_float, NDIM) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("select", tools.selRandom, k=3) toolbox.register("evaluate", benchmarks.sphere) def main(): # Differential evolution parameters CR = 0.25 F = 1 MU = 300 NGEN = 200 pop = toolbox.population(n=MU); hof = tools.HallOfFame(1) stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) logbook = tools.Logbook() logbook.header = "gen", "evals", "std", "min", "avg", "max" # Evaluate the individuals fitnesses = toolbox.map(toolbox.evaluate, pop) for ind, fit in zip(pop, fitnesses): ind.fitness.values = fit record = stats.compile(pop) logbook.record(gen=0, evals=len(pop), **record) print(logbook.stream) for g in range(1, NGEN): for k, agent in enumerate(pop): a,b,c = toolbox.select(pop) y = toolbox.clone(agent) index = random.randrange(NDIM) for i, value in enumerate(agent): if i == index or random.random() < CR: y[i] = a[i] + F*(b[i]-c[i]) y.fitness.values = toolbox.evaluate(y) if y.fitness > agent.fitness: pop[k] = y hof.update(pop) 
record = stats.compile(pop) logbook.record(gen=g, evals=len(pop), **record) print(logbook.stream) print("Best individual is ", hof[0], hof[0].fitness.values[0]) if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/de/dynamic.py0000644000076500000240000001325714456461441016107 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . 
"""Implementation of the Dynamic Differential Evolution algorithm as presented in *Mendes and Mohais, 2005, DynDE: A Differential Evolution for Dynamic Optimization Problems.* """ import array import itertools import math import operator import random import numpy from deap import base from deap.benchmarks import movingpeaks from deap import creator from deap import tools scenario = movingpeaks.SCENARIO_2 NDIM = 5 BOUNDS = [scenario["min_coord"], scenario["max_coord"]] mpb = movingpeaks.MovingPeaks(dim=NDIM, **scenario) def brown_ind(iclass, best, sigma): return iclass(random.gauss(x, sigma) for x in best) creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", array.array, typecode='d', fitness=creator.FitnessMax) toolbox = base.Toolbox() toolbox.register("attr_float", random.uniform, BOUNDS[0], BOUNDS[1]) toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_float, NDIM) toolbox.register("brownian_individual", brown_ind, creator.Individual, sigma=0.3) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("select", random.sample, k=4) toolbox.register("best", tools.selBest, k=1) toolbox.register("evaluate", mpb) def main(verbose=True): NPOP = 10 # Should be equal to the number of peaks CR = 0.6 F = 0.4 regular, brownian = 4, 2 stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) logbook = tools.Logbook() logbook.header = "gen", "evals", "error", "offline_error", "avg", "max" # Initialize populations populations = [toolbox.population(n=regular + brownian) for _ in range(NPOP)] # Evaluate the individuals for idx, subpop in enumerate(populations): fitnesses = toolbox.map(toolbox.evaluate, subpop) for ind, fit in zip(subpop, fitnesses): ind.fitness.values = fit record = stats.compile(itertools.chain(*populations)) logbook.record(gen=0, 
evals=mpb.nevals, error=mpb.currentError(), offline_error=mpb.offlineError(), **record) if verbose: print(logbook.stream) g = 1 while mpb.nevals < 5e5: # Detect a change and invalidate fitnesses if necessary bests = [toolbox.best(subpop)[0] for subpop in populations] if any(b.fitness.values != toolbox.evaluate(b) for b in bests): for individual in itertools.chain(*populations): del individual.fitness.values # Apply exclusion rexcl = (BOUNDS[1] - BOUNDS[0]) / (2 * NPOP**(1.0/NDIM)) for i, j in itertools.combinations(range(NPOP), 2): if bests[i].fitness.valid and bests[j].fitness.valid: d = sum((bests[i][k] - bests[j][k])**2 for k in range(NDIM)) d = math.sqrt(d) if d < rexcl: if bests[i].fitness < bests[j].fitness: k = i else: k = j populations[k] = toolbox.population(n=regular + brownian) # Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in itertools.chain(*populations) if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit record = stats.compile(itertools.chain(*populations)) logbook.record(gen=g, evals=mpb.nevals, error=mpb.currentError(), offline_error=mpb.offlineError(), **record) if verbose: print(logbook.stream) # Evolve the sub-populations for idx, subpop in enumerate(populations): newpop = [] xbest, = toolbox.best(subpop) # Apply regular DE to the first part of the population for individual in subpop[:regular]: x1, x2, x3, x4 = toolbox.select(subpop) offspring = toolbox.clone(individual) index = random.randrange(NDIM) for i, value in enumerate(individual): if i == index or random.random() < CR: offspring[i] = xbest[i] + F * (x1[i] + x2[i] - x3[i] - x4[i]) offspring.fitness.values = toolbox.evaluate(offspring) if offspring.fitness >= individual.fitness: newpop.append(offspring) else: newpop.append(individual) # Apply Brownian to the last part of the population newpop.extend(toolbox.brownian_individual(xbest) for _ in range(brownian)) # 
Evaluate the brownian individuals for individual in newpop[-brownian:]: individual.fitness.value = toolbox.evaluate(individual) # Replace the population populations[idx] = newpop g += 1 return logbook if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/de/sphere.py0000644000076500000240000000726714456461441015755 0ustar00runnerstaff# This file is part of EAP. # # EAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # EAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with EAP. If not, see . 
import random import array import numpy from itertools import chain from deap import base from deap import benchmarks from deap import creator from deap import tools # Problem dimension NDIM = 10 creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", array.array, typecode='d', fitness=creator.FitnessMin) def mutDE(y, a, b, c, f): size = len(y) for i in range(len(y)): y[i] = a[i] + f*(b[i]-c[i]) return y def cxBinomial(x, y, cr): size = len(x) index = random.randrange(size) for i in range(size): if i == index or random.random() < cr: x[i] = y[i] return x def cxExponential(x, y, cr): size = len(x) index = random.randrange(size) # Loop on the indices index -> end, then on 0 -> index for i in chain(range(index, size), range(0, index)): x[i] = y[i] if random.random() < cr: break return x toolbox = base.Toolbox() toolbox.register("attr_float", random.uniform, -3, 3) toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_float, NDIM) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("mutate", mutDE, f=0.8) toolbox.register("mate", cxExponential, cr=0.8) toolbox.register("select", tools.selRandom, k=3) toolbox.register("evaluate", benchmarks.griewank) def main(): # Differential evolution parameters MU = NDIM * 10 NGEN = 200 pop = toolbox.population(n=MU); hof = tools.HallOfFame(1) stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) logbook = tools.Logbook() logbook.header = "gen", "evals", "std", "min", "avg", "max" # Evaluate the individuals fitnesses = toolbox.map(toolbox.evaluate, pop) for ind, fit in zip(pop, fitnesses): ind.fitness.values = fit record = stats.compile(pop) logbook.record(gen=0, evals=len(pop), **record) print(logbook.stream) for g in range(1, NGEN): children = [] for agent in pop: # We must clone everything to 
ensure independence a, b, c = [toolbox.clone(ind) for ind in toolbox.select(pop)] x = toolbox.clone(agent) y = toolbox.clone(agent) y = toolbox.mutate(y, a, b, c) z = toolbox.mate(x, y) del z.fitness.values children.append(z) fitnesses = toolbox.map(toolbox.evaluate, children) for (i, ind), fit in zip(enumerate(children), fitnesses): ind.fitness.values = fit if ind.fitness > pop[i].fitness: pop[i] = ind hof.update(pop) record = stats.compile(pop) logbook.record(gen=g, evals=len(pop), **record) print(logbook.stream) print("Best individual is ", hof[0]) print("with fitness", hof[0].fitness.values[0]) return logbook if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.7025845 deap-1.4.1/examples/eda/0000755000076500000240000000000014456461475014251 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/eda/emna.py0000644000076500000240000000652314456461441015542 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . 
import random import numpy from operator import attrgetter from deap import algorithms from deap import base from deap import benchmarks from deap import creator from deap import tools creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", numpy.ndarray, fitness=creator.FitnessMin) class EMNA(object): """Estimation of Multivariate Normal Algorithm (EMNA) as described by Algorithm 1 in: Fabien Teytaud and Olivier Teytaud. 2009. Why one must use reweighting in estimation of distribution algorithms. In Proceedings of the 11th Annual conference on Genetic and evolutionary computation (GECCO '09). ACM, New York, NY, USA, 453-460. """ def __init__(self, centroid, sigma, mu, lambda_): self.dim = len(centroid) self.centroid = numpy.array(centroid) self.sigma = numpy.array(sigma) self.lambda_ = lambda_ self.mu = mu def generate(self, ind_init): # Generate lambda_ individuals and put them into the provided class arz = self.centroid + self.sigma * numpy.random.randn(self.lambda_, self.dim) return list(map(ind_init, arz)) def update(self, population): # Sort individuals so the best is first sorted_pop = sorted(population, key=attrgetter("fitness"), reverse=True) # Compute the average of the mu best individuals z = sorted_pop[:self.mu] - self.centroid avg = numpy.mean(z, axis=0) # Adjust variance of the distribution self.sigma = numpy.sqrt(numpy.sum(numpy.sum((z - avg)**2, axis=1)) / (self.mu*self.dim)) self.centroid = self.centroid + avg def main(): N, LAMBDA = 30, 1000 MU = int(LAMBDA/4) strategy = EMNA(centroid=[5.0]*N, sigma=5.0, mu=MU, lambda_=LAMBDA) toolbox = base.Toolbox() toolbox.register("evaluate", benchmarks.sphere) toolbox.register("generate", strategy.generate, creator.Individual) toolbox.register("update", strategy.update) # Numpy equality function (operators.eq) between two arrays returns the # equality element wise, which raises an exception in the if similar() # check of the hall of fame. 
Using a different equality function like # numpy.array_equal or numpy.allclose solve this issue. hof = tools.HallOfFame(1, similar=numpy.array_equal) stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) algorithms.eaGenerateUpdate(toolbox, ngen=150, stats=stats, halloffame=hof) return hof[0].fitness.values[0] if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/eda/pbil.py0000644000076500000240000000534514456461441015551 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . 
import array import random import numpy from deap import algorithms from deap import base from deap import creator from deap import tools class PBIL(object): def __init__(self, ndim, learning_rate, mut_prob, mut_shift, lambda_): self.prob_vector = [0.5] * ndim self.learning_rate = learning_rate self.mut_prob = mut_prob self.mut_shift = mut_shift self.lambda_ = lambda_ def sample(self): return (random.random() < prob for prob in self.prob_vector) def generate(self, ind_init): return [ind_init(self.sample()) for _ in range(self.lambda_)] def update(self, population): best = max(population, key=lambda ind: ind.fitness) for i, value in enumerate(best): # Update the probability vector self.prob_vector[i] *= 1.0 - self.learning_rate self.prob_vector[i] += value * self.learning_rate # Mutate the probability vector if random.random() < self.mut_prob: self.prob_vector[i] *= 1.0 - self.mut_shift self.prob_vector[i] += random.randint(0, 1) * self.mut_shift def evalOneMax(individual): return sum(individual), creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", array.array, typecode='b', fitness=creator.FitnessMax) toolbox = base.Toolbox() toolbox.register("evaluate", evalOneMax) def main(seed): random.seed(seed) NGEN = 50 #Initialize the PBIL EDA pbil = PBIL(ndim=50, learning_rate=0.3, mut_prob=0.1, mut_shift=0.05, lambda_=20) toolbox.register("generate", pbil.generate, creator.Individual) toolbox.register("update", pbil.update) # Statistics computation stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) pop, logbook = algorithms.eaGenerateUpdate(toolbox, NGEN, stats=stats, verbose=True) if __name__ == "__main__": main(seed=None) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.7058892 deap-1.4.1/examples/es/0000755000076500000240000000000014456461475014127 
5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/es/cma_1+l_minfct.py0000644000076500000240000000371414456461441017246 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . import numpy from deap import algorithms from deap import base from deap import benchmarks from deap import cma from deap import creator from deap import tools N=5 creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", list, fitness=creator.FitnessMin) # See http://www.lri.fr/~hansen/cmaes_inmatlab.html for more details about # the rastrigin and other tests for CMA-ES toolbox = base.Toolbox() toolbox.register("evaluate", benchmarks.sphere) def main(): numpy.random.seed() # The CMA-ES One Plus Lambda algorithm takes a initialized parent as argument parent = creator.Individual((numpy.random.rand() * 5) - 1 for _ in range(N)) parent.fitness.values = toolbox.evaluate(parent) strategy = cma.StrategyOnePlusLambda(parent, sigma=5.0, lambda_=10) toolbox.register("generate", strategy.generate, ind_init=creator.Individual) toolbox.register("update", strategy.update) hof = tools.HallOfFame(1) stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) 
algorithms.eaGenerateUpdate(toolbox, ngen=200, halloffame=hof, stats=stats) if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/es/cma_bipop.py0000644000076500000240000002012614456461441016424 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . """Implementation of the BI-Population CMA-ES algorithm. As presented in *Hansen, 2009, Benchmarking a BI-Population CMA-ES on the BBOB-2009 Function Testbed* with the exception of the modifications to the original CMA-ES parameters mentioned at the end of section 2's first paragraph. 
""" from collections import deque import numpy from deap import algorithms from deap import base from deap import benchmarks from deap import cma from deap import creator from deap import tools # Problem size N = 30 creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", list, fitness=creator.FitnessMin) def main(verbose=True): NRESTARTS = 10 # Initialization + 9 I-POP restarts SIGMA0 = 2.0 # 1/5th of the domain [-5 5] toolbox = base.Toolbox() toolbox.register("evaluate", benchmarks.rastrigin) halloffame = tools.HallOfFame(1) stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) logbooks = list() nsmallpopruns = 0 smallbudget = list() largebudget = list() lambda0 = 4 + int(3 * numpy.log(N)) regime = 1 i = 0 while i < (NRESTARTS + nsmallpopruns): # The first regime is enforced on the first and last restart # The second regime is run if its allocated budget is smaller than the allocated # large population regime budget if i > 0 and i < (NRESTARTS + nsmallpopruns) - 1 and sum(smallbudget) < sum(largebudget): lambda_ = int(lambda0 * (0.5 * (2**(i - nsmallpopruns) * lambda0) / lambda0)**(numpy.random.rand()**2)) sigma = 2 * 10**(-2 * numpy.random.rand()) nsmallpopruns += 1 regime = 2 smallbudget += [0] else: lambda_ = 2**(i - nsmallpopruns) * lambda0 sigma = SIGMA0 regime = 1 largebudget += [0] t = 0 # Set the termination criterion constants if regime == 1: MAXITER = 100 + 50 * (N + 3)**2 / numpy.sqrt(lambda_) elif regime == 2: MAXITER = 0.5 * largebudget[-1] / lambda_ TOLHISTFUN = 10**-12 TOLHISTFUN_ITER = 10 + int(numpy.ceil(30. * N / lambda_)) EQUALFUNVALS = 1. / 3. EQUALFUNVALS_K = int(numpy.ceil(0.1 + lambda_ / 4.)) TOLX = 10**-12 TOLUPSIGMA = 10**20 CONDITIONCOV = 10**14 STAGNATION_ITER = int(numpy.ceil(0.2 * t + 120 + 30. 
* N / lambda_)) NOEFFECTAXIS_INDEX = t % N equalfunvalues = list() bestvalues = list() medianvalues = list() mins = deque(maxlen=TOLHISTFUN_ITER) # We start with a centroid in [-4, 4]**D strategy = cma.Strategy(centroid=numpy.random.uniform(-4, 4, N), sigma=sigma, lambda_=lambda_) toolbox.register("generate", strategy.generate, creator.Individual) toolbox.register("update", strategy.update) logbooks.append(tools.Logbook()) logbooks[-1].header = "gen", "evals", "restart", "regime", "std", "min", "avg", "max" conditions = {"MaxIter" : False, "TolHistFun" : False, "EqualFunVals" : False, "TolX" : False, "TolUpSigma" : False, "Stagnation" : False, "ConditionCov" : False, "NoEffectAxis" : False, "NoEffectCoor" : False} # Run the current regime until one of the following is true: ## Note that the algorithm won't stop by itself on the optimum (0.0 on rastrigin). while not any(conditions.values()): # Generate a new population population = toolbox.generate() # Evaluate the individuals fitnesses = toolbox.map(toolbox.evaluate, population) for ind, fit in zip(population, fitnesses): ind.fitness.values = fit halloffame.update(population) record = stats.compile(population) logbooks[-1].record(gen=t, evals=lambda_, restart=i, regime=regime, **record) if verbose: print(logbooks[-1].stream) # Update the strategy with the evaluated individuals toolbox.update(population) # Count the number of times the k'th best solution is equal to the best solution # At this point the population is sorted (method update) if population[-1].fitness == population[-EQUALFUNVALS_K].fitness: equalfunvalues.append(1) # Log the best and median value of this population bestvalues.append(population[-1].fitness.values) medianvalues.append(population[int(round(len(population)/2.))].fitness.values) # First run does not count into the budget if regime == 1 and i > 0: largebudget[-1] += lambda_ elif regime == 2: smallbudget[-1] += lambda_ t += 1 STAGNATION_ITER = int(numpy.ceil(0.2 * t + 120 + 30. 
* N / lambda_)) NOEFFECTAXIS_INDEX = t % N if t >= MAXITER: # The maximum number of iteration per CMA-ES ran conditions["MaxIter"] = True mins.append(record["min"]) if (len(mins) == mins.maxlen) and max(mins) - min(mins) < TOLHISTFUN: # The range of the best values is smaller than the threshold conditions["TolHistFun"] = True if t > N and sum(equalfunvalues[-N:]) / float(N) > EQUALFUNVALS: # In 1/3rd of the last N iterations the best and k'th best solutions are equal conditions["EqualFunVals"] = True if all(strategy.pc < TOLX) and all(numpy.sqrt(numpy.diag(strategy.C)) < TOLX): # All components of pc and sqrt(diag(C)) are smaller than the threshold conditions["TolX"] = True # Need to transfor strategy.diagD[-1]**2 from pyp/numpy.float64 to python # float to avoid OverflowError if strategy.sigma / sigma > float(strategy.diagD[-1]**2) * TOLUPSIGMA: # The sigma ratio is bigger than a threshold conditions["TolUpSigma"] = True if len(bestvalues) > STAGNATION_ITER and len(medianvalues) > STAGNATION_ITER and \ numpy.median(bestvalues[-20:]) >= numpy.median(bestvalues[-STAGNATION_ITER:-STAGNATION_ITER + 20]) and \ numpy.median(medianvalues[-20:]) >= numpy.median(medianvalues[-STAGNATION_ITER:-STAGNATION_ITER + 20]): # Stagnation occurred conditions["Stagnation"] = True if strategy.cond > 10**14: # The condition number is bigger than a threshold conditions["ConditionCov"] = True if all(strategy.centroid == strategy.centroid + 0.1 * strategy.sigma * strategy.diagD[-NOEFFECTAXIS_INDEX] * strategy.B[-NOEFFECTAXIS_INDEX]): # The coordinate axis std is too low conditions["NoEffectAxis"] = True if any(strategy.centroid == strategy.centroid + 0.2 * strategy.sigma * numpy.diag(strategy.C)): # The main axis std has no effect conditions["NoEffectCoor"] = True stop_causes = [k for k, v in conditions.items() if v] print("Stopped because of condition%s %s" % ((":" if len(stop_causes) == 1 else "s:"), ",".join(stop_causes))) i += 1 return halloffame if __name__ == "__main__": main() 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/es/cma_minfct.py0000644000076500000240000000416414456461441016577 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . import numpy from deap import algorithms from deap import base from deap import benchmarks from deap import cma from deap import creator from deap import tools # Problem size N=30 creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", list, fitness=creator.FitnessMin) toolbox = base.Toolbox() toolbox.register("evaluate", benchmarks.rastrigin) def main(): # The cma module uses the numpy random number generator numpy.random.seed(128) # The CMA-ES algorithm takes a population of one individual as argument # The centroid is set to a vector of 5.0 see http://www.lri.fr/~hansen/cmaes_inmatlab.html # for more details about the rastrigin and other tests for CMA-ES strategy = cma.Strategy(centroid=[5.0]*N, sigma=5.0, lambda_=20*N) toolbox.register("generate", strategy.generate, creator.Individual) toolbox.register("update", strategy.update) hof = tools.HallOfFame(1) stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) # The CMA-ES algorithm converge with good probability with those settings 
algorithms.eaGenerateUpdate(toolbox, ngen=250, stats=stats, halloffame=hof) # print "Best individual is %s, %s" % (hof[0], hof[0].fitness.values) return hof[0].fitness.values[0] if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/es/cma_mo.py0000644000076500000240000001353214456461441015731 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . 
import numpy from deap import algorithms from deap import base from deap import benchmarks from deap.benchmarks.tools import hypervolume from deap import cma from deap import creator from deap import tools # Problem size N = 5 # ZDT1, ZDT2, DTLZ2 MIN_BOUND = numpy.zeros(N) MAX_BOUND = numpy.ones(N) EPS_BOUND = 2.e-5 # Kursawe # MIN_BOUND = numpy.zeros(N) - 5 # MAX_BOUND = numpy.zeros(N) + 5 creator.create("FitnessMin", base.Fitness, weights=(-1.0, -1.0)) creator.create("Individual", list, fitness=creator.FitnessMin) def distance(feasible_ind, original_ind): """A distance function to the feasibility region.""" return sum((f - o)**2 for f, o in zip(feasible_ind, original_ind)) def closest_feasible(individual): """A function returning a valid individual from an invalid one.""" feasible_ind = numpy.array(individual) feasible_ind = numpy.maximum(MIN_BOUND, feasible_ind) feasible_ind = numpy.minimum(MAX_BOUND, feasible_ind) return feasible_ind def valid(individual): """Determines if the individual is valid or not.""" if any(individual < MIN_BOUND) or any(individual > MAX_BOUND): return False return True def close_valid(individual): """Determines if the individual is close to valid.""" if any(individual < MIN_BOUND-EPS_BOUND) or any(individual > MAX_BOUND+EPS_BOUND): return False return True toolbox = base.Toolbox() toolbox.register("evaluate", benchmarks.zdt1) toolbox.decorate("evaluate", tools.ClosestValidPenalty(valid, closest_feasible, 1.0e+6, distance)) def main(): # The cma module uses the numpy random number generator # numpy.random.seed(128) MU, LAMBDA = 10, 10 NGEN = 500 verbose = True create_plot = False # The MO-CMA-ES algorithm takes a full population as argument population = [creator.Individual(x) for x in (numpy.random.uniform(0, 1, (MU, N)))] for ind in population: ind.fitness.values = toolbox.evaluate(ind) strategy = cma.StrategyMultiObjective(population, sigma=1.0, mu=MU, lambda_=LAMBDA) toolbox.register("generate", strategy.generate, creator.Individual) 
toolbox.register("update", strategy.update) stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("min", numpy.min, axis=0) stats.register("max", numpy.max, axis=0) logbook = tools.Logbook() logbook.header = ["gen", "nevals"] + (stats.fields if stats else []) fitness_history = [] for gen in range(NGEN): # Generate a new population population = toolbox.generate() # Evaluate the individuals fitnesses = toolbox.map(toolbox.evaluate, population) for ind, fit in zip(population, fitnesses): ind.fitness.values = fit fitness_history.append(fit) # Update the strategy with the evaluated individuals toolbox.update(population) record = stats.compile(population) if stats is not None else {} logbook.record(gen=gen, nevals=len(population), **record) if verbose: print(logbook.stream) if verbose: print("Final population hypervolume is %f" % hypervolume(strategy.parents, [11.0, 11.0])) # Note that we use a penalty to guide the search to feasible solutions, # but there is no guarantee that individuals are valid. # We expect the best individuals will be within bounds or very close. num_valid = 0 for ind in strategy.parents: dist = distance(closest_feasible(ind), ind) if numpy.isclose(dist, 0.0, rtol=1.e-5, atol=1.e-5): num_valid += 1 print("Number of valid individuals is %d/%d" % (num_valid, len(strategy.parents))) print("Final population:") print(numpy.asarray(strategy.parents)) if create_plot: interactive = 0 if not interactive: import matplotlib as mpl_tmp mpl_tmp.use('Agg') # Force matplotlib to not use any Xwindows backend. import matplotlib.pyplot as plt fig = plt.figure() plt.title("Multi-objective minimization via MO-CMA-ES") plt.xlabel("First objective (function) to minimize") plt.ylabel("Second objective (function) to minimize") # Limit the scale because our history values include the penalty. plt.xlim((-0.1, 1.20)) plt.ylim((-0.1, 1.20)) # Plot all history. Note the values include the penalty. 
fitness_history = numpy.asarray(fitness_history) plt.scatter(fitness_history[:,0], fitness_history[:,1], facecolors='none', edgecolors="lightblue") valid_front = numpy.array([ind.fitness.values for ind in strategy.parents if close_valid(ind)]) invalid_front = numpy.array([ind.fitness.values for ind in strategy.parents if not close_valid(ind)]) if len(valid_front) > 0: plt.scatter(valid_front[:,0], valid_front[:,1], c="g") if len(invalid_front) > 0: plt.scatter(invalid_front[:,0], invalid_front[:,1], c="r") if interactive: plt.show() else: print("Writing cma_mo.png") plt.savefig("cma_mo.png") return strategy.parents if __name__ == "__main__": solutions = main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/es/cma_plotting.py0000644000076500000240000001027614456461441017160 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . 
import numpy from deap import algorithms from deap import base from deap import benchmarks from deap import cma from deap import creator from deap import tools import matplotlib.pyplot as plt # Problem size N = 10 NGEN = 125 creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", list, fitness=creator.FitnessMin) toolbox = base.Toolbox() toolbox.register("evaluate", benchmarks.rastrigin) def main(verbose=True): # The cma module uses the numpy random number generator numpy.random.seed(64) # The CMA-ES algorithm takes a population of one individual as argument # The centroid is set to a vector of 5.0 see http://www.lri.fr/~hansen/cmaes_inmatlab.html # for more details about the rastrigin and other tests for CMA-ES strategy = cma.Strategy(centroid=[5.0]*N, sigma=5.0, lambda_=20*N) toolbox.register("generate", strategy.generate, creator.Individual) toolbox.register("update", strategy.update) halloffame = tools.HallOfFame(1) stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) logbook = tools.Logbook() logbook.header = "gen", "evals", "std", "min", "avg", "max" # Objects that will compile the data sigma = numpy.ndarray((NGEN,1)) axis_ratio = numpy.ndarray((NGEN,1)) diagD = numpy.ndarray((NGEN,N)) fbest = numpy.ndarray((NGEN,1)) best = numpy.ndarray((NGEN,N)) std = numpy.ndarray((NGEN,N)) for gen in range(NGEN): # Generate a new population population = toolbox.generate() # Evaluate the individuals fitnesses = toolbox.map(toolbox.evaluate, population) for ind, fit in zip(population, fitnesses): ind.fitness.values = fit # Update the strategy with the evaluated individuals toolbox.update(population) # Update the hall of fame and the statistics with the # currently evaluated population halloffame.update(population) record = stats.compile(population) logbook.record(evals=len(population), gen=gen, **record) if 
verbose: print(logbook.stream) # Save more data along the evolution for latter plotting # diagD is sorted and sqrooted in the update method sigma[gen] = strategy.sigma axis_ratio[gen] = max(strategy.diagD)**2/min(strategy.diagD)**2 diagD[gen, :N] = strategy.diagD**2 fbest[gen] = halloffame[0].fitness.values best[gen, :N] = halloffame[0] std[gen, :N] = numpy.std(population, axis=0) # The x-axis will be the number of evaluations x = list(range(0, strategy.lambda_ * NGEN, strategy.lambda_)) avg, max_, min_ = logbook.select("avg", "max", "min") plt.figure() plt.subplot(2, 2, 1) plt.semilogy(x, avg, "--b") plt.semilogy(x, max_, "--b") plt.semilogy(x, min_, "-b") plt.semilogy(x, fbest, "-c") plt.semilogy(x, sigma, "-g") plt.semilogy(x, axis_ratio, "-r") plt.grid(True) plt.title("blue: f-values, green: sigma, red: axis ratio") plt.subplot(2, 2, 2) plt.plot(x, best) plt.grid(True) plt.title("Object Variables") plt.subplot(2, 2, 3) plt.semilogy(x, diagD) plt.grid(True) plt.title("Scaling (All Main Axes)") plt.subplot(2, 2, 4) plt.semilogy(x, std) plt.grid(True) plt.title("Standard Deviations in All Coordinates") plt.show() if __name__ == "__main__": main(False) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/es/fctmin.py0000644000076500000240000000556014456461441015760 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . 
import array import random import numpy from deap import algorithms from deap import base from deap import benchmarks from deap import creator from deap import tools IND_SIZE = 30 MIN_VALUE = 4 MAX_VALUE = 5 MIN_STRATEGY = 0.5 MAX_STRATEGY = 3 creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", array.array, typecode="d", fitness=creator.FitnessMin, strategy=None) creator.create("Strategy", array.array, typecode="d") # Individual generator def generateES(icls, scls, size, imin, imax, smin, smax): ind = icls(random.uniform(imin, imax) for _ in range(size)) ind.strategy = scls(random.uniform(smin, smax) for _ in range(size)) return ind def checkStrategy(minstrategy): def decorator(func): def wrappper(*args, **kargs): children = func(*args, **kargs) for child in children: for i, s in enumerate(child.strategy): if s < minstrategy: child.strategy[i] = minstrategy return children return wrappper return decorator toolbox = base.Toolbox() toolbox.register("individual", generateES, creator.Individual, creator.Strategy, IND_SIZE, MIN_VALUE, MAX_VALUE, MIN_STRATEGY, MAX_STRATEGY) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("mate", tools.cxESBlend, alpha=0.1) toolbox.register("mutate", tools.mutESLogNormal, c=1.0, indpb=0.03) toolbox.register("select", tools.selTournament, tournsize=3) toolbox.register("evaluate", benchmarks.sphere) toolbox.decorate("mate", checkStrategy(MIN_STRATEGY)) toolbox.decorate("mutate", checkStrategy(MIN_STRATEGY)) def main(): random.seed() MU, LAMBDA = 10, 100 pop = toolbox.population(n=MU) hof = tools.HallOfFame(1) stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) pop, logbook = algorithms.eaMuCommaLambda(pop, toolbox, mu=MU, lambda_=LAMBDA, cxpb=0.6, mutpb=0.3, ngen=500, stats=stats, halloffame=hof) return pop, logbook, hof if 
__name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/es/onefifth.py0000644000076500000240000000532214456461441016276 0ustar00runnerstaff # This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . import array import random from deap import base from deap import creator from deap import benchmarks from deap import tools IND_SIZE = 10 creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", array.array, typecode='d', fitness=creator.FitnessMin) def update(ind, mu, std): for i, mu_i in enumerate(mu): ind[i] = random.gauss(mu_i,std) toolbox = base.Toolbox() toolbox.register("update", update) toolbox.register("evaluate", benchmarks.sphere) def main(): """Implements the One-Fifth rule algorithm as expressed in : Kern, S., S.D. Muller, N. Hansen, D. Buche, J. Ocenasek and P. Koumoutsakos (2004). Learning Probability Distributions in Continuous Evolutionary Algorithms - A Comparative Review. Natural Computing, 3(1), pp. 77-112. However instead of parent and offspring the algorithm is expressed in terms of best and worst. Best is equivalent to the parent, and worst to the offspring. Instead of producing a new individual each time, we have defined a function which updates the worst individual using the best one as the mean of the gaussian and the sigma computed as the standard deviation. 
""" random.seed(64) logbook = tools.Logbook() logbook.header = "gen", "fitness" interval = (-3,7) mu = (random.uniform(interval[0], interval[1]) for _ in range(IND_SIZE)) sigma = (interval[1] - interval[0])/2.0 alpha = 2.0**(1.0/IND_SIZE) best = creator.Individual(mu) best.fitness.values = toolbox.evaluate(best) worst = creator.Individual((0.0,)*IND_SIZE) NGEN = 1500 for g in range(NGEN): toolbox.update(worst, best, sigma) worst.fitness.values = toolbox.evaluate(worst) if best.fitness <= worst.fitness: sigma = sigma * alpha best, worst = worst, best else: sigma = sigma * alpha**(-0.25) logbook.record(gen=g, fitness=best.fitness.values) print(logbook.stream) return best if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.7166135 deap-1.4.1/examples/ga/0000755000076500000240000000000014456461475014107 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/evoknn.py0000755000076500000240000000553114456461441015761 0ustar00runnerstaff#!/usr/bin/env python2.7 # This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . 
import csv import random import numpy import knn from deap import algorithms from deap import base from deap import creator from deap import tools # kNN parameters import knn FILE="heart_scale.csv" N_TRAIN=175 K=1 # Read data from file with open(FILE, "r") as data_csv: data = csv.reader(data_csv) trainset = list() trainlabels = list() rows = [row for row in data] random.shuffle(rows) for row in rows: trainlabels.append(float(row[0])) trainset.append([float(e) for e in row[1:]]) classifier = knn.KNN(K) classifier.train(trainset[:N_TRAIN], trainlabels[:N_TRAIN]) def evalClassifier(individual): labels = classifier.predict(trainset[N_TRAIN:], individual) return sum(x == y for x, y in zip(labels, trainlabels[N_TRAIN:])) / float(len(trainlabels[N_TRAIN:])), \ sum(individual) / float(classifier.ndim) creator.create("FitnessMulti", base.Fitness, weights=(1.0, -1.0)) creator.create("Individual", list, fitness=creator.FitnessMulti) toolbox = base.Toolbox() # Attribute generator toolbox.register("attr_bool", random.randint, 0, 1) # Structure initializers toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_bool, classifier.ndim) toolbox.register("population", tools.initRepeat, list, toolbox.individual) # Operator registering toolbox.register("evaluate", evalClassifier) toolbox.register("mate", tools.cxUniform, indpb=0.1) toolbox.register("mutate", tools.mutFlipBit, indpb=0.05) toolbox.register("select", tools.selNSGA2) def main(): # random.seed(64) MU, LAMBDA = 100, 200 pop = toolbox.population(n=MU) hof = tools.ParetoFront() stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean, axis=0) stats.register("std", numpy.std, axis=0) stats.register("min", numpy.min, axis=0) stats.register("max", numpy.max, axis=0) pop, logbook = algorithms.eaMuPlusLambda(pop, toolbox, mu=MU, lambda_=LAMBDA, cxpb=0.7, mutpb=0.3, ngen=40, stats=stats, halloffame=hof) return pop, logbook, hof if __name__ == "__main__": main() 
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/evoknn_jmlr.py0000644000076500000240000000361114456461441016777 0ustar00runnerstaff#!/usr/bin/env python2.7 # This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . import knn, random from deap import algorithms, base, creator, tools def evalFitness(individual): return knn.classification_rate(features=individual), sum(individual) creator.create("FitnessMulti", base.Fitness, weights=(1.0, -1.0)) creator.create("Individual", list, fitness=creator.FitnessMulti) toolbox = base.Toolbox() toolbox.register("bit", random.randint, 0, 1) toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.bit, n=13) toolbox.register("population", tools.initRepeat, list, toolbox.individual, n=100) toolbox.register("evaluate", evalFitness) toolbox.register("mate", tools.cxUniform, indpb=0.1) toolbox.register("mutate", tools.mutFlipBit, indpb=0.05) toolbox.register("select", tools.selNSGA2) population = toolbox.population() fits = toolbox.map(toolbox.evaluate, population) for fit, ind in zip(fits, population): ind.fitness.values = fit for gen in range(50): offspring = algorithms.varOr(population, toolbox, lambda_=100, cxpb=0.5,mutpb=0.1) fits = toolbox.map(toolbox.evaluate, offspring) for fit, ind in zip(fits, offspring): ind.fitness.values = fit population = toolbox.select(offspring + 
population, k=100) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/evosn.py0000644000076500000240000001156614456461441015615 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . import random import numpy from deap import algorithms from deap import base from deap import creator from deap import tools import sortingnetwork as sn INPUTS = 6 def evalEvoSN(individual, dimension): network = sn.SortingNetwork(dimension, individual) return network.assess(), network.length, network.depth def genWire(dimension): return (random.randrange(dimension), random.randrange(dimension)) def genNetwork(dimension, min_size, max_size): size = random.randint(min_size, max_size) return [genWire(dimension) for i in range(size)] def mutWire(individual, dimension, indpb): for index, elem in enumerate(individual): if random.random() < indpb: individual[index] = genWire(dimension) def mutAddWire(individual, dimension): index = random.randint(0, len(individual)) individual.insert(index, genWire(dimension)) def mutDelWire(individual): index = random.randrange(len(individual)) del individual[index] creator.create("FitnessMin", base.Fitness, weights=(-1.0, -1.0, -1.0)) creator.create("Individual", list, fitness=creator.FitnessMin) toolbox = base.Toolbox() # Gene initializer toolbox.register("network", genNetwork, dimension=INPUTS, min_size=9, 
max_size=12) # Structure initializers toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.network) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("evaluate", evalEvoSN, dimension=INPUTS) toolbox.register("mate", tools.cxTwoPoint) toolbox.register("mutate", mutWire, dimension=INPUTS, indpb=0.05) toolbox.register("addwire", mutAddWire, dimension=INPUTS) toolbox.register("delwire", mutDelWire) toolbox.register("select", tools.selNSGA2) def main(): random.seed(64) population = toolbox.population(n=300) hof = tools.ParetoFront() stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean, axis=0) stats.register("std", numpy.std, axis=0) stats.register("min", numpy.min, axis=0) stats.register("max", numpy.max, axis=0) logbook = tools.Logbook() logbook.header = "gen", "evals", "std", "min", "avg", "max" CXPB, MUTPB, ADDPB, DELPB, NGEN = 0.5, 0.2, 0.01, 0.01, 40 # Evaluate every individuals fitnesses = toolbox.map(toolbox.evaluate, population) for ind, fit in zip(population, fitnesses): ind.fitness.values = fit hof.update(population) record = stats.compile(population) logbook.record(gen=0, evals=len(population), **record) print(logbook.stream) # Begin the evolution for g in range(1, NGEN): offspring = [toolbox.clone(ind) for ind in population] # Apply crossover and mutation for ind1, ind2 in zip(offspring[::2], offspring[1::2]): if random.random() < CXPB: toolbox.mate(ind1, ind2) del ind1.fitness.values del ind2.fitness.values # Note here that we have a different scheme of mutation than in the # original algorithm, we use 3 different mutations subsequently. 
for ind in offspring: if random.random() < MUTPB: toolbox.mutate(ind) del ind.fitness.values if random.random() < ADDPB: toolbox.addwire(ind) del ind.fitness.values if random.random() < DELPB: toolbox.delwire(ind) del ind.fitness.values # Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in offspring if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit population = toolbox.select(population+offspring, len(offspring)) hof.update(population) record = stats.compile(population) logbook.record(gen=g, evals=len(invalid_ind), **record) print(logbook.stream) best_network = sn.SortingNetwork(INPUTS, hof[0]) print(stats) print(best_network) print(best_network.draw()) print("%i errors, length %i, depth %i" % hof[0].fitness.values) return population, logbook, hof if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/heart_scale.csv0000644000076500000240000005716414456461441017104 0ustar00runnerstaff1.0,0.708333,1.0,1.0,-0.320755,-0.105023,-1.0,1.0,-0.419847,-1.0,-0.225806,0,1.0,-1.0 -1.0,0.583333,-1.0,0.333333,-0.603774,1.0,-1.0,1.0,0.358779,-1.0,-0.483871,0,-1.0,1.0 1.0,0.166667,1.0,-0.333333,-0.433962,-0.383562,-1.0,-1.0,0.0687023,-1.0,-0.903226,-1.0,-1.0,1.0 -1.0,0.458333,1.0,1.0,-0.358491,-0.374429,-1.0,-1.0,-0.480916,1.0,-0.935484,0,-0.333333,1.0 -1.0,0.875,-1.0,-0.333333,-0.509434,-0.347032,-1.0,1.0,-0.236641,1.0,-0.935484,-1.0,-0.333333,-1.0 -1.0,0.5,1.0,1.0,-0.509434,-0.767123,-1.0,-1.0,0.0534351,-1.0,-0.870968,-1.0,-1.0,1.0 1.0,0.125,1.0,0.333333,-0.320755,-0.406393,1.0,1.0,0.0839695,1.0,-0.806452,0,-0.333333,0.5 1.0,0.25,1.0,1.0,-0.698113,-0.484018,-1.0,1.0,0.0839695,1.0,-0.612903,0,-0.333333,1.0 1.0,0.291667,1.0,1.0,-0.132075,-0.237443,-1.0,1.0,0.51145,-1.0,-0.612903,0,0.333333,1.0 
1.0,0.416667,-1.0,1.0,0.0566038,0.283105,-1.0,1.0,0.267176,-1.0,0.290323,0,1.0,1.0 -1.0,0.25,1.0,1.0,-0.226415,-0.506849,-1.0,-1.0,0.374046,-1.0,-0.83871,0,-1.0,1.0 -1.0,0,1.0,1.0,-0.0943396,-0.543379,-1.0,1.0,-0.389313,1.0,-1.0,-1.0,-1.0,1.0 -1.0,-0.375,1.0,0.333333,-0.132075,-0.502283,-1.0,1.0,0.664122,-1.0,-1.0,-1.0,-1.0,-1.0 1.0,0.333333,1.0,-1.0,-0.245283,-0.506849,-1.0,-1.0,0.129771,-1.0,-0.16129,0,0.333333,-1.0 -1.0,0.166667,-1.0,1.0,-0.358491,-0.191781,-1.0,1.0,0.343511,-1.0,-1.0,-1.0,-0.333333,-1.0 -1.0,0.75,-1.0,1.0,-0.660377,-0.894977,-1.0,-1.0,-0.175573,-1.0,-0.483871,0,-1.0,-1.0 1.0,-0.291667,1.0,1.0,-0.132075,-0.155251,-1.0,-1.0,-0.251908,1.0,-0.419355,0,0.333333,1.0 1.0,0,1.0,1.0,-0.132075,-0.648402,1.0,1.0,0.282443,1.0,0,1.0,-1.0,1.0 -1.0,0.458333,1.0,-1.0,-0.698113,-0.611872,-1.0,1.0,0.114504,1.0,-0.419355,0,-1.0,-1.0 -1.0,-0.541667,1.0,-1.0,-0.132075,-0.666667,-1.0,-1.0,0.633588,1.0,-0.548387,-1.0,-1.0,1.0 1.0,0.583333,1.0,1.0,-0.509434,-0.52968,-1.0,1.0,-0.114504,1.0,-0.16129,0,0.333333,1.0 -1.0,-0.208333,1.0,-0.333333,-0.320755,-0.456621,-1.0,1.0,0.664122,-1.0,-0.935484,0,-1.0,-1.0 -1.0,-0.416667,1.0,1.0,-0.603774,-0.191781,-1.0,-1.0,0.679389,-1.0,-0.612903,0,-1.0,-1.0 -1.0,-0.25,1.0,1.0,-0.660377,-0.643836,-1.0,-1.0,0.0992366,-1.0,-0.967742,-1.0,-1.0,-1.0 -1.0,0.0416667,-1.0,-0.333333,-0.283019,-0.260274,1.0,1.0,0.343511,1.0,-1.0,-1.0,-0.333333,-1.0 -1.0,-0.208333,-1.0,0.333333,-0.320755,-0.319635,-1.0,-1.0,0.0381679,-1.0,-0.935484,-1.0,-1.0,-1.0 -1.0,-0.291667,-1.0,1.0,-0.169811,-0.465753,-1.0,1.0,0.236641,1.0,-1.0,0,-1.0,-1.0 -1.0,-0.0833333,-1.0,0.333333,-0.509434,-0.228311,-1.0,1.0,0.312977,-1.0,-0.806452,-1.0,-1.0,-1.0 1.0,0.208333,1.0,0.333333,-0.660377,-0.525114,-1.0,1.0,0.435115,-1.0,-0.193548,0,-0.333333,1.0 -1.0,0.75,-1.0,0.333333,-0.698113,-0.365297,1.0,1.0,-0.0992366,-1.0,-1.0,-1.0,-0.333333,-1.0 1.0,0.166667,1.0,0.333333,-0.358491,-0.52968,-1.0,1.0,0.206107,-1.0,-0.870968,0,-0.333333,1.0 
-1.0,0.541667,1.0,1.0,0.245283,-0.534247,-1.0,1.0,0.0229008,-1.0,-0.258065,-1.0,-1.0,0.5 -1.0,-0.666667,-1.0,0.333333,-0.509434,-0.593607,-1.0,-1.0,0.51145,-1.0,-1.0,-1.0,-1.0,-1.0 1.0,0.25,1.0,1.0,0.433962,-0.086758,-1.0,1.0,0.0534351,1.0,0.0967742,1.0,-1.0,1.0 1.0,-0.125,1.0,1.0,-0.0566038,-0.6621,-1.0,1.0,-0.160305,1.0,-0.709677,0,-1.0,1.0 1.0,-0.208333,1.0,1.0,-0.320755,-0.406393,1.0,1.0,0.206107,1.0,-1.0,-1.0,0.333333,1.0 1.0,0.333333,1.0,1.0,-0.132075,-0.630137,-1.0,1.0,0.0229008,1.0,-0.387097,-1.0,-0.333333,1.0 1.0,0.25,1.0,-1.0,0.245283,-0.328767,-1.0,1.0,-0.175573,-1.0,-1.0,-1.0,-1.0,-1.0 -1.0,-0.458333,1.0,0.333333,-0.320755,-0.753425,-1.0,-1.0,0.206107,-1.0,-1.0,-1.0,-1.0,-1.0 -1.0,-0.208333,1.0,1.0,-0.471698,-0.561644,-1.0,1.0,0.755725,-1.0,-1.0,-1.0,-1.0,-1.0 1.0,-0.541667,1.0,1.0,0.0943396,-0.557078,-1.0,-1.0,0.679389,-1.0,-1.0,-1.0,-1.0,1.0 -1.0,0.375,-1.0,1.0,-0.433962,-0.621005,-1.0,-1.0,0.40458,-1.0,-1.0,-1.0,-1.0,-1.0 -1.0,-0.375,1.0,0.333333,-0.320755,-0.511416,-1.0,-1.0,0.648855,1.0,-0.870968,-1.0,-1.0,-1.0 -1.0,-0.291667,1.0,-0.333333,-0.867925,-0.675799,1.0,-1.0,0.29771,-1.0,-1.0,-1.0,-1.0,1.0 1.0,0.25,1.0,0.333333,-0.396226,-0.579909,1.0,-1.0,-0.0381679,-1.0,-0.290323,0,-0.333333,0.5 -1.0,0.208333,1.0,0.333333,-0.132075,-0.611872,1.0,1.0,0.435115,-1.0,-1.0,-1.0,-1.0,-1.0 1.0,-0.166667,1.0,0.333333,-0.54717,-0.894977,-1.0,1.0,-0.160305,-1.0,-0.741935,-1.0,1.0,-1.0 1.0,-0.375,1.0,1.0,-0.698113,-0.675799,-1.0,1.0,0.618321,-1.0,-1.0,-1.0,-0.333333,-1.0 1.0,0.541667,1.0,-0.333333,0.245283,-0.452055,-1.0,-1.0,-0.251908,1.0,-1.0,0,1.0,0.5 1.0,0.5,-1.0,1.0,0.0566038,-0.547945,-1.0,1.0,-0.343511,-1.0,-0.677419,0,1.0,1.0 1.0,-0.458333,1.0,1.0,-0.207547,-0.136986,-1.0,-1.0,-0.175573,1.0,-0.419355,0,-1.0,0.5 -1.0,-0.0416667,1.0,-0.333333,-0.358491,-0.639269,1.0,-1.0,0.725191,-1.0,-1.0,-1.0,-1.0,-1.0 -1.0,0.5,-1.0,0.333333,-0.132075,0.328767,1.0,1.0,0.312977,-1.0,-0.741935,-1.0,-0.333333,-1.0 
-1.0,0.416667,-1.0,-0.333333,-0.132075,-0.684932,-1.0,-1.0,0.648855,-1.0,-1.0,-1.0,0.333333,-1.0 -1.0,-0.333333,-1.0,-0.333333,-0.320755,-0.506849,-1.0,1.0,0.587786,-1.0,-0.806452,0,-1.0,-1.0 -1.0,-0.5,-1.0,-0.333333,-0.792453,-0.671233,-1.0,-1.0,0.480916,-1.0,-1.0,-1.0,-0.333333,-1.0 1.0,0.333333,1.0,1.0,-0.169811,-0.817352,-1.0,1.0,-0.175573,1.0,0.16129,0,-0.333333,-1.0 -1.0,0.291667,-1.0,0.333333,-0.509434,-0.762557,1.0,-1.0,-0.618321,-1.0,-1.0,-1.0,-1.0,-1.0 1.0,0.25,-1.0,1.0,0.509434,-0.438356,-1.0,-1.0,0.0992366,1.0,-1.0,0,-1.0,-1.0 1.0,0.375,1.0,-0.333333,-0.509434,-0.292237,-1.0,1.0,-0.51145,-1.0,-0.548387,0,-0.333333,1.0 -1.0,0.166667,1.0,0.333333,0.0566038,-1.0,1.0,-1.0,0.557252,-1.0,-0.935484,-1.0,-0.333333,1.0 1.0,-0.0833333,-1.0,1.0,-0.320755,-0.182648,-1.0,-1.0,0.0839695,1.0,-0.612903,0,-1.0,1.0 -1.0,-0.375,1.0,0.333333,-0.509434,-0.543379,-1.0,-1.0,0.496183,-1.0,-1.0,-1.0,-1.0,-1.0 -1.0,0.291667,-1.0,-1.0,0.0566038,-0.479452,-1.0,-1.0,0.526718,-1.0,-0.709677,-1.0,-1.0,-1.0 -1.0,0.416667,1.0,-1.0,-0.0377358,-0.511416,1.0,1.0,0.206107,-1.0,-0.258065,1.0,-1.0,0.5 1.0,0.166667,1.0,1.0,0.0566038,-0.315068,-1.0,1.0,-0.374046,1.0,-0.806452,0,-0.333333,0.5 -1.0,-0.0833333,1.0,1.0,-0.132075,-0.383562,-1.0,1.0,0.755725,1.0,-1.0,-1.0,-1.0,-1.0 1.0,0.208333,-1.0,-0.333333,-0.207547,-0.118721,1.0,1.0,0.236641,-1.0,-1.0,-1.0,0.333333,-1.0 -1.0,-0.375,-1.0,0.333333,-0.54717,-0.47032,-1.0,-1.0,0.19084,-1.0,-0.903226,0,-0.333333,-1.0 1.0,-0.25,1.0,0.333333,-0.735849,-0.465753,-1.0,-1.0,0.236641,-1.0,-1.0,-1.0,-1.0,-1.0 1.0,0.333333,1.0,1.0,-0.509434,-0.388128,-1.0,-1.0,0.0534351,1.0,0.16129,0,-0.333333,1.0 -1.0,0.166667,-1.0,1.0,-0.509434,0.0410959,-1.0,-1.0,0.40458,1.0,-0.806452,-1.0,-1.0,-1.0 -1.0,0.708333,1.0,-0.333333,0.169811,-0.456621,-1.0,1.0,0.0992366,-1.0,-1.0,-1.0,-1.0,-1.0 -1.0,0.958333,-1.0,0.333333,-0.132075,-0.675799,-1.0,0,-0.312977,-1.0,-0.645161,0,-1.0,-1.0 
-1.0,0.583333,-1.0,1.0,-0.773585,-0.557078,-1.0,-1.0,0.0839695,-1.0,-0.903226,-1.0,0.333333,-1.0 1.0,-0.333333,1.0,1.0,-0.0943396,-0.164384,-1.0,1.0,0.160305,1.0,-1.0,0,1.0,1.0 -1.0,-0.333333,1.0,1.0,-0.811321,-0.625571,-1.0,1.0,0.175573,1.0,-0.0322581,0,-1.0,-1.0 -1.0,-0.583333,-1.0,0.333333,-1.0,-0.666667,-1.0,-1.0,0.648855,-1.0,-1.0,-1.0,-1.0,-1.0 -1.0,-0.458333,-1.0,0.333333,-0.509434,-0.621005,-1.0,-1.0,0.557252,-1.0,-1.0,0,-1.0,-1.0 -1.0,0.125,1.0,-0.333333,-0.509434,-0.497717,-1.0,-1.0,0.633588,-1.0,-0.741935,-1.0,-1.0,-1.0 1.0,0.208333,1.0,1.0,-0.0188679,-0.579909,-1.0,-1.0,-0.480916,-1.0,-0.354839,0,-0.333333,1.0 1.0,-0.75,1.0,1.0,-0.509434,-0.671233,-1.0,-1.0,-0.0992366,1.0,-0.483871,0,-1.0,1.0 1.0,0.208333,1.0,1.0,0.0566038,-0.342466,-1.0,1.0,-0.389313,1.0,-0.741935,-1.0,-1.0,1.0 -1.0,-0.5,1.0,0.333333,-0.320755,-0.598174,-1.0,1.0,0.480916,-1.0,-0.354839,0,-1.0,-1.0 -1.0,0.166667,1.0,1.0,-0.698113,-0.657534,-1.0,-1.0,-0.160305,1.0,-0.516129,0,-1.0,0.5 -1.0,-0.458333,1.0,-1.0,0.0188679,-0.461187,-1.0,1.0,0.633588,-1.0,-0.741935,-1.0,0.333333,-1.0 -1.0,0.375,1.0,-0.333333,-0.358491,-0.625571,1.0,1.0,0.0534351,-1.0,-1.0,-1.0,-1.0,-1.0 -1.0,0.25,1.0,-1.0,0.584906,-0.342466,-1.0,1.0,0.129771,-1.0,0.354839,1.0,-1.0,1.0 -1.0,-0.5,-1.0,-0.333333,-0.396226,-0.178082,-1.0,-1.0,0.40458,-1.0,-1.0,-1.0,-1.0,-1.0 1.0,-0.125,1.0,1.0,0.0566038,-0.465753,-1.0,1.0,-0.129771,-1.0,-0.16129,0,-1.0,1.0 -1.0,0.25,1.0,-0.333333,-0.132075,-0.56621,-1.0,-1.0,0.419847,1.0,-1.0,-1.0,-1.0,-1.0 1.0,0.333333,-1.0,1.0,-0.320755,-0.0684932,-1.0,1.0,0.496183,-1.0,-1.0,-1.0,-1.0,-1.0 1.0,0.0416667,1.0,1.0,-0.433962,-0.360731,-1.0,1.0,-0.419847,1.0,-0.290323,0,-0.333333,1.0 1.0,0.0416667,1.0,1.0,-0.698113,-0.634703,-1.0,1.0,-0.435115,1.0,-1.0,0,-0.333333,-1.0 1.0,-0.0416667,1.0,1.0,-0.415094,-0.607306,-1.0,-1.0,0.480916,-1.0,-0.677419,-1.0,0.333333,1.0 1.0,-0.25,1.0,1.0,-0.698113,-0.319635,-1.0,1.0,-0.282443,1.0,-0.677419,0,-0.333333,-1.0 
-1.0,0.541667,1.0,1.0,-0.509434,-0.196347,-1.0,1.0,0.221374,-1.0,-0.870968,0,-1.0,-1.0 1.0,0.208333,1.0,1.0,-0.886792,-0.506849,-1.0,-1.0,0.29771,-1.0,-0.967742,-1.0,-0.333333,1.0 -1.0,0.458333,-1.0,0.333333,-0.132075,-0.146119,-1.0,-1.0,-0.0534351,-1.0,-0.935484,-1.0,-1.0,1.0 -1.0,-0.125,-1.0,-0.333333,-0.509434,-0.461187,-1.0,-1.0,0.389313,-1.0,-0.645161,-1.0,-1.0,-1.0 -1.0,-0.375,-1.0,0.333333,-0.735849,-0.931507,-1.0,-1.0,0.587786,-1.0,-0.806452,0,-1.0,-1.0 1.0,0.583333,1.0,1.0,-0.509434,-0.493151,-1.0,-1.0,-1.0,-1.0,-0.677419,0,-1.0,-1.0 -1.0,-0.166667,-1.0,1.0,-0.320755,-0.347032,-1.0,-1.0,0.40458,-1.0,-1.0,-1.0,-1.0,-1.0 1.0,0.166667,1.0,1.0,0.339623,-0.255708,1.0,1.0,-0.19084,-1.0,-0.677419,0,1.0,1.0 1.0,0.416667,1.0,1.0,-0.320755,-0.415525,-1.0,1.0,0.160305,-1.0,-0.548387,0,-0.333333,1.0 1.0,-0.208333,1.0,1.0,-0.433962,-0.324201,-1.0,1.0,0.450382,-1.0,-0.83871,0,-1.0,1.0 -1.0,-0.0833333,1.0,0.333333,-0.886792,-0.561644,-1.0,-1.0,0.0992366,1.0,-0.612903,0,-1.0,-1.0 1.0,0.291667,-1.0,1.0,0.0566038,-0.39726,-1.0,1.0,0.312977,-1.0,-0.16129,0,0.333333,1.0 1.0,0.25,1.0,1.0,-0.132075,-0.767123,-1.0,-1.0,0.389313,1.0,-1.0,-1.0,-0.333333,1.0 -1.0,-0.333333,-1.0,-0.333333,-0.660377,-0.844749,-1.0,-1.0,0.0229008,-1.0,-1.0,0,-1.0,-1.0 1.0,0.0833333,-1.0,1.0,0.622642,-0.0821918,-1.0,0,-0.29771,1.0,0.0967742,0,-1.0,-1.0 -1.0,-0.5,1.0,-0.333333,-0.698113,-0.502283,-1.0,-1.0,0.251908,-1.0,-1.0,-1.0,-1.0,-1.0 1.0,0.291667,-1.0,1.0,0.207547,-0.182648,-1.0,1.0,0.374046,-1.0,-1.0,-1.0,-1.0,-1.0 -1.0,0.0416667,-1.0,0.333333,-0.226415,-0.187215,1.0,-1.0,0.51145,-1.0,-1.0,-1.0,-1.0,-1.0 -1.0,-0.458333,1.0,-0.333333,-0.509434,-0.228311,-1.0,-1.0,0.389313,-1.0,-1.0,-1.0,-1.0,-1.0 -1.0,-0.166667,-1.0,-0.333333,-0.245283,-0.3379,-1.0,-1.0,0.389313,-1.0,-1.0,0,-1.0,-1.0 1.0,-0.291667,1.0,1.0,-0.509434,-0.438356,-1.0,1.0,0.114504,-1.0,-0.741935,-1.0,-1.0,1.0 1.0,0.125,-1.0,1.0,1.0,-0.260274,1.0,1.0,-0.0534351,1.0,0.290323,1.0,0.333333,1.0 
-1.0,0.541667,-1.0,-1.0,0.0566038,-0.543379,-1.0,-1.0,-0.343511,-1.0,-0.16129,1.0,-1.0,-1.0 1.0,0.125,1.0,1.0,-0.320755,-0.283105,1.0,1.0,-0.51145,1.0,-0.483871,1.0,-1.0,1.0 1.0,-0.166667,1.0,0.333333,-0.509434,-0.716895,-1.0,-1.0,0.0381679,-1.0,-0.354839,0,1.0,1.0 1.0,0.0416667,1.0,1.0,-0.471698,-0.269406,-1.0,1.0,-0.312977,1.0,0.0322581,0,0.333333,-1.0 1.0,0.166667,1.0,1.0,0.0943396,-0.324201,-1.0,-1.0,-0.740458,1.0,-0.612903,0,-0.333333,1.0 -1.0,0.5,-1.0,0.333333,0.245283,0.0684932,-1.0,1.0,0.221374,-1.0,-0.741935,-1.0,-1.0,-1.0 -1.0,0.0416667,1.0,0.333333,-0.415094,-0.328767,-1.0,1.0,0.236641,-1.0,-0.83871,1.0,-0.333333,-1.0 -1.0,0.0416667,-1.0,0.333333,0.245283,-0.657534,-1.0,-1.0,0.40458,-1.0,-1.0,-1.0,-0.333333,-1.0 1.0,0.375,1.0,1.0,-0.509434,-0.356164,-1.0,-1.0,-0.572519,1.0,-0.419355,0,0.333333,1.0 -1.0,-0.0416667,-1.0,0.333333,-0.207547,-0.680365,-1.0,1.0,0.496183,-1.0,-0.967742,0,-1.0,-1.0 -1.0,-0.0416667,1.0,-0.333333,-0.245283,-0.657534,-1.0,-1.0,0.328244,-1.0,-0.741935,-1.0,-0.333333,-1.0 1.0,0.291667,1.0,1.0,-0.566038,-0.525114,1.0,-1.0,0.358779,1.0,-0.548387,-1.0,0.333333,1.0 1.0,0.416667,-1.0,1.0,-0.735849,-0.347032,-1.0,-1.0,0.496183,1.0,-0.419355,0,0.333333,-1.0 1.0,0.541667,1.0,1.0,-0.660377,-0.607306,-1.0,1.0,-0.0687023,1.0,-0.967742,-1.0,-0.333333,-1.0 -1.0,-0.458333,1.0,1.0,-0.132075,-0.543379,-1.0,-1.0,0.633588,-1.0,-1.0,-1.0,-1.0,-1.0 1.0,0.458333,1.0,1.0,-0.509434,-0.452055,-1.0,1.0,-0.618321,1.0,-0.290323,1.0,-0.333333,-1.0 -1.0,0.0416667,1.0,0.333333,0.0566038,-0.515982,-1.0,1.0,0.435115,-1.0,-0.483871,-1.0,-1.0,1.0 -1.0,-0.291667,-1.0,0.333333,-0.0943396,-0.767123,-1.0,1.0,0.358779,1.0,-0.548387,1.0,-1.0,-1.0 -1.0,0.583333,-1.0,0.333333,0.0943396,-0.310502,-1.0,-1.0,0.541985,-1.0,-1.0,-1.0,-0.333333,-1.0 1.0,0.125,1.0,1.0,-0.415094,-0.438356,1.0,1.0,0.114504,1.0,-0.612903,0,-0.333333,-1.0 -1.0,-0.791667,-1.0,-0.333333,-0.54717,-0.616438,-1.0,-1.0,0.847328,-1.0,-0.774194,-1.0,-1.0,-1.0 
-1.0,0.166667,1.0,1.0,-0.283019,-0.630137,-1.0,-1.0,0.480916,1.0,-1.0,-1.0,-1.0,1.0 1.0,0.458333,1.0,1.0,-0.0377358,-0.607306,-1.0,1.0,-0.0687023,-1.0,-0.354839,0,0.333333,0.5 -1.0,0.25,1.0,1.0,-0.169811,-0.3379,-1.0,1.0,0.694656,-1.0,-1.0,-1.0,-1.0,-1.0 1.0,-0.125,1.0,0.333333,-0.132075,-0.511416,-1.0,-1.0,0.40458,-1.0,-0.806452,0,-0.333333,1.0 -1.0,-0.0833333,1.0,-1.0,-0.415094,-0.60274,-1.0,1.0,-0.175573,1.0,-0.548387,-1.0,-0.333333,-1.0 1.0,0.0416667,1.0,-0.333333,0.849057,-0.283105,-1.0,1.0,0.89313,-1.0,-1.0,-1.0,-0.333333,1.0 1.0,0,1.0,1.0,-0.45283,-0.287671,-1.0,-1.0,-0.633588,1.0,-0.354839,0,0.333333,1.0 1.0,-0.0416667,1.0,1.0,-0.660377,-0.525114,-1.0,-1.0,0.358779,-1.0,-1.0,-1.0,-0.333333,-1.0 1.0,-0.541667,1.0,1.0,-0.698113,-0.812785,-1.0,1.0,-0.343511,1.0,-0.354839,0,-1.0,1.0 1.0,0.208333,1.0,0.333333,-0.283019,-0.552511,-1.0,1.0,0.557252,-1.0,0.0322581,-1.0,0.333333,1.0 -1.0,-0.5,-1.0,0.333333,-0.660377,-0.351598,-1.0,1.0,0.541985,1.0,-1.0,-1.0,-1.0,-1.0 -1.0,-0.5,1.0,0.333333,-0.660377,-0.43379,-1.0,-1.0,0.648855,-1.0,-1.0,-1.0,-1.0,-1.0 -1.0,-0.125,-1.0,0.333333,-0.509434,-0.575342,-1.0,-1.0,0.328244,-1.0,-0.483871,0,-1.0,-1.0 -1.0,0.0416667,-1.0,0.333333,-0.735849,-0.356164,-1.0,1.0,0.465649,-1.0,-1.0,-1.0,-1.0,-1.0 -1.0,0.458333,-1.0,1.0,-0.320755,-0.191781,-1.0,-1.0,-0.221374,-1.0,-0.354839,0,0.333333,-1.0 -1.0,-0.0833333,-1.0,0.333333,-0.320755,-0.406393,-1.0,1.0,0.19084,-1.0,-0.83871,-1.0,-1.0,-1.0 -1.0,-0.291667,-1.0,-0.333333,-0.792453,-0.643836,-1.0,-1.0,0.541985,-1.0,-1.0,-1.0,-1.0,-1.0 1.0,0.0833333,1.0,1.0,-0.132075,-0.584475,-1.0,-1.0,-0.389313,1.0,0.806452,1.0,-1.0,1.0 -1.0,-0.333333,1.0,-0.333333,-0.358491,-0.16895,-1.0,1.0,0.51145,-1.0,-1.0,-1.0,-1.0,-1.0 -1.0,0.125,1.0,-1.0,-0.509434,-0.694064,-1.0,1.0,0.389313,-1.0,-0.387097,0,-1.0,1.0 1.0,0.541667,-1.0,1.0,0.584906,-0.534247,1.0,-1.0,0.435115,1.0,-0.677419,0,0.333333,1.0 1.0,-0.625,1.0,-1.0,-0.509434,-0.520548,-1.0,-1.0,0.694656,1.0,0.225806,0,-1.0,1.0 
1.0,0.375,-1.0,1.0,0.0566038,-0.461187,-1.0,-1.0,0.267176,1.0,-0.548387,0,-1.0,-1.0 -1.0,0.0833333,1.0,-0.333333,-0.320755,-0.378995,-1.0,-1.0,0.282443,-1.0,-1.0,-1.0,-1.0,-1.0 1.0,0.208333,1.0,1.0,-0.358491,-0.392694,-1.0,1.0,-0.0992366,1.0,-0.0322581,0,0.333333,1.0 -1.0,-0.416667,1.0,1.0,-0.698113,-0.611872,-1.0,-1.0,0.374046,-1.0,-1.0,-1.0,-1.0,1.0 -1.0,0.458333,-1.0,1.0,0.622642,-0.0913242,-1.0,-1.0,0.267176,1.0,-1.0,-1.0,-1.0,-1.0 -1.0,-0.125,-1.0,1.0,-0.698113,-0.415525,-1.0,1.0,0.343511,-1.0,-1.0,-1.0,-1.0,-1.0 -1.0,0,1.0,0.333333,-0.320755,-0.675799,1.0,1.0,0.236641,-1.0,-0.612903,1.0,-1.0,-1.0 -1.0,-0.333333,-1.0,1.0,-0.169811,-0.497717,-1.0,1.0,0.236641,1.0,-0.935484,0,-1.0,-1.0 1.0,0.5,1.0,-1.0,-0.169811,-0.287671,1.0,1.0,0.572519,-1.0,-0.548387,0,-0.333333,-1.0 -1.0,0.666667,1.0,-1.0,0.245283,-0.506849,1.0,1.0,-0.0839695,-1.0,-0.967742,0,-0.333333,-1.0 1.0,0.666667,1.0,0.333333,-0.132075,-0.415525,-1.0,1.0,0.145038,-1.0,-0.354839,0,1.0,1.0 1.0,0.583333,1.0,1.0,-0.886792,-0.210046,-1.0,1.0,-0.175573,1.0,-0.709677,0,0.333333,-1.0 -1.0,0.625,-1.0,0.333333,-0.509434,-0.611872,-1.0,1.0,-0.328244,-1.0,-0.516129,0,-1.0,-1.0 -1.0,-0.791667,1.0,-1.0,-0.54717,-0.744292,-1.0,1.0,0.572519,-1.0,-1.0,-1.0,-1.0,-1.0 1.0,0.375,-1.0,1.0,-0.169811,-0.232877,1.0,-1.0,-0.465649,-1.0,-0.387097,0,1.0,-1.0 1.0,-0.0833333,1.0,1.0,-0.132075,-0.214612,-1.0,-1.0,-0.221374,1.0,0.354839,0,1.0,1.0 1.0,-0.291667,1.0,0.333333,0.0566038,-0.520548,-1.0,-1.0,0.160305,-1.0,0.16129,0,-1.0,-1.0 1.0,0.583333,1.0,1.0,-0.415094,-0.415525,1.0,-1.0,0.40458,-1.0,-0.935484,0,0.333333,1.0 -1.0,-0.125,1.0,0.333333,-0.339623,-0.680365,-1.0,-1.0,0.40458,-1.0,-1.0,-1.0,-1.0,-1.0 -1.0,-0.458333,1.0,0.333333,-0.509434,-0.479452,1.0,-1.0,0.877863,-1.0,-0.741935,1.0,-1.0,1.0 1.0,0.125,-1.0,1.0,-0.245283,0.292237,-1.0,1.0,0.206107,1.0,-0.387097,0,0.333333,1.0 1.0,-0.5,1.0,1.0,-0.698113,-0.789954,-1.0,1.0,0.328244,-1.0,-1.0,-1.0,-1.0,1.0 
-1.0,-0.458333,-1.0,1.0,-0.849057,-0.365297,-1.0,1.0,-0.221374,-1.0,-0.806452,0,-1.0,-1.0 -1.0,0,1.0,0.333333,-0.320755,-0.452055,1.0,1.0,0.557252,-1.0,-1.0,-1.0,1.0,-1.0 -1.0,-0.416667,1.0,0.333333,-0.320755,-0.136986,-1.0,-1.0,0.389313,-1.0,-0.387097,-1.0,-0.333333,-1.0 1.0,0.125,1.0,1.0,-0.283019,-0.73516,-1.0,1.0,-0.480916,1.0,-0.322581,0,-0.333333,0.5 -1.0,-0.0416667,1.0,1.0,-0.735849,-0.511416,1.0,-1.0,0.160305,-1.0,-0.967742,-1.0,1.0,1.0 -1.0,0.375,-1.0,1.0,-0.132075,0.223744,-1.0,1.0,0.312977,-1.0,-0.612903,0,-1.0,-1.0 1.0,0.708333,1.0,0.333333,0.245283,-0.347032,-1.0,-1.0,-0.374046,1.0,-0.0645161,0,-0.333333,1.0 -1.0,0.0416667,1.0,1.0,-0.132075,-0.484018,-1.0,-1.0,0.358779,-1.0,-0.612903,-1.0,-1.0,-1.0 1.0,0.708333,1.0,1.0,-0.0377358,-0.780822,-1.0,-1.0,-0.175573,1.0,-0.16129,1.0,-1.0,1.0 -1.0,0.0416667,1.0,-0.333333,-0.735849,-0.164384,-1.0,-1.0,0.29771,-1.0,-1.0,-1.0,-1.0,1.0 1.0,-0.75,1.0,1.0,-0.396226,-0.287671,-1.0,1.0,0.29771,1.0,-1.0,-1.0,-1.0,1.0 -1.0,-0.208333,1.0,0.333333,-0.433962,-0.410959,1.0,-1.0,0.587786,-1.0,-1.0,-1.0,0.333333,-1.0 -1.0,0.0833333,-1.0,-0.333333,-0.226415,-0.43379,-1.0,1.0,0.374046,-1.0,-0.548387,0,-1.0,-1.0 -1.0,0.208333,-1.0,1.0,-0.886792,-0.442922,-1.0,1.0,-0.221374,-1.0,-0.677419,0,-1.0,-1.0 -1.0,0.0416667,-1.0,0.333333,-0.698113,-0.598174,-1.0,-1.0,0.328244,-1.0,-0.483871,0,-1.0,-1.0 -1.0,0.666667,-1.0,-1.0,-0.132075,-0.484018,-1.0,-1.0,0.221374,-1.0,-0.419355,-1.0,0.333333,-1.0 1.0,1.0,1.0,1.0,-0.415094,-0.187215,-1.0,1.0,0.389313,1.0,-1.0,-1.0,1.0,-1.0 -1.0,0.625,1.0,0.333333,-0.54717,-0.310502,-1.0,-1.0,0.221374,-1.0,-0.677419,-1.0,-0.333333,1.0 1.0,0.208333,1.0,1.0,-0.415094,-0.205479,-1.0,1.0,0.526718,-1.0,-1.0,-1.0,0.333333,1.0 1.0,0.291667,1.0,1.0,-0.415094,-0.39726,-1.0,1.0,0.0687023,1.0,-0.0967742,0,-0.333333,1.0 1.0,-0.0833333,1.0,1.0,-0.132075,-0.210046,-1.0,-1.0,0.557252,1.0,-0.483871,-1.0,-1.0,1.0 1.0,0.0833333,1.0,1.0,0.245283,-0.255708,-1.0,1.0,0.129771,1.0,-0.741935,0,-0.333333,1.0 
-1.0,-0.0416667,1.0,-1.0,0.0943396,-0.214612,1.0,-1.0,0.633588,-1.0,-0.612903,0,-1.0,1.0 -1.0,0.291667,-1.0,0.333333,-0.849057,-0.123288,-1.0,-1.0,0.358779,-1.0,-1.0,-1.0,-0.333333,-1.0 -1.0,0.208333,1.0,0.333333,-0.792453,-0.479452,-1.0,1.0,0.267176,1.0,-0.806452,0,-1.0,1.0 1.0,0.458333,1.0,0.333333,-0.415094,-0.164384,-1.0,-1.0,-0.0839695,1.0,-0.419355,0,-1.0,1.0 -1.0,-0.666667,1.0,0.333333,-0.320755,-0.43379,-1.0,-1.0,0.770992,-1.0,0.129032,1.0,-1.0,-1.0 1.0,0.25,1.0,-1.0,0.433962,-0.260274,-1.0,1.0,0.343511,-1.0,-0.935484,0,-1.0,1.0 -1.0,-0.0833333,1.0,0.333333,-0.415094,-0.456621,1.0,1.0,0.450382,-1.0,-0.225806,0,-1.0,-1.0 -1.0,-0.416667,-1.0,0.333333,-0.471698,-0.60274,-1.0,-1.0,0.435115,-1.0,-0.935484,0,-1.0,-1.0 1.0,0.208333,1.0,1.0,-0.358491,-0.589041,-1.0,1.0,-0.0839695,1.0,-0.290323,0,1.0,1.0 -1.0,-1.0,1.0,-0.333333,-0.320755,-0.643836,-1.0,1.0,1.0,-1.0,-1.0,-1.0,-1.0,-1.0 -1.0,-0.5,-1.0,-0.333333,-0.320755,-0.643836,-1.0,1.0,0.541985,-1.0,-0.548387,-1.0,-1.0,-1.0 -1.0,0.416667,-1.0,0.333333,-0.226415,-0.424658,-1.0,1.0,0.541985,-1.0,-1.0,-1.0,-1.0,-1.0 -1.0,-0.0833333,1.0,0.333333,-1.0,-0.538813,-1.0,-1.0,0.267176,1.0,-1.0,-1.0,-0.333333,1.0 -1.0,0.0416667,1.0,0.333333,-0.509434,-0.39726,-1.0,1.0,0.160305,-1.0,-0.870968,0,-1.0,1.0 -1.0,-0.375,1.0,-0.333333,-0.509434,-0.570776,-1.0,-1.0,0.51145,-1.0,-1.0,-1.0,-1.0,-1.0 1.0,0.0416667,1.0,1.0,-0.698113,-0.484018,-1.0,-1.0,-0.160305,1.0,-0.0967742,0,-0.333333,1.0 1.0,0.5,1.0,1.0,-0.226415,-0.415525,-1.0,1.0,-0.145038,-1.0,-0.0967742,0,-0.333333,1.0 -1.0,0.166667,1.0,0.333333,0.0566038,-0.808219,-1.0,-1.0,0.572519,-1.0,-0.483871,-1.0,-1.0,-1.0 1.0,0.416667,1.0,1.0,-0.320755,-0.0684932,1.0,1.0,-0.0687023,1.0,-0.419355,-1.0,1.0,1.0 -1.0,-0.75,-1.0,1.0,-0.169811,-0.739726,-1.0,-1.0,0.694656,-1.0,-0.548387,-1.0,-1.0,-1.0 -1.0,-0.5,1.0,-0.333333,-0.226415,-0.648402,-1.0,-1.0,-0.0687023,-1.0,-1.0,0,-1.0,0.5 1.0,0.375,-1.0,0.333333,-0.320755,-0.374429,-1.0,-1.0,-0.603053,-1.0,-0.612903,0,-0.333333,1.0 
1.0,-0.416667,-1.0,1.0,-0.283019,-0.0182648,1.0,1.0,-0.00763359,1.0,-0.0322581,0,-1.0,1.0 -1.0,0.208333,-1.0,-1.0,0.0566038,-0.283105,1.0,1.0,0.389313,-1.0,-0.677419,-1.0,-1.0,-1.0 -1.0,-0.0416667,1.0,-1.0,-0.54717,-0.726027,-1.0,1.0,0.816794,-1.0,-1.0,0,-1.0,0.5 1.0,0.333333,-1.0,1.0,-0.0377358,-0.173516,-1.0,1.0,0.145038,1.0,-0.677419,0,-1.0,1.0 1.0,-0.583333,1.0,1.0,-0.54717,-0.575342,-1.0,-1.0,0.0534351,-1.0,-0.612903,0,-1.0,1.0 -1.0,-0.333333,1.0,1.0,-0.603774,-0.388128,-1.0,1.0,0.740458,-1.0,-1.0,-1.0,-1.0,-1.0 1.0,-0.0416667,1.0,1.0,-0.358491,-0.410959,-1.0,-1.0,0.374046,1.0,-1.0,-1.0,-0.333333,1.0 -1.0,0.375,1.0,0.333333,-0.320755,-0.520548,-1.0,-1.0,0.145038,-1.0,-0.419355,0,1.0,1.0 1.0,0.375,-1.0,1.0,0.245283,-0.826484,-1.0,1.0,0.129771,-1.0,1.0,1.0,1.0,1.0 -1.0,0,-1.0,1.0,-0.169811,-0.506849,-1.0,1.0,0.358779,-1.0,-1.0,-1.0,-1.0,-1.0 1.0,-0.416667,1.0,1.0,-0.509434,-0.767123,-1.0,1.0,-0.251908,1.0,-0.193548,0,-1.0,1.0 -1.0,-0.25,1.0,0.333333,-0.169811,-0.401826,-1.0,1.0,0.29771,-1.0,-1.0,-1.0,-1.0,-1.0 -1.0,-0.0416667,1.0,-0.333333,-0.509434,-0.0913242,-1.0,-1.0,0.541985,-1.0,-0.935484,-1.0,-1.0,-1.0 1.0,0.625,1.0,0.333333,0.622642,-0.324201,1.0,1.0,0.206107,1.0,-0.483871,0,-1.0,1.0 -1.0,-0.583333,1.0,0.333333,-0.132075,-0.109589,-1.0,1.0,0.694656,-1.0,-1.0,-1.0,-1.0,-1.0 -1.0,0,-1.0,1.0,-0.320755,-0.369863,-1.0,1.0,0.0992366,-1.0,-0.870968,0,-1.0,-1.0 1.0,0.375,-1.0,1.0,-0.132075,-0.351598,-1.0,1.0,0.358779,-1.0,0.16129,1.0,0.333333,-1.0 -1.0,-0.0833333,-1.0,0.333333,-0.132075,-0.16895,-1.0,1.0,0.0839695,-1.0,-0.516129,-1.0,-0.333333,-1.0 1.0,0.291667,1.0,1.0,-0.320755,-0.420091,-1.0,-1.0,0.114504,1.0,-0.548387,-1.0,-0.333333,1.0 1.0,0.5,1.0,1.0,-0.698113,-0.442922,-1.0,1.0,0.328244,-1.0,-0.806452,-1.0,0.333333,0.5 -1.0,0.5,-1.0,0.333333,0.150943,-0.347032,-1.0,-1.0,0.175573,-1.0,-0.741935,-1.0,-1.0,-1.0 1.0,0.291667,1.0,0.333333,-0.132075,-0.730594,-1.0,1.0,0.282443,-1.0,-0.0322581,0,-1.0,-1.0 
1.0,0.291667,1.0,1.0,-0.0377358,-0.287671,-1.0,1.0,0.0839695,1.0,-0.0967742,0,0.333333,1.0 1.0,0.0416667,1.0,1.0,-0.509434,-0.716895,-1.0,-1.0,-0.358779,-1.0,-0.548387,0,-0.333333,1.0 -1.0,-0.375,1.0,-0.333333,-0.320755,-0.575342,-1.0,1.0,0.78626,-1.0,-1.0,-1.0,-1.0,-1.0 1.0,-0.375,1.0,1.0,-0.660377,-0.251142,-1.0,1.0,0.251908,-1.0,-1.0,-1.0,-0.333333,-1.0 -1.0,-0.0833333,1.0,0.333333,-0.698113,-0.776256,-1.0,-1.0,-0.206107,-1.0,-0.806452,-1.0,-1.0,-1.0 -1.0,0.25,1.0,0.333333,0.0566038,-0.607306,1.0,-1.0,0.312977,-1.0,-0.483871,-1.0,-1.0,-1.0 -1.0,0.75,-1.0,-0.333333,0.245283,-0.196347,-1.0,-1.0,0.389313,-1.0,-0.870968,-1.0,0.333333,-1.0 -1.0,0.333333,1.0,0.333333,0.0566038,-0.465753,1.0,-1.0,0.00763359,1.0,-0.677419,0,-1.0,-1.0 1.0,0.0833333,1.0,1.0,-0.283019,0.0365297,-1.0,-1.0,-0.0687023,1.0,-0.612903,0,-0.333333,1.0 1.0,0.458333,1.0,0.333333,-0.132075,-0.0456621,-1.0,-1.0,0.328244,-1.0,-1.0,-1.0,-1.0,-1.0 -1.0,-0.416667,1.0,1.0,0.0566038,-0.447489,-1.0,-1.0,0.526718,-1.0,-0.516129,-1.0,-1.0,-1.0 -1.0,0.208333,-1.0,0.333333,-0.509434,-0.0228311,-1.0,-1.0,0.541985,-1.0,-1.0,-1.0,-1.0,-1.0 1.0,0.291667,1.0,1.0,-0.320755,-0.634703,-1.0,1.0,-0.0687023,1.0,-0.225806,0,0.333333,1.0 1.0,0.208333,1.0,-0.333333,-0.509434,-0.278539,-1.0,1.0,0.358779,-1.0,-0.419355,0,-1.0,-1.0 -1.0,-0.166667,1.0,-0.333333,-0.320755,-0.360731,-1.0,-1.0,0.526718,-1.0,-0.806452,-1.0,-1.0,-1.0 1.0,-0.208333,1.0,-0.333333,-0.698113,-0.52968,-1.0,-1.0,0.480916,-1.0,-0.677419,1.0,-1.0,1.0 -1.0,-0.0416667,1.0,0.333333,0.471698,-0.666667,1.0,-1.0,0.389313,-1.0,-0.83871,-1.0,-1.0,1.0 -1.0,-0.375,1.0,-0.333333,-0.509434,-0.374429,-1.0,-1.0,0.557252,-1.0,-1.0,-1.0,-1.0,1.0 -1.0,0.125,-1.0,-0.333333,-0.132075,-0.232877,-1.0,1.0,0.251908,-1.0,-0.580645,0,-1.0,-1.0 -1.0,0.166667,1.0,1.0,-0.132075,-0.69863,-1.0,-1.0,0.175573,-1.0,-0.870968,0,-1.0,0.5 1.0,0.583333,1.0,1.0,0.245283,-0.269406,-1.0,1.0,-0.435115,1.0,-0.516129,0,1.0,-1.0././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 
mtime=1689936673.0 deap-1.4.1/examples/ga/knapsack.py0000644000076500000240000000675414456461441016261 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . import random import numpy from deap import algorithms from deap import base from deap import creator from deap import tools IND_INIT_SIZE = 5 MAX_ITEM = 50 MAX_WEIGHT = 50 NBR_ITEMS = 20 # To assure reproducibility, the RNG seed is set prior to the items # dict initialization. It is also seeded in main(). random.seed(64) # Create the item dictionary: item name is an integer, and value is # a (weight, value) 2-tuple. items = {} # Create random items and store them in the items' dictionary. 
for i in range(NBR_ITEMS): items[i] = (random.randint(1, 10), random.uniform(0, 100)) creator.create("Fitness", base.Fitness, weights=(-1.0, 1.0)) creator.create("Individual", set, fitness=creator.Fitness) toolbox = base.Toolbox() # Attribute generator toolbox.register("attr_item", random.randrange, NBR_ITEMS) # Structure initializers toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_item, IND_INIT_SIZE) toolbox.register("population", tools.initRepeat, list, toolbox.individual) def evalKnapsack(individual): weight = 0.0 value = 0.0 for item in individual: weight += items[item][0] value += items[item][1] if len(individual) > MAX_ITEM or weight > MAX_WEIGHT: return 10000, 0 # Ensure overweighted bags are dominated return weight, value def cxSet(ind1, ind2): """Apply a crossover operation on input sets. The first child is the intersection of the two sets, the second child is the difference of the two sets. """ temp = set(ind1) # Used in order to keep type ind1 &= ind2 # Intersection (inplace) ind2 ^= temp # Symmetric Difference (inplace) return ind1, ind2 def mutSet(individual): """Mutation that pops or add an element.""" if random.random() < 0.5: if len(individual) > 0: # We cannot pop from an empty set individual.remove(random.choice(sorted(tuple(individual)))) else: individual.add(random.randrange(NBR_ITEMS)) return individual, toolbox.register("evaluate", evalKnapsack) toolbox.register("mate", cxSet) toolbox.register("mutate", mutSet) toolbox.register("select", tools.selNSGA2) def main(): random.seed(64) NGEN = 50 MU = 50 LAMBDA = 100 CXPB = 0.7 MUTPB = 0.2 pop = toolbox.population(n=MU) hof = tools.ParetoFront() stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean, axis=0) stats.register("std", numpy.std, axis=0) stats.register("min", numpy.min, axis=0) stats.register("max", numpy.max, axis=0) algorithms.eaMuPlusLambda(pop, toolbox, MU, LAMBDA, CXPB, MUTPB, NGEN, stats, halloffame=hof) return 
pop, stats, hof if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/knn.py0000644000076500000240000000655614456461441015254 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . import numpy import csv import operator import random class KNN(object): def __init__(self, k): self.k = k self.data = None self.labels = None self.ndim = 0 def train(self, data, labels): self.data = numpy.array(data) self.labels = numpy.array(labels) self.classes = numpy.unique(self.labels) self.ndim = len(self.data[0]) def predict(self, data, features=None): data = numpy.array(data) if features is None: features = numpy.ones(self.data.shape[1]) else: features = numpy.array(features) if data.ndim == 1: dist = self.data - data elif data.ndim == 2: dist = numpy.zeros((data.shape[0],) + self.data.shape) for i, d in enumerate(data): dist[i, :, :] = self.data - d else: raise ValueError("Cannot process data with dimensionality > 2") dist = features * dist dist = dist * dist dist = numpy.sum(dist, -1) dist = numpy.sqrt(dist) nns = numpy.argsort(dist) if data.ndim == 1: classes = dict((cls, 0) for cls in self.classes) for n in nns[:self.k]: classes[self.labels[n]] += 1 labels = sorted(classes.items(), key=operator.itemgetter(1))[-1][0] elif data.ndim == 2: labels = list() for i, d in enumerate(data): classes = 
dict((cls, 0) for cls in self.classes) for n in nns[i, :self.k]: classes[self.labels[n]] += 1 labels.append(sorted(classes.items(), key=operator.itemgetter(1))[-1][0]) return labels # Create a default internal KNN object # Read data from file FILE="heart_scale.csv" N_TRAIN=175 K=1 with open(FILE, "r") as data_csv: data = csv.reader(data_csv) trainset = list() trainlabels = list() rows = [row for row in data] random.shuffle(rows) for row in rows: trainlabels.append(float(row[0])) trainset.append([float(e) for e in row[1:]]) _knn = KNN(K) _knn.train(trainset[:N_TRAIN], trainlabels[:N_TRAIN]) def classification_rate(features): """Returns the classification rate of the default KNN.""" labels = _knn.predict(trainset[N_TRAIN:], features) return sum(x == y for x, y in zip(labels, trainlabels[N_TRAIN:]))/float(len(trainlabels[N_TRAIN:])) if __name__ == "__main__": trainset = [[1, 0], [1, 1], [1, 2]] trainlabels = [1, 2, 3] knn = KNN(1) knn.train(trainset, trainlabels) print("Single Data ===========") print(knn.predict([1, 0], [1, 1])) print("Multiple Data ===========") print(knn.predict([[1, 3], [1, 0]], [1, 1])) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/kursawefct.py0000644000076500000240000000557414456461441016643 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . 
import array import logging import random import numpy from deap import algorithms from deap import base from deap import benchmarks from deap import creator from deap import tools creator.create("FitnessMin", base.Fitness, weights=(-1.0, -1.0)) creator.create("Individual", array.array, typecode='d', fitness=creator.FitnessMin) toolbox = base.Toolbox() # Attribute generator toolbox.register("attr_float", random.uniform, -5, 5) # Structure initializers toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_float, 3) toolbox.register("population", tools.initRepeat, list, toolbox.individual) def checkBounds(min, max): def decorator(func): def wrappper(*args, **kargs): offspring = func(*args, **kargs) for child in offspring: for i in range(len(child)): if child[i] > max: child[i] = max elif child[i] < min: child[i] = min return offspring return wrappper return decorator toolbox.register("evaluate", benchmarks.kursawe) toolbox.register("mate", tools.cxBlend, alpha=1.5) toolbox.register("mutate", tools.mutGaussian, mu=0, sigma=3, indpb=0.3) toolbox.register("select", tools.selNSGA2) toolbox.decorate("mate", checkBounds(-5, 5)) toolbox.decorate("mutate", checkBounds(-5, 5)) def main(): random.seed(64) MU, LAMBDA = 50, 100 pop = toolbox.population(n=MU) hof = tools.ParetoFront() stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean, axis=0) stats.register("std", numpy.std, axis=0) stats.register("min", numpy.min, axis=0) stats.register("max", numpy.max, axis=0) algorithms.eaMuPlusLambda(pop, toolbox, mu=MU, lambda_=LAMBDA, cxpb=0.5, mutpb=0.2, ngen=150, stats=stats, halloffame=hof) return pop, stats, hof if __name__ == "__main__": pop, stats, hof = main() # import matplotlib.pyplot as plt # import numpy # # front = numpy.array([ind.fitness.values for ind in pop]) # plt.scatter(front[:,0], front[:,1], c="b") # plt.axis("tight") # plt.show() ././@PaxHeader0000000000000000000000000000002600000000000010213 
xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/mo_rhv.py0000644000076500000240000001557614456461441015762 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . # Regular Hypervolume-based Algorithm (greedy version) import array import random import json import numpy from math import sqrt from deap import algorithms from deap import base from deap import benchmarks from deap.benchmarks.tools import diversity, convergence, hypervolume from deap.tools.indicator import hv from deap import creator from deap import tools creator.create("FitnessMin", base.Fitness, weights=(-1.0, -1.0)) # Hypervolume contribution creator.create("FitnessHV", base.Fitness, weights=(1.0,)) creator.create("Individual", array.array, typecode='d', fitness=creator.FitnessMin, fitness_hv=creator.FitnessHV) toolbox = base.Toolbox() # Problem definition # Functions zdt1, zdt2, zdt3, zdt6 have bounds [0, 1] BOUND_LOW, BOUND_UP = 0.0, 1.0 # Functions zdt4 has bounds x1 = [0, 1], xn = [-5, 5], with n = 2, ..., 10 # BOUND_LOW, BOUND_UP = [0.0] + [-5.0]*9, [1.0] + [5.0]*9 # Functions zdt1, zdt2, zdt3 have 30 dimensions, zdt4 and zdt6 have 10 NDIM = 30 def uniform(low, up, size=None): try: return [random.uniform(a, b) for a, b in zip(low, up)] except TypeError: return [random.uniform(a, b) for a, b in zip([low] * size, [up] * size)] def hypervolume_contrib(front, **kargs): """Returns the hypervolume contribution of 
each individual. The provided *front* should be a set of non-dominated individuals having each a :attr:`fitness` attribute. """ # Must use wvalues * -1 since hypervolume use implicit minimization # And minimization in deap use max on -obj wobj = numpy.array([ind.fitness.wvalues for ind in front]) * -1 ref = kargs.get("ref", None) if ref is None: ref = numpy.max(wobj, axis=0) + 1 total_hv = hv.hypervolume(wobj, ref) def contribution(i): # The contribution of point p_i in point set P # is the hypervolume of P without p_i return total_hv - hv.hypervolume(numpy.concatenate((wobj[:i], wobj[i+1:])), ref) # Parallelization note: Cannot pickle local function return map(contribution, range(len(front))) toolbox.register("attr_float", uniform, BOUND_LOW, BOUND_UP, NDIM) toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.attr_float) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("evaluate", benchmarks.zdt1) toolbox.register("mate", tools.cxSimulatedBinaryBounded, low=BOUND_LOW, up=BOUND_UP, eta=20.0) toolbox.register("mutate", tools.mutPolynomialBounded, low=BOUND_LOW, up=BOUND_UP, eta=20.0, indpb=1.0/NDIM) toolbox.register("sort", tools.sortLogNondominated) # Selection is based on HV fitness toolbox.register("select", tools.selBest, fit_attr="fitness_hv") def main(seed=None): random.seed(seed) NGEN = 250 MU = 100 CXPB = 0.9 stats = tools.Statistics(lambda ind: ind.fitness.values) # stats.register("avg", numpy.mean, axis=0) # stats.register("std", numpy.std, axis=0) stats.register("min", numpy.min, axis=0) stats.register("max", numpy.max, axis=0) logbook = tools.Logbook() logbook.header = "gen", "evals", "std", "min", "avg", "max" pop = toolbox.population(n=MU) # Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in pop if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit record = 
stats.compile(pop) logbook.record(gen=0, evals=len(invalid_ind), **record) print(logbook.stream) # Begin the generational process for gen in range(1, NGEN): # Vary the population offspring = tools.selRandom(pop, len(pop)) offspring = [toolbox.clone(ind) for ind in offspring] for ind1, ind2 in zip(offspring[::2], offspring[1::2]): if random.random() <= CXPB: toolbox.mate(ind1, ind2) toolbox.mutate(ind1) toolbox.mutate(ind2) del ind1.fitness.values, ind2.fitness.values # Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in offspring if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit # Select the next generation population pop = pop + offspring fronts = toolbox.sort(pop, len(pop)) chosen = [] for i, front in enumerate(fronts): # Move is front to chosen population til it is almost full if len(chosen) + len(front) <= MU: chosen.extend(front) else: # Assign hypervolume contribution to individuals of front that # cannot be completely move over to chosen individuals fitness_hv = hypervolume_contrib(front) for ind, fit_hv in zip(front, fitness_hv): ind.fitness_hv.values = (fit_hv,) # Fill chosen with best individuals from inspect front # (based on hypervolume contribution) chosen.extend(toolbox.select(front, MU - len(chosen))) break pop = chosen record = stats.compile(pop) logbook.record(gen=gen, evals=len(invalid_ind), **record) print(logbook.stream) print("Final population hypervolume is %f" % hypervolume(pop, [11.0, 11.0])) return pop, logbook if __name__ == "__main__": # with open("pareto_front/zdt1_front.json") as optimal_front_data: # optimal_front = json.load(optimal_front_data) # Use 500 of the 1000 points in the json file # optimal_front = sorted(optimal_front[i] for i in range(0, len(optimal_front), 2)) pop, stats = main() # pop.sort(key=lambda x: x.fitness.values) # print(stats) # print("Convergence: ", convergence(pop, optimal_front)) # 
print("Diversity: ", diversity(pop, optimal_front[0], optimal_front[-1])) # import matplotlib.pyplot as plt # import numpy # front = numpy.array([ind.fitness.values for ind in pop]) # optimal_front = numpy.array(optimal_front) # plt.scatter(optimal_front[:,0], optimal_front[:,1], c="r") # plt.scatter(front[:,0], front[:,1], c="b") # plt.axis("tight") # plt.show() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/nqueens.py0000644000076500000240000000641714456461441016140 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . import random import numpy from deap import algorithms from deap import base from deap import creator from deap import tools #Problem parameter NB_QUEENS = 20 def evalNQueens(individual): """Evaluation function for the n-queens problem. The problem is to determine a configuration of n queens on a nxn chessboard such that no queen can be taken by one another. In this version, each queens is assigned to one column, and only one queen can be on each line. The evaluation function therefore only counts the number of conflicts along the diagonals. """ size = len(individual) #Count the number of conflicts with other queens. 
#The conflicts can only be diagonal, count on each diagonal line left_diagonal = [0] * (2*size-1) right_diagonal = [0] * (2*size-1) #Sum the number of queens on each diagonal: for i in range(size): left_diagonal[i+individual[i]] += 1 right_diagonal[size-1-i+individual[i]] += 1 #Count the number of conflicts on each diagonal sum_ = 0 for i in range(2*size-1): if left_diagonal[i] > 1: sum_ += left_diagonal[i] - 1 if right_diagonal[i] > 1: sum_ += right_diagonal[i] - 1 return sum_, creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", list, fitness=creator.FitnessMin) #Since there is only one queen per line, #individual are represented by a permutation toolbox = base.Toolbox() toolbox.register("permutation", random.sample, range(NB_QUEENS), NB_QUEENS) #Structure initializers #An individual is a list that represents the position of each queen. #Only the line is stored, the column is the index of the number in the list. toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.permutation) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("evaluate", evalNQueens) toolbox.register("mate", tools.cxPartialyMatched) toolbox.register("mutate", tools.mutShuffleIndexes, indpb=2.0/NB_QUEENS) toolbox.register("select", tools.selTournament, tournsize=3) def main(seed=0): random.seed(seed) pop = toolbox.population(n=300) hof = tools.HallOfFame(1) stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("Avg", numpy.mean) stats.register("Std", numpy.std) stats.register("Min", numpy.min) stats.register("Max", numpy.max) algorithms.eaSimple(pop, toolbox, cxpb=0.5, mutpb=0.2, ngen=100, stats=stats, halloffame=hof, verbose=True) return pop, stats, hof if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/nsga2.py0000644000076500000240000001205014456461441015462 
0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . import array import random import json import numpy from math import sqrt from deap import algorithms from deap import base from deap import benchmarks from deap.benchmarks.tools import diversity, convergence, hypervolume from deap import creator from deap import tools creator.create("FitnessMin", base.Fitness, weights=(-1.0, -1.0)) creator.create("Individual", array.array, typecode='d', fitness=creator.FitnessMin) toolbox = base.Toolbox() # Problem definition # Functions zdt1, zdt2, zdt3, zdt6 have bounds [0, 1] BOUND_LOW, BOUND_UP = 0.0, 1.0 # Functions zdt4 has bounds x1 = [0, 1], xn = [-5, 5], with n = 2, ..., 10 # BOUND_LOW, BOUND_UP = [0.0] + [-5.0]*9, [1.0] + [5.0]*9 # Functions zdt1, zdt2, zdt3 have 30 dimensions, zdt4 and zdt6 have 10 NDIM = 30 def uniform(low, up, size=None): try: return [random.uniform(a, b) for a, b in zip(low, up)] except TypeError: return [random.uniform(a, b) for a, b in zip([low] * size, [up] * size)] toolbox.register("attr_float", uniform, BOUND_LOW, BOUND_UP, NDIM) toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.attr_float) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("evaluate", benchmarks.zdt1) toolbox.register("mate", tools.cxSimulatedBinaryBounded, low=BOUND_LOW, up=BOUND_UP, eta=20.0) toolbox.register("mutate", 
tools.mutPolynomialBounded, low=BOUND_LOW, up=BOUND_UP, eta=20.0, indpb=1.0/NDIM) toolbox.register("select", tools.selNSGA2) def main(seed=None): random.seed(seed) NGEN = 250 MU = 100 CXPB = 0.9 stats = tools.Statistics(lambda ind: ind.fitness.values) # stats.register("avg", numpy.mean, axis=0) # stats.register("std", numpy.std, axis=0) stats.register("min", numpy.min, axis=0) stats.register("max", numpy.max, axis=0) logbook = tools.Logbook() logbook.header = "gen", "evals", "std", "min", "avg", "max" pop = toolbox.population(n=MU) # Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in pop if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit # This is just to assign the crowding distance to the individuals # no actual selection is done pop = toolbox.select(pop, len(pop)) record = stats.compile(pop) logbook.record(gen=0, evals=len(invalid_ind), **record) print(logbook.stream) # Begin the generational process for gen in range(1, NGEN): # Vary the population offspring = tools.selTournamentDCD(pop, len(pop)) offspring = [toolbox.clone(ind) for ind in offspring] for ind1, ind2 in zip(offspring[::2], offspring[1::2]): if random.random() <= CXPB: toolbox.mate(ind1, ind2) toolbox.mutate(ind1) toolbox.mutate(ind2) del ind1.fitness.values, ind2.fitness.values # Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in offspring if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit # Select the next generation population pop = toolbox.select(pop + offspring, MU) record = stats.compile(pop) logbook.record(gen=gen, evals=len(invalid_ind), **record) print(logbook.stream) print("Final population hypervolume is %f" % hypervolume(pop, [11.0, 11.0])) return pop, logbook if __name__ == "__main__": # with open("pareto_front/zdt1_front.json") as 
optimal_front_data: # optimal_front = json.load(optimal_front_data) # Use 500 of the 1000 points in the json file # optimal_front = sorted(optimal_front[i] for i in range(0, len(optimal_front), 2)) pop, stats = main() # pop.sort(key=lambda x: x.fitness.values) # print(stats) # print("Convergence: ", convergence(pop, optimal_front)) # print("Diversity: ", diversity(pop, optimal_front[0], optimal_front[-1])) # import matplotlib.pyplot as plt # import numpy # front = numpy.array([ind.fitness.values for ind in pop]) # optimal_front = numpy.array(optimal_front) # plt.scatter(optimal_front[:,0], optimal_front[:,1], c="r") # plt.scatter(front[:,0], front[:,1], c="b") # plt.axis("tight") # plt.show() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/nsga3.py0000644000076500000240000001015014456461441015462 0ustar00runnerstafffrom math import factorial import random import matplotlib.pyplot as plt import numpy import pymop.factory from deap import algorithms from deap import base from deap.benchmarks.tools import igd from deap import creator from deap import tools # Problem definition PROBLEM = "dtlz2" NOBJ = 3 K = 10 NDIM = NOBJ + K - 1 P = 12 H = factorial(NOBJ + P - 1) / (factorial(P) * factorial(NOBJ - 1)) BOUND_LOW, BOUND_UP = 0.0, 1.0 problem = pymop.factory.get_problem(PROBLEM, n_var=NDIM, n_obj=NOBJ) ## # Algorithm parameters MU = int(H + (4 - H % 4)) NGEN = 400 CXPB = 1.0 MUTPB = 1.0 ## # Create uniform reference point ref_points = tools.uniform_reference_points(NOBJ, P) # Create classes creator.create("FitnessMin", base.Fitness, weights=(-1.0,) * NOBJ) creator.create("Individual", list, fitness=creator.FitnessMin) ## # Toolbox initialization def uniform(low, up, size=None): try: return [random.uniform(a, b) for a, b in zip(low, up)] except TypeError: return [random.uniform(a, b) for a, b in zip([low] * size, [up] * size)] toolbox = base.Toolbox() toolbox.register("attr_float", uniform, BOUND_LOW, 
BOUND_UP, NDIM) toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.attr_float) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("evaluate", problem.evaluate, return_values_of=["F"]) toolbox.register("mate", tools.cxSimulatedBinaryBounded, low=BOUND_LOW, up=BOUND_UP, eta=30.0) toolbox.register("mutate", tools.mutPolynomialBounded, low=BOUND_LOW, up=BOUND_UP, eta=20.0, indpb=1.0/NDIM) toolbox.register("select", tools.selNSGA3, ref_points=ref_points) ## def main(seed=None): random.seed(seed) # Initialize statistics object stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean, axis=0) stats.register("std", numpy.std, axis=0) stats.register("min", numpy.min, axis=0) stats.register("max", numpy.max, axis=0) logbook = tools.Logbook() logbook.header = "gen", "evals", "std", "min", "avg", "max" pop = toolbox.population(n=MU) # Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in pop if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit # Compile statistics about the population record = stats.compile(pop) logbook.record(gen=0, evals=len(invalid_ind), **record) print(logbook.stream) # Begin the generational process for gen in range(1, NGEN): offspring = algorithms.varAnd(pop, toolbox, CXPB, MUTPB) # Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in offspring if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit # Select the next generation population from parents and offspring pop = toolbox.select(pop + offspring, MU) # Compile statistics about the new population record = stats.compile(pop) logbook.record(gen=gen, evals=len(invalid_ind), **record) print(logbook.stream) return pop, logbook if __name__ == "__main__": pop, stats = 
main() pop_fit = numpy.array([ind.fitness.values for ind in pop]) pf = problem.pareto_front(ref_points) print(igd(pop_fit, pf)) import matplotlib.pyplot as plt import mpl_toolkits.mplot3d as Axes3d fig = plt.figure(figsize=(7, 7)) ax = fig.add_subplot(111, projection="3d") p = numpy.array([ind.fitness.values for ind in pop]) ax.scatter(p[:, 0], p[:, 1], p[:, 2], marker="o", s=24, label="Final Population") ax.scatter(pf[:, 0], pf[:, 1], pf[:, 2], marker="x", c="k", s=32, label="Ideal Pareto Front") ref_points = tools.uniform_reference_points(NOBJ, P) ax.scatter(ref_points[:, 0], ref_points[:, 1], ref_points[:, 2], marker="o", s=24, label="Reference Points") ax.view_init(elev=11, azim=-25) ax.autoscale(tight=True) plt.legend() plt.tight_layout() plt.savefig("nsga3.png")././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/onemax.py0000644000076500000240000001227314456461441015746 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . 
# example which maximizes the sum of a list of integers # each of which can be 0 or 1 import random from deap import base from deap import creator from deap import tools creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", list, fitness=creator.FitnessMax) toolbox = base.Toolbox() # Attribute generator # define 'attr_bool' to be an attribute ('gene') # which corresponds to integers sampled uniformly # from the range [0,1] (i.e. 0 or 1 with equal # probability) toolbox.register("attr_bool", random.randint, 0, 1) # Structure initializers # define 'individual' to be an individual # consisting of 100 'attr_bool' elements ('genes') toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_bool, 100) # define the population to be a list of individuals toolbox.register("population", tools.initRepeat, list, toolbox.individual) # the goal ('fitness') function to be maximized def evalOneMax(individual): return sum(individual), #---------- # Operator registration #---------- # register the goal / fitness function toolbox.register("evaluate", evalOneMax) # register the crossover operator toolbox.register("mate", tools.cxTwoPoint) # register a mutation operator with a probability to # flip each attribute/gene of 0.05 toolbox.register("mutate", tools.mutFlipBit, indpb=0.05) # operator for selecting individuals for breeding the next # generation: each individual of the current generation # is replaced by the 'fittest' (best) of three individuals # drawn randomly from the current generation. 
toolbox.register("select", tools.selTournament, tournsize=3)

#----------


def main():
    """Run the OneMax GA by hand (no algorithms.eaSimple).

    Evolves a population of 300 bit-lists with two-point crossover,
    bit-flip mutation and tournament selection until an all-ones
    individual appears (fitness 100) or 1000 generations elapse.
    Prints per-generation statistics and the best individual found.
    """
    random.seed(64)

    # create an initial population of 300 individuals (where
    # each individual is a list of integers)
    pop = toolbox.population(n=300)

    # CXPB  is the probability with which two individuals
    #       are crossed
    #
    # MUTPB is the probability for mutating an individual
    CXPB, MUTPB = 0.5, 0.2

    print("Start of evolution")

    # Evaluate the entire population
    fitnesses = list(map(toolbox.evaluate, pop))
    for ind, fit in zip(pop, fitnesses):
        ind.fitness.values = fit

    print("  Evaluated %i individuals" % len(pop))

    # Extracting all the fitnesses of
    fits = [ind.fitness.values[0] for ind in pop]

    # Variable keeping track of the number of generations
    g = 0

    # Begin the evolution
    while max(fits) < 100 and g < 1000:
        # A new generation
        g = g + 1
        print("-- Generation %i --" % g)

        # Select the next generation individuals
        offspring = toolbox.select(pop, len(pop))
        # Clone the selected individuals
        # (selection returns references; clone before in-place variation)
        offspring = list(map(toolbox.clone, offspring))

        # Apply crossover and mutation on the offspring
        # (offspring[::2] / offspring[1::2] pairs consecutive individuals)
        for child1, child2 in zip(offspring[::2], offspring[1::2]):

            # cross two individuals with probability CXPB
            if random.random() < CXPB:
                toolbox.mate(child1, child2)

                # fitness values of the children
                # must be recalculated later
                del child1.fitness.values
                del child2.fitness.values

        for mutant in offspring:

            # mutate an individual with probability MUTPB
            if random.random() < MUTPB:
                toolbox.mutate(mutant)
                del mutant.fitness.values

        # Evaluate the individuals with an invalid fitness
        # (only those whose fitness was deleted above are re-evaluated)
        invalid_ind = [ind for ind in offspring if not ind.fitness.valid]
        fitnesses = map(toolbox.evaluate, invalid_ind)
        for ind, fit in zip(invalid_ind, fitnesses):
            ind.fitness.values = fit

        print("  Evaluated %i individuals" % len(invalid_ind))

        # The population is entirely replaced by the offspring
        pop[:] = offspring

        # Gather all the fitnesses in one list and print the stats
        fits = [ind.fitness.values[0] for ind in pop]

        length = len(pop)
        mean = sum(fits) / length
        sum2 = sum(x*x for x in fits)
        # population standard deviation via E[x^2] - E[x]^2
        std = abs(sum2 / length - mean**2)**0.5

        print("  Min %s" % min(fits))
        print("  Max %s" % max(fits))
        print("  Avg %s" % mean)
        print("  Std %s" % std)

    print("-- End of (successful) evolution --")

    best_ind = tools.selBest(pop, 1)[0]
    print("Best individual is %s, %s" % (best_ind, best_ind.fitness.values))

if __name__ == "__main__":
    main()
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/onemax_island.py0000644000076500000240000001220514456461441017273 0ustar00runnerstaff# This file is part of DEAP.
#
# DEAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# DEAP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with DEAP. If not, see <http://www.gnu.org/licenses/>.
import array import random import numpy from collections import deque from multiprocessing import Event, Pipe, Process from deap import algorithms from deap import base from deap import creator from deap import tools creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", array.array, typecode='b', fitness=creator.FitnessMax) toolbox = base.Toolbox() # Attribute generator toolbox.register("attr_bool", random.randint, 0, 1) # Structure initializers toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_bool, 100) toolbox.register("population", tools.initRepeat, list, toolbox.individual) def evalOneMax(individual): return sum(individual), def migPipe(deme, k, pipein, pipeout, selection, replacement=None): """Migration using pipes between initialized processes. It first selects *k* individuals from the *deme* and writes them in *pipeout*. Then it reads the individuals from *pipein* and replace some individuals in the deme. The replacement strategy shall not select twice the same individual. :param deme: A list of individuals on which to operate migration. :param k: The number of individuals to migrate. :param pipein: A :class:`~multiprocessing.Pipe` from which to read immigrants. :param pipeout: A :class:`~multiprocessing.Pipe` in which to write emigrants. :param selection: The function to use for selecting the emigrants. :param replacement: The function to use to select which individuals will be replaced. If :obj:`None` (default) the individuals that leave the population are directly replaced. 
""" emigrants = selection(deme, k) if replacement is None: # If no replacement strategy is selected, replace those who migrate immigrants = emigrants else: # Else select those who will be replaced immigrants = replacement(deme, k) pipeout.send(emigrants) buf = pipein.recv() for place, immigrant in zip(immigrants, buf): indx = deme.index(place) deme[indx] = immigrant toolbox.register("evaluate", evalOneMax) toolbox.register("mate", tools.cxTwoPoint) toolbox.register("mutate", tools.mutFlipBit, indpb=0.05) toolbox.register("select", tools.selTournament, tournsize=3) def main(procid, pipein, pipeout, sync, seed=None): random.seed(seed) toolbox.register("migrate", migPipe, k=5, pipein=pipein, pipeout=pipeout, selection=tools.selBest, replacement=random.sample) MU = 300 NGEN = 40 CXPB = 0.5 MUTPB = 0.2 MIG_RATE = 5 deme = toolbox.population(n=MU) hof = tools.HallOfFame(1) stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) logbook = tools.Logbook() logbook.header = "gen", "deme", "evals", "std", "min", "avg", "max" for ind in deme: ind.fitness.values = toolbox.evaluate(ind) record = stats.compile(deme) logbook.record(gen=0, deme=procid, evals=len(deme), **record) hof.update(deme) if procid == 0: # Synchronization needed to log header on top and only once print(logbook.stream) sync.set() else: logbook.log_header = False # Never output the header sync.wait() print(logbook.stream) for gen in range(1, NGEN): deme = toolbox.select(deme, len(deme)) deme = algorithms.varAnd(deme, toolbox, cxpb=CXPB, mutpb=MUTPB) invalid_ind = [ind for ind in deme if not ind.fitness.valid] for ind in invalid_ind: ind.fitness.values = toolbox.evaluate(ind) hof.update(deme) record = stats.compile(deme) logbook.record(gen=gen, deme=procid, evals=len(deme), **record) print(logbook.stream) if gen % MIG_RATE == 0 and gen > 0: toolbox.migrate(deme) if __name__ == 
"__main__": random.seed(64) NBR_DEMES = 3 pipes = [Pipe(False) for _ in range(NBR_DEMES)] pipes_in = deque(p[0] for p in pipes) pipes_out = deque(p[1] for p in pipes) pipes_in.rotate(1) pipes_out.rotate(-1) e = Event() processes = [Process(target=main, args=(i, ipipe, opipe, e, random.random())) for i, (ipipe, opipe) in enumerate(zip(pipes_in, pipes_out))] for proc in processes: proc.start() for proc in processes: proc.join() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/onemax_island_scoop.py0000644000076500000240000000444014456461441020500 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . 
import array import random import numpy from functools import partial from deap import algorithms from deap import base from deap import creator from deap import tools from scoop import futures creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", array.array, typecode='b', fitness=creator.FitnessMax) toolbox = base.Toolbox() # Attribute generator toolbox.register("attr_bool", random.randint, 0, 1) # Structure initializers toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_bool, 100) toolbox.register("population", tools.initRepeat, list, toolbox.individual) def evalOneMax(individual): return sum(individual), toolbox.register("evaluate", evalOneMax) toolbox.register("mate", tools.cxTwoPoint) toolbox.register("mutate", tools.mutFlipBit, indpb=0.05) toolbox.register("select", tools.selTournament, tournsize=3) toolbox.register("map", futures.map) def main(): random.seed(64) NISLES = 5 islands = [toolbox.population(n=300) for i in range(NISLES)] # Unregister unpicklable methods before sending the toolbox. toolbox.unregister("attr_bool") toolbox.unregister("individual") toolbox.unregister("population") NGEN, FREQ = 40, 5 toolbox.register("algorithm", algorithms.eaSimple, toolbox=toolbox, cxpb=0.5, mutpb=0.2, ngen=FREQ, verbose=False) for i in range(0, NGEN, FREQ): results = toolbox.map(toolbox.algorithm, islands) islands = [pop for pop, logbook in results] tools.migRing(islands, 15, tools.selBest) return islands if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/onemax_mp.py0000755000076500000240000000446114456461441016445 0ustar00runnerstaff#!/usr/bin/env python2.7 # This file is part of DEAP. 
# # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . import array import multiprocessing import random import sys if sys.version_info < (2, 7): print("mpga_onemax example requires Python >= 2.7.") exit(1) import numpy from deap import algorithms from deap import base from deap import creator from deap import tools def evalOneMax(individual): return sum(individual), def main(seed): random.seed(seed) creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", array.array, typecode='b', fitness=creator.FitnessMax) toolbox = base.Toolbox() # Attribute generator toolbox.register("attr_bool", random.randint, 0, 1) # Structure initializers toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_bool, 100) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("evaluate", evalOneMax) toolbox.register("mate", tools.cxTwoPoint) toolbox.register("mutate", tools.mutFlipBit, indpb=0.05) toolbox.register("select", tools.selTournament, tournsize=3) # Process Pool of 4 workers pool = multiprocessing.Pool(processes=4) toolbox.register("map", pool.map) pop = toolbox.population(n=300) hof = tools.HallOfFame(1) stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) algorithms.eaSimple(pop, toolbox, cxpb=0.5, mutpb=0.2, 
ngen=40, stats=stats, halloffame=hof) pool.close() if __name__ == "__main__": main(64) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/onemax_multidemic.py0000644000076500000240000000611414456461441020157 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . import array import random import numpy from deap import algorithms from deap import base from deap import creator from deap import tools creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", array.array, typecode='b', fitness=creator.FitnessMax) toolbox = base.Toolbox() # Attribute generator toolbox.register("attr_bool", random.randint, 0, 1) # Structure initializers toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_bool, 100) toolbox.register("population", tools.initRepeat, list, toolbox.individual) def evalOneMax(individual): return sum(individual), toolbox.register("evaluate", evalOneMax) toolbox.register("mate", tools.cxTwoPoint) toolbox.register("mutate", tools.mutFlipBit, indpb=0.05) toolbox.register("select", tools.selTournament, tournsize=3) toolbox.register("migrate", tools.migRing, k=5, selection=tools.selBest, replacement=random.sample) def main(): random.seed(64) NBR_DEMES = 3 MU = 300 NGEN = 40 CXPB = 0.5 MUTPB = 0.2 MIG_RATE = 5 demes = [toolbox.population(n=MU) for _ in 
range(NBR_DEMES)] hof = tools.HallOfFame(1) stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) logbook = tools.Logbook() logbook.header = "gen", "deme", "evals", "std", "min", "avg", "max" for idx, deme in enumerate(demes): for ind in deme: ind.fitness.values = toolbox.evaluate(ind) logbook.record(gen=0, deme=idx, evals=len(deme), **stats.compile(deme)) hof.update(deme) print(logbook.stream) gen = 1 while gen <= NGEN and logbook[-1]["max"] < 100.0: for idx, deme in enumerate(demes): deme[:] = toolbox.select(deme, len(deme)) deme[:] = algorithms.varAnd(deme, toolbox, cxpb=CXPB, mutpb=MUTPB) invalid_ind = [ind for ind in deme if not ind.fitness.valid] for ind in invalid_ind: ind.fitness.values = toolbox.evaluate(ind) logbook.record(gen=gen, deme=idx, evals=len(deme), **stats.compile(deme)) hof.update(deme) print(logbook.stream) if gen % MIG_RATE == 0: toolbox.migrate(demes) gen += 1 return demes, logbook, hof if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/onemax_numpy.py0000644000076500000240000000613614456461441017177 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . 
import random import numpy from deap import algorithms from deap import base from deap import creator from deap import tools creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", numpy.ndarray, fitness=creator.FitnessMax) toolbox = base.Toolbox() toolbox.register("attr_bool", random.randint, 0, 1) toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_bool, n=100) toolbox.register("population", tools.initRepeat, list, toolbox.individual) def evalOneMax(individual): return sum(individual), def cxTwoPointCopy(ind1, ind2): """Execute a two points crossover with copy on the input individuals. The copy is required because the slicing in numpy returns a view of the data, which leads to a self overwriting in the swap operation. It prevents :: >>> import numpy >>> a = numpy.array((1,2,3,4)) >>> b = numpy.array((5,6,7,8)) >>> a[1:3], b[1:3] = b[1:3], a[1:3] >>> print(a) [1 6 7 4] >>> print(b) [5 6 7 8] """ size = len(ind1) cxpoint1 = random.randint(1, size) cxpoint2 = random.randint(1, size - 1) if cxpoint2 >= cxpoint1: cxpoint2 += 1 else: # Swap the two cx points cxpoint1, cxpoint2 = cxpoint2, cxpoint1 ind1[cxpoint1:cxpoint2], ind2[cxpoint1:cxpoint2] \ = ind2[cxpoint1:cxpoint2].copy(), ind1[cxpoint1:cxpoint2].copy() return ind1, ind2 toolbox.register("evaluate", evalOneMax) toolbox.register("mate", cxTwoPointCopy) toolbox.register("mutate", tools.mutFlipBit, indpb=0.05) toolbox.register("select", tools.selTournament, tournsize=3) def main(): random.seed(64) pop = toolbox.population(n=300) # Numpy equality function (operators.eq) between two arrays returns the # equality element wise, which raises an exception in the if similar() # check of the hall of fame. Using a different equality function like # numpy.array_equal or numpy.allclose solve this issue. 
hof = tools.HallOfFame(1, similar=numpy.array_equal) stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) algorithms.eaSimple(pop, toolbox, cxpb=0.5, mutpb=0.2, ngen=40, stats=stats, halloffame=hof) return pop, stats, hof if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/onemax_short.py0000644000076500000240000000402314456461441017157 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . 
import array
import random

import numpy

from deap import algorithms
from deap import base
from deap import creator
from deap import tools

# Maximize a single objective; individuals are compact signed-byte arrays.
creator.create("FitnessMax", base.Fitness, weights=(1.0,))
creator.create("Individual", array.array, typecode='b', fitness=creator.FitnessMax)

toolbox = base.Toolbox()

# Attribute generator
toolbox.register("attr_bool", random.randint, 0, 1)

# Structure initializers
toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_bool, 100)
toolbox.register("population", tools.initRepeat, list, toolbox.individual)


def evalOneMax(individual):
    """Return the number of ones in *individual* as a 1-tuple fitness."""
    return sum(individual),


toolbox.register("evaluate", evalOneMax)
toolbox.register("mate", tools.cxTwoPoint)
toolbox.register("mutate", tools.mutFlipBit, indpb=0.05)
toolbox.register("select", tools.selTournament, tournsize=3)


def main():
    """Run 40 generations of eaSimple on OneMax.

    Returns the final population, the logbook of per-generation
    statistics, and a size-1 hall of fame with the best individual.
    """
    random.seed(64)

    pop = toolbox.population(n=300)
    hof = tools.HallOfFame(1)
    stats = tools.Statistics(lambda ind: ind.fitness.values)
    stats.register("avg", numpy.mean)
    stats.register("std", numpy.std)
    stats.register("min", numpy.min)
    stats.register("max", numpy.max)

    pop, log = algorithms.eaSimple(pop, toolbox, cxpb=0.5, mutpb=0.2, ngen=40,
                                   stats=stats, halloffame=hof, verbose=True)

    return pop, log, hof

if __name__ == "__main__":
    main()
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.7217047 deap-1.4.1/examples/ga/pareto_front/0000755000076500000240000000000014456461475016611 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/pareto_front/dtlz1_front.json0000644000076500000240000007051014456461441021746 0ustar00runnerstaff[[0.0000000000,0.5000000000], [0.0005005005,0.4994994995], [0.0010010010,0.4989989990], [0.0015015015,0.4984984985], [0.0020020020,0.4979979980], [0.0025025025,0.4974974975], [0.0030030030,0.4969969970], [0.0035035035,0.4964964965], [0.0040040040,0.4959959960], [0.0045045045,0.4954954955],
[0.0050050050,0.4949949950], [0.0055055055,0.4944944945], [0.0060060060,0.4939939940], [0.0065065065,0.4934934935], [0.0070070070,0.4929929930], [0.0075075075,0.4924924925], [0.0080080080,0.4919919920], [0.0085085085,0.4914914915], [0.0090090090,0.4909909910], [0.0095095095,0.4904904905], [0.0100100100,0.4899899900], [0.0105105105,0.4894894895], [0.0110110110,0.4889889890], [0.0115115115,0.4884884885], [0.0120120120,0.4879879880], [0.0125125125,0.4874874875], [0.0130130130,0.4869869870], [0.0135135135,0.4864864865], [0.0140140140,0.4859859860], [0.0145145145,0.4854854855], [0.0150150150,0.4849849850], [0.0155155155,0.4844844845], [0.0160160160,0.4839839840], [0.0165165165,0.4834834835], [0.0170170170,0.4829829830], [0.0175175175,0.4824824825], [0.0180180180,0.4819819820], [0.0185185185,0.4814814815], [0.0190190190,0.4809809810], [0.0195195195,0.4804804805], [0.0200200200,0.4799799800], [0.0205205205,0.4794794795], [0.0210210210,0.4789789790], [0.0215215215,0.4784784785], [0.0220220220,0.4779779780], [0.0225225225,0.4774774775], [0.0230230230,0.4769769770], [0.0235235235,0.4764764765], [0.0240240240,0.4759759760], [0.0245245245,0.4754754755], [0.0250250250,0.4749749750], [0.0255255255,0.4744744745], [0.0260260260,0.4739739740], [0.0265265265,0.4734734735], [0.0270270270,0.4729729730], [0.0275275275,0.4724724725], [0.0280280280,0.4719719720], [0.0285285285,0.4714714715], [0.0290290290,0.4709709710], [0.0295295295,0.4704704705], [0.0300300300,0.4699699700], [0.0305305305,0.4694694695], [0.0310310310,0.4689689690], [0.0315315315,0.4684684685], [0.0320320320,0.4679679680], [0.0325325325,0.4674674675], [0.0330330330,0.4669669670], [0.0335335335,0.4664664665], [0.0340340340,0.4659659660], [0.0345345345,0.4654654655], [0.0350350350,0.4649649650], [0.0355355355,0.4644644645], [0.0360360360,0.4639639640], [0.0365365365,0.4634634635], [0.0370370370,0.4629629630], [0.0375375375,0.4624624625], [0.0380380380,0.4619619620], [0.0385385385,0.4614614615], 
[0.0390390390,0.4609609610], [0.0395395395,0.4604604605], [0.0400400400,0.4599599600], [0.0405405405,0.4594594595], [0.0410410410,0.4589589590], [0.0415415415,0.4584584585], [0.0420420420,0.4579579580], [0.0425425425,0.4574574575], [0.0430430430,0.4569569570], [0.0435435435,0.4564564565], [0.0440440440,0.4559559560], [0.0445445445,0.4554554555], [0.0450450450,0.4549549550], [0.0455455455,0.4544544545], [0.0460460460,0.4539539540], [0.0465465465,0.4534534535], [0.0470470470,0.4529529530], [0.0475475475,0.4524524525], [0.0480480480,0.4519519520], [0.0485485485,0.4514514515], [0.0490490490,0.4509509510], [0.0495495495,0.4504504505], [0.0500500501,0.4499499499], [0.0505505506,0.4494494494], [0.0510510511,0.4489489489], [0.0515515516,0.4484484484], [0.0520520521,0.4479479479], [0.0525525526,0.4474474474], [0.0530530531,0.4469469469], [0.0535535536,0.4464464464], [0.0540540541,0.4459459459], [0.0545545546,0.4454454454], [0.0550550551,0.4449449449], [0.0555555556,0.4444444444], [0.0560560561,0.4439439439], [0.0565565566,0.4434434434], [0.0570570571,0.4429429429], [0.0575575576,0.4424424424], [0.0580580581,0.4419419419], [0.0585585586,0.4414414414], [0.0590590591,0.4409409409], [0.0595595596,0.4404404404], [0.0600600601,0.4399399399], [0.0605605606,0.4394394394], [0.0610610611,0.4389389389], [0.0615615616,0.4384384384], [0.0620620621,0.4379379379], [0.0625625626,0.4374374374], [0.0630630631,0.4369369369], [0.0635635636,0.4364364364], [0.0640640641,0.4359359359], [0.0645645646,0.4354354354], [0.0650650651,0.4349349349], [0.0655655656,0.4344344344], [0.0660660661,0.4339339339], [0.0665665666,0.4334334334], [0.0670670671,0.4329329329], [0.0675675676,0.4324324324], [0.0680680681,0.4319319319], [0.0685685686,0.4314314314], [0.0690690691,0.4309309309], [0.0695695696,0.4304304304], [0.0700700701,0.4299299299], [0.0705705706,0.4294294294], [0.0710710711,0.4289289289], [0.0715715716,0.4284284284], [0.0720720721,0.4279279279], [0.0725725726,0.4274274274], 
[0.0730730731,0.4269269269], [0.0735735736,0.4264264264], [0.0740740741,0.4259259259], [0.0745745746,0.4254254254], [0.0750750751,0.4249249249], [0.0755755756,0.4244244244], [0.0760760761,0.4239239239], [0.0765765766,0.4234234234], [0.0770770771,0.4229229229], [0.0775775776,0.4224224224], [0.0780780781,0.4219219219], [0.0785785786,0.4214214214], [0.0790790791,0.4209209209], [0.0795795796,0.4204204204], [0.0800800801,0.4199199199], [0.0805805806,0.4194194194], [0.0810810811,0.4189189189], [0.0815815816,0.4184184184], [0.0820820821,0.4179179179], [0.0825825826,0.4174174174], [0.0830830831,0.4169169169], [0.0835835836,0.4164164164], [0.0840840841,0.4159159159], [0.0845845846,0.4154154154], [0.0850850851,0.4149149149], [0.0855855856,0.4144144144], [0.0860860861,0.4139139139], [0.0865865866,0.4134134134], [0.0870870871,0.4129129129], [0.0875875876,0.4124124124], [0.0880880881,0.4119119119], [0.0885885886,0.4114114114], [0.0890890891,0.4109109109], [0.0895895896,0.4104104104], [0.0900900901,0.4099099099], [0.0905905906,0.4094094094], [0.0910910911,0.4089089089], [0.0915915916,0.4084084084], [0.0920920921,0.4079079079], [0.0925925926,0.4074074074], [0.0930930931,0.4069069069], [0.0935935936,0.4064064064], [0.0940940941,0.4059059059], [0.0945945946,0.4054054054], [0.0950950951,0.4049049049], [0.0955955956,0.4044044044], [0.0960960961,0.4039039039], [0.0965965966,0.4034034034], [0.0970970971,0.4029029029], [0.0975975976,0.4024024024], [0.0980980981,0.4019019019], [0.0985985986,0.4014014014], [0.0990990991,0.4009009009], [0.0995995996,0.4004004004], [0.1001001001,0.3998998999], [0.1006006006,0.3993993994], [0.1011011011,0.3988988989], [0.1016016016,0.3983983984], [0.1021021021,0.3978978979], [0.1026026026,0.3973973974], [0.1031031031,0.3968968969], [0.1036036036,0.3963963964], [0.1041041041,0.3958958959], [0.1046046046,0.3953953954], [0.1051051051,0.3948948949], [0.1056056056,0.3943943944], [0.1061061061,0.3938938939], [0.1066066066,0.3933933934], 
[0.1071071071,0.3928928929], [0.1076076076,0.3923923924], [0.1081081081,0.3918918919], [0.1086086086,0.3913913914], [0.1091091091,0.3908908909], [0.1096096096,0.3903903904], [0.1101101101,0.3898898899], [0.1106106106,0.3893893894], [0.1111111111,0.3888888889], [0.1116116116,0.3883883884], [0.1121121121,0.3878878879], [0.1126126126,0.3873873874], [0.1131131131,0.3868868869], [0.1136136136,0.3863863864], [0.1141141141,0.3858858859], [0.1146146146,0.3853853854], [0.1151151151,0.3848848849], [0.1156156156,0.3843843844], [0.1161161161,0.3838838839], [0.1166166166,0.3833833834], [0.1171171171,0.3828828829], [0.1176176176,0.3823823824], [0.1181181181,0.3818818819], [0.1186186186,0.3813813814], [0.1191191191,0.3808808809], [0.1196196196,0.3803803804], [0.1201201201,0.3798798799], [0.1206206206,0.3793793794], [0.1211211211,0.3788788789], [0.1216216216,0.3783783784], [0.1221221221,0.3778778779], [0.1226226226,0.3773773774], [0.1231231231,0.3768768769], [0.1236236236,0.3763763764], [0.1241241241,0.3758758759], [0.1246246246,0.3753753754], [0.1251251251,0.3748748749], [0.1256256256,0.3743743744], [0.1261261261,0.3738738739], [0.1266266266,0.3733733734], [0.1271271271,0.3728728729], [0.1276276276,0.3723723724], [0.1281281281,0.3718718719], [0.1286286286,0.3713713714], [0.1291291291,0.3708708709], [0.1296296296,0.3703703704], [0.1301301301,0.3698698699], [0.1306306306,0.3693693694], [0.1311311311,0.3688688689], [0.1316316316,0.3683683684], [0.1321321321,0.3678678679], [0.1326326326,0.3673673674], [0.1331331331,0.3668668669], [0.1336336336,0.3663663664], [0.1341341341,0.3658658659], [0.1346346346,0.3653653654], [0.1351351351,0.3648648649], [0.1356356356,0.3643643644], [0.1361361361,0.3638638639], [0.1366366366,0.3633633634], [0.1371371371,0.3628628629], [0.1376376376,0.3623623624], [0.1381381381,0.3618618619], [0.1386386386,0.3613613614], [0.1391391391,0.3608608609], [0.1396396396,0.3603603604], [0.1401401401,0.3598598599], [0.1406406406,0.3593593594], 
[0.1411411411,0.3588588589], [0.1416416416,0.3583583584], [0.1421421421,0.3578578579], [0.1426426426,0.3573573574], [0.1431431431,0.3568568569], [0.1436436436,0.3563563564], [0.1441441441,0.3558558559], [0.1446446446,0.3553553554], [0.1451451451,0.3548548549], [0.1456456456,0.3543543544], [0.1461461461,0.3538538539], [0.1466466466,0.3533533534], [0.1471471471,0.3528528529], [0.1476476476,0.3523523524], [0.1481481481,0.3518518519], [0.1486486486,0.3513513514], [0.1491491491,0.3508508509], [0.1496496496,0.3503503504], [0.1501501502,0.3498498498], [0.1506506507,0.3493493493], [0.1511511512,0.3488488488], [0.1516516517,0.3483483483], [0.1521521522,0.3478478478], [0.1526526527,0.3473473473], [0.1531531532,0.3468468468], [0.1536536537,0.3463463463], [0.1541541542,0.3458458458], [0.1546546547,0.3453453453], [0.1551551552,0.3448448448], [0.1556556557,0.3443443443], [0.1561561562,0.3438438438], [0.1566566567,0.3433433433], [0.1571571572,0.3428428428], [0.1576576577,0.3423423423], [0.1581581582,0.3418418418], [0.1586586587,0.3413413413], [0.1591591592,0.3408408408], [0.1596596597,0.3403403403], [0.1601601602,0.3398398398], [0.1606606607,0.3393393393], [0.1611611612,0.3388388388], [0.1616616617,0.3383383383], [0.1621621622,0.3378378378], [0.1626626627,0.3373373373], [0.1631631632,0.3368368368], [0.1636636637,0.3363363363], [0.1641641642,0.3358358358], [0.1646646647,0.3353353353], [0.1651651652,0.3348348348], [0.1656656657,0.3343343343], [0.1661661662,0.3338338338], [0.1666666667,0.3333333333], [0.1671671672,0.3328328328], [0.1676676677,0.3323323323], [0.1681681682,0.3318318318], [0.1686686687,0.3313313313], [0.1691691692,0.3308308308], [0.1696696697,0.3303303303], [0.1701701702,0.3298298298], [0.1706706707,0.3293293293], [0.1711711712,0.3288288288], [0.1716716717,0.3283283283], [0.1721721722,0.3278278278], [0.1726726727,0.3273273273], [0.1731731732,0.3268268268], [0.1736736737,0.3263263263], [0.1741741742,0.3258258258], [0.1746746747,0.3253253253], 
[0.1751751752,0.3248248248], [0.1756756757,0.3243243243], [0.1761761762,0.3238238238], [0.1766766767,0.3233233233], [0.1771771772,0.3228228228], [0.1776776777,0.3223223223], [0.1781781782,0.3218218218], [0.1786786787,0.3213213213], [0.1791791792,0.3208208208], [0.1796796797,0.3203203203], [0.1801801802,0.3198198198], [0.1806806807,0.3193193193], [0.1811811812,0.3188188188], [0.1816816817,0.3183183183], [0.1821821822,0.3178178178], [0.1826826827,0.3173173173], [0.1831831832,0.3168168168], [0.1836836837,0.3163163163], [0.1841841842,0.3158158158], [0.1846846847,0.3153153153], [0.1851851852,0.3148148148], [0.1856856857,0.3143143143], [0.1861861862,0.3138138138], [0.1866866867,0.3133133133], [0.1871871872,0.3128128128], [0.1876876877,0.3123123123], [0.1881881882,0.3118118118], [0.1886886887,0.3113113113], [0.1891891892,0.3108108108], [0.1896896897,0.3103103103], [0.1901901902,0.3098098098], [0.1906906907,0.3093093093], [0.1911911912,0.3088088088], [0.1916916917,0.3083083083], [0.1921921922,0.3078078078], [0.1926926927,0.3073073073], [0.1931931932,0.3068068068], [0.1936936937,0.3063063063], [0.1941941942,0.3058058058], [0.1946946947,0.3053053053], [0.1951951952,0.3048048048], [0.1956956957,0.3043043043], [0.1961961962,0.3038038038], [0.1966966967,0.3033033033], [0.1971971972,0.3028028028], [0.1976976977,0.3023023023], [0.1981981982,0.3018018018], [0.1986986987,0.3013013013], [0.1991991992,0.3008008008], [0.1996996997,0.3003003003], [0.2002002002,0.2997997998], [0.2007007007,0.2992992993], [0.2012012012,0.2987987988], [0.2017017017,0.2982982983], [0.2022022022,0.2977977978], [0.2027027027,0.2972972973], [0.2032032032,0.2967967968], [0.2037037037,0.2962962963], [0.2042042042,0.2957957958], [0.2047047047,0.2952952953], [0.2052052052,0.2947947948], [0.2057057057,0.2942942943], [0.2062062062,0.2937937938], [0.2067067067,0.2932932933], [0.2072072072,0.2927927928], [0.2077077077,0.2922922923], [0.2082082082,0.2917917918], [0.2087087087,0.2912912913], 
[0.2092092092,0.2907907908], [0.2097097097,0.2902902903], [0.2102102102,0.2897897898], [0.2107107107,0.2892892893], [0.2112112112,0.2887887888], [0.2117117117,0.2882882883], [0.2122122122,0.2877877878], [0.2127127127,0.2872872873], [0.2132132132,0.2867867868], [0.2137137137,0.2862862863], [0.2142142142,0.2857857858], [0.2147147147,0.2852852853], [0.2152152152,0.2847847848], [0.2157157157,0.2842842843], [0.2162162162,0.2837837838], [0.2167167167,0.2832832833], [0.2172172172,0.2827827828], [0.2177177177,0.2822822823], [0.2182182182,0.2817817818], [0.2187187187,0.2812812813], [0.2192192192,0.2807807808], [0.2197197197,0.2802802803], [0.2202202202,0.2797797798], [0.2207207207,0.2792792793], [0.2212212212,0.2787787788], [0.2217217217,0.2782782783], [0.2222222222,0.2777777778], [0.2227227227,0.2772772773], [0.2232232232,0.2767767768], [0.2237237237,0.2762762763], [0.2242242242,0.2757757758], [0.2247247247,0.2752752753], [0.2252252252,0.2747747748], [0.2257257257,0.2742742743], [0.2262262262,0.2737737738], [0.2267267267,0.2732732733], [0.2272272272,0.2727727728], [0.2277277277,0.2722722723], [0.2282282282,0.2717717718], [0.2287287287,0.2712712713], [0.2292292292,0.2707707708], [0.2297297297,0.2702702703], [0.2302302302,0.2697697698], [0.2307307307,0.2692692693], [0.2312312312,0.2687687688], [0.2317317317,0.2682682683], [0.2322322322,0.2677677678], [0.2327327327,0.2672672673], [0.2332332332,0.2667667668], [0.2337337337,0.2662662663], [0.2342342342,0.2657657658], [0.2347347347,0.2652652653], [0.2352352352,0.2647647648], [0.2357357357,0.2642642643], [0.2362362362,0.2637637638], [0.2367367367,0.2632632633], [0.2372372372,0.2627627628], [0.2377377377,0.2622622623], [0.2382382382,0.2617617618], [0.2387387387,0.2612612613], [0.2392392392,0.2607607608], [0.2397397397,0.2602602603], [0.2402402402,0.2597597598], [0.2407407407,0.2592592593], [0.2412412412,0.2587587588], [0.2417417417,0.2582582583], [0.2422422422,0.2577577578], [0.2427427427,0.2572572573], 
[0.2432432432,0.2567567568], [0.2437437437,0.2562562563], [0.2442442442,0.2557557558], [0.2447447447,0.2552552553], [0.2452452452,0.2547547548], [0.2457457457,0.2542542543], [0.2462462462,0.2537537538], [0.2467467467,0.2532532533], [0.2472472472,0.2527527528], [0.2477477477,0.2522522523], [0.2482482482,0.2517517518], [0.2487487487,0.2512512513], [0.2492492492,0.2507507508], [0.2497497497,0.2502502503], [0.2502502503,0.2497497497], [0.2507507508,0.2492492492], [0.2512512513,0.2487487487], [0.2517517518,0.2482482482], [0.2522522523,0.2477477477], [0.2527527528,0.2472472472], [0.2532532533,0.2467467467], [0.2537537538,0.2462462462], [0.2542542543,0.2457457457], [0.2547547548,0.2452452452], [0.2552552553,0.2447447447], [0.2557557558,0.2442442442], [0.2562562563,0.2437437437], [0.2567567568,0.2432432432], [0.2572572573,0.2427427427], [0.2577577578,0.2422422422], [0.2582582583,0.2417417417], [0.2587587588,0.2412412412], [0.2592592593,0.2407407407], [0.2597597598,0.2402402402], [0.2602602603,0.2397397397], [0.2607607608,0.2392392392], [0.2612612613,0.2387387387], [0.2617617618,0.2382382382], [0.2622622623,0.2377377377], [0.2627627628,0.2372372372], [0.2632632633,0.2367367367], [0.2637637638,0.2362362362], [0.2642642643,0.2357357357], [0.2647647648,0.2352352352], [0.2652652653,0.2347347347], [0.2657657658,0.2342342342], [0.2662662663,0.2337337337], [0.2667667668,0.2332332332], [0.2672672673,0.2327327327], [0.2677677678,0.2322322322], [0.2682682683,0.2317317317], [0.2687687688,0.2312312312], [0.2692692693,0.2307307307], [0.2697697698,0.2302302302], [0.2702702703,0.2297297297], [0.2707707708,0.2292292292], [0.2712712713,0.2287287287], [0.2717717718,0.2282282282], [0.2722722723,0.2277277277], [0.2727727728,0.2272272272], [0.2732732733,0.2267267267], [0.2737737738,0.2262262262], [0.2742742743,0.2257257257], [0.2747747748,0.2252252252], [0.2752752753,0.2247247247], [0.2757757758,0.2242242242], [0.2762762763,0.2237237237], [0.2767767768,0.2232232232], 
[0.2772772773,0.2227227227], [0.2777777778,0.2222222222], [0.2782782783,0.2217217217], [0.2787787788,0.2212212212], [0.2792792793,0.2207207207], [0.2797797798,0.2202202202], [0.2802802803,0.2197197197], [0.2807807808,0.2192192192], [0.2812812813,0.2187187187], [0.2817817818,0.2182182182], [0.2822822823,0.2177177177], [0.2827827828,0.2172172172], [0.2832832833,0.2167167167], [0.2837837838,0.2162162162], [0.2842842843,0.2157157157], [0.2847847848,0.2152152152], [0.2852852853,0.2147147147], [0.2857857858,0.2142142142], [0.2862862863,0.2137137137], [0.2867867868,0.2132132132], [0.2872872873,0.2127127127], [0.2877877878,0.2122122122], [0.2882882883,0.2117117117], [0.2887887888,0.2112112112], [0.2892892893,0.2107107107], [0.2897897898,0.2102102102], [0.2902902903,0.2097097097], [0.2907907908,0.2092092092], [0.2912912913,0.2087087087], [0.2917917918,0.2082082082], [0.2922922923,0.2077077077], [0.2927927928,0.2072072072], [0.2932932933,0.2067067067], [0.2937937938,0.2062062062], [0.2942942943,0.2057057057], [0.2947947948,0.2052052052], [0.2952952953,0.2047047047], [0.2957957958,0.2042042042], [0.2962962963,0.2037037037], [0.2967967968,0.2032032032], [0.2972972973,0.2027027027], [0.2977977978,0.2022022022], [0.2982982983,0.2017017017], [0.2987987988,0.2012012012], [0.2992992993,0.2007007007], [0.2997997998,0.2002002002], [0.3003003003,0.1996996997], [0.3008008008,0.1991991992], [0.3013013013,0.1986986987], [0.3018018018,0.1981981982], [0.3023023023,0.1976976977], [0.3028028028,0.1971971972], [0.3033033033,0.1966966967], [0.3038038038,0.1961961962], [0.3043043043,0.1956956957], [0.3048048048,0.1951951952], [0.3053053053,0.1946946947], [0.3058058058,0.1941941942], [0.3063063063,0.1936936937], [0.3068068068,0.1931931932], [0.3073073073,0.1926926927], [0.3078078078,0.1921921922], [0.3083083083,0.1916916917], [0.3088088088,0.1911911912], [0.3093093093,0.1906906907], [0.3098098098,0.1901901902], [0.3103103103,0.1896896897], [0.3108108108,0.1891891892], 
[0.3113113113,0.1886886887], [0.3118118118,0.1881881882], [0.3123123123,0.1876876877], [0.3128128128,0.1871871872], [0.3133133133,0.1866866867], [0.3138138138,0.1861861862], [0.3143143143,0.1856856857], [0.3148148148,0.1851851852], [0.3153153153,0.1846846847], [0.3158158158,0.1841841842], [0.3163163163,0.1836836837], [0.3168168168,0.1831831832], [0.3173173173,0.1826826827], [0.3178178178,0.1821821822], [0.3183183183,0.1816816817], [0.3188188188,0.1811811812], [0.3193193193,0.1806806807], [0.3198198198,0.1801801802], [0.3203203203,0.1796796797], [0.3208208208,0.1791791792], [0.3213213213,0.1786786787], [0.3218218218,0.1781781782], [0.3223223223,0.1776776777], [0.3228228228,0.1771771772], [0.3233233233,0.1766766767], [0.3238238238,0.1761761762], [0.3243243243,0.1756756757], [0.3248248248,0.1751751752], [0.3253253253,0.1746746747], [0.3258258258,0.1741741742], [0.3263263263,0.1736736737], [0.3268268268,0.1731731732], [0.3273273273,0.1726726727], [0.3278278278,0.1721721722], [0.3283283283,0.1716716717], [0.3288288288,0.1711711712], [0.3293293293,0.1706706707], [0.3298298298,0.1701701702], [0.3303303303,0.1696696697], [0.3308308308,0.1691691692], [0.3313313313,0.1686686687], [0.3318318318,0.1681681682], [0.3323323323,0.1676676677], [0.3328328328,0.1671671672], [0.3333333333,0.1666666667], [0.3338338338,0.1661661662], [0.3343343343,0.1656656657], [0.3348348348,0.1651651652], [0.3353353353,0.1646646647], [0.3358358358,0.1641641642], [0.3363363363,0.1636636637], [0.3368368368,0.1631631632], [0.3373373373,0.1626626627], [0.3378378378,0.1621621622], [0.3383383383,0.1616616617], [0.3388388388,0.1611611612], [0.3393393393,0.1606606607], [0.3398398398,0.1601601602], [0.3403403403,0.1596596597], [0.3408408408,0.1591591592], [0.3413413413,0.1586586587], [0.3418418418,0.1581581582], [0.3423423423,0.1576576577], [0.3428428428,0.1571571572], [0.3433433433,0.1566566567], [0.3438438438,0.1561561562], [0.3443443443,0.1556556557], [0.3448448448,0.1551551552], 
[0.3453453453,0.1546546547], [0.3458458458,0.1541541542], [0.3463463463,0.1536536537], [0.3468468468,0.1531531532], [0.3473473473,0.1526526527], [0.3478478478,0.1521521522], [0.3483483483,0.1516516517], [0.3488488488,0.1511511512], [0.3493493493,0.1506506507], [0.3498498498,0.1501501502], [0.3503503504,0.1496496496], [0.3508508509,0.1491491491], [0.3513513514,0.1486486486], [0.3518518519,0.1481481481], [0.3523523524,0.1476476476], [0.3528528529,0.1471471471], [0.3533533534,0.1466466466], [0.3538538539,0.1461461461], [0.3543543544,0.1456456456], [0.3548548549,0.1451451451], [0.3553553554,0.1446446446], [0.3558558559,0.1441441441], [0.3563563564,0.1436436436], [0.3568568569,0.1431431431], [0.3573573574,0.1426426426], [0.3578578579,0.1421421421], [0.3583583584,0.1416416416], [0.3588588589,0.1411411411], [0.3593593594,0.1406406406], [0.3598598599,0.1401401401], [0.3603603604,0.1396396396], [0.3608608609,0.1391391391], [0.3613613614,0.1386386386], [0.3618618619,0.1381381381], [0.3623623624,0.1376376376], [0.3628628629,0.1371371371], [0.3633633634,0.1366366366], [0.3638638639,0.1361361361], [0.3643643644,0.1356356356], [0.3648648649,0.1351351351], [0.3653653654,0.1346346346], [0.3658658659,0.1341341341], [0.3663663664,0.1336336336], [0.3668668669,0.1331331331], [0.3673673674,0.1326326326], [0.3678678679,0.1321321321], [0.3683683684,0.1316316316], [0.3688688689,0.1311311311], [0.3693693694,0.1306306306], [0.3698698699,0.1301301301], [0.3703703704,0.1296296296], [0.3708708709,0.1291291291], [0.3713713714,0.1286286286], [0.3718718719,0.1281281281], [0.3723723724,0.1276276276], [0.3728728729,0.1271271271], [0.3733733734,0.1266266266], [0.3738738739,0.1261261261], [0.3743743744,0.1256256256], [0.3748748749,0.1251251251], [0.3753753754,0.1246246246], [0.3758758759,0.1241241241], [0.3763763764,0.1236236236], [0.3768768769,0.1231231231], [0.3773773774,0.1226226226], [0.3778778779,0.1221221221], [0.3783783784,0.1216216216], [0.3788788789,0.1211211211], 
[0.3793793794,0.1206206206], [0.3798798799,0.1201201201], [0.3803803804,0.1196196196], [0.3808808809,0.1191191191], [0.3813813814,0.1186186186], [0.3818818819,0.1181181181], [0.3823823824,0.1176176176], [0.3828828829,0.1171171171], [0.3833833834,0.1166166166], [0.3838838839,0.1161161161], [0.3843843844,0.1156156156], [0.3848848849,0.1151151151], [0.3853853854,0.1146146146], [0.3858858859,0.1141141141], [0.3863863864,0.1136136136], [0.3868868869,0.1131131131], [0.3873873874,0.1126126126], [0.3878878879,0.1121121121], [0.3883883884,0.1116116116], [0.3888888889,0.1111111111], [0.3893893894,0.1106106106], [0.3898898899,0.1101101101], [0.3903903904,0.1096096096], [0.3908908909,0.1091091091], [0.3913913914,0.1086086086], [0.3918918919,0.1081081081], [0.3923923924,0.1076076076], [0.3928928929,0.1071071071], [0.3933933934,0.1066066066], [0.3938938939,0.1061061061], [0.3943943944,0.1056056056], [0.3948948949,0.1051051051], [0.3953953954,0.1046046046], [0.3958958959,0.1041041041], [0.3963963964,0.1036036036], [0.3968968969,0.1031031031], [0.3973973974,0.1026026026], [0.3978978979,0.1021021021], [0.3983983984,0.1016016016], [0.3988988989,0.1011011011], [0.3993993994,0.1006006006], [0.3998998999,0.1001001001], [0.4004004004,0.0995995996], [0.4009009009,0.0990990991], [0.4014014014,0.0985985986], [0.4019019019,0.0980980981], [0.4024024024,0.0975975976], [0.4029029029,0.0970970971], [0.4034034034,0.0965965966], [0.4039039039,0.0960960961], [0.4044044044,0.0955955956], [0.4049049049,0.0950950951], [0.4054054054,0.0945945946], [0.4059059059,0.0940940941], [0.4064064064,0.0935935936], [0.4069069069,0.0930930931], [0.4074074074,0.0925925926], [0.4079079079,0.0920920921], [0.4084084084,0.0915915916], [0.4089089089,0.0910910911], [0.4094094094,0.0905905906], [0.4099099099,0.0900900901], [0.4104104104,0.0895895896], [0.4109109109,0.0890890891], [0.4114114114,0.0885885886], [0.4119119119,0.0880880881], [0.4124124124,0.0875875876], [0.4129129129,0.0870870871], 
[0.4134134134,0.0865865866], [0.4139139139,0.0860860861], [0.4144144144,0.0855855856], [0.4149149149,0.0850850851], [0.4154154154,0.0845845846], [0.4159159159,0.0840840841], [0.4164164164,0.0835835836], [0.4169169169,0.0830830831], [0.4174174174,0.0825825826], [0.4179179179,0.0820820821], [0.4184184184,0.0815815816], [0.4189189189,0.0810810811], [0.4194194194,0.0805805806], [0.4199199199,0.0800800801], [0.4204204204,0.0795795796], [0.4209209209,0.0790790791], [0.4214214214,0.0785785786], [0.4219219219,0.0780780781], [0.4224224224,0.0775775776], [0.4229229229,0.0770770771], [0.4234234234,0.0765765766], [0.4239239239,0.0760760761], [0.4244244244,0.0755755756], [0.4249249249,0.0750750751], [0.4254254254,0.0745745746], [0.4259259259,0.0740740741], [0.4264264264,0.0735735736], [0.4269269269,0.0730730731], [0.4274274274,0.0725725726], [0.4279279279,0.0720720721], [0.4284284284,0.0715715716], [0.4289289289,0.0710710711], [0.4294294294,0.0705705706], [0.4299299299,0.0700700701], [0.4304304304,0.0695695696], [0.4309309309,0.0690690691], [0.4314314314,0.0685685686], [0.4319319319,0.0680680681], [0.4324324324,0.0675675676], [0.4329329329,0.0670670671], [0.4334334334,0.0665665666], [0.4339339339,0.0660660661], [0.4344344344,0.0655655656], [0.4349349349,0.0650650651], [0.4354354354,0.0645645646], [0.4359359359,0.0640640641], [0.4364364364,0.0635635636], [0.4369369369,0.0630630631], [0.4374374374,0.0625625626], [0.4379379379,0.0620620621], [0.4384384384,0.0615615616], [0.4389389389,0.0610610611], [0.4394394394,0.0605605606], [0.4399399399,0.0600600601], [0.4404404404,0.0595595596], [0.4409409409,0.0590590591], [0.4414414414,0.0585585586], [0.4419419419,0.0580580581], [0.4424424424,0.0575575576], [0.4429429429,0.0570570571], [0.4434434434,0.0565565566], [0.4439439439,0.0560560561], [0.4444444444,0.0555555556], [0.4449449449,0.0550550551], [0.4454454454,0.0545545546], [0.4459459459,0.0540540541], [0.4464464464,0.0535535536], [0.4469469469,0.0530530531], 
[0.4474474474,0.0525525526], [0.4479479479,0.0520520521], [0.4484484484,0.0515515516], [0.4489489489,0.0510510511], [0.4494494494,0.0505505506], [0.4499499499,0.0500500501], [0.4504504505,0.0495495495], [0.4509509510,0.0490490490], [0.4514514515,0.0485485485], [0.4519519520,0.0480480480], [0.4524524525,0.0475475475], [0.4529529530,0.0470470470], [0.4534534535,0.0465465465], [0.4539539540,0.0460460460], [0.4544544545,0.0455455455], [0.4549549550,0.0450450450], [0.4554554555,0.0445445445], [0.4559559560,0.0440440440], [0.4564564565,0.0435435435], [0.4569569570,0.0430430430], [0.4574574575,0.0425425425], [0.4579579580,0.0420420420], [0.4584584585,0.0415415415], [0.4589589590,0.0410410410], [0.4594594595,0.0405405405], [0.4599599600,0.0400400400], [0.4604604605,0.0395395395], [0.4609609610,0.0390390390], [0.4614614615,0.0385385385], [0.4619619620,0.0380380380], [0.4624624625,0.0375375375], [0.4629629630,0.0370370370], [0.4634634635,0.0365365365], [0.4639639640,0.0360360360], [0.4644644645,0.0355355355], [0.4649649650,0.0350350350], [0.4654654655,0.0345345345], [0.4659659660,0.0340340340], [0.4664664665,0.0335335335], [0.4669669670,0.0330330330], [0.4674674675,0.0325325325], [0.4679679680,0.0320320320], [0.4684684685,0.0315315315], [0.4689689690,0.0310310310], [0.4694694695,0.0305305305], [0.4699699700,0.0300300300], [0.4704704705,0.0295295295], [0.4709709710,0.0290290290], [0.4714714715,0.0285285285], [0.4719719720,0.0280280280], [0.4724724725,0.0275275275], [0.4729729730,0.0270270270], [0.4734734735,0.0265265265], [0.4739739740,0.0260260260], [0.4744744745,0.0255255255], [0.4749749750,0.0250250250], [0.4754754755,0.0245245245], [0.4759759760,0.0240240240], [0.4764764765,0.0235235235], [0.4769769770,0.0230230230], [0.4774774775,0.0225225225], [0.4779779780,0.0220220220], [0.4784784785,0.0215215215], [0.4789789790,0.0210210210], [0.4794794795,0.0205205205], [0.4799799800,0.0200200200], [0.4804804805,0.0195195195], [0.4809809810,0.0190190190], 
[0.4814814815,0.0185185185], [0.4819819820,0.0180180180], [0.4824824825,0.0175175175], [0.4829829830,0.0170170170], [0.4834834835,0.0165165165], [0.4839839840,0.0160160160], [0.4844844845,0.0155155155], [0.4849849850,0.0150150150], [0.4854854855,0.0145145145], [0.4859859860,0.0140140140], [0.4864864865,0.0135135135], [0.4869869870,0.0130130130], [0.4874874875,0.0125125125], [0.4879879880,0.0120120120], [0.4884884885,0.0115115115], [0.4889889890,0.0110110110], [0.4894894895,0.0105105105], [0.4899899900,0.0100100100], [0.4904904905,0.0095095095], [0.4909909910,0.0090090090], [0.4914914915,0.0085085085], [0.4919919920,0.0080080080], [0.4924924925,0.0075075075], [0.4929929930,0.0070070070], [0.4934934935,0.0065065065], [0.4939939940,0.0060060060], [0.4944944945,0.0055055055], [0.4949949950,0.0050050050], [0.4954954955,0.0045045045], [0.4959959960,0.0040040040], [0.4964964965,0.0035035035], [0.4969969970,0.0030030030], [0.4974974975,0.0025025025], [0.4979979980,0.0020020020], [0.4984984985,0.0015015015], [0.4989989990,0.0010010010], [0.4994994995,0.0005005005], [0.5000000000,0.0000000000]]././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/pareto_front/dtlz2_front.json0000644000076500000240000007051014456461441021747 0ustar00runnerstaff[[0.0000000000,1.0000000000], [0.0369034559,0.9993188355], [0.0554689064,0.9984604150], [0.0701154707,0.9975388818], [0.0825460836,0.9965872486], [0.0944933533,0.9955254925], [0.1052322500,0.9944476726], [0.1149755587,0.9933683209], [0.1240790518,0.9922723361], [0.1323880288,0.9911979670], [0.1401883532,0.9901248536], [0.1470407828,0.9891304303], [0.1540288320,0.9880663535], [0.1602847298,0.9870708209], [0.1664128767,0.9860561619], [0.1723284939,0.9850395374], [0.1779913023,0.9840320606], [0.1834929251,0.9830210305], [0.1888536496,0.9820052439], [0.1940749455,0.9809867051], [0.1990808625,0.9799830663], [0.2038239780,0.9790075516], [0.2085914966,0.9780028566], 
[0.2132114339,0.9770060821], [0.2178376178,0.9759850267], [0.2222055155,0.9749998507], [0.2265423242,0.9740013220], [0.2308470222,0.9729900577], [0.2350620607,0.9719803638], [0.2391443145,0.9709840353], [0.2431809463,0.9699809418], [0.2471722125,0.9689715669], [0.2510553101,0.9679727430], [0.2548588786,0.9669782583], [0.2586462636,0.9659721064], [0.2623167872,0.9649818149], [0.2660016761,0.9639725662], [0.2696215002,0.9629663788], [0.2731796937,0.9619630216], [0.2766542506,0.9609695238], [0.2801268556,0.9599629913], [0.2835449083,0.9589589590], [0.2869030002,0.9579596382], [0.2902298788,0.9569569570], [0.2935083252,0.9559565173], [0.2967507945,0.9549549550], [0.2999530859,0.9539539540], [0.3031103626,0.9529554597], [0.3062375464,0.9519551277], [0.3093316711,0.9509542141], [0.3124021328,0.9499499499], [0.3154217281,0.9489515970], [0.3184020635,0.9479557616], [0.3213660178,0.9469550584], [0.3243243243,0.9459459459], [0.3272221858,0.9449474277], [0.3300954464,0.9439475601], [0.3329379853,0.9429487250], [0.3357662943,0.9419453252], [0.3385707395,0.9409409409], [0.3413325513,0.9399425990], [0.3440838109,0.9389389389], [0.3468031496,0.9379379379], [0.3494914651,0.9369395476], [0.3521544727,0.9359418931], [0.3548192039,0.9349349349], [0.3574365344,0.9339374304], [0.3600347561,0.9329388910], [0.3626332503,0.9319319319], [0.3651931617,0.9309317669], [0.3677307710,0.9299322986], [0.3702561890,0.9289296822], [0.3727563119,0.9279292710], [0.3752418848,0.9269269269], [0.3777033956,0.9259266412], [0.3801498169,0.9249249249], [0.3825762444,0.9239239239], [0.3849807434,0.9229246054], [0.3873757477,0.9219219219], [0.3897494803,0.9209209209], [0.3921062878,0.9199199199], [0.3944464735,0.9189189189], [0.3967701220,0.9179180085], [0.3990771074,0.9169173694], [0.4013664253,0.9159175687], [0.4036459287,0.9149152771], [0.4059080659,0.9139139139], [0.4081514925,0.9129142124], [0.4103859950,0.9119119119], [0.4126030930,0.9109109109], [0.4148059255,0.9099099099], 
[0.4169947186,0.9089089089], [0.4191696921,0.9079079079], [0.4213310601,0.9069069069], [0.4234790309,0.9059059059], [0.4256138074,0.9049049049], [0.4277302163,0.9039064454], [0.4298445625,0.9029029029], [0.4319409211,0.9019019019], [0.4340248458,0.9009009009], [0.4360965147,0.8998998999], [0.4381524982,0.8989006554], [0.4402007852,0.8978993645], [0.4422397046,0.8968968969], [0.4442548729,0.8959004453], [0.4462716759,0.8948975312], [0.4482769549,0.8938947207], [0.4502691215,0.8928928929], [0.4522432984,0.8918946121], [0.4542173715,0.8908908909], [0.4561739547,0.8898906242], [0.4581228473,0.8888888889], [0.4600583792,0.8878886685], [0.4619863237,0.8868870484], [0.4639032196,0.8858858859], [0.4658097686,0.8848848849], [0.4677064034,0.8838838839], [0.4695932443,0.8828828829], [0.4714828031,0.8818752556], [0.4733427410,0.8808783398], [0.4751952113,0.8798803960], [0.4770406081,0.8788812538], [0.4788811021,0.8778797697], [0.4807150328,0.8768768769], [0.4825364754,0.8758758759], [0.4843489995,0.8748748749], [0.4861527050,0.8738738739], [0.4879476896,0.8728728729], [0.4897340493,0.8718718719], [0.4913662950,0.8709530206], [0.4930308523,0.8700118267], [0.4944409928,0.8692111968], [0.4958731824,0.8683949487], [0.4972038803,0.8676337369], [0.4984028062,0.8669455824], [0.4995380883,0.8662919244], [0.5006154830,0.8656697628], [0.5014559265,0.8651831909], [0.5022666475,0.8647127933], [0.5031221290,0.8642153223], [0.5038378201,0.8637982699], [0.5045948993,0.8633562344], [0.5052461589,0.8629752713], [0.5058661023,0.8626120139], [0.5064909416,0.8622452818], [0.5071004080,0.8618869858], [0.5077948321,0.8614780371], [0.5085028146,0.8610603275], [0.5091884560,0.8606550507], [0.5098623244,0.8602560143], [0.5104395997,0.8599136091], [0.5110220546,0.8595676004], [0.5116045741,0.8592210192], [0.5121824280,0.8588766853], [0.5127456864,0.8585405413], [0.5133164602,0.8581994009], [0.5139171891,0.8578398002], [0.5145203163,0.8574781887], [0.5151429121,0.8571042995], 
[0.5157917619,0.8567139886], [0.5164264251,0.8563315640], [0.5170709193,0.8559425591], [0.5176871000,0.8555700243], [0.5182871208,0.8552066770], [0.5188617486,0.8548581671], [0.5194335140,0.8545108686], [0.5200757604,0.8541201341], [0.5206731708,0.8537560830], [0.5212974684,0.8533750344], [0.5219719126,0.8529626735], [0.5226403853,0.8525532403], [0.5232753102,0.8521636872], [0.5238980396,0.8517809836], [0.5245113037,0.8514034839], [0.5251433508,0.8510137843], [0.5257516020,0.8506381446], [0.5263766517,0.8502515043], [0.5269964664,0.8498674746], [0.5276272887,0.8494759821], [0.5282513099,0.8490880717], [0.5288973285,0.8486858170], [0.5295458214,0.8482813348], [0.5301624602,0.8478960819], [0.5308048452,0.8474940804], [0.5314704031,0.8470768623], [0.5320732387,0.8466983339], [0.5326753017,0.8463196931], [0.5332994236,0.8459265481], [0.5339121576,0.8455399506], [0.5345345345,0.8451466331], [0.5351594859,0.8447510430], [0.5357890366,0.8443518865], [0.5364106345,0.8439571264], [0.5370230863,0.8435675461], [0.5376334769,0.8431786552], [0.5382034505,0.8428149535], [0.5387686643,0.8424537533], [0.5393584806,0.8420762611], [0.5399531958,0.8416950436], [0.5405405405,0.8413179684], [0.5411155897,0.8409482259], [0.5417229553,0.8405571008], [0.5423260737,0.8401680961], [0.5429669464,0.8397540682], [0.5435996064,0.8393446657], [0.5442146081,0.8389460413], [0.5448272687,0.8385482976], [0.5454368027,0.8381519517], [0.5460469885,0.8377545502], [0.5466735397,0.8373458312], [0.5473023850,0.8369349433], [0.5479290378,0.8365248171], [0.5485370289,0.8361262632], [0.5491247912,0.8357403686], [0.5497296206,0.8353426508], [0.5503275986,0.8349488213], [0.5509453978,0.8345412924], [0.5515814517,0.8341210356], [0.5521934967,0.8337159842], [0.5527651897,0.8333370537], [0.5533539033,0.8329462514], [0.5539294444,0.8325636136], [0.5545085146,0.8321780502], [0.5551259226,0.8317663194], [0.5557448952,0.8313528802], [0.5563254221,0.8309645147], [0.5568964635,0.8305819219], 
[0.5574640036,0.8302011110], [0.5580346413,0.8298176541], [0.5586159811,0.8294264197], [0.5592211739,0.8290185032], [0.5598067495,0.8286231974], [0.5603754700,0.8282386930], [0.5609717717,0.8278349300], [0.5615702950,0.8274290325], [0.5621655663,0.8270247131], [0.5627583491,0.8266214614], [0.5633666495,0.8262070069], [0.5639663209,0.8257977894], [0.5645982471,0.8253658700], [0.5652180106,0.8249415740], [0.5658627611,0.8244994455], [0.5665077826,0.8240563890], [0.5671386419,0.8236223411], [0.5677312926,0.8232139330], [0.5683074749,0.8228162699], [0.5688783385,0.8224216899], [0.5694700803,0.8220120606], [0.5700453735,0.8216132132], [0.5706220363,0.8212128176], [0.5711883696,0.8208190096], [0.5717565184,0.8204233564], [0.5723164842,0.8200328298], [0.5728546187,0.8196569928], [0.5734517451,0.8192393399], [0.5740430940,0.8188250889], [0.5746447945,0.8184029327], [0.5752422136,0.8179831268], [0.5758351001,0.8175658612], [0.5764334119,0.8171441254], [0.5770385373,0.8167169195], [0.5775944950,0.8163238324], [0.5781852812,0.8159054974], [0.5787508416,0.8155044227], [0.5793238635,0.8150974550], [0.5798826174,0.8147000368], [0.5804466148,0.8142983037], [0.5810258563,0.8138850990], [0.5816122879,0.8134661312], [0.5822313119,0.8130231850], [0.5828532438,0.8125774401], [0.5834723222,0.8121330243], [0.5840505573,0.8117172824], [0.5846356701,0.8112959591], [0.5851890576,0.8108968904], [0.5857589197,0.8104853410], [0.5863346929,0.8100689032], [0.5869043514,0.8096562741], [0.5874699609,0.8092459732], [0.5880140385,0.8088507220], [0.5885899410,0.8084317419], [0.5891712380,0.8080082007], [0.5897805084,0.8075635900], [0.5903722347,0.8071311074], [0.5909731865,0.8066912004], [0.5915521608,0.8062667307], [0.5921208026,0.8058492137], [0.5926768075,0.8054403776], [0.5932266964,0.8050354568], [0.5937712979,0.8046338582], [0.5943236246,0.8042259814], [0.5948727212,0.8038199087], [0.5954451305,0.8033959774], [0.5960177897,0.8029712289], [0.5965880254,0.8025476484], 
[0.5971592963,0.8021226682], [0.5977293661,0.8016979512], [0.5983197190,0.8012574579], [0.5989034379,0.8008212485], [0.5994961891,0.8003776104], [0.6000862860,0.7999352782], [0.6006803491,0.7994892859], [0.6012714947,0.7990447982], [0.6018623738,0.7985998266], [0.6024410249,0.7981633990], [0.6030140078,0.7977305976], [0.6035853496,0.7972983919], [0.6041675960,0.7968572745], [0.6047483389,0.7964166288], [0.6053359718,0.7959700756], [0.6059088682,0.7955340617], [0.6064873862,0.7950931080], [0.6070598467,0.7946561159], [0.6076082992,0.7942368379], [0.6081585019,0.7938156187], [0.6087008225,0.7933998416], [0.6092493277,0.7929787240], [0.6098030527,0.7925529869], [0.6103731258,0.7921140368], [0.6109665199,0.7916564353], [0.6115512499,0.7912048210], [0.6121478031,0.7907433637], [0.6127075106,0.7903097535], [0.6132794115,0.7898660415], [0.6138526102,0.7894206566], [0.6144516716,0.7889544621], [0.6150361282,0.7884989290], [0.6155715884,0.7880809727], [0.6161249092,0.7876484598], [0.6166805932,0.7872134691], [0.6172303276,0.7867825130], [0.6177829555,0.7863486631], [0.6183391364,0.7859113896], [0.6188979332,0.7854714179], [0.6194366405,0.7850466536], [0.6200040267,0.7845986279], [0.6205590118,0.7841597496], [0.6211145615,0.7837197851], [0.6216575860,0.7832891201], [0.6222173611,0.7828445284], [0.6227721219,0.7824032747], [0.6233246414,0.7819631650], [0.6239067112,0.7814988264], [0.6244931263,0.7810303037], [0.6250711759,0.7805677582], [0.6256102179,0.7801357928], [0.6261334159,0.7797159390], [0.6266502055,0.7793006608], [0.6271902164,0.7788661197], [0.6277586298,0.7784080566], [0.6283003785,0.7779708442], [0.6288407357,0.7775341338], [0.6293810109,0.7770968685], [0.6299102714,0.7766679149], [0.6304645970,0.7762180054], [0.6310086733,0.7757757758], [0.6316174909,0.7752801720], [0.6322085094,0.7747982967], [0.6328081424,0.7743086303], [0.6333784888,0.7738421609], [0.6339594258,0.7733663080], [0.6345165953,0.7729092381], [0.6350706268,0.7724540756], 
[0.6355849311,0.7720309549], [0.6361115568,0.7715971017], [0.6366398489,0.7711612690], [0.6371659714,0.7707266214], [0.6377124530,0.7702745143], [0.6382694686,0.7698130198], [0.6388120988,0.7693627898], [0.6393722504,0.7688973439], [0.6398901502,0.7684663920], [0.6404140943,0.7680298092], [0.6409410450,0.7675901099], [0.6414990653,0.7671238161], [0.6420366757,0.7666739248], [0.6425993635,0.7662023610], [0.6431668598,0.7657260544], [0.6437381844,0.7652458102], [0.6442759163,0.7647931378], [0.6447880509,0.7643614128], [0.6453596100,0.7638789000], [0.6458819044,0.7634373357], [0.6463891541,0.7630079039], [0.6469019786,0.7625731637], [0.6474281745,0.7621264717], [0.6479731485,0.7616631794], [0.6485163733,0.7612007052], [0.6490706137,0.7607281633], [0.6496196821,0.7602593430], [0.6501586847,0.7597984501], [0.6506855595,0.7593472873], [0.6512624614,0.7588525591], [0.6518486201,0.7583491125], [0.6524062744,0.7578694169], [0.6529552921,0.7573964527], [0.6534930151,0.7569325460], [0.6540287990,0.7564696491], [0.6545710321,0.7560005053], [0.6551098937,0.7555336043], [0.6556243355,0.7550872338], [0.6561249582,0.7546522638], [0.6566328735,0.7542103615], [0.6571569666,0.7537537538], [0.6576988639,0.7532809598], [0.6582401174,0.7528080418], [0.6587921703,0.7523249806], [0.6593218692,0.7518608068], [0.6598342919,0.7514111439], [0.6603684227,0.7509417729], [0.6608832360,0.7504887397], [0.6614001260,0.7500332482], [0.6619247438,0.7495702993], [0.6624657526,0.7490922016], [0.6630165568,0.7486047324], [0.6635694406,0.7481146954], [0.6641188509,0.7476270139], [0.6646432296,0.7471608778], [0.6651597065,0.7467011215], [0.6656823687,0.7462352069], [0.6661962464,0.7457764821], [0.6667464410,0.7452846324], [0.6672525404,0.7448315564], [0.6677715372,0.7443662903], [0.6682979983,0.7438936655], [0.6688166721,0.7434273731], [0.6693261124,0.7429687445], [0.6698347461,0.7425102107], [0.6703298317,0.7420632835], [0.6708735590,0.7415717550], [0.6714082569,0.7410876821], 
[0.6719538916,0.7405929837], [0.6725252041,0.7400742191], [0.6730822426,0.7395676404], [0.6736282685,0.7390703322], [0.6741566569,0.7385883847], [0.6746726115,0.7381171094], [0.6751854676,0.7376480085], [0.6756756757,0.7371990106], [0.6761607313,0.7367541418], [0.6766421534,0.7363120237], [0.6771586217,0.7358370751], [0.6776737898,0.7353626551], [0.6782096396,0.7348684813], [0.6787475351,0.7343716931], [0.6792935083,0.7338666974], [0.6798276103,0.7333719522], [0.6803548488,0.7328828553], [0.6808864775,0.7323889709], [0.6813888171,0.7319216351], [0.6819012364,0.7314442588], [0.6824316575,0.7309494051], [0.6829608407,0.7304549884], [0.6834903026,0.7299595922], [0.6840397074,0.7294447743], [0.6845764967,0.7289410265], [0.6850882559,0.7284600755], [0.6856044637,0.7279742573], [0.6860900097,0.7275166655], [0.6865771574,0.7270569489], [0.6870726626,0.7265887119], [0.6875677777,0.7261202043], [0.6880811093,0.7256337830], [0.6885950807,0.7251460645], [0.6891212070,0.7246460944], [0.6896491273,0.7241436882], [0.6901706210,0.7236466776], [0.6906770753,0.7231633133], [0.6911701553,0.7226920620], [0.6916916917,0.7221929130], [0.6922143667,0.7216919498], [0.6927315117,0.7211955718], [0.6932482963,0.7206988273], [0.6937626037,0.7202037556], [0.6942619006,0.7197224558], [0.6947787469,0.7192235347], [0.6952929865,0.7187264173], [0.6958120217,0.7182239418], [0.6963341710,0.7177177177], [0.6968524650,0.7172145021], [0.6973644298,0.7167167167], [0.6978707343,0.7162237347], [0.6983696435,0.7157372710], [0.6988439807,0.7152741367], [0.6993121350,0.7148164364], [0.6997858586,0.7143526805], [0.7002729450,0.7138752010], [0.7007571441,0.7133999054], [0.7012619608,0.7129036838], [0.7017843038,0.7123894939], [0.7023035839,0.7118775710], [0.7028540416,0.7113340961], [0.7033812989,0.7108127379], [0.7038927077,0.7103063114], [0.7044176915,0.7097856830], [0.7049148983,0.7092918906], [0.7053934668,0.7088159543], [0.7058758101,0.7083356131], [0.7063725126,0.7078402881], 
[0.7068638323,0.7073496466], [0.7073496466,0.7068638323], [0.7078402881,0.7063725126], [0.7083356131,0.7058758101], [0.7088159543,0.7053934668], [0.7092918906,0.7049148983], [0.7097856830,0.7044176915], [0.7103063114,0.7038927077], [0.7108127379,0.7033812989], [0.7113340961,0.7028540416], [0.7118775710,0.7023035839], [0.7123894939,0.7017843038], [0.7129036838,0.7012619608], [0.7133999054,0.7007571441], [0.7138752010,0.7002729450], [0.7143526805,0.6997858586], [0.7148164364,0.6993121350], [0.7152741367,0.6988439807], [0.7157372710,0.6983696435], [0.7162237347,0.6978707343], [0.7167167167,0.6973644298], [0.7172145021,0.6968524650], [0.7177177177,0.6963341710], [0.7182239418,0.6958120217], [0.7187264173,0.6952929865], [0.7192235347,0.6947787469], [0.7197224558,0.6942619006], [0.7202037556,0.6937626037], [0.7206988273,0.6932482963], [0.7211955718,0.6927315117], [0.7216919498,0.6922143667], [0.7221929130,0.6916916917], [0.7226920620,0.6911701553], [0.7231633133,0.6906770753], [0.7236466776,0.6901706210], [0.7241436882,0.6896491273], [0.7246460944,0.6891212070], [0.7251460645,0.6885950807], [0.7256337830,0.6880811093], [0.7261202043,0.6875677777], [0.7265887119,0.6870726626], [0.7270569489,0.6865771574], [0.7275166655,0.6860900097], [0.7279742573,0.6856044637], [0.7284600755,0.6850882559], [0.7289410265,0.6845764967], [0.7294447743,0.6840397074], [0.7299595922,0.6834903026], [0.7304549884,0.6829608407], [0.7309494051,0.6824316575], [0.7314442588,0.6819012364], [0.7319216351,0.6813888171], [0.7323889709,0.6808864775], [0.7328828553,0.6803548488], [0.7333719522,0.6798276103], [0.7338666974,0.6792935083], [0.7343716931,0.6787475351], [0.7348684813,0.6782096396], [0.7353626551,0.6776737898], [0.7358370751,0.6771586217], [0.7363120237,0.6766421534], [0.7367541418,0.6761607313], [0.7371990106,0.6756756757], [0.7376480085,0.6751854676], [0.7381171094,0.6746726115], [0.7385883847,0.6741566569], [0.7390703322,0.6736282685], [0.7395676404,0.6730822426], 
[0.7400742191,0.6725252041], [0.7405929837,0.6719538916], [0.7410876821,0.6714082569], [0.7415717550,0.6708735590], [0.7420632835,0.6703298317], [0.7425102107,0.6698347461], [0.7429687445,0.6693261124], [0.7434273731,0.6688166721], [0.7438936655,0.6682979983], [0.7443662903,0.6677715372], [0.7448315564,0.6672525404], [0.7452846324,0.6667464410], [0.7457764821,0.6661962464], [0.7462352069,0.6656823687], [0.7467011215,0.6651597065], [0.7471608778,0.6646432296], [0.7476270139,0.6641188509], [0.7481146954,0.6635694406], [0.7486047324,0.6630165568], [0.7490922016,0.6624657526], [0.7495702993,0.6619247438], [0.7500332482,0.6614001260], [0.7504887397,0.6608832360], [0.7509417729,0.6603684227], [0.7514111439,0.6598342919], [0.7518608068,0.6593218692], [0.7523249806,0.6587921703], [0.7528080418,0.6582401174], [0.7532809598,0.6576988639], [0.7537537538,0.6571569666], [0.7542103615,0.6566328735], [0.7546522638,0.6561249582], [0.7550872338,0.6556243355], [0.7555336043,0.6551098937], [0.7560005053,0.6545710321], [0.7564696491,0.6540287990], [0.7569325460,0.6534930151], [0.7573964527,0.6529552921], [0.7578694169,0.6524062744], [0.7583491125,0.6518486201], [0.7588525591,0.6512624614], [0.7593472873,0.6506855595], [0.7597984501,0.6501586847], [0.7602593430,0.6496196821], [0.7607281633,0.6490706137], [0.7612007052,0.6485163733], [0.7616631794,0.6479731485], [0.7621264717,0.6474281745], [0.7625731637,0.6469019786], [0.7630079039,0.6463891541], [0.7634373357,0.6458819044], [0.7638789000,0.6453596100], [0.7643614128,0.6447880509], [0.7647931378,0.6442759163], [0.7652458102,0.6437381844], [0.7657260544,0.6431668598], [0.7662023610,0.6425993635], [0.7666739248,0.6420366757], [0.7671238161,0.6414990653], [0.7675901099,0.6409410450], [0.7680298092,0.6404140943], [0.7684663920,0.6398901502], [0.7688973439,0.6393722504], [0.7693627898,0.6388120988], [0.7698130198,0.6382694686], [0.7702745143,0.6377124530], [0.7707266214,0.6371659714], [0.7711612690,0.6366398489], 
[0.7715971017,0.6361115568], [0.7720309549,0.6355849311], [0.7724540756,0.6350706268], [0.7729092381,0.6345165953], [0.7733663080,0.6339594258], [0.7738421609,0.6333784888], [0.7743086303,0.6328081424], [0.7747982967,0.6322085094], [0.7752801720,0.6316174909], [0.7757757758,0.6310086733], [0.7762180054,0.6304645970], [0.7766679149,0.6299102714], [0.7770968685,0.6293810109], [0.7775341338,0.6288407357], [0.7779708442,0.6283003785], [0.7784080566,0.6277586298], [0.7788661197,0.6271902164], [0.7793006608,0.6266502055], [0.7797159390,0.6261334159], [0.7801357928,0.6256102179], [0.7805677582,0.6250711759], [0.7810303037,0.6244931263], [0.7814988264,0.6239067112], [0.7819631650,0.6233246414], [0.7824032747,0.6227721219], [0.7828445284,0.6222173611], [0.7832891201,0.6216575860], [0.7837197851,0.6211145615], [0.7841597496,0.6205590118], [0.7845986279,0.6200040267], [0.7850466536,0.6194366405], [0.7854714179,0.6188979332], [0.7859113896,0.6183391364], [0.7863486631,0.6177829555], [0.7867825130,0.6172303276], [0.7872134691,0.6166805932], [0.7876484598,0.6161249092], [0.7880809727,0.6155715884], [0.7884989290,0.6150361282], [0.7889544621,0.6144516716], [0.7894206566,0.6138526102], [0.7898660415,0.6132794115], [0.7903097535,0.6127075106], [0.7907433637,0.6121478031], [0.7912048210,0.6115512499], [0.7916564353,0.6109665199], [0.7921140368,0.6103731258], [0.7925529869,0.6098030527], [0.7929787240,0.6092493277], [0.7933998416,0.6087008225], [0.7938156187,0.6081585019], [0.7942368379,0.6076082992], [0.7946561159,0.6070598467], [0.7950931080,0.6064873862], [0.7955340617,0.6059088682], [0.7959700756,0.6053359718], [0.7964166288,0.6047483389], [0.7968572745,0.6041675960], [0.7972983919,0.6035853496], [0.7977305976,0.6030140078], [0.7981633990,0.6024410249], [0.7985998266,0.6018623738], [0.7990447982,0.6012714947], [0.7994892859,0.6006803491], [0.7999352782,0.6000862860], [0.8003776104,0.5994961891], [0.8008212485,0.5989034379], [0.8012574579,0.5983197190], 
[0.8016979512,0.5977293661], [0.8021226682,0.5971592963], [0.8025476484,0.5965880254], [0.8029712289,0.5960177897], [0.8033959774,0.5954451305], [0.8038199087,0.5948727212], [0.8042259814,0.5943236246], [0.8046338582,0.5937712979], [0.8050354568,0.5932266964], [0.8054403776,0.5926768075], [0.8058492137,0.5921208026], [0.8062667307,0.5915521608], [0.8066912004,0.5909731865], [0.8071311074,0.5903722347], [0.8075635900,0.5897805084], [0.8080082007,0.5891712380], [0.8084317419,0.5885899410], [0.8088507220,0.5880140385], [0.8092459732,0.5874699609], [0.8096562741,0.5869043514], [0.8100689032,0.5863346929], [0.8104853410,0.5857589197], [0.8108968904,0.5851890576], [0.8112959591,0.5846356701], [0.8117172824,0.5840505573], [0.8121330243,0.5834723222], [0.8125774401,0.5828532438], [0.8130231850,0.5822313119], [0.8134661312,0.5816122879], [0.8138850990,0.5810258563], [0.8142983037,0.5804466148], [0.8147000368,0.5798826174], [0.8150974550,0.5793238635], [0.8155044227,0.5787508416], [0.8159054974,0.5781852812], [0.8163238324,0.5775944950], [0.8167169195,0.5770385373], [0.8171441254,0.5764334119], [0.8175658612,0.5758351001], [0.8179831268,0.5752422136], [0.8184029327,0.5746447945], [0.8188250889,0.5740430940], [0.8192393399,0.5734517451], [0.8196569928,0.5728546187], [0.8200328298,0.5723164842], [0.8204233564,0.5717565184], [0.8208190096,0.5711883696], [0.8212128176,0.5706220363], [0.8216132132,0.5700453735], [0.8220120606,0.5694700803], [0.8224216899,0.5688783385], [0.8228162699,0.5683074749], [0.8232139330,0.5677312926], [0.8236223411,0.5671386419], [0.8240563890,0.5665077826], [0.8244994455,0.5658627611], [0.8249415740,0.5652180106], [0.8253658700,0.5645982471], [0.8257977894,0.5639663209], [0.8262070069,0.5633666495], [0.8266214614,0.5627583491], [0.8270247131,0.5621655663], [0.8274290325,0.5615702950], [0.8278349300,0.5609717717], [0.8282386930,0.5603754700], [0.8286231974,0.5598067495], [0.8290185032,0.5592211739], [0.8294264197,0.5586159811], 
[0.8298176541,0.5580346413], [0.8302011110,0.5574640036], [0.8305819219,0.5568964635], [0.8309645147,0.5563254221], [0.8313528802,0.5557448952], [0.8317663194,0.5551259226], [0.8321780502,0.5545085146], [0.8325636136,0.5539294444], [0.8329462514,0.5533539033], [0.8333370537,0.5527651897], [0.8337159842,0.5521934967], [0.8341210356,0.5515814517], [0.8345412924,0.5509453978], [0.8349488213,0.5503275986], [0.8353426508,0.5497296206], [0.8357403686,0.5491247912], [0.8361262632,0.5485370289], [0.8365248171,0.5479290378], [0.8369349433,0.5473023850], [0.8373458312,0.5466735397], [0.8377545502,0.5460469885], [0.8381519517,0.5454368027], [0.8385482976,0.5448272687], [0.8389460413,0.5442146081], [0.8393446657,0.5435996064], [0.8397540682,0.5429669464], [0.8401680961,0.5423260737], [0.8405571008,0.5417229553], [0.8409482259,0.5411155897], [0.8413179684,0.5405405405], [0.8416950436,0.5399531958], [0.8420762611,0.5393584806], [0.8424537533,0.5387686643], [0.8428149535,0.5382034505], [0.8431786552,0.5376334769], [0.8435675461,0.5370230863], [0.8439571264,0.5364106345], [0.8443518865,0.5357890366], [0.8447510430,0.5351594859], [0.8451466331,0.5345345345], [0.8455399506,0.5339121576], [0.8459265481,0.5332994236], [0.8463196931,0.5326753017], [0.8466983339,0.5320732387], [0.8470768623,0.5314704031], [0.8474940804,0.5308048452], [0.8478960819,0.5301624602], [0.8482813348,0.5295458214], [0.8486858170,0.5288973285], [0.8490880717,0.5282513099], [0.8494759821,0.5276272887], [0.8498674746,0.5269964664], [0.8502515043,0.5263766517], [0.8506381446,0.5257516020], [0.8510137843,0.5251433508], [0.8514034839,0.5245113037], [0.8517809836,0.5238980396], [0.8521636872,0.5232753102], [0.8525532403,0.5226403853], [0.8529626735,0.5219719126], [0.8533750344,0.5212974684], [0.8537560830,0.5206731708], [0.8541201341,0.5200757604], [0.8545108686,0.5194335140], [0.8548581671,0.5188617486], [0.8552066770,0.5182871208], [0.8555700243,0.5176871000], [0.8559425591,0.5170709193], 
[0.8563315640,0.5164264251], [0.8567139886,0.5157917619], [0.8571042995,0.5151429121], [0.8574781887,0.5145203163], [0.8578398002,0.5139171891], [0.8581994009,0.5133164602], [0.8585405413,0.5127456864], [0.8588766853,0.5121824280], [0.8592210192,0.5116045741], [0.8595676004,0.5110220546], [0.8599136091,0.5104395997], [0.8602560143,0.5098623244], [0.8606550507,0.5091884560], [0.8610603275,0.5085028146], [0.8614780371,0.5077948321], [0.8618869858,0.5071004080], [0.8622452818,0.5064909416], [0.8626120139,0.5058661023], [0.8629752713,0.5052461589], [0.8633562344,0.5045948993], [0.8637982699,0.5038378201], [0.8642153223,0.5031221290], [0.8647127933,0.5022666475], [0.8651831909,0.5014559265], [0.8656697628,0.5006154830], [0.8662919244,0.4995380883], [0.8669455824,0.4984028062], [0.8676337369,0.4972038803], [0.8683949487,0.4958731824], [0.8692111968,0.4944409928], [0.8700118267,0.4930308523], [0.8709530206,0.4913662950], [0.8718718719,0.4897340493], [0.8728728729,0.4879476896], [0.8738738739,0.4861527050], [0.8748748749,0.4843489995], [0.8758758759,0.4825364754], [0.8768768769,0.4807150328], [0.8778797697,0.4788811021], [0.8788812538,0.4770406081], [0.8798803960,0.4751952113], [0.8808783398,0.4733427410], [0.8818752556,0.4714828031], [0.8828828829,0.4695932443], [0.8838838839,0.4677064034], [0.8848848849,0.4658097686], [0.8858858859,0.4639032196], [0.8868870484,0.4619863237], [0.8878886685,0.4600583792], [0.8888888889,0.4581228473], [0.8898906242,0.4561739547], [0.8908908909,0.4542173715], [0.8918946121,0.4522432984], [0.8928928929,0.4502691215], [0.8938947207,0.4482769549], [0.8948975312,0.4462716759], [0.8959004453,0.4442548729], [0.8968968969,0.4422397046], [0.8978993645,0.4402007852], [0.8989006554,0.4381524982], [0.8998998999,0.4360965147], [0.9009009009,0.4340248458], [0.9019019019,0.4319409211], [0.9029029029,0.4298445625], [0.9039064454,0.4277302163], [0.9049049049,0.4256138074], [0.9059059059,0.4234790309], [0.9069069069,0.4213310601], 
[0.9079079079,0.4191696921], [0.9089089089,0.4169947186], [0.9099099099,0.4148059255], [0.9109109109,0.4126030930], [0.9119119119,0.4103859950], [0.9129142124,0.4081514925], [0.9139139139,0.4059080659], [0.9149152771,0.4036459287], [0.9159175687,0.4013664253], [0.9169173694,0.3990771074], [0.9179180085,0.3967701220], [0.9189189189,0.3944464735], [0.9199199199,0.3921062878], [0.9209209209,0.3897494803], [0.9219219219,0.3873757477], [0.9229246054,0.3849807434], [0.9239239239,0.3825762444], [0.9249249249,0.3801498169], [0.9259266412,0.3777033956], [0.9269269269,0.3752418848], [0.9279292710,0.3727563119], [0.9289296822,0.3702561890], [0.9299322986,0.3677307710], [0.9309317669,0.3651931617], [0.9319319319,0.3626332503], [0.9329388910,0.3600347561], [0.9339374304,0.3574365344], [0.9349349349,0.3548192039], [0.9359418931,0.3521544727], [0.9369395476,0.3494914651], [0.9379379379,0.3468031496], [0.9389389389,0.3440838109], [0.9399425990,0.3413325513], [0.9409409409,0.3385707395], [0.9419453252,0.3357662943], [0.9429487250,0.3329379853], [0.9439475601,0.3300954464], [0.9449474277,0.3272221858], [0.9459459459,0.3243243243], [0.9469550584,0.3213660178], [0.9479557616,0.3184020635], [0.9489515970,0.3154217281], [0.9499499499,0.3124021328], [0.9509542141,0.3093316711], [0.9519551277,0.3062375464], [0.9529554597,0.3031103626], [0.9539539540,0.2999530859], [0.9549549550,0.2967507945], [0.9559565173,0.2935083252], [0.9569569570,0.2902298788], [0.9579596382,0.2869030002], [0.9589589590,0.2835449083], [0.9599629913,0.2801268556], [0.9609695238,0.2766542506], [0.9619630216,0.2731796937], [0.9629663788,0.2696215002], [0.9639725662,0.2660016761], [0.9649818149,0.2623167872], [0.9659721064,0.2586462636], [0.9669782583,0.2548588786], [0.9679727430,0.2510553101], [0.9689715669,0.2471722125], [0.9699809418,0.2431809463], [0.9709840353,0.2391443145], [0.9719803638,0.2350620607], [0.9729900577,0.2308470222], [0.9740013220,0.2265423242], [0.9749998507,0.2222055155], 
[0.9759850267,0.2178376178], [0.9770060821,0.2132114339], [0.9780028566,0.2085914966], [0.9790075516,0.2038239780], [0.9799830663,0.1990808625], [0.9809867051,0.1940749455], [0.9820052439,0.1888536496], [0.9830210305,0.1834929251], [0.9840320606,0.1779913023], [0.9850395374,0.1723284939], [0.9860561619,0.1664128767], [0.9870708209,0.1602847298], [0.9880663535,0.1540288320], [0.9891304303,0.1470407828], [0.9901248536,0.1401883532], [0.9911979670,0.1323880288], [0.9922723361,0.1240790518], [0.9933683209,0.1149755587], [0.9944476726,0.1052322500], [0.9955254925,0.0944933533], [0.9965872486,0.0825460836], [0.9975388818,0.0701154707], [0.9984604150,0.0554689064], [0.9993188355,0.0369034559], [1.0000000000,0.0000000000]]././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/pareto_front/dtlz3_front.json0000644000076500000240000007051014456461441021750 0ustar00runnerstaff[[0.0000000000,1.0000000000], [0.0023789418,0.9999971703], [0.0035957625,0.9999935352], [0.0046461501,0.9999892066], [0.0056164443,0.9999842277], [0.0065159141,0.9999787712], [0.0074534964,0.9999722223], [0.0083258899,0.9999653392], [0.0091587956,0.9999580574], [0.0099918644,0.9999500801], [0.0108327355,0.9999413242], [0.0117233814,0.9999312788], [0.0126708842,0.9999197211], [0.0135594612,0.9999080663], [0.0144810937,0.9998951435], [0.0154426100,0.9998807558], [0.0163588209,0.9998661855], [0.0172796659,0.9998506954], [0.0182956723,0.9998326202], [0.0192861105,0.9998140057], [0.0202946195,0.9997940430], [0.0212767154,0.9997736251], [0.0222383250,0.9997526979], [0.0232425368,0.9997298558], [0.0241988150,0.9997071658], [0.0251821942,0.9996828783], [0.0261513463,0.9996579951], [0.0271274637,0.9996319826], [0.0281038746,0.9996050081], [0.0291089804,0.9995762438], [0.0301146996,0.9995464496], [0.0311420800,0.9995149678], [0.0321305605,0.9994836802], [0.0331494292,0.9994504066], [0.0341224678,0.9994176590], [0.0351196254,0.9993831157], 
[0.0361104817,0.9993478039], [0.0371030349,0.9993114453], [0.0381098792,0.9992735547], [0.0391060032,0.9992350677], [0.0400819542,0.9991963956], [0.0410766009,0.9991560003], [0.0420738233,0.9991145046], [0.0430885441,0.9990712574], [0.0440880109,0.9990276509], [0.0450918416,0.9989828456], [0.0460950834,0.9989370567], [0.0471008766,0.9988901378], [0.0481098764,0.9988420495], [0.0491071431,0.9987935164], [0.0500806316,0.9987451779], [0.0510749448,0.9986948233], [0.0520726250,0.9986433005], [0.0530671044,0.9985909485], [0.0540624909,0.9985375542], [0.0550624603,0.9984829120], [0.0560756802,0.9984265211], [0.0570766944,0.9983697967], [0.0580787531,0.9983120046], [0.0590846125,0.9982529782], [0.0600803499,0.9981935441], [0.0610750549,0.9981331763], [0.0620820036,0.9980710520], [0.0630804729,0.9980084438], [0.0640708640,0.9979453514], [0.0650811265,0.9978799762], [0.0660806623,0.9978142844], [0.0670828705,0.9977474072], [0.0680916171,0.9976790725], [0.0690876065,0.9976105967], [0.0700892996,0.9975407210], [0.0710876698,0.9974700713], [0.0720852865,0.9973984718], [0.0730871962,0.9973255545], [0.0740941746,0.9972512488], [0.0750929511,0.9971765384], [0.0760929028,0.9971007322], [0.0770905309,0.9970240970], [0.0780853028,0.9969466814], [0.0790890122,0.9968675580], [0.0800843181,0.9967880928], [0.0810920505,0.9967066165], [0.0820960362,0.9966244232], [0.0831026587,0.9965409917], [0.0840977867,0.9964575065], [0.0850989512,0.9963725049], [0.0860976797,0.9962867005], [0.0871044729,0.9961991823], [0.0881031458,0.9961113571], [0.0891070246,0.9960220571], [0.0901066082,0.9959321258], [0.0911009665,0.9958416611], [0.0921012781,0.9957496445], [0.0930979201,0.9956569576], [0.0941009818,0.9955626576], [0.0951016757,0.9954675642], [0.0961036555,0.9953713314], [0.0971077489,0.9952738744], [0.0981081000,0.9951757637], [0.0991041628,0.9950770648], [0.1001077606,0.9949766009], [0.1011117730,0.9948750722], [0.1021127434,0.9947728322], [0.1031140974,0.9946695345], 
[0.1041165595,0.9945651020], [0.1051226787,0.9944592613], [0.1061153429,0.9943538274], [0.1071120699,0.9942469535], [0.1081102128,0.9941389148], [0.1091150101,0.9940291316], [0.1101122560,0.9939191572], [0.1111111111,0.9938079900], [0.1121136994,0.9936953851], [0.1131139276,0.9935820245], [0.1141231545,0.9934666102], [0.1151229650,0.9933512485], [0.1161257655,0.9932345174], [0.1171251493,0.9931171630], [0.1181284450,0.9929983235], [0.1191229956,0.9928795052], [0.1201258342,0.9927586736], [0.1211211211,0.9926377355], [0.1221221221,0.9925150816], [0.1231288797,0.9923906887], [0.1241286609,0.9922661314], [0.1251279986,0.9921406070], [0.1261333960,0.9920132894], [0.1271359168,0.9918853052], [0.1281380543,0.9917563406], [0.1291448225,0.9916257433], [0.1301428579,0.9914952529], [0.1311375140,0.9913641876], [0.1321343226,0.9912318199], [0.1331352793,0.9910978748], [0.1341341341,0.9909631850], [0.1351420164,0.9908262388], [0.1361438408,0.9906890807], [0.1371434507,0.9905511970], [0.1381422984,0.9904123916], [0.1391432443,0.9902722644], [0.1401403250,0.9901316525], [0.1411430141,0.9899892169], [0.1421455635,0.9898457651], [0.1431449617,0.9897017328], [0.1441476716,0.9895561878], [0.1451516891,0.9894094133], [0.1461565556,0.9892614726], [0.1471579737,0.9891130020], [0.1481567787,0.9889638866], [0.1491583681,0.9888133197], [0.1501545067,0.9886625431], [0.1511624053,0.9885089414], [0.1521608817,0.9883557386], [0.1531664898,0.9882003979], [0.1541589451,0.9880460615], [0.1551583027,0.9878896199], [0.1561619849,0.9877314587], [0.1571610807,0.9875729820], [0.1581629812,0.9874130196], [0.1591635308,0.9872522324], [0.1601631179,0.9870905610], [0.1611652964,0.9869274275], [0.1621621622,0.9867641224], [0.1631631632,0.9865990990], [0.1641641642,0.9864330323], [0.1651651652,0.9862659217], [0.1661661662,0.9860977666], [0.1671671672,0.9859285665], [0.1681710294,0.9857578328], [0.1691691692,0.9855870292], [0.1701726043,0.9854142706], [0.1711754161,0.9852405680], 
[0.1721785211,0.9850657627], [0.1731774838,0.9848906331], [0.1741768392,0.9847143894], [0.1751792598,0.9845365544], [0.1761813346,0.9843577283], [0.1771811334,0.9841782592], [0.1781785649,0.9839981702], [0.1791819245,0.9838159573], [0.1801815240,0.9836333760], [0.1811811812,0.9834497341], [0.1821849176,0.9832642858], [0.1831859705,0.9830782778], [0.1841861550,0.9828913777], [0.1851863886,0.9827034148], [0.1861889937,0.9825139483], [0.1871879123,0.9823241245], [0.1881881882,0.9821329879], [0.1891901325,0.9819404736], [0.1901908766,0.9817471316], [0.1911911912,0.9815528149], [0.1921921922,0.9813573056], [0.1931958360,0.9811602157], [0.1941991204,0.9809621306], [0.1951970208,0.9807640507], [0.1961969398,0.9805645113], [0.1971971972,0.9803638434], [0.1981997319,0.9801616531], [0.1992018002,0.9799584903], [0.2002040952,0.9797542142], [0.2012014977,0.9795498749], [0.2022030700,0.9793436161], [0.2032032032,0.9791365881], [0.2042060303,0.9789279326], [0.2052074063,0.9787185093], [0.2062097106,0.9785078208], [0.2072096854,0.9782965533], [0.2082098806,0.9780841710], [0.2092092092,0.9778709050], [0.2102102102,0.9776562113], [0.2112112112,0.9774404454], [0.2122138358,0.9772232539], [0.2132132132,0.9770056938], [0.2142177690,0.9767859271], [0.2152194718,0.9765657064], [0.2162268763,0.9763431456], [0.2172247210,0.9761216218], [0.2182252142,0.9758984352], [0.2192262943,0.9756740398], [0.2202286523,0.9754482768], [0.2212286337,0.9752219704], [0.2222264292,0.9749950842], [0.2232287358,0.9747660907], [0.2242272055,0.9745368953], [0.2252294622,0.9743057474], [0.2262295232,0.9740740233], [0.2272287819,0.9738414043], [0.2282282282,0.9736076601], [0.2292304065,0.9733721902], [0.2302302302,0.9731361884], [0.2312329865,0.9728984047], [0.2322325765,0.9726602852], [0.2332332332,0.9724208240], [0.2342342342,0.9721801909], [0.2352352352,0.9719384672], [0.2362373060,0.9716953922], [0.2372398441,0.9714511086], [0.2382404413,0.9712062047], [0.2392410008,0.9709602173], 
[0.2402405576,0.9707133843], [0.2412412412,0.9704651789], [0.2422450093,0.9702151078], [0.2432457261,0.9699646987], [0.2442479682,0.9697128080], [0.2452518361,0.9694594044], [0.2462527863,0.9692056362], [0.2472490611,0.9689519605], [0.2482482482,0.9686964474], [0.2492504764,0.9684390533], [0.2502502503,0.9681811877], [0.2512512513,0.9679219022], [0.2522522523,0.9676615117], [0.2532532533,0.9674000154], [0.2542542543,0.9671374123], [0.2552575598,0.9668730931], [0.2562597600,0.9666079533], [0.2572592603,0.9663424201], [0.2582593220,0.9660756299], [0.2592592593,0.9658077637], [0.2602609197,0.9655383233], [0.2612612613,0.9652681251], [0.2622648187,0.9649959403], [0.2632641986,0.9647237748], [0.2642660612,0.9644498167], [0.2652652653,0.9641754711], [0.2662678984,0.9638990644], [0.2672685868,0.9636220745], [0.2682693526,0.9633439440], [0.2692712328,0.9630643816], [0.2702722259,0.9627839446], [0.2712735368,0.9625022952], [0.2722722723,0.9622202501], [0.2732732733,0.9619364418], [0.2742761494,0.9616509730], [0.2752786807,0.9613644720], [0.2762795701,0.9610773117], [0.2772772773,0.9607899414], [0.2782792613,0.9605002097], [0.2792800783,0.9602096843], [0.2802823235,0.9599176106], [0.2812817558,0.9596252257], [0.2822831454,0.9593311346], [0.2832842485,0.9590359923], [0.2842880068,0.9587389265], [0.2852925352,0.9584404882], [0.2862915788,0.9581425426], [0.2872909969,0.9578433500], [0.2882899977,0.9575431464], [0.2892909832,0.9572412063], [0.2902902903,0.9569386330], [0.2912929148,0.9566339100], [0.2922922923,0.9563290312], [0.2932942328,0.9560222241], [0.2942942943,0.9557148468], [0.2952952953,0.9554060334], [0.2962969827,0.9550958581], [0.2972984499,0.9547845996], [0.2982993300,0.9544723724], [0.2993011060,0.9541587121], [0.3003006996,0.9538445837], [0.3013025019,0.9535286059], [0.3023030034,0.9532118831], [0.3033033033,0.9528940687], [0.3043043043,0.9525748739], [0.3053053053,0.9522545198], [0.3063067368,0.9519328669], [0.3073073073,0.9516103293], 
[0.3083083083,0.9512864905], [0.3093093093,0.9509614877], [0.3103108868,0.9506351317], [0.3113113113,0.9503079856], [0.3123123123,0.9499794838], [0.3133133133,0.9496498132], [0.3143154356,0.9493186014], [0.3153153153,0.9489869609], [0.3163163163,0.9486537767], [0.3173173173,0.9483194188], [0.3183187733,0.9479837333], [0.3193204370,0.9476468005], [0.3203210408,0.9473090472], [0.3213221831,0.9469699333], [0.3223223223,0.9466299808], [0.3233233233,0.9462885546], [0.3243243243,0.9459459459], [0.3253253253,0.9456021535], [0.3263272005,0.9452568742], [0.3273293622,0.9449103072], [0.3283288453,0.9445634808], [0.3293293293,0.9442151200], [0.3303303303,0.9438653892], [0.3313313313,0.9435144667], [0.3323335732,0.9431619141], [0.3333333333,0.9428090416], [0.3343343343,0.9424545362], [0.3353353353,0.9420988339], [0.3363363750,0.9417419194], [0.3373389033,0.9413832717], [0.3383385080,0.9410244705], [0.3393393393,0.9406640276], [0.3403411793,0.9403020162], [0.3413413413,0.9399394069], [0.3423423423,0.9395752874], [0.3433433433,0.9392099598], [0.3443451363,0.9388431323], [0.3453453453,0.9384756749], [0.3463466065,0.9381066188], [0.3473473473,0.9377365410], [0.3483483483,0.9373651520], [0.3493493493,0.9369925465], [0.3503510626,0.9366184564], [0.3513524033,0.9362432850], [0.3523523524,0.9358674157], [0.3533533739,0.9354899215], [0.3543548782,0.9351110203], [0.3553564497,0.9347308670], [0.3563595686,0.9343488952], [0.3573589347,0.9339671256], [0.3583583584,0.9335841082], [0.3593593594,0.9331992557], [0.3603603604,0.9328131703], [0.3613624468,0.9324254297], [0.3623623624,0.9320372945], [0.3633653536,0.9316467248], [0.3643651514,0.9312561605], [0.3653670219,0.9308635449], [0.3663671270,0.9304703801], [0.3673673674,0.9300759202], [0.3683693446,0.9296795286], [0.3693724147,0.9292814532], [0.3703712393,0.9288838168], [0.3713719774,0.9284841702], [0.3723723724,0.9280834102], [0.3733733734,0.9276811543], [0.3743743744,0.9272776433], [0.3753753754,0.9268728756], 
[0.3763774806,0.9264664010], [0.3773777160,0.9260594255], [0.3783792359,0.9256506651], [0.3793793794,0.9252412045], [0.3803803804,0.9248301283], [0.3813813814,0.9244177854], [0.3823823824,0.9240041740], [0.3833845378,0.9235888134], [0.3843877245,0.9231717485], [0.3853870001,0.9227550380], [0.3863891830,0.9223358386], [0.3873891583,0.9219162869], [0.3883907844,0.9214947632], [0.3893942570,0.9210711767], [0.3903917375,0.9206488425], [0.3913933222,0.9202234877], [0.3923929945,0.9197976614], [0.3933933934,0.9193702399], [0.3943943944,0.9189412722], [0.3953975550,0.9185100835], [0.3963971073,0.9180791542], [0.3973973974,0.9176466142], [0.3983997498,0.9172118836], [0.3994003620,0.9167766090], [0.4004012107,0.9163399317], [0.4014014014,0.9159022409], [0.4024024024,0.9154628920], [0.4034043517,0.9150218189], [0.4044067845,0.9145792216], [0.4054054054,0.9141370014], [0.4064064064,0.9136924170], [0.4074074074,0.9132465190], [0.4084084084,0.9127993054], [0.4094094094,0.9123507744], [0.4104115392,0.9119004159], [0.4114128541,0.9114491009], [0.4124129881,0.9109969963], [0.4134134134,0.9105434364], [0.4144144144,0.9100882886], [0.4154154154,0.9096318116], [0.4164164164,0.9091740032], [0.4174186674,0.9087142874], [0.4184201372,0.9082535928], [0.4194194194,0.9077925703], [0.4204204800,0.9073293889], [0.4214229047,0.9068642321], [0.4224241375,0.9063982834], [0.4234234234,0.9059318984], [0.4244244244,0.9054633664], [0.4254254254,0.9049934847], [0.4264264264,0.9045222512], [0.4274274274,0.9040496636], [0.4284284284,0.9035757200], [0.4294294294,0.9031004181], [0.4304304634,0.9026237401], [0.4314314314,0.9021457310], [0.4324330951,0.9016660237], [0.4334351935,0.9011847386], [0.4344353208,0.9007030321], [0.4354354354,0.9002199629], [0.4364364364,0.8997350927], [0.4374374374,0.8992488467], [0.4384384384,0.8987612229], [0.4394394394,0.8982722188], [0.4404404404,0.8977818323], [0.4414414414,0.8972900611], [0.4424437834,0.8967962414], [0.4434434434,0.8963023555], 
[0.4444444444,0.8958064165], [0.4454463415,0.8953086378], [0.4464477251,0.8948097165], [0.4474474972,0.8943102019], [0.4484484484,0.8938086983], [0.4494494494,0.8933057665], [0.4504518510,0.8928007224], [0.4514526015,0.8922951017], [0.4524531394,0.8917881793], [0.4534534535,0.8912799591], [0.4544544545,0.8907699753], [0.4554554555,0.8902585737], [0.4564564565,0.8897457521], [0.4574574575,0.8892315079], [0.4584589324,0.8887155941], [0.4594594595,0.8881987419], [0.4604604605,0.8876802151], [0.4614614615,0.8871602559], [0.4624625580,0.8866388117], [0.4634641023,0.8861156955], [0.4644644647,0.8855917575], [0.4654654700,0.8850660406], [0.4664664665,0.8845388831], [0.4674674675,0.8840102753], [0.4684701105,0.8834793464], [0.4694714720,0.8829476412], [0.4704729272,0.8824144291], [0.4714726513,0.8818806830], [0.4724724725,0.8813454276], [0.4734734735,0.8808080778], [0.4744744745,0.8802692617], [0.4754756000,0.8797289093], [0.4764764765,0.8791872197], [0.4774778479,0.8786437872], [0.4784784785,0.8780992801], [0.4794798626,0.8775528824], [0.4804804805,0.8770054207], [0.4814814815,0.8764562642], [0.4824824825,0.8759056194], [0.4834834835,0.8753534836], [0.4844852498,0.8747994300], [0.4854857936,0.8742445563], [0.4864864865,0.8736881014], [0.4874879901,0.8731296922], [0.4884884885,0.8725703391], [0.4894900830,0.8720088639], [0.4904904905,0.8714465438], [0.4914914915,0.8708823766], [0.4924924925,0.8703166923], [0.4934934935,0.8697494880], [0.4944944945,0.8691807608], [0.4954954955,0.8686105076], [0.4964967353,0.8680385889], [0.4974980710,0.8674650825], [0.4984992707,0.8668901182], [0.4995004652,0.8663136183], [0.5005005005,0.8657362468], [0.5015018074,0.8651565969], [0.5025047918,0.8645744238], [0.5035043517,0.8639926897], [0.5045045045,0.8634090600], [0.5055055055,0.8628233793], [0.5065065065,0.8622361387], [0.5075086402,0.8616466678], [0.5085100628,0.8610560470], [0.5095106642,0.8604643416], [0.5105105105,0.8598715129], [0.5115122540,0.8592759824], 
[0.5125125125,0.8586797567], [0.5135135135,0.8580815063], [0.5145154430,0.8574811128], [0.5155159317,0.8568799940], [0.5165165165,0.8562772262], [0.5175175303,0.8556726043], [0.5185196118,0.8550657356], [0.5195197838,0.8544584216], [0.5205205205,0.8538491598], [0.5215220771,0.8532377881], [0.5225238556,0.8526246655], [0.5235243128,0.8520107358], [0.5245245245,0.8513953389], [0.5255260434,0.8507775136], [0.5265265265,0.8501587010], [0.5275275275,0.8495379378], [0.5285306324,0.8489142304], [0.5295307330,0.8482907537], [0.5305305305,0.8476658281], [0.5315315315,0.8470385062], [0.5325325325,0.8464095355], [0.5335347999,0.8457781135], [0.5345345345,0.8451466331], [0.5355355355,0.8445126939], [0.5365365365,0.8438770911], [0.5375375375,0.8432398210], [0.5385393670,0.8426003502], [0.5395395395,0.8419602635], [0.5405405405,0.8413179684], [0.5415415415,0.8406739908], [0.5425425425,0.8400283266], [0.5435435435,0.8393809721], [0.5445445445,0.8387319232], [0.5455455455,0.8380811761], [0.5465465465,0.8374287268], [0.5475475475,0.8367745713], [0.5485485485,0.8361187056], [0.5495495495,0.8354611257], [0.5505505506,0.8348018276], [0.5515517276,0.8341406907], [0.5525525526,0.8334780601], [0.5535542448,0.8328131231], [0.5545552970,0.8321468756], [0.5555555556,0.8314794193], [0.5565565566,0.8308097251], [0.5575579506,0.8301380197], [0.5585585586,0.8294650907], [0.5595595596,0.8287901419], [0.5605605606,0.8281134330], [0.5615615616,0.8274349597], [0.5625625626,0.8267547177], [0.5635646789,0.8260719416], [0.5645645646,0.8253889098], [0.5655655656,0.8247033352], [0.5665665666,0.8240159741], [0.5675675676,0.8233268223], [0.5685685686,0.8226358750], [0.5695695696,0.8219431279], [0.5705717985,0.8212477231], [0.5715715716,0.8205522156], [0.5725727651,0.8198539069], [0.5735747959,0.8191531929], [0.5745745746,0.8184522333], [0.5755755756,0.8177485902], [0.5765765766,0.8170431147], [0.5775775776,0.8163358022], [0.5785785786,0.8156266477], [0.5795807837,0.8149147901], 
[0.5805810088,0.8142024885], [0.5815815816,0.8134880847], [0.5825827335,0.8127714061], [0.5835835836,0.8120530777], [0.5845851229,0.8113323820], [0.5855855856,0.8106105859], [0.5865869027,0.8098862918], [0.5875886488,0.8091597987], [0.5885907985,0.8084311176], [0.5895895896,0.8077029874], [0.5905905906,0.8069713466], [0.5915915916,0.8062377991], [0.5925932684,0.8055018425], [0.5935954680,0.8047635804], [0.5945945946,0.8040256638], [0.5955966761,0.8032836357], [0.5965965966,0.8025412768], [0.5975981234,0.8017957863], [0.5985985986,0.8010491357], [0.5995995996,0.8003001438], [0.6006006006,0.7995491971], [0.6016028124,0.7987953781], [0.6026031993,0.7980409665], [0.6036036036,0.7972845726], [0.6046053195,0.7965252084], [0.6056056056,0.7957649467], [0.6066066875,0.7950020922], [0.6076076076,0.7942373670], [0.6086086086,0.7934705801], [0.6096099182,0.7927015502], [0.6106113274,0.7919304306], [0.6116116116,0.7911581615], [0.6126129903,0.7903830236], [0.6136141937,0.7896059911], [0.6146146146,0.7888275322], [0.6156156717,0.7880465372], [0.6166172857,0.7872630583], [0.6176189981,0.7864774461], [0.6186186186,0.7856914182], [0.6196196196,0.7849022404], [0.6206206206,0.7841109904], [0.6216216216,0.7833176620], [0.6226231309,0.7825218444], [0.6236236236,0.7817247444], [0.6246246246,0.7809251426], [0.6256256256,0.7801234367], [0.6266270828,0.7793192536], [0.6276276276,0.7785136871], [0.6286295540,0.7777048822], [0.6296302197,0.7768949649], [0.6306306306,0.7760831191], [0.6316316316,0.7752686515], [0.6326326326,0.7744520335], [0.6336340432,0.7736329229], [0.6346346346,0.7728123191], [0.6356356356,0.7719892089], [0.6366378099,0.7711629523], [0.6376376376,0.7703364480], [0.6386386386,0.7695067831], [0.6396396396,0.7686749192], [0.6406406406,0.7678408491], [0.6416427965,0.7670035995], [0.6426442698,0.7661646967], [0.6436463401,0.7653230618], [0.6446457845,0.7644814010], [0.6456456456,0.7636371522], [0.6466466466,0.7627896921], [0.6476487003,0.7619390796], 
[0.6486486486,0.7610879914], [0.6496496496,0.7602337356], [0.6506506507,0.7593771993], [0.6516516517,0.7585183748], [0.6526526527,0.7576572543], [0.6536536537,0.7567938300], [0.6546546547,0.7559280939], [0.6556556557,0.7550600381], [0.6566566567,0.7541896547], [0.6576576577,0.7533169355], [0.6586597139,0.7524409487], [0.6596596597,0.7515644573], [0.6606606607,0.7506846818], [0.6616616617,0.7498025377], [0.6626626627,0.7489180165], [0.6636639380,0.7480308666], [0.6646655423,0.7471410288], [0.6656656657,0.7462501066], [0.6666666667,0.7453559925], [0.6676676677,0.7444594586], [0.6686687686,0.7435604064], [0.6696700121,0.7426587877], [0.6706708568,0.7417550821], [0.6716716717,0.7408489492], [0.6726726727,0.7399401837], [0.6736747602,0.7390279544], [0.6746746747,0.7381152236], [0.6756756757,0.7371990106], [0.6766766767,0.7362802967], [0.6776776777,0.7353590723], [0.6786786787,0.7344353281], [0.6796815187,0.7335073504], [0.6806834933,0.7325776286], [0.6816825186,0.7316481011], [0.6826831850,0.7307144920], [0.6836836837,0.7297784737], [0.6846846847,0.7288394079], [0.6856869699,0.7278965443], [0.6866885605,0.7269517321], [0.6876876877,0.7260066420], [0.6886886887,0.7250571633], [0.6896899925,0.7241047674], [0.6906910369,0.7231499786], [0.6916916917,0.7221929130], [0.6926940408,0.7212315619], [0.6936946854,0.7202691743], [0.6946963858,0.7193030874], [0.6956971197,0.7183352405], [0.6966966967,0.7173658152], [0.6976988128,0.7163912106], [0.6986986987,0.7154160527], [0.6996997298,0.7144370428], [0.7007007007,0.7134553441], [0.7017037971,0.7124687931], [0.7027036467,0.7114826667], [0.7037039479,0.7104933172], [0.7047047047,0.7095007253], [0.7057057057,0.7085050860], [0.7067070125,0.7075063240], [0.7077077077,0.7065053435], [0.7087087087,0.7055012163], [0.7097098302,0.7044941142], [0.7107107107,0.7034843891], [0.7117122799,0.7024710888], [0.7127127127,0.7014560493], [0.7137147039,0.7004365220], [0.7147148498,0.6994159589], [0.7157157157,0.6983917341], 
[0.7167167167,0.6973644298], [0.7177177177,0.6963341710], [0.7187187187,0.6953009445], [0.7197197197,0.6942647370], [0.7207207207,0.6932255353], [0.7217217217,0.6921833257], [0.7227227227,0.6911380948], [0.7237243839,0.6900891364], [0.7247258515,0.6890373286], [0.7257257257,0.6879841357], [0.7267277147,0.6869256355], [0.7277277277,0.6858661344], [0.7287287287,0.6848024824], [0.7297297297,0.6837357103], [0.7307318389,0.6826646172], [0.7317335427,0.6815908028], [0.7327341160,0.6805150368], [0.7337337337,0.6794371258], [0.7347348303,0.6783544274], [0.7357370699,0.6772672766], [0.7367367367,0.6761796956], [0.7377382823,0.6750868291], [0.7387404285,0.6739900439], [0.7397397397,0.6728930951], [0.7407409969,0.6717907230], [0.7417423300,0.6706849602], [0.7427429352,0.6695766813], [0.7437437437,0.6684648410], [0.7447447447,0.6673494326], [0.7457457457,0.6662306528], [0.7467468652,0.6651083516], [0.7477496769,0.6639807382], [0.7487507698,0.6628516310], [0.7497509003,0.6617201731], [0.7507507508,0.6605855813], [0.7517517518,0.6594462099], [0.7527527528,0.6583033444], [0.7537537538,0.6571569666], [0.7547547548,0.6560070580], [0.7557557558,0.6548536002], [0.7567572714,0.6536959785], [0.7577608951,0.6525323179], [0.7587598282,0.6513704961], [0.7597610024,0.6502024448], [0.7607615002,0.6490315399], [0.7617617618,0.6478572515], [0.7627636728,0.6466773380], [0.7637637638,0.6454958661], [0.7647647648,0.6443095953], [0.7657667975,0.6431183498], [0.7667667668,0.6419257943], [0.7677679668,0.6407279837], [0.7687687688,0.6395268408], [0.7697707260,0.6383204755], [0.7707728607,0.6371100354], [0.7717732667,0.6358978101], [0.7727727728,0.6346827882], [0.7737737738,0.6334620328], [0.7747757435,0.6322361484], [0.7757757758,0.6310086733], [0.7767767768,0.6297760229], [0.7777777778,0.6285393611], [0.7787799647,0.6272971916], [0.7797808234,0.6260526075], [0.7807810914,0.6248046793], [0.7817817818,0.6235521195], [0.7827827828,0.6222950385], [0.7837855860,0.6210315251], 
[0.7847858628,0.6197670123], [0.7857860662,0.6184983898], [0.7867876755,0.6172237468], [0.7877909140,0.6159427537], [0.7887915250,0.6146608253], [0.7897914009,0.6133755318], [0.7907924484,0.6120843925], [0.7917917918,0.6107910923], [0.7927927928,0.6094912532], [0.7937961556,0.6081839059], [0.7947956983,0.6068770864], [0.7957972547,0.6055631507], [0.7968006001,0.6042423385], [0.7978006176,0.6029213668], [0.7988012156,0.6015950614], [0.7998009797,0.6002652688], [0.8008020100,0.5989291617], [0.8018018018,0.5975900523], [0.8028033501,0.5962438940], [0.8038051655,0.5948926423], [0.8048048048,0.5935395742], [0.8058066380,0.5921787417], [0.8068068068,0.5908153489], [0.8078078078,0.5894459650], [0.8088090292,0.5880713854], [0.8098098098,0.5866924850], [0.8108108108,0.5853083197], [0.8118118118,0.5839191572], [0.8128128128,0.5825249620], [0.8138151346,0.5811238480], [0.8148148148,0.5797213275], [0.8158158158,0.5783118144], [0.8168174358,0.5768962441], [0.8178205144,0.5754733757], [0.8188210157,0.5740489040], [0.8198215946,0.5726190296], [0.8208227641,0.5711829741], [0.8218230662,0.5697427910], [0.8228285404,0.5682897088], [0.8238349478,0.5668297617], [0.8248384140,0.5653685442], [0.8258532896,0.5638850451], [0.8268672457,0.5623971533], [0.8278771762,0.5609094233], [0.8288727624,0.5594371670], [0.8298619650,0.5579687438], [0.8308483376,0.5564989128], [0.8318485214,0.5550027364], [0.8328354465,0.5535206582], [0.8338338338,0.5520155229], [0.8348361362,0.5504985247], [0.8358364463,0.5489785379], [0.8368382625,0.5474502009], [0.8378390119,0.5459173839], [0.8388388388,0.5443798329], [0.8398398398,0.5428342688], [0.8408408408,0.5412824405], [0.8418423655,0.5397234770], [0.8428431932,0.5381592252], [0.8438438438,0.5365888251], [0.8448448448,0.5350113907], [0.8458473234,0.5334250702], [0.8468468468,0.5318368340], [0.8478478478,0.5302395939], [0.8488516396,0.5286311512], [0.8498513361,0.5270224916], [0.8508508509,0.5254072988], [0.8518541278,0.5237790994], 
[0.8528538201,0.5221497502], [0.8538543001,0.5205120884], [0.8548548549,0.5188672057], [0.8558575680,0.5172115846], [0.8568599769,0.5155492024], [0.8578590836,0.5138849995], [0.8588601162,0.5122102116], [0.8598598599,0.5105301376], [0.8608635220,0.5088359229], [0.8618636728,0.5071400295], [0.8628679780,0.5054293745], [0.8638649378,0.5037235047], [0.8648656649,0.5020033683], [0.8658683931,0.5002718519], [0.8668677905,0.4985380967], [0.8678678679,0.4967950925], [0.8688718730,0.4950370373], [0.8698736211,0.4932746529], [0.8708723750,0.4915092130], [0.8718718719,0.4897340493], [0.8728728729,0.4879476896], [0.8738752272,0.4861502723], [0.8748774666,0.4843443181], [0.8758783709,0.4825319465], [0.8768768769,0.4807150328], [0.8778800077,0.4788806658], [0.8788808536,0.4770413453], [0.8798798799,0.4751961668], [0.8808817021,0.4733364839], [0.8818818819,0.4714704088], [0.8828841587,0.4695908456], [0.8838838839,0.4677064034], [0.8848864682,0.4658067607], [0.8858858859,0.4639032196], [0.8868880801,0.4619843432], [0.8878890006,0.4600577383], [0.8888888889,0.4581228473], [0.8898916606,0.4561719328], [0.8908916277,0.4542159262], [0.8918927976,0.4522468768], [0.8928951469,0.4502646518], [0.8938938939,0.4482786036], [0.8948948949,0.4462769623], [0.8958977603,0.4442602876], [0.8968979645,0.4422375395], [0.8979002899,0.4401988976], [0.8988988989,0.4381561018], [0.8998998999,0.4360965147], [0.9009023300,0.4340218795], [0.9019052986,0.4319338287], [0.9029056424,0.4298388081], [0.9039065779,0.4277299363], [0.9049132506,0.4255960630], [0.9059130618,0.4234637228], [0.9069101087,0.4213241682], [0.9079098397,0.4191655079], [0.9089127246,0.4169864014], [0.9099115880,0.4148022446], [0.9109114570,0.4126018875], [0.9119136867,0.4103820513], [0.9129129129,0.4081543990], [0.9139139139,0.4059080659], [0.9149167765,0.4036425300], [0.9159174342,0.4013667322], [0.9169192282,0.3990728367], [0.9179194591,0.3967667660], [0.9189213809,0.3944407379], [0.9199263475,0.3920912078], 
[0.9209313246,0.3897248970], [0.9219314736,0.3873530148], [0.9229364461,0.3849523562], [0.9239322618,0.3825561078], [0.9249289645,0.3801399881], [0.9259311199,0.3776924162], [0.9269299287,0.3752344697], [0.9279344792,0.3727433464], [0.9289377295,0.3702359987], [0.9299354536,0.3677227926], [0.9309309309,0.3651952927], [0.9319337420,0.3626285985], [0.9329401493,0.3600314956], [0.9339416807,0.3574254287], [0.9349441928,0.3547948088], [0.9359447383,0.3521469108], [0.9369519997,0.3494580809], [0.9379468684,0.3467789961], [0.9389476600,0.3440600119], [0.9399442674,0.3413279571], [0.9409457092,0.3385574874], [0.9419483491,0.3357578111], [0.9429462076,0.3329451148], [0.9439470603,0.3300968758], [0.9449524570,0.3272076619], [0.9459521587,0.3243062033], [0.9469537918,0.3213697499], [0.9479517402,0.3184140360], [0.9489543944,0.3154133119], [0.9499565125,0.3123821768], [0.9509555279,0.3093276320], [0.9519640614,0.3062097742], [0.9529639469,0.3030836780], [0.9539654496,0.2999165233], [0.9549600502,0.2967343973], [0.9559616024,0.2934917627], [0.9569658084,0.2902006919], [0.9579643450,0.2868872838], [0.9589720906,0.2835004929], [0.9599723687,0.2800947184], [0.9609664523,0.2766649194], [0.9619764369,0.2731324492], [0.9629758058,0.2695878288], [0.9639714133,0.2660058539], [0.9649758435,0.2623387533], [0.9659811043,0.2586126567], [0.9669917798,0.2548075701], [0.9679901121,0.2509883323], [0.9689941110,0.2470838173], [0.9700076093,0.2430745522], [0.9710116043,0.2390323500], [0.9720159574,0.2349148325], [0.9730324483,0.2306682781], [0.9740358201,0.2263939513], [0.9750207543,0.2221137741], [0.9760222141,0.2176709386], [0.9770250551,0.2131244746], [0.9780318938,0.2084553064], [0.9790453947,0.2036421252], [0.9800580079,0.1987116031], [0.9810811984,0.1935966999], [0.9821315948,0.1881954583], [0.9831871921,0.1826005072], [0.9841812952,0.1771642691], [0.9852222690,0.1712807072], [0.9862351536,0.1653487882], [0.9872695178,0.1590562770], [0.9883054593,0.1524871113], 
[0.9893535395,0.1455320372], [0.9904196033,0.1380905840], [0.9914345875,0.1306042062], [0.9925216962,0.1220683524], [0.9936003062,0.1129532269], [0.9946000689,0.1037819972], [0.9956227614,0.0934629176], [0.9966372661,0.0819399766], [0.9976101518,0.0690940309], [0.9985118883,0.0545344751], [0.9993366830,0.0364169478], [1.0000000000,0.0000000000]]././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/pareto_front/dtlz4_front.json0000644000076500000240000007051014456461441021751 0ustar00runnerstaff[[0.0000000000,1.0000000000], [0.0024376240,0.9999970290], [0.0036802178,0.9999932280], [0.0047339714,0.9999887947], [0.0057080250,0.9999837091], [0.0065972495,0.9999782379], [0.0074750663,0.9999720613], [0.0082930392,0.9999656122], [0.0092130046,0.9999575594], [0.0101694069,0.9999482902], [0.0110864071,0.9999385439], [0.0119747142,0.9999283005], [0.0128499955,0.9999174354], [0.0137610854,0.9999053118], [0.0145690681,0.9998938655], [0.0155405037,0.9998792391], [0.0164983942,0.9998638922], [0.0174200448,0.9998482595], [0.0183730015,0.9998312022], [0.0193122038,0.9998135020], [0.0202840371,0.9997942578], [0.0212768482,0.9997736222], [0.0222809149,0.9997517496], [0.0232513323,0.9997296512], [0.0242029392,0.9997070660], [0.0251831875,0.9996828532], [0.0261748439,0.9996573801], [0.0271739884,0.9996307190], [0.0281525004,0.9996036398], [0.0291354196,0.9995754736], [0.0301282126,0.9995460424], [0.0311013086,0.9995162373], [0.0321139185,0.9994842151], [0.0330989146,0.9994520808], [0.0340917610,0.9994187070], [0.0351106506,0.9993834310], [0.0361201345,0.9993474550], [0.0371240356,0.9993106654], [0.0381147359,0.9992733695], [0.0391119534,0.9992348348], [0.0401116218,0.9991952051], [0.0411149177,0.9991544243], [0.0420874618,0.9991139302], [0.0430838499,0.9990714598], [0.0440956172,0.9990273152], [0.0450929276,0.9989827966], [0.0460960293,0.9989370131], [0.0471014257,0.9988901119], [0.0480916857,0.9988429255], 
[0.0490864970,0.9987945313], [0.0500776278,0.9987453285], [0.0510828172,0.9986944206], [0.0520683706,0.9986435224], [0.0530737986,0.9985905927], [0.0540673112,0.9985372932], [0.0550745741,0.9984822439], [0.0560703438,0.9984268208], [0.0570812977,0.9983695335], [0.0580806256,0.9983118956], [0.0590791197,0.9982533033], [0.0600760736,0.9981938015], [0.0610794219,0.9981329091], [0.0620921520,0.9980704207], [0.0630949569,0.9980075282], [0.0640874302,0.9979442877], [0.0650982022,0.9978788624], [0.0661023217,0.9978128497], [0.0670930983,0.9977467194], [0.0680862574,0.9976794383], [0.0690824980,0.9976109505], [0.0700963842,0.9975402232], [0.0710925827,0.9974697212], [0.0720982253,0.9973975365], [0.0730953602,0.9973249562], [0.0740907900,0.9972515003], [0.0750785689,0.9971776213], [0.0760807102,0.9971016626], [0.0770868071,0.9970243849], [0.0780948102,0.9969459367], [0.0790960303,0.9968670012], [0.0800934694,0.9967873575], [0.0810943834,0.9967064267], [0.0820953907,0.9966244763], [0.0830961419,0.9965415351], [0.0840875863,0.9964583673], [0.0850850851,0.9963736891], [0.0860864750,0.9962876687], [0.0870964432,0.9961998843], [0.0880968714,0.9961119120], [0.0890971860,0.9960229372], [0.0900939480,0.9959332711], [0.0910945671,0.9958422465], [0.0920937126,0.9957503443], [0.0931010419,0.9956566657], [0.0941055363,0.9955622271], [0.0951045577,0.9954672888], [0.0961077250,0.9953709385], [0.0971077215,0.9952738771], [0.0981104624,0.9951755308], [0.0991152388,0.9950759616], [0.1001131241,0.9949760612], [0.1011094437,0.9948753090], [0.1021111442,0.9947729963], [0.1031117383,0.9946697791], [0.1041118913,0.9945655906], [0.1051191129,0.9944596382], [0.1061118212,0.9943542032], [0.1071091685,0.9942472660], [0.1081100177,0.9941389360], [0.1091122245,0.9940294374], [0.1101121508,0.9939191689], [0.1111126274,0.9938078205], [0.1121164374,0.9936950762], [0.1131187770,0.9935814724], [0.1141166455,0.9934673579], [0.1151151151,0.9933521582], [0.1161181012,0.9932354135], 
[0.1171252355,0.9931171528], [0.1181243744,0.9929988077], [0.1191263594,0.9928791017], [0.1201254734,0.9927587172], [0.1211220864,0.9926376178], [0.1221272126,0.9925144553], [0.1231235628,0.9923913484], [0.1241241241,0.9922666989], [0.1251280973,0.9921405945], [0.1261297051,0.9920137587], [0.1271271271,0.9918864318], [0.1281318463,0.9917571426], [0.1291357881,0.9916269199], [0.1301339324,0.9914964244], [0.1311348537,0.9913645395], [0.1321321321,0.9912321119], [0.1331335013,0.9910981136], [0.1341341341,0.9909631850], [0.1351373794,0.9908268712], [0.1361361361,0.9906901395], [0.1371401854,0.9905516491], [0.1381430255,0.9904122902], [0.1391426332,0.9902723502], [0.1401426651,0.9901313213], [0.1411464108,0.9899887326], [0.1421459659,0.9898457074], [0.1431458244,0.9897016080], [0.1441441441,0.9895567016], [0.1451451917,0.9894103665], [0.1461492859,0.9892625467], [0.1471496571,0.9891142393], [0.1481506966,0.9889647977], [0.1491491491,0.9888147103], [0.1501501502,0.9886632047], [0.1511511512,0.9885106623], [0.1521521522,0.9883570825], [0.1531541342,0.9882023129], [0.1541562342,0.9880464845], [0.1551551552,0.9878901142], [0.1561561562,0.9877323802], [0.1571617368,0.9875728776], [0.1581673166,0.9874123252], [0.1591644644,0.9872520819], [0.1601629302,0.9870905915], [0.1611634143,0.9869277349], [0.1621710749,0.9867626576], [0.1631766408,0.9865968700], [0.1641805745,0.9864303011], [0.1651864816,0.9862623517], [0.1661850043,0.9860945920], [0.1671808690,0.9859262432], [0.1681738832,0.9857573459], [0.1691691692,0.9855870292], [0.1701714936,0.9854144624], [0.1711732166,0.9852409502], [0.1721738929,0.9850665717], [0.1731731732,0.9848913910], [0.1741741742,0.9847148608], [0.1751764980,0.9845370458], [0.1761761762,0.9843586516], [0.1771771772,0.9841789715], [0.1781803976,0.9839978384], [0.1791847030,0.9838154513], [0.1801862087,0.9836325179], [0.1811844530,0.9834491314], [0.1821862264,0.9832640433], [0.1831883610,0.9830778323], [0.1841859485,0.9828914164], 
[0.1851886409,0.9827029904], [0.1861889928,0.9825139485], [0.1871875126,0.9823242006], [0.1881907775,0.9821324917], [0.1891891892,0.9819406554], [0.1901901902,0.9817472646], [0.1911921208,0.9815526338], [0.1921964651,0.9813564688], [0.1931985642,0.9811596785], [0.1941986011,0.9809622334], [0.1952024094,0.9807629782], [0.1961971621,0.9805644668], [0.1971989718,0.9803634864], [0.1982001985,0.9801615588], [0.1991991992,0.9799590191], [0.2002002002,0.9797550101], [0.2012025911,0.9795496503], [0.2022022022,0.9793437953], [0.2032033034,0.9791365673], [0.2042056070,0.9789280209], [0.2052052052,0.9787189708], [0.2062068572,0.9785084221], [0.2072096983,0.9782965506], [0.2082132451,0.9780834548], [0.2092124293,0.9778702160], [0.2102141420,0.9776553659], [0.2112112112,0.9774404454], [0.2122143762,0.9772231365], [0.2132145302,0.9770054064], [0.2142175608,0.9767859728], [0.2152166788,0.9765663219], [0.2162183061,0.9763450436], [0.2172172172,0.9761232917], [0.2182182182,0.9758999996], [0.2192192192,0.9756756295], [0.2202202202,0.9754501805], [0.2212212212,0.9752236519], [0.2222223137,0.9749960222], [0.2232237113,0.9747672413], [0.2242253312,0.9745373266], [0.2252252252,0.9743067268], [0.2262276870,0.9740744497], [0.2272300563,0.9738411069], [0.2282282282,0.9736076601], [0.2292300768,0.9733722679], [0.2302307812,0.9731360580], [0.2312322399,0.9728985822], [0.2322324473,0.9726603160], [0.2332332332,0.9724208240], [0.2342342342,0.9721801909], [0.2352352352,0.9719384672], [0.2362362362,0.9716956523], [0.2372386447,0.9714514015], [0.2382405006,0.9712061902], [0.2392412812,0.9709601482], [0.2402402402,0.9707134629], [0.2412412412,0.9704651789], [0.2422422422,0.9702157987], [0.2432432432,0.9699653213], [0.2442442442,0.9697137460], [0.2452459588,0.9694608913], [0.2462462462,0.9692072979], [0.2472472472,0.9689524234], [0.2482482482,0.9686964474], [0.2492492492,0.9684393692], [0.2502502503,0.9681811877], [0.2512526720,0.9679215334], [0.2522522523,0.9676615117], 
[0.2532532533,0.9674000154], [0.2542542543,0.9671374123], [0.2552574003,0.9668731352], [0.2562571120,0.9666086553], [0.2572572573,0.9663429534], [0.2582583893,0.9660758792], [0.2592592593,0.9658077637], [0.2602613898,0.9655381965], [0.2612612613,0.9652681251], [0.2622622623,0.9649966351], [0.2632651100,0.9647235261], [0.2642654661,0.9644499798], [0.2652680004,0.9641747186], [0.2662662663,0.9638995152], [0.2672677791,0.9636222986], [0.2682682683,0.9633442460], [0.2692696608,0.9630648212], [0.2702702703,0.9627844935], [0.2712712713,0.9625029337], [0.2722722723,0.9622202501], [0.2732732733,0.9619364418], [0.2742742743,0.9616515078], [0.2752752753,0.9613654471], [0.2762762763,0.9610782586], [0.2772788149,0.9607894976], [0.2782782783,0.9605004944], [0.2792792793,0.9602099167], [0.2802802803,0.9599182072], [0.2812812813,0.9596253648], [0.2822822823,0.9593313886], [0.2832832833,0.9590362774], [0.2842849564,0.9587398310], [0.2852852853,0.9584426462], [0.2862867741,0.9581439782], [0.2872872873,0.9578444626], [0.2882882883,0.9575436611], [0.2892892893,0.9572417182], [0.2902902903,0.9569386330], [0.2912917893,0.9566342527], [0.2922930540,0.9563287984], [0.2932932933,0.9560225123], [0.2942966170,0.9557141315], [0.2952971643,0.9554054557], [0.2962962963,0.9550960710], [0.2972976632,0.9547848446], [0.2982994581,0.9544723324], [0.2993013384,0.9541586392], [0.3003003003,0.9538447094], [0.3013013013,0.9535289853], [0.3023023023,0.9532121055], [0.3033033033,0.9528940687], [0.3043053572,0.9525745375], [0.3053053053,0.9522545198], [0.3063080000,0.9519324604], [0.3073089810,0.9516097888], [0.3083083083,0.9512864905], [0.3093096236,0.9509613855], [0.3103103103,0.9506353198], [0.3113113113,0.9503079856], [0.3123123123,0.9499794838], [0.3133133133,0.9496498132], [0.3143143969,0.9493189453], [0.3153153153,0.9489869609], [0.3163163163,0.9486537767], [0.3173182203,0.9483191167], [0.3183188165,0.9479837188], [0.3193193193,0.9476471771], [0.3203219689,0.9473087333], 
[0.3213213213,0.9469702258], [0.3223223223,0.9466299808], [0.3233233233,0.9462885546], [0.3243243243,0.9459459459], [0.3253264361,0.9456017713], [0.3263263263,0.9452571760], [0.3273278545,0.9449108295], [0.3283283283,0.9445636605], [0.3293293293,0.9442151200], [0.3303303303,0.9438653892], [0.3313313313,0.9435144667], [0.3323345267,0.9431615781], [0.3333363614,0.9428079710], [0.3343376385,0.9424533641], [0.3353359355,0.9420986203], [0.3363363363,0.9417419333], [0.3373373373,0.9413838329], [0.3383383383,0.9410245315], [0.3393393393,0.9406640276], [0.3403403403,0.9403023199], [0.3413413413,0.9399394069], [0.3423426276,0.9395751834], [0.3433447556,0.9392094435], [0.3443443443,0.9388434228], [0.3453453453,0.9384756749], [0.3463469230,0.9381065019], [0.3473473473,0.9377365410], [0.3483492393,0.9373648209], [0.3493493493,0.9369925465], [0.3503505674,0.9366186417], [0.3513513514,0.9362436798], [0.3523523524,0.9358674157], [0.3533533534,0.9354899292], [0.3543553204,0.9351108528], [0.3553562021,0.9347309611], [0.3563563564,0.9343501203], [0.3573595451,0.9339668921], [0.3583592842,0.9335837528], [0.3593610561,0.9331986023], [0.3603608116,0.9328129960], [0.3613613614,0.9324258504], [0.3623623624,0.9320372945], [0.3633633634,0.9316475010], [0.3643643644,0.9312564684], [0.3653653654,0.9308641951], [0.3663687537,0.9304697396], [0.3673682839,0.9300755582], [0.3683695078,0.9296794640], [0.3693701930,0.9292823363], [0.3703703704,0.9288841633], [0.3713713714,0.9284844126], [0.3723723724,0.9280834102], [0.3733742155,0.9276808153], [0.3743743744,0.9272776433], [0.3753753754,0.9268728756], [0.3763769050,0.9264666348], [0.3773787553,0.9260590019], [0.3783783784,0.9256510157], [0.3793793794,0.9252412045], [0.3803804304,0.9248301077], [0.3813817812,0.9244176204], [0.3823850997,0.9240030495], [0.3833864307,0.9235880276], [0.3843850740,0.9231728521], [0.3853853854,0.9227557124], [0.3863863864,0.9223370102], [0.3873888039,0.9219164358], [0.3883883884,0.9214957731], 
[0.3893901834,0.9210728989], [0.3903903904,0.9206494138], [0.3913913914,0.9202243089], [0.3923923924,0.9197979182], [0.3933933934,0.9193702399], [0.3943943944,0.9189412722], [0.3953972258,0.9185102252], [0.3963992697,0.9180782205], [0.3973994635,0.9176457194], [0.3984020005,0.9172109060], [0.3994007345,0.9167764467], [0.4004009036,0.9163400659], [0.4014014014,0.9159022409], [0.4024024024,0.9154628920], [0.4034034034,0.9150222370], [0.4044044044,0.9145802741], [0.4054054054,0.9141370014], [0.4064064064,0.9136924170], [0.4074074074,0.9132465190], [0.4084089819,0.9127990488], [0.4094094094,0.9123507744], [0.4104128790,0.9118998129], [0.4114124307,0.9114492920], [0.4124124124,0.9109972569], [0.4134134134,0.9105434364], [0.4144152103,0.9100879262], [0.4154170659,0.9096310578], [0.4164164164,0.9091740032], [0.4174174174,0.9087148616], [0.4184184184,0.9082543846], [0.4194194194,0.9077925703], [0.4204204204,0.9073294165], [0.4214221831,0.9068645674], [0.4224237180,0.9063984789], [0.4234234234,0.9059318984], [0.4244250516,0.9054630724], [0.4254254254,0.9049934847], [0.4264264264,0.9045222512], [0.4274287698,0.9040490290], [0.4284284284,0.9035757200], [0.4294303298,0.9030999900], [0.4304320727,0.9026229727], [0.4314314314,0.9021457310], [0.4324335813,0.9016657905], [0.4334334334,0.9011855851], [0.4344353513,0.9007030174], [0.4354354354,0.9002199629], [0.4364364364,0.8997350927], [0.4374385065,0.8992483267], [0.4384397109,0.8987606021], [0.4394394394,0.8982722188], [0.4404404404,0.8977818323], [0.4414416607,0.8972899532], [0.4424424424,0.8967969029], [0.4434434434,0.8963023555], [0.4444444444,0.8958064165], [0.4454454454,0.8953090836], [0.4464464464,0.8948103545], [0.4474474474,0.8943102268], [0.4484484484,0.8938086983], [0.4494497560,0.8933056122], [0.4504504505,0.8928014290], [0.4514514515,0.8922956836], [0.4524524525,0.8917885278], [0.4534535412,0.8912799145], [0.4544550743,0.8907696590], [0.4554554555,0.8902585737], [0.4564564565,0.8897457521], 
[0.4574574575,0.8892315079], [0.4584584585,0.8887158387], [0.4594608197,0.8881980383], [0.4604611149,0.8876798757], [0.4614624797,0.8871597262], [0.4624641660,0.8866379730], [0.4634634635,0.8861160297], [0.4644650912,0.8855914290], [0.4654654655,0.8850660430], [0.4664674311,0.8845383744], [0.4674674675,0.8840102753], [0.4684684685,0.8834802171], [0.4694704188,0.8829482011], [0.4704704705,0.8824157390], [0.4714719089,0.8818810799], [0.4724733641,0.8813449496], [0.4734749367,0.8808072913], [0.4744752965,0.8802688186], [0.4754756692,0.8797288718], [0.4764775552,0.8791866351], [0.4774786626,0.8786433445], [0.4784784785,0.8780992801], [0.4794794795,0.8775530917], [0.4804804805,0.8770054207], [0.4814822202,0.8764558584], [0.4824824825,0.8759056194], [0.4834834835,0.8753534836], [0.4844844845,0.8747998538], [0.4854854855,0.8742447274], [0.4864873961,0.8736875949], [0.4874886019,0.8731293507], [0.4884897154,0.8725696522], [0.4894894895,0.8720091970], [0.4904904905,0.8714465438], [0.4914914915,0.8708823766], [0.4924931862,0.8703162997], [0.4934938730,0.8697492727], [0.4944954278,0.8691802298], [0.4954954955,0.8686105076], [0.4964964965,0.8680387255], [0.4974975104,0.8674654040], [0.4984994477,0.8668900164], [0.4995003265,0.8663136983], [0.5005016274,0.8657355953], [0.5015035026,0.8651556142], [0.5025037718,0.8645750166], [0.5035035189,0.8639931750], [0.5045045045,0.8634090600], [0.5055067750,0.8628226356], [0.5065065065,0.8622361387], [0.5075075075,0.8616473349], [0.5085085085,0.8610569649], [0.5095099539,0.8604647621], [0.5105105105,0.8598715129], [0.5115122721,0.8592759717], [0.5125132777,0.8586792999], [0.5135135135,0.8580815063], [0.5145145145,0.8574816700], [0.5155155155,0.8568802444], [0.5165165165,0.8562772262], [0.5175177849,0.8556724503], [0.5185185185,0.8550663986], [0.5195195195,0.8544585823], [0.5205215857,0.8538485105], [0.5215215215,0.8532381277], [0.5225225225,0.8526254825], [0.5235235235,0.8520112208], [0.5245245245,0.8513953389], 
[0.5255255255,0.8507778335], [0.5265265265,0.8501587010], [0.5275277980,0.8495377698], [0.5285285285,0.8489155403], [0.5295295295,0.8482915049], [0.5305311399,0.8476654467], [0.5315319825,0.8470382232], [0.5325333534,0.8464090190], [0.5335347073,0.8457781719], [0.5345345345,0.8451466331], [0.5355355355,0.8445126939], [0.5365365365,0.8438770911], [0.5375375375,0.8432398210], [0.5385389105,0.8426006420], [0.5395411221,0.8419592494], [0.5405413821,0.8413174278], [0.5415420331,0.8406736741], [0.5425425425,0.8400283266], [0.5435435435,0.8393809721], [0.5445445356,0.8387319290], [0.5455455455,0.8380811761], [0.5465465465,0.8374287268], [0.5475475475,0.8367745713], [0.5485491382,0.8361183188], [0.5495495495,0.8354611257], [0.5505517055,0.8348010659], [0.5515520726,0.8341404625], [0.5525525526,0.8334780601], [0.5535535536,0.8328135826], [0.5545556718,0.8321466258], [0.5555555556,0.8314794193], [0.5565565566,0.8308097251], [0.5575575576,0.8301382837], [0.5585589897,0.8294648004], [0.5595600109,0.8287898372], [0.5605607480,0.8281133061], [0.5615615616,0.8274349597], [0.5625625626,0.8267547177], [0.5635635636,0.8260727025], [0.5645648651,0.8253887042], [0.5655655656,0.8247033352], [0.5665665666,0.8240159741], [0.5675675676,0.8233268223], [0.5685685686,0.8226358750], [0.5695695696,0.8219431279], [0.5705705706,0.8212485763], [0.5715715716,0.8205522156], [0.5725737337,0.8198532305], [0.5735735736,0.8191540488], [0.5745745746,0.8184522333], [0.5755755756,0.8177485902], [0.5765765766,0.8170431147], [0.5775775776,0.8163358022], [0.5785794568,0.8156260247], [0.5795795796,0.8149156465], [0.5805814353,0.8142021843], [0.5815815816,0.8134880847], [0.5825825826,0.8127715143], [0.5835848928,0.8120521368], [0.5845855244,0.8113320927], [0.5855855856,0.8106105859], [0.5865865866,0.8098865207], [0.5875875876,0.8091605693], [0.5885893149,0.8084321977], [0.5895896677,0.8077029304], [0.5905905906,0.8069713466], [0.5915915916,0.8062377991], [0.5925936476,0.8055015635], 
[0.5935947019,0.8047641455], [0.5945968537,0.8040239932], [0.5955962815,0.8032839283], [0.5965965966,0.8025412768], [0.5975982942,0.8017956590], [0.5985985986,0.8010491357], [0.5995995996,0.8003001438], [0.6006006006,0.7995491971], [0.6016018645,0.7987960920], [0.6026026026,0.7980414171], [0.6036036036,0.7972845726], [0.6046046046,0.7965257511], [0.6056056056,0.7957649467], [0.6066066066,0.7950021540], [0.6076076076,0.7942373670], [0.6086086086,0.7934705801], [0.6096096096,0.7927017875], [0.6106106106,0.7919309832], [0.6116116116,0.7911581615], [0.6126126126,0.7903833164], [0.6136136136,0.7896064420], [0.6146152013,0.7888270751], [0.6156156156,0.7880465810], [0.6166175888,0.7872628209], [0.6176176176,0.7864785302], [0.6186192566,0.7856909159], [0.6196196196,0.7849022404], [0.6206206206,0.7841109904], [0.6216216216,0.7833176620], [0.6226226226,0.7825222488], [0.6236236240,0.7817247442], [0.6246246246,0.7809251426], [0.6256256256,0.7801234367], [0.6266266266,0.7793196204], [0.6276280342,0.7785133593], [0.6286286286,0.7777056302], [0.6296296296,0.7768954431], [0.6306312736,0.7760825966], [0.6316316316,0.7752686515], [0.6326326326,0.7744520335], [0.6336348665,0.7736322486], [0.6346346346,0.7728123191], [0.6356361249,0.7719888061], [0.6366375449,0.7711631711], [0.6376376376,0.7703364480], [0.6386386386,0.7695067831], [0.6396396396,0.7686749192], [0.6406408378,0.7678406846], [0.6416425329,0.7670038201], [0.6426426426,0.7661660615], [0.6436436436,0.7653253295], [0.6446446446,0.7644823622], [0.6456458656,0.7636369663], [0.6466479317,0.7627886027], [0.6476482022,0.7619395029], [0.6486486486,0.7610879914], [0.6496496496,0.7602337356], [0.6506506507,0.7593771993], [0.6516527191,0.7585174578], [0.6526530179,0.7576569397], [0.6536536537,0.7567938300], [0.6546546547,0.7559280939], [0.6556568938,0.7550589630], [0.6566575390,0.7541888865], [0.6576576577,0.7533169355], [0.6586586587,0.7524418724], [0.6596596597,0.7515644573], [0.6606606607,0.7506846818], 
[0.6616629033,0.7498014420], [0.6626635918,0.7489171944], [0.6636667064,0.7480284104], [0.6646659255,0.7471406879], [0.6656668809,0.7462490225], [0.6666666667,0.7453559925], [0.6676676677,0.7444594586], [0.6686686687,0.7435604962], [0.6696704909,0.7426583560], [0.6706728119,0.7417533143], [0.6716716717,0.7408489492], [0.6726741610,0.7399388307], [0.6736754328,0.7390273413], [0.6746746747,0.7381152236], [0.6756757933,0.7371989028], [0.6766781542,0.7362789387], [0.6776776777,0.7353590723], [0.6786786787,0.7344353281], [0.6796796797,0.7335090545], [0.6806814878,0.7325794920], [0.6816816817,0.7316488809], [0.6826847773,0.7307130044], [0.6836872970,0.7297750886], [0.6846856371,0.7288385133], [0.6856856857,0.7278977541], [0.6866866867,0.7269535022], [0.6876876877,0.7260066420], [0.6886886887,0.7250571633], [0.6896906033,0.7241041857], [0.6906906907,0.7231503093], [0.6916927176,0.7221919305], [0.6926926927,0.7212328566], [0.6936936937,0.7202701294], [0.6946960628,0.7193033994], [0.6956978996,0.7183344851], [0.6966972731,0.7173652554], [0.6976976977,0.7163922966], [0.6986998634,0.7154149152], [0.6996996997,0.7144370723], [0.7007007007,0.7134553441], [0.7017017017,0.7124708568], [0.7027027027,0.7114835990], [0.7037040193,0.7104932464], [0.7047047047,0.7095007253], [0.7057057057,0.7085050860], [0.7067076778,0.7075056594], [0.7077090516,0.7065039974], [0.7087087087,0.7055012163], [0.7097097097,0.7044942356], [0.7107116143,0.7034834762], [0.7117117117,0.7024716645], [0.7127127127,0.7014560493], [0.7137148537,0.7004363694], [0.7147147147,0.6994160969], [0.7157157157,0.6983917341], [0.7167167167,0.6973644298], [0.7177177177,0.6963341710], [0.7187187187,0.6953009445], [0.7197201077,0.6942643348], [0.7207218083,0.6932244045], [0.7217238051,0.6921811534], [0.7227227227,0.6911380948], [0.7237237237,0.6900898287], [0.7247247247,0.6890385137], [0.7257261578,0.6879836800], [0.7267267267,0.6869266807], [0.7277277277,0.6858661344], [0.7287291544,0.6848020295], 
[0.7297310644,0.6837342859], [0.7307318073,0.6826646511], [0.7317330380,0.6815913447], [0.7327327327,0.6805165262], [0.7337343109,0.6794365026], [0.7347351592,0.6783540711], [0.7357357357,0.6772687260], [0.7367381340,0.6761781732], [0.7377377377,0.6750874242], [0.7387387387,0.6739918960], [0.7397406562,0.6728920876], [0.7407407407,0.6717910055], [0.7417425852,0.6706846781], [0.7427440214,0.6695754765], [0.7437437437,0.6684648410], [0.7447447447,0.6673494326], [0.7457463363,0.6662299917], [0.7467467467,0.6651084846], [0.7477477477,0.6639829107], [0.7487502106,0.6628522626], [0.7497497497,0.6617214767], [0.7507507508,0.6605855813], [0.7517525774,0.6594452687], [0.7527527528,0.6583033444], [0.7537537538,0.6571569666], [0.7547547548,0.6560070580], [0.7557557558,0.6548536002], [0.7567572359,0.6536960195], [0.7577577578,0.6525359611], [0.7587587588,0.6513717418], [0.7597597598,0.6502038968], [0.7607617770,0.6490312155], [0.7617629059,0.6478559062], [0.7627635738,0.6466774548], [0.7637637638,0.6454958661], [0.7647658552,0.6443083010], [0.7657657658,0.6431195783], [0.7667667668,0.6419257943], [0.7677677678,0.6407282222], [0.7687687688,0.6395268408], [0.7697703971,0.6383208721], [0.7707707708,0.6371125638], [0.7717717718,0.6358996244], [0.7727727728,0.6346827882], [0.7737737738,0.6334620328], [0.7747747748,0.6322373355], [0.7757757758,0.6310086733], [0.7767767768,0.6297760229], [0.7777783334,0.6285386735], [0.7787787788,0.6272986639], [0.7797797798,0.6260539075], [0.7807820152,0.6248035249], [0.7817817818,0.6235521195], [0.7827827828,0.6222950385], [0.7837837838,0.6210337996], [0.7847863227,0.6197664300], [0.7857867449,0.6184975275], [0.7867867868,0.6172248797], [0.7877884300,0.6159459307], [0.7887887888,0.6146643366], [0.7897897898,0.6133776063], [0.7907926824,0.6120840902], [0.7917931700,0.6107893057], [0.7927931709,0.6094907614], [0.7937937938,0.6081869885], [0.7947952646,0.6068776543], [0.7957974082,0.6055629489], [0.7967972425,0.6042467661], 
[0.7977997788,0.6029224767], [0.7987992098,0.6015977248], [0.7997997998,0.6002668409], [0.8008025015,0.5989285046], [0.8018018018,0.5975900523], [0.8028029418,0.5962444436], [0.8038038038,0.5948944822], [0.8048048048,0.5935395742], [0.8058058058,0.5921798741], [0.8068077651,0.5908140403], [0.8078095627,0.5894435600], [0.8088105678,0.5880692693], [0.8098138065,0.5866869683], [0.8108114218,0.5853074733], [0.8118136242,0.5839166374], [0.8128128128,0.5825249620], [0.8138138138,0.5811256976], [0.8148148148,0.5797213275], [0.8158169878,0.5783101611], [0.8168184406,0.5768948215], [0.8178186541,0.5754760195], [0.8188188188,0.5740520377], [0.8198198198,0.5726215705], [0.8208208208,0.5711857667], [0.8218218218,0.5697445859], [0.8228238876,0.5682964455], [0.8238243384,0.5668451813], [0.8248248248,0.5653883695], [0.8258258258,0.5639252658], [0.8268275482,0.5624555142], [0.8278292609,0.5609801376], [0.8288298715,0.5595007096], [0.8298298298,0.5580165352], [0.8308308308,0.5565250493], [0.8318371804,0.5550197342], [0.8328370086,0.5535183078], [0.8338364467,0.5520115762], [0.8348366113,0.5504978043], [0.8358384509,0.5489754857], [0.8368390892,0.5474489372], [0.8378378378,0.5459191859], [0.8388388388,0.5443798329], [0.8398414856,0.5428317226], [0.8408438187,0.5412778146], [0.8418439693,0.5397209755], [0.8428428428,0.5381597739], [0.8438438438,0.5365888251], [0.8448452503,0.5350107504], [0.8458458458,0.5334274131], [0.8468468468,0.5318368340], [0.8478487676,0.5302381231], [0.8488498054,0.5286340964], [0.8498498498,0.5270248881], [0.8508508509,0.5254072988], [0.8518541147,0.5237791207], [0.8528550532,0.5221477360], [0.8538538539,0.5205128205], [0.8548555562,0.5188660502], [0.8558578594,0.5172111024], [0.8568570450,0.5155540751], [0.8578597590,0.5138838720], [0.8588588589,0.5122123198], [0.8598598599,0.5105301376], [0.8608643134,0.5088345840], [0.8618632894,0.5071406810], [0.8628648541,0.5054347076], [0.8638671547,0.5037197029], [0.8648698076,0.5019962309], 
[0.8658694806,0.5002699697], [0.8668740170,0.4985272697], [0.8678697721,0.4967917659], [0.8688688689,0.4950423100], [0.8698722052,0.4932771499], [0.8708724574,0.4915090670], [0.8718718719,0.4897340493], [0.8728733747,0.4879467919], [0.8738759654,0.4861489453], [0.8748748749,0.4843489995], [0.8758758759,0.4825364754], [0.8768797248,0.4807098379], [0.8778795992,0.4788814147], [0.8788788789,0.4770449835], [0.8798798799,0.4751961668], [0.8808831295,0.4733338274], [0.8818854618,0.4714637127], [0.8828865393,0.4695863699], [0.8838866019,0.4677012668], [0.8848848849,0.4658097686], [0.8858858859,0.4639032196], [0.8868868869,0.4619866339], [0.8878878879,0.4600598858], [0.8888910506,0.4581186529], [0.8898922851,0.4561707147], [0.8908921884,0.4542148265], [0.8918918919,0.4522486630], [0.8928928929,0.4502691215], [0.8938952094,0.4482759803], [0.8948967283,0.4462732860], [0.8958976713,0.4442604670], [0.8968968969,0.4422397046], [0.8979013757,0.4401966827], [0.8989033242,0.4381470229], [0.8999065407,0.4360828109], [0.9009078226,0.4340104782], [0.9019070573,0.4319301564], [0.9029043232,0.4298415792], [0.9039052369,0.4277327703], [0.9049049049,0.4256138074], [0.9059059059,0.4234790309], [0.9069073164,0.4213301786], [0.9079106989,0.4191636468], [0.9089089089,0.4169947186], [0.9099123421,0.4148005903], [0.9109118139,0.4126010994], [0.9119119119,0.4103859950], [0.9129153075,0.4081490430], [0.9139171372,0.4059008085], [0.9149181522,0.4036394119], [0.9159183186,0.4013647140], [0.9169169169,0.3990781471], [0.9179199782,0.3967655650], [0.9189243362,0.3944338530], [0.9199199199,0.3921062878], [0.9209237083,0.3897428942], [0.9219256800,0.3873668036], [0.9229272591,0.3849743816], [0.9239276539,0.3825672364], [0.9249261890,0.3801467414], [0.9259296651,0.3776959826], [0.9269332918,0.3752261618], [0.9279340293,0.3727444664], [0.9289356506,0.3702412148], [0.9299412641,0.3677080981], [0.9309390654,0.3651745561], [0.9319389049,0.3626153299], [0.9329391161,0.3600341729], 
[0.9339474741,0.3574102904], [0.9349400788,0.3548056497], [0.9359359359,0.3521703051], [0.9369418354,0.3494853316], [0.9379434473,0.3467882491], [0.9389480540,0.3440589367], [0.9399461273,0.3413228350], [0.9409458343,0.3385571399], [0.9419450762,0.3357669929], [0.9429524712,0.3329273751], [0.9439566796,0.3300693671], [0.9449532058,0.3272054994], [0.9459634153,0.3242733677], [0.9469570607,0.3213601176], [0.9479562419,0.3184006336], [0.9489550553,0.3154113235], [0.9499623323,0.3123644780], [0.9509578453,0.3093205078], [0.9519580020,0.3062286115], [0.9529605489,0.3030943619], [0.9539580838,0.2999399514], [0.9549602688,0.2967336937], [0.9559631744,0.2934866424], [0.9569620350,0.2902131348], [0.9579638589,0.2868889072], [0.9589662177,0.2835203579], [0.9599641443,0.2801229046], [0.9609627600,0.2766777437], [0.9619627470,0.2731806607], [0.9629684589,0.2696140707], [0.9639745872,0.2659943518], [0.9649749823,0.2623419211], [0.9659803434,0.2586154985], [0.9669725468,0.2548805479], [0.9679833242,0.2510145097], [0.9690027864,0.2470497925], [0.9699937542,0.2431298355], [0.9709965510,0.2390934921], [0.9719965453,0.2349951404], [0.9730039315,0.2307885378], [0.9740234618,0.2264471149], [0.9750130104,0.2221477650], [0.9760117713,0.2177177584], [0.9770276696,0.2131124886], [0.9780334118,0.2084481841], [0.9790432917,0.2036522355], [0.9800456180,0.1987727009], [0.9810352692,0.1938293078], [0.9820827955,0.1884499477], [0.9830927312,0.1831083882], [0.9840943996,0.1776463136], [0.9851091672,0.1719300108], [0.9861321242,0.1659621451], [0.9871738308,0.1596490769], [0.9882408359,0.1529053638], [0.9892959260,0.1459231674], [0.9903307490,0.1387263765], [0.9913727364,0.1310728709], [0.9924590538,0.1225766151], [0.9935239974,0.1136224742], [0.9945787371,0.1039862289], [0.9956080275,0.0936197396], [0.9966472117,0.0818189182], [0.9976157780,0.0690127481], [0.9985299609,0.0542025562], [0.9993397922,0.0363315259], 
[1.0000000000,0.0000000000]]././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/pareto_front/zdt1_front.json0000644000076500000240000007051014456461441021572 0ustar00runnerstaff[[0.0000000001,0.9999894843], [0.0002103152,0.9854977511], [0.0006297138,0.9749059012], [0.0011612996,0.9659221532], [0.0017723350,0.9579008909], [0.0024447099,0.9505559926], [0.0031687919,0.9437079768], [0.0039380100,0.9372464341], [0.0047489797,0.9310871584], [0.0055918795,0.9252211292], [0.0064691896,0.9195687275], [0.0073740671,0.9141276114], [0.0082978126,0.9089076698], [0.0092456824,0.9038455287], [0.0102191774,0.8989100531], [0.0112003220,0.8941684261], [0.0122039498,0.8895285116], [0.0132172119,0.8850338664], [0.0142420309,0.8806600198], [0.0152859804,0.8763635151], [0.0163211988,0.8722455529], [0.0173715220,0.8681989301], [0.0184382204,0.8642125910], [0.0194878329,0.8604011717], [0.0205429616,0.8566718395], [0.0216119982,0.8529898023], [0.0226874279,0.8493765361], [0.0237651448,0.8458405214], [0.0248553652,0.8423441558], [0.0259467988,0.8389198994], [0.0270374622,0.8355692784], [0.0281393522,0.8322521170], [0.0292192881,0.8290634968], [0.0302957541,0.8259432445], [0.0313819561,0.8228504697], [0.0324638454,0.8198227390], [0.0335461426,0.8168439391], [0.0346247771,0.8139226584], [0.0357119454,0.8110239556], [0.0367916279,0.8081885616], [0.0378793499,0.8053738202], [0.0389433189,0.8026593834], [0.0399997201,0.8000006998], [0.0410631898,0.7973594567], [0.0421245632,0.7947573067], [0.0431927077,0.7921714464], [0.0442485743,0.7896465491], [0.0453108161,0.7871366259], [0.0463678279,0.7846680983], [0.0474309334,0.7822135602], [0.0484718366,0.7798367956], [0.0495183888,0.7774727236], [0.0505548969,0.7751558387], [0.0515956857,0.7728531626], [0.0526417769,0.7705620413], [0.0536804267,0.7683096317], [0.0547163017,0.7660848407], [0.0557571264,0.7638705304], [0.0567949321,0.7616831268], [0.0578289419,0.7595235108], 
[0.0588553007,0.7573988856], [0.0598860753,0.7552836840], [0.0609083639,0.7532038009], [0.0619349783,0.7511326089], [0.0629606370,0.7490804173], [0.0639905072,0.7470365498], [0.0650075274,0.7450342624], [0.0660281646,0.7430405391], [0.0670527767,0.7410544909], [0.0680657495,0.7391058653], [0.0690825195,0.7371644630], [0.0700995404,0.7352368221], [0.0711165289,0.7333231751], [0.0721371795,0.7314163454], [0.0731538057,0.7295303978], [0.0741739887,0.7276509800], [0.0751937914,0.7257851364], [0.0762103252,0.7239378237], [0.0772231944,0.7221093842], [0.0782394068,0.7202869206], [0.0792528891,0.7184811034], [0.0802645371,0.7166900335], [0.0812793931,0.7149045895], [0.0822923094,0.7131336384], [0.0833020611,0.7113790356], [0.0843148914,0.7096297340], [0.0853240314,0.7078972246], [0.0863361730,0.7061698228], [0.0873494798,0.7044505459], [0.0883580677,0.7027491503], [0.0893676528,0.7010557698], [0.0903801054,0.6993671585], [0.0913873872,0.6976965312], [0.0923974601,0.6960304948], [0.0934058712,0.6943762587], [0.0944170189,0.6927264754], [0.0954268425,0.6910876460], [0.0964393518,0.6894531407], [0.0974469856,0.6878350026], [0.0984532027,0.6862274667], [0.0994620038,0.6846240278], [0.1004686769,0.6830320570], [0.1014778843,0.6814440642], [0.1024834037,0.6798697083], [0.1034889593,0.6783030008], [0.1044969695,0.6767400899], [0.1055029432,0.6751878340], [0.1065113264,0.6736392695], [0.1075171046,0.6721019905], [0.1085246735,0.6705691674], [0.1095345919,0.6690398938], [0.1105412889,0.6675224987], [0.1115502884,0.6660085504], [0.1125586220,0.6645024262], [0.1135692240,0.6629996676], [0.1145703720,0.6615175455], [0.1155736397,0.6600387673], [0.1165790942,0.6585631915], [0.1175826917,0.6570966730], [0.1185884398,0.6556332772], [0.1195925525,0.6541784384], [0.1205987817,0.6527266470], [0.1216048952,0.6512810657], [0.1226117877,0.6498403398], [0.1236138187,0.6484124309], [0.1246178887,0.6469874100], [0.1256223393,0.6455675814], [0.1266288059,0.6441505854], 
[0.1276331204,0.6427422214], [0.1286375304,0.6413392544], [0.1296439087,0.6399390208], [0.1306492659,0.6385456241], [0.1316565646,0.6371549028], [0.1326627997,0.6357709516], [0.1336661888,0.6343961312], [0.1346714681,0.6330238861], [0.1356757096,0.6316581621], [0.1366794375,0.6302981775], [0.1376834503,0.6289427937], [0.1386893002,0.6275898763], [0.1396927198,0.6262451073], [0.1406979479,0.6249027488], [0.1417017801,0.6235670311], [0.1427051064,0.6222367059], [0.1437102024,0.6209087150], [0.1447137172,0.6195874381], [0.1457167842,0.6182713212], [0.1467215834,0.6169574653], [0.1477250332,0.6156498560], [0.1487301929,0.6143444634], [0.1497321205,0.6130476509], [0.1507357298,0.6117530041], [0.1517373003,0.6104652772], [0.1527405289,0.6091796718], [0.1537442813,0.6078976138], [0.1547476447,0.6066202285], [0.1557526398,0.6053449104], [0.1567551035,0.6040768970], [0.1577568569,0.6028138259], [0.1587602057,0.6015527567], [0.1597621685,0.6002973999], [0.1607657072,0.5990440084], [0.1617683085,0.5977956881], [0.1627724681,0.5965492990], [0.1637763198,0.5953071291], [0.1647798412,0.5940691670], [0.1657848951,0.5928330870], [0.1667889969,0.5916019137], [0.1677946146,0.5903725906], [0.1687971345,0.5891507156], [0.1698011474,0.5879306521], [0.1708037274,0.5867159240], [0.1718067926,0.5855041706], [0.1728113261,0.5842941832], [0.1738141690,0.5830897351], [0.1748184625,0.5818870218], [0.1758228774,0.5806876136], [0.1768242806,0.5794952073], [0.1778252033,0.5783067427], [0.1788275384,0.5771199480], [0.1798308764,0.5759352922], [0.1808348055,0.5747532416], [0.1818367667,0.5735767752], [0.1828401120,0.5724019271], [0.1838421355,0.5712318395], [0.1848455281,0.5700633441], [0.1858496519,0.5688971679], [0.1868523609,0.5677357742], [0.1878564187,0.5665759366], [0.1888596307,0.5654201677], [0.1898641784,0.5642659315], [0.1908653578,0.5631185998], [0.1918678536,0.5619727707], [0.1928709340,0.5608292655], [0.1938736345,0.5596891615], [0.1948776348,0.5585505297], 
[0.1958799520,0.5574167287], [0.1968825267,0.5562855348], [0.1978863809,0.5551557791], [0.1988885986,0.5540307201], [0.1998908284,0.5529084787], [0.2008943177,0.5517876422], [0.2018966761,0.5506708599], [0.2029002818,0.5495554620], [0.2039023420,0.5484445305], [0.2049056364,0.5473349622], [0.2059090098,0.5462280200], [0.2069098050,0.5451266055], [0.2079116420,0.5440267091], [0.2089146888,0.5429281361], [0.2099165426,0.5418334991], [0.2109195946,0.5407401666], [0.2119213054,0.5396508875], [0.2129237064,0.5385634318], [0.2139272900,0.5374772546], [0.2149292816,0.5363953391], [0.2159324438,0.5353146830], [0.2169335265,0.5342387666], [0.2179354426,0.5331644373], [0.2189385128,0.5320913414], [0.2199411096,0.5310212056], [0.2209443987,0.5299527698], [0.2219479223,0.5288865080], [0.2229497945,0.5278244029], [0.2239527947,0.5267634896], [0.2249543898,0.5257064308], [0.2259571022,0.5246505473], [0.2269598923,0.5235969225], [0.2279637924,0.5225444603], [0.2289652239,0.5214968925], [0.2299677528,0.5204504689], [0.2309696262,0.5194070056], [0.2319710025,0.5183663192], [0.2329726679,0.5173275771], [0.2339754122,0.5162899502], [0.2349757427,0.5152570344], [0.2359771401,0.5142252167], [0.2369784365,0.5131956897], [0.2379807928,0.5121672492], [0.2389819466,0.5111422021], [0.2399841512,0.5101182274], [0.2409854406,0.5090973206], [0.2419877723,0.5080774732], [0.2429890820,0.5070607725], [0.2439914253,0.5060451181], [0.2449932324,0.5050320895], [0.2459960658,0.5040200954], [0.2469978573,0.5030112101], [0.2480006667,0.5020033467], [0.2490023296,0.5009986678], [0.2500037119,0.4999962881], [0.2510060990,0.4989949112], [0.2520081964,0.4979958203], [0.2530090762,0.4969999242], [0.2540106605,0.4960052971], [0.2550132341,0.4950116496], [0.2560153554,0.4940204002], [0.2570169550,0.4930316035], [0.2580190482,0.4920442458], [0.2590211470,0.4910587981], [0.2600222448,0.4900762363], [0.2610243081,0.4890946192], [0.2620263758,0.4881148803], [0.2630278416,0.4871375997], 
[0.2640302625,0.4861612486], [0.2650327522,0.4851866822], [0.2660342736,0.4842148959], [0.2670367394,0.4832440234], [0.2680384765,0.4822756752], [0.2690402992,0.4813090524], [0.2700420822,0.4803442657], [0.2710447960,0.4793803730], [0.2720467529,0.4784189872], [0.2730496341,0.4774584857], [0.2740510135,0.4765011810], [0.2750524520,0.4755455673], [0.2760548037,0.4745908226], [0.2770557516,0.4736391432], [0.2780568760,0.4726890140], [0.2790589031,0.4717397393], [0.2800605035,0.4707925704], [0.2810630011,0.4698462477], [0.2820640714,0.4689029548], [0.2830660316,0.4679604981], [0.2840671392,0.4670205077], [0.2850691304,0.4660813447], [0.2860712583,0.4651437032], [0.2870727459,0.4642082999], [0.2880751084,0.4632737118], [0.2890765211,0.4623416316], [0.2900778233,0.4614112670], [0.2910799911,0.4604817046], [0.2920820420,0.4595538491], [0.2930830765,0.4586285227], [0.2940849672,0.4577039856], [0.2950867089,0.4567811593], [0.2960893022,0.4558591155], [0.2970902136,0.4549401743], [0.2980913730,0.4540225527], [0.2990933745,0.4531057008], [0.3000947068,0.4521909942], [0.3010968758,0.4512770500], [0.3020989432,0.4503647181], [0.3031018428,0.4494531420], [0.3041017718,0.4485457663], [0.3051025241,0.4476391359], [0.3061030785,0.4467341702], [0.3071044517,0.4458299433], [0.3081053668,0.4449276022], [0.3091070962,0.4440259933], [0.3101083583,0.4431262636], [0.3111099191,0.4422277176], [0.3121122872,0.4413298941], [0.3131149498,0.4404332481], [0.3141158017,0.4395396520], [0.3151174522,0.4386467670], [0.3161181726,0.4377561271], [0.3171196864,0.4368661914], [0.3181217641,0.4359771599], [0.3191235216,0.4350898110], [0.3201260665,0.4342031579], [0.3211272782,0.4333190685], [0.3221292714,0.4324356676], [0.3231305050,0.4315543078], [0.3241321845,0.4306739207], [0.3251346391,0.4297942134], [0.3261355068,0.4289172505], [0.3271364512,0.4280415651], [0.3281381625,0.4271665490], [0.3291396056,0.4262931013], [0.3301418116,0.4254203175], [0.3311425891,0.4245500986], 
[0.3321436033,0.4236809883], [0.3331453728,0.4228125324], [0.3341459404,0.4219464208], [0.3351472582,0.4210809571], [0.3361489823,0.4202164350], [0.3371507677,0.4193531471], [0.3381524780,0.4184912056], [0.3391549311,0.4176299019], [0.3401567307,0.4167704305], [0.3411582977,0.4159124229], [0.3421606009,0.4150550446], [0.3431622914,0.4141994441], [0.3441647139,0.4133444674], [0.3451667125,0.4124910958], [0.3461677626,0.4116397680], [0.3471695375,0.4107890552], [0.3481717360,0.4099392099], [0.3491724318,0.4090918584], [0.3501738456,0.4082451135], [0.3511740724,0.4074005802], [0.3521750124,0.4065566477], [0.3531759396,0.4057139245], [0.3541775770,0.4048717979], [0.3551786772,0.4040313119], [0.3561801620,0.4031916874], [0.3571823517,0.4023526527], [0.3581834097,0.4015157398], [0.3591851682,0.4006794112], [0.3601857453,0.3998452322], [0.3611863629,0.3990121774], [0.3621875029,0.3981798417], [0.3631893357,0.3973480808], [0.3641910201,0.3965175892], [0.3651921942,0.3956886612], [0.3661940554,0.3948603009], [0.3671954009,0.3940334985], [0.3681962477,0.3932082337], [0.3691974477,0.3923837990], [0.3701989322,0.3915602476], [0.3712010950,0.3907372529], [0.3722026586,0.3899158594], [0.3732040997,0.3890956706], [0.3742062136,0.3882760316], [0.3752075512,0.3874581229], [0.3762095577,0.3866407597], [0.3772115816,0.3858244701], [0.3782126106,0.3850100728], [0.3792143028,0.3841962140], [0.3802158394,0.3833835557], [0.3812180365,0.3825714321], [0.3822199836,0.3817605775], [0.3832214717,0.3809511557], [0.3842236149,0.3801422623], [0.3852249508,0.3793350737], [0.3862261312,0.3785290585], [0.3872279612,0.3777235653], [0.3882290167,0.3769197349], [0.3892307183,0.3761164224], [0.3902315329,0.3753148530], [0.3912329899,0.3745137972], [0.3922341711,0.3737139862], [0.3932356676,0.3729149439], [0.3942368997,0.3721171290], [0.3952387682,0.3713198204], [0.3962404890,0.3705236390], [0.3972416358,0.3697289188], [0.3982426535,0.3689353017], [0.3992443010,0.3681421829], 
[0.4002458160,0.3673501632], [0.4012476451,0.3665588859], [0.4022491451,0.3657688551], [0.4032503499,0.3649800398], [0.4042521769,0.3641917137], [0.4052537047,0.3634045989], [0.4062556224,0.3626181502], [0.4072569386,0.3618331421], [0.4082588710,0.3610486161], [0.4092606576,0.3602651662], [0.4102617331,0.3594832296], [0.4112624413,0.3587025329], [0.4122637590,0.3579223108], [0.4132653697,0.3571428077], [0.4142662300,0.3563648316], [0.4152676955,0.3555873252], [0.4162692852,0.3548106594], [0.4172703805,0.3540353102], [0.4182720770,0.3532604257], [0.4192733529,0.3524867933], [0.4202752273,0.3517136225], [0.4212756052,0.3509425255], [0.4222765645,0.3501718962], [0.4232781177,0.3494017233], [0.4242787754,0.3486331484], [0.4252800238,0.3478650264], [0.4262810561,0.3470979736], [0.4272826768,0.3463313708], [0.4282838288,0.3455660241], [0.4292855666,0.3448011244], [0.4302867936,0.3440375060], [0.4312875044,0.3432751684], [0.4322886274,0.3425134013], [0.4332899081,0.3417523960], [0.4342917680,0.3409918301], [0.4352931925,0.3402324708], [0.4362941875,0.3394743097], [0.4372952371,0.3387169766], [0.4382968603,0.3379600765], [0.4392987002,0.3372038774], [0.4403003992,0.3364486461], [0.4413025499,0.3356939336], [0.4423036524,0.3349408655], [0.4433045196,0.3341888259], [0.4443059523,0.3334372105], [0.4453071467,0.3326866203], [0.4463088270,0.3319365097], [0.4473098391,0.3311877400], [0.4483114119,0.3304393889], [0.4493127632,0.3296920385], [0.4503146730,0.3289451044], [0.4513157024,0.3281996558], [0.4523164657,0.3274552315], [0.4533177831,0.3267112187], [0.4543188312,0.3259682269], [0.4553201613,0.3252258443], [0.4563210560,0.3244845997], [0.4573225003,0.3237437614], [0.4583238268,0.3230038207], [0.4593257008,0.3222642840], [0.4603267214,0.3215261823], [0.4613282868,0.3207884816], [0.4623293171,0.3200519747], [0.4633308899,0.3193158663], [0.4643320495,0.3185808562], [0.4653337494,0.3178462421], [0.4663344235,0.3171131693], [0.4673354687,0.3163806112], 
[0.4683365246,0.3156488294], [0.4693381161,0.3149174385], [0.4703397160,0.3141868214], [0.4713408502,0.3134573209], [0.4723419756,0.3127286012], [0.4733436321,0.3120002674], [0.4743446608,0.3112731596], [0.4753462181,0.3105464351], [0.4763472245,0.3098208751], [0.4773487573,0.3090956960], [0.4783501167,0.3083714026], [0.4793520007,0.3076474881], [0.4803525678,0.3069252798], [0.4813533187,0.3062036908], [0.4823545902,0.3054824766], [0.4833559019,0.3047619819], [0.4843571489,0.3040422794], [0.4853589138,0.3033229487], [0.4863606119,0.3026044079], [0.4873615883,0.3018871235], [0.4883630792,0.3011702073], [0.4893643031,0.3004542166], [0.4903654765,0.2997389940], [0.4913671616,0.2990241363], [0.4923677196,0.2983108098], [0.4933687866,0.2975978456], [0.4943702513,0.2968853214], [0.4953720027,0.2961733149], [0.4963729128,0.2954626250], [0.4973743280,0.2947522931], [0.4983753948,0.2940429228], [0.4993769647,0.2933339086], [0.5003777125,0.2926261861], [0.5013789612,0.2919188173], [0.5023800189,0.2912122893], [0.5033815757,0.2905061130], [0.5043826400,0.2898009856], [0.5053842016,0.2890962079], [0.5063854717,0.2883923330], [0.5073872373,0.2876888058], [0.5083889007,0.2869860445], [0.5093897165,0.2862845689], [0.5103909713,0.2855834749], [0.5113927177,0.2848827245], [0.5123935702,0.2841832845], [0.5133949120,0.2834841858], [0.5143963044,0.2827857332], [0.5153968896,0.2820885224], [0.5163976842,0.2813918424], [0.5173989641,0.2806954997], [0.5184004520,0.2799996861], [0.5194012903,0.2793049949], [0.5204018643,0.2786111560], [0.5214029196,0.2779176504], [0.5224037735,0.2772249496], [0.5234051073,0.2765325803], [0.5244057131,0.2758413757], [0.5254067967,0.2751505007], [0.5264080749,0.2744601493], [0.5274095904,0.2737702909], [0.5284107867,0.2730813066], [0.5294124578,0.2723926486], [0.5304134599,0.2717051010], [0.5314145860,0.2710181168], [0.5324161841,0.2703314560], [0.5334174758,0.2696456505], [0.5344186562,0.2689605645], [0.5354198349,0.2682761211], 
[0.5364214821,0.2675919975], [0.5374231840,0.2669084750], [0.5384241045,0.2662261217], [0.5394254907,0.2655440853], [0.5404269200,0.2648626523], [0.5414278987,0.2641821566], [0.5424285603,0.2635025049], [0.5434296837,0.2628231666], [0.5444310258,0.2621443056], [0.5454323428,0.2614660856], [0.5464335108,0.2607885886], [0.5474343417,0.2601119398], [0.5484352321,0.2594358690], [0.5494365797,0.2587601065], [0.5504375180,0.2580852354], [0.5514389117,0.2574106709], [0.5524404694,0.2567366083], [0.5534411993,0.2560637129], [0.5544423819,0.2553911215], [0.5554438198,0.2547189659], [0.5564448574,0.2540476843], [0.5574461165,0.2533768578], [0.5584478256,0.2527063324], [0.5594491886,0.2520366395], [0.5604500000,0.2513679142], [0.5614512125,0.2506995179], [0.5624528717,0.2500314195], [0.5634549518,0.2493636355], [0.5644561107,0.2486970579], [0.5654577141,0.2480307759], [0.5664582806,0.2473657724], [0.5674592894,0.2467010624], [0.5684602005,0.2460370032], [0.5694615527,0.2453732362], [0.5704629576,0.2447100175], [0.5714641608,0.2440475142], [0.5724654756,0.2433855172], [0.5734672286,0.2427238096], [0.5744680511,0.2420632935], [0.5754693100,0.2414030649], [0.5764697757,0.2407439327], [0.5774704561,0.2400852311], [0.5784715703,0.2394268146], [0.5794722894,0.2387692273], [0.5804734408,0.2381119237], [0.5814749025,0.2374549833], [0.5824759068,0.2367989081], [0.5834765886,0.2361436073], [0.5844776999,0.2354885875], [0.5854784090,0.2348343911], [0.5864795462,0.2341804742], [0.5874804650,0.2335272575], [0.5884814714,0.2328745400], [0.5894829039,0.2322220999], [0.5904841227,0.2315703528], [0.5914852917,0.2309191904], [0.5924868846,0.2302683035], [0.5934884263,0.2296179998], [0.5944895742,0.2289684998], [0.5954904730,0.2283197080], [0.5964917927,0.2276711888], [0.5974928613,0.2270233760], [0.5984940903,0.2263760020], [0.5994956347,0.2257289656], [0.6004967023,0.2250827771], [0.6014981875,0.2244368578], [0.6024993232,0.2237917012], [0.6035002904,0.2231471887], 
[0.6045016730,0.2225029434], [0.6055025383,0.2218595639], [0.6065038176,0.2212164501], [0.6075050713,0.2205738834], [0.6085062356,0.2199319032], [0.6095078120,0.2192901871], [0.6105086888,0.2186494456], [0.6115099762,0.2180089667], [0.6125114577,0.2173688879], [0.6135126807,0.2167294971], [0.6145143125,0.2160903671], [0.6155151153,0.2154522862], [0.6165163252,0.2148144645], [0.6175175368,0.2141771594], [0.6185185992,0.2135404657], [0.6195195973,0.2129043277], [0.6205210002,0.2122684466], [0.6215226154,0.2116329437], [0.6225233172,0.2109985316], [0.6235244214,0.2103643743], [0.6245257479,0.2097305852], [0.6255267345,0.2090975190], [0.6265281218,0.2084647059], [0.6275295273,0.2078323869], [0.6285308933,0.2072005971], [0.6295318748,0.2065695526], [0.6305332546,0.2059387589], [0.6315339696,0.2053088842], [0.6325350814,0.2046792588], [0.6335364007,0.2040500011], [0.6345377382,0.2034212291], [0.6355385583,0.2027932776], [0.6365397727,0.2021655732], [0.6375407380,0.2015385182], [0.6385419343,0.2009118107], [0.6395432199,0.2002855385], [0.6405448978,0.1996595113], [0.6415457759,0.1990344727], [0.6425470447,0.1984096778], [0.6435481693,0.1977854593], [0.6445496837,0.1971614835], [0.6455511954,0.1965379938], [0.6465521894,0.1959153096], [0.6475530688,0.1952931784], [0.6485543353,0.1946712874], [0.6495554858,0.1940499483], [0.6505563874,0.1934292422], [0.6515576742,0.1928087747], [0.6525587103,0.1921889390], [0.6535596358,0.1915696469], [0.6545609448,0.1909505918], [0.6555621420,0.1903320792], [0.6565628695,0.1897143285], [0.6575639075,0.1890968569], [0.6585649820,0.1884798326], [0.6595660804,0.1878632625], [0.6605675589,0.1872469263], [0.6615690601,0.1866310431], [0.6625702503,0.1860158169], [0.6635718189,0.1854008231], [0.6645734446,0.1847862583], [0.6655743074,0.1841726240], [0.6665755394,0.1835592248], [0.6675771477,0.1829460558], [0.6685778451,0.1823339037], [0.6695786084,0.1817221692], [0.6705795914,0.1811107575], [0.6715809484,0.1804995739], 
[0.6725815516,0.1798893053], [0.6735824170,0.1792793307], [0.6745836545,0.1786695826], [0.6755849960,0.1780602236], [0.6765863940,0.1774512817], [0.6775870496,0.1768432412], [0.6785880749,0.1762354251], [0.6795890291,0.1756281002], [0.6805903522,0.1750209989], [0.6815913005,0.1744145710], [0.6825923463,0.1738085293], [0.6835936242,0.1732027914], [0.6845952689,0.1725972752], [0.6855961635,0.1719926549], [0.6865974237,0.1713882552], [0.6875983946,0.1707844703], [0.6885995083,0.1701810389], [0.6896009860,0.1695778266], [0.6906018299,0.1689754336], [0.6916030367,0.1683732588], [0.6926040693,0.1677716243], [0.6936051583,0.1671703906], [0.6946066087,0.1665693738], [0.6956081144,0.1659687570], [0.6966087417,0.1653690985], [0.6976097286,0.1647696554], [0.6986107311,0.1641706328], [0.6996120925,0.1635718247], [0.7006128849,0.1629737848], [0.7016140349,0.1623759585], [0.7026149662,0.1617786890], [0.7036162542,0.1611816322], [0.7046173934,0.1605850886], [0.7056187329,0.1599888495], [0.7066196879,0.1593932620], [0.7076209976,0.1587978854], [0.7086222330,0.1582029740], [0.7096231569,0.1576086676], [0.7106242107,0.1570147032], [0.7116250435,0.1564212879], [0.7126259416,0.1558282511], [0.7136271913,0.1552354226], [0.7146280968,0.1546432133], [0.7156293530,0.1540512113], [0.7166303756,0.1534597614], [0.7176317479,0.1528685179], [0.7186332155,0.1522776306], [0.7196340181,0.1516875469], [0.7206351688,0.1510976683], [0.7216358525,0.1505084741], [0.7226367795,0.1499195453], [0.7236380533,0.1493308203], [0.7246394866,0.1487424088], [0.7256403528,0.1481547366], [0.7266412523,0.1475674500], [0.7276424968,0.1469803655], [0.7286437738,0.1463936658], [0.7296449298,0.1458074398], [0.7306464295,0.1452214149], [0.7316471101,0.1446362703], [0.7326481332,0.1440513256], [0.7336491728,0.1434667708], [0.7346501133,0.1428826724], [0.7356513949,0.1422987730], [0.7366525763,0.1417153291], [0.7376535200,0.1411324200], [0.7386546873,0.1405497761], [0.7396557111,0.1399676105], 
[0.7406570737,0.1393856417], [0.7416580993,0.1388042619], [0.7426591227,0.1382232756], [0.7436604837,0.1376424850], [0.7446618415,0.1370620871], [0.7456626999,0.1364823685], [0.7466638944,0.1359028444], [0.7476649673,0.1353237789], [0.7486659356,0.1347451615], [0.7496671382,0.1341667954], [0.7506682343,0.1335888769], [0.7516696643,0.1330111510], [0.7526706712,0.1324340537], [0.7536717145,0.1318573191], [0.7546730904,0.1312807759], [0.7556745018,0.1307045946], [0.7566754738,0.1301290476], [0.7576762216,0.1295540100], [0.7586773001,0.1289791621], [0.7596783202,0.1284047269], [0.7606796703,0.1278304808], [0.7616811144,0.1272565587], [0.7626823795,0.1266831162], [0.7636839735,0.1261098619], [0.7646847839,0.1255374314], [0.7656859219,0.1249651882], [0.7666872205,0.1243932273], [0.7676885121,0.1238216437], [0.7686896065,0.1232505452], [0.7696910270,0.1226796326], [0.7706919092,0.1221093979], [0.7716931166,0.1215393483], [0.7726940288,0.1209698362], [0.7736952654,0.1204005085], [0.7746962081,0.1198317160], [0.7756974744,0.1192631072], [0.7766986881,0.1186948950], [0.7776994921,0.1181272813], [0.7787005425,0.1175598930], [0.7797015898,0.1169928710], [0.7807029587,0.1164260310], [0.7817041419,0.1158596594], [0.7827056459,0.1152934691], [0.7837066697,0.1147279120], [0.7847075981,0.1141627700], [0.7857088459,0.1135978081], [0.7867097986,0.1130333723], [0.7877110700,0.1124691160], [0.7887118670,0.1119054853], [0.7897129817,0.1113420333], [0.7907138703,0.1107790655], [0.7917150758,0.1102162758], [0.7927158703,0.1096540727], [0.7937169808,0.1090920469], [0.7947177613,0.1085305607], [0.7957188569,0.1079692511], [0.7967200529,0.1074082384], [0.7977208339,0.1068478103], [0.7987219291,0.1062875580], [0.7997228517,0.1057277530], [0.8007240878,0.1051681232], [0.8017250764,0.1046089813], [0.8027261482,0.1040501419], [0.8037271570,0.1034916860], [0.8047284777,0.1029334040], [0.8057294740,0.1023756498], [0.8067307814,0.1018180689], [0.8077319714,0.1012608992], 
[0.8087331365,0.1007040885], [0.8097339845,0.1001477985], [0.8107351420,0.0995916804], [0.8117364375,0.0990358290], [0.8127375240,0.0984804361], [0.8137385638,0.0979254112], [0.8147399117,0.0973705568], [0.8157408920,0.0968162468], [0.8167421797,0.0962621068], [0.8177431682,0.0957084717], [0.8187444633,0.0951550059], [0.8197455532,0.0946019919], [0.8207469489,0.0940491466], [0.8217481147,0.0934967652], [0.8227495857,0.0929445520], [0.8237510513,0.0923926778], [0.8247522421,0.0918412903], [0.8257537368,0.0912900700], [0.8267544619,0.0907396072], [0.8277552475,0.0901894442], [0.8287562618,0.0896394880], [0.8297572276,0.0890898905], [0.8307584954,0.0885404587], [0.8317597138,0.0879913850], [0.8327606867,0.0874427762], [0.8337619605,0.0868943323], [0.8347627055,0.0863465069], [0.8357636616,0.0857988943], [0.8367649176,0.0852514457], [0.8377660281,0.0847044040], [0.8387667224,0.0841579162], [0.8397676397,0.0836116327], [0.8407688554,0.0830655119], [0.8417696709,0.0825199343], [0.8427707840,0.0819745189], [0.8437714494,0.0814296709], [0.8447724117,0.0808849845], [0.8457735560,0.0803405217], [0.8467747674,0.0797963446], [0.8477754203,0.0792527924], [0.8487763687,0.0787094005], [0.8497772153,0.0781663842], [0.8507783567,0.0776235277], [0.8517792627,0.0770811180], [0.8527804629,0.0765388677], [0.8537814690,0.0759970406], [0.8547825058,0.0754555144], [0.8557838358,0.0749141468], [0.8567851958,0.0743730796], [0.8577865119,0.0738323522], [0.8587879522,0.0732918732], [0.8597887845,0.0727520372], [0.8607898296,0.0722124006], [0.8617911659,0.0716729209], [0.8627921776,0.0711339291], [0.8637930359,0.0705953325], [0.8647941843,0.0700568919], [0.8657951783,0.0695188458], [0.8667961226,0.0689811373], [0.8677973561,0.0684435841], [0.8687986808,0.0679062918], [0.8697993534,0.0673696588], [0.8708001677,0.0668332584], [0.8718011741,0.0662970633], [0.8728024680,0.0657610220], [0.8738039536,0.0652251856], [0.8748050259,0.0646898771], [0.8758060549,0.0641548980], 
[0.8768073370,0.0636200894], [0.8778089051,0.0630854334], [0.8788095456,0.0625515771], [0.8798103062,0.0620179606], [0.8808113516,0.0614844958], [0.8818125165,0.0609512704], [0.8828136959,0.0604183400], [0.8838144803,0.0598859217], [0.8848153253,0.0593537725], [0.8858164535,0.0588217738], [0.8868176417,0.0582900438], [0.8878189258,0.0577585629], [0.8888198590,0.0572275678], [0.8898210742,0.0566967221], [0.8908220833,0.0561662841], [0.8918233738,0.0556359951], [0.8928243253,0.0551061831], [0.8938254322,0.0545765858], [0.8948268196,0.0540471367], [0.8958277659,0.0535182168], [0.8968289919,0.0529894447], [0.8978298680,0.0524611522], [0.8988308562,0.0519330951], [0.8998318860,0.0514053099], [0.9008331944,0.0508776715], [0.9018345438,0.0503503047], [0.9028354051,0.0498234874], [0.9038365440,0.0492968161], [0.9048375013,0.0487705317], [0.9058384620,0.0482445367], [0.9068395794,0.0477187499], [0.9078404128,0.0471934022], [0.9088415223,0.0466681993], [0.9098422743,0.0461434729], [0.9108432594,0.0456189129], [0.9118445198,0.0450944969], [0.9128455886,0.0445704691], [0.9138465602,0.0440467793], [0.9148478062,0.0435232328], [0.9158489542,0.0430000239], [0.9168497246,0.0424772982], [0.9178507100,0.0419547453], [0.9188519685,0.0414323349], [0.9198527005,0.0409104836], [0.9208537049,0.0403887741], [0.9218546764,0.0398673652], [0.9228559198,0.0393460978], [0.9238569337,0.0388252325], [0.9248581429,0.0383045477], [0.9258591557,0.0377842468], [0.9268604392,0.0372640865], [0.9278612824,0.0367444356], [0.9288623956,0.0362249248], [0.9298637098,0.0357055897], [0.9308645785,0.0351867650], [0.9318657163,0.0346680797], [0.9328668805,0.0341496594], [0.9338681670,0.0336314538], [0.9348693061,0.0331136023], [0.9358707132,0.0325958894], [0.9368716642,0.0320786891], [0.9378725827,0.0315617817], [0.9388735835,0.0310451076], [0.9398748514,0.0305285711], [0.9408762012,0.0300122675], [0.9418775077,0.0294962609], [0.9428785621,0.0289806582], [0.9438795318,0.0284653728], 
[0.9448807671,0.0279502240], [0.9458817890,0.0274354577], [0.9468830759,0.0269208275], [0.9478839859,0.0264066630], [0.9488851602,0.0258926342], [0.9498859806,0.0253790580], [0.9508870648,0.0248656171], [0.9518880089,0.0243525181], [0.9528892163,0.0238395540], [0.9538902006,0.0233269736], [0.9548914475,0.0228145276], [0.9558923190,0.0223025422], [0.9568933803,0.0217907278], [0.9578947035,0.0212790472], [0.9588958951,0.0207677012], [0.9598973482,0.0202564886], [0.9608982736,0.0197458117], [0.9618994598,0.0192352679], [0.9629006974,0.0187249634], [0.9639021955,0.0182147916], [0.9649037413,0.0177048604], [0.9659055471,0.0171950615], [0.9669069401,0.0166857369], [0.9679082355,0.0161767255], [0.9689097900,0.0156678457], [0.9699112462,0.0151592787], [0.9709126936,0.0146509788], [0.9719142405,0.0141428904], [0.9729159660,0.0136349733], [0.9739176212,0.0131273531], [0.9749193101,0.0126199769], [0.9759212564,0.0121127309], [0.9769232360,0.0116057285], [0.9779252391,0.0110989741], [0.9789272710,0.0105924647], [0.9799293395,0.0100861959], [0.9809313029,0.0095802390], [0.9819335223,0.0090744113], [0.9829358524,0.0085687858], [0.9839384382,0.0080632892], [0.9849411070,0.0075580082], [0.9859440311,0.0070528558], [0.9869468965,0.0065479898], [0.9879500168,0.0060432520], [0.9889530664,0.0055388060], [0.9899563704,0.0050344878], [0.9909598639,0.0045303300], [0.9919635394,0.0040263360], [0.9929674602,0.0035224738], [0.9939714188,0.0030188473], [0.9949756310,0.0025153480], [0.9959800294,0.0020120094], [0.9969846811,0.0015087977], [0.9979895491,0.0010057312], [0.9989946569,0.0005027980], [1.0000000000,0.0000000000]]././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/pareto_front/zdt2_front.json0000644000076500000240000007051014456461441021573 0ustar00runnerstaff[[0.0000000000,1.0000000000], [0.0037548734,0.9999859009], [0.0056323101,0.9999682771], [0.0072002711,0.9999481561], [0.0085975093,0.9999260828], 
[0.0098827783,0.9999023307], [0.0111021022,0.9998767433], [0.0122644749,0.9998495827], [0.0133717654,0.9998211959], [0.0144556768,0.9997910334], [0.0154989513,0.9997597825], [0.0165198262,0.9997270953], [0.0175091577,0.9996934294], [0.0184792182,0.9996585185], [0.0194383563,0.9996221503], [0.0203994415,0.9995838628], [0.0213502373,0.9995441674], [0.0222798620,0.9995036077], [0.0232135253,0.9994611322], [0.0241284123,0.9994178197], [0.0250545142,0.9993722713], [0.0259635001,0.9993258967], [0.0268835323,0.9992772757], [0.0277878213,0.9992278370], [0.0286988944,0.9991763735], [0.0295955060,0.9991241060], [0.0305002006,0.9990697378], [0.0313914778,0.9990145751], [0.0322999824,0.9989567111], [0.0331957101,0.9988980448], [0.0341024348,0.9988370239], [0.0350154214,0.9987739203], [0.0359165055,0.9987100046], [0.0368284765,0.9986436633], [0.0377291560,0.9985765108], [0.0386233194,0.9985082392], [0.0395347862,0.9984370007], [0.0404357461,0.9983649504], [0.0413838286,0.9982873787], [0.0423210511,0.9982089286], [0.0432598654,0.9981285840], [0.0441884928,0.9980473771], [0.0451274643,0.9979635120], [0.0460566673,0.9978787834], [0.0469936799,0.9977915940], [0.0479285927,0.9977028500], [0.0488543871,0.9976132489], [0.0498041848,0.9975195432], [0.0507558612,0.9974238426], [0.0516986155,0.9973272532], [0.0526435088,0.9972286610], [0.0536006512,0.9971269702], [0.0545492479,0.9970243796], [0.0554916186,0.9969206803], [0.0564497778,0.9968134226], [0.0573998053,0.9967052624], [0.0583678442,0.9965931948], [0.0593370928,0.9964791094], [0.0602984253,0.9963640999], [0.0612621068,0.9962469543], [0.0622182087,0.9961288945], [0.0631922258,0.9960067426], [0.0641587364,0.9958836565], [0.0651270265,0.9957584704], [0.0660978578,0.9956310732], [0.0670615596,0.9955027472], [0.0680279356,0.9953722000], [0.0690027495,0.9952386206], [0.0699706776,0.9951041043], [0.0709476107,0.9949664365], [0.0719178176,0.9948278275], [0.0728887192,0.9946872346], [0.0738666736,0.9945437145], 
[0.0748381543,0.9943992507], [0.0758104622,0.9942527738], [0.0767933220,0.9941027857], [0.0777698921,0.9939518439], [0.0787528783,0.9937979842], [0.0797353113,0.9936422801], [0.0807116919,0.9934856228], [0.0816950256,0.9933259228], [0.0826724413,0.9931652675], [0.0836520716,0.9930023309], [0.0846324853,0.9928373424], [0.0856122078,0.9926705499], [0.0865915946,0.9925018957], [0.0875654427,0.9923322932], [0.0885486165,0.9921591425], [0.0895263321,0.9919850359], [0.0905159446,0.9918068638], [0.0915001473,0.9916277230], [0.0924829805,0.9914468983], [0.0934713885,0.9912630995], [0.0944545706,0.9910783341], [0.0954460041,0.9908900603], [0.0964322884,0.9907008137], [0.0974231726,0.9905087254], [0.0984090176,0.9903156653], [0.0993933392,0.9901209641], [0.1003891393,0.9899220207], [0.1013800005,0.9897220955], [0.1023716462,0.9895200461], [0.1033584889,0.9893170228], [0.1043457257,0.9891119695], [0.1053366579,0.9889041885], [0.1063229292,0.9886954347], [0.1073171467,0.9884830300], [0.1083067588,0.9882696460], [0.1092942220,0.9880547730], [0.1102871736,0.9878367393], [0.1112792090,0.9876169376], [0.1122702940,0.9873953811], [0.1132570045,0.9871728509], [0.1142428580,0.9869485694], [0.1152287028,0.9867223461], [0.1162242907,0.9864919142], [0.1172176143,0.9862600309], [0.1182067291,0.9860271692], [0.1192005741,0.9857912231], [0.1201937044,0.9855534734], [0.1211827316,0.9853147456], [0.1221738421,0.9850735523], [0.1231674304,0.9848297841], [0.1241570110,0.9845850366], [0.1251513739,0.9843371336], [0.1261417866,0.9840882497], [0.1271305733,0.9838378173], [0.1281188446,0.9835855617], [0.1291120346,0.9833300825], [0.1301014046,0.9830736245], [0.1310981670,0.9828132706], [0.1320911402,0.9825519307], [0.1330828331,0.9822889595], [0.1340757168,0.9820237022], [0.1350667537,0.9817569720], [0.1360640059,0.9814865863], [0.1370576036,0.9812152133], [0.1380482074,0.9809426924], [0.1390442299,0.9806667021], [0.1400366851,0.9803897268], [0.1410285212,0.9801109562], 
[0.1420297497,0.9798275502], [0.1430279818,0.9795429964], [0.1440227304,0.9792574531], [0.1450201414,0.9789691586], [0.1460141225,0.9786798760], [0.1470106774,0.9783878607], [0.1480038546,0.9780948590], [0.1490008035,0.9777987606], [0.1499987966,0.9775003610], [0.1509934697,0.9772009721], [0.1519892171,0.9768992779], [0.1529845440,0.9765957293], [0.1539824730,0.9762893980], [0.1549812527,0.9759808113], [0.1559768141,0.9756712335], [0.1569732565,0.9753593967], [0.1579682628,0.9750460279], [0.1589635779,0.9747305809], [0.1599602369,0.9744127226], [0.1609537910,0.9740938772], [0.1619508157,0.9737719333], [0.1629447714,0.9734490015], [0.1639420183,0.9731230146], [0.1649362321,0.9727960394], [0.1659285601,0.9724677129], [0.1669263423,0.9721355962], [0.1679242718,0.9718014389], [0.1689192361,0.9714662917], [0.1699143812,0.9711291031], [0.1709136645,0.9707885193], [0.1719100267,0.9704469427], [0.1729091065,0.9701024409], [0.1739075198,0.9697561746], [0.1749039959,0.9694085922], [0.1758976333,0.9690600226], [0.1768930813,0.9687088378], [0.1778885982,0.9683556466], [0.1788857928,0.9679998731], [0.1798809668,0.9676428378], [0.1808733880,0.9672848175], [0.1818737507,0.9669219388], [0.1828713622,0.9665580649], [0.1838675279,0.9661927322], [0.1848647298,0.9658250317], [0.1858592422,0.9654563421], [0.1868578763,0.9650841341], [0.1878538419,0.9647109341], [0.1888480200,0.9643364254], [0.1898479302,0.9639577634], [0.1908452072,0.9635781069], [0.1918426183,0.9631964098], [0.1928374365,0.9628137231], [0.1938386413,0.9624265811], [0.1948372605,0.9620384419], [0.1958345643,0.9616488234], [0.1968348666,0.9612560353], [0.1978338730,0.9608617587], [0.1988303570,0.9604664891], [0.1998255835,0.9600697362], [0.2008227242,0.9596702334], [0.2018185255,0.9592692828], [0.2028169428,0.9588652877], [0.2038160948,0.9584589995], [0.2048127978,0.9580517178], [0.2058088595,0.9576427134], [0.2068084964,0.9572302458], [0.2078057174,0.9568167838], [0.2088052907,0.9564003506], 
[0.2098024715,0.9559829230], [0.2107996014,0.9555635281], [0.2118015707,0.9551400946], [0.2128016905,0.9547154405], [0.2138009672,0.9542891464], [0.2147979086,0.9538618584], [0.2157940366,0.9534329338], [0.2167962249,0.9529993969], [0.2177980650,0.9525640029], [0.2187976010,0.9521276098], [0.2197968130,0.9516893610], [0.2207968137,0.9512487671], [0.2217945498,0.9508071777], [0.2227934740,0.9503630679], [0.2237901588,0.9499179648], [0.2247867449,0.9494709193], [0.2257873735,0.9490200620], [0.2267874112,0.9485674701], [0.2277870983,0.9481130378], [0.2287845918,0.9476576106], [0.2297817570,0.9472003441], [0.2307815350,0.9467398831], [0.2317791474,0.9462784268], [0.2327802895,0.9458133368], [0.2337802672,0.9453467867], [0.2347788662,0.9448788840], [0.2357753414,0.9444099884], [0.2367704676,0.9439397457], [0.2377710737,0.9434649165], [0.2387695744,0.9429890903], [0.2397695434,0.9425105660], [0.2407685174,0.9420305210], [0.2417654190,0.9415494822], [0.2427650570,0.9410651271], [0.2437638578,0.9405791816], [0.2447606123,0.9400922427], [0.2457599204,0.9396020615], [0.2467571968,0.9391108858], [0.2477530477,0.9386184274], [0.2487530373,0.9381219264], [0.2497510170,0.9376244295], [0.2507525225,0.9371231724], [0.2517543782,0.9366197331], [0.2527542404,0.9361152940], [0.2537544657,0.9356086711], [0.2547540027,0.9351003981], [0.2557521225,0.9345908518], [0.2567536907,0.9340775423], [0.2577544535,0.9335626417], [0.2587538466,0.9330464469], [0.2597513096,0.9325292572], [0.2607499713,0.9320094525], [0.2617490401,0.9314874400], [0.2627462021,0.9309644333], [0.2637450953,0.9304385247], [0.2647450974,0.9299100334], [0.2657445968,0.9293798093], [0.2667422165,0.9288485899], [0.2677393514,0.9283156397], [0.2687367900,0.9277805377], [0.2697371886,0.9272418491], [0.2707357322,0.9267021633], [0.2717368092,0.9261591065], [0.2727361892,0.9256149711], [0.2737365126,0.9250683217], [0.2747350083,0.9245206752], [0.2757357028,0.9239698222], [0.2767345816,0.9234179714], 
[0.2777350726,0.9228632295], [0.2787337615,0.9223074902], [0.2797339559,0.9217489139], [0.2807342099,0.9211883034], [0.2817336042,0.9206261763], [0.2827326525,0.9200622472], [0.2837299357,0.9194973236], [0.2847306145,0.9189284771], [0.2857312198,0.9183576700], [0.2867316128,0.9177849822], [0.2877302606,0.9172112971], [0.2887297852,0.9166351111], [0.2897292567,0.9160569578], [0.2907278411,0.9154773224], [0.2917247107,0.9148966932], [0.2927247433,0.9143122247], [0.2937230677,0.9137267595], [0.2947242214,0.9131376333], [0.2957236747,0.9125475082], [0.2967222911,0.9119558819], [0.2977226397,0.9113612298], [0.2987226420,0.9107647832], [0.2997209704,0.9101673399], [0.3007211623,0.9095667826], [0.3017196908,0.9089652282], [0.3027213448,0.9083597874], [0.3037216001,0.9077531896], [0.3047224279,0.9071442419], [0.3057227202,0.9065336184], [0.3067213760,0.9059219975], [0.3077219257,0.9053072164], [0.3087217834,0.9046908604], [0.3097200220,0.9040735080], [0.3107204423,0.9034528067], [0.3117192522,0.9028311078], [0.3127202311,0.9022060571], [0.3137200906,0.9015797047], [0.3147183568,0.9009523559], [0.3157189242,0.9003215609], [0.3167186443,0.8996893003], [0.3177167867,0.8990560435], [0.3187167327,0.8984196443], [0.3197151101,0.8977822484], [0.3207150263,0.8971418719], [0.3217133837,0.8965004987], [0.3227122877,0.8958567794], [0.3237134545,0.8952095994], [0.3247145488,0.8945604618], [0.3257140999,0.8939103251], [0.3267132688,0.8932584400], [0.3277109098,0.8926055596], [0.3287103536,0.8919495035], [0.3297082779,0.8912924515], [0.3307106586,0.8906304603], [0.3317115202,0.8899674674], [0.3327117172,0.8893029132], [0.3337120990,0.8886362350], [0.3347117763,0.8879680268], [0.3357131074,0.8872967095], [0.3367129451,0.8866243926], [0.3377145017,0.8859489154], [0.3387145731,0.8852724380], [0.3397137097,0.8845945954], [0.3407131349,0.8839145597], [0.3417134322,0.8832319303], [0.3427127583,0.8825479653], [0.3437106274,0.8818630046], [0.3447112096,0.8811741820], 
[0.3457119036,0.8804832797], [0.3467111493,0.8797913789], [0.3477113852,0.8790967926], [0.3487101824,0.8784012087], [0.3497105997,0.8777024965], [0.3507095860,0.8770027863], [0.3517093257,0.8763005502], [0.3527082230,0.8755969094], [0.3537068614,0.8748914562], [0.3547084670,0.8741819034], [0.3557086584,0.8734713503], [0.3567096721,0.8727582098], [0.3577092813,0.8720440701], [0.3587087780,0.8713280126], [0.3597068822,0.8706109589], [0.3607042064,0.8698924755], [0.3617058245,0.8691688965], [0.3627060558,0.8684443171], [0.3637073052,0.8677169961], [0.3647071765,0.8669886754], [0.3657070569,0.8662583485], [0.3667086267,0.8655247831], [0.3677099490,0.8647893934], [0.3687099346,0.8640529841], [0.3697105071,0.8633141409], [0.3707097257,0.8625742993], [0.3717101517,0.8618315631], [0.3727095392,0.8610875994], [0.3737075868,0.8603426395], [0.3747084124,0.8595936056], [0.3757092915,0.8588425283], [0.3767088375,0.8580904518], [0.3777096188,0.8573354439], [0.3787090743,0.8565794370], [0.3797085811,0.8558213934], [0.3807097313,0.8550601005], [0.3817099675,0.8542975007], [0.3827088932,0.8535339031], [0.3837103797,0.8527663445], [0.3847105592,0.8519977857], [0.3857114715,0.8512266608], [0.3867110851,0.8504545367], [0.3877100118,0.8496809468], [0.3887116145,0.8489032808], [0.3897119267,0.8481246142], [0.3907130047,0.8473433479], [0.3917134253,0.8465605924], [0.3927125683,0.8457768387], [0.3937119675,0.8449908866], [0.3947140174,0.8442008444], [0.3957147954,0.8434098007], [0.3967148913,0.8426172950], [0.3977152137,0.8418226088], [0.3987156539,0.8410258274], [0.3997162157,0.8402269469], [0.4007155251,0.8394270679], [0.4017167430,0.8386236584], [0.4027176025,0.8378185326], [0.4037172184,0.8370124076], [0.4047181532,0.8362032164], [0.4057191940,0.8353919356], [0.4067189998,0.8345796552], [0.4077185048,0.8337656208], [0.4087181103,0.8329495063], [0.4097169711,0.8321320036], [0.4107155685,0.8313127218], [0.4117172434,0.8304889115], [0.4127176998,0.8296641003], 
[0.4137174051,0.8288379087], [0.4147187587,0.8280083512], [0.4157189034,0.8271777934], [0.4167185316,0.8263456654], [0.4177183326,0.8255113946], [0.4187195083,0.8246739733], [0.4197194872,0.8238355521], [0.4207198251,0.8229948287], [0.4217204761,0.8221518401], [0.4227199399,0.8213078524], [0.4237197207,0.8204615983], [0.4247210901,0.8196119956], [0.4257212790,0.8187613926], [0.4267222242,0.8179081434], [0.4277232266,0.8170528414], [0.4287233936,0.8161962518], [0.4297236851,0.8153375545], [0.4307234570,0.8144773036], [0.4317220687,0.8136160554], [0.4327227208,0.8127510469], [0.4337242582,0.8118832678], [0.4347246393,0.8110144880], [0.4357256663,0.8101431438], [0.4367255433,0.8092707998], [0.4377254253,0.8083964521], [0.4387259858,0.8075195094], [0.4397258095,0.8066412125], [0.4407267822,0.8057599035], [0.4417266182,0.8048775948], [0.4427282448,0.8039917012], [0.4437287384,0.8031048067], [0.4447285233,0.8022165406], [0.4457296196,0.8013251062], [0.4467295917,0.8004326719], [0.4477300390,0.7995378122], [0.4487308600,0.7986406153], [0.4497305649,0.7977424190], [0.4507311698,0.7968414125], [0.4517306641,0.7959394071], [0.4527309640,0.7950346742], [0.4537301589,0.7941289429], [0.4547317747,0.7932190131], [0.4557322873,0.7923080823], [0.4567331553,0.7913948249], [0.4577329265,0.7904805680], [0.4587341907,0.7895629423], [0.4597349938,0.7886437354], [0.4607358668,0.7877224611], [0.4617356526,0.7868001871], [0.4627355127,0.7858758453], [0.4637356656,0.7849492325], [0.4647373456,0.7840191996], [0.4657391298,0.7830870630], [0.4667403516,0.7821534442], [0.4677404995,0.7812188252], [0.4687410432,0.7802818344], [0.4697414800,0.7793429420], [0.4707408513,0.7784030509], [0.4717417862,0.7774596872], [0.4727416591,0.7765153237], [0.4737421366,0.7755683880], [0.4747424913,0.7746195670], [0.4757417920,0.7736697474], [0.4767427173,0.7727163815], [0.4777431196,0.7717615117], [0.4787424745,0.7708056431], [0.4797437215,0.7698459617], [0.4807446946,0.7688845387], 
[0.4817454071,0.7679213628], [0.4827450802,0.7669571875], [0.4837444982,0.7659912605], [0.4847441394,0.7650231193], [0.4857427498,0.7640539810], [0.4867439287,0.7630803479], [0.4877440779,0.7621057145], [0.4887446887,0.7611286293], [0.4897442752,0.7601505450], [0.4907434877,0.7591708293], [0.4917429738,0.7581888477], [0.4927440231,0.7572033277], [0.4937440555,0.7562168077], [0.4947448247,0.7552275584], [0.4957450165,0.7542368786], [0.4967458918,0.7532435189], [0.4977457588,0.7522491596], [0.4987468469,0.7512515827], [0.4997469302,0.7502530058], [0.5007479399,0.7492515007], [0.5017479491,0.7482489956], [0.5027480910,0.7472443570], [0.5037472381,0.7462387201], [0.5047479628,0.7452294940], [0.5057476955,0.7442192685], [0.5067487937,0.7432056600], [0.5077489031,0.7421910514], [0.5087482942,0.7411751731], [0.5097494511,0.7401554971], [0.5107504552,0.7391339725], [0.5117513863,0.7381105186], [0.5127513385,0.7370860649], [0.5137512217,0.7360596822], [0.5147525094,0.7350298540], [0.5157529053,0.7339989406], [0.5167523310,0.7329670284], [0.5177543851,0.7319303967], [0.5187554695,0.7308927629], [0.5197570929,0.7298525644], [0.5207577512,0.7288113646], [0.5217592124,0.7277673242], [0.5227597126,0.7267222829], [0.5237611018,0.7256743082], [0.5247624920,0.7246243270], [0.5257629266,0.7235733450], [0.5267641490,0.7225195313], [0.5277644199,0.7214647171], [0.5287656287,0.7204069099], [0.5297673309,0.7193465751], [0.5307680861,0.7182852388], [0.5317684741,0.7172222900], [0.5327690726,0.7161571152], [0.5337707582,0.7150887777], [0.5347715039,0.7140194386], [0.5357718028,0.7129485753], [0.5367721463,0.7118756630], [0.5377728983,0.7108003098], [0.5387727192,0.7097239570], [0.5397735992,0.7086444616], [0.5407735512,0.7075639664], [0.5417749442,0.7064799098], [0.5427754119,0.7053948523], [0.5437751045,0.7043086357], [0.5447764053,0.7032186682], [0.5457767860,0.7021276999], [0.5467779259,0.7010338997], [0.5477781493,0.6999390992], [0.5487797162,0.6988408231], 
[0.5497803692,0.6977415456], [0.5507808823,0.6966404197], [0.5517822930,0.6955363012], [0.5527827950,0.6944311816], [0.5537827522,0.6933246634], [0.5547837348,0.6922150076], [0.5557839327,0.6911042202], [0.5567832306,0.6899924341], [0.5577845788,0.6888763636], [0.5587850282,0.6877592923], [0.5597850731,0.6866406719], [0.5607861014,0.6855189485], [0.5617862362,0.6843962249], [0.5627868694,0.6832709396], [0.5637866132,0.6821446548], [0.5647869812,0.6810156659], [0.5657864632,0.6798856781], [0.5667863677,0.6787532134], [0.5677880813,0.6776166947], [0.5687889112,0.6764791744], [0.5697903369,0.6753389720], [0.5707914239,0.6741971504], [0.5717916330,0.6730543284], [0.5727922091,0.6719090851], [0.5737919114,0.6707628424], [0.5747928556,0.6696131731], [0.5757940029,0.6684612663], [0.5767942797,0.6673083589], [0.5777954536,0.6661524138], [0.5787957601,0.6649954681], [0.5797958927,0.6638367229], [0.5807957294,0.6626763207], [0.5817966026,0.6615127133], [0.5827969968,0.6603476605], [0.5837965324,0.6591816087], [0.5847986687,0.6580105171], [0.5857999463,0.6568384229], [0.5868017357,0.6556637229], [0.5878026701,0.6544880211], [0.5888030814,0.6533109313], [0.5898044159,0.6521307510], [0.5908049003,0.6509495698], [0.5918060868,0.6497655556], [0.5928064265,0.6485805407], [0.5938072464,0.6473929541], [0.5948087191,0.6462025877], [0.5958099128,0.6450105478], [0.5968102653,0.6438175072], [0.5978103153,0.6426228269], [0.5988105996,0.6414258658], [0.5998121828,0.6402253454], [0.6008129297,0.6390238235], [0.6018134084,0.6378206215], [0.6028143007,0.6366149188], [0.6038147499,0.6354077478], [0.6048143703,0.6341995775], [0.6058156411,0.6329874090], [0.6068165160,0.6317737159], [0.6078165655,0.6305590228], [0.6088174526,0.6293413094], [0.6098175170,0.6281225960], [0.6108184435,0.6269008291], [0.6118185500,0.6256780619], [0.6128191996,0.6244526286], [0.6138190323,0.6232261956], [0.6148189560,0.6219976513], [0.6158192562,0.6207666437], [0.6168200734,0.6195329971], 
[0.6178200786,0.6182983505], [0.6188212651,0.6170602419], [0.6198216417,0.6158211325], [0.6208229481,0.6145788672], [0.6218243862,0.6133344327], [0.6228252818,0.6120886684], [0.6238253731,0.6108419038], [0.6248249263,0.6095938115], [0.6258256208,0.6083422924], [0.6268255152,0.6070897735], [0.6278268897,0.6058333966], [0.6288274656,0.6045760185], [0.6298277409,0.6033170168], [0.6308284625,0.6020554509], [0.6318283903,0.6007928852], [0.6328297731,0.5995264782], [0.6338303637,0.5982590701], [0.6348314384,0.5969890449], [0.6358317238,0.5957180191], [0.6368322957,0.5944446271], [0.6378334062,0.5931685459], [0.6388337311,0.5918914641], [0.6398346033,0.5906116804], [0.6408352031,0.5893302425], [0.6418350217,0.5880478049], [0.6428361808,0.5867616446], [0.6438365603,0.5854744836], [0.6448382648,0.5841836123], [0.6458398106,0.5828909390], [0.6468411991,0.5815964631], [0.6478418124,0.5803009861], [0.6488417767,0.5790043488], [0.6498430841,0.5777039660], [0.6508436201,0.5764025821], [0.6518444272,0.5750988428], [0.6528453459,0.5737929544], [0.6538454973,0.5724860657], [0.6548457623,0.5711770276], [0.6558466176,0.5698652142], [0.6568467091,0.5685524007], [0.6578483101,0.5672356009], [0.6588491486,0.5659177994], [0.6598495399,0.5645985847], [0.6608506818,0.5632763764], [0.6618520295,0.5619518910], [0.6628526198,0.5606264044], [0.6638532352,0.5592988821], [0.6648546560,0.5579682864], [0.6658557484,0.5566361223], [0.6668561595,0.5553028625], [0.6678570275,0.5539669908], [0.6688577411,0.5526293222], [0.6698577061,0.5512906536], [0.6708589585,0.5499482579], [0.6718604722,0.5486035059], [0.6728617433,0.5472570744], [0.6738622695,0.5459096417], [0.6748631339,0.5445597505], [0.6758642604,0.5432075016], [0.6768646453,0.5418542519], [0.6778657070,0.5404980833], [0.6788660295,0.5391409141], [0.6798673730,0.5377803551], [0.6808679791,0.5364187950], [0.6818689868,0.5350546849], [0.6828700058,0.5336885552], [0.6838702912,0.5323214249], [0.6848716407,0.5309508358], 
[0.6858722582,0.5295792455], [0.6868733810,0.5282049585], [0.6878737742,0.5268296708], [0.6888743691,0.5254521036], [0.6898754989,0.5240717961], [0.6908762384,0.5226900232], [0.6918770254,0.5213061817], [0.6928770886,0.5199213402], [0.6938772011,0.5185344298], [0.6948778114,0.5171448272], [0.6958789205,0.5157525280], [0.6968793095,0.5143592280], [0.6978801979,0.5129632293], [0.6988809228,0.5115654558], [0.6998811742,0.5101663419], [0.7008807109,0.5087662290], [0.7018824250,0.5073610614], [0.7028834243,0.5059548918], [0.7038844436,0.5045466901], [0.7048847511,0.5031374877], [0.7058852015,0.5017260823], [0.7068859163,0.5003123013], [0.7078859228,0.4988975203], [0.7088875851,0.4974783917], [0.7098885397,0.4960582612], [0.7108888828,0.4946369963], [0.7118903868,0.4932120771], [0.7128911864,0.4917861563], [0.7138919825,0.4903582373], [0.7148930803,0.4889278837], [0.7158942397,0.4874954376], [0.7168953773,0.4860610180], [0.7178958158,0.4846255976], [0.7188962346,0.4831882039], [0.7198965507,0.4817489563], [0.7208968372,0.4803077501], [0.7218969152,0.4788648438], [0.7228980439,0.4774184181], [0.7238984794,0.4759709916], [0.7248985473,0.4745220961], [0.7258985725,0.4730712625], [0.7268995657,0.4716170214], [0.7278998696,0.4701617798], [0.7289000165,0.4687047659], [0.7299007776,0.4672448548], [0.7309013965,0.4657831485], [0.7319013305,0.4643204424], [0.7329017974,0.4628549554], [0.7339029342,0.4613864831], [0.7349033882,0.4599170100], [0.7359041834,0.4584450328], [0.7369050893,0.4569708893], [0.7379053155,0.4554957453], [0.7389060278,0.4540178821], [0.7399060623,0.4525390189], [0.7409068101,0.4510570987], [0.7419084637,0.4495718315], [0.7429094411,0.4480855623], [0.7439105660,0.4465970698], [0.7449110172,0.4451075765], [0.7459121306,0.4436150934], [0.7469125722,0.4421216095], [0.7479130782,0.4406260275], [0.7489143233,0.4391273363], [0.7499154898,0.4376267582], [0.7509159879,0.4361251791], [0.7519164094,0.4346217133], [0.7529177058,0.4331149283], 
[0.7539191070,0.4316059801], [0.7549198431,0.4300960305], [0.7559213352,0.4285829350], [0.7569224886,0.4270683463], [0.7579229798,0.4255527567], [0.7589246123,0.4240334328], [0.7599255838,0.4225131070], [0.7609265638,0.4209907644], [0.7619278812,0.4194659038], [0.7629290969,0.4179391932], [0.7639296555,0.4164114814], [0.7649301145,0.4148819199], [0.7659303794,0.4133506538], [0.7669302055,0.4118180599], [0.7679308016,0.4102822839], [0.7689307459,0.4087455081], [0.7699310321,0.4072062058], [0.7709329274,0.4056624214], [0.7719341717,0.4041176345], [0.7729352533,0.4025710942], [0.7739363623,0.4010225071], [0.7749368238,0.3994729192], [0.7759377998,0.3979205308], [0.7769383207,0.3963668458], [0.7779381974,0.3948121611], [0.7789392652,0.3932536211], [0.7799405976,0.3916926642], [0.7809414697,0.3901304209], [0.7819417004,0.3885671771], [0.7829428248,0.3870005331], [0.7839433091,0.3854328881], [0.7849447045,0.3838618109], [0.7859461881,0.3822885894], [0.7869470336,0.3807143663], [0.7879475411,0.3791386725], [0.7889477963,0.3775613747], [0.7899486202,0.3759811774], [0.7909491668,0.3743994155], [0.7919490806,0.3728166538], [0.7929497720,0.3712306591], [0.7939498320,0.3696436642], [0.7949503135,0.3680539990], [0.7959503011,0.3664631181], [0.7969515721,0.3648681917], [0.7979528927,0.3632711811], [0.7989535850,0.3616731691], [0.7999548002,0.3600723177], [0.8009553888,0.3584704652], [0.8019561006,0.3568664128], [0.8029563271,0.3552611368], [0.8039576438,0.3536521070], [0.8049589254,0.3520411284], [0.8059595843,0.3504291485], [0.8069609103,0.3488140893], [0.8079616150,0.3471980287], [0.8089628692,0.3455790762], [0.8099635779,0.3439590025], [0.8109647699,0.3423361420], [0.8119658521,0.3407114550], [0.8129663173,0.3390857670], [0.8139677166,0.3374565563], [0.8149684999,0.3358263441], [0.8159695655,0.3341936681], [0.8169700171,0.3325599912], [0.8179705438,0.3309241895], [0.8189717626,0.3292852520], [0.8199729459,0.3276443680], [0.8209735179,0.3260024829], 
[0.8219740559,0.3243586514], [0.8229745108,0.3227129546], [0.8239760623,0.3210634487], [0.8249770051,0.3194129410], [0.8259776978,0.3177608427], [0.8269788523,0.3161059779], [0.8279800667,0.3144490092], [0.8289809674,0.3127905557], [0.8299812639,0.3111311016], [0.8309812489,0.3094701639], [0.8319817746,0.3078063267], [0.8329830448,0.3061392471], [0.8339838437,0.3044709485], [0.8349840421,0.3028016494], [0.8359858560,0.3011276486], [0.8369870696,0.2994526454], [0.8379877848,0.2977764725], [0.8389888231,0.2960977546], [0.8399892643,0.2944180358], [0.8409905999,0.2927348108], [0.8419913394,0.2910505844], [0.8429918039,0.2893648186], [0.8439929114,0.2876759655], [0.8449934252,0.2859861113], [0.8459945215,0.2842932696], [0.8469950254,0.2825994269], [0.8479960615,0.2809026797], [0.8489965067,0.2792049316], [0.8499975511,0.2775041632], [0.8509980060,0.2758023938], [0.8519985154,0.2740985298], [0.8529995778,0.2723917202], [0.8540000529,0.2706839097], [0.8550002400,0.2689745897], [0.8560004378,0.2672632505], [0.8570019396,0.2655476756], [0.8580028562,0.2638310988], [0.8590037053,0.2621126343], [0.8600039714,0.2603931692], [0.8610051818,0.2586700768], [0.8620058102,0.2569459832], [0.8630067094,0.2552194195], [0.8640080614,0.2534900698], [0.8650088331,0.2517597186], [0.8660097076,0.2500271864], [0.8670106433,0.2482925444], [0.8680112546,0.2465564619], [0.8690112892,0.2448193792], [0.8700130267,0.2430773334], [0.8710141875,0.2413342852], [0.8720152654,0.2395893770], [0.8730164118,0.2378423446], [0.8740175616,0.2360933021], [0.8750181379,0.2343432583], [0.8760187188,0.2325912043], [0.8770192391,0.2308372543], [0.8780206123,0.2290798044], [0.8790214144,0.2273213529], [0.8800219663,0.2255613389], [0.8810225877,0.2237991999], [0.8820239210,0.2220338029], [0.8830246858,0.2202674043], [0.8840256459,0.2184986575], [0.8850260393,0.2167289098], [0.8860270515,0.2149560640], [0.8870274983,0.2131822173], [0.8880283489,0.2114056515], [0.8890286356,0.2096280851], 
[0.8900302999,0.2078460652], [0.8910318396,0.2060622609], [0.8920328163,0.2042774546], [0.8930340663,0.2024901564], [0.8940347550,0.2007018568], [0.8950360844,0.1989104077], [0.8960368536,0.1971179570], [0.8970377732,0.1953232334], [0.8980385729,0.1935267217], [0.8990397954,0.1917274463], [0.9000404604,0.1899271697], [0.9010406568,0.1881257348], [0.9020418368,0.1863205247], [0.9030424612,0.1845143133], [0.9040437450,0.1827049072], [0.9050446075,0.1808942585], [0.9060456802,0.1790812254], [0.9070468502,0.1772660116], [0.9080474676,0.1754497966], [0.9090481832,0.1736314005], [0.9100489200,0.1718109631], [0.9110499684,0.1699879550], [0.9120504668,0.1681639460], [0.9130512773,0.1663373650], [0.9140523715,0.1645082622], [0.9150529174,0.1626781584], [0.9160534651,0.1608460490], [0.9170547161,0.1590106478], [0.9180554204,0.1571742451], [0.9190562956,0.1553355256], [0.9200571762,0.1534947925], [0.9210575125,0.1516530588], [0.9220585205,0.1498080847], [0.9230589853,0.1479621097], [0.9240597622,0.1461135559], [0.9250608477,0.1442624280], [0.9260614803,0.1424101348], [0.9270615722,0.1405568413], [0.9280630850,0.1386989102], [0.9290640575,0.1368399771], [0.9300655418,0.1349780880], [0.9310664869,0.1331151969], [0.9320675835,0.1312500198], [0.9330687497,0.1293827084], [0.9340693788,0.1275143956], [0.9350700730,0.1256439587], [0.9360702317,0.1237725214], [0.9370710634,0.1218978221], [0.9380713607,0.1200221222], [0.9390721206,0.1181435523], [0.9400723473,0.1162639819], [0.9410738335,0.1143800399], [0.9420747868,0.1124950960], [0.9430764485,0.1106068123], [0.9440775782,0.1087175264], [0.9450787494,0.1068261575], [0.9460800707,0.1049324998], [0.9470808622,0.1030378405], [0.9480818340,0.1011408360], [0.9490822775,0.0992428306], [0.9500834795,0.0973413820], [0.9510845102,0.0954382545], [0.9520850141,0.0935341259], [0.9530859384,0.0916271940], [0.9540863371,0.0897192613], [0.9550868011,0.0878092024], [0.9560877695,0.0858961770], [0.9570882140,0.0839821507], 
[0.9580884059,0.0820666065], [0.9590886162,0.0801490263], [0.9600896778,0.0782278105], [0.9610902176,0.0763055936], [0.9620904461,0.0743819735], [0.9630915722,0.0724546236], [0.9640921779,0.0705262724], [0.9650923980,0.0685966633], [0.9660923668,0.0666655389], [0.9670928311,0.0647314561], [0.9680927779,0.0627963734], [0.9690928236,0.0608590993], [0.9700925336,0.0589204763], [0.9710924163,0.0569795191], [0.9720917842,0.0550375631], [0.9730915101,0.0530929130], [0.9740914078,0.0511459293], [0.9750907922,0.0491979469], [0.9760898892,0.0472485283], [0.9770885645,0.0452979371], [0.9780877027,0.0433444458], [0.9790863306,0.0413899573], [0.9800852744,0.0394328550], [0.9810837091,0.0374747557], [0.9820819259,0.0355150907], [0.9830801314,0.0335534553], [0.9840781166,0.0315902604], [0.9850755958,0.0296260706], [0.9860728806,0.0276602742], [0.9870696611,0.0256934842], [0.9880665739,0.0237244456], [0.9890629838,0.0217544141], [0.9900591295,0.0197829200], [0.9910547741,0.0178104347], [0.9920503191,0.0158361644], [0.9930453645,0.0138609040], [0.9940402226,0.0118840359], [0.9950345858,0.0099061731], [0.9960284521,0.0079273226], [0.9970219902,0.0059471511], [0.9980151238,0.0039658127], [0.9990078085,0.0019833986], [1.0000000000,0.0000000000]]././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/pareto_front/zdt3_front.json0000644000076500000240000007123414456461441021600 0ustar00runnerstaff[[0.0000000000,1.0000000000], [0.0001669867,0.9870767898], [0.0003556074,0.9811384730], [0.0005020830,0.9775848727], [0.0005936368,0.9756242656], [0.0008526856,0.9707763811], [0.0009594493,0.9689961054], [0.0015383844,0.9607034357], [0.0020570965,0.9545119258], [0.0026049104,0.9487487404], [0.0030767688,0.9442344352], [0.0034180675,0.9411694253], [0.0036717564,0.9389823762], [0.0038889639,0.9371644873], [0.0042634278,0.9341357353], [0.0045677159,0.9317619051], [0.0048025025,0.9299780777], [0.0049491266,0.9288835705], 
[0.0052231746,0.9268752322], [0.0056348254,0.9239422323], [0.0059965327,0.9214397209], [0.0062303750,0.9198555571], [0.0064301391,0.9185217235], [0.0066199433,0.9172701090], [0.0068214844,0.9159570195], [0.0072291603,0.9133477888], [0.0076741700,0.9105653571], [0.0080639223,0.9081795586], [0.0085268555,0.9054020802], [0.0088785809,0.9033292734], [0.0091366688,0.9018273795], [0.0093681110,0.9004935189], [0.0095528069,0.8994374843], [0.0096573496,0.8988429411], [0.0099784197,0.8970308931], [0.0102322266,0.8956126871], [0.0105317498,0.8939543318], [0.0107778336,0.8926036594], [0.0110849122,0.8909324019], [0.0114248107,0.8890999667], [0.0116816063,0.8877270886], [0.0119375977,0.8863679309], [0.0122450122,0.8847476272], [0.0124563098,0.8836411373], [0.0126694291,0.8825308265], [0.0128683452,0.8814995256], [0.0130583286,0.8805189165], [0.0131986790,0.8797971649], [0.0134103869,0.8787126395], [0.0136429688,0.8775268110], [0.0139332104,0.8760549727], [0.0141327011,0.8750482925], [0.0143068967,0.8741724532], [0.0144956544,0.8732266709], [0.0149293257,0.8710661595], [0.0153483399,0.8689943461], [0.0156313482,0.8676032942], [0.0158346655,0.8666079009], [0.0162010255,0.8648223436], [0.0166327204,0.8627311017], [0.0170537110,0.8607043403], [0.0173191595,0.8594325272], [0.0175344871,0.8584042124], [0.0177394099,0.8574283093], [0.0181096151,0.8556718157], [0.0185920954,0.8533947644], [0.0189556499,0.8516876796], [0.0192777942,0.8501810417], [0.0194690215,0.8492892820], [0.0198037163,0.8477330068], [0.0202974665,0.8454473371], [0.0206546959,0.8438009602], [0.0209831653,0.8422923698], [0.0212487014,0.8410763951], [0.0215018774,0.8399199344], [0.0217359231,0.8388533410], [0.0219358993,0.8379438680], [0.0221884994,0.8367974771], [0.0224118827,0.8357858922], [0.0225142106,0.8353231873], [0.0227053361,0.8344600970], [0.0228607177,0.8337595036], [0.0229579167,0.8333217372], [0.0230225099,0.8330310289], [0.0231956360,0.8322526674], [0.0232116861,0.8321805673], 
[0.0235123826,0.8308316289], [0.0238751954,0.8292086653], [0.0242269208,0.8276400578], [0.0245668626,0.8261283879], [0.0250313925,0.8240695616], [0.0254135798,0.8223815507], [0.0256564042,0.8213117880], [0.0258458064,0.8204788343], [0.0259543603,0.8200020095], [0.0261712464,0.8190505805], [0.0262662654,0.8186342762], [0.0265035580,0.8175960166], [0.0269069841,0.8158353717], [0.0272859376,0.8141866940], [0.0276270372,0.8127069664], [0.0279624818,0.8112557002], [0.0281386232,0.8104952023], [0.0281838469,0.8103001203], [0.0288243220,0.8075448957], [0.0293430341,0.8053239043], [0.0297674992,0.8035134265], [0.0301836989,0.8017443297], [0.0305296931,0.8002782929], [0.0307107548,0.7995127936], [0.0308331559,0.7989959623], [0.0312153919,0.7973854390], [0.0314860627,0.7962481634], [0.0317119832,0.7953009439], [0.0320680775,0.7938117197], [0.0323274831,0.7927297883], [0.0325584125,0.7917687206], [0.0327545581,0.7909539735], [0.0329089323,0.7903137492], [0.0330877498,0.7895732754], [0.0332574191,0.7888718023], [0.0334145865,0.7882229944], [0.0337083799,0.7870127140], [0.0339404353,0.7860591181], [0.0341096686,0.7853650013], [0.0344325575,0.7840437769], [0.0347528391,0.7827373008], [0.0349601076,0.7818940097], [0.0353433487,0.7803393361], [0.0355824439,0.7793724529], [0.0358127931,0.7784431724], [0.0360432832,0.7775155381], [0.0362240647,0.7767895244], [0.0362597010,0.7766465731], [0.0364984920,0.7756900814], [0.0366765299,0.7749785273], [0.0370172041,0.7736207936], [0.0375036794,0.7716907923], [0.0378698896,0.7702448701], [0.0382213553,0.7688628704], [0.0385632853,0.7675238068], [0.0388163408,0.7665362934], [0.0389235753,0.7661187323], [0.0392235353,0.7649536098], [0.0394193855,0.7641951960], [0.0397264145,0.7630099790], [0.0398992225,0.7623449129], [0.0400762209,0.7616652420], [0.0402551451,0.7609797537], [0.0405720649,0.7597695091], [0.0408019665,0.7588947413], [0.0409289064,0.7584128930], [0.0411088328,0.7577313321], [0.0411312365,0.7576465840], 
[0.0414266470,0.7565315470], [0.0417815920,0.7551978412], [0.0420921689,0.7540363331], [0.0423322940,0.7531418556], [0.0424672908,0.7526403583], [0.0427556349,0.7515725323], [0.0429379854,0.7508995993], [0.0433199764,0.7494959371], [0.0437319122,0.7479914750], [0.0440034342,0.7470051430], [0.0441726619,0.7463925646], [0.0445044030,0.7451965726], [0.0447799423,0.7442081408], [0.0450253475,0.7433316240], [0.0453463382,0.7421906230], [0.0456599823,0.7410818081], [0.0460450198,0.7397289095], [0.0463325491,0.7387246698], [0.0465637319,0.7379210202], [0.0468612153,0.7368919134], [0.0471432534,0.7359215072], [0.0474087176,0.7350128587], [0.0475834041,0.7344174548], [0.0476606316,0.7341548746], [0.0479047065,0.7333276034], [0.0480227605,0.7329288973], [0.0482432416,0.7321867712], [0.0486030764,0.7309826655], [0.0488902048,0.7300282123], [0.0490264087,0.7295774426], [0.0491226194,0.7292598080], [0.0492906317,0.7287066721], [0.0494557620,0.7281649519], [0.0494762709,0.7280978048], [0.0496805787,0.7274305171], [0.0499937537,0.7264134172], [0.0501414608,0.7259361441], [0.0503484317,0.7252700226], [0.0504477231,0.7249515612], [0.0507211686,0.7240782418], [0.0509941463,0.7232118873], [0.0514111655,0.7218990444], [0.0516993609,0.7209993629], [0.0519333555,0.7202734989], [0.0520138186,0.7200248592], [0.0523655440,0.7189438029], [0.0527094385,0.7178960327], [0.0530505014,0.7168659813], [0.0533247669,0.7160442935], [0.0535522030,0.7153674210], [0.0536961857,0.7149410436], [0.0539226786,0.7142736870], [0.0542729460,0.7132497722], [0.0545718753,0.7123838129], [0.0548422347,0.7116069183], [0.0549807337,0.7112112661], [0.0552522995,0.7104400938], [0.0554245608,0.7099541031], [0.0554642028,0.7098426148], [0.0557355095,0.7090831390], [0.0559251621,0.7085559256], [0.0562772464,0.7075852652], [0.0567959585,0.7061745652], [0.0572856348,0.7048641986], [0.0576507424,0.7039008251], [0.0578722749,0.7033220194], [0.0579826175,0.7030353467], [0.0583233699,0.7021569099], 
[0.0585107057,0.7016783979], [0.0586683163,0.7012782580], [0.0589152172,0.7006559391], [0.0591712059,0.7000165547], [0.0593313410,0.6996196213], [0.0595679261,0.6990374856], [0.0596879886,0.6987440297], [0.0597919273,0.6984910564], [0.0598532005,0.6983423927], [0.0601420422,0.6976462759], [0.0603736874,0.6970936087], [0.0605760782,0.6966148402], [0.0607647336,0.6961720225], [0.0608835727,0.6958947984], [0.0611481301,0.6952824352], [0.0613933597,0.6947207334], [0.0616252405,0.6941948726], [0.0617114801,0.6940006080], [0.0620790585,0.6931805939], [0.0623719468,0.6925365176], [0.0625860127,0.6920710273], [0.0630987308,0.6909742466], [0.0635061169,0.6901211399], [0.0637844296,0.6895477285], [0.0640918984,0.6889231656], [0.0644701284,0.6881677698], [0.0647023437,0.6877110775], [0.0650249530,0.6870855944], [0.0652330033,0.6866877833], [0.0653423543,0.6864804475], [0.0654488482,0.6862796926], [0.0656172300,0.6859646177], [0.0656567874,0.6858910156], [0.0658327234,0.6855655908], [0.0660074892,0.6852454527], [0.0661995406,0.6848972472], [0.0663074061,0.6847033331], [0.0665094730,0.6843432793], [0.0667699400,0.6838853547], [0.0671436157,0.6832406132], [0.0673621585,0.6828702314], [0.0676548746,0.6823819064], [0.0680478574,0.6817403361], [0.0683270518,0.6812943328], [0.0687335562,0.6806595643], [0.0690300965,0.6802074678], [0.0693293144,0.6797606892], [0.0695505216,0.6794364778], [0.0697532285,0.6791439326], [0.0700261347,0.6787569641], [0.0703917670,0.6782509315], [0.0707729007,0.6777386210], [0.0712916128,0.6770663438], [0.0716774616,0.6765849831], [0.0719773117,0.6762219633], [0.0722712346,0.6758755200], [0.0724960238,0.6756168535], [0.0728875441,0.6751793644], [0.0732364471,0.6748034735], [0.0735861732,0.6744399355], [0.0738379780,0.6741864037], [0.0740166562,0.6740106769], [0.0742215332,0.6738134542], [0.0745353683,0.6735201959], [0.0748324729,0.6732524483], [0.0752210672,0.6729167612], [0.0755965740,0.6726080145], [0.0758688632,0.6723937510], 
[0.0760783642,0.6722343998], [0.0762090398,0.6721374295], [0.0762720685,0.6720913239], [0.0764187907,0.6719856749], [0.0765747129,0.6718759756], [0.0766011447,0.6718576424], [0.0767497593,0.6717559818], [0.0768078496,0.6717168998], [0.0770485469,0.6715588866], [0.0774273984,0.6713229792], [0.0776993770,0.6711632724], [0.0782800840,0.6708492640], [0.0788525184,0.6705756699], [0.0792997562,0.6703867107], [0.0797034248,0.6702348093], [0.0799846997,0.6701394109], [0.0801524418,0.6700865978], [0.0804359408,0.6700042596], [0.0806711539,0.6699425386], [0.0809076056,0.6698865116], [0.0811192571,0.6698414733], [0.0814211472,0.6697855776], [0.0816908262,0.6697439295], [0.0819181881,0.6697148794], [0.0823765250,0.6696731419], [0.0828473667,0.6696536196], [0.1824747079,0.6682957828], [0.1827373042,0.6668379181], [0.1829934200,0.6654064856], [0.1832008830,0.6642401022], [0.1835325807,0.6623625566], [0.1840130923,0.6596151795], [0.1844136563,0.6573003083], [0.1846547945,0.6558960797], [0.1850327646,0.6536790223], [0.1855514767,0.6506049186], [0.1859032316,0.6484997804], [0.1862571443,0.6463652113], [0.1865781222,0.6444151024], [0.1867381357,0.6434379418], [0.1870567377,0.6414825070], [0.1873506813,0.6396669210], [0.1875908212,0.6381755498], [0.1877572846,0.6371374939], [0.1878386950,0.6366285640], [0.1882765200,0.6338774632], [0.1888441256,0.6302759901], [0.1892961923,0.6273799277], [0.1896479177,0.6251099884], [0.1899216837,0.6233332085], [0.1901596799,0.6217815704], [0.1905021558,0.6195374578], [0.1907437681,0.6179463286], [0.1908541931,0.6172169635], [0.1910015634,0.6162414722], [0.1910592217,0.6158591627], [0.1913795526,0.6137285628], [0.1916022091,0.6122410756], [0.1918542490,0.6105508918], [0.1920630625,0.6091454935], [0.1922707349,0.6077432472], [0.1923574812,0.6071561924], [0.1925399478,0.6059188204], [0.1927127826,0.6047436241], [0.1932256466,0.6012386596], [0.1937303630,0.5977640158], [0.1944123056,0.5930305273], [0.1949310177,0.5894011703], 
[0.1952907647,0.5868698611], [0.1955428228,0.5850895151], [0.1957338951,0.5837362693], [0.1959506900,0.5821970824], [0.1961476249,0.5807954737], [0.1963512801,0.5793426559], [0.1965346412,0.5780317096], [0.1968033756,0.5761054945], [0.1970607366,0.5742554420], [0.1974781249,0.5712441366], [0.1978230478,0.5687457361], [0.1983417599,0.5649722025], [0.1987585410,0.5619264912], [0.1990311925,0.5599276545], [0.1992358082,0.5584243672], [0.1993614322,0.5575000768], [0.1995662000,0.5559913203], [0.1997751609,0.5544489585], [0.1999667287,0.5530326194], [0.2002564937,0.5508860807], [0.2003811045,0.5499614639], [0.2006038409,0.5483065276], [0.2010021822,0.5453399268], [0.2012337900,0.5436111052], [0.2015290071,0.5414034185], [0.2018860370,0.5387276261], [0.2020864756,0.5372227120], [0.2024146698,0.5347545517], [0.2027721744,0.5320604855], [0.2029652911,0.5306029304], [0.2036494328,0.5254273064], [0.2037918467,0.5243476959], [0.2040759367,0.5221919202], [0.2042545974,0.5208347687], [0.2043734498,0.5199313558], [0.2046445322,0.5178691539], [0.2048561060,0.5162581076], [0.2054073512,0.5120547358], [0.2054972178,0.5113687330], [0.2056650614,0.5100869645], [0.2060618135,0.5070545512], [0.2063499033,0.5048506040], [0.2063806988,0.5046149187], [0.2065289984,0.5034797026], [0.2065386776,0.5034055960], [0.2067652829,0.5016701953], [0.2068756268,0.5008248564], [0.2072025889,0.4983189841], [0.2079865360,0.4923056583], [0.2080552744,0.4917781338], [0.2082324576,0.4904182206], [0.2082767918,0.4900779188], [0.2084572594,0.4886925802], [0.2084982003,0.4883782832], [0.2089079600,0.4852323538], [0.2090520981,0.4841256760], [0.2096320525,0.4796731020], [0.2097606455,0.4786859815], [0.2103218452,0.4743792398], [0.2105433294,0.4726802577], [0.2106133311,0.4721433877], [0.2112569439,0.4672101715], [0.2114660166,0.4656089731], [0.2123187022,0.4590871651], [0.2131713877,0.4525824143], [0.2140240733,0.4460991906], [0.2141744858,0.4449581302], [0.2147429805,0.4406531527], 
[0.2148767588,0.4396419800], [0.2150226572,0.4385400544], [0.2151610011,0.4374960306], [0.2155880417,0.4342787333], [0.2156715366,0.4336506749], [0.2157294444,0.4332152810], [0.2157694979,0.4329142234], [0.2158193744,0.4325394410], [0.2164019203,0.4281712993], [0.2165821300,0.4268236007], [0.2165868632,0.4267882264], [0.2166382981,0.4264039060], [0.2166716890,0.4261544874], [0.2168971922,0.4244716742], [0.2174348155,0.4204714516], [0.2177286617,0.4182923973], [0.2182875011,0.4141633474], [0.2191401866,0.4079037996], [0.2192092469,0.4073990920], [0.2193259541,0.4065469676], [0.2197739050,0.4032858086], [0.2198406810,0.4028009851], [0.2199928722,0.4016973140], [0.2200434960,0.4013306013], [0.2203664505,0.3989959983], [0.2208455577,0.3955483864], [0.2211857673,0.3931121154], [0.2216982433,0.3894614995], [0.2224841626,0.3839100178], [0.2225509288,0.3834411184], [0.2230708562,0.3798047124], [0.2234036144,0.3774916878], [0.2236749044,0.3756144141], [0.2241705672,0.3722046966], [0.2242562999,0.3716176274], [0.2247012462,0.3685838327], [0.2251089855,0.3658233284], [0.2254846966,0.3632966806], [0.2256380391,0.3622702351], [0.2259616710,0.3601131499], [0.2268143566,0.3544914153], [0.2269037613,0.3539072694], [0.2276670421,0.3489624078], [0.2281103951,0.3461256564], [0.2284526376,0.3439541530], [0.2285197277,0.3435303676], [0.2293200719,0.3385237273], [0.2293396461,0.3384024239], [0.2293724132,0.3381994876], [0.2294968854,0.3374300133], [0.2296800171,0.3363020183], [0.2297577720,0.3358245769], [0.2300275251,0.3341751270], [0.2302250988,0.3329739099], [0.2304780201,0.3314447659], [0.2309139435,0.3288321015], [0.2310777843,0.3278577220], [0.2316447522,0.3245183980], [0.2317466886,0.3239234228], [0.2319304699,0.3228549533], [0.2322646184,0.3209262765], [0.2322675102,0.3209096643], [0.2326165189,0.3189148858], [0.2327831554,0.3179695717], [0.2334122828,0.3144425877], [0.2336185363,0.3133009319], [0.2336358410,0.3132054792], [0.2344885265,0.3085665094], 
[0.2350785884,0.3054315538], [0.2353412121,0.3040564232], [0.2353574766,0.3039716723], [0.2357106365,0.3021433829], [0.2361494584,0.2999037148], [0.2361938976,0.2996789056], [0.2362157389,0.2995685501], [0.2362451566,0.2994200560], [0.2365222345,0.2980294084], [0.2365486157,0.2978977571], [0.2370465832,0.2954375624], [0.2372412379,0.2944887902], [0.2373547984,0.2939386609], [0.2374943975,0.2932658180], [0.2378992687,0.2913359166], [0.2387519543,0.2873774051], [0.2391475268,0.2855905375], [0.2395096834,0.2839825392], [0.2395801353,0.2836728516], [0.2396046398,0.2835653755], [0.2404573254,0.2799030828], [0.2411490497,0.2770443042], [0.2413100109,0.2763936863], [0.2414336135,0.2758978465], [0.2420882410,0.2733267747], [0.2421626965,0.2730402465], [0.2425730674,0.2714828065], [0.2430153820,0.2698457218], [0.2432027902,0.2691651976], [0.2433419448,0.2686649669], [0.2433651935,0.2685818156], [0.2433674014,0.2685739249], [0.2437105387,0.2673609416], [0.2437217900,0.2673216175], [0.2437320374,0.2672858265], [0.2438680676,0.2668129659], [0.2439830416,0.2664165644], [0.2447207531,0.2639447247], [0.2448928051,0.2633861641], [0.2449517244,0.2631964526], [0.2451769624,0.2624786152], [0.2452816039,0.2621491187], [0.2453810174,0.2618384393], [0.2455734387,0.2612436334], [0.2455736491,0.2612429878], [0.2458092287,0.2605265523], [0.2458778269,0.2603203767], [0.2463140002,0.2590353079], [0.2464141094,0.2587466935], [0.2464261242,0.2587122139], [0.2472788098,0.2563528720], [0.2473870886,0.2560657000], [0.2475759375,0.2555715859], [0.2480189656,0.2544461828], [0.2480891828,0.2542721739], [0.2481314953,0.2541678948], [0.2482020169,0.2539950606], [0.2486844706,0.2528451466], [0.2489841809,0.2521594483], [0.2492553014,0.2515581637], [0.2493371796,0.2513801353], [0.2493457504,0.2513615955], [0.2494104516,0.2512222219], [0.2494378405,0.2511635346], [0.2496888176,0.2506343931], [0.2497552029,0.2504970399], [0.2498368664,0.2503295748], [0.2498807861,0.2502401945], 
[0.2500901454,0.2498207203], [0.2505251969,0.2489839818], [0.2506895520,0.2486801906], [0.2510571995,0.2480251737], [0.2515422375,0.2472130838], [0.2516620739,0.2470216019], [0.2518406640,0.2467429910], [0.2522596072,0.2461212008], [0.2523115515,0.2460472180], [0.2523949231,0.2459299122], [0.2524376295,0.2458705099], [0.2525663442,0.2456942908], [0.2532476086,0.2448322009], [0.2535325365,0.2445069822], [0.2535639434,0.2444724130], [0.2535644347,0.2444718742], [0.2539076627,0.2441107097], [0.2541002942,0.2439213408], [0.2546621954,0.2434238529], [0.2549529797,0.2431985863], [0.2550488590,0.2431291323], [0.2551936932,0.2430287561], [0.2553742605,0.2429112738], [0.2558056653,0.2426650537], [0.2559100389,0.2426127938], [0.2564216354,0.2423979110], [0.2565766752,0.2423463457], [0.2566583508,0.2423217197], [0.2566588832,0.2423215649], [0.2570180981,0.2422341244], [0.2572989269,0.2421894148], [0.2575110364,0.2421694194], [0.4098582019,0.2348828551], [0.4100040285,0.2329368690], [0.4101417500,0.2311003352], [0.4101986798,0.2303415450], [0.4104141778,0.2274713125], [0.4108833219,0.2212344319], [0.4109944356,0.2197597237], [0.4111714943,0.2174118150], [0.4112404293,0.2164983809], [0.4113085988,0.2155954751], [0.4114829782,0.2132875875], [0.4118471211,0.2084766701], [0.4120396284,0.2059381324], [0.4126998067,0.1972592212], [0.4135524922,0.1861154077], [0.4139115213,0.1814471928], [0.4140105118,0.1801627115], [0.4141069858,0.1789119971], [0.4144051778,0.1750532381], [0.4151431065,0.1655519011], [0.4151986117,0.1648400932], [0.4152578633,0.1640806927], [0.4156037319,0.1596573475], [0.4156466759,0.1591092789], [0.4157214432,0.1581556824], [0.4158089490,0.1570406116], [0.4161105489,0.1532057175], [0.4165466837,0.1476836195], [0.4169632344,0.1424362181], [0.4178159200,0.1317800534], [0.4183217051,0.1255159076], [0.4186686055,0.1212450299], [0.4186787865,0.1211200063], [0.4186814505,0.1210872954], [0.4187573183,0.1201562445], [0.4195212911,0.1108388954], 
[0.4196912985,0.1087802178], [0.4197446990,0.1081347040], [0.4203739766,0.1005693330], [0.4212266622,0.0904439550], [0.4220793477,0.0804702970], [0.4228425261,0.0716783516], [0.4229320333,0.0706558123], [0.4229376574,0.0705916230], [0.4230191873,0.0696619194], [0.4231561604,0.0681034315], [0.4237847188,0.0610078653], [0.4240658757,0.0578643885], [0.4245002961,0.0530451040], [0.4246374044,0.0515337262], [0.4246507852,0.0513864762], [0.4254169068,0.0430308882], [0.4254393427,0.0427884507], [0.4254579173,0.0425878348], [0.4254900899,0.0422405649], [0.4259170918,0.0376570251], [0.4263427755,0.0331354455], [0.4271954610,0.0242253203], [0.4274585653,0.0215164472], [0.4277141542,0.0189035410], [0.4280481466,0.0155170246], [0.4287973840,0.0080371276], [0.4289008321,0.0070172706], [0.4290926681,0.0051344203], [0.4291478856,0.0045944951], [0.4292687761,0.0034155882], [0.4297050276,-0.0008021133], [0.4297535177,-0.0012673577], [0.4298439841,-0.0021334331], [0.4299046629,-0.0027129362], [0.4300707096,-0.0042929680], [0.4306062032,-0.0093304099], [0.4307991437,-0.0111234744], [0.4309630779,-0.0126377513], [0.4310458812,-0.0133993772], [0.4314191740,-0.0168057931], [0.4314588888,-0.0171655750], [0.4318711408,-0.0208701208], [0.4323115744,-0.0247666858], [0.4323640925,-0.0252270614], [0.4323778282,-0.0253473187], [0.4324984386,-0.0264005891], [0.4326632565,-0.0278321036], [0.4331642599,-0.0321277250], [0.4332164032,-0.0325699452], [0.4332559566,-0.0329047785], [0.4332908224,-0.0331994892], [0.4333819941,-0.0339681867], [0.4334426579,-0.0344780954], [0.4337340703,-0.0369100367], [0.4338258496,-0.0376699375], [0.4339001649,-0.0382831180], [0.4340169455,-0.0392428295], [0.4345476674,-0.0435446602], [0.4346628819,-0.0444655343], [0.4348478610,-0.0459342397], [0.4348696310,-0.0461062958], [0.4353749557,-0.0500527542], [0.4355632364,-0.0514998544], [0.4357223166,-0.0527125847], [0.4362539063,-0.0566986161], [0.4363378884,-0.0573189226], [0.4363807794,-0.0576347267], 
[0.4364646315,-0.0582501762], [0.4365750021,-0.0590563260], [0.4374276877,-0.0651323234], [0.4374529676,-0.0653083224], [0.4378940237,-0.0683402368], [0.4380374175,-0.0693101147], [0.4382803732,-0.0709355587], [0.4383534569,-0.0714201053], [0.4383787512,-0.0715873315], [0.4387398436,-0.0739478443], [0.4391330588,-0.0764611964], [0.4391332880,-0.0764626443], [0.4393852604,-0.0780416508], [0.4393888542,-0.0780639924], [0.4396288252,-0.0795444322], [0.4397822029,-0.0804788439], [0.4398085693,-0.0806385438], [0.4399857443,-0.0817045878], [0.4401025811,-0.0824008119], [0.4404724574,-0.0845692636], [0.4408384299,-0.0866612754], [0.4411810648,-0.0885713055], [0.4415263797,-0.0904484668], [0.4415561080,-0.0906078198], [0.4416911154,-0.0913269970], [0.4416966088,-0.0913561036], [0.4417812955,-0.0918032620], [0.4417851622,-0.0918236092], [0.4421658579,-0.0937970853], [0.4422985945,-0.0944712620], [0.4423032247,-0.0944946493], [0.4424199989,-0.0950815637], [0.4424415629,-0.0951893336], [0.4425438010,-0.0956976891], [0.4426833911,-0.0963848289], [0.4427447461,-0.0966843135], [0.4433964865,-0.0997694912], [0.4434666353,-0.1000910500], [0.4437950990,-0.1015694125], [0.4438830561,-0.1019576390], [0.4442000924,-0.1033300618], [0.4442491721,-0.1035387491], [0.4447257997,-0.1055125495], [0.4451018576,-0.1070020186], [0.4458893498,-0.1099259077], [0.4459545432,-0.1101560686], [0.4461220094,-0.1107389276], [0.4461342828,-0.1107811704], [0.4462568346,-0.1111994144], [0.4468072287,-0.1129978846], [0.4468862824,-0.1132454372], [0.4470300929,-0.1136888213], [0.4470858412,-0.1138582850], [0.4472320330,-0.1142962628], [0.4472735211,-0.1144188641], [0.4476087225,-0.1153819101], [0.4476167235,-0.1154042984], [0.4476599143,-0.1155246713], [0.4483596140,-0.1173609793], [0.4485125998,-0.1177338558], [0.4487221788,-0.1182279460], [0.4491526219,-0.1191819743], [0.4493652854,-0.1196230903], [0.4496493598,-0.1201810691], [0.4497353421,-0.1203428969], [0.4497536984,-0.1203770201], 
[0.4498039672,-0.1204697004], [0.4501515607,-0.1210798084], [0.4502179709,-0.1211902547], [0.4508104132,-0.1220884801], [0.4508762980,-0.1221786821], [0.4510706565,-0.1224334587], [0.4513843057,-0.1228089196], [0.4517361098,-0.1231775371], [0.4519233420,-0.1233510443], [0.4522228486,-0.1235958014], [0.4524547621,-0.1237575671], [0.4525470994,-0.1238152261], [0.4527760276,-0.1239415880], [0.4528032170,-0.1239550242], [0.4531242504,-0.1240884232], [0.4531581815,-0.1240998012], [0.4531961700,-0.1241119223], [0.4536287131,-0.1242039021], [0.6184020756,-0.1243106190], [0.6184454865,-0.1250679966], [0.6190497100,-0.1355538383], [0.6196671059,-0.1461578293], [0.6199023956,-0.1501687797], [0.6199587998,-0.1511277725], [0.6207550811,-0.1645600367], [0.6207934439,-0.1652020771], [0.6209983076,-0.1686226116], [0.6212508782,-0.1728208152], [0.6216077667,-0.1787169427], [0.6219727086,-0.1847017514], [0.6219876207,-0.1849453356], [0.6224604522,-0.1926289728], [0.6225746064,-0.1944723125], [0.6233131378,-0.2062857523], [0.6239825747,-0.2168220720], [0.6239839713,-0.2168438789], [0.6240710760,-0.2182024938], [0.6240967403,-0.2186022478], [0.6241658233,-0.2196770641], [0.6243192383,-0.2220574690], [0.6245378035,-0.2254332346], [0.6246631475,-0.2273609009], [0.6250185089,-0.2327928572], [0.6252210187,-0.2358662376], [0.6252555762,-0.2363890811], [0.6256415168,-0.2421959227], [0.6256591138,-0.2424592645], [0.6258711944,-0.2456232543], [0.6259532367,-0.2468423375], [0.6267238800,-0.2581585591], [0.6275765655,-0.2703892646], [0.6277270264,-0.2725151175], [0.6284292511,-0.2823060601], [0.6286961317,-0.2859699288], [0.6287325749,-0.2864677680], [0.6292819366,-0.2938998386], [0.6299394401,-0.3026135805], [0.6301346222,-0.3051617041], [0.6304099012,-0.3087251433], [0.6307546121,-0.3131368690], [0.6309873077,-0.3160829787], [0.6310487510,-0.3168565668], [0.6313051128,-0.3200646101], [0.6318399933,-0.3266552094], [0.6318999444,-0.3273851798], [0.6321276444,-0.3301415343], 
[0.6325151072,-0.3347727595], [0.6326926788,-0.3368701750], [0.6332478210,-0.3433248264], [0.6334204619,-0.3453002275], [0.6335453644,-0.3467198926], [0.6336573460,-0.3479858928], [0.6336971764,-0.3484346386], [0.6339773887,-0.3515684940], [0.6343980499,-0.3561966244], [0.6344388464,-0.3566405571], [0.6348607985,-0.3611808023], [0.6352507355,-0.3652928835], [0.6356154423,-0.3690655298], [0.6361034210,-0.3740014404], [0.6364452642,-0.3773822703], [0.6368583045,-0.3813819896], [0.6369561066,-0.3823153290], [0.6369705750,-0.3824529550], [0.6370845178,-0.3835327566], [0.6375080245,-0.3874830959], [0.6378087921,-0.3902278522], [0.6382020221,-0.3937398281], [0.6382194765,-0.3938936953], [0.6386614777,-0.3977325875], [0.6387380999,-0.3983867820], [0.6389638372,-0.4002946383], [0.6391918052,-0.4021917537], [0.6395141632,-0.4048233927], [0.6403668488,-0.4114944107], [0.6404651097,-0.4122359285], [0.6407627079,-0.4144471837], [0.6407638189,-0.4144553413], [0.6412195343,-0.4177400749], [0.6420292062,-0.4232721666], [0.6420722199,-0.4235551138], [0.6422532320,-0.4247336695], [0.6424991181,-0.4263030876], [0.6429249054,-0.4289345559], [0.6437775910,-0.4338737337], [0.6437796731,-0.4338852516], [0.6438234593,-0.4341268612], [0.6446302765,-0.4383682882], [0.6448007468,-0.4392131548], [0.6449820746,-0.4400921146], [0.6454829621,-0.4424141729], [0.6460598213,-0.4448949556], [0.6463356476,-0.4460076574], [0.6464264773,-0.4463636359], [0.6471883332,-0.4491453310], [0.6475091364,-0.4502071611], [0.6478721399,-0.4513301403], [0.6480410188,-0.4518241062], [0.6483680612,-0.4527291844], [0.6486736402,-0.4535133600], [0.6488937043,-0.4540412215], [0.6490445958,-0.4543852976], [0.6490769649,-0.4544572142], [0.6496691777,-0.4556546720], [0.6497141963,-0.4557365144], [0.6497463899,-0.4557942443], [0.6502262359,-0.4565758810], [0.6504014060,-0.4568243693], [0.6505990754,-0.4570810740], [0.6508915100,-0.4574147149], [0.6512142266,-0.4577189410], [0.6513561638,-0.4578314766], 
[0.6514517610,-0.4578999436], [0.6518148357,-0.4581062052], [0.6523044465,-0.4582494224], [0.8236942423,-0.4656399170], [0.8244611317,-0.4810280805], [0.8245469278,-0.4827307625], [0.8247849896,-0.4874350785], [0.8249379580,-0.4904421186], [0.8252764784,-0.4970524946], [0.8253052504,-0.4976115054], [0.8253996134,-0.4994417560], [0.8254900917,-0.5011921484], [0.8256108952,-0.5035222955], [0.8259901515,-0.5107859382], [0.8262522989,-0.5157603793], [0.8263781215,-0.5181343938], [0.8263998990,-0.5185443940], [0.8264879094,-0.5201986387], [0.8266323925,-0.5229049146], [0.8266609167,-0.5234378040], [0.8267493168,-0.5250863775], [0.8269624824,-0.5290434770], [0.8271049845,-0.5316743727], [0.8271058621,-0.5316905391], [0.8271631321,-0.5327445643], [0.8279576700,-0.5471717444], [0.8286341178,-0.5591619086], [0.8286675882,-0.5597480764], [0.8288103556,-0.5622407796], [0.8289117204,-0.5640031175], [0.8293682480,-0.5718628113], [0.8296630411,-0.5768700500], [0.8302180569,-0.5861507110], [0.8305157267,-0.5910484223], [0.8308126715,-0.5958781397], [0.8310499835,-0.5996973866], [0.8313684122,-0.6047650671], [0.8322210978,-0.6180094673], [0.8326425958,-0.6243788564], [0.8330737833,-0.6307714262], [0.8331690621,-0.6321670428], [0.8335579400,-0.6377991851], [0.8335962178,-0.6383479833], [0.8339264689,-0.6430410760], [0.8343756460,-0.6493032557], [0.8345459403,-0.6516407149], [0.8347791544,-0.6548088851], [0.8349253950,-0.6567760459], [0.8356318400,-0.6660656663], [0.8357570559,-0.6676751432], [0.8364845255,-0.6768025834], [0.8368088552,-0.6807482529], [0.8368332286,-0.6810416698], [0.8370031084,-0.6830746718], [0.8371853783,-0.6852324055], [0.8373372111,-0.6870111593], [0.8379621683,-0.6941530781], [0.8380928583,-0.6956098636], [0.8381898966,-0.6966832817], [0.8382094851,-0.6968991112], [0.8387351664,-0.7025835533], [0.8387982285,-0.7032514909], [0.8390425822,-0.7058112109], [0.8393742696,-0.7092132857], [0.8397852094,-0.7133118602], [0.8398312494,-0.7137629928], 
[0.8398952677,-0.7143875855], [0.8400976177,-0.7163410894], [0.8407479533,-0.7224054283], [0.8410918730,-0.7254797457], [0.8412498750,-0.7268611987], [0.8416006388,-0.7298581530], [0.8417113899,-0.7307843563], [0.8424533244,-0.7367395687], [0.8433060099,-0.7430438863], [0.8438789555,-0.7469530783], [0.8441586955,-0.7487657224], [0.8442747933,-0.7494994499], [0.8448382803,-0.7529055420], [0.8450113810,-0.7539001049], [0.8458139940,-0.7581921713], [0.8458640666,-0.7584424769], [0.8462369389,-0.7602417031], [0.8464098786,-0.7610374021], [0.8467167521,-0.7623887010], [0.8467297492,-0.7624442189], [0.8467865333,-0.7626851412], [0.8475694377,-0.7657350633], [0.8475873208,-0.7657987988], [0.8479958367,-0.7671824333], [0.8481982545,-0.7678166157], [0.8484221232,-0.7684782767], [0.8486716850,-0.7691666322], [0.8488883339,-0.7697220602], [0.8489466629,-0.7698649017], [0.8490424709,-0.7700933541], [0.8492748088,-0.7706154841], [0.8498940763,-0.7717863694], [0.8498956098,-0.7717888699], [0.8501274943,-0.7721442618], [0.8509801799,-0.7730626213], [0.8511956627,-0.7731978855], [0.8513782046,-0.7732818778], [0.8517201435,-0.7733636553], [0.8518328654,-0.7733690123]]././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/pareto_front/zdt4_front.json0000644000076500000240000007051014456461441021575 0ustar00runnerstaff[[0.0000000001,0.9999894726], [0.0002105478,0.9854897362], [0.0006312647,0.9748750180], [0.0011630803,0.9658960376], [0.0017746306,0.9578736358], [0.0024505398,0.9504970728], [0.0031768967,0.9436360339], [0.0039503274,0.9371483698], [0.0047628349,0.9309867048], [0.0056133085,0.9250779837], [0.0064844270,0.9194740603], [0.0073825092,0.9140784704], [0.0083046512,0.9088701410], [0.0092513217,0.9038162085], [0.0102235345,0.8988885045], [0.0112037024,0.8941524567], [0.0122063005,0.8895178727], [0.0132289862,0.8849826699], [0.0142533197,0.8806127324], [0.0152866946,0.8763606266], [0.0163381500,0.8721792270], 
[0.0173920785,0.8681209701], [0.0184482339,0.8641757241], [0.0195148773,0.8603043403], [0.0205965083,0.8564851634], [0.0216666513,0.8528040377], [0.0227383129,0.8492077161], [0.0238229080,0.8456532864], [0.0249019551,0.8421964667], [0.0259929519,0.8387767019], [0.0270660633,0.8354823314], [0.0281500276,0.8322203004], [0.0292333708,0.8290223089], [0.0303128734,0.8258940741], [0.0314021618,0.8227934486], [0.0324911066,0.8197471037], [0.0335765797,0.8167608674], [0.0346709704,0.8137985757], [0.0357370146,0.8109576380], [0.0368111297,0.8081377324], [0.0378895903,0.8053475139], [0.0389532784,0.8026341508], [0.0400243289,0.7999391870], [0.0410894955,0.7972945598], [0.0421616561,0.7946669628], [0.0432213442,0.7921025634], [0.0442876083,0.7895537876], [0.0453412088,0.7870652476], [0.0463909832,0.7846143384], [0.0474467646,0.7821772174], [0.0484983540,0.7797765817], [0.0495450062,0.7774129245], [0.0505853782,0.7750880658], [0.0516311551,0.7727751001], [0.0526673294,0.7705063631], [0.0537086508,0.7682487307], [0.0547368762,0.7660408665], [0.0557699761,0.7638433229], [0.0568037616,0.7616646026], [0.0578314646,0.7595182655], [0.0588637743,0.7573814221], [0.0598984973,0.7552583048], [0.0609298646,0.7531602451], [0.0619606711,0.7510809949], [0.0629836759,0.7490345126], [0.0640108688,0.7469963068], [0.0650299931,0.7449902099], [0.0660450761,0.7430076342], [0.0670640897,0.7410326474], [0.0680825901,0.7390735925], [0.0691035230,0.7371245105], [0.0701179135,0.7352021270], [0.0711359997,0.7332866714], [0.0721531501,0.7313866160], [0.0731706023,0.7294993488], [0.0741806823,0.7276386915], [0.0751924292,0.7257876203], [0.0762062164,0.7239452655], [0.0772233979,0.7221090179], [0.0782425560,0.7202812914], [0.0792510937,0.7184842922], [0.0802628607,0.7166929921], [0.0812759972,0.7149105453], [0.0822884477,0.7131403693], [0.0833012730,0.7113804009], [0.0843171957,0.7096257661], [0.0853334216,0.7078811516], [0.0863411931,0.7061612805], [0.0873519226,0.7044464133], 
[0.0883637196,0.7027396435], [0.0893732618,0.7010463885], [0.0903856712,0.6993579018], [0.0913949697,0.6976839903], [0.0924070702,0.6960146875], [0.0934200164,0.6943531181], [0.0944275417,0.6927093530], [0.0954377690,0.6910699610], [0.0964475333,0.6894399682], [0.0974559260,0.6878206830], [0.0984669407,0.6862055758], [0.0994732780,0.6846061542], [0.1004805510,0.6830133268], [0.1014903611,0.6814244813], [0.1024949925,0.6798516087], [0.1035020978,0.6782825809], [0.1045074114,0.6767239394], [0.1055120333,0.6751738415], [0.1065190579,0.6736274247], [0.1075236424,0.6720920215], [0.1085305843,0.6705601963], [0.1095359091,0.6690379038], [0.1105435514,0.6675190963], [0.1115512301,0.6660071407], [0.1125570012,0.6645048418], [0.1135639375,0.6630075113], [0.1145731157,0.6615134925], [0.1155784246,0.6600317301], [0.1165859290,0.6585531828], [0.1175892437,0.6570871193], [0.1185930860,0.6556265313], [0.1195990615,0.6541690275], [0.1206040643,0.6527190413], [0.1216111695,0.6512720695], [0.1226168930,0.6498330498], [0.1236222729,0.6484004083], [0.1246297053,0.6469706736], [0.1256343337,0.6455506613], [0.1266409786,0.6441334821], [0.1276471982,0.6427225193], [0.1286513413,0.6413200016], [0.1296564281,0.6399216362], [0.1306634703,0.6385259756], [0.1316668478,0.6371407329], [0.1326721441,0.6357581242], [0.1336779496,0.6343800476], [0.1346824492,0.6330089249], [0.1356853479,0.6316450789], [0.1366873372,0.6302874938], [0.1376911695,0.6289323922], [0.1386940586,0.6275834878], [0.1396967886,0.6262396642], [0.1407006623,0.6248991305], [0.1417038828,0.6235642382], [0.1427088852,0.6222317044], [0.1437113389,0.6209072159], [0.1447140663,0.6195869794], [0.1457185366,0.6182690258], [0.1467241957,0.6169540553], [0.1477281975,0.6156457395], [0.1487317722,0.6143424158], [0.1497370456,0.6130412870], [0.1507395611,0.6117480700], [0.1517437491,0.6104569997], [0.1527491415,0.6091686534], [0.1537521086,0.6078876327], [0.1547567167,0.6066086977], [0.1557592375,0.6053365516], 
[0.1567622432,0.6040678806], [0.1577668585,0.6028012356], [0.1587687449,0.6015420413], [0.1597722169,0.6002848303], [0.1607742633,0.5990333389], [0.1617765541,0.5977854377], [0.1627804021,0.5965394665], [0.1637844766,0.5952970514], [0.1647863012,0.5940612100], [0.1657896532,0.5928272441], [0.1667919637,0.5915982815], [0.1677957846,0.5903711624], [0.1687999261,0.5891473183], [0.1698025772,0.5879289173], [0.1708067129,0.5867123122], [0.1718093417,0.5855010957], [0.1728134375,0.5842916436], [0.1738186323,0.5830843822], [0.1748211768,0.5818837759], [0.1758239238,0.5806863659], [0.1768265821,0.5794924708], [0.1778306658,0.5783002658], [0.1788332145,0.5771132368], [0.1798358264,0.5759294559], [0.1808398397,0.5747473225], [0.1818418499,0.5735708149], [0.1828452443,0.5723959257], [0.1838468916,0.5712262933], [0.1848493707,0.5700588753], [0.1858532128,0.5688930379], [0.1868559327,0.5677316427], [0.1878592674,0.5665726503], [0.1888620073,0.5654174333], [0.1898660817,0.5642637475], [0.1908674683,0.5631161845], [0.1918701717,0.5619701247], [0.1928737645,0.5608260430], [0.1938761620,0.5596862914], [0.1948789191,0.5585490751], [0.1958829695,0.5574133198], [0.1968839484,0.5562839327], [0.1978851741,0.5551571355], [0.1988876695,0.5540317618], [0.1998903999,0.5529089579], [0.2008922998,0.5517898932], [0.2018954520,0.5506722221], [0.2028989078,0.5495569871], [0.2039007023,0.5484463462], [0.2049011516,0.5473399160], [0.2059028251,0.5462348348], [0.2069049004,0.5451319967], [0.2079081919,0.5440304923], [0.2089091658,0.5429341778], [0.2099113416,0.5418391749], [0.2109132670,0.5407470555], [0.2119152403,0.5396574750], [0.2129184008,0.5385691809], [0.2139198230,0.5374853267], [0.2149224200,0.5364027394], [0.2159250466,0.5353226425], [0.2169277369,0.5342449818], [0.2179315886,0.5331685651], [0.2189353258,0.5320947469], [0.2199402161,0.5310221582], [0.2209406566,0.5299567503], [0.2219422321,0.5288925472], [0.2229444987,0.5278300108], [0.2239468923,0.5267697259], 
[0.2249504102,0.5257106261], [0.2259511071,0.5246568533], [0.2269529145,0.5236042459], [0.2279546225,0.5225540632], [0.2289564315,0.5215060799], [0.2299583451,0.5204602779], [0.2309608549,0.5194161312], [0.2319644550,0.5183731164], [0.2329659947,0.5173344899], [0.2339686130,0.5162969785], [0.2349707108,0.5152622247], [0.2359718198,0.5142306929], [0.2369739928,0.5132002539], [0.2379751637,0.5121730186], [0.2389762694,0.5111480087], [0.2399774526,0.5101250643], [0.2409787263,0.5091041594], [0.2419805638,0.5080848002], [0.2429834403,0.5070664951], [0.2439855244,0.5060510913], [0.2449886396,0.5050367290], [0.2459906664,0.5040255386], [0.2469925755,0.5030165239], [0.2479955026,0.5020085316], [0.2489970506,0.5010039573], [0.2499996077,0.5000003923], [0.2510019167,0.4989990851], [0.2520038840,0.4980001156], [0.2530057956,0.4970031853], [0.2540087011,0.4960072410], [0.2550103471,0.4950145080], [0.2560129786,0.4940227489], [0.2570152630,0.4930332723], [0.2580171398,0.4920461244], [0.2590199910,0.4910599338], [0.2600208175,0.4900776358], [0.2610226089,0.4890962822], [0.2620252361,0.4881159935], [0.2630267801,0.4871386346], [0.2640292794,0.4861622052], [0.2650319605,0.4851874511], [0.2660333481,0.4842157931], [0.2670347118,0.4832459852], [0.2680357884,0.4822782712], [0.2690371011,0.4813121352], [0.2700393471,0.4803468973], [0.2710418231,0.4793832282], [0.2720440694,0.4784215597], [0.2730454143,0.4774625235], [0.2740471733,0.4765048488], [0.2750498495,0.4755480485], [0.2760526149,0.4745929055], [0.2770562927,0.4736386292], [0.2780569657,0.4726889289], [0.2790577919,0.4717407910], [0.2800595170,0.4707935025], [0.2810605780,0.4698485329], [0.2820625321,0.4689044040], [0.2830633674,0.4679630018], [0.2840650890,0.4670224310], [0.2850662377,0.4660840537], [0.2860682669,0.4651464996], [0.2870698581,0.4642109948], [0.2880712825,0.4632772759], [0.2890735788,0.4623443678], [0.2900757018,0.4614132365], [0.2910783508,0.4604832248], [0.2920798864,0.4595558434], 
[0.2930822820,0.4586292564], [0.2940842385,0.4577046575], [0.2950856983,0.4567820894], [0.2960870729,0.4558611640], [0.2970886374,0.4549416202], [0.2980910475,0.4540228508], [0.2990936425,0.4531054558], [0.3000950324,0.4521906971], [0.3010967550,0.4512771601], [0.3020993122,0.4503643824], [0.3031014386,0.4494535092], [0.3041029020,0.4485447416], [0.3051051913,0.4476367216], [0.3061067375,0.4467308635], [0.3071080050,0.4458267373], [0.3081100901,0.4449233476], [0.3091118903,0.4440216818], [0.3101127726,0.4431223001], [0.3111144638,0.4422236436], [0.3121167514,0.4413258988], [0.3131186269,0.4404299625], [0.3141199527,0.4395359488], [0.3151211702,0.4386434554], [0.3161229768,0.4377518547], [0.3171255784,0.4368609600], [0.3181264009,0.4359730495], [0.3191280118,0.4350858368], [0.3201301025,0.4341995913], [0.3211313107,0.4333155104], [0.3221333005,0.4324321182], [0.3231351783,0.4315501972], [0.3241358160,0.4306707315], [0.3251372272,0.4297919439], [0.3261379206,0.4289151372], [0.3271393827,0.4280390025], [0.3281401366,0.4271648260], [0.3291410306,0.4262918594], [0.3301426868,0.4254195559], [0.3311443335,0.4245485829], [0.3321446186,0.4236801074], [0.3331456580,0.4228122853], [0.3341469535,0.4219455445], [0.3351488093,0.4210796175], [0.3361510327,0.4202146667], [0.3371524512,0.4193516975], [0.3381546145,0.4184893685], [0.3391566314,0.4176284421], [0.3401593896,0.4167681511], [0.3411604553,0.4159105759], [0.3421620297,0.4150538233], [0.3431643380,0.4141976972], [0.3441651948,0.4133440576], [0.3451662555,0.4124914847], [0.3461676576,0.4116398572], [0.3471697849,0.4107888452], [0.3481709012,0.4099399173], [0.3491727381,0.4090915993], [0.3501746238,0.4082444560], [0.3511757655,0.4073991516], [0.3521774192,0.4065546199], [0.3531797861,0.4057106882], [0.3541826948,0.4048674982], [0.3551827727,0.4040278759], [0.3561835555,0.4031888443], [0.3571849903,0.4023504453], [0.3581857802,0.4015137594], [0.3591872702,0.4006776575], [0.3601882868,0.3998431148], 
[0.3611899999,0.3990091516], [0.3621908668,0.3981770470], [0.3631924261,0.3973455168], [0.3641936381,0.3965154202], [0.3651955391,0.3956858936], [0.3661969049,0.3948579465], [0.3671984667,0.3940309689], [0.3682007123,0.3932045548], [0.3692019399,0.3923801024], [0.3702038472,0.3915562087], [0.3712055375,0.3907336071], [0.3722079045,0.3899115601], [0.3732089877,0.3890916700], [0.3742100083,0.3882729299], [0.3752116992,0.3874547370], [0.3762133237,0.3866376897], [0.3772148257,0.3858218291], [0.3782157057,0.3850075564], [0.3792172488,0.3841938221], [0.3802181001,0.3833817225], [0.3812196110,0.3825701570], [0.3822206018,0.3817600775], [0.3832222488,0.3809505280], [0.3842235710,0.3801422978], [0.3852251764,0.3793348919], [0.3862274336,0.3785280106], [0.3872282277,0.3777233512], [0.3882296692,0.3769192113], [0.3892307415,0.3761164039], [0.3902324583,0.3753141123], [0.3912346802,0.3745124460], [0.3922368074,0.3737118814], [0.3932378133,0.3729132330], [0.3942394571,0.3721150925], [0.3952405845,0.3713183759], [0.3962423467,0.3705221635], [0.3972435960,0.3697273637], [0.3982454771,0.3689330645], [0.3992472187,0.3681398741], [0.4002480479,0.3673483992], [0.4012489559,0.3665578512], [0.4022496843,0.3657684301], [0.4032503529,0.3649800374], [0.4042516431,0.3641921335], [0.4052527434,0.3634053539], [0.4062544628,0.3626190599], [0.4072557686,0.3618340587], [0.4082576907,0.3610495398], [0.4092589953,0.3602664654], [0.4102609131,0.3594838698], [0.4112626065,0.3587024041], [0.4122636316,0.3579224100], [0.4132652652,0.3571428890], [0.4142674430,0.3563638893], [0.4152688799,0.3555864062], [0.4162709214,0.3548093915], [0.4172720122,0.3540340472], [0.4182737041,0.3532591677], [0.4192747583,0.3524857080], [0.4202764107,0.3517127097], [0.4212772980,0.3509412214], [0.4222787805,0.3501701911], [0.4232804027,0.3493999672], [0.4242813186,0.3486311962], [0.4252828255,0.3478628783], [0.4262837535,0.3470959079], [0.4272846573,0.3463298559], [0.4282856329,0.3455646457], 
[0.4292871941,0.3447998824], [0.4302888245,0.3440359579], [0.4312904321,0.3432729394], [0.4322910343,0.3425115710], [0.4332919868,0.3417508171], [0.4342935181,0.3409905023], [0.4352953978,0.3402307996], [0.4362973516,0.3394719146], [0.4372982381,0.3387147075], [0.4382996979,0.3379579335], [0.4393006265,0.3372024242], [0.4403021258,0.3364473451], [0.4413034896,0.3356932263], [0.4423054221,0.3349395350], [0.4433068964,0.3341870410], [0.4443085740,0.3334352440], [0.4453096867,0.3326847172], [0.4463107974,0.3319350351], [0.4473124701,0.3311857731], [0.4483130258,0.3304381837], [0.4493141404,0.3296910113], [0.4503155791,0.3289444292], [0.4513163666,0.3281991615], [0.4523176354,0.3274543618], [0.4533194590,0.3267099741], [0.4543204053,0.3259670592], [0.4553219036,0.3252245532], [0.4563237931,0.3244825738], [0.4573242750,0.3237424492], [0.4583253047,0.3230027292], [0.4593262806,0.3222638562], [0.4603278024,0.3215253856], [0.4613291449,0.3207878498], [0.4623310314,0.3200507141], [0.4633320623,0.3193150051], [0.4643336345,0.3185796932], [0.4653356544,0.3178448458], [0.4663361052,0.3171119380], [0.4673370931,0.3163794231], [0.4683378402,0.3156478683], [0.4693391225,0.3149167040], [0.4703402348,0.3141864431], [0.4713410174,0.3134571991], [0.4723423318,0.3127283420], [0.4733434528,0.3120003977], [0.4743451036,0.3112728380], [0.4753467082,0.3105460797], [0.4763473095,0.3098208135], [0.4773484369,0.3090959279], [0.4783495419,0.3083718182], [0.4793511712,0.3076480871], [0.4803522870,0.3069254824], [0.4813539248,0.3062032539], [0.4823555688,0.3054817722], [0.4833565520,0.3047615143], [0.4843580541,0.3040416291], [0.4853584322,0.3033232943], [0.4863589651,0.3026055886], [0.4873600131,0.3018882517], [0.4883612140,0.3011715418], [0.4893620785,0.3004558066], [0.4903634552,0.2997404372], [0.4913642855,0.2990261877], [0.4923653881,0.2983124712], [0.4933670001,0.2975991172], [0.4943688830,0.2968862944], [0.4953700904,0.2961746734], [0.4963718042,0.2954634117], 
[0.4973732519,0.2947530561], [0.4983752042,0.2940430578], [0.4993770732,0.2933338319], [0.5003793995,0.2926249937], [0.5013798719,0.2919181743], [0.5023808440,0.2912117072], [0.5033817843,0.2905059660], [0.5043832226,0.2898005755], [0.5053841880,0.2890962175], [0.5063856496,0.2883922081], [0.5073868334,0.2876890894], [0.5083877579,0.2869868459], [0.5093891756,0.2862849479], [0.5103907542,0.2855836269], [0.5113920834,0.2848831680], [0.5123939033,0.2841830518], [0.5133954642,0.2834838005], [0.5143963096,0.2827857296], [0.5153972877,0.2820882452], [0.5163983179,0.2813914015], [0.5173998337,0.2806948952], [0.5184013999,0.2799990278], [0.5194023355,0.2793042698], [0.5204032304,0.2786102091], [0.5214043454,0.2779166631], [0.5224054528,0.2772237879], [0.5234070402,0.2765312445], [0.5244083215,0.2758395748], [0.5254100811,0.2751482351], [0.5264109890,0.2744581411], [0.5274123732,0.2737683750], [0.5284133357,0.2730795534], [0.5294144273,0.2723912952], [0.5304153500,0.2717038034], [0.5314167455,0.2710166357], [0.5324178038,0.2703303461], [0.5334193331,0.2696443790], [0.5344202693,0.2689594612], [0.5354216747,0.2682748640], [0.5364229975,0.2675909630], [0.5374247881,0.2669073809], [0.5384256524,0.2662250670], [0.5394265638,0.2655433547], [0.5404279400,0.2648619585], [0.5414289726,0.2641814268], [0.5424304684,0.2635012095], [0.5434313875,0.2628220109], [0.5444327680,0.2621431250], [0.5454332114,0.2614654975], [0.5464341140,0.2607881805], [0.5474353065,0.2601112878], [0.5484366190,0.2594349326], [0.5494372200,0.2587596746], [0.5504379207,0.2580849640], [0.5514390766,0.2574105599], [0.5524403307,0.2567367017], [0.5534419480,0.2560632097], [0.5544431444,0.2553906095], [0.5554444467,0.2547185453], [0.5564456248,0.2540471699], [0.5574472537,0.2533760962], [0.5584478796,0.2527062963], [0.5594489541,0.2520367963], [0.5604499619,0.2513679395], [0.5614512555,0.2506994892], [0.5624529959,0.2500313367], [0.5634541722,0.2493641547], [0.5644550290,0.2486977779], 
[0.5654563297,0.2480316963], [0.5664573800,0.2473663707], [0.5674588728,0.2467013389], [0.5684603141,0.2460369279], [0.5694611285,0.2453735172], [0.5704623831,0.2447103979], [0.5714634373,0.2440479927], [0.5724649304,0.2433858775], [0.5734659813,0.2427246331], [0.5744674695,0.2420636772], [0.5754682081,0.2414037911], [0.5764693822,0.2407441919], [0.5774704071,0.2400852633], [0.5784718661,0.2394266202], [0.5794729710,0.2387687795], [0.5804745086,0.2381112229], [0.5814755874,0.2374545342], [0.5824770975,0.2367981280], [0.5834781770,0.2361425676], [0.5844796864,0.2354872883], [0.5854812475,0.2348325363], [0.5864819621,0.2341788968], [0.5874826373,0.2335258404], [0.5884833481,0.2328733168], [0.5894844847,0.2322210704], [0.5904856555,0.2315693554], [0.5914866485,0.2309183083], [0.5924878516,0.2302676754], [0.5934891833,0.2296175084], [0.5944909378,0.2289676156], [0.5954917979,0.2283188496], [0.5964925865,0.2276706748], [0.5974937953,0.2270227718], [0.5984953253,0.2263752038], [0.5994965587,0.2257283689], [0.6004982106,0.2250818039], [0.6015002183,0.2244355486], [0.6025013431,0.2237904000], [0.6035022941,0.2231458991], [0.6045033654,0.2225018550], [0.6055046739,0.2218581916], [0.6065063968,0.2212147942], [0.6075067706,0.2205727932], [0.6085075567,0.2199310565], [0.6095084202,0.2192897976], [0.6105096950,0.2186488018], [0.6115107578,0.2180084669], [0.6125122307,0.2173683940], [0.6135136639,0.2167288695], [0.6145149966,0.2160899308], [0.6155167376,0.2154512523], [0.6165178715,0.2148134798], [0.6175194122,0.2141759661], [0.6185202474,0.2135394178], [0.6195210710,0.2129033916], [0.6205222991,0.2122676222], [0.6215235142,0.2116323737], [0.6225247801,0.2109976045], [0.6235264489,0.2103630905], [0.6245270350,0.2097297709], [0.6255276077,0.2090969669], [0.6265285809,0.2084644159], [0.6275298171,0.2078322040], [0.6285311785,0.2072004172], [0.6295320318,0.2065694537], [0.6305332832,0.2059387409], [0.6315344158,0.2053086034], [0.6325354850,0.2046790051], 
[0.6335369506,0.2040496557], [0.6345379412,0.2034211017], [0.6355390288,0.2027929825], [0.6365405109,0.2021651105], [0.6375414081,0.2015380985], [0.6385422167,0.2009116340], [0.6395434177,0.2002854149], [0.6405441118,0.1996600024], [0.6415451971,0.1990348340], [0.6425464899,0.1984100238], [0.6435472938,0.1977860050], [0.6445484870,0.1971622287], [0.6455498066,0.1965388581], [0.6465509890,0.1959160560], [0.6475520639,0.1952938027], [0.6485535260,0.1946717899], [0.6495552002,0.1940501255], [0.6505561784,0.1934293717], [0.6515571679,0.1928090883], [0.6525585422,0.1921890430], [0.6535599267,0.1915694670], [0.6545608471,0.1909506523], [0.6555621504,0.1903320740], [0.6565630740,0.1897142023], [0.6575643794,0.1890965660], [0.6585650162,0.1884798116], [0.6595656632,0.1878635194], [0.6605666900,0.1872474608], [0.6615674655,0.1866320233], [0.6625684896,0.1860168984], [0.6635698921,0.1854020058], [0.6645709906,0.1847877634], [0.6655724665,0.1841737523], [0.6665742784,0.1835599970], [0.6675753583,0.1829471508], [0.6685768138,0.1823345343], [0.6695776224,0.1817227717], [0.6705788052,0.1811112376], [0.6715797575,0.1805003005], [0.6725807373,0.1798898018], [0.6735820898,0.1792795301], [0.6745834687,0.1786696957], [0.6755850054,0.1780602179], [0.6765862213,0.1774513867], [0.6775878078,0.1768427806], [0.6785892295,0.1762347243], [0.6795910208,0.1756268922], [0.6805918094,0.1750201158], [0.6815929661,0.1744135623], [0.6825942324,0.1738073878], [0.6835950796,0.1732019112], [0.6845962934,0.1725966561], [0.6855974192,0.1719918967], [0.6865989106,0.1713873579], [0.6875995449,0.1707837768], [0.6886002694,0.1701805802], [0.6896013578,0.1695776028], [0.6906025356,0.1689750090], [0.6916034324,0.1683730209], [0.6926046915,0.1677712505], [0.6936054767,0.1671701995], [0.6946062606,0.1665695826], [0.6956072227,0.1659692915], [0.6966085451,0.1653692163], [0.6976100449,0.1647694660], [0.6986105718,0.1641707281], [0.6996114572,0.1635722044], [0.7006121260,0.1629742382], 
[0.7016131523,0.1623764854], [0.7026143784,0.1617790396], [0.7036156464,0.1611819945], [0.7046166041,0.1605855588], [0.7056179175,0.1599893349], [0.7066188136,0.1593937821], [0.7076197944,0.1587986006], [0.7086211294,0.1582036295], [0.7096225483,0.1576090288], [0.7106241994,0.1570147099], [0.7116252814,0.1564211469], [0.7126263761,0.1558279938], [0.7136278225,0.1552350490], [0.7146287960,0.1546427998], [0.7156301202,0.1540507579], [0.7166316652,0.1534589997], [0.7176326872,0.1528679636], [0.7186340584,0.1522771335], [0.7196350244,0.1516869537], [0.7206363388,0.1510969792], [0.7216369658,0.1505078189], [0.7226379399,0.1499188628], [0.7236388529,0.1493303503], [0.7246401122,0.1487420413], [0.7256407605,0.1481544973], [0.7266416984,0.1475671883], [0.7276429812,0.1469800816], [0.7286443305,0.1463933397], [0.7296457222,0.1458069760], [0.7306471410,0.1452209987], [0.7316479446,0.1446357825], [0.7326490906,0.1440507663], [0.7336502408,0.1434661473], [0.7346517328,0.1428817277], [0.7356528242,0.1422979397], [0.7366542565,0.1417143503], [0.7376554901,0.1411312731], [0.7386560386,0.1405489900], [0.7396569262,0.1399669040], [0.7406577527,0.1393852472], [0.7416589175,0.1388037869], [0.7426598805,0.1382228359], [0.7436611811,0.1376420806], [0.7446621188,0.1370619264], [0.7456633931,0.1364819671], [0.7466643144,0.1359026013], [0.7476655714,0.1353234296], [0.7486667367,0.1347446986], [0.7496678093,0.1341664078], [0.7506692164,0.1335883101], [0.7516700907,0.1330109051], [0.7526712983,0.1324336923], [0.7536726537,0.1318567781], [0.7546736671,0.1312804439], [0.7556750126,0.1307043008], [0.7566759716,0.1301287615], [0.7576772617,0.1295534125], [0.7586783077,0.1289785837], [0.7596796842,0.1284039444], [0.7606807290,0.1278298738], [0.7616821035,0.1272559920], [0.7626835892,0.1266824236], [0.7636844218,0.1261096054], [0.7646855826,0.1255369747], [0.7656866915,0.1249647484], [0.7666881279,0.1243927091], [0.7676890053,0.1238213622], [0.7686898894,0.1232503839], 
[0.7696910995,0.1226795913], [0.7706921334,0.1221092702], [0.7716934926,0.1215391343], [0.7726940395,0.1209698301], [0.7736949106,0.1204007102], [0.7746957206,0.1198319930], [0.7756964987,0.1192636611], [0.7766975999,0.1186955124], [0.7776986208,0.1181277753], [0.7786999640,0.1175602208], [0.7797013908,0.1169929837], [0.7807028390,0.1164260988], [0.7817032335,0.1158601731], [0.7827038182,0.1152945020], [0.7837047229,0.1147290116], [0.7847058172,0.1141637752], [0.7857067437,0.1135989939], [0.7867076609,0.1130345774], [0.7877088967,0.1124703404], [0.7887101225,0.1119064675], [0.7897110968,0.1113430939], [0.7907121409,0.1107800380], [0.7917135020,0.1102171602], [0.7927145190,0.1096548315], [0.7937158522,0.1090926803], [0.7947168235,0.1085310866], [0.7957181102,0.1079696697], [0.7967194474,0.1074085776], [0.7977202934,0.1068481129], [0.7987214536,0.1062878240], [0.7997222983,0.1057280624], [0.8007234564,0.1051684760], [0.8017248350,0.1046091161], [0.8027255034,0.1040505018], [0.8037264838,0.1034920615], [0.8047276559,0.1029338620], [0.8057284549,0.1023762175], [0.8067295650,0.1018187460], [0.8077305471,0.1012616915], [0.8087314389,0.1007050323], [0.8097326406,0.1001485453], [0.8107340433,0.0995922905], [0.8117355382,0.0990363280], [0.8127364320,0.0984810418], [0.8137376341,0.0979259265], [0.8147389434,0.0973710932], [0.8157398773,0.0968168085], [0.8167411185,0.0962626938], [0.8177419646,0.0957091372], [0.8187431170,0.0951557499], [0.8197441544,0.0946027643], [0.8207451668,0.0940501301], [0.8217464845,0.0934976644], [0.8227473602,0.0929457788], [0.8237482445,0.0923942241], [0.8247494330,0.0918428368], [0.8257506293,0.0912917799], [0.8267520451,0.0907409362], [0.8277530787,0.0901906361], [0.8287544152,0.0896405022], [0.8297557396,0.0890907073], [0.8307573662,0.0885410781], [0.8317586416,0.0879919729], [0.8327598709,0.0874432232], [0.8337614014,0.0868946384], [0.8347625903,0.0863465699], [0.8357636924,0.0857988775], [0.8367650944,0.0852513490], 
[0.8377664088,0.0847041960], [0.8387670887,0.0841577163], [0.8397680672,0.0836113994], [0.8407691192,0.0830653681], [0.8417702211,0.0825196345], [0.8427716209,0.0819740630], [0.8437722831,0.0814292171], [0.8447732422,0.0808845327], [0.8457744425,0.0803400397], [0.8467755021,0.0797959454], [0.8477768577,0.0792520119], [0.8487777194,0.0787086675], [0.8497788763,0.0781654832], [0.8507800883,0.0776225890], [0.8517815949,0.0770798545], [0.8527822920,0.0765378773], [0.8537832828,0.0759960591], [0.8547841926,0.0754546022], [0.8557852306,0.0749133929], [0.8567865615,0.0743723419], [0.8577873019,0.0738319257], [0.8587883345,0.0732916670], [0.8597894874,0.0727516582], [0.8607903014,0.0722121463], [0.8617914065,0.0716727913], [0.8627922701,0.0711338793], [0.8637934242,0.0705951236], [0.8647943790,0.0700567872], [0.8657955010,0.0695186724], [0.8667969127,0.0689807130], [0.8677978471,0.0684433205], [0.8687990703,0.0679060829], [0.8697998649,0.0673693845], [0.8708007436,0.0668329498], [0.8718016519,0.0662968074], [0.8728028478,0.0657608188], [0.8738040727,0.0652251219], [0.8748049300,0.0646899284], [0.8758060737,0.0641548880], [0.8768071610,0.0636201834], [0.8778082382,0.0630857893], [0.8788096010,0.0625515475], [0.8798105006,0.0620178570], [0.8808113618,0.0614844904], [0.8818125074,0.0609512753], [0.8828136140,0.0604183835], [0.8838146098,0.0598858528], [0.8848155274,0.0593536651], [0.8858167283,0.0588216278], [0.8868175377,0.0582900990], [0.8878186297,0.0577587200], [0.8888199055,0.0572275431], [0.8898209001,0.0566968143], [0.8908221764,0.0561662348], [0.8918229674,0.0556362103], [0.8928240393,0.0551063344], [0.8938248923,0.0545768713], [0.8948256992,0.0540477289], [0.8958267862,0.0535187344], [0.8968278265,0.0529900600], [0.8978288316,0.0524616992], [0.8988301158,0.0519334856], [0.8998311310,0.0514057079], [0.9008324247,0.0508780770], [0.9018335024,0.0503508530], [0.9028343789,0.0498240274], [0.9038355329,0.0492973478], [0.9048368316,0.0487708837], 
[0.9058381419,0.0482447048], [0.9068391628,0.0477189686], [0.9078400350,0.0471936004], [0.9088411833,0.0466683771], [0.9098423648,0.0461434255], [0.9108431288,0.0456189813], [0.9118440801,0.0450947272], [0.9128453063,0.0445706168], [0.9138461746,0.0440469810], [0.9148473171,0.0435234885], [0.9158485399,0.0430002404], [0.9168497027,0.0424773096], [0.9178508027,0.0419546970], [0.9188520076,0.0414323145], [0.9198534854,0.0409100744], [0.9208542194,0.0403885060], [0.9218552254,0.0398670793], [0.9228562900,0.0393459051], [0.9238575408,0.0388249167], [0.9248584690,0.0383043782], [0.9258596681,0.0377839806], [0.9268606115,0.0372639970], [0.9278618253,0.0367441538], [0.9288627097,0.0362247618], [0.9298638640,0.0357055097], [0.9308651963,0.0351864448], [0.9318666150,0.0346676142], [0.9328674020,0.0341493894], [0.9338684575,0.0336313035], [0.9348693830,0.0331135625], [0.9358705765,0.0325959600], [0.9368714974,0.0320787752], [0.9378726857,0.0315617285], [0.9388740816,0.0310448506], [0.9398748957,0.0305285483], [0.9408758181,0.0300124650], [0.9418770069,0.0294965189], [0.9428783036,0.0289807913], [0.9438793675,0.0284654574], [0.9448803501,0.0279504385], [0.9458814245,0.0274356451], [0.9468827639,0.0269209878], [0.9478839160,0.0264066989], [0.9488853326,0.0258925457], [0.9498863156,0.0253788861], [0.9508875625,0.0248653618], [0.9518884093,0.0243523129], [0.9528895194,0.0238393988], [0.9538907769,0.0233266785], [0.9548920663,0.0228142110], [0.9558929996,0.0223021941], [0.9568941951,0.0217903113], [0.9578951777,0.0212788049], [0.9588964219,0.0207674322], [0.9598976316,0.0202563439], [0.9608986212,0.0197456345], [0.9618998716,0.0192350579], [0.9629009653,0.0187248269], [0.9639023193,0.0182147285], [0.9649038050,0.0177048280], [0.9659050567,0.0171953110], [0.9669065681,0.0166859260], [0.9679076734,0.0161770112], [0.9689090376,0.0156682279], [0.9699104799,0.0151596678], [0.9709121808,0.0146512390], [0.9719141140,0.0141429546], [0.9729154699,0.0136352247], 
[0.9739169262,0.0131277052], [0.9749186401,0.0126203161], [0.9759204079,0.0121131604], [0.9769224329,0.0116061347], [0.9779241666,0.0110995163], [0.9789260691,0.0105930720], [0.9799281993,0.0100867718], [0.9809305859,0.0095806010], [0.9819330847,0.0090746321], [0.9829355173,0.0085689549], [0.9839382056,0.0080634065], [0.9849410844,0.0075580197], [0.9859437889,0.0070529778], [0.9869466316,0.0065481231], [0.9879497292,0.0060433967], [0.9889528489,0.0055389153], [0.9899562232,0.0050345618], [0.9909597362,0.0045303942], [0.9919633539,0.0040264291], [0.9929672257,0.0035225915], [0.9939712250,0.0030189445], [0.9949754780,0.0025154247], [0.9959799285,0.0020120600], [0.9969846132,0.0015088317], [0.9979894950,0.0010057583], [0.9989946210,0.0005028159], [1.0000000000,0.0000000000]]././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/pareto_front/zdt6_front.json0000644000076500000240000007051014456461441021577 0ustar00runnerstaff[[0.2807753191,0.9211652202], [0.2814990682,0.9207582746], [0.2822222294,0.9203506133], [0.2829444640,0.9199424303], [0.2836657932,0.9195337178], [0.2843873868,0.9191238143], [0.2851090310,0.9187128404], [0.2858297620,0.9183013472], [0.2865498629,0.9178891761], [0.2872696154,0.9174761681], [0.2879897830,0.9170618849], [0.2887090502,0.9166470843], [0.2894296667,0.9162304680], [0.2901500283,0.9158129611], [0.2908694956,0.9153949365], [0.2915887137,0.9149760220], [0.2923081531,0.9145559436], [0.2930283871,0.9141343644], [0.2937477359,0.9137122677], [0.2944686574,0.9132882098], [0.2951886965,0.9128636335], [0.2959092187,0.9124377343], [0.2966288636,0.9120113173], [0.2973486046,0.9115838073], [0.2980687980,0.9111549917], [0.2987889944,0.9107251368], [0.2995087588,0.9102945034], [0.3002276584,0.9098633531], [0.3009485794,0.9094299526], [0.3016692216,0.9089956808], [0.3023890030,0.9085608909], [0.3031093708,0.9081247093], [0.3038288826,0.9076880101], [0.3045484000,0.9072502720], 
[0.3052688710,0.9068109164], [0.3059884919,0.9063710428], [0.3067086687,0.9059297926], [0.3074280000,0.9054880248], [0.3081481037,0.9050447462], [0.3088673660,0.9046009502], [0.3095875091,0.9041555742], [0.3103068146,0.9037096808], [0.3110269959,0.9032622078], [0.3117467791,0.9028139457], [0.3124657314,0.9023651667], [0.3131855124,0.9019148348], [0.3139057114,0.9014632044], [0.3146260227,0.9010104659], [0.3153455094,0.9005572097], [0.3160651118,0.9001028451], [0.3167855450,0.8996469184], [0.3175051591,0.8991904740], [0.3182256463,0.8987324380], [0.3189453180,0.8982738841], [0.3196653185,0.8978140842], [0.3203854336,0.8973531739], [0.3211047395,0.8968917463], [0.3218255490,0.8964283160], [0.3225455513,0.8959643673], [0.3232654163,0.8954994706], [0.3239847460,0.8950338844], [0.3247054027,0.8945664015], [0.3254252596,0.8940984004], [0.3261461526,0.8936286872], [0.3268662488,0.8931584554], [0.3275857368,0.8926875850], [0.3283053726,0.8922155824], [0.3290261297,0.8917418060], [0.3297460975,0.8912675112], [0.3304663686,0.8907919792], [0.3311867047,0.8903153667], [0.3319062574,0.8898382363], [0.3326254653,0.8893602998], [0.3333458929,0.8888805157], [0.3340655420,0.8884002136], [0.3347857558,0.8879184977], [0.3355051949,0.8874362642], [0.3362254101,0.8869524736], [0.3369448539,0.8864681655], [0.3376647811,0.8859824956], [0.3383839408,0.8854963086], [0.3391042500,0.8850083076], [0.3398242021,0.8845195116], [0.3405445374,0.8840294180], [0.3412641108,0.8835388067], [0.3419831543,0.8830475222], [0.3427033174,0.8825544363], [0.3434237715,0.8820601132], [0.3441437123,0.8815651053], [0.3448629001,0.8810695801], [0.3455825476,0.8805727028], [0.3463021370,0.8800748299], [0.3470224033,0.8795754516], [0.3477430093,0.8790747995], [0.3484628686,0.8785736292], [0.3491830692,0.8780711842], [0.3499030062,0.8775678863], [0.3506225462,0.8770638301], [0.3513413480,0.8765592572], [0.3520618447,0.8760524575], [0.3527822893,0.8755446564], [0.3535019983,0.8750363372], 
[0.3542229553,0.8745260979], [0.3549431787,0.8740153399], [0.3556629548,0.8735038626], [0.3563825690,0.8729914645], [0.3571025403,0.8724777757], [0.3578231915,0.8719625637], [0.3585431169,0.8714468333], [0.3592634354,0.8709297840], [0.3599830318,0.8704122168], [0.3607027073,0.8698935570], [0.3614216648,0.8693743802], [0.3621420657,0.8688531243], [0.3628623050,0.8683309476], [0.3635818296,0.8678082532], [0.3643011961,0.8672846385], [0.3650204511,0.8667600703], [0.3657405850,0.8662338245], [0.3664600099,0.8657070611], [0.3671795890,0.8651791494], [0.3679004541,0.8646492559], [0.3686206718,0.8641188003], [0.3693408003,0.8635873732], [0.3700602269,0.8630554285], [0.3707799355,0.8625222394], [0.3714989456,0.8619885334], [0.3722182638,0.8614535641], [0.3729368870,0.8609180783], [0.3736579327,0.8603797493], [0.3743782827,0.8598409014], [0.3750981475,0.8593013797], [0.3758179965,0.8587608335], [0.3765371560,0.8582197702], [0.3772581153,0.8576763144], [0.3779783857,0.8571323399], [0.3786980305,0.8565878017], [0.3794175516,0.8560423215], [0.3801382362,0.8554949214], [0.3808582376,0.8549470028], [0.3815790568,0.8543974234], [0.3822997011,0.8538469385], [0.3830196662,0.8532959353], [0.3837394596,0.8527440272], [0.3844598890,0.8521905938], [0.3851796433,0.8516366424], [0.3858993552,0.8510816877], [0.3866183959,0.8505262160], [0.3873376386,0.8499695537], [0.3880574538,0.8494114126], [0.3887771485,0.8488523288], [0.3894961770,0.8482927281], [0.3902162679,0.8477312643], [0.3909361552,0.8471689225], [0.3916553797,0.8466060635], [0.3923760036,0.8460410718], [0.3930959657,0.8454755618], [0.3938168295,0.8449083048], [0.3945370336,0.8443405291], [0.3952574482,0.8437715497], [0.3959773500,0.8432019383], [0.3966973406,0.8426312199], [0.3974166779,0.8420599841], [0.3981367187,0.8414871532], [0.3988565577,0.8409134464], [0.3995757472,0.8403392223], [0.4002952696,0.8397636971], [0.4010157076,0.8391864023], [0.4017360769,0.8386081245], [0.4024558003,0.8380293288], 
[0.4031754576,0.8374495504], [0.4038957197,0.8368682476], [0.4046156350,0.8362861879], [0.4053351080,0.8357034502], [0.4060539424,0.8351201959], [0.4067746522,0.8345343823], [0.4074947236,0.8339480502], [0.4082149745,0.8333605346], [0.4089345900,0.8327725011], [0.4096543402,0.8321833216], [0.4103750895,0.8315922859], [0.4110955980,0.8310004093], [0.4118154751,0.8304080145], [0.4125357176,0.8298142817], [0.4132556326,0.8292197822], [0.4139759287,0.8286239305], [0.4146955982,0.8280275609], [0.4154156792,0.8274298134], [0.4161359398,0.8268308796], [0.4168555769,0.8262314280], [0.4175758225,0.8256304325], [0.4182959856,0.8250284684], [0.4190155288,0.8244259866], [0.4197356963,0.8238219453], [0.4204556855,0.8232170165], [0.4211750583,0.8226115703], [0.4218951128,0.8220045138], [0.4226149810,0.8213965778], [0.4233350181,0.8207874625], [0.4240544428,0.8201778295], [0.4247740379,0.8195670167], [0.4254945300,0.8189544050], [0.4262144120,0.8183412750], [0.4269344998,0.8177269329], [0.4276543475,0.8171117591], [0.4283735893,0.8164960680], [0.4290925939,0.8158795459], [0.4298128945,0.8152608757], [0.4305332794,0.8146410954], [0.4312530616,0.8140207969], [0.4319729298,0.8133993879], [0.4326929082,0.8127768472], [0.4334127928,0.8121533510], [0.4341323320,0.8115291183], [0.4348523102,0.8109034683], [0.4355716924,0.8102773008], [0.4362915142,0.8096497146], [0.4370114625,0.8090209816], [0.4377314683,0.8083911616], [0.4384514632,0.8077603144], [0.4391708669,0.8071289497], [0.4398914795,0.8064954862], [0.4406115019,0.8058615044], [0.4413321519,0.8052259317], [0.4420523419,0.8045897270], [0.4427723130,0.8039526788], [0.4434916987,0.8033151132], [0.4442122201,0.8026755036], [0.4449321570,0.8020353756], [0.4456517440,0.8013945231], [0.4463719076,0.8007521201], [0.4470920801,0.8001086719], [0.4478119674,0.7994644419], [0.4485322737,0.7988187994], [0.4492525001,0.7981721911], [0.4499721492,0.7975250649], [0.4506924144,0.7968763476], [0.4514128145,0.7962264709], 
[0.4521326398,0.7955760760], [0.4528531164,0.7949240550], [0.4535730198,0.7942715157], [0.4542928659,0.7936179920], [0.4550123930,0.7929637222], [0.4557317919,0.7923085339], [0.4564515473,0.7916519850], [0.4571716388,0.7909940927], [0.4578911632,0.7903356827], [0.4586102372,0.7896766503], [0.4593303281,0.7890156497], [0.4600498545,0.7883541313], [0.4607701063,0.7876909092], [0.4614897951,0.7870271691], [0.4622097775,0.7863621216], [0.4629295537,0.7856962283], [0.4636496779,0.7850289762], [0.4643692428,0.7843612063], [0.4650895898,0.7836916734], [0.4658093790,0.7830216224], [0.4665294545,0.7823502681], [0.4672489743,0.7816783961], [0.4679691586,0.7810048666], [0.4686894119,0.7803302351], [0.4694091118,0.7796550857], [0.4701288822,0.7789788341], [0.4708481705,0.7783020004], [0.4715686236,0.7776230333], [0.4722890871,0.7769430182], [0.4730094404,0.7762620693], [0.4737292452,0.7755806023], [0.4744495231,0.7748976500], [0.4751700634,0.7742134108], [0.4758900574,0.7735286532], [0.4766099258,0.7728429787], [0.4773299031,0.7721561636], [0.4780493374,0.7714688310], [0.4787688821,0.7707803576], [0.4794891935,0.7700901134], [0.4802089638,0.7693993511], [0.4809292340,0.7687070719], [0.4816489649,0.7680142746], [0.4823688992,0.7673202451], [0.4830890552,0.7666249647], [0.4838086745,0.7659291665], [0.4845292173,0.7652314376], [0.4852495068,0.7645329161], [0.4859692618,0.7638338766], [0.4866887396,0.7631340707], [0.4874087623,0.7624326984], [0.4881282532,0.7617308085], [0.4888477781,0.7610278498], [0.4895672547,0.7603239032], [0.4902871640,0.7596184968], [0.4910074998,0.7589116351], [0.4917273072,0.7582042553], [0.4924475871,0.7574953740], [0.4931673402,0.7567859746], [0.4938872506,0.7560753837], [0.4946073802,0.7553635394], [0.4953278463,0.7546503247], [0.4960477883,0.7539365917], [0.4967681530,0.7532214022], [0.4974879954,0.7525056944], [0.4982080396,0.7517887492], [0.4989275636,0.7510712863], [0.4996469268,0.7503529485], [0.5003672351,0.7496326300], 
[0.5010872988,0.7489115190], [0.5018072564,0.7481894774], [0.5025266975,0.7474669183], [0.5032460344,0.7467434288], [0.5039665115,0.7460177553], [0.5046869068,0.7452911261], [0.5054067880,0.7445639786], [0.5061264450,0.7438360217], [0.5068461670,0.7431069630], [0.5075665046,0.7423762434], [0.5082863309,0.7416450058], [0.5090058345,0.7409130605], [0.5097259023,0.7401795045], [0.5104454615,0.7394454308], [0.5111652901,0.7387100462], [0.5118846119,0.7379741441], [0.5126049237,0.7372361921], [0.5133252710,0.7364971661], [0.5140453835,0.7357573437], [0.5147656997,0.7350162744], [0.5154855121,0.7342746869], [0.5162058432,0.7335315274], [0.5169256718,0.7327878498], [0.5176453130,0.7320433299], [0.5183651464,0.7312975750], [0.5190844799,0.7305513027], [0.5198049085,0.7298028571], [0.5205249908,0.7290537339], [0.5212452963,0.7283033411], [0.5219651041,0.7275524301], [0.5226851358,0.7268002488], [0.5234050484,0.7260471553], [0.5241244659,0.7252935443], [0.5248444325,0.7245383217], [0.5255645614,0.7237818917], [0.5262841970,0.7230249439], [0.5270034717,0.7222673409], [0.5277238579,0.7215075298], [0.5284437525,0.7207472005], [0.5291634061,0.7199860896], [0.5298839437,0.7192230062], [0.5306039914,0.7184594043], [0.5313237776,0.7176950433], [0.5320435301,0.7169296821], [0.5327632502,0.7161633192], [0.5334834468,0.7153954120], [0.5342036586,0.7146264511], [0.5349233850,0.7138569722], [0.5356433433,0.7130862088], [0.5363628178,0.7123149277], [0.5370834134,0.7115414070], [0.5378035746,0.7107673151], [0.5385232536,0.7099927053], [0.5392435519,0.7092163917], [0.5399633692,0.7084395599], [0.5406841037,0.7076607000], [0.5414044614,0.7068812091], [0.5421243399,0.7061012001], [0.5428439770,0.7053204166], [0.5435631904,0.7045390581], [0.5442835200,0.7037554498], [0.5450033730,0.7029713234], [0.5457234746,0.7021858893], [0.5464435527,0.7013994437], [0.5471631564,0.7006124803], [0.5478830807,0.6998241299], [0.5486025320,0.6990352619], [0.5493225900,0.6982446921], 
[0.5500421761,0.6974536045], [0.5507624228,0.6966607536], [0.5514821986,0.6958673847], [0.5522019002,0.6950730614], [0.5529222531,0.6942769820], [0.5536421368,0.6934803843], [0.5543624929,0.6926822264], [0.5550823810,0.6918835503], [0.5558026852,0.6910833751], [0.5565225227,0.6902826817], [0.5572423527,0.6894809603], [0.5579623525,0.6886780132], [0.5586818878,0.6878745483], [0.5594016781,0.6870697626], [0.5601210053,0.6862644594], [0.5608410932,0.6854572681], [0.5615612694,0.6846489408], [0.5622809837,0.6838400954], [0.5630012083,0.6830296395], [0.5637214336,0.6822181453], [0.5644411988,0.6814061331], [0.5651609659,0.6805930827], [0.5658815721,0.6797780463], [0.5666017196,0.6789624914], [0.5673214448,0.6781463783], [0.5680414563,0.6773289040], [0.5687610114,0.6765109119], [0.5694809629,0.6756914329], [0.5702009041,0.6748709290], [0.5709203907,0.6740499074], [0.5716398683,0.6732278610], [0.5723600084,0.6724040208], [0.5730802102,0.6715790726], [0.5737999595,0.6707536064], [0.5745202005,0.6699265392], [0.5752404531,0.6690984211], [0.5759602548,0.6682697849], [0.5766805417,0.6674395528], [0.5774003789,0.6666088025], [0.5781207176,0.6657764359], [0.5788406075,0.6649435511], [0.5795606178,0.6641094903], [0.5802801809,0.6632749117], [0.5809999913,0.6624390101], [0.5817195652,0.6616023475], [0.5824386939,0.6607651678], [0.5831592491,0.6599252902], [0.5838793591,0.6590848940], [0.5845996542,0.6582432444], [0.5853195055,0.6574010765], [0.5860394054,0.6565578153], [0.5867588632,0.6557140364], [0.5874792607,0.6548681182], [0.5881992165,0.6540216817], [0.5889192043,0.6531741708], [0.5896388234,0.6523260580], [0.5903589673,0.6514762897], [0.5910786721,0.6506260034], [0.5917980249,0.6497750977], [0.5925178026,0.6489226536], [0.5932383267,0.6480682877], [0.5939584133,0.6472134033], [0.5946785809,0.6463573854], [0.5953983126,0.6455008494], [0.5961184062,0.6446428458], [0.5968386041,0.6437836807], [0.5975583674,0.6429239975], [0.5982786461,0.6420626616], 
[0.5989984913,0.6412008075], [0.5997185778,0.6403376274], [0.6004386868,0.6394733834], [0.6011583640,0.6386086214], [0.6018783330,0.6377424723], [0.6025978714,0.6368758054], [0.6033178491,0.6360075730], [0.6040373972,0.6351388228], [0.6047571169,0.6342688296], [0.6054765079,0.6333981983], [0.6061972873,0.6325248489], [0.6069176382,0.6316509805], [0.6076376926,0.6307764345], [0.6083577064,0.6299009010], [0.6090776041,0.6290244722], [0.6097977013,0.6281467635], [0.6105173733,0.6272685369], [0.6112372454,0.6263890298], [0.6119573665,0.6255081816], [0.6126774577,0.6246263328], [0.6133971258,0.6237439661], [0.6141169374,0.6228603872], [0.6148363272,0.6219762908], [0.6155566476,0.6210900136], [0.6162765465,0.6202032182], [0.6169966257,0.6193151639], [0.6177165894,0.6184262152], [0.6184361335,0.6175367488], [0.6191562255,0.6166455684], [0.6198758989,0.6157538700], [0.6205958158,0.6148608334], [0.6213162428,0.6139661264], [0.6220366679,0.6130703838], [0.6227569013,0.6121738418], [0.6234767183,0.6112767817], [0.6241969342,0.6103781873], [0.6249167347,0.6094790748], [0.6256361485,0.6085794096], [0.6263561775,0.6076779389], [0.6270758762,0.6067758455], [0.6277953706,0.6058729727], [0.6285151297,0.6049687318], [0.6292344767,0.6040639734], [0.6299540887,0.6031578461], [0.6306743052,0.6022499208], [0.6313941104,0.6013414773], [0.6321144403,0.6004313344], [0.6328343597,0.5995206732], [0.6335545093,0.5986086837], [0.6342745561,0.5976957875], [0.6349950220,0.5967813220], [0.6357150792,0.5958663381], [0.6364353898,0.5949499947], [0.6371555351,0.5940328241], [0.6378752735,0.5931151355], [0.6385950657,0.5921963421], [0.6393144522,0.5912770312], [0.6400336516,0.5903569248], [0.6407539188,0.5894344156], [0.6414738017,0.5885113617], [0.6421940438,0.5875868101], [0.6429138820,0.5866617404], [0.6436340793,0.5857351719], [0.6443539131,0.5848080347], [0.6450739684,0.5838795752], [0.6457940691,0.5829500203], [0.6465137683,0.5820199474], [0.6472337655,0.5810884528], 
[0.6479533622,0.5801564404], [0.6486738154,0.5792222813], [0.6493939433,0.5782875063], [0.6501136720,0.5773522134], [0.6508332792,0.5764160427], [0.6515537377,0.5754777269], [0.6522737979,0.5745388926], [0.6529938173,0.5735990745], [0.6537134398,0.5726587387], [0.6544335756,0.5717166951], [0.6551533153,0.5707741335], [0.6558734667,0.5698299956], [0.6565932228,0.5688853397], [0.6573130176,0.5679395969], [0.6580329040,0.5669926972], [0.6587529885,0.5660445002], [0.6594726793,0.5650957852], [0.6601926480,0.5641456675], [0.6609122242,0.5631950319], [0.6616318052,0.5622433544], [0.6623517813,0.5612901179], [0.6630717484,0.5603358565], [0.6637921402,0.5593799947], [0.6645121410,0.5584236144], [0.6652319562,0.5574664445], [0.6659518938,0.5565080752], [0.6666714422,0.5555491882], [0.6673913968,0.5545887235], [0.6681109630,0.5536277411], [0.6688313419,0.5526646361], [0.6695513328,0.5517010128], [0.6702709451,0.5507368602], [0.6709910465,0.5497710155], [0.6717107616,0.5488046528], [0.6724308805,0.5478367110], [0.6731513504,0.5468672595], [0.6738716051,0.5458970599], [0.6745914748,0.5449263421], [0.6753111307,0.5439548768], [0.6760311021,0.5429819489], [0.6767506902,0.5420085033], [0.6774711204,0.5410328810], [0.6781913842,0.5400564464], [0.6789112655,0.5390794936], [0.6796312006,0.5381014312], [0.6803507544,0.5371228510], [0.6810708573,0.5361424873], [0.6817905796,0.5351616056], [0.6825101891,0.5341798417], [0.6832304223,0.5331961901], [0.6839502757,0.5322120203], [0.6846705176,0.5312262824], [0.6853903806,0.5302400262], [0.6861106120,0.5292522281], [0.6868304654,0.5282639117], [0.6875505019,0.5272743074], [0.6882706788,0.5262834728], [0.6889904789,0.5252921200], [0.6897104199,0.5242995367], [0.6904301616,0.5233061919], [0.6911498808,0.5223118422], [0.6918700377,0.5213158509], [0.6925899260,0.5203191944], [0.6933094401,0.5193220203], [0.6940291111,0.5183235929], [0.6947492452,0.5173234862], [0.6954690061,0.5163228615], [0.6961892740,0.5153204947], 
[0.6969091694,0.5143176096], [0.6976294392,0.5133131655], [0.6983493609,0.5123081701], [0.6990695134,0.5113018154], [0.6997892950,0.5102949426], [0.7005093077,0.5092867098], [0.7012294810,0.5082772150], [0.7019494236,0.5072670067], [0.7026689971,0.5062562806], [0.7033889333,0.5052440085], [0.7041088861,0.5042306765], [0.7048284709,0.5032168266], [0.7055483281,0.5022015568], [0.7062680573,0.5011854312], [0.7069874199,0.5001687882], [0.7077076588,0.4991498697], [0.7084275312,0.4981304331], [0.7091478574,0.4971093164], [0.7098682030,0.4960871344], [0.7105881831,0.4950644341], [0.7113083272,0.4940404636], [0.7120281068,0.4930159751], [0.7127478884,0.4919904476], [0.7134678376,0.4909636447], [0.7141874236,0.4899363239], [0.7149074533,0.4889073332], [0.7156276836,0.4878770185], [0.7163475514,0.4868461856], [0.7170676867,0.4858139326], [0.7177876267,0.4847809230], [0.7185072056,0.4837473955], [0.7192273635,0.4827119996], [0.7199473773,0.4816757739], [0.7206674670,0.4806384020], [0.7213871970,0.4796005120], [0.7221072699,0.4785610907], [0.7228269838,0.4775211515], [0.7235469640,0.4764797909], [0.7242665860,0.4754379124], [0.7249868992,0.4743939960], [0.7257068545,0.4733495613], [0.7264266556,0.4723043140], [0.7271468324,0.4712574841], [0.7278666526,0.4702101360], [0.7285864734,0.4691617507], [0.7293059386,0.4681128479], [0.7300261037,0.4670618879], [0.7307463976,0.4660097023], [0.7314663366,0.4649569985], [0.7321865760,0.4639028180], [0.7329064611,0.4628481192], [0.7336260877,0.4617927634], [0.7343462725,0.4607355520], [0.7350661042,0.4596778225], [0.7357860871,0.4586188341], [0.7365063252,0.4575584329], [0.7372263975,0.4564972388], [0.7379462953,0.4554352652], [0.7386658419,0.4543727740], [0.7393856459,0.4533088666], [0.7401050995,0.4522444417], [0.7408252424,0.4511779602], [0.7415452445,0.4501106504], [0.7422652220,0.4490423402], [0.7429848503,0.4479735122], [0.7437049028,0.4469030176], [0.7444246067,0.4458320049], [0.7451444102,0.4447598079], 
[0.7458643206,0.4436864152], [0.7465843437,0.4426118177], [0.7473041778,0.4415364658], [0.7480236652,0.4404605962], [0.7487438310,0.4393826755], [0.7494641180,0.4383035359], [0.7501840588,0.4372238780], [0.7509045014,0.4361424297], [0.7516245985,0.4350604629], [0.7523448935,0.4339771613], [0.7530648436,0.4328933413], [0.7537851005,0.4318080223], [0.7545050132,0.4307221850], [0.7552250702,0.4296350934], [0.7559452401,0.4285467939], [0.7566655525,0.4274572417], [0.7573855220,0.4263671711], [0.7581054373,0.4252761459], [0.7588250108,0.4241846029], [0.7595446221,0.4230919671], [0.7602645552,0.4219978060], [0.7609844179,0.4209027157], [0.7617045887,0.4198061195], [0.7624244191,0.4187090052], [0.7631440889,0.4176110996], [0.7638639016,0.4165119398], [0.7645836183,0.4154118906], [0.7653029962,0.4143113240], [0.7660233826,0.4132081772], [0.7667434303,0.4121045120], [0.7674632302,0.4110001902], [0.7681828732,0.4098950734], [0.7689032031,0.4087878643], [0.7696231956,0.4076801368], [0.7703434003,0.4065710457], [0.7710632683,0.4054614363], [0.7717833008,0.4043505365], [0.7725029975,0.4032391188], [0.7732231120,0.4021260191], [0.7739428912,0.4010124012], [0.7746627350,0.3998976470], [0.7753828112,0.3987814961], [0.7761029323,0.3976642385], [0.7768227193,0.3965464628], [0.7775425517,0.3954275802], [0.7782623628,0.3943076947], [0.7789822031,0.3931867273], [0.7797017108,0.3920652423], [0.7804218564,0.3909417260], [0.7811419884,0.3898171940], [0.7818617884,0.3886921438], [0.7825819105,0.3875655533], [0.7833020847,0.3864378441], [0.7840219278,0.3853096167], [0.7847421200,0.3841798050], [0.7854619818,0.3830494751], [0.7861818099,0.3819181617], [0.7869019836,0.3807852681], [0.7876221980,0.3796512732], [0.7883420831,0.3785167599], [0.7890617585,0.3773815412], [0.7897813435,0.3762454295], [0.7905014234,0.3751074995], [0.7912211754,0.3739690516], [0.7919411977,0.3728291395], [0.7926610027,0.3716885347], [0.7933807013,0.3705470629], [0.7941004800,0.3694044276], 
[0.7948208839,0.3682597625], [0.7955409613,0.3671145790], [0.7962610153,0.3659683955], [0.7969811363,0.3648210684], [0.7977009319,0.3636732232], [0.7984207949,0.3625242342], [0.7991410558,0.3613735729], [0.7998611314,0.3602221705], [0.8005813064,0.3590695719], [0.8013011574,0.3579164551], [0.8020211083,0.3567621419], [0.8027408959,0.3556070540], [0.8034609458,0.3544505085], [0.8041806731,0.3532934450], [0.8049006140,0.3521350017], [0.8056203791,0.3509758048], [0.8063403053,0.3498153120], [0.8070599102,0.3486543014], [0.8077796764,0.3474919943], [0.8084999692,0.3463277999], [0.8092199410,0.3451630870], [0.8099397717,0.3439975663], [0.8106599806,0.3428303958], [0.8113798696,0.3416627071], [0.8120998833,0.3404937796], [0.8128195778,0.3393243340], [0.8135391027,0.3381541285], [0.8142591218,0.3369820826], [0.8149790796,0.3358090998], [0.8156987194,0.3346355991], [0.8164183952,0.3334610039], [0.8171385539,0.3322845838], [0.8178583951,0.3311076455], [0.8185785261,0.3299291966], [0.8192986947,0.3287496488], [0.8200185468,0.3275695829], [0.8207381323,0.3263889181], [0.8214584471,0.3252060197], [0.8221784461,0.3240226028], [0.8228985981,0.3228378973], [0.8236186938,0.3216522473], [0.8243384746,0.3204660792], [0.8250581998,0.3192789669], [0.8257778827,0.3180908884], [0.8264981981,0.3169007285], [0.8272181996,0.3157100502], [0.8279382728,0.3145182164], [0.8286580329,0.3133258645], [0.8293777385,0.3121325668], [0.8300976355,0.3109379155], [0.8308172204,0.3097427464], [0.8315377029,0.3085450486], [0.8322578733,0.3073468323], [0.8329779310,0.3061477664], [0.8336979931,0.3049476563], [0.8344177441,0.3037470283], [0.8351375001,0.3025453560], [0.8358576647,0.3013419643], [0.8365775191,0.3001380545], [0.8372976172,0.2989327002], [0.8380175773,0.2977265401], [0.8387372281,0.2965198622], [0.8394573693,0.2953113251], [0.8401772016,0.2941022699], [0.8408974576,0.2928914658], [0.8416174052,0.2916801433], [0.8423371703,0.2904680916], [0.8430568787,0.2892550993], 
[0.8437770792,0.2880402406], [0.8444970735,0.2868246928], [0.8452167610,0.2856086270], [0.8459370934,0.2843904340], [0.8466571628,0.2831716486], [0.8473769261,0.2819523452], [0.8480968712,0.2807316970], [0.8488165108,0.2795105310], [0.8495366200,0.2782875313], [0.8502564239,0.2770640135], [0.8509765722,0.2758388736], [0.8516966670,0.2746127874], [0.8524164574,0.2733861832], [0.8531364033,0.2721582774], [0.8538560454,0.2709298537], [0.8545756644,0.2697004339], [0.8552959489,0.2684688398], [0.8560159302,0.2672367273], [0.8567357685,0.2660038229], [0.8574553685,0.2647702911], [0.8581754563,0.2635348862], [0.8588952420,0.2622989633], [0.8596154269,0.2610613179], [0.8603353101,0.2598231542], [0.8610553354,0.2585837094], [0.8617754763,0.2573430284], [0.8624955528,0.2561014215], [0.8632153286,0.2548592964], [0.8639350406,0.2536162455], [0.8646553356,0.2523711506], [0.8653753306,0.2511255372], [0.8660953514,0.2498788422], [0.8668150730,0.2486316292], [0.8675352658,0.2473825626], [0.8682554825,0.2461324171], [0.8689754005,0.2448817533], [0.8696951640,0.2436303217], [0.8704149170,0.2423778723], [0.8711351117,0.2411236172], [0.8718552522,0.2398684191], [0.8725750954,0.2386127029], [0.8732949565,0.2373559189], [0.8740145210,0.2360986172], [0.8747342740,0.2348399499], [0.8754543919,0.2335796077], [0.8761746912,0.2323179106], [0.8768946943,0.2310556951], [0.8776145736,0.2297926602], [0.8783346712,0.2285282053], [0.8790546531,0.2272629169], [0.8797743401,0.2259971105], [0.8804939120,0.2247304709], [0.8812138446,0.2234621601], [0.8819337161,0.2221929204], [0.8826535948,0.2209226316], [0.8833731799,0.2196518251], [0.8840929474,0.2183796604], [0.8848132759,0.2171054668], [0.8855336076,0.2158302299], [0.8862536463,0.2145544745], [0.8869734963,0.2132780168], [0.8876933734,0.2120004748], [0.8884133130,0.2107217854], [0.8891329608,0.2094425781], [0.8898530169,0.2081616083], [0.8905731261,0.2068795070], [0.8912929442,0.2055968875], [0.8920131251,0.2043125847], 
[0.8927330152,0.2030277635], [0.8934529241,0.2017418725], [0.8941730851,0.2004544939], [0.8948929561,0.1991665971], [0.8956128046,0.1978777042], [0.8963327750,0.1965875565], [0.8970527813,0.1952963075], [0.8977724988,0.1940045404], [0.8984924094,0.1927113902], [0.8992125684,0.1914167569], [0.8999325260,0.1901214486], [0.9006521957,0.1888256224], [0.9013716648,0.1875291219], [0.9020917071,0.1862305520], [0.9028114621,0.1849314639], [0.9035313368,0.1836311235], [0.9042514045,0.1823293975], [0.9049711855,0.1810271534], [0.9056912515,0.1797233570], [0.9064110312,0.1784190425], [0.9071314180,0.1771125904], [0.9078515188,0.1758056198], [0.9085715015,0.1744978266], [0.9092916706,0.1731886578], [0.9100115925,0.1718789016], [0.9107316949,0.1705677798], [0.9114515127,0.1692561399], [0.9121715956,0.1679429801], [0.9128913943,0.1666293022], [0.9136109934,0.1653149527], [0.9143309149,0.1639989781], [0.9150508896,0.1626818694], [0.9157705811,0.1613642428], [0.9164906707,0.1600448506], [0.9172108055,0.1587243383], [0.9179309281,0.1574028113], [0.9186507682,0.1560807660], [0.9193705967,0.1547577059], [0.9200905249,0.1534334261], [0.9208101714,0.1521086283], [0.9215305408,0.1507814624], [0.9222506286,0.1494537780], [0.9229708262,0.1481248540], [0.9236907427,0.1467954119], [0.9244106520,0.1454649464], [0.9251306075,0.1441333591], [0.9258505602,0.1428007402], [0.9265702329,0.1414676034], [0.9272899034,0.1401334351], [0.9280100090,0.1387974231], [0.9287298353,0.1374608930], [0.9294498354,0.1361230034], [0.9301695567,0.1347845958], [0.9308898667,0.1334440560], [0.9316098981,0.1321029978], [0.9323298397,0.1307610700], [0.9330496743,0.1294183052], [0.9337696518,0.1280742373], [0.9344893518,0.1267296514], [0.9352095347,0.1253831262], [0.9359296283,0.1240357309], [0.9366497522,0.1226872416], [0.9373695994,0.1213382342], [0.9380893274,0.1199884138], [0.9388092215,0.1186372456], [0.9395292431,0.1172848013], [0.9402489889,0.1159318389], [0.9409686707,0.1145779608], 
[0.9416885002,0.1132227687], [0.9424086551,0.1118659268], [0.9431285349,0.1105085666], [0.9438485784,0.1091498611], [0.9445685689,0.1077902186], [0.9452882851,0.1064300581], [0.9460079488,0.1050689608], [0.9467280640,0.1037059728], [0.9474482126,0.1023418845], [0.9481680874,0.1009772779], [0.9488879960,0.0996115710], [0.9496083099,0.0982440577], [0.9503283507,0.0968760259], [0.9510483007,0.0955071297], [0.9517679782,0.0941377157], [0.9524880536,0.0927665077], [0.9532078569,0.0913947816], [0.9539280500,0.0900212754], [0.9546482500,0.0886467188], [0.9553681782,0.0872716440], [0.9560881137,0.0858955189], [0.9568080998,0.0845182601], [0.9575278150,0.0831404834], [0.9582479488,0.0817608687], [0.9589679894,0.0803803954], [0.9596877596,0.0789994040], [0.9604074374,0.0776175543], [0.9611276344,0.0762336704], [0.9618476359,0.0748491252], [0.9625673680,0.0734640621], [0.9632875712,0.0720770552], [0.9640075052,0.0706895300], [0.9647272650,0.0693013042], [0.9654473401,0.0679114335], [0.9661671467,0.0665210446], [0.9668871452,0.0651292484], [0.9676068757,0.0637369341], [0.9683268611,0.0623430901], [0.9690465788,0.0609487281], [0.9697663836,0.0595531612], [0.9704859373,0.0581570455], [0.9712052242,0.0567604125], [0.9719258029,0.0553602336], [0.9726461145,0.0539595359], [0.9733664680,0.0525577189], [0.9740865550,0.0511553833], [0.9748066103,0.0497520726], [0.9755266577,0.0483477401], [0.9762464394,0.0469428896], [0.9769663969,0.0455366594], [0.9776863258,0.0441294483], [0.9784061080,0.0427214878], [0.9791256255,0.0413130095], [0.9798456390,0.0399025237], [0.9805655893,0.0384911251], [0.9812857853,0.0370782076], [0.9820057169,0.0356647719], [0.9827258942,0.0342498168], [0.9834460511,0.0328338646], [0.9841659443,0.0314173941], [0.9848857478,0.0300000638], [0.9856057979,0.0285812112], [0.9863255849,0.0271618406], [0.9870451228,0.0257419255], [0.9877653545,0.0243196044], [0.9884853237,0.0228967649], [0.9892051324,0.0214732061], [0.9899251805,0.0200481371], 
[0.9906449667,0.0186225500], [0.9913647890,0.0171958551], [0.9920848096,0.0157677306], [0.9928045688,0.0143390881], [0.9935245161,0.0129090360], [0.9942442024,0.0114784659], [0.9949642472,0.0100461467], [0.9956841510,0.0086130714], [0.9964038065,0.0071794544], [0.9971233996,0.0057449260], [0.9978427330,0.0043098803], [0.9985620044,0.0028739234], [0.9992811316,0.0014372201], [1.0000000000,0.0000000000]]././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/sortingnetwork.py0000644000076500000240000001041114456461441017546 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . from itertools import product class SortingNetwork(list): """Sorting network class. From Wikipedia : A sorting network is an abstract mathematical model of a network of wires and comparator modules that is used to sort a sequence of numbers. Each comparator connects two wires and sort the values by outputting the smaller value to one wire, and a larger value to the other. 
""" def __init__(self, dimension, connectors = []): self.dimension = dimension for wire1, wire2 in connectors: self.addConnector(wire1, wire2) def addConnector(self, wire1, wire2): """Add a connector between wire1 and wire2 in the network.""" if wire1 == wire2: return if wire1 > wire2: wire1, wire2 = wire2, wire1 index = 0 for level in reversed(self): if self.checkConflict(level, wire1, wire2): break index -= 1 if index == 0: self.append([(wire1, wire2)]) else: self[index].append((wire1, wire2)) def checkConflict(self, level, wire1, wire2): """Check if a connection between `wire1` and `wire2` can be added on this `level`.""" for wires in level: if wires[1] >= wire1 and wires[0] <= wire2: return True def sort(self, values): """Sort the values in-place based on the connectors in the network.""" for level in self: for wire1, wire2 in level: if values[wire1] > values[wire2]: values[wire1], values[wire2] = values[wire2], values[wire1] def assess(self, cases=None): """Try to sort the **cases** using the network, return the number of misses. If **cases** is None, test all possible cases according to the network dimensionality. 
""" if cases is None: cases = product((0, 1), repeat=self.dimension) misses = 0 ordered = [[0]*(self.dimension-i) + [1]*i for i in range(self.dimension+1)] for sequence in cases: sequence = list(sequence) self.sort(sequence) misses += (sequence != ordered[sum(sequence)]) return misses def draw(self): """Return an ASCII representation of the network.""" str_wires = [["-"]*7 * self.depth] str_wires[0][0] = "0" str_wires[0][1] = " o" str_spaces = [] for i in range(1, self.dimension): str_wires.append(["-"]*7 * self.depth) str_spaces.append([" "]*7 * self.depth) str_wires[i][0] = str(i) str_wires[i][1] = " o" for index, level in enumerate(self): for wire1, wire2 in level: str_wires[wire1][(index+1)*6] = "x" str_wires[wire2][(index+1)*6] = "x" for i in range(wire1, wire2): str_spaces[i][(index+1)*6+1] = "|" for i in range(wire1+1, wire2): str_wires[i][(index+1)*6] = "|" network_draw = "".join(str_wires[0]) for line, space in zip(str_wires[1:], str_spaces): network_draw += "\n" network_draw += "".join(space) network_draw += "\n" network_draw += "".join(line) return network_draw @property def depth(self): """Return the number of parallel steps that it takes to sort any input. 
""" return len(self) @property def length(self): """Return the number of comparison-swap used.""" return sum(len(level) for level in self) ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1689936700.723445 deap-1.4.1/examples/ga/tsp/0000755000076500000240000000000014456461475014715 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/tsp/gr120.json0000644000076500000240000021543214456461441016443 0ustar00runnerstaff{ "TourSize" : 120, "OptTour" : [1, 76, 59, 15, 30, 29, 120, 32, 92, 28, 45, 78, 86, 94, 81, 22, 66, 31, 117, 85, 18, 19, 25, 108, 43, 79, 52, 33, 100, 58, 91, 68, 65, 69, 113, 107, 20, 46, 50, 44, 75, 14, 87, 74, 105, 40, 72, 38, 7, 56, 41, 42, 98, 17, 118, 49, 13, 51, 11, 23, 9, 103, 119, 3, 82, 2, 115, 21, 93, 53, 64, 109, 88, 97, 12, 95, 77, 39, 63, 5, 27, 80, 101, 102, 48, 110, 112, 106, 114, 73, 57, 83, 67, 37, 62, 99, 10, 35, 104, 36, 84, 6, 89, 55, 47, 71, 26, 4, 34, 116, 70, 8, 54, 90, 96, 111, 24, 60, 16, 61], "OptDistance" : 6942, "DistanceMatrix" : [[0, 534, 434, 294, 593, 409, 332, 232, 464, 566, 552, 802, 633, 257, 187, 91, 412, 400, 472, 389, 610, 340, 510, 153, 511, 269, 525, 150, 80, 130, 401, 134, 666, 259, 505, 453, 627, 339, 710, 243, 376, 449, 505, 322, 185, 353, 324, 388, 447, 360, 605, 656, 573, 293, 372, 330, 610, 598, 214, 154, 70, 606, 631, 642, 503, 372, 641, 561, 478, 247, 317, 272, 575, 219, 293, 54, 648, 211, 568, 497, 290, 475, 654, 445, 375, 268, 261, 710, 396, 295, 651, 175, 585, 250, 717, 246, 788, 426, 596, 634, 507, 463, 408, 529, 192, 529, 434, 535, 630, 446, 166, 471, 442, 523, 566, 235, 432, 435, 369, 121], [534, 0, 107, 241, 190, 351, 320, 354, 124, 508, 80, 316, 432, 641, 577, 450, 624, 752, 805, 665, 76, 730, 152, 447, 844, 283, 157, 539, 507, 520, 791, 524, 942, 281, 447, 358, 334, 275, 283, 353, 520, 594, 781, 611, 575, 638, 314, 234, 664, 606, 133, 932, 113, 384, 283, 479, 297, 874, 604, 401, 464, 
349, 228, 129, 779, 762, 348, 837, 754, 316, 336, 382, 276, 479, 683, 529, 188, 601, 844, 150, 680, 65, 341, 387, 765, 658, 519, 184, 291, 424, 927, 565, 67, 640, 221, 454, 302, 596, 575, 910, 209, 186, 169, 389, 412, 286, 710, 811, 108, 252, 518, 313, 718, 230, 45, 313, 822, 653, 167, 511], [434, 107, 0, 148, 137, 240, 232, 261, 88, 397, 127, 336, 479, 541, 477, 357, 531, 659, 712, 572, 183, 630, 134, 354, 751, 190, 95, 446, 414, 427, 691, 431, 849, 188, 336, 247, 251, 187, 254, 260, 427, 501, 688, 518, 482, 545, 191, 127, 571, 513, 180, 839, 101, 274, 199, 386, 234, 781, 504, 308, 371, 266, 175, 176, 686, 662, 265, 744, 661, 223, 212, 289, 199, 386, 583, 429, 182, 501, 751, 67, 580, 42, 278, 276, 665, 558, 426, 271, 180, 278, 834, 472, 146, 540, 251, 361, 322, 503, 330, 817, 111, 79, 105, 278, 319, 231, 617, 718, 191, 145, 425, 202, 625, 147, 139, 220, 722, 560, 79, 418], [294, 241, 148, 0, 374, 190, 139, 113, 171, 347, 259, 509, 552, 407, 337, 210, 384, 512, 565, 425, 317, 490, 217, 207, 604, 42, 232, 299, 267, 280, 551, 284, 702, 40, 286, 234, 408, 94, 491, 113, 280, 354, 541, 371, 335, 398, 106, 124, 424, 366, 312, 692, 280, 143, 153, 239, 391, 634, 364, 161, 224, 387, 412, 349, 539, 522, 422, 597, 514, 76, 95, 142, 356, 239, 443, 289, 355, 361, 604, 204, 440, 182, 435, 226, 525, 418, 279, 417, 177, 183, 687, 325, 292, 400, 424, 213, 495, 356, 377, 670, 201, 155, 116, 310, 172, 310, 470, 571, 337, 227, 277, 252, 478, 304, 273, 73, 582, 413, 77, 271], [593, 190, 137, 374, 0, 258, 494, 372, 202, 331, 234, 222, 586, 706, 636, 509, 690, 818, 871, 731, 192, 789, 248, 470, 910, 332, 42, 598, 566, 579, 850, 583, 1008, 364, 354, 265, 168, 313, 117, 419, 586, 660, 847, 677, 634, 704, 275, 219, 730, 672, 287, 998, 79, 319, 229, 545, 107, 940, 663, 460, 523, 183, 38, 121, 845, 821, 152, 903, 820, 360, 289, 448, 86, 545, 742, 588, 68, 660, 910, 70, 739, 137, 151, 294, 824, 717, 585, 239, 198, 308, 993, 624, 135, 699, 137, 373, 208, 662, 237, 976, 139, 157, 242, 236, 477, 
177, 776, 877, 165, 162, 437, 166, 784, 81, 228, 371, 881, 719, 205, 570], [409, 351, 240, 190, 258, 0, 310, 188, 328, 171, 365, 470, 723, 522, 452, 325, 506, 634, 687, 547, 442, 605, 370, 280, 726, 148, 257, 414, 382, 395, 666, 399, 824, 180, 110, 59, 239, 281, 375, 235, 402, 476, 663, 493, 450, 520, 91, 113, 546, 488, 418, 814, 329, 129, 39, 361, 275, 756, 479, 276, 339, 216, 296, 369, 661, 637, 262, 719, 636, 176, 105, 264, 240, 361, 558, 404, 316, 476, 726, 257, 555, 282, 319, 50, 640, 533, 401, 487, 53, 118, 809, 440, 385, 515, 385, 183, 456, 478, 179, 792, 172, 161, 298, 127, 293, 165, 592, 693, 424, 111, 247, 99, 600, 188, 383, 187, 697, 535, 259, 386], [332, 320, 232, 139, 494, 310, 0, 208, 188, 467, 249, 588, 417, 184, 375, 248, 210, 338, 391, 251, 396, 394, 175, 246, 430, 169, 316, 279, 305, 318, 474, 314, 528, 142, 406, 354, 528, 45, 611, 89, 106, 180, 367, 197, 231, 224, 225, 289, 250, 192, 264, 518, 359, 262, 273, 65, 511, 460, 402, 199, 262, 507, 532, 428, 365, 456, 542, 423, 340, 170, 218, 84, 476, 167, 238, 327, 434, 231, 430, 288, 317, 261, 555, 346, 384, 231, 141, 496, 297, 302, 513, 311, 371, 277, 503, 332, 574, 182, 497, 496, 408, 251, 131, 430, 160, 430, 296, 397, 416, 347, 336, 372, 304, 424, 352, 168, 441, 239, 153, 309], [232, 354, 261, 113, 372, 188, 208, 0, 284, 345, 372, 584, 621, 391, 321, 141, 408, 536, 589, 449, 430, 474, 330, 113, 628, 63, 355, 283, 251, 264, 535, 268, 726, 72, 284, 232, 406, 184, 489, 133, 300, 378, 565, 395, 319, 422, 104, 168, 448, 390, 425, 716, 403, 60, 151, 259, 389, 658, 348, 78, 208, 385, 410, 472, 563, 506, 420, 621, 538, 39, 79, 162, 354, 259, 427, 273, 430, 345, 628, 327, 424, 295, 433, 224, 509, 402, 299, 530, 175, 100, 711, 309, 405, 384, 499, 130, 570, 380, 375, 694, 286, 216, 229, 308, 154, 308, 494, 595, 450, 225, 114, 250, 502, 302, 386, 43, 566, 437, 190, 255], [464, 124, 88, 171, 202, 328, 188, 284, 0, 485, 61, 392, 411, 372, 507, 380, 398, 526, 579, 439, 202, 582, 46, 377, 618, 213, 160, 469, 437, 
450, 662, 454, 716, 211, 424, 335, 300, 143, 319, 283, 294, 368, 555, 372, 419, 412, 244, 215, 438, 380, 112, 706, 163, 314, 324, 253, 322, 648, 534, 331, 394, 354, 240, 232, 553, 644, 314, 611, 528, 246, 266, 234, 287, 317, 426, 459, 238, 479, 618, 155, 505, 65, 366, 364, 572, 419, 329, 300, 268, 354, 701, 495, 175, 465, 307, 384, 378, 370, 552, 684, 213, 167, 57, 366, 342, 319, 484, 585, 220, 233, 448, 290, 492, 235, 121, 243, 629, 427, 97, 441], [566, 508, 397, 347, 331, 171, 467, 345, 485, 0, 522, 502, 874, 679, 609, 482, 663, 791, 844, 704, 515, 762, 527, 437, 883, 305, 330, 571, 539, 552, 823, 556, 981, 337, 70, 182, 166, 438, 354, 392, 559, 633, 820, 650, 607, 677, 248, 270, 703, 645, 575, 971, 402, 286, 196, 518, 248, 913, 636, 433, 496, 147, 306, 428, 818, 794, 189, 876, 793, 333, 262, 421, 250, 518, 715, 561, 362, 633, 883, 335, 712, 439, 266, 125, 797, 690, 558, 546, 218, 275, 966, 597, 458, 672, 417, 340, 488, 635, 100, 949, 329, 318, 455, 125, 450, 182, 749, 850, 483, 268, 404, 210, 757, 255, 540, 344, 854, 692, 416, 543], [552, 80, 127, 259, 234, 365, 249, 372, 61, 522, 0, 386, 354, 433, 595, 468, 459, 587, 640, 500, 141, 643, 72, 465, 679, 301, 167, 557, 525, 538, 723, 542, 777, 299, 461, 372, 348, 204, 351, 368, 355, 429, 616, 446, 480, 473, 332, 254, 499, 441, 55, 767, 157, 402, 324, 314, 331, 709, 622, 419, 482, 363, 272, 226, 614, 705, 362, 672, 589, 334, 354, 295, 296, 378, 487, 547, 232, 480, 679, 164, 566, 85, 375, 410, 633, 480, 390, 249, 305, 442, 762, 583, 147, 526, 301, 472, 372, 431, 589, 745, 223, 206, 118, 403, 430, 328, 545, 646, 188, 272, 536, 358, 553, 174, 60, 331, 690, 488, 185, 529], [802, 316, 336, 509, 222, 470, 588, 584, 392, 502, 386, 0, 738, 915, 845, 718, 892, 1020, 1073, 933, 233, 998, 438, 715, 1112, 544, 254, 807, 775, 788, 1059, 792, 1210, 549, 566, 477, 331, 543, 202, 621, 788, 862, 1049, 879, 843, 906, 487, 431, 932, 874, 439, 1200, 235, 531, 441, 747, 254, 1142, 872, 669, 732, 357, 210, 187, 1047, 1030, 313, 1105, 
1022, 572, 501, 650, 266, 747, 951, 797, 154, 869, 1112, 282, 948, 321, 298, 506, 1033, 926, 787, 168, 410, 520, 1195, 833, 249, 908, 95, 585, 23, 864, 407, 1178, 351, 369, 437, 447, 680, 379, 978, 1079, 190, 374, 649, 378, 986, 293, 314, 583, 1090, 921, 435, 779], [633, 432, 479, 552, 586, 723, 417, 621, 411, 874, 354, 738, 0, 390, 572, 661, 227, 524, 413, 274, 492, 444, 380, 659, 407, 582, 521, 488, 572, 543, 524, 530, 446, 555, 819, 767, 700, 458, 679, 502, 340, 256, 289, 359, 462, 282, 638, 606, 188, 273, 313, 444, 509, 675, 686, 378, 693, 370, 599, 612, 567, 715, 640, 578, 245, 506, 714, 311, 261, 583, 631, 497, 672, 474, 352, 595, 584, 466, 348, 516, 506, 437, 755, 759, 434, 420, 422, 616, 710, 715, 407, 504, 499, 466, 653, 745, 724, 253, 941, 414, 575, 558, 468, 755, 573, 680, 346, 336, 540, 624, 749, 679, 300, 596, 411, 581, 491, 220, 537, 518], [257, 641, 541, 407, 706, 522, 184, 391, 372, 679, 433, 915, 390, 0, 196, 228, 169, 151, 257, 146, 723, 125, 359, 345, 296, 382, 638, 112, 196, 167, 238, 154, 423, 372, 618, 566, 740, 229, 823, 209, 146, 206, 262, 69, 86, 110, 437, 501, 204, 117, 448, 413, 686, 451, 485, 119, 723, 355, 223, 298, 191, 719, 744, 755, 260, 209, 754, 318, 235, 360, 430, 180, 688, 83, 50, 219, 761, 74, 325, 610, 139, 588, 767, 558, 160, 53, 43, 823, 509, 491, 408, 128, 698, 99, 830, 472, 901, 183, 709, 391, 620, 576, 315, 642, 228, 642, 191, 292, 743, 559, 435, 584, 199, 636, 679, 348, 217, 192, 482, 142], [187, 577, 477, 337, 636, 452, 375, 321, 507, 609, 595, 845, 572, 196, 0, 158, 351, 270, 342, 328, 653, 185, 553, 275, 381, 312, 568, 96, 88, 59, 238, 63, 605, 302, 548, 496, 670, 382, 753, 286, 322, 388, 444, 261, 124, 292, 367, 431, 386, 299, 648, 595, 616, 381, 415, 276, 653, 537, 49, 228, 121, 649, 674, 685, 442, 209, 684, 500, 417, 290, 360, 315, 618, 172, 232, 92, 691, 81, 507, 540, 98, 518, 697, 488, 245, 138, 200, 753, 439, 421, 590, 76, 628, 89, 760, 402, 831, 365, 639, 573, 550, 506, 451, 572, 235, 572, 373, 474, 673, 489, 
365, 514, 381, 566, 609, 278, 302, 374, 412, 99], [91, 450, 357, 210, 509, 325, 248, 141, 380, 482, 468, 718, 661, 228, 158, 0, 383, 371, 443, 360, 526, 311, 426, 98, 482, 185, 441, 120, 77, 101, 372, 105, 637, 175, 421, 369, 504, 250, 626, 159, 306, 420, 476, 293, 156, 324, 240, 304, 418, 331, 520, 627, 489, 201, 288, 260, 526, 569, 185, 63, 27, 522, 547, 558, 474, 343, 557, 532, 449, 163, 233, 188, 491, 149, 264, 82, 564, 182, 539, 413, 261, 391, 570, 361, 346, 239, 232, 626, 312, 241, 622, 146, 501, 221, 633, 237, 704, 397, 512, 605, 423, 379, 325, 445, 108, 445, 405, 506, 546, 362, 150, 387, 413, 439, 482, 151, 403, 406, 286, 84], [412, 624, 531, 384, 690, 506, 210, 408, 398, 663, 459, 892, 227, 169, 351, 383, 0, 167, 220, 53, 700, 223, 385, 500, 259, 365, 615, 267, 351, 322, 303, 309, 357, 338, 602, 550, 724, 255, 807, 285, 112, 45, 196, 94, 241, 61, 421, 485, 36, 46, 474, 347, 663, 458, 469, 150, 707, 289, 378, 453, 346, 703, 728, 732, 141, 285, 738, 252, 116, 366, 414, 280, 672, 253, 131, 374, 738, 243, 259, 587, 285, 565, 751, 542, 213, 199, 211, 800, 493, 498, 342, 283, 675, 245, 807, 528, 818, 27, 693, 325, 604, 553, 341, 626, 383, 626, 125, 226, 720, 543, 590, 568, 81, 620, 656, 364, 270, 29, 459, 297], [400, 752, 659, 512, 818, 634, 338, 536, 526, 791, 587, 1020, 524, 151, 270, 371, 167, 0, 57, 112, 828, 67, 513, 488, 96, 493, 743, 255, 339, 310, 138, 297, 280, 466, 730, 678, 852, 350, 935, 413, 240, 204, 119, 142, 226, 125, 549, 613, 202, 150, 602, 270, 791, 586, 597, 278, 835, 212, 319, 441, 334, 831, 856, 860, 151, 120, 866, 175, 132, 494, 542, 408, 800, 235, 101, 362, 866, 189, 182, 715, 155, 693, 879, 670, 48, 132, 183, 928, 621, 626, 265, 271, 803, 178, 935, 656, 1006, 181, 821, 248, 732, 681, 469, 754, 371, 754, 82, 94, 848, 671, 578, 696, 137, 748, 784, 492, 105, 190, 587, 285], [472, 805, 712, 565, 871, 687, 391, 589, 579, 844, 640, 1073, 413, 257, 342, 443, 220, 57, 0, 165, 881, 139, 566, 560, 39, 546, 796, 327, 411, 382, 126, 369, 217, 519, 
783, 731, 905, 436, 988, 466, 293, 257, 136, 224, 298, 178, 602, 666, 255, 203, 655, 200, 844, 639, 650, 331, 888, 197, 391, 513, 406, 884, 909, 913, 168, 108, 919, 192, 149, 547, 595, 461, 853, 339, 174, 434, 919, 261, 150, 768, 227, 746, 932, 723, 59, 204, 294, 981, 674, 679, 282, 343, 856, 250, 988, 709, 1059, 234, 874, 185, 785, 734, 522, 807, 443, 807, 142, 76, 901, 724, 650, 749, 184, 801, 837, 545, 69, 243, 640, 357], [389, 665, 572, 425, 731, 547, 251, 449, 439, 704, 500, 933, 274, 146, 328, 360, 53, 112, 165, 0, 741, 168, 426, 477, 204, 406, 656, 244, 328, 299, 248, 286, 279, 379, 643, 591, 765, 296, 848, 326, 153, 101, 118, 84, 218, 38, 462, 526, 88, 63, 515, 269, 704, 499, 510, 191, 748, 211, 355, 430, 323, 744, 769, 773, 116, 230, 779, 174, 91, 407, 455, 321, 713, 230, 108, 351, 779, 220, 181, 628, 229, 606, 792, 583, 158, 202, 178, 841, 534, 539, 264, 260, 716, 220, 848, 569, 919, 82, 734, 247, 645, 594, 382, 667, 360, 667, 47, 129, 761, 584, 567, 609, 53, 661, 697, 405, 215, 64, 500, 274], [610, 76, 183, 317, 192, 442, 396, 430, 202, 515, 141, 233, 492, 723, 653, 526, 700, 828, 881, 741, 0, 806, 213, 523, 920, 359, 188, 615, 583, 596, 867, 600, 1018, 357, 538, 449, 360, 351, 272, 429, 596, 670, 857, 687, 651, 714, 390, 310, 740, 682, 193, 1008, 131, 460, 413, 555, 302, 950, 680, 477, 540, 375, 233, 98, 855, 838, 344, 913, 830, 392, 412, 458, 289, 555, 759, 605, 177, 677, 920, 216, 756, 141, 346, 478, 841, 734, 595, 108, 382, 500, 1003, 641, 57, 716, 190, 530, 203, 672, 421, 986, 275, 262, 245, 420, 488, 344, 786, 887, 43, 346, 594, 350, 794, 265, 81, 389, 898, 729, 243, 587], [340, 730, 630, 490, 789, 605, 394, 474, 582, 762, 643, 998, 444, 125, 185, 311, 223, 67, 139, 168, 806, 0, 569, 428, 178, 465, 721, 195, 279, 250, 101, 237, 336, 455, 701, 649, 823, 439, 906, 439, 296, 260, 175, 146, 166, 181, 520, 584, 258, 206, 658, 326, 769, 534, 568, 334, 806, 268, 234, 381, 274, 802, 827, 838, 207, 72, 837, 231, 188, 443, 513, 464, 771, 228, 105, 302, 844, 
129, 238, 693, 88, 671, 850, 641, 42, 72, 162, 906, 592, 574, 321, 211, 781, 96, 913, 555, 984, 237, 792, 304, 703, 659, 525, 725, 311, 725, 145, 154, 826, 642, 518, 667, 209, 719, 762, 431, 99, 246, 555, 225], [510, 152, 134, 217, 248, 370, 175, 330, 46, 527, 72, 438, 380, 359, 553, 426, 385, 513, 566, 426, 213, 569, 0, 423, 605, 259, 206, 515, 483, 496, 649, 500, 703, 257, 466, 377, 346, 130, 365, 226, 281, 355, 542, 372, 406, 399, 290, 232, 425, 367, 89, 693, 209, 360, 370, 240, 368, 635, 580, 377, 440, 400, 286, 278, 540, 631, 360, 598, 515, 292, 312, 221, 333, 304, 413, 505, 284, 406, 605, 201, 492, 111, 412, 406, 559, 406, 316, 321, 310, 400, 688, 541, 221, 452, 353, 430, 424, 357, 460, 671, 259, 213, 72, 408, 388, 365, 471, 572, 266, 279, 494, 332, 479, 281, 132, 289, 616, 414, 111, 487], [153, 447, 354, 207, 470, 280, 246, 113, 377, 437, 465, 715, 659, 345, 275, 98, 500, 488, 560, 477, 523, 428, 423, 0, 599, 170, 438, 237, 205, 218, 489, 222, 754, 172, 376, 324, 504, 248, 587, 157, 338, 537, 593, 410, 273, 441, 238, 302, 535, 448, 517, 744, 486, 151, 241, 297, 487, 686, 302, 47, 162, 483, 508, 555, 591, 460, 518, 649, 566, 133, 213, 186, 452, 263, 381, 227, 561, 299, 656, 410, 378, 388, 531, 316, 463, 356, 342, 623, 273, 191, 739, 263, 498, 338, 631, 167, 701, 514, 467, 722, 384, 376, 322, 400, 167, 406, 522, 623, 543, 323, 90, 348, 530, 400, 479, 137, 520, 523, 283, 209], [511, 844, 751, 604, 910, 726, 430, 628, 618, 883, 679, 1112, 407, 296, 381, 482, 259, 96, 39, 204, 920, 178, 605, 599, 0, 585, 835, 366, 450, 412, 165, 408, 194, 558, 822, 770, 944, 475, 1027, 505, 332, 296, 129, 263, 337, 217, 641, 705, 294, 242, 694, 177, 883, 678, 689, 370, 927, 157, 430, 552, 445, 923, 948, 952, 162, 147, 958, 186, 159, 586, 634, 500, 892, 378, 213, 473, 958, 300, 127, 807, 266, 785, 971, 762, 98, 243, 333, 1020, 713, 718, 276, 382, 895, 289, 1027, 748, 1098, 273, 913, 162, 824, 773, 561, 846, 482, 846, 145, 75, 940, 763, 689, 788, 194, 840, 876, 584, 108, 282, 679, 
396], [269, 283, 190, 42, 332, 148, 169, 63, 213, 305, 301, 544, 582, 382, 312, 185, 365, 493, 546, 406, 359, 465, 259, 170, 585, 0, 274, 274, 242, 255, 526, 259, 683, 27, 244, 192, 366, 136, 449, 94, 261, 335, 522, 352, 310, 379, 64, 128, 405, 347, 354, 673, 322, 101, 111, 220, 349, 615, 339, 136, 199, 345, 370, 391, 520, 497, 380, 578, 495, 37, 53, 123, 314, 220, 418, 264, 390, 336, 585, 246, 415, 224, 393, 184, 500, 393, 260, 459, 135, 141, 668, 300, 334, 375, 459, 171, 530, 337, 335, 651, 202, 176, 158, 268, 153, 268, 451, 552, 379, 185, 235, 210, 459, 262, 315, 48, 557, 394, 119, 246], [525, 157, 95, 232, 42, 257, 316, 355, 160, 330, 167, 254, 521, 638, 568, 441, 615, 743, 796, 656, 188, 721, 206, 438, 835, 274, 0, 530, 498, 511, 782, 515, 933, 272, 353, 264, 179, 271, 159, 344, 511, 585, 772, 602, 566, 629, 227, 163, 655, 597, 220, 923, 57, 318, 228, 470, 149, 865, 595, 392, 455, 194, 80, 132, 770, 753, 193, 828, 745, 307, 248, 373, 127, 470, 674, 520, 100, 592, 835, 28, 671, 95, 193, 293, 756, 649, 510, 241, 197, 307, 918, 556, 131, 631, 169, 372, 240, 587, 236, 901, 87, 115, 200, 235, 403, 159, 701, 802, 161, 161, 436, 165, 709, 75, 189, 304, 813, 644, 163, 502], [150, 539, 446, 299, 598, 414, 279, 283, 469, 571, 557, 807, 488, 112, 96, 120, 267, 255, 327, 244, 615, 195, 515, 237, 366, 274, 530, 0, 63, 56, 256, 34, 521, 264, 509, 458, 632, 286, 715, 190, 220, 304, 360, 177, 40, 208, 329, 393, 302, 215, 610, 511, 578, 343, 377, 174, 615, 453, 123, 190, 83, 611, 636, 647, 358, 227, 646, 416, 333, 252, 397, 193, 580, 79, 148, 119, 653, 105, 423, 502, 144, 480, 659, 450, 230, 123, 98, 715, 401, 383, 506, 32, 590, 105, 722, 364, 793, 281, 601, 489, 512, 468, 413, 534, 119, 534, 289, 390, 635, 451, 402, 476, 297, 528, 571, 240, 287, 290, 375, 35], [80, 507, 414, 267, 566, 382, 305, 251, 437, 539, 525, 775, 572, 196, 88, 77, 351, 339, 411, 328, 583, 279, 483, 205, 450, 242, 498, 63, 0, 25, 340, 29, 605, 232, 478, 426, 600, 312, 683, 216, 322, 388, 444, 261, 124, 
292, 297, 361, 386, 299, 577, 595, 546, 311, 345, 276, 583, 537, 115, 158, 47, 579, 604, 615, 442, 311, 614, 500, 417, 220, 290, 245, 548, 139, 232, 31, 621, 150, 507, 470, 176, 448, 627, 418, 314, 207, 200, 683, 369, 351, 590, 76, 558, 189, 690, 332, 761, 365, 569, 573, 480, 436, 382, 502, 165, 502, 373, 474, 603, 419, 295, 444, 381, 496, 539, 208, 371, 374, 343, 29], [130, 520, 427, 280, 579, 395, 318, 264, 450, 552, 538, 788, 543, 167, 59, 101, 322, 310, 382, 299, 596, 250, 496, 218, 412, 255, 511, 56, 25, 0, 311, 22, 576, 245, 491, 439, 613, 325, 696, 229, 293, 359, 415, 232, 95, 263, 310, 374, 357, 270, 591, 566, 559, 324, 358, 247, 596, 508, 86, 171, 64, 592, 617, 628, 413, 282, 627, 471, 388, 233, 378, 258, 561, 134, 203, 43, 634, 121, 478, 483, 164, 461, 640, 431, 285, 178, 171, 696, 382, 364, 561, 47, 571, 160, 703, 345, 774, 336, 582, 544, 493, 449, 394, 515, 178, 515, 344, 445, 616, 432, 383, 457, 352, 509, 552, 221, 342, 345, 356, 42], [401, 791, 691, 551, 850, 666, 474, 535, 662, 823, 723, 1059, 524, 238, 238, 372, 303, 138, 126, 248, 867, 101, 649, 489, 165, 526, 782, 256, 340, 311, 0, 298, 416, 516, 762, 710, 884, 519, 967, 500, 376, 340, 255, 247, 227, 261, 581, 645, 338, 286, 738, 406, 830, 595, 629, 414, 867, 348, 287, 442, 335, 863, 888, 899, 287, 29, 898, 311, 268, 504, 574, 544, 832, 289, 206, 363, 905, 190, 318, 754, 140, 732, 911, 702, 90, 185, 275, 967, 653, 635, 401, 272, 842, 151, 974, 616, 1045, 317, 853, 384, 764, 720, 605, 786, 372, 786, 225, 202, 887, 703, 579, 728, 285, 780, 823, 492, 77, 326, 626, 286], [134, 524, 431, 284, 583, 399, 314, 268, 454, 556, 542, 792, 530, 154, 63, 105, 309, 297, 369, 286, 600, 237, 500, 222, 408, 259, 515, 34, 29, 22, 298, 0, 563, 249, 494, 443, 617, 321, 700, 225, 253, 346, 402, 219, 82, 250, 314, 378, 344, 257, 595, 553, 563, 328, 362, 207, 600, 495, 90, 175, 68, 596, 621, 632, 400, 269, 631, 458, 375, 237, 382, 228, 565, 112, 190, 58, 638, 108, 465, 487, 136, 465, 644, 435, 272, 165, 131, 700, 386, 
368, 548, 30, 575, 147, 707, 349, 778, 323, 586, 531, 497, 453, 398, 519, 154, 519, 331, 432, 620, 436, 387, 461, 339, 513, 556, 225, 329, 332, 360, 36], [666, 942, 849, 702, 1008, 824, 528, 726, 716, 981, 777, 1210, 446, 423, 605, 637, 357, 280, 217, 279, 1018, 336, 703, 754, 194, 683, 933, 521, 605, 576, 416, 563, 0, 656, 920, 868, 1042, 573, 1125, 603, 430, 394, 161, 361, 495, 315, 739, 803, 392, 340, 792, 42, 981, 776, 787, 468, 1025, 84, 632, 707, 600, 1021, 1046, 1050, 201, 398, 1056, 154, 226, 684, 732, 598, 990, 507, 385, 628, 1056, 497, 98, 905, 424, 883, 1069, 860, 326, 401, 455, 1118, 811, 816, 174, 537, 993, 447, 1125, 846, 1196, 371, 1011, 32, 922, 871, 659, 944, 637, 944, 238, 175, 1038, 861, 844, 886, 261, 938, 974, 682, 383, 380, 777, 551], [259, 281, 188, 40, 364, 180, 142, 72, 211, 337, 299, 549, 555, 372, 302, 175, 338, 466, 519, 379, 357, 455, 257, 172, 558, 27, 272, 264, 232, 245, 516, 249, 656, 0, 276, 224, 398, 102, 481, 67, 234, 308, 495, 325, 300, 352, 95, 159, 378, 320, 352, 646, 320, 132, 143, 193, 381, 588, 329, 126, 189, 377, 402, 389, 493, 487, 412, 551, 468, 34, 88, 96, 346, 193, 408, 254, 395, 326, 558, 244, 405, 222, 425, 216, 490, 383, 233, 457, 167, 172, 641, 290, 332, 365, 464, 202, 535, 310, 367, 624, 278, 210, 155, 300, 126, 300, 424, 525, 877, 217, 189, 242, 432, 294, 313, 32, 547, 367, 116, 236], [505, 447, 336, 286, 354, 110, 406, 284, 424, 70, 461, 566, 819, 618, 548, 421, 602, 730, 783, 643, 538, 701, 466, 376, 822, 244, 353, 509, 478, 491, 762, 494, 920, 276, 0, 95, 162, 377, 345, 331, 498, 572, 759, 589, 546, 616, 187, 209, 642, 584, 514, 910, 425, 225, 135, 457, 239, 852, 575, 372, 435, 139, 293, 465, 757, 733, 185, 815, 732, 272, 201, 360, 237, 457, 654, 500, 412, 572, 822, 353, 651, 378, 262, 64, 736, 629, 497, 583, 157, 214, 905, 536, 481, 611, 481, 279, 552, 574, 91, 888, 268, 257, 394, 80, 389, 137, 688, 789, 520, 207, 343, 156, 696, 284, 479, 283, 793, 631, 355, 482], [453, 358, 247, 234, 265, 59, 354, 232, 335, 
182, 372, 477, 767, 566, 496, 369, 550, 678, 731, 591, 449, 649, 377, 324, 770, 192, 264, 458, 426, 439, 710, 443, 868, 224, 95, 0, 177, 325, 337, 279, 446, 520, 707, 537, 494, 564, 135, 120, 590, 532, 425, 858, 336, 173, 83, 405, 231, 800, 523, 320, 383, 154, 261, 376, 705, 681, 200, 763, 680, 220, 149, 308, 205, 405, 602, 448, 323, 520, 770, 264, 599, 289, 254, 57, 684, 577, 445, 494, 67, 162, 853, 484, 392, 559, 392, 227, 463, 522, 117, 836, 141, 168, 305, 68, 337, 106, 636, 737, 431, 141, 291, 61, 644, 195, 390, 231, 741, 579, 266, 430], [627, 334, 251, 408, 168, 239, 528, 406, 300, 166, 348, 331, 700, 740, 670, 504, 724, 852, 905, 765, 360, 823, 346, 504, 944, 366, 179, 632, 600, 613, 884, 617, 1042, 398, 162, 177, 0, 425, 183, 453, 620, 694, 881, 711, 668, 738, 309, 219, 764, 706, 401, 1032, 236, 353, 263, 579, 77, 974, 697, 494, 557, 22, 138, 260, 879, 855, 23, 937, 854, 394, 323, 482, 82, 579, 776, 622, 194, 694, 944, 184, 773, 272, 100, 193, 858, 751, 619, 378, 215, 342, 1027, 658, 303, 733, 246, 407, 317, 696, 72, 1010, 173, 219, 344, 112, 511, 75, 810, 911, 315, 162, 471, 135, 818, 104, 366, 405, 915, 753, 305, 604], [339, 275, 187, 94, 313, 281, 45, 184, 143, 438, 204, 543, 458, 229, 382, 250, 255, 350, 436, 296, 351, 439, 130, 248, 475, 136, 271, 286, 312, 325, 519, 321, 573, 102, 377, 325, 425, 0, 430, 96, 151, 225, 412, 242, 276, 269, 196, 229, 295, 237, 219, 563, 314, 233, 244, 110, 408, 505, 409, 201, 269, 447, 351, 383, 410, 501, 439, 468, 385, 146, 189, 91, 373, 174, 283, 334, 389, 276, 475, 243, 362, 216, 452, 317, 429, 276, 186, 451, 268, 273, 558, 318, 326, 322, 458, 303, 529, 227, 468, 541, 252, 206, 86, 401, 162, 370, 341, 442, 371, 272, 295, 311, 349, 321, 307, 144, 486, 284, 108, 316], [710, 283, 254, 491, 117, 375, 611, 489, 319, 354, 351, 202, 679, 823, 753, 626, 807, 935, 988, 848, 272, 906, 365, 587, 1027, 449, 159, 715, 683, 696, 967, 700, 1125, 481, 345, 337, 183, 430, 0, 536, 703, 777, 964, 794, 751, 821, 392, 336, 847, 789, 404, 
1115, 176, 436, 346, 662, 106, 1057, 780, 577, 640, 209, 79, 161, 962, 938, 165, 1020, 937, 477, 406, 565, 141, 662, 859, 705, 95, 777, 1027, 187, 856, 254, 103, 411, 941, 834, 702, 279, 315, 425, 1110, 741, 215, 816, 117, 490, 188, 779, 259, 1093, 256, 274, 359, 299, 594, 231, 893, 994, 216, 279, 554, 276, 901, 193, 308, 488, 998, 836, 322, 687], [243, 353, 260, 113, 419, 235, 89, 133, 283, 392, 368, 621, 502, 209, 286, 159, 285, 413, 466, 326, 429, 439, 226, 157, 505, 94, 344, 190, 216, 229, 500, 225, 603, 67, 331, 279, 453, 96, 536, 0, 181, 255, 442, 272, 207, 299, 150, 214, 325, 267, 421, 593, 392, 187, 198, 140, 436, 535, 313, 110, 173, 432, 457, 461, 440, 471, 467, 498, 415, 95, 143, 29, 401, 126, 248, 238, 467, 310, 505, 316, 389, 294, 480, 271, 474, 367, 166, 529, 222, 227, 588, 222, 404, 349, 536, 257, 607, 257, 422, 571, 333, 282, 228, 355, 71, 355, 371, 472, 449, 272, 247, 297, 379, 349, 385, 93, 531, 314, 189, 220], [376, 520, 427, 280, 586, 402, 106, 300, 294, 559, 355, 788, 340, 146, 322, 306, 112, 240, 293, 153, 596, 296, 281, 338, 332, 261, 511, 220, 322, 293, 376, 253, 430, 234, 498, 446, 620, 151, 703, 181, 0, 82, 269, 99, 212, 126, 317, 381, 152, 94, 370, 420, 559, 354, 365, 46, 603, 362, 349, 291, 295, 599, 624, 628, 267, 354, 634, 325, 242, 262, 310, 142, 568, 154, 140, 327, 634, 204, 332, 483, 290, 461, 647, 438, 286, 204, 114, 696, 389, 394, 415, 256, 571, 250, 703, 424, 774, 84, 589, 398, 500, 449, 237, 522, 207, 522, 198, 299, 616, 439, 428, 464, 206, 516, 552, 260, 343, 141, 356, 268], [449, 594, 501, 354, 660, 476, 180, 378, 368, 633, 429, 862, 256, 206, 388, 420, 45, 204, 257, 101, 670, 260, 355, 537, 296, 335, 585, 304, 388, 359, 340, 346, 394, 308, 572, 520, 694, 225, 777, 255, 82, 0, 233, 106, 278, 82, 391, 455, 68, 58, 444, 384, 633, 428, 439, 120, 677, 326, 415, 490, 383, 673, 698, 702, 231, 322, 708, 289, 206, 336, 384, 250, 642, 290, 168, 411, 708, 280, 296, 557, 322, 535, 721, 512, 250, 236, 248, 770, 463, 468, 379, 320, 645, 
282, 777, 498, 848, 19, 663, 362, 574, 523, 311, 596, 420, 596, 162, 263, 690, 513, 627, 538, 126, 590, 626, 334, 307, 78, 429, 334], [505, 781, 688, 541, 847, 663, 367, 565, 555, 820, 616, 1049, 289, 262, 444, 476, 196, 119, 136, 118, 857, 175, 542, 593, 129, 522, 772, 360, 444, 415, 255, 402, 161, 495, 759, 707, 881, 412, 964, 442, 269, 233, 0, 200, 334, 154, 578, 642, 231, 179, 631, 151, 820, 615, 626, 307, 864, 93, 471, 546, 439, 860, 885, 889, 44, 237, 895, 65, 55, 523, 571, 437, 829, 346, 224, 467, 895, 336, 63, 744, 263, 722, 908, 699, 165, 240, 294, 957, 650, 655, 155, 376, 832, 286, 964, 685, 1035, 210, 850, 129, 761, 710, 498, 783, 476, 783, 77, 58, 877, 700, 683, 725, 90, 777, 813, 521, 222, 219, 616, 390], [322, 611, 518, 371, 677, 493, 197, 395, 372, 650, 446, 879, 359, 69, 261, 293, 94, 142, 224, 84, 687, 146, 372, 410, 263, 352, 602, 177, 261, 232, 247, 219, 361, 325, 589, 537, 711, 242, 794, 272, 99, 106, 200, 0, 151, 46, 408, 472, 130, 48, 461, 351, 650, 445, 456, 126, 694, 293, 288, 363, 256, 690, 715, 719, 198, 218, 725, 256, 173, 353, 401, 267, 659, 136, 41, 284, 725, 143, 263, 574, 195, 552, 738, 529, 169, 109, 67, 787, 480, 485, 346, 193, 662, 155, 794, 515, 865, 83, 680, 329, 591, 540, 328, 613, 232, 613, 129, 230, 707, 530, 500, 555, 137, 607, 643, 351, 226, 123, 446, 207], [185, 575, 482, 335, 634, 450, 231, 319, 419, 607, 480, 843, 462, 86, 124, 156, 241, 226, 298, 218, 651, 166, 406, 273, 337, 310, 566, 40, 124, 95, 227, 82, 495, 300, 546, 494, 668, 276, 751, 207, 212, 278, 334, 151, 0, 182, 365, 429, 276, 189, 495, 485, 614, 379, 413, 166, 651, 427, 151, 226, 119, 647, 672, 683, 332, 198, 682, 390, 307, 288, 358, 159, 616, 621, 122, 147, 689, 37, 397, 538, 116, 516, 695, 486, 201, 86, 90, 751, 437, 419, 480, 56, 626, 76, 758, 400, 829, 255, 637, 463, 548, 504, 362, 570, 136, 570, 263, 364, 671, 487, 363, 512, 271, 564, 607, 276, 258, 264, 411, 70], [353, 638, 545, 398, 704, 520, 224, 422, 412, 677, 473, 906, 282, 110, 292, 324, 61, 125, 
178, 38, 714, 181, 399, 441, 217, 379, 629, 208, 292, 263, 261, 250, 315, 352, 616, 564, 738, 269, 821, 299, 126, 82, 154, 46, 182, 0, 435, 499, 96, 31, 488, 305, 677, 472, 483, 164, 721, 247, 319, 396, 287, 717, 742, 746, 152, 243, 752, 210, 127, 380, 428, 294, 686, 194, 72, 315, 752, 184, 217, 601, 226, 579, 765, 556, 171, 140, 142, 814, 507, 512, 300, 224, 689, 186, 821, 542, 892, 67, 707, 283, 618, 567, 355, 640, 324, 640, 83, 184, 734, 557, 531, 582, 91, 634, 670, 378, 228, 84, 473, 238], [324, 314, 191, 106, 275, 91, 225, 104, 244, 248, 332, 487, 638, 437, 367, 240, 421, 549, 602, 462, 390, 520, 290, 238, 641, 64, 227, 329, 297, 310, 581, 314, 739, 95, 187, 135, 309, 196, 392, 150, 317, 391, 578, 408, 365, 435, 0, 64, 461, 403, 385, 729, 353, 83, 54, 276, 292, 671, 394, 191, 254, 288, 313, 422, 576, 552, 323, 634, 551, 92, 21, 179, 257, 276, 473, 319, 333, 391, 641, 199, 470, 255, 336, 127, 555, 448, 316, 490, 78, 114, 724, 355, 365, 430, 402, 157, 473, 393, 278, 707, 138, 112, 188, 211, 209, 211, 507, 608, 410, 128, 221, 153, 515, 205, 346, 103, 612, 450, 149, 301], [388, 234, 127, 124, 219, 113, 289, 168, 215, 270, 254, 431, 606, 501, 431, 304, 485, 613, 666, 526, 310, 584, 232, 302, 705, 128, 163, 393, 361, 374, 645, 378, 803, 159, 209, 120, 219, 229, 336, 214, 381, 455, 642, 472, 429, 499, 64, 0, 525, 467, 307, 793, 220, 147, 72, 340, 236, 735, 458, 255, 378, 232, 257, 330, 640, 616, 233, 698, 615, 156, 85, 243, 201, 340, 537, 383, 277, 455, 705, 135, 534, 169, 280, 149, 619, 512, 380, 448, 53, 151, 788, 419, 261, 494, 346, 221, 417, 457, 221, 771, 74, 48, 160, 169, 273, 155, 571, 672, 306, 50, 285, 93, 579, 149, 266, 167, 676, 514, 121, 365], [447, 664, 571, 424, 730, 546, 250, 448, 438, 703, 499, 932, 188, 204, 386, 418, 36, 202, 255, 88, 740, 258, 425, 535, 294, 405, 655, 302, 386, 357, 338, 344, 392, 378, 642, 590, 764, 295, 847, 325, 152, 68, 231, 130, 276, 96, 461, 525, 0, 82, 514, 382, 603, 498, 509, 190, 747, 324, 413, 488, 381, 743, 768, 772, 
174, 320, 778, 287, 149, 406, 454, 320, 712, 288, 166, 409, 778, 278, 294, 627, 320, 605, 791, 582, 248, 234, 246, 840, 533, 538, 377, 318, 715, 280, 847, 568, 918, 65, 733, 360, 644, 593, 381, 666, 418, 666, 160, 261, 760, 583, 625, 608, 111, 660, 696, 404, 305, 34, 499, 332], [360, 606, 513, 366, 672, 488, 192, 390, 380, 645, 441, 874, 273, 117, 299, 331, 46, 150, 203, 63, 682, 206, 367, 448, 242, 347, 597, 215, 299, 270, 286, 257, 340, 320, 584, 532, 706, 237, 789, 267, 94, 58, 179, 48, 189, 31, 403, 467, 82, 0, 456, 330, 645, 440, 451, 132, 689, 272, 326, 401, 294, 685, 710, 714, 177, 268, 720, 235, 152, 348, 396, 262, 654, 201, 89, 322, 720, 191, 242, 569, 243, 547, 733, 524, 196, 157, 159, 782, 475, 480, 325, 231, 657, 203, 789, 510, 860, 35, 675, 308, 586, 535, 323, 608, 331, 608, 108, 209, 702, 525, 538, 550, 116, 602, 638, 346, 253, 75, 441, 245], [605, 133, 180, 312, 287, 418, 264, 425, 112, 575, 55, 439, 313, 448, 648, 520, 474, 602, 655, 515, 193, 658, 89, 517, 694, 354, 220, 610, 577, 591, 738, 595, 792, 352, 514, 425, 401, 219, 404, 421, 370, 444, 631, 461, 495, 488, 385, 307, 514, 456, 0, 782, 210, 455, 377, 329, 384, 724, 675, 471, 534, 416, 325, 279, 629, 720, 415, 687, 604, 387, 407, 310, 349, 393, 502, 600, 285, 495, 694, 217, 581, 138, 428, 454, 648, 495, 405, 310, 358, 495, 777, 636, 200, 541, 354, 525, 425, 446, 642, 760, 276, 259, 169, 456, 482, 381, 560, 661, 241, 325, 589, 380, 568, 297, 112, 384, 705, 503, 238, 581], [656, 932, 839, 692, 998, 814, 518, 716, 706, 971, 767, 1200, 444, 413, 595, 627, 347, 270, 200, 269, 1008, 326, 693, 744, 177, 673, 923, 511, 595, 566, 406, 553, 42, 646, 910, 858, 1032, 563, 1115, 593, 420, 384, 151, 351, 485, 305, 729, 793, 382, 330, 782, 0, 971, 766, 777, 458, 1015, 85, 622, 697, 590, 1011, 1036, 1040, 199, 388, 1046, 152, 224, 674, 722, 588, 980, 497, 375, 618, 1046, 487, 96, 895, 414, 873, 1059, 850, 316, 391, 445, 1108, 801, 806, 173, 527, 983, 437, 1115, 836, 1186, 361, 1001, 33, 912, 861, 649, 934, 
627, 934, 228, 160, 1028, 851, 834, 876, 259, 928, 964, 672, 373, 370, 767, 541], [573, 113, 101, 280, 79, 329, 359, 403, 163, 402, 157, 235, 509, 686, 616, 489, 663, 791, 844, 704, 131, 769, 209, 486, 883, 322, 57, 578, 546, 559, 830, 563, 981, 320, 425, 336, 236, 314, 176, 392, 559, 633, 820, 650, 614, 677, 353, 220, 603, 645, 210, 971, 0, 375, 300, 518, 186, 913, 643, 440, 503, 262, 117, 75, 818, 801, 231, 876, 793, 355, 375, 421, 165, 518, 722, 568, 81, 640, 883, 85, 719, 92, 230, 365, 804, 697, 558, 184, 269, 364, 966, 604, 74, 679, 150, 429, 221, 635, 308, 949, 144, 172, 208, 307, 451, 231, 749, 850, 104, 233, 493, 237, 757, 147, 158, 352, 861, 692, 206, 550], [293, 384, 274, 143, 319, 129, 262, 60, 314, 286, 402, 531, 675, 451, 381, 201, 458, 586, 639, 499, 460, 534, 360, 151, 678, 101, 318, 343, 311, 324, 595, 328, 776, 132, 225, 173, 353, 233, 436, 187, 354, 428, 615, 445, 379, 472, 83, 147, 498, 440, 455, 766, 375, 0, 90, 313, 336, 708, 408, 138, 268, 332, 357, 430, 613, 566, 367, 671, 588, 83, 62, 216, 301, 313, 487, 333, 377, 405, 678, 292, 484, 325, 380, 165, 569, 462, 353, 560, 122, 40, 761, 369, 435, 444, 446, 70, 517, 430, 316, 744, 233, 195, 259, 249, 246, 255, 544, 645, 480, 172, 134, 197, 552, 249, 416, 95, 626, 487, 220, 315], [372, 283, 199, 153, 229, 39, 273, 151, 324, 196, 324, 441, 686, 485, 415, 288, 469, 597, 650, 510, 413, 568, 370, 241, 689, 111, 228, 377, 345, 358, 629, 362, 787, 143, 135, 83, 263, 244, 346, 198, 365, 439, 626, 456, 413, 483, 54, 72, 509, 451, 377, 777, 300, 90, 0, 324, 246, 719, 442, 239, 302, 242, 267, 340, 624, 600, 277, 682, 599, 139, 68, 227, 211, 324, 521, 367, 287, 439, 689, 228, 518, 241, 290, 75, 603, 496, 364, 458, 32, 79, 772, 403, 356, 478, 356, 144, 427, 441, 212, 755, 143, 120, 269, 225, 256, 165, 555, 656, 395, 82, 208, 107, 563, 159, 342, 150, 660, 498, 230, 349], [330, 479, 386, 239, 545, 361, 65, 259, 253, 518, 314, 747, 378, 119, 276, 260, 150, 278, 331, 191, 555, 334, 240, 297, 370, 220, 470, 174, 
276, 247, 414, 207, 468, 193, 457, 405, 579, 110, 662, 140, 46, 120, 307, 126, 166, 164, 276, 340, 190, 132, 329, 458, 518, 313, 324, 0, 562, 400, 303, 250, 249, 558, 583, 587, 305, 396, 593, 363, 280, 221, 269, 96, 527, 108, 167, 281, 593, 166, 370, 442, 252, 420, 606, 397, 324, 166, 76, 655, 348, 353, 453, 210, 530, 212, 662, 383, 733, 122, 548, 436, 459, 408, 196, 481, 161, 481, 236, 337, 575, 398, 372, 423, 244, 475, 511, 219, 381, 179, 315, 222], [610, 297, 234, 391, 107, 275, 511, 389, 322, 248, 331, 254, 693, 723, 653, 526, 707, 835, 888, 748, 302, 806, 368, 487, 927, 349, 149, 615, 583, 596, 867, 600, 1025, 381, 239, 231, 77, 408, 106, 436, 603, 677, 864, 694, 651, 721, 292, 236, 747, 689, 384, 1015, 186, 336, 246, 562, 0, 957, 680, 477, 540, 103, 69, 191, 862, 838, 59, 920, 837, 377, 306, 465, 35, 562, 759, 605, 125, 677, 927, 167, 756, 255, 44, 311, 841, 734, 602, 309, 215, 325, 1010, 641, 245, 716, 169, 390, 240, 679, 153, 983, 156, 202, 327, 193, 494, 125, 793, 894, 246, 179, 454, 170, 801, 87, 335, 388, 898, 736, 288, 587], [598, 874, 781, 634, 940, 756, 460, 658, 648, 913, 709, 1142, 370, 355, 537, 569, 289, 212, 197, 211, 950, 268, 635, 686, 157, 615, 865, 453, 537, 508, 348, 495, 84, 588, 852, 800, 974, 505, 1057, 535, 362, 326, 93, 293, 427, 247, 671, 735, 324, 272, 724, 85, 913, 708, 719, 400, 957, 0, 564, 639, 532, 953, 978, 982, 125, 330, 988, 77, 150, 616, 664, 530, 922, 439, 317, 560, 988, 429, 30, 837, 356, 815, 1001, 792, 258, 333, 387, 1050, 743, 748, 90, 469, 925, 379, 1057, 778, 1128, 303, 943, 52, 854, 803, 591, 876, 569, 876, 170, 140, 970, 793, 776, 818, 185, 870, 906, 614, 315, 312, 709, 483], [214, 604, 504, 364, 663, 479, 402, 348, 534, 636, 622, 872, 599, 223, 49, 185, 378, 319, 391, 355, 680, 234, 580, 302, 430, 339, 595, 123, 115, 86, 287, 90, 632, 329, 575, 523, 697, 409, 780, 313, 349, 415, 471, 288, 151, 319, 394, 458, 413, 326, 675, 622, 643, 408, 442, 303, 680, 564, 0, 255, 148, 676, 701, 712, 469, 258, 711, 527, 444, 317, 
387, 342, 645, 199, 259, 84, 718, 160, 534, 567, 147, 545, 724, 515, 294, 187, 227, 780, 466, 448, 617, 103, 655, 138, 787, 429, 858, 392, 666, 600, 577, 533, 478, 599, 262, 599, 400, 501, 700, 516, 392, 541, 408, 593, 636, 305, 351, 401, 439, 126], [154, 401, 308, 161, 460, 276, 199, 78, 331, 433, 419, 669, 612, 298, 228, 63, 453, 441, 513, 430, 477, 381, 377, 47, 552, 136, 392, 190, 158, 171, 442, 175, 707, 126, 372, 320, 494, 201, 577, 110, 291, 490, 546, 363, 226, 396, 191, 255, 488, 401, 471, 697, 440, 138, 239, 250, 477, 639, 255, 0, 115, 473, 498, 509, 544, 413, 508, 602, 519, 92, 184, 139, 442, 216, 334, 180, 515, 252, 609, 364, 331, 342, 521, 312, 416, 309, 295, 577, 263, 178, 692, 216, 452, 291, 584, 174, 655, 467, 463, 675, 374, 330, 276, 396, 120, 396, 475, 576, 497, 313, 137, 338, 483, 390, 433, 94, 473, 476, 237, 162], [70, 464, 371, 224, 523, 339, 262, 208, 394, 496, 482, 732, 567, 191, 121, 27, 346, 334, 406, 323, 540, 274, 440, 162, 445, 199, 455, 83, 47, 64, 335, 68, 600, 189, 435, 383, 557, 269, 640, 173, 295, 383, 439, 256, 119, 287, 254, 378, 381, 294, 534, 590, 503, 268, 302, 249, 540, 532, 148, 115, 0, 536, 561, 572, 437, 306, 571, 495, 412, 177, 247, 209, 505, 153, 227, 53, 578, 145, 502, 427, 224, 405, 584, 375, 309, 202, 195, 640, 326, 308, 585, 109, 515, 184, 647, 289, 718, 360, 526, 568, 437, 393, 339, 459, 110, 459, 368, 469, 560, 376, 252, 401, 376, 453, 496, 165, 366, 369, 300, 55], [606, 349, 266, 387, 183, 216, 507, 385, 354, 147, 363, 357, 715, 719, 649, 522, 703, 831, 884, 744, 375, 802, 400, 483, 923, 345, 194, 611, 579, 592, 863, 596, 1021, 377, 139, 154, 22, 447, 209, 432, 599, 673, 860, 690, 647, 717, 288, 232, 743, 685, 416, 1011, 262, 332, 242, 558, 103, 953, 676, 473, 536, 0, 153, 275, 858, 834, 45, 916, 833, 373, 302, 461, 97, 558, 755, 601, 209, 673, 923, 199, 752, 287, 122, 170, 837, 730, 598, 393, 206, 321, 1006, 637, 318, 712, 272, 386, 343, 675, 47, 989, 208, 254, 391, 89, 490, 77, 789, 890, 348, 175, 450, 147, 797, 
119, 381, 384, 894, 732, 352, 583], [631, 228, 175, 412, 38, 296, 532, 410, 240, 306, 272, 210, 640, 744, 674, 547, 728, 856, 909, 769, 233, 827, 286, 508, 948, 370, 80, 636, 604, 617, 888, 621, 1046, 402, 293, 261, 138, 351, 79, 457, 624, 698, 885, 715, 672, 742, 313, 257, 768, 710, 325, 1036, 117, 357, 267, 583, 69, 978, 701, 498, 561, 153, 0, 122, 883, 859, 122, 941, 858, 398, 327, 486, 56, 583, 780, 626, 56, 698, 948, 108, 777, 175, 113, 332, 862, 755, 623, 240, 236, 346, 1031, 662, 176, 737, 125, 411, 196, 700, 212, 1014, 177, 195, 280, 231, 515, 155, 814, 915, 177, 200, 475, 191, 822, 108, 266, 409, 919, 757, 243, 608], [642, 129, 176, 349, 121, 369, 428, 472, 232, 428, 226, 187, 578, 755, 685, 558, 732, 860, 913, 773, 98, 838, 278, 555, 952, 391, 132, 647, 615, 628, 899, 632, 1050, 389, 465, 376, 260, 383, 161, 461, 628, 702, 889, 719, 683, 746, 422, 330, 772, 714, 279, 1040, 75, 430, 340, 587, 191, 982, 712, 509, 572, 275, 122, 0, 887, 870, 244, 945, 862, 424, 444, 490, 178, 587, 791, 637, 66, 709, 952, 160, 788, 161, 235, 405, 873, 766, 627, 118, 309, 419, 1035, 673, 62, 748, 92, 484, 173, 704, 334, 1018, 219, 247, 277, 353, 590, 247, 818, 919, 55, 273, 548, 277, 826, 192, 155, 421, 930, 761, 275, 619], [503, 779, 686, 539, 845, 661, 365, 563, 553, 818, 614, 1047, 245, 260, 442, 474, 141, 151, 168, 116, 855, 207, 540, 591, 162, 520, 770, 358, 442, 413, 287, 400, 201, 493, 757, 705, 879, 410, 962, 440, 267, 231, 44, 198, 332, 152, 576, 640, 174, 177, 629, 199, 818, 613, 624, 305, 862, 125, 469, 544, 437, 858, 883, 887, 0, 269, 893, 66, 25, 521, 569, 435, 827, 344, 222, 465, 893, 334, 103, 742, 295, 720, 906, 697, 197, 272, 292, 955, 648, 653, 162, 374, 830, 318, 962, 683, 1033, 208, 848, 169, 759, 708, 496, 781, 474, 781, 76, 91, 875, 698, 681, 723, 60, 775, 811, 519, 254, 137, 614, 388], [372, 762, 662, 522, 821, 637, 456, 506, 644, 794, 705, 1030, 506, 209, 209, 343, 285, 120, 108, 230, 838, 72, 631, 460, 147, 497, 753, 227, 311, 282, 29, 269, 398, 487, 
733, 681, 855, 501, 938, 471, 354, 322, 237, 218, 198, 243, 552, 616, 320, 268, 720, 388, 801, 566, 600, 396, 838, 330, 258, 413, 306, 834, 859, 870, 269, 0, 869, 293, 250, 475, 545, 526, 803, 260, 177, 334, 876, 161, 300, 725, 111, 703, 882, 673, 72, 156, 246, 938, 624, 606, 383, 243, 813, 122, 945, 587, 1016, 299, 824, 366, 735, 691, 587, 757, 343, 757, 207, 184, 858, 674, 550, 699, 267, 751, 794, 463, 66, 308, 587, 257], [641, 348, 265, 422, 152, 262, 542, 420, 314, 189, 362, 313, 714, 754, 684, 557, 738, 866, 919, 779, 344, 837, 360, 518, 958, 380, 193, 646, 614, 627, 898, 631, 1056, 412, 185, 200, 23, 439, 165, 467, 634, 708, 895, 725, 682, 752, 323, 233, 778, 720, 415, 1046, 231, 367, 277, 593, 59, 988, 711, 508, 571, 45, 122, 244, 893, 869, 0, 951, 868, 408, 337, 496, 66, 593, 790, 636, 178, 708, 958, 198, 787, 286, 77, 216, 872, 765, 633, 362, 238, 356, 1041, 672, 287, 747, 228, 421, 299, 710, 95, 1024, 187, 233, 358, 135, 525, 89, 824, 925, 299, 176, 485, 149, 832, 118, 380, 419, 929, 767, 319, 618], [561, 837, 744, 597, 903, 719, 423, 621, 611, 876, 672, 1105, 311, 318, 500, 532, 252, 175, 192, 174, 913, 231, 598, 649, 186, 578, 828, 416, 500, 471, 311, 458, 154, 551, 815, 763, 937, 468, 1020, 498, 325, 289, 65, 256, 390, 210, 634, 698, 287, 235, 687, 152, 876, 671, 682, 363, 920, 77, 527, 602, 495, 916, 941, 945, 66, 293, 951, 0, 91, 579, 627, 493, 885, 402, 280, 523, 951, 392, 56, 800, 319, 778, 964, 755, 221, 296, 350, 1013, 706, 711, 96, 432, 888, 342, 1020, 741, 1091, 266, 906, 122, 817, 766, 554, 839, 532, 839, 133, 113, 933, 756, 739, 781, 126, 833, 869, 577, 278, 275, 672, 446], [478, 754, 661, 514, 820, 636, 340, 538, 528, 793, 589, 1022, 261, 235, 417, 449, 116, 132, 149, 91, 830, 188, 515, 566, 159, 495, 745, 333, 417, 388, 268, 375, 226, 468, 732, 680, 854, 385, 937, 415, 242, 206, 55, 173, 307, 127, 551, 615, 149, 152, 604, 224, 793, 588, 599, 280, 837, 150, 444, 519, 412, 833, 858, 862, 25, 250, 868, 91, 0, 496, 544, 410, 802, 319, 197, 440, 
868, 309, 120, 717, 276, 695, 881, 672, 178, 253, 267, 930, 623, 628, 187, 349, 805, 299, 937, 658, 1008, 183, 823, 194, 734, 683, 471, 756, 449, 756, 51, 88, 850, 673, 656, 698, 35, 750, 786, 494, 235, 112, 589, 363], [247, 316, 223, 76, 360, 176, 170, 39, 246, 333, 334, 572, 583, 360, 290, 163, 366, 494, 547, 407, 392, 443, 292, 133, 586, 37, 307, 252, 220, 233, 504, 237, 684, 34, 272, 220, 394, 146, 477, 95, 262, 336, 523, 353, 288, 380, 92, 156, 406, 348, 387, 674, 355, 83, 139, 221, 377, 616, 317, 92, 177, 373, 398, 424, 521, 475, 408, 579, 496, 0, 70, 124, 342, 221, 396, 242, 418, 314, 586, 279, 393, 257, 421, 212, 478, 371, 261, 492, 163, 123, 669, 278, 367, 353, 487, 153, 558, 338, 363, 652, 274, 204, 191, 296, 127, 296, 452, 553, 412, 213, 150, 238, 460, 290, 348, 12, 535, 395, 152, 224], [317, 336, 212, 95, 289, 105, 218, 79, 266, 262, 354, 501, 631, 430, 360, 233, 414, 542, 595, 455, 412, 513, 312, 213, 634, 53, 248, 397, 290, 378, 574, 382, 732, 88, 201, 149, 323, 189, 406, 143, 310, 384, 571, 401, 358, 428, 21, 85, 454, 396, 407, 722, 375, 62, 68, 269, 306, 664, 387, 184, 247, 302, 327, 444, 569, 545, 337, 627, 544, 70, 0, 172, 271, 269, 466, 312, 347, 384, 634, 220, 463, 277, 350, 141, 548, 441, 309, 512, 92, 93, 717, 423, 387, 423, 416, 132, 487, 386, 292, 700, 159, 133, 211, 225, 202, 225, 500, 601, 432, 142, 196, 167, 508, 219, 368, 92, 605, 443, 172, 294], [272, 382, 289, 142, 448, 264, 84, 162, 234, 421, 295, 650, 497, 180, 315, 188, 280, 408, 461, 321, 458, 464, 221, 186, 500, 123, 373, 193, 245, 258, 544, 228, 598, 96, 360, 308, 482, 91, 565, 29, 142, 250, 437, 267, 159, 294, 179, 243, 320, 262, 310, 588, 421, 216, 227, 96, 465, 530, 342, 139, 209, 461, 486, 490, 435, 526, 496, 493, 410, 124, 172, 0, 430, 97, 219, 267, 496, 196, 500, 345, 275, 323, 509, 300, 454, 227, 137, 558, 251, 256, 583, 225, 433, 235, 565, 286, 636, 252, 451, 566, 362, 311, 177, 384, 74, 384, 366, 467, 478, 301, 276, 326, 374, 378, 414, 122, 511, 309, 218, 249], [575, 
276, 199, 356, 86, 240, 476, 354, 287, 250, 296, 266, 672, 688, 618, 491, 672, 800, 853, 713, 289, 771, 333, 452, 892, 314, 127, 580, 548, 561, 832, 565, 990, 346, 237, 205, 82, 373, 141, 401, 568, 642, 829, 659, 616, 686, 257, 201, 712, 654, 349, 980, 165, 301, 211, 527, 35, 922, 645, 442, 505, 97, 56, 178, 827, 803, 66, 885, 802, 342, 271, 430, 0, 527, 724, 570, 112, 642, 892, 132, 721, 220, 79, 276, 832, 699, 567, 296, 180, 290, 975, 606, 232, 681, 181, 355, 252, 644, 156, 958, 121, 167, 292, 175, 459, 99, 758, 859, 233, 144, 419, 135, 766, 52, 314, 353, 863, 701, 253, 552], [219, 479, 386, 239, 545, 361, 167, 259, 317, 518, 378, 747, 474, 83, 172, 149, 253, 235, 339, 230, 555, 228, 304, 263, 378, 220, 470, 79, 139, 134, 289, 112, 507, 193, 457, 405, 579, 174, 662, 126, 154, 290, 346, 136, 621, 194, 276, 340, 288, 201, 393, 497, 518, 313, 324, 108, 562, 439, 199, 216, 153, 558, 583, 587, 344, 260, 593, 402, 319, 221, 269, 97, 527, 0, 134, 170, 593, 99, 409, 442, 178, 420, 606, 397, 263, 130, 69, 655, 348, 353, 492, 104, 530, 138, 662, 383, 733, 267, 548, 475, 459, 408, 260, 481, 96, 481, 275, 376, 575, 398, 353, 423, 283, 475, 511, 219, 320, 276, 315, 104], [293, 683, 583, 443, 742, 558, 238, 427, 426, 715, 487, 951, 352, 50, 232, 264, 131, 101, 174, 108, 759, 105, 413, 381, 213, 418, 674, 148, 232, 203, 206, 190, 385, 408, 654, 602, 776, 283, 859, 248, 140, 168, 224, 41, 122, 72, 473, 537, 166, 89, 502, 375, 722, 487, 521, 167, 759, 317, 259, 334, 227, 755, 780, 791, 222, 177, 790, 280, 197, 396, 466, 219, 724, 134, 0, 255, 797, 125, 287, 646, 154, 624, 803, 594, 128, 68, 82, 859, 545, 527, 370, 164, 734, 114, 866, 508, 937, 145, 745, 353, 656, 612, 369, 678, 264, 678, 153, 176, 779, 595, 471, 620, 161, 672, 715, 384, 185, 154, 518, 178], [54, 529, 429, 289, 588, 404, 327, 273, 459, 561, 547, 797, 595, 219, 92, 82, 374, 362, 434, 351, 605, 302, 505, 227, 473, 264, 520, 119, 31, 43, 363, 58, 628, 254, 500, 448, 622, 334, 705, 238, 327, 411, 467, 284, 147, 315, 
319, 383, 409, 322, 600, 618, 568, 333, 367, 281, 605, 560, 84, 180, 53, 601, 626, 637, 465, 334, 636, 523, 440, 242, 312, 267, 570, 170, 255, 0, 643, 173, 530, 492, 190, 470, 649, 440, 337, 230, 223, 705, 391, 373, 613, 99, 580, 212, 712, 354, 783, 388, 591, 596, 502, 458, 403, 524, 187, 524, 396, 497, 625, 441, 317, 466, 404, 518, 561, 230, 394, 397, 364, 60], [648, 188, 182, 355, 68, 316, 434, 430, 238, 362, 232, 154, 584, 761, 691, 564, 738, 866, 919, 779, 177, 844, 284, 561, 958, 390, 100, 653, 621, 634, 905, 638, 1056, 395, 412, 323, 194, 389, 95, 467, 634, 708, 895, 725, 689, 752, 333, 277, 778, 720, 285, 1046, 81, 377, 287, 593, 125, 988, 718, 515, 578, 209, 56, 66, 893, 876, 178, 951, 868, 418, 347, 496, 112, 593, 797, 643, 0, 715, 958, 128, 794, 167, 169, 352, 879, 772, 633, 179, 256, 366, 1041, 679, 120, 754, 69, 431, 154, 710, 268, 1024, 197, 215, 283, 287, 526, 211, 824, 925, 121, 220, 495, 224, 832, 139, 213, 429, 936, 767, 281, 625], [211, 601, 501, 361, 660, 476, 231, 345, 479, 633, 480, 869, 466, 74, 81, 182, 243, 189, 261, 220, 677, 129, 406, 299, 300, 336, 592, 105, 150, 121, 190, 108, 497, 326, 572, 520, 694, 276, 777, 310, 204, 280, 336, 143, 37, 184, 391, 455, 278, 191, 495, 487, 640, 405, 439, 166, 677, 429, 160, 252, 145, 673, 698, 709, 334, 161, 708, 392, 309, 314, 384, 196, 642, 99, 125, 173, 715, 0, 399, 564, 79, 542, 721, 512, 164, 57, 90, 777, 463, 445, 482, 57, 652, 39, 784, 426, 855, 257, 663, 465, 574, 530, 362, 596, 182, 596, 265, 276, 697, 513, 389, 538, 273, 590, 633, 302, 221, 266, 436, 96], [568, 844, 751, 604, 910, 726, 430, 628, 618, 883, 679, 1112, 348, 325, 507, 539, 259, 182, 150, 181, 920, 238, 605, 656, 127, 585, 835, 423, 507, 478, 318, 465, 98, 558, 822, 770, 944, 475, 1027, 505, 332, 296, 63, 263, 397, 217, 641, 705, 294, 242, 694, 96, 883, 678, 689, 370, 927, 30, 534, 609, 502, 923, 948, 952, 103, 300, 958, 56, 120, 586, 634, 500, 892, 409, 287, 530, 958, 399, 0, 807, 326, 785, 971, 762, 228, 303, 357, 1020, 713, 718, 
112, 439, 895, 349, 1027, 748, 1098, 273, 913, 66, 824, 773, 561, 846, 539, 846, 140, 110, 940, 763, 746, 788, 155, 840, 876, 584, 285, 282, 679, 453], [497, 150, 67, 204, 70, 257, 288, 327, 155, 335, 164, 282, 516, 610, 540, 413, 587, 715, 768, 628, 216, 693, 201, 410, 807, 246, 28, 502, 470, 483, 754, 487, 905, 244, 353, 264, 184, 243, 187, 316, 483, 557, 744, 574, 538, 601, 199, 135, 627, 569, 217, 895, 85, 292, 228, 442, 167, 837, 567, 364, 427, 199, 108, 160, 742, 725, 198, 800, 717, 279, 220, 345, 132, 442, 646, 492, 128, 564, 807, 0, 643, 88, 211, 293, 728, 627, 482, 269, 197, 281, 890, 528, 159, 603, 197, 346, 268, 559, 241, 873, 59, 87, 172, 240, 375, 164, 673, 774, 189, 108, 410, 139, 681, 80, 182, 276, 785, 616, 135, 474], [290, 680, 580, 440, 739, 555, 317, 424, 505, 712, 566, 948, 506, 139, 98, 261, 285, 155, 227, 229, 756, 88, 492, 378, 266, 415, 671, 144, 176, 164, 140, 136, 424, 405, 651, 599, 773, 362, 856, 389, 290, 322, 263, 195, 116, 226, 470, 534, 320, 243, 581, 414, 719, 484, 518, 252, 756, 356, 147, 331, 224, 752, 777, 788, 295, 111, 787, 319, 276, 393, 463, 275, 721, 178, 154, 190, 794, 79, 326, 643, 0, 621, 800, 591, 130, 86, 176, 856, 542, 524, 409, 112, 731, 40, 863, 505, 934, 299, 742, 392, 653, 609, 448, 675, 261, 675, 233, 242, 776, 592, 468, 617, 293, 669, 712, 381, 177, 308, 515, 175], [475, 65, 42, 182, 137, 282, 261, 295, 65, 439, 85, 321, 437, 588, 518, 391, 565, 693, 746, 606, 141, 671, 111, 388, 785, 224, 95, 480, 448, 461, 732, 465, 883, 222, 378, 289, 272, 216, 254, 294, 461, 535, 722, 552, 516, 579, 255, 169, 605, 547, 138, 873, 92, 325, 241, 420, 255, 815, 545, 342, 405, 287, 175, 161, 720, 703, 286, 778, 695, 257, 277, 323, 220, 420, 624, 470, 167, 542, 785, 88, 621, 0, 299, 318, 706, 599, 460, 229, 222, 365, 868, 506, 104, 581, 236, 395, 307, 537, 506, 851, 147, 121, 110, 320, 353, 252, 651, 752, 149, 187, 459, 244, 659, 168, 97, 254, 763, 594, 108, 452], [654, 341, 278, 435, 151, 319, 555, 433, 366, 266, 375, 298, 755, 
767, 697, 570, 751, 879, 932, 792, 346, 850, 412, 531, 971, 393, 193, 659, 627, 640, 911, 644, 1069, 425, 262, 254, 100, 452, 103, 480, 647, 721, 908, 738, 695, 765, 336, 280, 791, 733, 428, 1059, 230, 380, 290, 606, 44, 1001, 724, 521, 584, 122, 113, 235, 906, 882, 77, 964, 881, 421, 350, 509, 79, 606, 803, 649, 169, 721, 971, 211, 800, 299, 0, 355, 885, 778, 646, 353, 259, 369, 1054, 685, 289, 760, 213, 434, 284, 723, 172, 1037, 200, 246, 371, 212, 538, 148, 837, 938, 290, 223, 498, 214, 845, 131, 379, 432, 942, 780, 332, 631], [445, 387, 276, 226, 294, 50, 346, 224, 364, 125, 410, 506, 759, 558, 488, 361, 542, 670, 723, 583, 478, 641, 406, 316, 762, 184, 293, 450, 418, 431, 702, 435, 860, 216, 64, 57, 193, 317, 411, 271, 438, 512, 699, 529, 486, 556, 127, 149, 582, 524, 454, 850, 365, 165, 75, 397, 311, 792, 515, 312, 375, 170, 332, 405, 697, 673, 216, 755, 672, 212, 141, 300, 276, 397, 594, 440, 352, 512, 762, 293, 591, 318, 355, 0, 676, 569, 437, 523, 97, 154, 845, 476, 421, 551, 421, 219, 492, 514, 133, 828, 208, 197, 334, 84, 329, 168, 628, 729, 460, 147, 283, 118, 636, 224, 419, 223, 733, 571, 295, 422], [375, 765, 665, 525, 824, 640, 384, 509, 572, 797, 633, 1033, 434, 160, 245, 346, 213, 48, 59, 158, 841, 42, 559, 463, 98, 500, 756, 230, 314, 285, 90, 272, 326, 490, 736, 684, 858, 429, 941, 474, 286, 250, 165, 169, 201, 171, 555, 619, 248, 196, 648, 316, 804, 569, 603, 324, 841, 258, 294, 416, 309, 837, 862, 873, 197, 72, 872, 221, 178, 478, 548, 454, 832, 263, 128, 337, 879, 164, 228, 728, 130, 706, 885, 676, 0, 107, 197, 941, 627, 609, 311, 246, 816, 138, 948, 590, 1019, 227, 827, 294, 738, 694, 515, 760, 346, 760, 135, 123, 861, 677, 553, 702, 195, 754, 797, 466, 57, 236, 600, 260], [268, 658, 558, 418, 717, 533, 231, 402, 419, 690, 480, 926, 420, 53, 138, 239, 199, 132, 204, 202, 734, 72, 406, 356, 243, 393, 649, 123, 207, 178, 185, 165, 401, 383, 629, 577, 751, 276, 834, 367, 204, 236, 240, 109, 86, 140, 448, 512, 234, 157, 495, 391, 697, 462, 496, 
166, 734, 333, 187, 309, 202, 730, 755, 766, 272, 156, 765, 296, 253, 371, 441, 227, 699, 130, 68, 230, 772, 57, 303, 627, 86, 599, 778, 569, 107, 0, 90, 834, 520, 502, 386, 114, 709, 46, 841, 483, 912, 213, 720, 369, 631, 587, 362, 653, 239, 653, 210, 219, 754, 570, 446, 595, 270, 647, 690, 359, 164, 222, 493, 153], [261, 519, 426, 279, 585, 401, 141, 299, 329, 558, 390, 787, 422, 43, 200, 232, 211, 183, 294, 178, 595, 162, 316, 342, 333, 260, 510, 98, 200, 171, 275, 131, 455, 233, 497, 445, 619, 186, 702, 166, 114, 248, 294, 67, 90, 142, 316, 380, 246, 159, 405, 445, 558, 353, 364, 76, 602, 387, 227, 295, 195, 598, 623, 627, 292, 246, 633, 350, 267, 261, 309, 137, 567, 69, 82, 223, 633, 90, 357, 482, 176, 460, 646, 437, 197, 90, 0, 695, 388, 393, 440, 134, 570, 136, 702, 423, 773, 225, 588, 423, 499, 448, 272, 521, 165, 521, 223, 324, 615, 438, 432, 462, 231, 515, 551, 259, 254, 234, 355, 146], [710, 184, 271, 417, 239, 487, 496, 530, 300, 546, 249, 168, 616, 823, 753, 626, 800, 928, 981, 841, 108, 906, 321, 623, 1020, 459, 241, 715, 683, 696, 967, 700, 1118, 457, 583, 494, 378, 451, 279, 529, 696, 770, 957, 787, 751, 814, 490, 448, 840, 782, 310, 1108, 184, 560, 458, 655, 309, 1050, 780, 577, 640, 393, 240, 118, 955, 938, 362, 1013, 930, 492, 512, 558, 296, 655, 859, 705, 179, 777, 1020, 269, 856, 229, 353, 523, 941, 834, 695, 0, 427, 537, 1103, 741, 121, 816, 162, 630, 138, 772, 452, 1086, 328, 350, 345, 471, 588, 365, 886, 987, 80, 391, 694, 395, 894, 310, 189, 489, 998, 829, 343, 687], [396, 291, 180, 177, 198, 53, 297, 175, 268, 218, 305, 410, 710, 509, 439, 312, 493, 621, 674, 534, 382, 592, 310, 273, 713, 135, 197, 401, 369, 382, 653, 386, 811, 167, 157, 67, 215, 268, 315, 222, 389, 463, 650, 480, 437, 507, 78, 53, 533, 475, 358, 801, 269, 122, 32, 348, 215, 743, 466, 263, 326, 206, 236, 309, 648, 624, 238, 706, 623, 163, 92, 251, 180, 348, 545, 391, 256, 463, 713, 197, 542, 222, 259, 97, 627, 520, 388, 427, 0, 111, 796, 427, 325, 502, 325, 176, 396, 465, 
185, 779, 112, 101, 238, 133, 280, 137, 579, 680, 364, 51, 240, 67, 587, 128, 323, 174, 684, 522, 199, 373], [295, 424, 278, 183, 308, 118, 302, 100, 354, 275, 442, 520, 715, 491, 421, 241, 498, 626, 679, 539, 500, 574, 400, 191, 718, 141, 307, 383, 351, 364, 635, 368, 816, 172, 214, 162, 342, 273, 425, 227, 394, 468, 655, 485, 419, 512, 114, 151, 538, 480, 495, 806, 364, 40, 79, 353, 325, 748, 448, 178, 308, 321, 346, 419, 653, 606, 356, 711, 628, 123, 93, 256, 290, 353, 527, 373, 366, 445, 718, 281, 524, 365, 369, 154, 609, 502, 393, 537, 111, 0, 801, 409, 438, 484, 435, 65, 506, 470, 305, 784, 222, 199, 299, 238, 286, 244, 584, 685, 474, 161, 129, 186, 592, 238, 456, 135, 666, 527, 260, 355], [651, 927, 834, 687, 993, 809, 513, 711, 701, 966, 762, 1195, 407, 408, 590, 622, 342, 265, 282, 264, 1003, 321, 688, 739, 276, 668, 918, 506, 590, 561, 401, 548, 174, 641, 905, 853, 1027, 558, 1110, 588, 415, 379, 155, 346, 480, 300, 724, 788, 377, 325, 777, 173, 966, 761, 772, 453, 1010, 90, 617, 692, 585, 1006, 1031, 1035, 162, 383, 1041, 96, 187, 669, 717, 583, 975, 492, 370, 613, 1041, 482, 112, 890, 409, 868, 1054, 845, 311, 386, 440, 1103, 796, 801, 0, 522, 978, 432, 1110, 831, 1181, 355, 996, 142, 907, 856, 644, 929, 622, 929, 223, 205, 1023, 846, 829, 871, 222, 923, 959, 667, 368, 365, 762, 536], [175, 565, 472, 325, 624, 440, 311, 309, 495, 597, 583, 833, 504, 128, 76, 146, 283, 271, 343, 260, 641, 211, 541, 263, 382, 300, 556, 32, 76, 47, 272, 30, 537, 290, 536, 484, 658, 318, 741, 222, 256, 320, 376, 193, 56, 224, 355, 419, 318, 231, 636, 527, 604, 369, 403, 210, 641, 469, 103, 216, 109, 637, 662, 673, 374, 243, 672, 432, 349, 278, 423, 225, 606, 104, 164, 99, 679, 57, 439, 528, 112, 506, 685, 476, 246, 114, 134, 741, 427, 409, 522, 0, 616, 96, 748, 390, 819, 297, 627, 505, 538, 494, 439, 560, 151, 560, 305, 406, 661, 477, 353, 502, 313, 554, 597, 266, 303, 306, 401, 47], [585, 67, 146, 292, 135, 385, 371, 405, 175, 458, 147, 249, 499, 698, 628, 501, 675, 803, 
856, 716, 57, 781, 221, 498, 895, 334, 131, 590, 558, 571, 842, 575, 993, 332, 481, 392, 303, 326, 215, 404, 571, 645, 832, 662, 626, 689, 365, 261, 715, 657, 200, 983, 74, 435, 356, 530, 245, 925, 655, 452, 515, 318, 176, 62, 830, 813, 287, 888, 805, 367, 387, 433, 232, 530, 734, 580, 120, 652, 895, 159, 731, 104, 289, 421, 816, 709, 570, 121, 325, 438, 978, 616, 0, 691, 154, 505, 235, 647, 364, 961, 218, 225, 220, 363, 463, 287, 761, 862, 41, 289, 569, 293, 769, 208, 93, 364, 873, 704, 218, 562], [250, 640, 540, 400, 699, 515, 277, 384, 465, 672, 526, 908, 466, 99, 89, 221, 245, 178, 250, 220, 716, 96, 452, 338, 289, 375, 631, 105, 189, 160, 151, 147, 447, 365, 611, 559, 733, 322, 816, 349, 250, 282, 286, 155, 76, 186, 430, 494, 280, 203, 541, 437, 679, 444, 478, 212, 716, 379, 138, 291, 184, 712, 737, 748, 318, 122, 747, 342, 299, 353, 423, 235, 681, 138, 114, 212, 754, 39, 349, 603, 40, 581, 760, 551, 138, 46, 136, 816, 502, 484, 432, 96, 691, 0, 823, 465, 894, 259, 702, 415, 613, 569, 408, 635, 221, 635, 241, 265, 736, 552, 428, 577, 301, 629, 672, 341, 195, 268, 475, 135], [717, 221, 251, 424, 137, 385, 503, 499, 307, 417, 301, 95, 653, 830, 760, 633, 807, 935, 988, 848, 190, 913, 353, 631, 1027, 459, 169, 722, 690, 703, 974, 707, 1125, 464, 481, 392, 246, 458, 117, 536, 703, 777, 964, 794, 758, 821, 402, 346, 847, 789, 354, 1115, 150, 446, 356, 662, 169, 1057, 787, 584, 647, 272, 125, 92, 962, 945, 228, 1020, 937, 487, 416, 565, 181, 662, 866, 712, 69, 784, 1027, 197, 863, 236, 213, 421, 948, 841, 702, 162, 325, 435, 1110, 748, 154, 823, 0, 500, 81, 779, 322, 1003, 266, 284, 352, 362, 595, 294, 893, 994, 147, 289, 564, 293, 901, 208, 247, 498, 1005, 836, 350, 694], [246, 454, 361, 213, 373, 183, 332, 130, 384, 340, 472, 585, 745, 472, 402, 237, 528, 656, 709, 569, 530, 555, 430, 167, 748, 171, 372, 364, 332, 345, 616, 349, 846, 202, 279, 227, 407, 303, 490, 257, 424, 498, 685, 515, 400, 542, 157, 221, 568, 510, 525, 836, 429, 70, 144, 383, 390, 778, 429, 
174, 289, 386, 411, 484, 683, 587, 421, 741, 658, 153, 132, 286, 355, 383, 508, 354, 431, 426, 748, 346, 505, 395, 434, 219, 590, 483, 423, 630, 176, 65, 831, 390, 505, 465, 500, 0, 571, 500, 370, 814, 287, 269, 329, 303, 294, 309, 614, 715, 550, 226, 80, 251, 622, 303, 486, 165, 647, 557, 290, 336], [788, 302, 322, 495, 208, 456, 574, 570, 378, 488, 372, 23, 724, 901, 831, 704, 818, 1006, 1059, 919, 203, 984, 424, 701, 1098, 530, 240, 793, 761, 774, 1045, 778, 1196, 535, 552, 463, 317, 529, 188, 607, 774, 848, 1035, 865, 829, 892, 473, 417, 918, 860, 425, 1186, 221, 517, 427, 733, 240, 1128, 858, 655, 718, 343, 196, 173, 1033, 1016, 299, 1091, 1008, 558, 487, 636, 252, 733, 937, 783, 154, 855, 1098, 268, 934, 307, 284, 492, 1019, 912, 773, 138, 396, 506, 1181, 819, 235, 894, 81, 571, 0, 850, 393, 1164, 337, 355, 423, 433, 666, 365, 964, 1065, 160, 360, 635, 364, 972, 279, 284, 569, 1076, 907, 421, 765], [426, 596, 503, 356, 662, 478, 182, 380, 370, 635, 431, 864, 253, 183, 365, 397, 27, 181, 234, 82, 672, 237, 357, 514, 273, 337, 587, 281, 365, 336, 317, 323, 371, 310, 574, 522, 696, 227, 779, 257, 84, 19, 210, 83, 255, 67, 393, 457, 65, 35, 446, 361, 635, 430, 441, 122, 679, 303, 392, 467, 360, 675, 700, 704, 208, 299, 710, 266, 183, 338, 386, 252, 644, 267, 145, 388, 710, 257, 273, 559, 299, 537, 723, 514, 227, 213, 225, 772, 465, 470, 355, 297, 647, 259, 779, 500, 850, 0, 665, 339, 576, 525, 313, 598, 397, 598, 139, 240, 692, 515, 604, 540, 108, 592, 628, 336, 284, 56, 431, 311], [596, 575, 330, 377, 237, 179, 497, 375, 552, 100, 589, 407, 941, 709, 639, 512, 693, 821, 874, 734, 421, 792, 460, 467, 913, 335, 236, 601, 569, 582, 853, 586, 1011, 367, 91, 117, 72, 468, 259, 422, 589, 663, 850, 680, 637, 707, 278, 221, 733, 675, 642, 1001, 308, 316, 212, 548, 153, 943, 666, 463, 526, 47, 212, 334, 848, 824, 95, 906, 823, 363, 292, 451, 156, 548, 745, 591, 268, 663, 913, 241, 742, 506, 172, 133, 827, 720, 588, 452, 185, 305, 996, 627, 364, 702, 322, 370, 393, 665, 
0, 979, 208, 251, 388, 52, 480, 80, 779, 880, 394, 167, 434, 128, 787, 161, 607, 374, 884, 722, 349, 573], [634, 910, 817, 670, 976, 792, 496, 694, 684, 949, 745, 1178, 414, 391, 573, 605, 325, 248, 185, 247, 986, 304, 671, 722, 162, 651, 901, 489, 573, 544, 384, 531, 32, 624, 888, 836, 1010, 541, 1093, 571, 398, 362, 129, 329, 463, 283, 707, 771, 360, 308, 760, 33, 949, 744, 755, 436, 983, 52, 600, 675, 568, 989, 1014, 1018, 169, 366, 1024, 122, 194, 652, 700, 566, 958, 475, 353, 596, 1024, 465, 66, 873, 392, 851, 1037, 828, 294, 369, 423, 1086, 779, 784, 142, 505, 961, 415, 1003, 814, 1164, 339, 979, 0, 890, 839, 627, 912, 605, 912, 206, 143, 1006, 829, 812, 854, 229, 906, 942, 650, 351, 348, 745, 519], [507, 209, 111, 201, 139, 172, 408, 286, 213, 329, 223, 351, 575, 620, 550, 423, 604, 732, 785, 645, 275, 703, 259, 384, 824, 202, 87, 512, 480, 493, 764, 497, 922, 278, 268, 141, 173, 252, 256, 333, 500, 574, 761, 591, 548, 618, 138, 74, 644, 586, 276, 912, 144, 233, 143, 459, 156, 854, 577, 374, 437, 208, 177, 219, 759, 735, 187, 817, 734, 274, 159, 362, 121, 459, 656, 502, 197, 574, 824, 59, 653, 147, 200, 208, 738, 631, 499, 328, 112, 222, 907, 538, 218, 613, 266, 287, 337, 576, 208, 890, 0, 46, 183, 156, 391, 131, 690, 791, 248, 49, 351, 80, 698, 69, 244, 285, 795, 633, 144, 484], [463, 186, 79, 155, 157, 161, 251, 216, 167, 318, 206, 369, 558, 576, 506, 379, 553, 681, 734, 594, 262, 659, 213, 376, 773, 176, 115, 468, 436, 449, 720, 453, 871, 210, 257, 168, 219, 206, 274, 282, 449, 523, 710, 540, 504, 567, 112, 48, 593, 535, 259, 861, 172, 195, 120, 408, 202, 803, 533, 330, 393, 254, 195, 247, 708, 691, 233, 766, 683, 204, 133, 311, 167, 408, 612, 458, 215, 530, 773, 87, 609, 121, 246, 197, 694, 587, 448, 350, 101, 199, 856, 494, 225, 569, 284, 269, 355, 525, 251, 839, 46, 0, 137, 199, 341, 177, 639, 740, 270, 66, 333, 123, 647, 115, 218, 215, 751, 582, 98, 440], [408, 169, 105, 116, 242, 298, 131, 229, 57, 455, 118, 437, 468, 315, 451, 325, 341, 469, 522, 
382, 245, 525, 72, 322, 561, 158, 200, 413, 382, 394, 605, 398, 659, 155, 394, 305, 344, 86, 359, 228, 237, 311, 498, 328, 362, 355, 188, 160, 381, 323, 169, 649, 208, 259, 269, 196, 327, 591, 478, 276, 339, 391, 280, 277, 496, 587, 358, 554, 471, 191, 211, 177, 292, 260, 369, 403, 283, 362, 561, 172, 448, 110, 371, 334, 515, 362, 272, 345, 238, 299, 644, 439, 220, 408, 352, 329, 423, 313, 388, 627, 183, 137, 0, 336, 287, 314, 427, 528, 265, 203, 393, 260, 435, 240, 178, 188, 572, 370, 39, 386], [529, 389, 278, 310, 236, 127, 430, 308, 366, 125, 403, 447, 755, 642, 572, 445, 626, 754, 807, 667, 420, 725, 408, 400, 846, 268, 235, 534, 502, 515, 786, 519, 944, 300, 80, 68, 112, 401, 299, 355, 522, 596, 783, 613, 570, 640, 211, 169, 666, 608, 456, 934, 307, 249, 225, 481, 193, 876, 599, 396, 459, 89, 231, 353, 781, 757, 135, 839, 756, 296, 225, 384, 175, 481, 678, 524, 287, 596, 846, 240, 675, 320, 212, 84, 760, 653, 521, 471, 133, 238, 929, 560, 363, 635, 362, 303, 433, 598, 52, 912, 156, 199, 336, 0, 413, 87, 712, 813, 393, 115, 367, 76, 720, 160, 421, 307, 817, 655, 297, 506], [192, 412, 319, 172, 477, 293, 160, 154, 342, 450, 430, 680, 573, 228, 235, 108, 383, 371, 443, 360, 488, 311, 388, 167, 482, 153, 403, 119, 165, 178, 372, 154, 637, 126, 389, 337, 511, 162, 594, 71, 207, 420, 476, 232, 136, 324, 209, 273, 418, 331, 482, 627, 451, 246, 256, 161, 494, 569, 262, 120, 110, 490, 515, 590, 474, 343, 525, 532, 449, 127, 202, 74, 459, 96, 264, 187, 526, 182, 539, 375, 261, 353, 538, 329, 346, 239, 165, 588, 280, 286, 622, 151, 463, 221, 595, 294, 666, 397, 480, 605, 391, 341, 287, 413, 0, 413, 405, 506, 508, 330, 257, 355, 413, 407, 444, 115, 403, 406, 248, 169], [529, 286, 231, 310, 177, 165, 430, 308, 319, 182, 328, 379, 680, 642, 572, 445, 626, 754, 807, 667, 344, 725, 365, 406, 846, 268, 159, 534, 502, 515, 786, 519, 944, 300, 137, 106, 75, 370, 231, 355, 522, 596, 783, 613, 570, 640, 211, 155, 666, 608, 381, 934, 231, 255, 165, 481, 125, 876, 599, 396, 459, 77, 
155, 247, 781, 757, 89, 839, 756, 296, 225, 384, 99, 481, 678, 524, 211, 596, 846, 164, 675, 252, 148, 168, 760, 653, 521, 365, 137, 244, 929, 560, 287, 635, 294, 309, 365, 598, 80, 912, 131, 177, 314, 87, 413, 0, 712, 813, 317, 98, 373, 70, 720, 84, 346, 307, 817, 655, 275, 506], [434, 710, 617, 470, 776, 592, 296, 494, 484, 749, 545, 978, 346, 191, 373, 405, 125, 82, 142, 47, 786, 145, 471, 522, 145, 451, 701, 289, 373, 344, 225, 331, 238, 424, 688, 636, 810, 341, 893, 371, 198, 162, 77, 129, 263, 83, 507, 571, 160, 108, 560, 228, 749, 544, 555, 236, 793, 170, 400, 475, 368, 789, 814, 818, 76, 207, 824, 133, 51, 452, 500, 366, 758, 275, 153, 396, 824, 265, 140, 673, 233, 651, 837, 628, 135, 210, 223, 886, 579, 584, 223, 305, 761, 241, 893, 614, 964, 139, 779, 206, 690, 639, 427, 712, 405, 712, 0, 82, 806, 629, 612, 654, 55, 706, 742, 450, 192, 148, 545, 319], [535, 811, 718, 571, 877, 693, 397, 595, 585, 850, 646, 1079, 336, 292, 474, 506, 226, 94, 76, 129, 887, 154, 572, 623, 75, 552, 802, 390, 474, 445, 202, 432, 175, 525, 789, 737, 911, 442, 994, 472, 299, 263, 58, 230, 364, 184, 608, 672, 261, 209, 661, 160, 850, 645, 656, 337, 894, 140, 501, 576, 469, 890, 915, 919, 91, 184, 925, 113, 88, 553, 601, 467, 859, 376, 176, 497, 925, 276, 110, 774, 242, 752, 938, 729, 123, 219, 324, 987, 680, 685, 205, 406, 862, 265, 994, 715, 1065, 240, 880, 143, 791, 740, 528, 813, 506, 813, 82, 0, 907, 730, 713, 755, 123, 807, 843, 551, 145, 249, 646, 520], [630, 108, 191, 337, 165, 424, 416, 450, 220, 483, 188, 190, 540, 743, 673, 546, 720, 848, 901, 761, 43, 826, 266, 543, 940, 379, 161, 635, 603, 616, 887, 620, 1038, 877, 520, 431, 315, 371, 216, 449, 616, 690, 877, 707, 671, 734, 410, 306, 760, 702, 241, 1028, 104, 480, 395, 575, 246, 970, 700, 497, 560, 348, 177, 55, 875, 858, 299, 933, 850, 412, 432, 478, 233, 575, 779, 625, 121, 697, 940, 189, 776, 149, 290, 460, 861, 754, 615, 80, 364, 474, 1023, 661, 41, 736, 147, 550, 160, 692, 394, 1006, 248, 270, 265, 393, 508, 317, 
806, 907, 0, 319, 614, 323, 814, 238, 124, 409, 819, 749, 263, 607], [446, 252, 145, 227, 162, 111, 347, 225, 233, 268, 272, 374, 624, 559, 489, 362, 543, 671, 724, 584, 346, 642, 279, 323, 763, 185, 161, 451, 419, 432, 703, 436, 861, 217, 207, 141, 162, 272, 279, 272, 439, 513, 700, 530, 487, 557, 128, 50, 583, 525, 325, 851, 233, 172, 82, 398, 179, 793, 516, 313, 376, 175, 200, 273, 698, 674, 176, 756, 673, 213, 142, 301, 144, 398, 595, 441, 220, 513, 763, 108, 592, 187, 223, 147, 677, 570, 438, 391, 51, 161, 846, 477, 289, 552, 289, 226, 360, 515, 167, 829, 49, 66, 203, 115, 330, 98, 629, 730, 319, 0, 290, 39, 637, 92, 284, 224, 734, 572, 164, 423], [166, 518, 425, 277, 437, 247, 336, 114, 448, 404, 536, 649, 749, 435, 365, 150, 590, 578, 650, 567, 594, 518, 494, 90, 689, 235, 436, 402, 295, 383, 579, 387, 844, 189, 343, 291, 471, 295, 554, 247, 428, 627, 683, 500, 363, 531, 221, 285, 625, 538, 589, 834, 493, 134, 208, 372, 454, 776, 392, 137, 252, 450, 475, 548, 681, 550, 485, 739, 656, 150, 196, 276, 419, 353, 471, 317, 495, 389, 746, 410, 468, 459, 498, 283, 553, 446, 432, 694, 240, 129, 829, 353, 569, 428, 564, 80, 635, 604, 434, 812, 351, 333, 393, 367, 257, 373, 612, 713, 614, 290, 0, 315, 620, 367, 550, 154, 610, 613, 354, 299], [471, 313, 202, 252, 166, 99, 372, 250, 290, 210, 358, 378, 679, 584, 514, 387, 568, 696, 749, 609, 350, 667, 332, 348, 788, 210, 165, 476, 444, 457, 728, 461, 886, 242, 156, 61, 135, 311, 276, 297, 464, 538, 725, 555, 512, 582, 153, 93, 608, 550, 380, 876, 237, 197, 107, 423, 170, 818, 541, 338, 401, 147, 191, 277, 723, 699, 149, 781, 698, 238, 167, 326, 135, 423, 620, 466, 224, 538, 788, 139, 617, 244, 214, 118, 702, 595, 462, 395, 67, 186, 871, 502, 293, 577, 293, 251, 364, 540, 128, 854, 80, 123, 260, 76, 355, 70, 654, 755, 323, 39, 315, 0, 662, 87, 345, 249, 759, 597, 221, 448], [442, 718, 625, 478, 784, 600, 304, 502, 492, 757, 553, 986, 300, 199, 381, 413, 81, 137, 184, 53, 794, 209, 479, 530, 194, 459, 709, 297, 381, 352, 
285, 339, 261, 432, 696, 644, 818, 349, 901, 379, 206, 126, 90, 137, 271, 91, 515, 579, 111, 116, 568, 259, 757, 552, 563, 244, 801, 185, 408, 483, 376, 797, 822, 826, 60, 267, 832, 126, 35, 460, 508, 374, 766, 283, 161, 404, 832, 273, 155, 681, 293, 659, 845, 636, 195, 270, 231, 894, 587, 592, 222, 313, 769, 301, 901, 622, 972, 108, 787, 229, 698, 647, 435, 720, 413, 720, 55, 123, 814, 637, 620, 662, 0, 714, 750, 458, 252, 77, 553, 327], [523, 230, 147, 304, 81, 188, 424, 302, 235, 255, 174, 293, 596, 636, 566, 439, 620, 748, 801, 661, 265, 719, 281, 400, 840, 262, 75, 528, 496, 509, 780, 513, 938, 294, 284, 195, 104, 321, 193, 349, 516, 590, 777, 607, 564, 634, 205, 149, 660, 602, 297, 928, 147, 249, 159, 475, 87, 870, 593, 390, 453, 119, 108, 192, 775, 751, 118, 833, 750, 290, 219, 378, 52, 475, 672, 518, 139, 590, 840, 80, 669, 168, 131, 224, 754, 647, 515, 310, 128, 238, 923, 554, 208, 629, 208, 303, 279, 592, 161, 906, 69, 115, 240, 160, 407, 84, 706, 807, 238, 92, 367, 87, 714, 0, 262, 301, 811, 649, 201, 500], [566, 45, 139, 273, 228, 383, 352, 386, 121, 540, 60, 314, 411, 679, 609, 482, 656, 784, 837, 697, 81, 762, 132, 479, 876, 315, 189, 571, 539, 552, 823, 556, 974, 313, 479, 390, 366, 307, 308, 385, 552, 626, 813, 643, 607, 670, 346, 266, 696, 638, 112, 964, 158, 416, 342, 511, 335, 906, 636, 433, 496, 381, 266, 155, 811, 794, 380, 869, 786, 348, 368, 414, 314, 511, 715, 561, 213, 633, 876, 182, 712, 97, 379, 419, 797, 690, 551, 189, 323, 456, 959, 597, 93, 672, 247, 486, 284, 628, 607, 942, 244, 218, 178, 421, 444, 346, 742, 843, 124, 284, 550, 345, 750, 262, 0, 345, 854, 685, 199, 543], [235, 313, 220, 73, 371, 187, 168, 43, 243, 344, 331, 583, 581, 348, 278, 151, 364, 492, 545, 405, 389, 431, 289, 137, 584, 48, 304, 240, 208, 221, 492, 225, 682, 32, 283, 231, 405, 144, 488, 93, 260, 334, 521, 351, 276, 378, 103, 167, 404, 346, 384, 672, 352, 95, 150, 219, 388, 614, 305, 94, 165, 384, 409, 421, 519, 463, 419, 577, 494, 12, 92, 122, 353, 219, 384, 
230, 429, 302, 584, 276, 381, 254, 432, 223, 466, 359, 259, 489, 174, 135, 667, 266, 364, 341, 498, 165, 569, 336, 374, 650, 285, 215, 188, 307, 115, 307, 450, 551, 409, 224, 154, 249, 458, 301, 345, 0, 523, 393, 149, 212], [432, 822, 722, 582, 881, 697, 441, 566, 629, 854, 690, 1090, 491, 217, 302, 403, 270, 105, 69, 215, 898, 99, 616, 520, 108, 557, 813, 287, 371, 342, 77, 329, 383, 547, 793, 741, 915, 486, 998, 531, 343, 307, 222, 226, 258, 228, 612, 676, 305, 253, 705, 373, 861, 626, 660, 381, 898, 315, 351, 473, 366, 894, 919, 930, 254, 66, 929, 278, 235, 535, 605, 511, 863, 320, 185, 394, 936, 221, 285, 785, 177, 763, 942, 733, 57, 164, 254, 998, 684, 666, 368, 303, 873, 195, 1005, 647, 1076, 284, 884, 351, 795, 751, 572, 817, 403, 817, 192, 145, 819, 734, 610, 759, 252, 811, 854, 523, 0, 293, 657, 317], [435, 653, 560, 413, 719, 535, 239, 437, 427, 692, 488, 921, 220, 192, 374, 406, 29, 190, 243, 64, 729, 246, 414, 523, 282, 394, 644, 290, 374, 345, 326, 332, 380, 367, 631, 579, 753, 284, 836, 314, 141, 78, 219, 123, 264, 84, 450, 514, 34, 75, 503, 370, 692, 487, 498, 179, 736, 312, 401, 476, 369, 732, 757, 761, 137, 308, 767, 275, 112, 395, 443, 309, 701, 276, 154, 397, 767, 266, 282, 616, 308, 594, 780, 571, 236, 222, 234, 829, 522, 527, 365, 306, 704, 268, 836, 557, 907, 56, 722, 348, 633, 582, 370, 655, 406, 655, 148, 249, 749, 572, 613, 597, 77, 649, 685, 393, 293, 0, 488, 320], [369, 167, 79, 77, 205, 259, 153, 190, 97, 416, 185, 435, 537, 482, 412, 286, 459, 587, 640, 500, 243, 555, 111, 283, 679, 119, 163, 375, 343, 356, 626, 360, 777, 116, 355, 266, 305, 108, 322, 189, 356, 429, 616, 446, 411, 473, 149, 121, 499, 441, 238, 767, 206, 220, 230, 315, 288, 709, 439, 237, 300, 352, 243, 275, 614, 587, 319, 672, 589, 152, 172, 218, 253, 315, 518, 364, 281, 436, 679, 135, 515, 108, 332, 295, 600, 493, 355, 343, 199, 260, 762, 401, 218, 475, 350, 290, 421, 431, 349, 745, 144, 98, 39, 297, 248, 275, 545, 646, 263, 164, 354, 221, 553, 201, 199, 149, 657, 488, 
0, 347], [121, 511, 418, 271, 570, 386, 309, 255, 441, 543, 529, 779, 518, 142, 99, 84, 297, 285, 357, 274, 587, 225, 487, 209, 396, 246, 502, 35, 29, 42, 286, 36, 551, 236, 482, 430, 604, 316, 687, 220, 268, 334, 390, 207, 70, 238, 301, 365, 332, 245, 581, 541, 550, 315, 349, 222, 587, 483, 126, 162, 55, 583, 608, 619, 388, 257, 618, 446, 363, 224, 294, 249, 552, 104, 178, 60, 625, 96, 453, 474, 175, 452, 631, 422, 260, 153, 146, 687, 373, 355, 536, 47, 562, 135, 694, 336, 765, 311, 573, 519, 484, 440, 386, 506, 169, 506, 319, 520, 607, 423, 299, 448, 327, 500, 543, 212, 317, 320, 347, 0]] }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/tsp/gr17.json0000644000076500000240000000310314456461441016356 0ustar00runnerstaff{ "TourSize" : 17, "OptTour" : [15, 11, 8, 4, 1, 9, 10, 2, 14, 13, 16, 5, 7, 6, 12, 3, 0], "OptDistance" : 2085, "DistanceMatrix" : [[0, 633, 257, 91, 412, 150, 80, 134, 259, 505, 353, 324, 70, 211, 268, 246, 121], [633, 0, 390, 661, 227, 488, 572, 530, 555, 289, 282, 638, 567, 466, 420, 745, 518], [257, 390, 0, 228, 169, 112, 196, 154, 372, 262, 110, 437, 191, 74, 53, 472, 142], [91, 661, 228, 0, 383, 120, 77, 105, 175, 476, 324, 240, 27, 182, 239, 237, 84], [412, 227, 169, 383, 0, 267, 351, 309, 338, 196, 61, 421, 346, 243, 199, 528, 297], [150, 488, 112, 120, 267, 0, 63, 34, 264, 360, 208, 329, 83, 105, 123, 364, 35], [80, 572, 196, 77, 351, 63, 0, 29, 232, 444, 292, 297, 47, 150, 207, 332, 29], [134, 530, 154, 105, 309, 34, 29, 0, 249, 402, 250, 314, 68, 108, 165, 349, 36], [259, 555, 372, 175, 338, 264, 232, 249, 0, 495, 352, 95, 189, 326, 383, 202, 236], [505, 289, 262, 476, 196, 360, 444, 402, 495, 0, 154, 578, 439, 336, 240, 685, 390], [353, 282, 110, 324, 61, 208, 292, 250, 352, 154, 0, 435, 287, 184, 140, 542, 238], [324, 638, 437, 240, 421, 329, 297, 314, 95, 578, 435, 0, 254, 391, 448, 157, 301], [70, 567, 191, 27, 346, 83, 47, 68, 189, 439, 287, 254, 0, 145, 202, 289, 
55], [211, 466, 74, 182, 243, 105, 150, 108, 326, 336, 184, 391, 145, 0, 57, 426, 96], [268, 420, 53, 239, 199, 123, 207, 165, 383, 240, 140, 448, 202, 57, 0, 483, 153], [246, 745, 472, 237, 528, 364, 332, 349, 202, 685, 542, 157, 289, 426, 483, 0, 336], [121, 518, 142, 84, 297, 35, 29, 36, 236, 390, 238, 301, 55, 96, 153, 336, 0]] }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/tsp/gr24.json0000644000076500000240000000562514456461441016367 0ustar00runnerstaff{ "TourSize" : 24, "OptTour" : [16, 11, 3, 7, 6, 24, 8, 21, 5, 10, 17, 22, 18, 19, 15, 2, 20, 14, 13, 9, 23, 4, 12, 1], "OptDistance" : 1272, "DistanceMatrix" : [[0, 257, 187, 91, 150, 80, 130, 134, 243, 185, 214, 70, 272, 219, 293, 54, 211, 290, 268, 261, 175, 250, 192, 121], [257, 0, 196, 228, 112, 196, 167, 154, 209, 86, 223, 191, 180, 83, 50, 219, 74, 139, 53, 43, 128, 99, 228, 142], [187, 196, 0, 158, 96, 88, 59, 63, 286, 124, 49, 121, 315, 172, 232, 92, 81, 98, 138, 200, 76, 89, 235, 99], [91, 228, 158, 0, 120, 77, 101, 105, 159, 156, 185, 27, 188, 149, 264, 82, 182, 261, 239, 232, 146, 221, 108, 84], [150, 112, 96, 120, 0, 63, 56, 34, 190, 40, 123, 83, 193, 79, 148, 119, 105, 144, 123, 98, 32, 105, 119, 35], [80, 196, 88, 77, 63, 0, 25, 29, 216, 124, 115, 47, 245, 139, 232, 31, 150, 176, 207, 200, 76, 189, 165, 29], [130, 167, 59, 101, 56, 25, 0, 22, 229, 95, 86, 64, 258, 134, 203, 43, 121, 164, 178, 171, 47, 160, 178, 42], [134, 154, 63, 105, 34, 29, 22, 0, 225, 82, 90, 68, 228, 112, 190, 58, 108, 136, 165, 131, 30, 147, 154, 36], [243, 209, 286, 159, 190, 216, 229, 225, 0, 207, 313, 173, 29, 126, 248, 238, 310, 389, 367, 166, 222, 349, 71, 220], [185, 86, 124, 156, 40, 124, 95, 82, 207, 0, 151, 119, 159, 62, 122, 147, 37, 116, 86, 90, 56, 76, 136, 70], [214, 223, 49, 185, 123, 115, 86, 90, 313, 151, 0, 148, 342, 199, 259, 84, 160, 147, 187, 227, 103, 138, 262, 126], [70, 191, 121, 27, 83, 47, 64, 68, 173, 119, 148, 0, 209, 153, 227, 
53, 145, 224, 202, 195, 109, 184, 110, 55], [272, 180, 315, 188, 193, 245, 258, 228, 29, 159, 342, 209, 0, 97, 219, 267, 196, 275, 227, 137, 225, 235, 74, 249], [219, 83, 172, 149, 79, 139, 134, 112, 126, 62, 199, 153, 97, 0, 134, 170, 99, 178, 130, 69, 104, 138, 96, 104], [293, 50, 232, 264, 148, 232, 203, 190, 248, 122, 259, 227, 219, 134, 0, 255, 125, 154, 68, 82, 164, 114, 264, 178], [54, 219, 92, 82, 119, 31, 43, 58, 238, 147, 84, 53, 267, 170, 255, 0, 173, 190, 230, 223, 99, 212, 187, 60], [211, 74, 81, 182, 105, 150, 121, 108, 310, 37, 160, 145, 196, 99, 125, 173, 0, 79, 57, 90, 57, 39, 182, 96], [290, 139, 98, 261, 144, 176, 164, 136, 389, 116, 147, 224, 275, 178, 154, 190, 79, 0, 86, 176, 112, 40, 261, 175], [268, 53, 138, 239, 123, 207, 178, 165, 367, 86, 187, 202, 227, 130, 68, 230, 57, 86, 0, 90, 114, 46, 239, 153], [261, 43, 200, 232, 98, 200, 171, 131, 166, 90, 227, 195, 137, 69, 82, 223, 90, 176, 90, 0, 134, 136, 165, 146], [175, 128, 76, 146, 32, 76, 47, 30, 222, 56, 103, 109, 225, 104, 164, 99, 57, 112, 114, 134, 0, 96, 151, 47], [250, 99, 89, 221, 105, 189, 160, 147, 349, 76, 138, 184, 235, 138, 114, 212, 39, 40, 46, 136, 96, 0, 221, 135], [192, 228, 235, 108, 119, 165, 178, 154, 71, 136, 262, 110, 74, 96, 264, 187, 182, 261, 239, 165, 151, 221, 0, 169], [121, 142, 99, 84, 35, 29, 42, 36, 220, 70, 126, 55, 249, 104, 178, 60, 96, 175, 153, 146, 47, 135, 169, 0]] }././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/ga/tsp.py0000644000076500000240000000467614456461441015275 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. 
def evalTSP(individual, distance_matrix=None):
    """Return the total length of the closed tour described by *individual*.

    The tour visits the cities in the order given by *individual* and
    returns to its starting city, so the edge from the last city back to
    the first is included.

    :param individual: sequence of city indices (a permutation of the
        cities in the distance matrix).
    :param distance_matrix: optional square matrix of pairwise distances;
        defaults to the module-level ``distance_map`` loaded from the
        gr*.json data file.
    :returns: a 1-tuple containing the tour length, as required by DEAP's
        fitness API (fitness values are always tuples).
    """
    if distance_matrix is None:
        # Fall back to the matrix loaded at module import time.
        distance_matrix = distance_map
    # Start with the closing edge: last city back to the first.
    distance = distance_matrix[individual[-1]][individual[0]]
    # Add every consecutive edge along the tour.
    for gene1, gene2 in zip(individual[0:-1], individual[1:]):
        distance += distance_matrix[gene1][gene2]
    return distance,


def main():
    """Run the GA on the loaded TSP instance.

    Evolves a population of 300 tours for 40 generations with a fixed
    seed for reproducibility.

    :returns: ``(pop, stats, hof)`` — the final population, the
        statistics object, and the hall of fame holding the best tour.
    """
    random.seed(169)

    pop = toolbox.population(n=300)

    hof = tools.HallOfFame(1)
    stats = tools.Statistics(lambda ind: ind.fitness.values)
    stats.register("avg", numpy.mean)
    stats.register("std", numpy.std)
    stats.register("min", numpy.min)
    stats.register("max", numpy.max)

    # cxpb=0.7, mutpb=0.2, ngen=40 — standard simple-GA loop.
    algorithms.eaSimple(pop, toolbox, 0.7, 0.2, 40, stats=stats,
                        halloffame=hof)

    return pop, stats, hof
ITEMS_NAME = "Mixed Fruit", "French Fries", "Side Salad", "Hot Wings", "Mozzarella Sticks", "Sampler Plate" ITEMS_PRICE = 2.15, 2.75, 3.35, 3.55, 4.2, 5.8 ITEMS = dict((name, (price, random.uniform(1, 5))) for name, price in zip(ITEMS_NAME, ITEMS_PRICE)) creator.create("Fitness", base.Fitness, weights=(-1.0, -1.0)) creator.create("Individual", Counter, fitness=creator.Fitness) toolbox = base.Toolbox() toolbox.register("attr_item", random.choice, ITEMS_NAME) toolbox.register("individual", tools.initRepeat, creator.Individual, toolbox.attr_item, IND_INIT_SIZE) toolbox.register("population", tools.initRepeat, list, toolbox.individual) def evalXKCD(individual, target_price): """Evaluates the fitness and return the error on the price and the time taken by the order if the chef can cook everything in parallel.""" price = 0.0 times = list() for item, number in individual.items(): price += ITEMS[item][0] * number times.append(ITEMS[item][1]) return abs(price - target_price), max(times) def cxCounter(ind1, ind2, indpb): """Swaps the number of particular items between two individuals""" for key in ITEMS.keys(): if random.random() < indpb: ind1[key], ind2[key] = ind2[key], ind1[key] return ind1, ind2 def mutCounter(individual): """Adds or remove an item from an individual""" if random.random() > 0.5: individual.update([random.choice(ITEMS_NAME)]) else: val = random.choice(ITEMS_NAME) individual.subtract([val]) if individual[val] < 0: del individual[val] return individual, toolbox.register("evaluate", evalXKCD, target_price=15.05) toolbox.register("mate", cxCounter, indpb=0.5) toolbox.register("mutate", mutCounter) toolbox.register("select", tools.selNSGA2) def main(): NGEN = 40 MU = 100 LAMBDA = 200 CXPB = 0.3 MUTPB = 0.6 pop = toolbox.population(n=MU) hof = tools.ParetoFront() price_stats = tools.Statistics(key=lambda ind: ind.fitness.values[0]) time_stats = tools.Statistics(key=lambda ind: ind.fitness.values[1]) stats = tools.MultiStatistics(price=price_stats, 
time=time_stats) stats.register("avg", numpy.mean, axis=0) stats.register("std", numpy.std, axis=0) stats.register("min", numpy.min, axis=0) algorithms.eaMuPlusLambda(pop, toolbox, MU, LAMBDA, CXPB, MUTPB, NGEN, stats, halloffame=hof) return pop, stats, hof if __name__ == "__main__": _, _, hof = main() from matplotlib import pyplot as plt error_price = [i.fitness.values[0] for i in hof] time = [i.fitness.values[1] for i in hof] plt.plot(error_price, time, 'bo') plt.xlabel("Price difference") plt.ylabel("Total time") plt.show() ././@PaxHeader0000000000000000000000000000003300000000000010211 xustar0027 mtime=1689936700.730371 deap-1.4.1/examples/gp/0000755000076500000240000000000014456461475014126 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/gp/__init__.py0000644000076500000240000000126414456461441016233 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see .././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/gp/adf_symbreg.py0000644000076500000240000001411414456461441016754 0ustar00runnerstaff# This file is part of DEAP. 
# # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . import random import operator import math import numpy from functools import partial from deap import base from deap import creator from deap import gp from deap import tools # Define new functions def protectedDiv(left, right): try: return left / right except ZeroDivisionError: return 1 adfset2 = gp.PrimitiveSet("ADF2", 2) adfset2.addPrimitive(operator.add, 2) adfset2.addPrimitive(operator.sub, 2) adfset2.addPrimitive(operator.mul, 2) adfset2.addPrimitive(protectedDiv, 2) adfset2.addPrimitive(operator.neg, 1) adfset2.addPrimitive(math.cos, 1) adfset2.addPrimitive(math.sin, 1) adfset1 = gp.PrimitiveSet("ADF1", 2) adfset1.addPrimitive(operator.add, 2) adfset1.addPrimitive(operator.sub, 2) adfset1.addPrimitive(operator.mul, 2) adfset1.addPrimitive(protectedDiv, 2) adfset1.addPrimitive(operator.neg, 1) adfset1.addPrimitive(math.cos, 1) adfset1.addPrimitive(math.sin, 1) adfset1.addADF(adfset2) adfset0 = gp.PrimitiveSet("ADF0", 2) adfset0.addPrimitive(operator.add, 2) adfset0.addPrimitive(operator.sub, 2) adfset0.addPrimitive(operator.mul, 2) adfset0.addPrimitive(protectedDiv, 2) adfset0.addPrimitive(operator.neg, 1) adfset0.addPrimitive(math.cos, 1) adfset0.addPrimitive(math.sin, 1) adfset0.addADF(adfset1) adfset0.addADF(adfset2) pset = gp.PrimitiveSet("MAIN", 1) pset.addPrimitive(operator.add, 2) pset.addPrimitive(operator.sub, 2) pset.addPrimitive(operator.mul, 2) 
pset.addPrimitive(protectedDiv, 2) pset.addPrimitive(operator.neg, 1) pset.addPrimitive(math.cos, 1) pset.addPrimitive(math.sin, 1) pset.addEphemeralConstant("rand101", partial(random.randint, -1, 1)) pset.addADF(adfset0) pset.addADF(adfset1) pset.addADF(adfset2) pset.renameArguments(ARG0='x') psets = (pset, adfset0, adfset1, adfset2) creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Tree", gp.PrimitiveTree) creator.create("Individual", list, fitness=creator.FitnessMin) toolbox = base.Toolbox() toolbox.register('adf_expr0', gp.genFull, pset=adfset0, min_=1, max_=2) toolbox.register('adf_expr1', gp.genFull, pset=adfset1, min_=1, max_=2) toolbox.register('adf_expr2', gp.genFull, pset=adfset2, min_=1, max_=2) toolbox.register('main_expr', gp.genHalfAndHalf, pset=pset, min_=1, max_=2) toolbox.register('ADF0', tools.initIterate, creator.Tree, toolbox.adf_expr0) toolbox.register('ADF1', tools.initIterate, creator.Tree, toolbox.adf_expr1) toolbox.register('ADF2', tools.initIterate, creator.Tree, toolbox.adf_expr2) toolbox.register('MAIN', tools.initIterate, creator.Tree, toolbox.main_expr) func_cycle = [toolbox.MAIN, toolbox.ADF0, toolbox.ADF1, toolbox.ADF2] toolbox.register('individual', tools.initCycle, creator.Individual, func_cycle) toolbox.register('population', tools.initRepeat, list, toolbox.individual) def evalSymbReg(individual): # Transform the tree expression in a callable function func = toolbox.compile(individual) # Evaluate the sum of squared difference between the expression # and the real function : x**4 + x**3 + x**2 + x values = (x/10. 
for x in range(-10, 10)) diff_func = lambda x: (func(x)-(x**4 + x**3 + x**2 + x))**2 diff = sum(map(diff_func, values)) return diff, toolbox.register('compile', gp.compileADF, psets=psets) toolbox.register('evaluate', evalSymbReg) toolbox.register('select', tools.selTournament, tournsize=3) toolbox.register('mate', gp.cxOnePoint) toolbox.register('expr', gp.genFull, min_=1, max_=2) toolbox.register('mutate', gp.mutUniform, expr=toolbox.expr) def main(): random.seed(1024) pop = toolbox.population(n=100) hof = tools.HallOfFame(1) stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) logbook = tools.Logbook() logbook.header = "gen", "evals", "std", "min", "avg", "max" CXPB, MUTPB, NGEN = 0.5, 0.2, 40 # Evaluate the entire population for ind in pop: ind.fitness.values = toolbox.evaluate(ind) hof.update(pop) record = stats.compile(pop) logbook.record(gen=0, evals=len(pop), **record) print(logbook.stream) for g in range(1, NGEN): # Select the offspring offspring = toolbox.select(pop, len(pop)) # Clone the offspring offspring = [toolbox.clone(ind) for ind in offspring] # Apply crossover and mutation for ind1, ind2 in zip(offspring[::2], offspring[1::2]): for tree1, tree2 in zip(ind1, ind2): if random.random() < CXPB: toolbox.mate(tree1, tree2) del ind1.fitness.values del ind2.fitness.values for ind in offspring: for tree, pset in zip(ind, psets): if random.random() < MUTPB: toolbox.mutate(individual=tree, pset=pset) del ind.fitness.values # Evaluate the individuals with an invalid fitness invalids = [ind for ind in offspring if not ind.fitness.valid] for ind in invalids: ind.fitness.values = toolbox.evaluate(ind) # Replacement of the population by the offspring pop = offspring hof.update(pop) record = stats.compile(pop) logbook.record(gen=g, evals=len(invalids), **record) print(logbook.stream) print('Best individual : ', hof[0][0], 
hof[0].fitness) return pop, stats, hof if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.7322202 deap-1.4.1/examples/gp/ant/0000755000076500000240000000000014456461475014710 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/gp/ant/AntSimulatorFast.cpp0000644000076500000240000002641414456461441020654 0ustar00runnerstaff#include "AntSimulatorFast.hpp" AntSimulatorFast::AntSimulatorFast(unsigned int inMaxMoves) : mMaxMoves(inMaxMoves), mNbPiecesAvail(0), mRowStart(0), mColStart(0), mDirectionStart(AntSimulatorFast::eAntEast), mNbMovesAnt(0), mNbPiecesEaten(0), mRowAnt(0), mColAnt(0), mDirectionAnt(AntSimulatorFast::eAntEast) { } void AntSimulatorFast::parseMatrix(char* inFileStr){ std::fstream lFileHandle; lFileHandle.open(inFileStr, std::fstream::in); mOrigTrail.resize(ROWS_NBR); mExecTrail.resize(ROWS_NBR); for(unsigned int i = 0; i < ROWS_NBR; i++){ mOrigTrail[i].resize(COLS_NBR); mExecTrail[i].resize(COLS_NBR); } char lBuffer; for(unsigned int i=0; i> lBuffer; switch(lBuffer) { case eStart: { mOrigTrail[i][j] = eStart; mRowStart = i; mColStart = j; mExecTrail[i][j] = eStart; break; } case eEmpty: case eFoodPiece: { mOrigTrail[i][j] = lBuffer; mExecTrail[i][j] = lBuffer; break; } case ePassed: { mOrigTrail[i][j] = eEmpty; mExecTrail[i][j] = ePassed; break; } case eEatenPiece: { mOrigTrail[i][j] = eFoodPiece; mExecTrail[i][j] = eEatenPiece; break; } case eAntNorth: case eAntEast: case eAntSouth: case eAntWest: { mOrigTrail[i][j] = eEmpty; mExecTrail[i][j] = lBuffer; break; } default: { } } } } lFileHandle.close(); } void AntSimulatorFast::turnLeft(void){ if(mNbMovesAnt >= mMaxMoves) return; ++mNbMovesAnt; switch(mDirectionAnt) { case eAntNorth: { mDirectionAnt = eAntWest; break; } case eAntEast: { mDirectionAnt = eAntNorth; break; } case eAntSouth: { mDirectionAnt = eAntEast; break; } case eAntWest: { 
mDirectionAnt = eAntSouth; break; } default: { } } } void AntSimulatorFast::turnRight(void){ if(mNbMovesAnt >= mMaxMoves) return; ++mNbMovesAnt; switch(mDirectionAnt) { case eAntNorth: { mDirectionAnt = eAntEast; break; } case eAntEast: { mDirectionAnt = eAntSouth; break; } case eAntSouth: { mDirectionAnt = eAntWest; break; } case eAntWest: { mDirectionAnt = eAntNorth; break; } default: { } } } void AntSimulatorFast::moveForward(void){ if(mNbMovesAnt >= mMaxMoves) return; ++mNbMovesAnt; switch(mDirectionAnt) { case eAntNorth: { if(mRowAnt == 0) mRowAnt = (mExecTrail.size()-1); else --mRowAnt; break; } case eAntEast: { ++mColAnt; if(mColAnt >= mExecTrail.front().size()) mColAnt = 0; break; } case eAntSouth: { ++mRowAnt; if(mRowAnt >= mExecTrail.size()) mRowAnt = 0; break; } case eAntWest: { if(mColAnt == 0) mColAnt = (mExecTrail.front().size()-1); else --mColAnt; break; } default: { } } switch(mExecTrail[mRowAnt][mColAnt]) { case eStart: case ePassed: case eEatenPiece: break; case eEmpty: { mExecTrail[mRowAnt][mColAnt] = ePassed; break; } case eFoodPiece: { mExecTrail[mRowAnt][mColAnt] = eEatenPiece; ++mNbPiecesEaten; break; } default: { } } } void AntSimulatorFast::ifFoodAhead(PyObject* inIfTrue, PyObject* inIfFalse){ unsigned int lAheadRow = mRowAnt; unsigned int lAheadCol = mColAnt; switch(mDirectionAnt) { case eAntNorth: { if(lAheadRow == 0) lAheadRow = (mExecTrail.size()-1); else --lAheadRow; break; } case eAntEast: { ++lAheadCol; if(lAheadCol >= mExecTrail.front().size()) lAheadCol = 0; break; } case eAntSouth: { ++lAheadRow; if(lAheadRow >= mExecTrail.size()) lAheadRow = 0; break; } case eAntWest: { if(lAheadCol == 0) lAheadCol = (mExecTrail.front().size()-1); else --lAheadCol; break; } default: { } } PyObject_CallFunctionObjArgs((mExecTrail[lAheadRow][lAheadCol] == eFoodPiece) ? 
inIfTrue : inIfFalse, NULL); } void AntSimulatorFast::run(PyObject* inWrappedFunc){ this->reset(); while(mNbMovesAnt < mMaxMoves) PyObject_CallFunctionObjArgs(inWrappedFunc, NULL); } void AntSimulatorFast::reset(void){ mExecTrail = mOrigTrail; mNbMovesAnt = 0; mNbPiecesEaten = 0; mRowAnt = mRowStart; mColAnt = mColStart; mDirectionAnt = mDirectionStart; } /* * * Python wrappers * * */ typedef struct { PyObject_HEAD AntSimulatorFast *mInnerClass; } AntSimulatorWrapper; static int wrapAntSimulatorConstructor(AntSimulatorWrapper *self, PyObject *args, PyObject *kwargs){ int lMaxMoves; const char *keywords[] = {"max_moves", NULL}; if (!PyArg_ParseTupleAndKeywords(args, kwargs, (char *) "i", (char **) keywords, &lMaxMoves)) { return -1; } self->mInnerClass = new AntSimulatorFast(lMaxMoves); return 0; } static PyObject* wrapTurnLeft(AntSimulatorWrapper *self){ self->mInnerClass->turnLeft(); Py_INCREF(Py_None); return Py_None; } static PyObject* wrapTurnRight(AntSimulatorWrapper *self){ self->mInnerClass->turnRight(); Py_INCREF(Py_None); return Py_None; } static PyObject* wrapMoveForward(AntSimulatorWrapper *self){ self->mInnerClass->moveForward(); Py_INCREF(Py_None); return Py_None; } static PyObject* wrapIfFoodAhead(AntSimulatorWrapper *self, PyObject *args){ self->mInnerClass->ifFoodAhead(PyTuple_GET_ITEM(args, 0), PyTuple_GET_ITEM(args, 1)); Py_INCREF(Py_None); return Py_None; } static PyObject* wrapRun(AntSimulatorWrapper *self, PyObject *args){ PyObject* func = PyTuple_GetItem(args, 0); self->mInnerClass->run(func); Py_INCREF(Py_None); return Py_None; } static PyObject* wrapParseMatrix(AntSimulatorWrapper *self, PyObject *args){ self->mInnerClass->parseMatrix(PyString_AsString(PyFile_Name(PyTuple_GetItem(args, 0)))); Py_INCREF(Py_None); return Py_None; } static PyObject* wrapGetEaten(AntSimulatorWrapper *self, void *closure){ PyObject *py_retval; py_retval = Py_BuildValue((char *) "i", self->mInnerClass->mNbPiecesEaten); return py_retval; } // Getters and setters 
(here only for the 'eaten' attribute) static PyGetSetDef AntSimulatorWrapper_getsets[] = { { (char*) "eaten", /* attribute name */ (getter) wrapGetEaten, /* C function to get the attribute */ NULL, /* C function to set the attribute */ NULL, /* optional doc string */ NULL /* optional additional data for getter and setter */ }, { NULL, NULL, NULL, NULL, NULL } }; // Class method declarations static PyMethodDef AntSimulatorWrapper_methods[] = { {(char *) "turn_left", (PyCFunction) wrapTurnLeft, METH_NOARGS, NULL }, {(char *) "turn_right", (PyCFunction) wrapTurnRight, METH_NOARGS, NULL }, {(char *) "move_forward", (PyCFunction) wrapMoveForward, METH_NOARGS, NULL }, {(char *) "if_food_ahead", (PyCFunction) wrapIfFoodAhead, METH_VARARGS, NULL }, {(char *) "parse_matrix", (PyCFunction) wrapParseMatrix, METH_VARARGS, NULL }, {(char *) "run", (PyCFunction) wrapRun, METH_VARARGS, NULL }, {NULL, NULL, 0, NULL} }; static void AntSimulatorWrapperDealloc(AntSimulatorWrapper *self){ delete self->mInnerClass; self->ob_type->tp_free((PyObject*)self); } static PyObject* AntSimulatorWrapperRichcompare(AntSimulatorWrapper *self, AntSimulatorWrapper *other, int opid){ Py_INCREF(Py_NotImplemented); return Py_NotImplemented; } PyTypeObject AntSimulatorWrapper_Type = { PyObject_HEAD_INIT(NULL) 0, /* ob_size */ (char *) "AntC.AntSimulatorFast", /* tp_name */ sizeof(AntSimulatorWrapper), /* tp_basicsize */ 0, /* tp_itemsize */ /* methods */ (destructor)AntSimulatorWrapperDealloc, /* tp_dealloc */ (printfunc)0, /* tp_print */ (getattrfunc)NULL, /* tp_getattr */ (setattrfunc)NULL, /* tp_setattr */ (cmpfunc)NULL, /* tp_compare */ (reprfunc)NULL, /* tp_repr */ (PyNumberMethods*)NULL, /* tp_as_number */ (PySequenceMethods*)NULL, /* tp_as_sequence */ (PyMappingMethods*)NULL, /* tp_as_mapping */ (hashfunc)NULL, /* tp_hash */ (ternaryfunc)NULL, /* tp_call */ (reprfunc)NULL, /* tp_str */ (getattrofunc)NULL, /* tp_getattro */ (setattrofunc)NULL, /* tp_setattro */ (PyBufferProcs*)NULL, /* 
tp_as_buffer */ Py_TPFLAGS_DEFAULT, /* tp_flags */ NULL, /* Documentation string */ (traverseproc)NULL, /* tp_traverse */ (inquiry)NULL, /* tp_clear */ (richcmpfunc)AntSimulatorWrapperRichcompare, /* tp_richcompare */ 0, /* tp_weaklistoffset */ (getiterfunc)NULL, /* tp_iter */ (iternextfunc)NULL, /* tp_iternext */ (struct PyMethodDef*)AntSimulatorWrapper_methods, /* tp_methods */ (struct PyMemberDef*)0, /* tp_members */ AntSimulatorWrapper_getsets, /* tp_getset */ NULL, /* tp_base */ NULL, /* tp_dict */ (descrgetfunc)NULL, /* tp_descr_get */ (descrsetfunc)NULL, /* tp_descr_set */ 0, /* tp_dictoffset */ (initproc)wrapAntSimulatorConstructor, /* tp_init */ (allocfunc)PyType_GenericAlloc, /* tp_alloc */ (newfunc)PyType_GenericNew, /* tp_new */ (freefunc)0, /* tp_free */ (inquiry)NULL, /* tp_is_gc */ NULL, /* tp_bases */ NULL, /* tp_mro */ NULL, /* tp_cache */ NULL, /* tp_subclasses */ NULL, /* tp_weaklist */ (destructor) NULL /* tp_del */ }; PyObject* progn(PyObject *self, PyObject *args){ for(Py_ssize_t i = 0; i < PyTuple_Size(args); i++) PyObject_CallFunctionObjArgs(PyTuple_GET_ITEM(args, i), NULL); Py_INCREF(Py_None); return Py_None; } static PyMethodDef AntC_functions[] = { {"progn", progn, METH_VARARGS, "Boum"}, {NULL, NULL, 0, NULL} }; PyMODINIT_FUNC initAntC(void) { PyObject *m; m = Py_InitModule3((char *) "AntC", AntC_functions, NULL); if (m == NULL) { return; } /* Register the 'AntSimulatorWrapper' class */ if (PyType_Ready(&AntSimulatorWrapper_Type)) { return; } PyModule_AddObject(m, (char *) "AntSimulatorFast", (PyObject *) &AntSimulatorWrapper_Type); } ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/gp/ant/AntSimulatorFast.hpp0000644000076500000240000000331014456461441020647 0ustar00runnerstaff#include #if PY_MAJOR_VERSION >= 3 #define PY3K #endif #include #include #include #include #include #include #include #include #define ROWS_NBR 32 #define COLS_NBR 32 class AntSimulatorFast { public: 
enum State {eStart='S', eEmpty='.', ePassed='x', eFoodPiece='#', eEatenPiece='@', eAntNorth='^', eAntEast='}', eAntSouth='v', eAntWest='{'}; AntSimulatorFast(unsigned int inMaxMoves); void parseMatrix(char* inFileStr); void turnLeft(void); void turnRight(void); void moveForward(void); void ifFoodAhead(PyObject* inIfTrue, PyObject* inIfFalse); void run(PyObject* inWrappedFunc); unsigned int mNbPiecesEaten; //!< Number of food pieces eaten. private: void reset(void); std::vector< std::vector > mOrigTrail; //!< Initial trail set-up. unsigned int mMaxMoves; //!< Maximum number of moves allowed. unsigned int mNbPiecesAvail; //!< Number of food pieces available. unsigned int mRowStart; //!< Row at which the ant starts collecting food. unsigned int mColStart; //!< Column at which the ant starts collecting food. unsigned int mDirectionStart; //!< Direction at which the ant is looking when starting. std::vector< std::vector > mExecTrail; //!< Execution trail set-up. unsigned int mNbMovesAnt; //!< Number of moves done by the ant. unsigned int mRowAnt; //!< Row of the actual ant position. unsigned int mColAnt; //!< Column of the actual ant position. char mDirectionAnt; //!< Direction in which the ant is looking. }; ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/gp/ant/buildAntSimFast.py0000644000076500000240000000046414456461441020310 0ustar00runnerstafffrom distutils.core import setup, Extension module1 = Extension('AntC', sources = ['AntSimulatorFast.cpp']) setup (name = 'AntC', version = '1.0', description = 'Fast version of the Ant Simulator (aims to replace the AntSimulator class)', ext_modules = [module1]) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/gp/ant/santafe_trail.txt0000644000076500000240000000204114456461441020253 0ustar00runnerstaffS###............................ ...#............................ 
...#.....................###.... ...#....................#....#.. ...#....................#....#.. ...####.#####........##......... ............#................#.. ............#.......#........... ............#.......#........#.. ............#.......#........... ....................#........... ............#................#.. ............#................... ............#.......#.....###... ............#.......#..#........ .................#.............. ................................ ............#...........#....... ............#...#..........#.... ............#...#............... ............#...#............... ............#...#.........#..... ............#..........#........ ............#................... ...##. .#####....#............... .#..............#............... .#..............#............... .#......#######................. .#.....#........................ .......#........................ ..####.......................... ................................ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/gp/ant.py0000644000076500000240000001532014456461441015254 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . """ This example is from "John R. Koza. Genetic Programming: On the Programming of Computers by Natural Selection. MIT Press, Cambridge, MA, USA, 1992.". 
The problem is called The Artificial Ant Problem. The goal of this example is to show how to use DEAP and its GP framework with with complex system of functions and object. Given an AntSimulator ant, this solution should get the 89 pieces of food within 543 moves. ant.routine = ant.if_food_ahead(ant.move_forward, prog3(ant.turn_left, prog2(ant.if_food_ahead(ant.move_forward, ant.turn_right), prog2(ant.turn_right, prog2(ant.turn_left, ant.turn_right))), prog2(ant.if_food_ahead(ant.move_forward, ant.turn_left), ant.move_forward))) Best solution found with DEAP: prog3(prog3(move_forward, turn_right, if_food_ahead(if_food_ahead(prog3(move_forward, move_forward, move_forward), prog2(turn_left, turn_right)), turn_left)), if_food_ahead(turn_left, turn_left), if_food_ahead(move_forward, turn_right)) fitness = (89,) """ import copy import random import numpy from functools import partial from deap import algorithms from deap import base from deap import creator from deap import tools from deap import gp def progn(*args): for arg in args: arg() def prog2(out1, out2): return partial(progn,out1,out2) def prog3(out1, out2, out3): return partial(progn,out1,out2,out3) def if_then_else(condition, out1, out2): out1() if condition() else out2() class AntSimulator(object): direction = ["north","east","south","west"] dir_row = [1, 0, -1, 0] dir_col = [0, 1, 0, -1] def __init__(self, max_moves): self.max_moves = max_moves self.moves = 0 self.eaten = 0 self.routine = None def _reset(self): self.row = self.row_start self.col = self.col_start self.dir = 1 self.moves = 0 self.eaten = 0 self.matrix_exc = copy.deepcopy(self.matrix) @property def position(self): return (self.row, self.col, self.direction[self.dir]) def turn_left(self): if self.moves < self.max_moves: self.moves += 1 self.dir = (self.dir - 1) % 4 def turn_right(self): if self.moves < self.max_moves: self.moves += 1 self.dir = (self.dir + 1) % 4 def move_forward(self): if self.moves < self.max_moves: self.moves += 1 self.row = 
(self.row + self.dir_row[self.dir]) % self.matrix_row self.col = (self.col + self.dir_col[self.dir]) % self.matrix_col if self.matrix_exc[self.row][self.col] == "food": self.eaten += 1 self.matrix_exc[self.row][self.col] = "passed" def sense_food(self): ahead_row = (self.row + self.dir_row[self.dir]) % self.matrix_row ahead_col = (self.col + self.dir_col[self.dir]) % self.matrix_col return self.matrix_exc[ahead_row][ahead_col] == "food" def if_food_ahead(self, out1, out2): return partial(if_then_else, self.sense_food, out1, out2) def run(self,routine): self._reset() while self.moves < self.max_moves: routine() def parse_matrix(self, matrix): self.matrix = list() for i, line in enumerate(matrix): self.matrix.append(list()) for j, col in enumerate(line): if col == "#": self.matrix[-1].append("food") elif col == ".": self.matrix[-1].append("empty") elif col == "S": self.matrix[-1].append("empty") self.row_start = self.row = i self.col_start = self.col = j self.dir = 1 self.matrix_row = len(self.matrix) self.matrix_col = len(self.matrix[0]) self.matrix_exc = copy.deepcopy(self.matrix) ant = AntSimulator(600) pset = gp.PrimitiveSet("MAIN", 0) pset.addPrimitive(ant.if_food_ahead, 2) pset.addPrimitive(prog2, 2) pset.addPrimitive(prog3, 3) pset.addTerminal(ant.move_forward) pset.addTerminal(ant.turn_left) pset.addTerminal(ant.turn_right) creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", gp.PrimitiveTree, fitness=creator.FitnessMax) toolbox = base.Toolbox() # Attribute generator toolbox.register("expr_init", gp.genFull, pset=pset, min_=1, max_=2) # Structure initializers toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.expr_init) toolbox.register("population", tools.initRepeat, list, toolbox.individual) def evalArtificialAnt(individual): # Transform the tree expression to functional Python code routine = gp.compile(individual, pset) # Run the generated routine ant.run(routine) return ant.eaten, 
toolbox.register("evaluate", evalArtificialAnt) toolbox.register("select", tools.selTournament, tournsize=7) toolbox.register("mate", gp.cxOnePoint) toolbox.register("expr_mut", gp.genFull, min_=0, max_=2) toolbox.register("mutate", gp.mutUniform, expr=toolbox.expr_mut, pset=pset) def main(): random.seed(69) with open("ant/santafe_trail.txt") as trail_file: ant.parse_matrix(trail_file) pop = toolbox.population(n=300) hof = tools.HallOfFame(1) stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) algorithms.eaSimple(pop, toolbox, 0.5, 0.2, 40, stats, halloffame=hof) return pop, hof, stats if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/gp/multiplexer.py0000644000076500000240000000633714456461441017054 0ustar00runnerstaff# This file is part of EAP. # # EAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # EAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with EAP. If not, see . 
import random import operator import numpy from deap import algorithms from deap import base from deap import creator from deap import tools from deap import gp def if_then_else(condition, out1, out2): return out1 if condition else out2 # Initialize Multiplexer problem input and output vectors MUX_SELECT_LINES = 3 MUX_IN_LINES = 2 ** MUX_SELECT_LINES MUX_TOTAL_LINES = MUX_SELECT_LINES + MUX_IN_LINES # input : [A0 A1 A2 D0 D1 D2 D3 D4 D5 D6 D7] for a 8-3 mux inputs = [[0] * MUX_TOTAL_LINES for i in range(2 ** MUX_TOTAL_LINES)] outputs = [None] * (2 ** MUX_TOTAL_LINES) for i in range(2 ** MUX_TOTAL_LINES): value = i divisor = 2 ** MUX_TOTAL_LINES # Fill the input bits for j in range(MUX_TOTAL_LINES): divisor /= 2 if value >= divisor: inputs[i][j] = 1 value -= divisor # Determine the corresponding output indexOutput = MUX_SELECT_LINES for j, k in enumerate(inputs[i][:MUX_SELECT_LINES]): indexOutput += k * 2**j outputs[i] = inputs[i][indexOutput] pset = gp.PrimitiveSet("MAIN", MUX_TOTAL_LINES, "IN") pset.addPrimitive(operator.and_, 2) pset.addPrimitive(operator.or_, 2) pset.addPrimitive(operator.not_, 1) pset.addPrimitive(if_then_else, 3) pset.addTerminal(1) pset.addTerminal(0) creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", gp.PrimitiveTree, fitness=creator.FitnessMax) toolbox = base.Toolbox() toolbox.register("expr", gp.genFull, pset=pset, min_=2, max_=4) toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.expr) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("compile", gp.compile, pset=pset) def evalMultiplexer(individual): func = toolbox.compile(expr=individual) return sum(func(*in_) == out for in_, out in zip(inputs, outputs)), toolbox.register("evaluate", evalMultiplexer) toolbox.register("select", tools.selTournament, tournsize=7) toolbox.register("mate", gp.cxOnePoint) toolbox.register("expr_mut", gp.genGrow, min_=0, max_=2) toolbox.register("mutate", 
gp.mutUniform, expr=toolbox.expr_mut, pset=pset) def main(): # random.seed(10) pop = toolbox.population(n=40) hof = tools.HallOfFame(1) stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) algorithms.eaSimple(pop, toolbox, 0.8, 0.1, 40, stats, halloffame=hof) return pop, stats, hof if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/gp/parity.py0000644000076500000240000000560514456461441016007 0ustar00runnerstaff# This file is part of EAP. # # EAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # EAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with EAP. If not, see . 
import random import operator import numpy from deap import algorithms from deap import base from deap import creator from deap import tools from deap import gp # Initialize Parity problem input and output matrices PARITY_FANIN_M = 6 PARITY_SIZE_M = 2**PARITY_FANIN_M inputs = [None] * PARITY_SIZE_M outputs = [None] * PARITY_SIZE_M for i in range(PARITY_SIZE_M): inputs[i] = [None] * PARITY_FANIN_M value = i dividor = PARITY_SIZE_M parity = 1 for j in range(PARITY_FANIN_M): dividor /= 2 if value >= dividor: inputs[i][j] = 1 parity = int(not parity) value -= dividor else: inputs[i][j] = 0 outputs[i] = parity pset = gp.PrimitiveSet("MAIN", PARITY_FANIN_M, "IN") pset.addPrimitive(operator.and_, 2) pset.addPrimitive(operator.or_, 2) pset.addPrimitive(operator.xor, 2) pset.addPrimitive(operator.not_, 1) pset.addTerminal(1) pset.addTerminal(0) creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", gp.PrimitiveTree, fitness=creator.FitnessMax) toolbox = base.Toolbox() toolbox.register("expr", gp.genFull, pset=pset, min_=3, max_=5) toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.expr) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("compile", gp.compile, pset=pset) def evalParity(individual): func = toolbox.compile(expr=individual) return sum(func(*in_) == out for in_, out in zip(inputs, outputs)), toolbox.register("evaluate", evalParity) toolbox.register("select", tools.selTournament, tournsize=3) toolbox.register("mate", gp.cxOnePoint) toolbox.register("expr_mut", gp.genGrow, min_=0, max_=2) toolbox.register("mutate", gp.mutUniform, expr=toolbox.expr_mut, pset=pset) def main(): random.seed(21) pop = toolbox.population(n=300) hof = tools.HallOfFame(1) stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) algorithms.eaSimple(pop, 
toolbox, 0.5, 0.2, 40, stats, halloffame=hof) return pop, stats, hof if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/gp/spambase.csv0000644000076500000240000253473614456461441016453 0ustar00runnerstaff0,0.64,0.64,0,0.32,0,0,0,0,0,0,0.64,0,0,0,0.32,0,1.29,1.93,0,0.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.778,0,0,3.756,61,278,1 0.21,0.28,0.5,0,0.14,0.28,0.21,0.07,0,0.94,0.21,0.79,0.65,0.21,0.14,0.14,0.07,0.28,3.47,0,1.59,0,0.43,0.43,0,0,0,0,0,0,0,0,0,0,0,0,0.07,0,0,0,0,0,0,0,0,0,0,0,0,0.132,0,0.372,0.18,0.048,5.114,101,1028,1 0.06,0,0.71,0,1.23,0.19,0.19,0.12,0.64,0.25,0.38,0.45,0.12,0,1.75,0.06,0.06,1.03,1.36,0.32,0.51,0,1.16,0.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0,0.12,0,0.06,0.06,0,0,0.01,0.143,0,0.276,0.184,0.01,9.821,485,2259,1 0,0,0,0,0.63,0,0.31,0.63,0.31,0.63,0.31,0.31,0.31,0,0,0.31,0,0,3.18,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.137,0,0.137,0,0,3.537,40,191,1 0,0,0,0,0.63,0,0.31,0.63,0.31,0.63,0.31,0.31,0.31,0,0,0.31,0,0,3.18,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.135,0,0.135,0,0,3.537,40,191,1 0,0,0,0,1.85,0,0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.223,0,0,0,0,3,15,54,1 0,0,0,0,1.92,0,0,0,0,0.64,0.96,1.28,0,0,0,0.96,0,0.32,3.85,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.054,0,0.164,0.054,0,1.671,4,112,1 0,0,0,0,1.88,0,0,1.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.206,0,0,0,0,2.45,11,49,1 0.15,0,0.46,0,0.61,0,0.3,0,0.92,0.76,0.76,0.92,0,0,0,0,0,0.15,1.23,3.53,2,0,0,0.15,0,0,0,0,0,0,0,0,0.15,0,0,0,0,0,0,0,0,0,0.3,0,0,0,0,0,0,0.271,0,0.181,0.203,0.022,9.744,445,1257,1 0.06,0.12,0.77,0,0.19,0.32,0.38,0,0.06,0,0,0.64,0.25,0,0.12,0,0,0.12,1.67,0.06,0.71,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0,0,0,0.04,0.03,0,0.244,0.081,0,1.729,43,749,1 
0,0,0,0,0,0,0.96,0,0,1.92,0.96,0,0,0,0,0,0,0.96,3.84,0,0.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.96,0,0,0,0,0,0,0,0,0,0,0,0.462,0,0,1.312,6,21,1 0,0,0.25,0,0.38,0.25,0.25,0,0,0,0.12,0.12,0.12,0,0,0,0,0,1.16,0,0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.022,0.044,0,0.663,0,0,1.243,11,184,1 0,0.69,0.34,0,0.34,0,0,0,0,0,0,0.69,0,0,0,0.34,0,1.39,2.09,0,1.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.056,0,0.786,0,0,3.728,61,261,1 0,0,0,0,0.9,0,0.9,0,0,0.9,0.9,0,0.9,0,0,0,0,0,2.72,0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.083,7,25,1 0,0,1.42,0,0.71,0.35,0,0.35,0,0.71,0,0.35,0,0,0,5.35,0,0,3.21,0,2.85,0,0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.102,0,0.357,0,0,1.971,24,205,1 0,0.42,0.42,0,1.27,0,0.42,0,0,1.27,0,0,0,0,0,1.27,0,0,1.7,0.42,1.27,0,0,0.42,0,0,0,0,0,0,0,0,0,0,0,0,1.27,0,0,0.42,0,0,0,0,0,0,0,0,0,0.063,0,0.572,0.063,0,5.659,55,249,1 0,0,0,0,0.94,0,0,0,0,0,0,0,0,0,0,0,0,0,1.88,0,2.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.428,0,0,4.652,31,107,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.11,0,0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.975,0.37,0,35.461,95,461,1 0,0,0.55,0,1.11,0,0.18,0,0,0,0,0,0.92,0,0.18,0,0.37,0.37,3.15,0,0.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.182,0,0.455,0,0,1.32,4,70,1 0,0.63,0,0,1.59,0.31,0,0,0.31,0,0,0.63,0,0,1.27,0.63,0.31,3.18,2.22,0,1.91,0,0.31,0.63,0,0,0,0.31,0,0,0,0,0,0,0,0,0,0,0,1.59,0,0,0,0,0,0,0,0,0,0.275,0,0.055,0.496,0,3.509,91,186,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.729,0,0.729,0,0,3.833,9,23,1 0.05,0.07,0.1,0,0.76,0.05,0.15,0.02,0.55,0,0.1,0.47,0.02,0,0,0,0.02,0.13,2.09,0.1,1.57,0,0.05,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0.1,0,0,0,0,0,0,0,0,0,0,0,0.042,0.101,0.016,0.25,0.046,0.059,2.569,66,2259,1 
0,0,0,0,2.94,0,0,0,0,0,0,0,0,0,0,2.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.404,0.404,0,0.809,0,0,4.857,12,34,1 0,0,0,0,1.16,0,0,0,0,0,0,0.58,0,0,0,1.16,0,1.16,1.16,0,1.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.133,0,0.667,0,0,1.131,5,69,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.44,0,0,0,0,0.196,0,0.392,0.196,0,5.466,22,82,1 0.05,0.07,0.1,0,0.76,0.05,0.15,0.02,0.55,0,0.1,0.47,0.02,0,0,0,0.02,0.13,2.09,0.1,1.57,0,0.05,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0.1,0,0,0,0,0,0,0,0,0,0,0,0.042,0.101,0.016,0.25,0.046,0.059,2.565,66,2258,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.44,0,0,0,0,0.196,0,0.392,0.196,0,5.466,22,82,1 0,0,0,0,0,0,1.66,0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.368,0,0,2.611,12,47,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.352,0,0.352,0,0,4,11,36,1 0,0,0,0,0.65,0,0.65,0,0,0,0.65,0.65,0,0,0,0.65,1.3,0,1.3,5.22,1.3,0,0,0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.459,0,0.091,0,0,2.687,66,129,1 1.17,0,0,0,0,0,0,0,0,0,0,1.17,0,0,0,0,1.17,0,3.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.17,0,0,0,0,0,0,0.886,0,0,1.966,10,59,1 0,0,3.03,0,0,0,0,0,0,0,0,0,0,0,0,3.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.352,0,2.112,0,0,3.909,11,43,1 0,0,0,0,1.89,0.27,0,0,0,0,0,0.81,0,0,0,0.27,0,0,3.51,0,2.7,0,0,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.045,0,0,0.091,0,1.39,11,89,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.83,4.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.302,0,1.7,5,17,1 0,0.68,0,0,0,0,0,0,0,0.68,1.36,0,0,0,0,0,0,0,2.04,0,0.68,0,0,0,0.68,0,0,0.68,0,0,1.36,0,0,0,0.68,0,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0.185,0,0,0,3.826,30,264,1 
0,0,2.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.308,0,1.543,0,0,2.777,6,25,1 0,0,0,0,2.94,0,0,0,0,0,0,0,0,0,0,2.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.436,0.436,0,0.873,0,0,4.142,12,29,1 0,0,0.48,0,1.46,0,0.48,0,0,0,0,0.97,0,0,0,0.48,0.97,0,2.43,0,2.43,0,0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0.081,0,0.488,0.244,0,5.431,78,239,1 0,0.48,0.48,0,0.48,0,0,0,0,0,0,0.97,0,0,0,0.48,0,0.97,1.46,0,0.97,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0.963,0,0,3.1,61,186,1 0,0.41,1.66,0,0.41,0,0,0,0,0,0,0.41,0,0,0,0.41,0,0.83,2.08,0,1.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.41,0,0,0,0,0.068,0,0.75,0,0,3.851,121,285,1 0.3,0,0,0,0.61,0.92,0,2.45,0,0,0,0.3,1.53,0,0,0,0,0.3,2.76,0,0.61,0,0.3,0.61,0,0,0,0,0,0,0,0,0,0,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0,0,0.051,0,0.207,0.207,0,2.132,30,226,1 0,0,0,0,2.94,0,0,0,0,0,0,0,0,0,0,2.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.335,0.335,0,0.671,0,0,4,12,28,1 0,0,0,0,2.94,0,0,0,0,0,0,0,0,0,0,2.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.355,0.355,0,0.711,0,0,4,12,28,1 0,0,0.55,0,1.11,0,0.18,0,0,0,0,0.18,0.92,0,0.18,0,0.37,0.37,3.15,0,0.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.182,0,0.426,0,0,1.283,4,68,1 0,0,0,0,0.52,0,0.26,0.52,0,0.26,0.26,0.52,0,0,0,0.26,1.56,0.26,1.82,2.08,0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.073,0,0.813,0.036,0.147,2.145,38,339,1 0.15,0.45,1.05,0,0.45,0,0,1.81,0.6,0.75,0,0.9,0.3,0,0.3,0,0,0,4.07,0,1.51,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.15,0,0,0,0,0.25,0,1.318,0.068,0,5.301,130,774,1 0.18,0,0.18,0,1.57,0.36,0.06,0.06,0.06,0.12,0.06,0.54,0.3,0.06,0,0,0.72,0.06,4.54,0.24,1.09,0,0.84,0.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0.06,0,0,0,0.01,0.052,0,0.01,0.167,0,1.733,12,442,1 
0.49,0,0.99,0,0,0.99,0,0,0,0.99,0.99,2.48,0.49,0,0,4.97,0.99,0,3.48,0,1.99,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0.17,0,0,1.468,8,94,1 0.46,0.3,0.46,0,0.05,0.12,0.05,0.28,0.43,0.74,0.25,0.97,0.56,1.23,0,0.25,0.43,0.02,3.22,0,1.46,0,1.05,0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0,0,0,0,0.065,0,0.325,0.756,0.153,5.891,193,3040,1 0.46,0.46,0.26,0,0,0.33,0.06,0.33,0,1.12,0.39,0.73,0.79,0,0.26,0.26,0,0.26,3.51,0,0.66,0,0.19,0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.036,0.084,0,0.278,0.23,0.084,3.887,40,898,1 0,1.92,0,0,1.92,0,0,0,0,0,0,1.92,0,0,0,0,0,0,1.92,0,3.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.75,12,33,1 0.73,0.36,1.09,0,0,0.73,0.73,1.09,0.36,0.36,0,0.36,0,0,0,1.09,0.36,0.36,2.19,2.19,2.19,0,1.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0.498,0.332,0,3.254,30,179,1 0.06,0.12,0.77,0,0.19,0.32,0.38,0,0.06,0,0,0.64,0.25,0,0.12,0,0,0.12,1.67,0.06,0.7,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0,0,0,0.04,0.03,0,0.244,0.071,0,1.732,43,750,1 0,1.26,0,0,0,1.26,0,0,0,0,0,1.26,0,0,0,0,0,0,0,0,1.26,0,0,1.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.26,0,0,0,0,0.198,0,0.198,0.596,0,3.833,17,69,1 0.73,0.36,0.73,0,0,0.73,0.73,1.1,0.36,0.36,0,0.36,0,0,0,1.1,0.36,0.36,2.2,2.2,2.2,0,1.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.111,0,0.5,0.333,0,3.259,30,176,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.08,0,0,1.08,0,2.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.478,0,0,2,30,106,1 0,0,0,0,0,0,1.04,0,0,0,0,1.04,0,0,0,0,1.04,0,3.66,0,2.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.061,0.246,0,0.615,0.061,0.061,3.318,59,146,1 0,0,1.26,0,0,0,0,0,0,0,0,2.53,0,0,0,0,0,0,2.53,0,5.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.642,8,51,1 0,0.45,0.45,0,0.45,0,0,0,0,0,0,0.45,0,0,0,0.45,0,0.91,1.36,0,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.501,0,0,2.777,61,200,1 
0,0.42,1.68,0,0.42,0,0,0,0,0,0,0.42,0,0,0,0.42,0,0.84,2.1,0,1.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.42,0,0,0,0,0.066,0,0.669,0,0,3.837,121,284,1 0,0.59,0,0,0,0,0.59,0,0,0.59,0,0.59,0,0,0,0,0,1.18,1.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.257,0,0,0,0,8.586,66,249,1 0.23,0,0.47,0,0.23,0,0,0,0,0,0,0,0,0.23,0,0.23,0.23,0,7.1,0,1.89,0,0,0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.71,0,0,0,0.043,0.043,0,0.175,0,0,1.294,11,66,1 0,0,0.46,0,1.39,0,0.93,0.93,0,0,0.46,0.93,0,0,0,1.39,0,0.46,0.93,0,0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.46,0,0,0,0,0,0,0,0,0,0,0.069,0,0,0,0,0.069,1.442,8,75,1 0,0.34,0,0,0.68,0,0.68,0,0,0.34,0.34,0,0,0,0,0.34,0,1.36,3.42,0,2.73,0,0,0,0.34,0.34,0,0,0,0,0,0,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.048,0.048,0,1.411,15,96,1 0.12,0.24,0.12,0,1.32,0.36,0,0.36,0,0,0.36,0.72,0,0,0,0,0,0,4.1,0,3.01,0,0.12,0,0,0,0,0,0,0,0,0,0.12,0,0,0,0.12,0,0,0.12,0,0,0,0,0,0,0,0,0,0.059,0,0.019,0.019,0,1.714,34,180,1 0.66,0,0.66,0,0,0,0,0,0,0.66,0,0,0,0,0,1.98,1.32,0,1.32,0,1.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.205,0,0,3.184,34,121,1 0,0.48,0.48,0,1.46,0,0.48,0,0,0.97,0.48,0,0,0,0,0.48,0,0,0.97,0.48,1.95,0,0,0.48,0,0,0,0,0,0,0,0,0,0,0,0,1.46,0,0,0.48,0,0,0,0,0,0,0,0,0,0.073,0,0.589,0.294,0,4.85,47,194,1 0,0,0,0,0,0,1.47,0,0,1.47,0,1.47,0,0,0,0,0,0,5.88,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.5,0,0,1.214,3,17,1 0.3,0,0.61,0,0,0,0,0,0,0.92,0.3,0.92,0.3,0.3,0,2.15,0.61,0,5.53,0,1.23,0,0,0.3,0,0,0,0,0,0,0,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0.3,0,0,0,0,0.1,0,1.053,0.351,0.25,3.884,66,303,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.96,0,0,0,0,0,0,0,0,0,0,0,0,0,1.96,0,0,0,0,0,0,0,0,0,0,0,0,0.201,0,0,0.1,0,4.548,59,141,1 0,0,0,0,1.26,0,2.53,1.26,1.26,1.26,1.26,1.26,0,0,0,0,5.06,0,2.53,1.26,3.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.537,0,0,72.5,287,290,1 
0,0.53,0.53,0,0.53,0,0,0,0,0,0,0.53,0,0,0,0.53,0,1.06,1.6,0,1.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.087,0,0.877,0,0,3.4,61,187,1 0,0.44,0.89,0,0.44,0,0,0,0,0,0,0.44,0,0,0,0.44,0,0.89,2.24,0,1.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.44,0,0,0,0,0.073,0,0.807,0,0,3.849,121,281,1 0,0.46,0.46,0,0.46,0.46,0.46,0,0,0,0.46,0.46,0,0,0,0.92,0,0.92,2.76,0,1.38,0,0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.46,0,0,0,0,0.46,0,0,0,0,0,0,0.298,0.223,0,2.156,13,110,1 0,0,0.48,0,1.44,0,0.48,0,0,0,0,0.96,0,0,0,0.48,0.96,0,2.41,0,2.41,0,0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0.081,0,0.486,0.243,0,5.13,78,236,1 0,0.94,0.94,0,0,0,0,0,0,0.94,0,0,0,0,0,2.83,0,0,0.94,0,0.94,0,1.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.366,0,0,26.5,245,318,1 0,0,1.77,0,0,0,0,0,0,0,0,0.59,0,0,0,0,0,0.59,4.14,0,1.18,0,0,0,0,0,0,0,0,0,0,0,0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.165,0,0.165,0.082,0,2.325,16,100,1 0.75,0.18,0.37,0,0.18,0.12,0,0.25,0.75,0.31,0.25,1.51,0.31,0.37,0,0.37,0.44,0.12,2.96,0.69,1.26,0,0.44,0.75,0,0,0,0,0,0,0,0,0,0,0,0,0.12,0,0,0.12,0,0,0.06,0,0,0,0,0,0,0.085,0.053,0.437,0.234,0.064,3.675,45,1066,1 0,0.41,0.2,0,1.67,0.2,0.2,0,0,1.04,0.2,0,0.2,0,0,0.83,0.2,0,2.09,0,0.62,0,0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.41,0.62,0,0.2,0,0,0,0.132,0,0,1.65,15,175,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.26,5.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.338,0,1.666,5,10,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.55,0,1.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.431,0,0,4.071,29,114,1 0,0,0.23,0,0,0,0.23,0,0,0.95,0,0.47,0,0.23,0,0.23,0.95,0,2.38,0,1.9,0,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0.23,0.23,0,0,0,0,0,0,0,0,0,0,0,0,0.123,0,0.197,0,0.024,5.038,280,519,1 0,0.72,0.72,0,0,0,0,1.45,0,0,0.72,0,0,0,0,2.91,0,0.72,1.45,0,0,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.123,0,0.495,0,0,1.525,8,61,1 
0,0,1.28,0,1.28,1.28,0,0,0,0,0,0,0,0,0,1.28,0,0,2.56,0,1.28,0,1.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.181,0,0.724,0,0,3.071,9,43,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.278,0,0.834,0,0,5.13,27,118,1 0,0.46,0.46,0,1.4,0,0.46,1.86,0,0.93,0.46,0,0,0,0,1.86,0,0,0.93,0.46,1.4,0,0,0.46,0,0,0,0,0,0,0,0,0,0,0,0,1.4,0,0,0.46,0,0,0,0,0,0,0,0,0,0.071,0,0.571,0.214,0,4.63,64,213,1 0,0,0.38,0,1.15,0.76,0,0,0,0,0,0.38,0.38,0,0,0.38,0,0.38,2.69,0,2.3,0,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.122,0,0.061,0.061,0,1.775,20,158,1 0,0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0.79,1.58,1.58,3.96,0,1.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.79,0,0,0,0,0.268,0,0.268,0,0,2.815,26,107,1 0.06,0.06,0.47,0,0.4,0,0,0,0.67,0.06,0,0.33,0.13,0,0,0.2,0,0,1.14,0.13,1.21,0,0,0.06,0,0,0,0,0.06,0,0,0,0,0,0,0,0,0,0,0,0,0.13,0,0,0.06,0,0,0,0.021,0.107,0,0.096,0.085,0.01,3.353,144,845,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.153,0,0,0,0,3.8,23,38,1 0,0.56,1.12,0,2.24,0,1.12,0,0,0,0,0.56,0.56,0,0,0.56,2.8,0,3.93,0,1.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.25,0,1.083,0.333,0,4.974,140,194,1 0.47,0.31,0.47,0,0.05,0.13,0.05,0.26,0.44,0.76,0.26,0.97,0.58,1.26,0,0.26,0.44,0,3.25,0,1.5,0,1.05,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0,0,0,0.004,0.066,0,0.322,0.764,0.159,6.1,193,3038,1 0.59,0.44,0.29,0,0.14,0.03,0.03,0.14,0.56,0.67,0.29,0.67,0.59,1.23,0.03,0.22,0.44,0.07,3.43,0,1.53,0,0.59,0.63,0,0,0,0,0,0,0,0,0,0,0.03,0,0,0,0,0,0,0,0,0,0.07,0,0,0,0,0.075,0,0.613,0.532,0.137,7.3,763,2453,1 0.59,0.44,0.29,0,0.14,0.03,0.03,0.14,0.56,0.67,0.29,0.67,0.59,1.23,0.03,0.22,0.44,0.07,3.43,0,1.53,0,0.59,0.63,0,0,0,0,0,0,0,0,0,0,0.03,0,0,0,0,0,0,0,0,0,0.07,0,0,0,0,0.075,0,0.612,0.531,0.137,7.3,763,2453,1 
0.46,0,0.46,0,0,0,0,0.46,0,0,0,1.38,0,0,2.31,0,0.46,0.46,2.77,0,2.31,0,1.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.46,0,0,0,0,0,0,0,0,0,0.494,0,0.082,0.823,0,3.4,12,102,1 0,0,0.46,0,0,0,0.46,0,0,0,0.46,0,0,0,0,0,0,1.4,1.87,0,0,0.93,0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.46,0,0,0,0,0,0,0,0,0,2.676,32,91,1 0,0.35,0.7,0,0.35,0,0,0,0,0,0,0.7,0,0,0,1.05,0,0.7,2.11,0,1.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.35,0,0,0,0,0.122,0,1.284,0,0,3.639,61,313,1 0,0.43,0.43,0,0.43,0,0,0,0,0,0,0.43,0,0,0,0.43,0,0.86,1.29,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.147,0,0.736,0,0,2.81,61,222,1 0,0,0,0,0,0.6,0,0,0,1.21,0,0,0,0,0,0.6,0,0,1.21,0,0,0,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.207,0.518,0.414,0.31,0,0,4.897,17,191,1 1.24,0.41,1.24,0,0,0,0,0,0,0,0,0.41,0,0,0,0.41,0,0.82,3.73,0,1.24,0,0,0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.41,0,0,0,0,0.065,0,0.461,0.527,0,3.166,19,114,1 0,0,0,0,4.25,0,0.7,0,0,0,0,0,0,0,0,2.83,0,0,4.96,0,1.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.38,0,0,1.153,3,30,1 0,0,0.64,0,0,0.64,0,0,0,0,0,0,0,0,0,0.64,0,0,2.59,0,0,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0.094,0.189,0.284,0.662,0,0,10.068,131,292,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0.305,0.611,0,1.529,0,0,5.5,22,66,1 0,0,0.64,0,0,0.64,0,0,0,0,0,0,0,0,0,0.64,0,0,2.59,0,0,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0.094,0.189,0.284,0.662,0,0,10.068,131,292,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0.305,0.611,0,1.529,0,0,5.5,22,66,1 0,0,0.64,0,0,0.64,0,0,0,0,0,0,0,0,0,0.64,0,0,2.59,0,0,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0.094,0.189,0.284,0.662,0,0,10.068,131,292,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0.305,0.611,0,1.529,0,0,5.5,22,66,1 
0,0,0,0,0,0.79,0,0,0,0,0,0,0,0,0,0.79,0,0,1.58,0,0,0,0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0.79,0,0,0,0,0,0,0,0,0,0,0,0.115,0.231,0.347,0.462,0,0,5.793,22,168,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0.305,0.611,0,1.529,0,0,5.5,22,66,1 0,0,0,0,0,0,1.96,0,0,1.96,0,1.96,0,0,0,0,0,0,3.92,0,1.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.166,60,74,1 0,0,0,0,0,0,2.46,0,0,0,0,0,0,0,0,2.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.907,0,0,1.285,7,36,1 0,0,0,0,0,0.79,0,0,0,0,0,0,0,0,0,0,0,0,2.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.147,0,0,0,0,2.913,27,67,1 0,0,0.76,0,0.38,0,0.76,0,0,0,0,0.38,0,0,0,0,0,0.76,1.52,0,0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.177,0.059,3.836,79,211,1 0,0,0,0,0.95,0,0,0,0,0,0,0,0,0,0,0,0,0.47,0.95,0,0.95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.365,0,0,0,0,1.238,6,78,1 0.12,1.76,0.63,0,0.88,0,0.12,0.5,0.25,3.9,0.5,0.88,0.12,0,0,0.25,0.12,0,2.9,0.25,1.38,0,1.13,0.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.12,0,0,0,0,0,0.019,0.379,0.159,0,0.119,0,4.155,38,507,1 0,0,1.02,0,0.51,0,0,0,0,0,0,0,0,0,0,0.51,0,0,1.53,0,1.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.51,0,0,0,0,0.09,0,0.542,0,0,1.972,19,146,1 0.05,0.3,0.4,0,0.1,0.05,0,0.05,0.1,0,0,0.3,0.2,0,0.05,0,0,0.5,1.55,0.3,0.75,0,0.15,0.2,0.05,0.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.05,0.05,0,0,0,0,0.045,0,0.054,0.118,0,2.37,96,588,1 0.05,0.3,0.4,0,0.1,0.05,0,0.05,0.1,0,0,0.3,0.2,0,0.05,0,0,0.5,1.55,0.3,0.75,0,0.15,0.2,0.05,0.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.05,0.05,0,0,0,0,0.036,0,0.054,0.118,0,2.379,96,583,1 0,0,0,0,1.28,0,2.56,1.28,1.28,1.28,1.28,1.28,0,0,0,0,5.12,0,2.56,1.28,5.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.542,0,0,102.666,304,308,1 
0,0.55,0.55,0,2.23,0,1.11,0,0,0,0,0.55,0.55,0,0,0.55,2.79,0,3.91,0,1.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.248,0,1.158,0.331,0,4.875,140,195,1 0.05,0.3,0.4,0,0.1,0.05,0,0.05,0.1,0,0,0.3,0.2,0,0.05,0,0,0.5,1.55,0.3,0.75,0,0.15,0.2,0.05,0.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.05,0.05,0,0,0,0,0.045,0,0.054,0.118,0,2.37,96,588,1 0.05,0.3,0.4,0,0.1,0.05,0,0.05,0.1,0,0,0.3,0.2,0,0.05,0,0,0.5,1.55,0.3,0.75,0,0.15,0.2,0.05,0.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.05,0.05,0,0,0,0,0.036,0,0.054,0.118,0,2.379,96,583,1 0.5,0.46,0.34,0,0.15,0.03,0,0.19,0.57,0.65,0.3,0.73,0.65,1.27,0.03,0.23,0.42,0,3.08,0,1.34,0,0.5,0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0,0,0,0.011,0.077,0,0.335,1.281,0.125,7.202,595,2413,1 0,0.32,0.8,0,0.32,0,0.16,0,0,0.48,0.16,0,0.16,0,0.16,0.16,0,0.8,0.16,0.16,0.64,0,0,0,0,0,0,0.16,0,0,0,0,0.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.271,0.024,0.049,5.709,149,982,1 0,0,0,0,0.92,0,0.3,0,0,0,0,0,0,0,0,0,0,0,0.3,0,0.61,0,0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.046,0,0,0.092,0.322,0,2.074,49,278,1 0.16,0,0.67,0,0.33,0.16,0.33,0.84,0.16,0.5,0.33,1.51,0,0,0,0,1.68,0.33,2.18,1.68,3.69,0,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0,0,0,0,0,0,0.19,0,1.194,0.054,0,5.567,101,657,1 0.5,0,0.5,0,1.51,0,0,0,0,0,0.5,1.01,0,0,0,0,0,0,4.04,0,3.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0.5,0,0,0,0,0,0,0,0,0,1.01,0,0,0,0,0.089,0,0.089,0.178,0,3.416,53,164,1 0,0,0,0,0,0,0.59,0,0,0,0,1.19,0,0,0,0,0,0.59,4.76,0,1.19,0,0,0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.427,0,0,10,33,170,1 0,0,0,0,1.6,0,0.4,1.2,0,0.4,0,0.8,0,0,0,0,1.6,0.4,4,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.706,0.212,0,1.838,13,114,1 0.41,0,0.41,0,0,0.41,0,0,0,0,0,2.07,0,0,0,0.41,0,0,7.05,0,2.48,0,0.82,0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.97,0.149,0,32.142,335,450,1 
0,0,0.38,0,0.76,0,0.38,0,0,1.14,0,0,0,0,0,0.38,0.76,0,3.04,0,1.52,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.14,0,0,0,0,0.299,0,0.598,0.179,0,4.523,78,285,1 0,0,0,0,0.4,0.4,0.4,0.4,0,0,0.4,0,0,0,0,0.4,0,0,4,0,2,0,0,0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.121,0,0,1.979,12,95,1 0,0,1.12,0,0.56,0,0,0,0,0.56,0,0,0,0,0,0.56,0,0,2.25,0,1.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.56,0,0,0,0,0.101,0,0.606,0,0,2.36,19,144,1 0,0,0.8,0,1.44,0.16,0.16,0,0,0,0,0.64,0.8,0,0,0,0.16,0.16,1.6,0,0.47,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.024,0,0.299,0.174,0,1.891,24,174,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.26,0,0,0,0,0.215,0,0.215,0.431,0,4,25,76,1 0,0.39,0.39,0,0.19,0,0,0.19,0,0,0.39,0.39,0,0,0,0.98,0.19,0.39,0.59,0,0.78,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0,0,0,0,0,0,0.128,0,0.16,0.16,0,2.128,31,730,1 0,0.39,0.39,0,0.19,0,0,0.19,0,0,0.39,0.39,0,0,0,0.98,0.19,0.39,0.59,0,0.78,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0,0,0,0,0,0,0.128,0,0.16,0.16,0,2.128,31,730,1 1,0,0.33,0,0.66,0.66,0,0,0,0,0,0.33,0.66,0,0,0.66,0.66,0,2.33,0,0.33,0,1.66,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0.12,0.541,0,5.428,21,304,1 0,0,0,0,1.49,0,0,0,0,0,0,0,0,0,0,2.98,0,1.49,0,0,1.49,0,0,0,1.49,1.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.1,2,11,1 0,0,0,0,1.65,0,0,0,0.82,0,0,1.65,0,0,0,0.82,0,0,1.65,0,0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.415,0,0,0,0,1.769,11,69,1 1,0,0.33,0,0.66,0.66,0,0,0,0,0,0.33,0.66,0,0,0.66,0.66,0,2.33,0,0.33,0,1.66,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0.12,0.541,0,5.428,21,304,1 0,0,0,0,0,0,1.58,0,0,0,0,0,1.58,0,0,0,0,0,1.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.558,0.279,0,3.272,23,36,1 
0.5,0.46,0.34,0,0.15,0.03,0,0.19,0.57,0.65,0.3,0.73,0.65,1.27,0.03,0.23,0.42,0,3.08,0,1.34,0,0.5,0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0,0,0,0.011,0.077,0,0.335,1.281,0.125,7.202,595,2413,1 0,0,0,0,0,0,1.58,0,0,0,0,0,1.58,0,0,0,0,0,1.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.558,0.279,0,3.272,23,36,1 0,0,1.38,0,0,0,0,0,0,0,0,1.38,0,0,0,2.77,0,4.16,4.16,0,1.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.715,0,0,1.181,2,13,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.26,0,0,0,0,0.215,0,0.215,0.431,0,4.277,27,77,1 1,0,0.33,0,0.66,0.66,0,0,0,0,0,0.33,0.66,0,0,0.66,0.66,0,2.33,0,0.33,0,1.66,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0.12,0.541,0,5.428,21,304,1 0,0.29,0.72,0,0.29,0,0.14,0,0,0.43,0.29,0,0.14,0,0.14,0.14,0,0.72,0.58,0.14,0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.14,0,0,0,0,0.14,0,0,0,0,0,0,0.865,0.023,0.046,5.133,132,1001,1 0.36,0,1.09,0,0,0,0,0,0,0,0,0.72,1.81,0,0,0,0,0,0.72,0,1.09,0,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.063,0.126,0,0.063,0.126,0,2.562,35,123,1 0,0,0.27,0,0.81,0.81,0,2.98,0.54,0.81,0.27,0.54,0.27,0,0,0.81,1.63,0.27,2.17,1.35,2.44,0,0,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.04,0,0.565,0.121,0,1.617,18,131,1 0.39,0,0.39,0,0,0.39,0,0,0,0,0,0.39,0.78,0,0,0,1.17,0.78,3.13,0,1.17,0,0,1.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.261,0,0,1.461,19,114,1 0,0.56,0.56,0,2.25,0,1.12,0,0,0,0,0.56,0.56,0,0,0.56,2.82,0,3.95,0,1.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.247,0,1.32,0.33,0,5.135,140,190,1 0.67,0,0.67,0,2.7,0,0,0,0,0,0,0,0,0,0,0,0.67,0.67,4.05,0,1.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0,1.064,3,33,1 0,0,0.62,0,0.62,0,0,0,0,0.62,0,0,0,0,0,0.62,0,0,1.24,0,0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.105,0,1.055,0,0,2.033,16,120,1 
0,0,1.68,0,0.33,0,0,0,0,0.33,0,0,0,0,0,0.33,0,0,2.02,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0.06,0,0.484,0,0,1.796,19,203,1 0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,4.76,0,0,4.76,0,2.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.375,11,38,1 0,0,0,0,1.31,0,1.31,1.31,1.31,1.31,0,0,0,0,0,0,1.31,0,1.31,1.31,3.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.117,0.117,0,48.5,186,291,1 0,0,0,0,1.36,0.45,0.45,0,0,0,0,0,0.45,0,0,0.45,0.45,0.45,1.81,0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.135,0,0.135,0,0,5.571,46,117,1 0.42,0,0,0,0.85,0.85,0,0,0,0.42,0,2.13,0,0,0,0,1.7,0,0.85,0,0.85,0,0,0.42,0,0,0,0,0,0,0,0,0,0,0,0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.088,0,0,5.714,107,200,1 0,0,0,0,0.27,0,0,0,0,0.83,0,0,0,0,0,0,0,0,0.27,0,0.27,8.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.092,0,0.185,0.232,7.313,99,607,1 0,0,0,0,0.43,0,0,0,0,0.65,0,0,0,0,0,0.43,0,0.21,0.21,0,0.43,6.75,0,0,0.21,0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.073,0.146,0.146,0.183,6.233,99,642,1 0.46,0,0.46,0,0,0,0,0.46,0,0,0,1.38,0,0,2.31,0,0.46,0.46,2.77,0,2.31,0,1.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.46,0,0,0,0,0,0,0,0,0,0.49,0,0.081,0.816,0,3.4,12,102,1 0.14,0.14,0.29,0,0.29,0.29,0,0.29,0,0,0.29,0,0.14,0,0,0.87,0.29,0.43,3.66,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.29,0,0,0,0,0,0,0,0.58,0,0,0,0,0.024,0,0.265,0,0,3.121,38,437,1 0,0.34,0.68,0,0,0,0.34,0,0,0.34,0,0,0,0,0.34,0.68,0,1.37,1.03,0,1.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.094,0,0,0,0,3.131,13,119,1 0.46,0,0.46,0,0,0,0,0.46,0,0,0,1.38,0,0,2.31,0,0.46,0.46,2.77,0,2.31,0,1.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.46,0,0,0,0,0,0,0,0,0,0.49,0,0.081,0.816,0,3.4,12,102,1 0.62,0,0.62,0,0,0,0.62,0,0,0,0,3.1,0,0,0,0,1.24,1.24,5.59,0,1.86,0,0,0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.31,0,0.517,0,0,3.363,22,111,1 
0,0,0,0,2.1,0,1.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.5,34,108,1 0,0.71,0.35,0,0.35,0,0,0,0,0,0,0.71,0,0,0,0.35,0,1.42,1.77,0,1.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.058,0,0.7,0,0,3.768,61,260,1 0,0.3,0.61,0,0.3,0,0.15,0,0,0.45,0.15,0,0.15,0,0.15,0.15,0,0.76,0.15,0.15,0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.15,0,0,0,0,0,0,0,0,0,0,0,0.567,0.024,0.049,5.425,132,944,1 0,0,0,0,0,0,0.57,0,0,0.57,0,1.15,0.57,0,0,0,0,0.57,4.62,0,1.15,0,0,0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.518,0,0,10.117,33,172,1 0.52,0,2.38,0,0.26,0,0.26,0,0.52,0,0.26,0,0,0,0,0.79,0,0,1.32,0,1.05,0,0,0.52,0,0,0,0,0,0,0,0,0.26,0,0,0.26,0.26,0,0.52,0,0,0,0,0,0,0,0,0,0,0.656,0,0.31,0,0,5.549,71,566,1 0.17,0,0.08,0,0.42,0.08,0.08,0.42,0.08,0.08,0,0.6,0.17,0.17,0,0,0.17,0.08,1.2,0,3.17,0,0.34,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.081,0.027,0.095,0.013,0,4.07,48,574,1 0,0,1,0,0.5,0,0,0,0,0.5,0,0,0,0,0,0.5,0,0,2.5,0,1.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.5,0,0,0,0,0.357,0,0.892,0,0,2,19,172,1 0,0,0.54,0,0.54,0,0,0,0,0.54,0,0,0,0,0,0.54,0,0,1.64,0,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.54,0,0,0,0,0.096,0,1.443,0,0,1.969,16,130,1 0,0,0,0,0,0.78,0,2.34,0,0.78,0.78,1.56,0,0,0,0,0.78,0,3.12,0,0.78,0,0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.889,0,0,2.13,15,49,1 0,0,0,0,0,0,0,2.04,0,0,1.02,0,0,0,0,0,0,0,4.08,0,1.02,0,1.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.968,0,0,2.179,18,85,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,18.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.1,2,11,1 0.44,0,0,0,0.89,0,0,0,0,0.44,0,1.34,0,0,0,0.44,0,0,4.03,0,1.79,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.44,0,0,0,0,0,0,0.944,0.145,0.072,2.451,28,152,1 0,0.66,0.66,0,0.33,0,0,0,0,0,0,0.66,0,0,0,0.33,0,1.32,2.64,0,1.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0.053,0,0.583,0,0,4.024,121,326,1 
0,0,0,0,0,0,0,2.04,0,0,1.02,0,0,0,0,0,0,0,4.08,0,1.02,0,1.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.969,0,0,2.179,18,85,1 0.34,0.25,0.25,0,0.08,0.43,0.08,0.25,0.08,1.46,0.34,0.51,0.94,0,0.17,0.08,0,0,3.01,0,0.77,0.17,0.34,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.17,0,0,0,0,0.048,0,0.258,0.258,0.113,5.297,300,694,1 0.34,0.26,0.26,0,0.08,0.43,0.08,0.26,0.08,1.47,0.34,0.52,0.95,0,0.17,0.08,0,0,3.03,0,0.78,0,0.34,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.17,0,0,0,0,0.048,0,0.259,0.259,0.064,3.335,62,537,1 0.43,0,0,0,0.87,0.87,0,0,0,0.43,0,2.18,0,0,0,0,1.74,0,0.87,0,0.87,0,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.835,0,0,5.114,107,179,1 0.44,0,0,0,0.89,0,0,0,0,0.44,0,1.33,0,0,0,0.44,0,0,4.46,0,1.78,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.44,0,0,0,0,0,0,1.083,0.144,0.072,2.428,28,153,1 0,0,0,0,0,0,0,0,0,0.36,0,0,0,0,0,0,0,0,2.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.36,0,0,0,0,0.059,0,0.118,0,0,1.307,7,68,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.754,0,0,1,1,7,1 0,0.41,0.53,0,0.11,0.05,0,0.05,0.11,0,0,0.17,0.05,0,0,0.05,0,0.53,1.19,0.35,0.53,0,0.23,0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.064,0.01,0.032,0.14,0,1.364,14,303,1 0,0,0,0,6.25,0,3.12,0,0,0,0,3.12,0,3.12,0,3.12,0,0,6.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.428,60,66,1 2.12,0,0,0,0.53,0.53,0,0,0,1.59,0,1.59,0,0,0,1.59,0.53,0.53,6.91,0,1.59,0,0.53,0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.467,0,0.28,0.186,0,2.823,85,240,1 0,0,0,0,1.4,0.46,0.93,0,0,0,0,0,0.46,0,0,0.46,0.46,0,1.87,0,1.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.135,0,0.135,0,0,4,46,96,1 0,1.12,0.56,0,0.56,0.56,1.12,1.12,0,0,0.56,2.25,0,0,0,2.25,0,1.12,2.25,0,2.82,0,0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.416,5,51,1 
0,0,1.32,0,0.66,0,0,0,0,0,0,0.66,0,0,0,0,0.66,0,5.29,2.64,5.29,0,0,1.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.66,0,0,0,0,0,0,0,0,0,0,0,0.83,0.069,0,3.215,43,164,1 0,0.8,0,0,0.8,0,0,0,0,0.8,0,0.8,0,0,0,1.61,0,0.8,0.8,0,2.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.8,0,0,0,0,0,0,0,1.192,0,0,1.463,12,101,1 0,0.29,0.87,0,0.29,0,0.14,0,0,0.43,0.14,0,0.14,0,0.14,0.14,0,0.72,0.43,0.14,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.14,0,0,0,0,0,0,0,0,0,0,0,0.585,0.046,0.046,5.02,132,979,1 0.17,0,0.08,0,0.42,0.08,0.08,0.42,0.08,0.08,0,0.6,0.17,0.17,0,0,0.17,0.08,1.2,0,3.17,0,0.34,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.081,0.027,0.095,0.013,0,4.07,48,574,1 0,0,0,0,0,0,0,0,0,0.81,0,0.81,0,0,0,0,0,0,1.63,0,0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.147,0,0,0.294,0.147,0,2.333,11,63,1 0.54,0,1.08,0,0.54,0,1.08,0,0,0,0,0.54,0,0,0,0.54,0.54,0,4.32,0,1.08,0,1.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.54,0,0,0,0,0,0,1.18,0.252,0,5.323,68,181,1 0.17,0,0.08,0,0.42,0.08,0.08,0.42,0.08,0.08,0,0.6,0.17,0.17,0,0,0.17,0.08,1.2,0,3.17,0,0.34,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.081,0.027,0.108,0.013,0,4.07,48,574,1 0.53,0,1.07,0,0.53,0,1.07,0,0,0,0,0.53,0,0,0,0.53,0.53,0,4.3,0,1.07,0,1.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.53,0,0,0,0,0,0,1.183,0.253,0,5.454,68,180,1 0.51,0.51,0,0,0,0,0.51,0,0,0.51,0,0,0,0,0.51,2.07,0,2.07,1.03,0,1.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.135,0,0.067,0,0,2.676,17,91,1 0,0.54,0.54,0,2.19,0,1.09,0,0,0,0,0.54,0.54,0,0,0.54,3.29,0,3.84,0,1.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.241,0,1.045,0.321,0,5.047,140,212,1 0,0,0.38,0,1.15,0,0,0,0,0.77,0,0.38,0,0,0,0.38,0.77,0,2.7,0,1.15,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.15,0,0,0,0,0.061,0,0.985,0.184,0,3.923,78,255,1 0,0,0.39,0,1.17,0,0,0,0,0.78,0,0.39,0,0,0,0.39,0.78,0,2.73,0,1.17,0,0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.17,0,0,0,0,0.062,0,0.869,0.186,0,4,78,256,1 
0.43,0,0.43,0,0.43,0,0.86,0,0,0,0,0.43,0,0,0,0,0.86,0.43,1.29,0,4.76,0,0,1.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.14,0,0,0.915,0,0,3.891,47,144,1 0.45,0,0,0,0.68,0.45,0,0.45,0,0.22,0.22,0,1.6,0,0.45,0,0.91,1.83,1.83,0,0.68,0,1.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.91,0,0,0,0,0,0,0,0,0,0.037,0,0.187,0.112,0,3.184,30,363,1 0,0,1.12,0,0.56,0,0,0,0,0.56,0,0,0,0,0,0.56,0,0,2.25,0,1.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.56,0,0,0,0,0.102,0,0.615,0,0,2.403,19,137,1 0,0,0.55,0,0.55,0,0,0,0,0.55,0,0,0,0,0,0.55,0,0,1.67,0,0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.55,0,0,0,0,0.099,0,0.893,0,0,2.122,16,121,1 0,0,1.31,0,0.65,0,0,0,0,0,0,0.65,0,0,0,0,0,0,5.26,1.97,4.6,0,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.65,0,0,0,0,0,0,0,0,0,0,0,0.816,0.068,0,3.173,43,165,1 0,0,0.61,0,0,0,0.61,0,0,0,0,0,0,0,0,0,1.23,1.85,2.46,0,1.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.176,0,0.353,0,0,2.25,13,81,1 0.22,0.22,0.22,0,1.77,0.22,0.44,0.44,0.22,2.88,0,0.88,0.22,0,1.11,0.44,0,0.44,3.33,0,3.33,0,0.44,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0.563,0.15,0,86.65,1038,1733,1 0.34,0.42,0.25,0,0.08,0.42,0.08,0.25,0.08,1.63,0.34,0.51,0.94,0,0.17,0.08,0,0,3,0,0.94,0,0.34,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.17,0,0,0,0,0.063,0,0.287,0.223,0.079,3.314,62,537,1 0,0,0,0,0,0,1.04,0,0,0,0,0,0,0,0,2.08,0,0,2.08,0,2.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.812,11,61,1 0,0,0,0,0,0,1.33,0,0,0,0,0,0,0,0,0,0,0,1.33,0,5.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.202,1.417,0,29.125,223,233,1 0.54,0,1.08,0,0.54,0,1.08,0,0,0,0,0.54,0,0,0,0.54,0.54,0,4.32,0,1.08,0,1.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.54,0,0,0,0,0,0,1.182,0.253,0,5.454,68,180,1 0,0,0,0,2.5,0,0,0,0,0,0,0.62,0,0,0,0,1.25,0,3.12,0,0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,2.111,18,57,1 
0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0.9,0,1.81,3.63,0,2.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.9,0,0,0,0,0,0,0.165,0.165,0,6.266,41,94,1 0.07,0.02,0.15,0,0.25,0.2,0,0.02,0.15,0,0,0.25,0.25,0.07,0,0.05,0.22,0,0.05,0,0.02,0,0.37,0.02,0,0,0,0,0.02,0,0,0,0,0,0,0.05,0.3,0.02,0,0.02,0,0,0.02,0,0.02,0,0,0,0.011,0.022,0,0,0.022,0,1.423,20,965,1 0.07,0.02,0.15,0,0.25,0.2,0,0.02,0.15,0,0,0.25,0.25,0.07,0,0.05,0.22,0,0.05,0,0.02,0,0.37,0.02,0,0,0,0,0.02,0,0,0,0,0,0,0.05,0.3,0.02,0,0.02,0,0,0.02,0,0.02,0,0,0,0.011,0.022,0,0,0.022,0,1.423,20,965,1 0.17,0.26,1.07,0,0.35,0.62,0.53,0.17,0.62,0.8,0.26,1.25,0.17,0,0.62,0.62,0.08,1.43,2.5,0.17,1.16,0,0.89,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0.08,0,0,0,0,0.066,0,0.212,0.185,0.013,6.815,583,1329,1 0,0,0.48,0,0.96,0,0,0,0.48,0,0,0,0,0,0,0.96,0.96,0,1.44,0,0.48,0,0.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0.133,0.066,0.468,0.267,0,3.315,61,242,1 0.46,0,0.46,0,0,0,0,0.46,0,0,0,1.38,0,0,2.31,0,0.46,0.46,2.77,0,2.31,0,1.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.46,0,0,0,0,0,0,0,0,0,0.49,0,0.081,0.816,0,3.4,12,102,1 1.03,0,0.68,0,1.03,0,0.68,0,0,0.68,0,0.68,0,0,0.34,0.68,0,0,5.86,0,1.37,0,0.34,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0,0,0,0,0,0.366,0.061,0,1.895,12,91,1 0,0,0.18,0,0.18,0,0,0,0.54,0.36,0.36,0.9,0,0.36,0,0.72,0,0.18,2.7,0.18,0.72,0,0,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0.36,0,0,0,0,0.18,0,0,0,0,0,0,0,0,0.633,0.063,0,9.043,363,841,1 0.26,0.26,0.52,0,0.39,0,0.39,0.13,0,0.26,0,0.78,0.26,0,0,1.57,0,0.26,2.61,0,1.57,0,0.13,0.39,0,0,0,0,0,0,0,0,0,0,0,0,0.13,0,0,0,0,0,0,0,0,0,0,0,0,0.129,0,0.779,0.021,0.021,2.689,49,476,1 0,0,0.32,0,0.65,0.65,0.32,0.32,0,0,0,0.32,0.32,0,0,0.32,0.32,0,2.28,0,3.25,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0.104,0,0,0.157,0.052,1.537,10,143,1 0,0,0.32,0,0.64,0.64,0.32,0.32,0,0,0,0.32,0.32,0,0,0.32,0.32,0,2.27,0,3.24,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0.106,0,0,0.159,0.053,1.537,10,143,1 
0.19,0.19,0.39,0,0.19,0,0,0.59,0,0,0,0.39,0,0,0,0.59,0.39,1.37,4.52,0,3.14,0,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.258,0.032,0,3.112,43,305,1 0.46,0,0,0,0.69,0.46,0,0.46,0,0.23,0.23,0,1.61,0,0.46,0,0.92,1.84,1.84,0,0.69,0,1.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.92,0,0,0,0,0,0,0,0,0,0.037,0,0.188,0.112,0,3.105,30,354,1 0,0,0.71,0,0.71,0,0,0,0,0,0,0,0,0,0,0.71,0,0,1.42,0,0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.71,0,0,0,0,0.121,0,1.094,0,0,2.021,16,95,1 0,1.49,0,0,0,0,2.98,0,0,1.49,0,0,0,0,0,1.49,2.98,0,0,0,2.98,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.171,0,0,0.171,0.171,13,140,156,1 0,0,0.16,0,0.33,0,0.16,0,0.5,0,0.16,0,0,0,0,0.5,0,1.5,0.66,0,0,0,0,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.088,0,0.884,0.752,0.022,5.328,47,1087,1 0,0,1.1,0,0.55,0,0,0,0,0.55,0,0,0,0,0,0.55,0,0,2.2,0,1.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.55,0,0,0,0,0.097,0,0.683,0,0,2.338,19,145,1 0.16,0.32,0.65,0,0.32,0,0.16,0,0,0.49,0.16,0,0.16,0,0.16,0.16,0,0.81,0.32,0.16,0.98,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0,0,0,0,0,0,0,0,0.773,0.08,0.08,6.586,132,955,1 0,0,0.72,0,1.81,0,0,0,0,0.36,0,0.36,0,0,0,0,0.72,0,0.72,0,0.36,0,0,0,0,0,0,0,0,0,0,0,0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.053,0.265,0,0,0,0,1.543,13,88,1 0.84,0.84,0,0,0,0,1.69,0,0.84,0.84,0,0.84,0,0,0,10.16,0.84,0,0.84,0,2.54,0,0,0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.113,0.278,0.092,173,418,519,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.29,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.103,3,32,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.29,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.103,3,32,1 0.17,0,0.08,0,0.43,0.08,0.08,0.43,0.08,0.08,0,0.6,0.17,0.17,0,0,0.17,0.08,1.2,0,3.14,0,0.34,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.081,0.027,0.088,0.013,0,4.16,48,1140,1 
0,0.54,0.54,0,1.09,0.54,2.18,0,0,0.54,0,0.54,0,0,0,0,0,0.54,3.27,0,1.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.54,0,0,0,0,0.157,0,0.471,0,0.078,15.08,147,377,1 0,0,0.42,0,0,0,0,0,0,0,0,0.85,0,0,0,0.85,0,0.85,4.7,0,0.85,5.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.42,0,0,0,0,0,0,0,0,0,0,0,0,0.082,0,0,0.082,0.248,7.17,42,294,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.05,0,0,0,0,2.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.05,0,0,0,0,0,0,0,1.806,0,0,1.293,5,75,1 0,0,0.45,0,0.22,0.22,0,0,0.67,0.45,0.22,0.9,0,0,0,0.22,0,0,1.35,0,1.12,0.22,0.22,0.22,0,0,0,0,0,0,0,0,0,0,0,0.45,0,0,0,0,0,0,0,0,0,0,0,0,0.064,0.258,0,0.129,0.193,0,7.258,71,617,1 0,0.55,0.55,0,1.11,0.55,2.23,0,0,0.55,0,0.55,0,0,0,0,0,0.55,3.35,0,1.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.55,0,0,0,0,0.159,0,0.479,0,0.079,16.739,147,385,1 0,0,0,0,0,1.12,0,2.24,0,0,1.12,1.12,0,0,0,0,0,0,4.49,0,0,0,1.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.814,0,0,2.6,15,39,1 0,0,0.32,0,0.65,0.65,0.32,0.32,0,0,0,0.32,0.32,0,0,0.32,0.32,0,2.28,0,3.26,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0.105,0,0,0.158,0,1.494,10,139,1 0,0,1.43,0,0.71,0,0,0.71,0,0.71,0,0,0,0,0,0,2.87,2.87,1.43,0,3.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.937,0,0,11.888,116,214,1 0,0.55,0.55,0,1.11,0.55,2.23,0,0,0.55,0,0.55,0,0,0,0,0,0.55,3.35,0,1.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.55,0,0,0,0,0.159,0,0.479,0,0.079,16.739,147,385,1 0,0,0,0,0,0,1.07,0,0,0,0,0,0,0,0,2.15,0,0,0,0,2.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.07,0,0,0,0,0,0,0,0,0,0,0,0.145,0.437,0.291,1.823,10,62,1 0,0,0.47,0,0.95,0,0,0,0.47,0,0,0,0,0,0,0.95,0.95,0,1.42,0,0.47,0,0.95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.47,0,0,0,0,0.131,0.065,0.461,0.263,0,3.493,61,255,1 0,0,0.15,0,0.31,0,0.15,0,0.63,0.15,0.15,0,0,0,0,1.11,0,1.27,0.79,0,0,0,0,0,0.95,0,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0.15,0,0,0,0,0,0,0,0,0,0.088,0,0.862,0.707,0.022,5.423,51,1128,1 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.39,0,0.39,0,0,0,3.58,0.39,0,0,0,0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0.39,0.39,0,0,0,0,0,0,0,0,0,2.5,21,130,1 0,0,0,0,0,2.3,0,0,0,0,0,0.76,0.76,0,0,0,0,0,2.3,0,1.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.829,0,2.766,0.829,0,5.607,25,157,1 0.08,0.16,0.32,0,1.38,0.16,0.08,0,0.24,0.08,0,1.3,0,0.08,0,0.48,0.08,0.08,3.5,0,0.73,0,0.08,0.16,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.085,0.061,0.39,0.097,0.012,5.594,119,1561,1 0.48,0.2,0.55,0,0.27,0.2,0,0.27,0.27,0.97,0.41,1.04,0.13,0,0,1.11,0.69,0.06,2.37,0,1.04,0,0.06,0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0,0,0,0.13,0,0,0,0,0.105,0,0.75,0.305,0,3.401,94,966,1 0.48,0.2,0.55,0,0.27,0.2,0,0.27,0.27,0.97,0.41,0.97,0.13,0,0,1.11,0.69,0.06,2.23,0,0.97,0,0.06,0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0,0,0,0.13,0,0,0,0,0.105,0,0.75,0.305,0,3.401,94,966,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.453,0,0,4.153,26,54,1 0,0,1.42,0,0.71,0,0,0.71,0,0.71,0,0,0,0,0,0,2.85,2.85,1.42,0,3.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.931,0,0,12.055,117,217,1 0.8,0,0.8,0,1.6,0,0,0,0,0,0,0,0,0,0,0.8,0.8,0,1.6,0,2.4,0,0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.62,0.347,0,2.604,22,125,1 0,0,0.33,0,0.99,0.99,0.33,0.33,0,0,0,0.33,0.33,0,0,0.33,0.33,0,1.98,0,3.3,0,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0.108,0,0,0.162,0.054,2.195,50,202,1 0.07,0.37,0.81,0,0.51,0.29,0.07,0,0.07,0.37,0.07,1.48,0.14,0,0.07,0,0.14,0.44,3.55,0,1.85,0,0,0.14,0,0,0,0,0,0,0,0,0,0,0,0,0.14,0,0,0,0,0,0,0,0,0,0,0,0.049,0.069,0,0.159,0.159,0.009,3.456,44,802,1 0,0,0.33,0,0.99,0.99,0.33,0.33,0,0,0,0.33,0.33,0,0,0.33,0.33,0,1.98,0,3.3,0,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0.107,0,0,0.161,0.053,2.195,50,202,1 
0,0,0.15,0,0.31,0,0.15,0,0.63,0.15,0.15,0,0,0,0,1.11,0,1.27,0.79,0,0,0,0,0,0.95,0,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0.15,0,0,0,0,0,0,0,0,0,0.088,0,0.862,0.707,0.022,5.423,51,1128,1 0,0,0.62,0,1.24,0.62,0,0,0,0,0,0,0,0,0,0.31,0,0,2.48,0,0.93,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0.086,0,0.043,0,0,1.741,14,155,1 0,0.34,0.69,0,0.34,0,0.17,0,0,0.51,0.17,0,0.17,0,0.17,0.17,0,0.86,0.17,0.17,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.17,0,0,0,0,0,0,0,0,0,0,0,0.665,0.083,0.083,6.294,132,963,1 0,0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0.79,0.79,1.58,3.17,0,1.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.79,0,0,0,0,0.271,0,0.271,0.135,0,3.257,26,114,1 0.14,0.14,0.29,0,0,0,0,0,1.17,0.29,0.14,0.58,0,0,0,0.14,0,0.14,2.35,0.14,0.88,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0.29,0,0,0,0,0,0,0,0,0,0,0,0.204,0,0.204,0.127,0.102,2.962,73,400,1 0,0,0,0,1.11,0,0,0,0,0,0,0,0,0,0,0,0,0,2.22,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.784,0,0,5.687,39,91,1 0,0,1,0,0,0.25,0,0.25,0,0,0,1.5,0.25,0,0,0.25,0.5,0,2.5,0,1.5,0,0,0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.203,0.203,0,2.866,34,129,1 0.58,0,0,0,2.33,0,1.16,0,0,0,0.58,0,0,0.58,0,0.58,0,0.58,2.92,1.16,2.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0.09,0.09,0,1.829,9,75,1 0.14,0.14,0.29,0,0,0,0,0,1.17,0.29,0.14,0.58,0,0,0,0.14,0,0.14,2.35,0.14,0.88,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0.29,0,0,0,0,0,0,0,0,0,0,0,0.204,0,0.204,0.127,0.102,2.962,73,400,1 0.14,0.14,0.29,0,0,0,0,0,1.17,0.29,0.14,0.58,0,0,0,0.14,0,0.14,2.35,0.14,0.88,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0.29,0,0,0,0,0,0,0,0,0,0,0,0.204,0,0.204,0.127,0.102,2.962,73,400,1 0,0,0.58,0,1.17,0,0.58,0,0,0,0,0.58,0,0,0,0.58,0,0,1.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.829,0,0,2.529,8,86,1 0.25,0.25,0,0,0.75,0,0,0,0.25,0.75,0,1.51,0,1.26,0,0,0.5,0,3.29,0,1.01,0,0,0,0,0,0,0,0,0,0,0,0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.082,0,0.041,0.124,0.124,3.181,32,210,1 
0,0,0,0,6.25,0,3.12,0,0,0,0,3.12,0,0,0,0,0,0,6.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.428,60,66,1 0,0.57,0.57,0,1.14,0.57,2.28,0,0,0.57,0,0.57,0,0,0,0,0,0.57,3.42,0,1.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.57,0,0,0,0,0.081,0,0.487,0,0.081,16.217,147,373,1 0,0.17,0,0,0,0,0.17,0.52,0,0.17,0.35,0.52,0,0,0,0,0.17,0.7,0.88,0,0.7,1.93,0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.028,0.143,0.028,0.085,0.057,0.229,3.564,39,417,1 0,0,0.47,0,0.95,0,0,0,0.47,0,0,0,0,0,0,0.95,0.95,0,1.42,0,0.47,0,0.95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.47,0,0,0,0,0.133,0.066,0.401,0.267,0,3.459,61,256,1 0,0.57,0.57,0,1.14,0.57,2.28,0,0,0.57,0,0.57,0,0,0,0,0,0.57,3.42,0,1.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.57,0,0,0,0,0.081,0,0.487,0,0.081,16.217,147,373,1 0,0.34,0.69,0,0.34,0,0.17,0,0,0.51,0.17,0,0.17,0,0.17,0.17,0,0.86,0.34,0.17,0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.315,0,0.026,6.364,149,942,1 0,0.57,0.57,0,1.14,0.57,2.28,0,0,0.57,0,0.57,0,0,0,0,0,0.57,3.42,0,1.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.57,0,0,0,0,0.081,0,0.487,0,0.081,16.217,147,373,1 0,1.63,0,0,0,0,3.27,0,0,0,0,0,0,0,0,1.63,1.63,0,0,0,1.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.63,0,0,0,0,0,0,0,0,0,0,0,0,0.36,0,0.54,0.18,0.18,14.818,140,163,1 0,0,0.14,0,0.29,0,0.14,0,0.58,0,0.29,0,0,0,0,0.87,0,1.46,0.58,0,0,0,0,0,0,0,0,0.43,0,0,0,0,0,0,0,0,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0.764,0.784,0.02,4.979,45,1200,1 0,0.38,0.76,0,0.38,0,0.19,0,0,0.57,0.19,0,0.19,0,0.19,0.19,0,0.95,0.19,0.19,0.95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0.632,0.03,0.09,6.789,132,869,1 0.4,0,0.6,0,0.2,0.6,0.2,0.6,0.2,0.2,0.2,1.2,0,0,0,0.4,1.61,0.4,2.21,1.81,2.62,0,0.2,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.096,0,1.453,0.129,0,3.946,64,513,1 0,0,0,0,0,0,0,0,0.91,0,0,0,0,0,0,0,0,0,0.91,0,2.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.132,0,0.531,0,0,2.9,28,87,1 
0,0,0.15,0,0.3,0,0.15,0,0.61,0,0.3,0,0,0,0,0.92,0,1.53,0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0,0,0.083,0,0.732,0.753,0.02,5.058,45,1128,1 0,0.52,0.52,0,0.52,0,0,0,0,0,0,0,0,0,0,0.52,0,0,1.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.192,0,0.867,0,0,2.22,20,131,1 0,0,0.85,0,0.42,0,0,0,0,0,0,0,0,0,0,0.42,0,0,2.14,0,1.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.42,0,0,0,0,0.159,0,1.117,0,0,1.206,7,117,1 0.18,0,0.18,0,1.57,0.36,0.06,0.06,0.06,0.12,0.06,0.54,0.3,0.06,0,0,0.72,0.06,4.48,0.24,1.15,0,0.84,0.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0.06,0,0,0,0.01,0.052,0,0.01,0.167,0,1.733,12,442,1 0.1,0.1,0.73,0,0.2,0.1,0.2,0.62,0.1,0.31,0.31,1.04,0,0,0,0.1,1.14,0.31,2.4,0.93,2.92,0,0,0.2,0.1,0.1,0,0,0,0,0,0,0,0,0,0,0.1,0,0.1,0.1,0,0,0,0,0,0,0,0,0,0.163,0,0.785,0.065,0,4.064,92,817,1 0,0,0,0,0,0,5.4,0,0,0,0,0,0,0,0,5.4,0,0.9,1.8,0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.841,0.21,0,24.785,295,347,1 0.17,0.17,0.71,0,0.53,0.17,0.17,0.89,0.17,0.53,0.35,1.61,0,0,0,0,1.79,0,1.97,1.61,4.12,0,0,0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.17,0,0,0,0,0,0,0,0,0,0.115,0,1.158,0.057,0,5.163,63,599,1 0.08,0.17,0.34,0,1.46,0.17,0.08,0,0.25,0.08,0,1.37,0,0.08,0,0.51,0.08,0.08,3.43,0,0.77,0,0.08,0.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.077,0.064,0.348,0.103,0.012,5.392,119,1456,1 0,0.46,0,0,1.15,0,0.23,0.23,0,0.46,0,0.69,0.23,0,0,0,0.69,0.69,2.76,0,1.84,0,0,0.23,0,0,0,0,0,0,0,0,0,0,0,1.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0.036,0.036,0.841,0.036,0,1.862,52,285,1 0,0,0.39,0,0.78,0,0,0.06,0.06,0.19,0.13,0.26,0.13,0,0,0,0,0,0.32,0,0.06,0,0,0,0,0,0,0,0,0.06,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0,0,0,0,0,0.032,0,0,0.032,0,1.206,15,240,1 0,0,0,0,0,0,0,1.05,0,0,0.52,1.05,0.52,0,0,1.05,0,0,3.7,1.05,1.05,0,1.58,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.702,0.263,0,6.487,47,266,1 
0,0,0.32,0,0.64,0.64,0.32,0.64,0,0,0,0.32,0.32,0,0,0.32,0.32,0,2.27,0,3.24,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0.105,0,0,0.157,0,1.494,10,139,1 0.54,0,0.54,0,1.63,0,0,0,0,0,0,0.54,0,0,0,0.54,0.54,0,2.17,0,5.97,0,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.715,0.318,0,2.345,22,129,1 1.63,0,1.63,0,0,0,0,0,1.63,0,0,0,0,0,0,1.63,0,0,3.27,0,3.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.759,0.253,0,2,16,36,1 0,1.32,0.56,0,0,0.94,0,0.18,0.37,0.75,0,2.07,0,0,0,0,0.37,0,2.45,0,0.94,0,0,0.18,0,0,0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0.242,0.107,0,2.623,35,244,1 0.35,0,0.35,0,0.35,0.7,0.35,1.41,0,0,0.35,1.06,0,0,0,0.7,1.06,0,5.3,2.82,2.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.35,0,0,0,0,0,0,0.411,0,0,2.917,60,213,1 0.34,1.03,0.34,0,1.03,0,2.41,0.34,0,1.72,2.06,2.06,0.68,0,0.34,0,0,3.44,4.13,0,2.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.116,0,0,0,0,1.888,6,68,1 0,1.32,0.56,0,0,0.94,0,0.37,0.37,0.75,0,2.07,0,0,0,0,0.37,0,2.45,0,0.94,0,0,0.18,0,0,0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0.295,0.107,0,2.542,34,239,1 0.64,0,0.64,0,1.28,0,0.64,0,0,0,0,0.64,0,0,0,0.64,0.64,0,1.28,0,3.2,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.551,0.459,0,2.333,22,119,1 0,0,0.32,0,0.65,0.65,0.32,0.32,0,0,0,0.32,0.32,0,0,0.32,0.32,0,2.28,0,3.26,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0.106,0,0,0.159,0,1.494,10,139,1 0.64,0,0.64,0,1.28,0,0.64,0,0,0,0,0.64,0,0,0,0.64,0.64,0,1.28,0,2.56,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.554,0.369,0,2.333,22,119,1 0,0,0.56,0,0,0.18,0,0,0,1.32,0,0.75,0.75,0.18,0,0.18,0,0,0.94,0,0,0,0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.14,0,0.07,0.07,0,2.616,23,191,1 0,0,0,0,0,0,0,0,0.91,0,0,0,0,0,0,0,0,0,0.91,0,2.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.133,0,0.532,0,0,2.9,28,87,1 
0,0,0,0,0,0,0,0,0,0,0,1.81,0,0,0,0,0,0,0,0,1.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.309,0,0,1.333,11,56,1 0,0,0,0,1.29,0.43,0.43,0,0,0,0,0,0.43,0,0,0.43,0.43,0.43,1.72,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.129,0,0.129,0,0,5.8,46,116,1 0,0,0.86,0,0.43,0,0,0,0,0,0,0,0,0,0,0.43,0,0,2.17,0,1.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.43,0,0,0,0,0.161,0,1.133,0,0,1.2,6,114,1 0,0.68,0.34,0,0.34,0,0,0,0,0,0,0.68,0,0,0,0.34,0,1.37,1.72,0,1.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.055,0,0.718,0,0,3.718,61,264,1 0,0,0.16,0,0.16,0,0.16,0,0.65,0.16,0.16,0,0,0,0,1.64,0,0.65,0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0,0,0,0,0,0,0.066,0,0.906,0.663,0,5.289,52,1116,1 0.18,0,0.18,0,1.57,0.36,0.06,0.06,0.06,0.12,0.06,0.54,0.3,0.06,0,0,0.72,0.06,4.49,0.24,1.09,0,0.85,0.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0.06,0,0,0,0.01,0.052,0,0.01,0.167,0,1.74,12,442,1 0,0.11,0.23,0,0.58,0.34,0.11,0,0.34,0,0.23,0.92,0.46,0,0,0.46,0.23,0.34,0.58,0,0.58,0,0.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0,0,0,0,0.075,0.037,0,0.322,0.094,0.018,2.576,48,389,1 0,0,0,0,0,0,0,0,0,0,1.23,1.23,0,0,0,0,0,0,2.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.284,0,0,1.357,5,19,1 0,0,0,0,0.91,0,0.91,0,0,0.91,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.134,0,0.672,0.269,0,4.35,31,87,1 0,0,1.31,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0,0,1.31,0,5.26,0,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.199,0,4.818,25,53,1 0,1.31,0.56,0,0,0.93,0,0.18,0.37,0.75,0,2.06,0,0,0,0,0.37,0,2.44,0,0.93,0,0,0.18,0,0,0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0.268,0.107,0,2.698,42,251,1 0,1.32,0.56,0,0,0.94,0,0.18,0.37,0.75,0,2.07,0,0,0,0,0.37,0,2.45,0,0.94,0,0,0.18,0,0,0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0.241,0.107,0,2.623,35,244,1 
0,0,0.32,0,0.65,0.65,0.32,0.32,0,0,0,0.32,0.32,0,0,0.32,0.32,0,2.28,0,3.25,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0.105,0,0,0.157,0,1.494,10,139,1 0,0,0,0,0,0,0,0.67,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.34,0,0,0,0,0,0,0,0,0,0,0,0.17,0.511,0.085,0.511,0,0,4.617,27,217,1 0,0.62,1.24,0,0.31,0,0,0,0,0,0,0.62,0,0,0,0.31,0,1.24,2.49,0,1.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0,0,0.05,0,1.152,0,0,4.592,121,349,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.266,4,19,1 0,0,0,0,0,0,0,0.67,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.34,0,0,0,0,0,0,0,0,0,0,0,0.17,0.511,0.085,0.511,0,0,4.617,27,217,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.266,4,19,1 0,0,0.17,0.52,0.17,0,0.17,0,0.69,0.17,0.17,0,0,0,0,1.74,0,0.69,1.04,0,0.17,0,0,0,0.17,0,0,0,0,0,0,0,0,0.17,0,0,0.34,0,0,0.17,0,0,0,0,0,0,0,0,0,0.072,0,0.754,0.681,0,4.74,52,967,1 0,1,1,0,2,0,1,0,0,0,0,0,0,0,0,0,2,3,2,0,4,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.275,0.137,0,2.538,11,33,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.05,1.05,0,3.15,0,2.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.05,0,0,0,0,0.351,0,0.351,0.175,0,3.343,28,107,1 0,0,0.16,0.16,0.32,0,0.16,0,0.65,0.16,0.16,0,0,0,0,2.13,0,0.65,0.98,0,0,0,0,0,0.49,0,0,0,0,0,0,0,0,0,0,0.16,0.32,0,0,0.16,0,0,0,0,0,0,0,0,0,0.089,0,0.693,0.67,0,4.835,52,1030,1 0,0,1.53,0,0,0,0,0,0,0,1.53,0,0,0,0,0,0,0,3.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.53,0,0,0,0,0,0,1.434,0,0,7.055,75,127,1 0,0,1.31,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0,0,1.31,0,5.26,0,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.201,0,4.5,25,54,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.28,0,0,3.84,0,3.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.28,0,0,0,0,0,0,0.23,0.23,0,5.538,41,72,1 
0,0,0.47,0,0,0,0.94,0,0,0,0,0.47,0,0,0,0,0.47,0,0.94,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.47,0,0,0,0,0.072,0,0.217,0,0,1.48,11,77,1 0.25,0,0,0,0.51,0.51,0,0,0.25,0,0.25,0,0.25,0,0,0,0.25,0,2.81,0,0.25,0,0.25,0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.041,0,0.209,0.209,0,2.776,75,211,1 0,0,0.73,0,0.36,0,0,0,0,0,0,0.73,0,0,0,0.36,0.73,0,1.09,0,1.46,0.36,0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.36,0,0,0,0,0,0,0,0,0,0,0,0.604,0.181,0,3.787,58,356,1 0.64,0,0.64,0,1.93,0,0,0,0,0,0,1.29,0,0,0,1.29,0.64,0,1.93,0,2.58,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.462,0.37,0,2.44,22,122,1 0,0,0,0,0,0,0,1.29,0,0.43,0,0,0,0,0,0.43,0,1.73,0.43,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0.124,0,0.31,0.062,0,1.477,8,65,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.28,0,0,3.84,0,3.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.28,0,0,0,0,0,0,0.23,0.23,0,5.538,41,72,1 0,0,0,0,0,0.6,0.6,0,0,0,0.6,0,0,0,0,0,0,1.21,1.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.095,0,0,0,0,1.583,11,38,1 0,0,0.32,0,0.65,0.65,0.32,0.32,0,0,0,0.32,0.32,0,0,0.32,0.32,0,2.28,0,3.25,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0.106,0,0,0.159,0,1.494,10,139,1 0,0,1.29,0,0,0,0,0,0,0,0,1.29,0,0,0,0,0,0,1.29,0,5.19,0,0,1.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.198,0,4.23,25,55,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.28,0,0,3.84,0,3.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.28,0,0,0,0,0,0,0.23,0.23,0,5.538,41,72,1 0.63,0,0.63,0,1.27,0,0.63,0,0,0,0,0.63,0,0,0,0.63,0.63,0,1.27,0,2.54,0,0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.1,0,0,0.501,0.3,0,2.458,22,118,1 0.65,0,0.65,0,1.3,0,0,0,0,0,0,0.65,0,0,0,1.3,0.65,0,1.96,0,2.61,0,0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.834,0.463,0,2.44,22,122,1 
0.19,0.19,0.19,0.19,1.16,0,0,0,0.58,0.38,0,0,0,0,0,0,0,0,0.19,0.38,0.58,0,0,0,0.19,0,0,0.19,0,0,0,0,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.07,0,0,1.121,0,1021.5,2042,2043,1 0,0,0,0,0,0,0,1.29,0,0.43,0,0,0,0,0,0.43,0,1.73,0.43,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0.124,0,0.31,0.062,0,1.477,8,65,1 0.08,0,0.08,0,0.16,0,0,0,0,0,0,0.23,0,0,0,0.08,0.23,0,0.4,0.16,0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0,0,0.16,0,0,0,0,0,0.228,0,0.406,0.038,0,2.811,67,1254,1 0.64,0,0.64,0,1.93,0,0,0,0,0,0,1.29,0,0,0,1.29,0.64,0,1.93,0,2.58,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.462,0.37,0,2.44,22,122,1 0,0,0,0,0,0,0,1.29,0,0.43,0,0,0,0,0,0.43,0,1.73,0.43,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0.124,0,0.31,0.062,0,1.477,8,65,1 0,0,0.73,0,0.36,0,0,0,0,0,0,0.73,0,0,0,0.36,0.73,0,1.09,0,1.46,0.36,0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.36,0,0,0,0,0,0,0,0,0,0,0,0.604,0.181,0,3.787,58,356,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.06,0,0,0,0,0,0,0,1.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.342,0,0,0,0,2.217,10,51,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.28,0,0,3.84,0,3.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.28,0,0,0,0,0,0,0.23,0.23,0,5.538,41,72,1 0,0,0.32,0,0.65,0.65,0.32,0.32,0,0,0,0.32,0.32,0,0,0.32,0.32,0,2.28,0,3.25,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0.105,0,0,0.158,0,1.494,10,139,1 0.18,0,0.18,0,1.57,0.36,0.06,0.06,0.06,0.12,0.06,0.54,0.3,0.06,0,0,0.72,0.06,4.49,0.24,1.09,0,0.85,0.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0.06,0,0,0,0.01,0.052,0,0.01,0.167,0,1.736,12,441,1 0,0,1.31,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0,0,1.31,0,5.26,0,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.201,0,4.5,25,54,1 0,0,0,0,0,0,0.45,0.91,0.45,0.91,0,0,0,0,0,0,0.45,0.45,0.91,0,0.45,0,0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.254,0,0.063,0.127,0,4.735,46,161,1 
0,0,0,0,0,0,1.16,0,0,0,0,0,0,0,0,4.65,2.32,0,3.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,21,1 0,0,0,0,0,0,1.25,0,0,0,0,0,0,0,0,6.25,0,0,3.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,19,1 0,0.02,0.05,0,0.02,0,0,0.05,0,0.35,0,0.02,0,0,0,0.05,0.1,0.38,0.07,0.2,0.17,0,0,0,0.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0,0.107,0.017,0.017,3.922,489,3271,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,1,1,2,1 0.48,0,1.45,0,0.48,0,0,0,0,0,0,0,0,0,0,0.48,0,0,4.36,0,1.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0.09,0,2.443,0,0,1.227,8,81,1 0,0,0.71,0,0.23,0,0,0,0.23,0.23,0.23,1.9,0,0,0,0.23,0,0,3.81,0.23,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.109,0,1.018,0.036,0,4.022,97,543,1 0,0,0.32,0,0.65,0.65,0.32,0.32,0,0,0,0.32,0.32,0,0,0.32,0.32,0,2.28,0,3.25,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0.105,0,0,0.158,0,1.494,10,139,1 0,0,0,0,7.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.588,0,0,0,0,1,1,6,1 0,0,0.71,0,0.23,0,0,0,0.23,0.23,0.23,1.9,0,0,0,0.23,0,0,3.81,0.23,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.109,0,1.018,0.036,0,4.022,97,543,1 0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,5,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0,3.178,62,89,1 0.05,0,0.29,0,0.23,0.17,0.05,0,0,0,0.65,0.82,0,0,0,0.76,0.11,0.11,1.53,0.29,1.3,0,0.23,0.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.009,0.129,0,0.102,0.259,0,1.493,8,660,1 0,0,0.32,0,0.64,0.64,0.32,0.32,0,0,0,0.32,0.32,0,0,0.32,0.32,0,2.27,0,3.24,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0.104,0,0,0.157,0,1.494,10,139,1 0,0,1.34,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0.67,0.67,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.67,0,0,0,0,0,0,0.332,0.11,0,2.315,12,132,1 
0,0.02,0.05,0,0.02,0,0,0.05,0,0.35,0,0.02,0,0,0,0.05,0.1,0.38,0.07,0.2,0.17,0,0,0,0.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0,0.107,0.017,0.017,3.922,489,3271,1 0,0,1.35,0,0.67,0,0,0,0,0.67,0,0,0,0,0,0,0,0.67,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.67,0,0,0,0,0,0,0.802,0.114,0,2.527,20,139,1 0.2,0.81,0.61,0,0,0,0,0,0.2,0,0,0.4,0,0,0,0.2,0,0,0.2,0,0.2,0,0,0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.831,0.338,0.03,1102.5,2204,2205,1 0,0,1.22,0,1.22,0,0,0,0,0,0,0,0,0,0,0.61,0,0.61,1.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.61,0,0,0,0,0,0,0.517,0.103,0,2.966,28,178,1 0,0,0,0,1.48,0.74,1.48,0,0,0.74,0.74,0.74,0.74,0,0,0.74,0.74,0,2.22,0,3.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.108,0,0,2.346,12,61,1 0,0,0,0,0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.076,0.153,0,0,3.317,11,136,1 0.35,0.46,0.31,0,0.15,0.03,0,0.35,0.58,0.66,0.31,0.7,0.62,1.28,0.03,0.23,0.42,0,3.12,0,1.36,0,0.46,0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0,0,0,0.005,0.088,0,0.319,0.479,0.124,6.11,116,2218,1 0,0.35,0.7,0,0.7,0,0.35,0.35,0,0.35,0.7,0,0,0,0,0.7,0,0.35,4.25,0,1.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0.113,0,0.397,0,0,3.388,58,183,1 0,0,0,0,1.21,0,0,0,0,0,0,0,0,0,0,2.43,0,0,3.65,0,1.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,31,1 0,0,1.31,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0,0,1.31,0,5.26,0,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.199,0,4.818,25,53,1 0.6,0,0.36,0,1.44,0,0,0,0.24,1.32,0.72,2.52,0.6,0,0,0.6,0.24,0,4.44,0,1.8,0,0.72,0.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.12,0,0,0,0,0.04,0,0.101,0.202,0,3.548,54,479,1 0,0,1.33,0,1.78,0.44,0,0.44,0,0,0,0,0,0,0,0,0,0,4.46,0.89,0.89,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.44,0,0,0,0.078,0.078,0,0,0,0,1.541,5,37,1 
0.08,0.08,0.76,0,0.85,1.02,0.25,0.17,0.59,0.08,0.17,0.59,0.17,0,2.22,0.25,0.08,0.94,1.62,0.17,0.42,0,0.85,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0.17,0.08,0.08,0.08,0,0,0,0.063,0,0.42,0.114,0.012,7.497,669,1402,1 0,0,0.46,0,0.46,0,0,0,0,0,0,0,0,0,0,0.46,0,0,2.8,0,0.93,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.46,0,0,0,0,0.082,0,0.663,0,0,1.428,20,120,1 0,0,0.14,0,0.14,0,0.14,0,0.57,0.14,0.14,0,0,0,0,0.86,0,0.57,0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0.826,0.745,0,5.456,72,1315,1 0.18,0,0.18,0,1.59,0.36,0,0.06,0.06,0.06,0.06,0.55,0.3,0.06,0,0,0.73,0,4.4,0.24,1.1,0,0.85,0.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0,0,0,0,0.01,0.052,0,0.01,0.169,0,1.748,12,444,1 0.18,0,0.18,0,1.59,0.36,0,0.06,0.06,0.06,0.06,0.55,0.3,0.06,0,0,0.73,0,4.4,0.24,1.1,0,0.85,0.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0,0,0,0,0.01,0.052,0,0.01,0.169,0,1.775,12,451,1 0.76,0.19,0.38,0,0.19,0.12,0,0.25,0.76,0.31,0.25,1.52,0.31,0.38,0,0.38,0.44,0.06,2.98,0.69,1.26,0,0.44,0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.12,0,0,0.06,0,0,0,0,0,0,0.085,0.053,0.429,0.236,0.064,3.664,45,1059,1 0.08,0.08,0.35,0,1.52,0.17,0.08,0,0.35,0.17,0,1.43,0,0.08,0,0.53,0.08,0,3.58,0,0.89,0,0.08,0.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.054,0.068,0.369,0.109,0.027,4.911,119,1277,1 0.08,0,0.93,0,1.52,0.33,0,0.08,0.67,0,0.25,0.67,0.16,0,1.69,0.08,0,1.1,1.86,0.16,0.42,0,1.1,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0.16,0,0.08,0.08,0,0,0.012,0.101,0,0.356,0.101,0.012,11.32,669,1834,1 0,0,0.48,0,0.48,0.48,0.48,0,0,0.96,0,0,0,0,0,0,0.96,0,3.36,0,0.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.163,0,0.163,0,0,1.696,17,95,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.578,0,0,0,0,3.8,15,19,1 0,0,0.59,0,0.59,0,0,0.59,0,0,0,1.19,0,0,2.38,0,0.59,0.59,2.97,0,2.97,0,1.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.59,0,0,0,0,0,0,0,0,0,0.105,0,0.105,0.42,0,3.428,12,72,1 
0.6,0,0,0,1.21,0,0.6,0,0,0,0,0.6,0,0,0,0,0,0.6,3.65,0,1.21,0,0,0.6,0,0,0,0,0,0,0,0,0,0,0,0,1.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.101,0.304,0,3.217,30,74,1 0.76,0.19,0.38,0,0.19,0.12,0,0.25,0.76,0.31,0.25,1.52,0.31,0.38,0,0.38,0.44,0.06,2.98,0.69,1.26,0,0.44,0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.12,0,0,0.06,0,0,0,0,0,0,0.085,0.053,0.428,0.235,0.064,3.702,45,1070,1 0,0,0,0,0,0,0,0,0,0,0,0,0,2.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.03,0,0,0,0,0,0,0,0,1.03,0,0,0,0,0.163,0,0.163,0.326,0,3.545,21,78,1 0,0,0.33,0,0.33,0,0.33,0.33,0,0,0,0.33,0,0,0,1.65,0,1.65,2.64,0,0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.161,0.537,0,0,2.517,9,141,1 0,0,0.67,0,0,0,0.67,2.02,0,0,0,0,0,0,0,0,0.67,0,3.37,0,1.35,0,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.095,0,0.286,0,0,5.558,39,189,1 0.11,0.23,0.11,0,0.46,0.46,0,0.11,0.93,1.74,0.11,0.34,0.23,0.11,2.09,0,0.46,0,3.49,0,1.28,0,0.46,0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0,0,0,0.34,0,0,0,0.019,0.172,0,0.23,0.134,0,4.281,144,655,1 0,0,0,0,1.55,0,0,0,0,0.31,0,0.31,0,0,0,0.31,0.62,0,2.79,0,0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.93,0,0,0,0,0.048,0,0,0,0,2.09,22,115,1 0,0,0,0,0.96,0,0.96,0,0,0,0.96,0,0,0,0,0,0,0,2.88,0,2.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.123,0,0.616,0,0,1.181,3,13,1 1.05,0,0.7,0,1.05,0,0.7,0,0,0.35,0,0.7,0,0,0.35,0.7,0,0.35,5.96,0,1.4,0,0.35,0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.35,0,0,0,0,0,0,0.367,0.061,0,1.88,12,94,1 0,0,0.55,0,0.55,0,0,0,0,0,0,0,0,0,0,0.55,0,0,3.31,0,1.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.55,0,0,0,0,0.104,0,0.524,0,0,1.229,7,75,1 0.29,0,0.29,0,0.29,0,0,0.29,0,0,0.29,0,0,0,0,0,2.93,0.58,1.75,0,1.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.58,0,0,0,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.361,6,113,1 0,0,0,0,0,0,0,0,0.89,0,0,0,0,0,0,0,0,0,0.89,0,3.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.13,0,0.52,0,0,3.5,46,105,1 
0.08,0.08,0.35,0,1.52,0.17,0.08,0,0.35,0.17,0,1.43,0,0.08,0,0.53,0.08,0,3.58,0,0.89,0,0.08,0.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.054,0.068,0.369,0.109,0.027,4.896,119,1278,1 0,0,1.16,0,3.48,0,0,0.58,0.58,0,0,0.58,0,0,0,1.74,0,0,1.16,0,3.48,0,0,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.082,0,0.165,0.082,0,2.17,12,102,1 0.1,0,0.03,0,0.1,0.03,0,0,0,0.1,0.1,0.43,0,0,0,0.37,0.1,0,0.43,0,0.4,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0,0.13,0.06,0,0,0,0,0.06,0,0.03,0,0,0,0.2,0.014,0.078,0,0.034,0.019,0.019,4.93,113,3550,1 0,0,0,0.42,0.84,0,0,0.42,0,0,0,0,0,0,0,0,0,0.42,0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.488,0,2.636,0.683,0,3.168,36,301,1 0.25,0,0.51,0,0.25,0.51,0.25,0,0,0,0,0.76,0,0,0,0.25,0,0.76,2.29,0,0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.044,0,0.132,0.354,0,0,2.593,14,153,1 0.08,0.08,0.76,0,0.85,1.02,0.25,0.17,0.59,0.08,0.17,0.59,0.17,0,2.21,0.25,0.08,0.93,1.61,0.17,0.42,0,0.85,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0.17,0.08,0.08,0.08,0,0,0,0.063,0,0.394,0.114,0.012,7.484,669,1407,1 0,0.24,0.72,0,0.24,0,0.12,0,0,0.36,0.12,0,0.12,0,0.12,0.12,0,0.6,0.36,0.12,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.12,0,0,0,0,0,0,0,0,0,0.018,0,0.34,0,0.018,5.634,158,1234,1 0,0,0.43,0,0.87,0,0,0,0,0,0,0,0,0,0,0.43,0.87,0,2.62,0,1.31,0.43,0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0.072,0,0.577,0.216,0,6.274,90,320,1 0.14,0.14,0.29,0,0,0,0,0,1.02,0.29,0.14,0.58,0,0,0,0,0,0.14,2.35,0.14,1.02,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0.29,0,0,0,0,0,0,0.14,0,0,0,0,0.204,0,0.153,0.153,0.102,2.705,73,368,1 0,0,0.14,0,0.28,0,0.14,0,0,0,0,0,0,0,0,0,0,0,2.89,2.31,2.02,7.97,0.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.28,0,0,0,0,0.053,0,0.269,0.08,0.484,15.086,74,1222,1 0.62,0,0.62,0,1.25,0,0.62,0,0,0,0,0.62,0,0,0,0.62,0.62,0,1.25,0,2.51,0,0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.456,0.273,0,2.52,22,121,1 
0.16,0,0.67,0,0.33,0.16,0.33,0.84,0.16,0.5,0.33,1.51,0,0,0,0,1.68,0.33,2.02,1.68,3.87,0,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0,0,0,0,0,0,0.218,0,1.118,0.054,0,4.928,63,621,1 0,0,0.14,0,0.28,0,0.14,0,0,0,0.14,0.14,0,0,0,0,0,0,2.86,2.14,2,3.86,0.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.14,0,0,0,0,0.048,0,0.241,0.072,0.435,6.238,37,1229,1 0,0,0.15,0,0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,2.56,2.26,2.11,4.07,0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.15,0,0,0,0,0.051,0,0.255,0.076,0.46,6.3,37,1216,1 0.51,0.43,0.29,0,0.14,0.03,0,0.18,0.54,0.62,0.29,0.65,0.65,1.2,0.03,0.21,0.43,0.03,3.03,0,1.35,0,0.51,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0,0,0,0.012,0.078,0,0.443,0.51,0.133,6.59,739,2333,1 0,0,0,0,0,0.68,0,1.36,0.68,0.68,0,0,0,0,0,0.68,2.73,0.68,1.36,3.42,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.68,0,0,0,0,0,0,0,0,0,0,0,1.143,0.519,0,3.737,75,228,1 0.33,0,0.66,0,0.22,0,0,0,0.44,0.11,0,0.33,0,0,0,0.55,0,0,1.76,0,1.1,0,0,0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0,0,0,0.11,0,0,0,0,0,0,0.173,0,0.367,0.193,0.077,2.559,75,389,1 0,0,0.49,0,1.48,0,0.49,0,0,0,0,0.99,0,0,0,0.49,0.99,0,2.47,0,2.97,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.49,0,0,0,0,0.082,0,0.497,0.165,0,5.113,78,225,1 0,0,0.94,0,0.94,0,0,0,0,0,0,0,0,0,0,0.94,0,0,4.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.17,0,3.571,0,0,1.3,7,52,1 0.49,0.28,0.4,0,0.09,0.11,0.02,0.21,0.42,0.75,0.23,0.89,0.54,1.06,0,0.16,0.33,0.02,3.23,0,1.46,0,1.03,0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0,0,0,0,0.058,0,0.382,0.847,0.141,5.783,193,3210,1 0.33,0,0.66,0,0.22,0,0,0,0.44,0.11,0,0.33,0,0,0,0.55,0,0,1.76,0,1.1,0,0,0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0,0,0,0.11,0,0,0,0,0,0,0.173,0,0.367,0.193,0.077,2.559,75,389,1 0,0,1.56,0,0,0,1.56,0,0,0,0,0,0,0,0,1.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.56,0,0,0,0,0.233,0,0.7,0,0,2.125,12,34,1 
0,1.11,1.11,0,1.11,0,2.22,0,0,0,0,0,0,0,0,3.33,0,0,3.33,0,2.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.146,0,0,2.058,5,35,1 0,0,3.03,0,0.43,0,0.86,0,0,0,0.43,0.43,0,0,0,2.16,0,1.29,3.46,0,1.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0.801,0,0,4.77,41,353,1 0,0,0.91,0,1.82,0.45,0,0,0,0,0,0.45,0,0,0,1.36,0,0,2.28,0,4.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.073,0,0,0,0,2.032,12,126,1 0,0,0.76,0,0.76,0,0.5,0.5,0,1.01,0,0.25,1.52,0,0.76,0,0,1.52,2.03,0,1.52,0,0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.5,0,0,0,0,0,0,0,0,0,0.074,0,0.412,0.412,0,2.441,19,249,1 0,0,1.44,0,0,0,0,0,0,0,0,2.89,0,0,0,1.44,0,0,5.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.277,0,0,1.312,3,21,1 0,0,0.76,0,0.76,0,0.5,0.5,0,1.01,0,0.25,1.52,0,0.76,0,0,1.52,2.03,0,1.52,0,0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.5,0,0,0,0,0,0,0,0,0,0.074,0,0.412,0.412,0,2.441,19,249,1 0,0.71,0.71,0,0.35,0.35,0,0,0,0,0,0.71,0,0,0,0.35,0,1.43,1.79,0,1.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.057,0,1.257,0,0,3.895,61,261,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.88,0,5.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.5,4,9,1 0.6,0,0.36,0,1.44,0,0,0,0.24,1.32,0.72,2.52,0.6,0,0,0.6,0.24,0,4.44,0,1.8,0,0.72,0.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.12,0,0,0,0,0.04,0,0.101,0.222,0,3.577,54,483,1 0,0,0.88,0,0.88,0,0,0,0,0,0,0.88,0,0,0,0,0,0,0.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.935,0,0,3.417,51,229,1 0,0,0.36,0,0.6,0.12,0.12,0,0,0,0.12,0.48,0.12,0.12,0,0.12,0,0.6,2.41,0,0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.056,0.037,0,0.056,0.094,0,1.246,14,389,1 0,0,0,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0,0,1.31,0,2.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.251,0,1.007,0,0,1.44,8,36,1 0,0,0,0,0,1.2,0,0,0,0,0,0,0,0,0,0,0,0,1.2,0,3.61,0,0,3.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.421,0.21,0,3.454,17,38,1 
0,0,0,0,0,0,0,1.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.07,0,0,0,0,0.19,0,0.19,0.38,0,3.6,16,72,1 0,0,1.92,0,0,0,0,0,0,0,0,0,0,0,0,0,3.84,0,1.92,0,1.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.336,0,0,1.21,3,23,1 0.59,0.09,0.09,0,0.29,0.09,0,0.59,0.59,2.09,0.29,0.09,0.29,0,0.39,0.09,0.79,0.39,3.19,0.09,1.69,0,1.39,0.99,0,0,0,0,0,0,0,0,0,0,0,0.09,0,0,0.09,0,0,0,0,0,0.19,0,0,0,0.044,0.078,0,0.334,0.133,0.011,15.493,1171,2541,1 0,0,0,0,0,0,0,0,0,0,0,0,5.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.125,0,2.411,10,41,1 0.27,0,0.41,0,0,0,0.13,0.13,0,0,0,0.41,0,0,0,0,0,0.41,0.69,0,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0.133,0,0,0,0,1.531,20,144,1 0,0.62,0.62,0,0.31,0,0,0,0,0,0,0.62,0,0,0,0.31,0,1.25,2.51,0,1.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0,0,0.051,0,0.777,0,0,3.39,61,278,1 0,0,0,0,0.26,0,0.26,0,0,0,0,0,0,0,0,0.26,0,0,0,0,0.52,17.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.26,0,0,0,0,0.451,0.082,0.082,0.369,0,1.026,13.82,104,1078,1 0.33,0,0.67,0,0.22,0,0,0,0.44,0.11,0,0.33,0,0,0,0.56,0,0,1.79,0,1.12,0,0,0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0,0,0,0.11,0,0,0,0,0,0,0.157,0,0.373,0.196,0.078,2.576,75,389,1 0.12,0.12,0.24,0,1.34,0.12,0,0.12,0,0,0.36,0.85,0,0,0,0.24,0.24,0,2.33,0,0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.98,0,0,0,0.12,0,0,0,0,0.12,0,0,0,0.061,0.02,0,0.041,0.041,0,2.351,69,254,1 0.12,0.12,0.24,0,1.34,0.12,0,0.12,0,0,0.36,0.85,0,0,0,0.24,0.24,0,2.33,0,0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.98,0,0,0,0.12,0,0,0,0,0.12,0,0,0,0.061,0.02,0,0.041,0.041,0,2.351,69,254,1 0.31,0.31,0.31,0,0,0,0.31,0,0.31,0.31,0.31,0.31,0,0,0,0.94,0,0,0.31,0,2.51,0,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0.31,0,0,1.88,0,0,0,0,0,0,0,0,0,0,0.75,0,0.89,0.046,0.046,12.382,138,421,1 0,0,0.51,0,0.51,0,0,0,0,0,0,1.03,0,0,0,0,0,0,1.54,0,1.03,0,0,0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.8,0.177,0,3.125,12,100,1 
0,0.48,0.48,0,0.48,0,0,0.48,0,0,0,0.96,0,0,1.92,0,0.48,0.96,2.88,0,2.88,0,1.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0,0,0,0,0,0.084,0,0.084,0.336,0,3.2,12,80,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.45,0,0,3.22,0,6.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,2,1 0,0,0.89,0,1.79,0.44,0,0,0,0,0,0.44,0,0,0,1.34,0,0,2.24,0,4.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.073,0,0,0,0,2.25,12,144,1 0,0,0.71,0,0.17,0,0.35,0.35,0,0.17,0.17,0.35,0,0,0,0.35,0,0.17,0.53,0,0.17,0,0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0.35,0,0,0,0,0,0,0,0,0,0,0,0,0.057,0,0.057,0.171,0,1.974,34,229,1 0,1.72,0,0,0,0,0,0,0,1.72,0,0,0,0,0,1.72,0,0.86,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.125,0,0.251,0.251,0,2.022,12,91,1 0,0,0,0,0,0,0,2.53,0,0,0,0,0,0,0,0,1.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.26,0,0,0,0,0.186,0,0.186,0.186,0,4,23,84,1 0,0,0,0,0.42,0.42,0.42,0,0,0,0,0.42,0,0,0,0,0,0,0.84,0,0.42,8.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.062,0,0.187,0,1.002,7.951,74,493,1 0,0,0,0,0.45,0.45,0.45,0,0,0,0,0.45,0,0,0,0,0,0,0.9,0,0.45,9.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.203,0,1.084,8.517,72,477,1 0,0,0,0,0,0,1,0,0,1,0,1,0,0,0,0,0,0,3.01,0,0,1.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.176,0,0,4.476,20,94,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.385,0,0,20,169,180,1 0,0,1.25,0,2.5,0,0,0,0,0,0,0,0,0,0,1.25,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.2,4,36,1 0,0,0,0,0,0,0,1.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.08,0,0,0,0,0.194,0,0,0.389,0,3.6,16,72,1 0,0,0,0,0,0,0,0,0,0,0,0,1.38,0,0,0,0,0,0,0,1.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.486,0,0,2.681,11,59,1 
0.21,0.1,0.52,0,1.26,0.1,0,0,0.42,0.52,0.21,0.52,0.42,0,0,0,0.52,0,4.53,0,2,0,0.31,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.028,0,0.141,3.305,0,11.288,193,1016,1 0,0.23,0,0,0.23,0.47,0,0.47,0,0.95,2.61,1.66,0,2.61,0,0,0,0,3.8,0,0.95,0,0.23,0,0.71,0,0,0,0,0,0,0,0,0,0,0,0.95,0,0,0,0,0.23,0,0,0.47,0,0,0,0,0.121,0.04,0,0.04,0,3.78,55,189,1 0.09,0.18,0.36,0,0.09,0,0.09,0,0.55,0.27,0.09,0.83,0.36,0,0,0,0,0.09,3.69,0.55,1.56,0,0,0.36,0,0,0,0,0,0,0,0,0,0,0,0.09,0,0,0,0,0,0,0,0,0.09,0,0,0,0,0.056,0,0.341,0.085,0,7.273,103,1171,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.546,0,0,2.3,9,23,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.631,0,0,1.666,5,15,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.844,0,0,1.666,5,15,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.081,0,0,2.3,9,23,1 0,0,0.64,0,0.64,0,0,0,0,1.29,0,0,0,0,0,2.59,0,0,3.24,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0,0,0,0,0.574,0,0,5.833,30,105,1 0,0,0,0,0,0,0,0,0.93,0,0,0,0,0,0,0,0,0,0.93,0,3.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.134,0,0.536,0,0,2.166,23,65,1 0,0,0.32,0,0.64,0.64,0.64,0.32,0,0,0,0.32,0.32,0,0,0.32,0.32,0.32,2.27,0,3.24,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0.156,0,0,0.156,0,1.688,19,157,1 0,0,0,0,0,0,0,1.08,0,0,0,0,0,1.08,0,0,0,0,0,0,0,0,1.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.08,0,0,0,0,0.191,0,0.191,0.383,0,3.95,23,79,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.448,0,0,2.666,11,24,1 0,0,0,0,0,0,0,1.08,0,0,0,0,0,1.08,0,0,0,0,0,0,0,0,1.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.08,0,0,0,0,0.191,0,0.191,0.383,0,3.95,23,79,1 
0,0,0,0,0,0.05,0,0.34,0,0,0.11,0.81,0.05,0.11,0,0,0.75,0,0,0,0,0,0.05,0,1.16,0,0,0,0,0,0,0,0.05,0,0,0.23,0.05,0,0,0,0,0,0,0,0,0,0,0,0.283,0.107,0,0,0.053,0,1.864,32,910,1 0,0,0,0,0.88,0,0,0,0,0,0.44,0.44,0,0,0,0,0,0.44,1.32,0,1.32,0,0,0,0.44,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.072,0.072,0,0.291,0,0,1.348,3,58,1 0,0,0.41,0,0.82,0.61,0.2,0,0.2,0.61,0.41,1.23,0.2,0,0,0.61,0,0,2.89,3.09,1.23,0,0,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.098,0.065,0,0.816,0.065,0,3.716,45,301,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.347,0,0,1,1,2,1 0.1,0,0.43,0,0.1,0.1,0.1,0.53,0.1,0,0,0.64,0,0.32,0,0,0.1,0,0,0,0,0,0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.049,0,0.016,0.065,0,1.901,29,329,1 0.65,0.49,0.32,0,0.32,0.16,0,0.49,0.65,0.49,0.16,1.3,0,0,0.16,1.14,1.3,0.16,3.6,0.49,1.8,0,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0.225,0,0.902,0.225,2.233,5.833,47,595,1 0.09,0,0.09,0,0.39,0.09,0.09,0,0.19,0.29,0.39,0.48,0,0.58,0,0.87,0.19,0,1.66,4.1,1.66,0,0.39,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.136,0,0.318,0.151,0,6.813,494,1458,1 0,0,0,0,0,0,0,3.33,3.33,0,0,0,0,0,0,0,3.33,0,0,0,6.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.396,0,0.396,3.714,11,26,1 0.1,0,0.1,0,0.4,0.1,0.1,0,0.2,0.2,0.4,0.5,0,0.6,0,0.91,0.2,0,1.72,4.26,1.72,0,0.4,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.093,0,0.297,0.156,0,6.8,494,1428,1 0,0,0.37,0,1.11,0.74,0,2.96,0,2.96,0,0,0.74,0,0,0,2.22,0,5.18,0,2.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.149,0,1.096,0,0,5.16,107,289,1 0.08,0.08,0.76,0,0.85,1.02,0.25,0.17,0.59,0.08,0.17,0.59,0.17,0,2.21,0.25,0.08,0.93,1.61,0.17,0.42,0,0.85,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0.17,0.08,0.08,0.08,0,0,0,0.063,0,0.394,0.114,0.012,7.484,669,1407,1 
0.25,0,0.51,0,0.25,1.28,0,0,0.77,0.51,0,0.25,0,0,0,0,0,0.51,1.79,0,0.77,0,2.05,0,0.51,0.51,0,0,0,0,0,0,0,0,0,0,0.25,0,0,0,0,0,0,0,0,0,0,0,0,0.086,0,0.26,0.173,0,3.298,16,287,1 0,0,0,0,1.05,2.1,1.05,0,0,0,0,0,0,0,0,0,0,0,3.15,0,1.05,0,2.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.171,0.171,0,2.05,6,41,1 0,0.1,0.3,0,1.02,0.3,0.1,0.4,0,0.2,0.1,0.92,0,0.1,0,1.94,0.92,0.4,1.94,0.4,0.61,0.92,0.51,0.1,0,0,0,0,0,0,0,0,0,0,0,0.3,0.1,0,0,0.1,0,0,0,0,0,0,0,0,0,0.048,0.016,0.518,0.162,0.34,8.181,283,1890,1 0.1,0,0.1,0,0.4,0.1,0.1,0,0.2,0.2,0.4,0.5,0,0.6,0,0.91,0.2,0,1.72,4.26,1.72,0,0.4,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.093,0,0.297,0.156,0,6.8,494,1428,1 0,0,0,0,0.44,0.44,0.44,0,0,0,0,0.44,0,0,0,0,0,0,0.88,0,0.44,9.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.19,0,1.017,8.033,72,474,1 0,0.1,0.62,0,0.31,0,0.1,0,0.2,0.62,0.1,0.62,0.41,0,0,0.1,0.1,0.2,3.43,0.1,1.66,0,0.1,1.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.1,0,0,0,0,0,0,0.611,0.264,0.049,3.794,69,702,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.17,0,0,0,0,0.422,0,0.422,0.634,0,4.066,17,61,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.363,11,26,1 0,0.62,0.62,0,0,0.62,0,2.82,0,0.31,0.31,2.5,0,0,0,2.5,0,0,5.32,0.31,1.56,0,0,0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.055,0,0.111,0.111,0.055,1.672,6,92,1 0.23,0.29,0.64,0,0.17,0.17,0.11,0.05,0.05,0.47,0.11,1.17,0.47,0.05,0.17,0.05,0.11,0.29,3.93,0,2.05,0,0.47,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.133,0,0.492,0.338,0.092,6.033,87,1460,1 0.51,0.43,0.29,0,0.14,0.03,0,0.18,0.54,0.62,0.29,0.65,0.65,1.2,0.03,0.21,0.43,0.03,2.99,0,1.35,0,0.51,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0,0,0,0.012,0.078,0,0.478,0.509,0.127,6.518,611,2340,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,16,33,1 
0,0,0,0,0,0,0,1.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.12,0,0,0,0,0.204,0,0.408,0.408,0,4.1,25,82,1 0,0,0.48,0,0.48,0,0,0.48,0,0,0,0.96,0,0,1.93,0,0.48,0.48,2.41,0,2.41,0,3.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.48,0.48,0,0,0,0,0,0,0,0,0,0.084,0,0.084,0.761,0,5.322,46,165,1 0.08,0.08,0.76,0,0.85,1.02,0.25,0.17,0.59,0.08,0.17,0.59,0.17,0,2.21,0.25,0.08,0.93,1.61,0.17,0.42,0,0.85,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0.17,0.08,0.08,0.08,0,0,0,0.063,0,0.394,0.114,0.012,7.54,669,1410,1 0,0,0.19,0,0.19,0,0,0.19,0.19,0.19,0,0.19,0.19,0,0,0.76,0,0,0.95,0,1.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0.19,0,0,0,0.38,0,0.19,0,0,0,0,0.058,0,0.264,0,0,4.053,93,381,1 0,0.35,0.35,0,1.07,0,0,0.35,0,1.07,0,0.71,0,0,0,0,0.71,0.71,2.85,0,2.5,0,1.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.35,0,0,0,0,0.35,0,0,0,0,0.233,0,0.233,0.233,0,3.414,25,140,1 0,0,0,0,0,0,0.31,0,0,0,0,1.26,0,0,0,0,0,0.31,1.9,0,0.31,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0.043,0.086,0,0.13,0.173,0.26,3.244,60,279,1 0.4,0.4,0.26,0,0.13,0.2,0.06,0.33,0,1.14,0.33,1.07,1,0,0.26,0.4,0.06,0,4.1,0,0.94,0,0.53,0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.088,0,1.06,0.151,0.05,4.623,123,1045,1 0,0.39,1.18,0,0.39,0,0,0,0,0.78,0.78,0.78,0,0,0.39,3.54,0,0,1.18,0,1.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.39,0,0,0,0,0.064,0,0.843,0.129,0.064,5.87,42,364,1 0,0,0,0,0,0,0,4.62,0,0,0,0,0.92,0,0,0,0.92,0,0,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.919,0.367,0,2.84,16,71,1 0.32,0.28,0.57,0,0.12,0.2,0.16,0.2,0,0.32,0.08,0.98,0.41,0.04,0.04,0,0,0.41,3.74,0,1.64,0,0.45,0.53,0.04,0.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0,0,0.171,0,0.507,0.493,0.028,5.608,133,1991,1 0,0.43,0.87,0,0,0,0,0,0.43,0.43,0.43,0,0,0,0,0,0,0,6.14,0,0.43,0,0,0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.075,0.075,0,0,0.151,0,5.086,33,117,1 
0,0,0.53,0,0.53,0,0,0.53,0,0,0,1.06,0,0,2.12,0,0.53,0.53,2.65,0,2.65,0,1.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.53,0.53,0,0,0,0,0,0,0,0,0,0.186,0,0.093,0.466,0,5.038,60,131,1 0.35,0.08,0.35,0,0.35,0,0,0.52,0.61,1.76,0.17,0.26,0.79,0,0.26,0,0.7,0.35,2.64,0,2.03,0,0.61,0.7,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0.08,0.17,0,0,0,0,0.17,0,0,0,0,0.081,0,0.556,0.069,0.011,19.234,1170,3116,1 0.51,0.17,0.51,0,1.7,0.34,0,0,0.85,0.17,0,0.68,0.17,0.34,0,0.17,0.17,0,2.9,0,2.05,0,0.68,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.387,1.961,0.025,11,183,660,1 0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.53,0,1.6,0,1.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.062,0,0.311,0,0,1.954,11,43,1 0.26,0.72,0.85,0,0,0.19,0.06,0.33,0.72,0.46,0.72,0.79,0.19,1.05,0.06,0.59,0.19,0.33,3.5,0.06,1.52,0,0.06,0.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0,0,0,0.06,0,0,0,0,0.131,0,0.101,0.101,0.202,4.398,79,1280,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.17,0,0,0,0,0.417,0,0.208,0.626,0,4.066,17,61,1 0,0,0.52,0,0,1.05,0.52,0,0,0,0,0,0,0,0,1.05,0,0,2.63,0,0,0,1.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.194,0,0.291,0,0,3.333,43,120,1 0.31,0,0,0,0,0,0,0,0,0,0.31,0.31,0.31,0,0,0,0.31,0,2.79,0,1.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0.114,0,0,0.057,0,0,2.972,18,110,1 0,0,0,0,0,1.29,0,0.64,0,0,0,0,0,0,0,0,0,0,3.87,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.116,0.111,0,1.8,12,63,1 0,0,0,0,0,1.28,0,0.64,0,0,0,0,0,0,0,0,0,0,3.84,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.103,0.11,0,1.777,12,64,1 0,0,0.15,0,0.62,0,0.31,0,1.09,0,0,0,0.46,0,0,0.15,0.15,1.4,2.19,0,1.09,0,0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.252,0,0.378,4.017,0,3.278,23,259,1 0,0.11,0.35,0,1.18,0.47,0.23,0.35,0,0.11,0.11,0.95,0,0.11,0,2.13,0.95,0.23,1.9,0.35,0.35,0,0.59,0.11,0,0,0,0,0,0,0,0,0,0,0,0.35,0,0,0,0.11,0,0,0,0,0,0,0,0,0,0.057,0,0.42,0.191,0.21,8.026,283,1509,1 
0.09,0,0.27,0,0.36,0.09,0,0.18,0.09,0,0,0.73,0,0.36,0,0,0,0,2.01,0,3.38,0,0.36,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0.015,0.046,0.031,0.249,0.031,0.031,3.689,69,535,1 0,0,0.47,0,0.47,0,0,0.47,0,0,0,0.94,0,0,1.88,0,0.47,0.47,2.83,0,2.35,0,1.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.47,0.47,0,0,0,0,0,0,0,0,0,0.164,0,0.082,0.41,0,5.074,60,137,1 0.14,0.14,0.29,0,0,0,0,0,1.03,0.29,0.14,0.59,0,0,0,0,0,0.14,2.36,0.14,0.88,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0.29,0,0,0,0,0,0,0,0,0,0,0,0.205,0,0.153,0.128,0.102,2.686,73,368,1 0,0,0,0,0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0.81,0,1.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.125,0,0,3.871,26,151,1 0.14,0.14,0.29,0,0,0,0,0,1.03,0.29,0.14,0.59,0,0,0,0,0,0.14,2.36,0.14,0.88,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0.29,0,0,0,0,0,0,0,0,0,0,0,0.205,0,0.153,0.128,0.102,2.686,73,368,1 0.5,0.4,0.33,0,0.13,0.03,0.13,0.1,0.54,0.77,0.3,0.7,0.54,1.14,0.03,0.27,0.43,0.03,3.2,0,1.45,0,0.37,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0,0,0,0.071,0,0.456,0.5,0.11,6.049,129,2220,1 0,0,0.2,0,0.4,0.2,0,0,0,0,0,0,0,0,0,0,0.61,0.4,2.45,0.2,0.61,4.49,0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0,0,0,0,0,0.382,0.223,0.478,7.538,55,490,1 0.57,0,0.57,0,0,0,0,0.57,0,0,0,1.14,0,0,0,0,0,0,5.14,0,1.14,0,2.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.211,0.74,0,2.9,32,116,1 0.59,0,0.59,0,0,0,0,0.59,0,0,0,1.18,0,0,0,0,0,0,5.32,0,1.18,0,2.95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.109,0.763,0,3,32,114,1 0,0,0.2,0,0.81,1.01,0,0,0,0,0.2,1.21,0,0,0,0,0,0.2,1.21,0,0,0,0.6,0.4,0,0,0,0,0,0,0,0,0,0,0,1.62,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0.152,0,0.121,0.121,0,2.61,10,261,1 0.19,0.19,0,0,1.55,0.19,0.77,0,0.19,0.19,0,0.77,0.58,0,0,0.19,0.58,2.33,0.77,0,0.38,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0,0,0,0,0,0,0.03,0.061,0.03,0.185,0.216,0,1.948,11,113,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.17,0,0,0,0,0.421,0,0.21,0.632,0,3.75,15,60,1 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.17,0,0,0,0,0.407,0,0.203,0.61,0,4.133,17,62,1 0,0,0.53,0,0.21,0.1,0.1,0.53,0.1,0.21,0,0.64,0,0,0,0,0.1,0,0,0,0,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.097,0,0.016,0.065,0,2.104,29,381,1 0.9,0,0,0,0,0,0.9,0,0,0,0,0,0,0,0,0,0.9,0.9,1.81,0,2.72,0,0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.537,0,0,2.782,19,64,1 0,0,0,0,0,0,1.02,0,0,0,0,2.04,0,0,0,2.04,0,2.04,3.06,0,1.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.17,0,0,0.17,0,1.47,4,25,1 0,0.56,0.28,0,0,0,0.56,0,0,0.56,0.28,0.56,0.28,0,0,1.41,0.28,0,1.97,0,1.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.28,0,0,0,0,0.114,0,0.153,0,0.153,9.25,394,555,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.92,0.46,0.92,1.85,0.46,1.85,0.46,0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.142,0,0.142,0.5,0.285,1.636,10,126,1 0.28,0,0.28,0,1.43,0.28,0,0.14,0,0,0,1.14,0,0,0,0.14,0.42,0,3.86,0,1.28,0,0.14,0.42,0,0,0,0,0,0,0,0,0.14,0,0,0,0,0,0,0,0,0,0,0.14,0,0,0,0,0,0.047,0,0.094,0.118,0.023,1.42,27,250,1 0,0,0,0,0.87,0,0,1.16,0,0,0.29,1.74,0,0,0,0,0.87,0,4.95,0,2.91,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.151,0,0,0,0,0,1.095,4,46,1 0,0.55,0.55,0,2.23,0.55,0,0.55,0,0,0.55,0,0,0,0,0,0,1.11,1.67,0,2.23,0,0,0.55,0,0,0,0,0,0,0.55,0,0,0,0,0,0.55,0,0,0,0,0,0,0,0,0,0,0,0,0.265,0,0.088,0.353,0,2.571,11,108,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.5,0,0,0,0,12.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,2,1 0,0,0,0,0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0.81,0,1.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.121,0,0,3.871,26,151,1 0,1.47,0,0,0,0,0,0,0,1.47,0,0,0,0,0,0,0,1.47,4.41,0,0,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.333,12,21,1 0.26,1.07,0,0,1.61,0,1.07,0.26,0.26,0,0,0.8,0,0,0,0,0,1.61,3.5,0,1.34,0,0,0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.8,0,0,0,0,0.092,0,0.324,0,0,7.369,52,339,1 
0.33,0.67,0,0,0,0,0.67,0.33,0.33,0.33,0,0.67,0,0,0,0,0.67,1.01,2.02,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.048,0,1.204,6,59,1 0,0,0.4,0,0.4,0,0.4,0,0,0,0,0,0.4,0,0,0,0,0,2.04,0,0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.4,0,0,0,0,0,0,0.11,0,0,1.594,11,118,1 0.41,0,0.61,0,0.41,0.61,0,0.82,0.2,0.2,0.2,1.44,0,0,0,0.41,1.03,0.2,1.65,1.65,3.09,0,0.2,0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.099,0,1.689,0.132,0,4.913,102,565,1 0,0.9,1.81,0,0,0,0.9,3.63,0,1.81,0,0.9,0,0,0,0,0.9,0,2.72,0,3.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.774,0,0,1,1,18,1 0,2.66,0,0,2,0,0,0.66,0,0,0,2,1.33,0,0.66,0,0,6.66,3.33,0,1.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.111,0,0,2.133,7,32,1 0.5,0.43,0.28,0,0.14,0.03,0,0.18,0.54,0.61,0.28,0.65,0.65,1.19,0.03,0.21,0.43,0.03,2.96,0,1.34,0,0.5,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0,0,0,0.012,0.078,0,0.439,0.505,0.132,6.683,798,2426,1 0.56,0,0.84,0,0.28,0.84,0,0.84,0.28,0.28,0.28,1.41,0,0,0,0,1.41,0,0.84,1.98,2.83,0,0.28,0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.128,0,1.289,0.042,0,3.979,47,386,1 0.33,0.16,0.33,0,0,0.16,0,0.16,0.16,0.08,0.16,0.57,0.24,0,0,0.16,0.24,0.24,3.47,0,2.06,0,0,0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.66,0,0,0,0,0,0,0.213,0.113,0,3.15,76,441,1 0,0.34,1.02,0,0.68,0.34,0.34,0,0,0,0,0.34,0,0,0,2.04,0,0.34,4.76,0,2.38,0,0,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.161,0,0.215,0,3.879,6.978,56,328,1 0.64,0,0.25,0,0,0.38,0,0,0,0.25,0.64,0.25,1.03,0,0,0.77,0.9,0.12,1.93,0,0.51,0.12,0.12,1.03,0,0,0,0,0,0,0,0,0,0.12,0,0,0,0,0,0.12,0,0,0,0,0.12,0,0,0,0,0.161,0,1.082,0.299,0.092,5.274,146,981,1 0,0,0.78,0,1.17,0,0,0,0,0,0,0.39,0,0,0,0.78,0,0,1.56,0,1.96,0,0,0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.092,0,4.111,20,222,1 
0,0,0.49,0,1.48,0,0,0,0.49,0,0,0,0,0.16,0,0.66,0.33,0,0.82,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0,0,0,0,0,0.16,0,0,0,0,0.197,0,0.616,0,0,5.778,128,549,1 0,0,0.68,0,0,0,0,1.36,0,0,0.68,0.68,0,0,0,0,0,0,3.4,0,1.36,0,0.68,0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.232,0.232,0,2.232,19,96,1 0,0,0.32,0,0.64,0.64,0.64,0.32,0.32,0,0,0.32,0.32,0,0,0.32,0.32,0.32,2.25,0,3.21,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0.156,0,0,0.156,0,1.752,19,149,1 0,0.45,0,0,0.91,0,1.36,0,0,0,0,0.45,0,0,0,1.82,0.45,0,2.73,0,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.368,0,0,1.68,17,158,1 0,1.25,0.62,0,0,0,1.25,0,0,0,0.62,0.62,0,0,0.62,2.5,0,1.25,5,0,1.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.045,0,0.225,0,0,2.35,29,134,1 0,1.25,0.62,0,0,0,1.25,0,0,0,0.62,0.62,0,0,0.62,2.5,0,1.25,5,0,1.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.044,0,0.223,0,0,2.35,29,134,1 2.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.666,13,44,1 0,0,0,0,0,0,2.1,0,0,0,0,1.05,0,0,0,0,0,0,4.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.695,15,62,1 0,0,0.99,0,0.24,0,0,0,0.24,0.49,0,0.49,0,0,0.24,0.24,0,0,0.24,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.035,0,0,0,0,5.555,209,400,1 0.52,0.34,0.4,0,0.14,0.17,0.05,0.14,0.46,0.52,0.31,0.89,0.4,1.16,0.05,0.11,0.23,0.11,2.9,0,1.1,0,0.63,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0.073,0,0.363,0.535,0.132,6.171,159,2771,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.53,6.32,0,1.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.649,0,0.432,5.875,46,94,1 0,0,0,0,0,0,0,3.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.04,0,0,0,0,0.189,0,0.189,0.189,0,3.857,25,81,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.66,5.33,1.33,1.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.699,0,0.466,9.2,46,92,1 
0.5,0,0.75,0,0.25,0.25,0.25,0.5,0,0,0.5,2.26,0,0,0,0.5,1,0.25,4.03,0,2.01,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.034,0,0.375,0.034,0,4.2,60,231,1 1.03,0,0,0,0,0,0,0,0,0,0,1.03,0,0,0,1.03,0,0,3.62,0,1.03,0,0,0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.176,0.176,0,2.766,26,83,1 1.18,0.39,0.59,0,0,0.98,0.19,0.19,1.38,0.39,0,0.98,0,0.19,0,0.98,0,0,2.56,0.39,1.38,0,0,1.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.231,0,0.745,0.308,0.025,6.652,76,632,1 1.18,0.39,0.59,0,0,0.98,0.19,0.19,1.38,0.39,0,0.98,0,0.19,0,0.98,0,0,2.56,0.39,1.38,0,0,1.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.232,0,0.749,0.31,0.025,6.652,76,632,1 1.18,0.39,0.59,0,0,0.98,0.19,0.19,1.38,0.39,0,0.98,0,0.19,0,0.98,0,0,2.56,0.39,1.38,0,0,1.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.23,0,0.741,0.306,0.025,6.652,76,632,1 0,0,0,0,0,0,2.1,0,0,0,0,1.05,0,0,0,0,0,0,4.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.695,15,62,1 0,0,0,0,0,0,0,0,0,0,0,0,0,2.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.05,0,0,0,0,0,0,0,0,1.05,0,0,0,0,0.169,0,0,0.338,0,4.047,29,85,1 0.13,0.13,0.13,0,0.55,0.27,0.27,0.13,1.1,0.27,0,0.97,0.27,0,0.13,0,0,0,3.88,0.13,2.77,0,0.13,0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.13,0,0,0,0,0,0,0,0,0,0.017,0,1.316,0.177,0,4.947,232,757,1 0,0,0.46,0,0,0,0,0.15,0,0,0,0.15,0,0,0,0,0,0.46,0.93,0,0,0,0.46,0,0,0,0,0,0,0,0,0,0,0,0,0.15,0,0,0,0.93,0,0,0,0,0,0,0,0,0.071,0.071,0,0.095,0.023,0,62.75,1505,2761,1 0.09,0,0.27,0,0.36,0.09,0,0.18,0.09,0,0,0.72,0,0.36,0,0,0,0,2,0,3.27,0,0.36,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0.015,0.045,0.03,0.242,0.03,0.03,3.816,69,542,1 0.13,0.13,0.13,0,0.55,0.27,0.27,0.13,1.11,0.27,0,0.97,0.27,0,0.13,0,0,0,3.91,0.13,2.65,0,0.13,0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.13,0,0,0,0,0,0,0,0,0,0.018,0,1.294,0.182,0,4.745,232,726,1 
1.18,0.39,0.59,0,0,0.98,0.19,0.19,1.38,0.39,0,0.98,0,0.19,0,0.98,0,0,2.56,0.39,1.38,0,0,1.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.232,0,0.749,0.31,0.025,6.652,76,632,1 0,0,0,0,0.28,0.86,0,0,0,0,0,0.57,0.28,0,0,0,0.28,0,0.28,0,0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.084,0.126,0,0,0,0,27.479,772,1319,1 0,0,0.36,0,0.36,0,0,0,0.36,0.36,0,0.36,0,1.09,0,1.81,0,0,3.63,0,1.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.125,0,0.125,0,0,1.287,5,94,1 0,0.23,0.47,0,1.18,0,0.23,0,0.7,0.7,0,0.47,0.23,0,0,0.23,0.7,0,2.83,0,1.89,0,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.072,0.108,0,2.438,20,178,1 2.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.277,0,0,3.2,13,48,1 0,0,0,0,0,0,0,0,0,1.51,0,1.51,0,0,0,0,0,0,7.57,0,1.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.769,15,36,1 0,0.54,0,0,0,0,1.08,0,0,0.54,0.54,0.54,0,0,0,2.17,0,0.54,3.26,0,1.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.54,0,0,0,0,0,0,0.334,0,0,1.325,5,53,1 0.45,0.68,0.68,0,1.92,0,0.56,0.45,0,0.45,0.22,1.81,0,0,0.79,0.22,0.11,1.81,2.38,0,1.36,0,0.11,0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0,0,0,0,0.11,0,0,0.019,0.057,0,0.574,0.134,0.019,3.155,94,385,1 0,0,0,0,0,0,0,0,0,0,0,1.36,0,0,0,0.68,0.68,0,4.76,4.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.255,0,0,2.818,21,124,1 0,0,0.55,0,0.22,0.22,0.11,0,0.11,0.22,0,0.33,0.33,0,0,0,0.22,0,0,0,0,0,0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.155,0,0.034,0.12,0,1.961,14,302,1 0,0,0,0,0,0,0.76,0,0,0,0,0,0.76,0,0,0,0,0,0.76,0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.76,0,0,0,0,0,0,0,0,0.274,0,0,11.035,110,309,1 0.68,0.11,0.11,0,0.45,0.11,0,0.57,0.79,2.73,0.34,0.11,0.22,0,0.45,0.11,0.68,0.45,3.07,0,1.71,0,1.82,1.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0.11,0,0,0,0,0.11,0,0,0,0.067,0.118,0,0.388,0.236,0.016,9.827,164,1592,1 
0,0,0,0,0,1.88,0,0,0,0,0,0,0,0,0,0,0,0,3.77,0,1.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.307,16,30,1 0,0,0,0,1.28,0,1.28,0,0,0,0,0,0,0,0,0.64,0,1.28,1.28,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.085,0.17,0,2.466,18,111,1 0,0,0,0,0,1.12,0,0,0,0,0,0,0,0,0,0,0,0,2.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.392,0,3.333,0,0,2.551,12,74,1 0.9,0,0,0,0,0,0.9,0,0,0,0,0,0,0,0,0,0.9,0.9,1.81,0,2.72,0,0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.546,0,0,2.818,19,62,1 0.49,0.32,0.46,0,0.05,0.16,0.05,0.24,0.46,0.79,0.27,1.01,0.6,1.23,0,0.21,0.38,0,3.3,0,1.5,0,1.09,0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0,0,0,0,0.064,0,0.322,0.626,0.165,6.896,193,3269,1 0.39,0,0,0,0,0.39,0.79,0,0,0,0,0.79,0,0,0,0,0.39,0,2.37,0,2.76,0,1.18,0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.061,0,0.612,0.183,0,2.678,13,75,1 0,0.58,0.58,0,0,0,0,0.58,0.58,7.55,0.58,1.16,0,0,0,0,0.58,0,4.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.192,0,0.192,0,0.096,1.526,10,58,1 0.17,0.17,0.69,0,0.34,0.17,0,0.86,0.17,0.69,0.34,1.38,0,0,0,0,1.73,0.34,2.07,1.55,3.8,0,0,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.17,0,0,0,0,0,0,0,0,0,0.194,0,1.718,0.055,0,5.175,63,621,1 0.51,0,0.77,0,0.25,0.25,0,0,0,0.51,0,1.55,0,0,0,0.77,1.55,0,4.9,0,2.58,0,0.77,0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.25,0,0,0,0,0,0,0,0,0,0,0.179,0,0.359,0.403,0.134,5.774,56,358,1 0,0,1.24,0,1.24,0.62,0,0,0,0,0,0,0,0,0,1.24,0.62,0,0.62,0,1.86,0.62,0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.62,0,0,0,0,0.107,0,0.321,0.107,0.107,3.846,30,150,1 0,0,0,0,0,0,0,2.94,0,0,0,0,0,0,0,0,0,0,0,0,0,2.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.45,14.5,42,87,1 0,0.84,0.84,0,0,0,0.84,0,0,1.68,0.84,0,0,0,0,0.84,0,0,3.36,0,0.84,0,0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.519,0,0,5,43,125,1 
0,0,0,0,0,0,0,0.71,0,0,0,0.71,0,0,0,1.43,0,0,4.31,0,1.43,0,0,0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.15,0,0,0,0,0.265,0,0.132,0,0,2.322,16,72,1 0.09,0,0.27,0,0.36,0.09,0,0.18,0.09,0,0,0.82,0,0.36,0,0,0,0,2.01,0,3.38,0,0.36,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0.015,0.046,0.03,0.246,0.03,0.03,3.771,69,528,1 0.11,0.22,0.11,0,0.45,0.45,0,0.11,1.02,1.59,0.11,0.34,0.22,0.11,2.16,0,0.45,0.11,3.53,0,1.25,0,0.45,0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0,0,0,0.45,0,0,0,0.018,0.17,0,0.265,0.132,0,4.215,144,666,1 0.44,0,0.88,0,0.44,1.32,0.44,0,0,0,0,0,0,0,0,0,0,0.44,1.76,0,2.2,0,2.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.216,0,0,0.433,0.361,0,2.375,16,133,1 0.1,0.2,1.01,0,0.8,0.8,0.5,0,0.8,0.1,0.3,0.7,0.3,0,1.61,0.1,0,1.11,1.31,0.2,0.7,0,0.6,0.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0.1,0.1,0,0,0,0.107,0,0.474,0.152,0.015,8.55,669,1351,1 0.11,0.22,0.11,0,0.45,0.45,0,0.11,1.02,1.59,0.11,0.34,0.22,0.11,2.16,0,0.45,0.11,3.53,0,1.25,0,0.45,0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0,0,0,0.45,0,0,0,0.018,0.17,0,0.265,0.132,0,4.215,144,666,1 0.42,0.46,0.38,0,0.19,0.11,0,0.07,0.58,0.62,0.34,0.77,0.5,1.32,0.03,0.23,0.54,0,3.06,0,1.51,0,0.38,0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0,0,0,0,0.076,0,0.438,0.585,0.127,6.134,153,2184,1 0,0,0.9,0,0.45,0,0,0,0,0,0,0.9,0.45,0,0,0.45,0.9,0,4.52,0,0.9,0,0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.35,0,0,0,0,0,0,2.115,0.07,0,2.651,14,114,1 0.33,0,0.67,0,0.22,0,0,0,0.44,0.11,0,0.33,0,0,0,0.56,0,0,1.79,0,1.12,0,0,0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0,0,0,0.11,0,0,0,0,0,0,0.157,0,0.392,0.176,0.078,2.606,75,391,1 0,0,0,0,0,1.88,0,0,0,0,0,0,0,0,0,0,0,0,3.77,0,1.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.307,16,30,1 0,0,0,0,0,1.27,0,0.63,0,0,0,0,0,0,0,0,0,0,3.82,0,0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.096,0.109,0,1.916,12,69,1 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.37,0,0,0,2.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.218,0,0,1.827,11,53,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.04,0,0,8.84,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.125,0,1.126,7.054,37,261,1 0,0.47,0.47,0,1.41,0,0.47,0,0,0.47,0.47,0.94,0,0,0,0.94,0,0,1.88,0,0.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.218,0,0,1.102,6,54,1 0,0,0,0,0,0,0,0,0,1.47,0,1.47,0,0,0,0,0,0,7.35,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.769,15,36,1 0.19,0.19,0.29,0,1.07,0.19,0.19,0.97,0.87,0.58,0.09,1.07,0.19,0.87,0.09,0,0,1.17,3.71,0.68,1.75,0,0.09,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.09,0.09,0,0,0,0,0,0,0,0,0,0,0.194,0.404,0.224,0.029,4.285,49,870,1 0,0,0,0,0.82,0,0,1.65,0,0.82,0,0,0,0,0,0,0.82,0,1.65,0,2.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.113,0,0.113,0,0,1.25,4,50,1 0.79,0.19,0.09,0,0,0,0,0.09,0.29,0.09,0.29,0.59,0.69,0,0,0.09,0,0.59,4.09,0,0.89,0,0.39,0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.267,0,0.19,0.247,0,2.324,19,365,1 0,0,0,0,0,0.68,1.37,0.68,0,0,0,0.68,0,0,0,0,0,0.34,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0,0,0,0,0.34,0,0,0,0,0,0,0,0.103,0,0.206,0.309,0,4.029,69,270,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0.85,0,0,0,0,0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.126,0,0,0,0,3.925,51,106,1 0,0,1.47,0,0,1.1,0.36,0,0,0,0.36,0.36,0,0,0,0.36,0,0,2.21,1.1,2.95,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.118,0,0.414,0.888,0.177,3,33,177,1 0,0,0.31,0,0.62,0.62,0.62,0.31,0,1.88,0.62,1.25,0,0,0.31,1.56,0.31,0,3.76,0,1.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.77,0,0,2.481,11,134,1 0.1,0.1,0.71,0,0.61,0.3,0.4,0.1,1.42,0.81,0.1,0.5,0,0,0,0.1,0,1.01,2.34,0.5,2.03,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0.1,0,0,0,0.1,0,0,0,0,0,0,0,0,0,0,0,0.256,0.928,0.384,0.032,3.179,56,1043,1 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.55,0,1.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,13,1 0.52,0.42,0.35,0,0.14,0.03,0.03,0.1,0.56,0.8,0.28,0.7,0.56,1.19,0.03,0.24,0.45,0,3.18,0,1.47,0,0.38,0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.07,0,0,0,0,0.075,0,0.452,0.528,0.116,6.152,260,2184,1 0,0,0,0,0,0,0,0,0,1.49,0,1.49,0,0,0,0,0,0,7.46,0,1.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.692,15,35,1 0,0.35,0.71,0,0.35,0,0.17,0,0,0.53,0.17,0,0.17,0,0.35,0.17,0,1.07,0.17,0.17,0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.027,0,0.167,0.027,0.055,7.527,149,956,1 0,0.64,0.64,0,0.32,0,0,0,0,0,0,0.64,0,0,0,0.32,0,1.29,1.62,0,0.97,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.053,0,1.065,0,0,3.932,61,291,1 0.56,0,0.84,0,0.28,0.84,0,0.84,0.28,0.28,0.28,1.41,0,0,0,0,1.41,0,0.84,1.98,2.83,0,0.28,0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.129,0,1.294,0.043,0,3.979,47,386,1 0,0.64,1.29,0,0.32,0,0,0,0,0,0,0.64,0,0,0,0.32,0,1.29,2.59,0,1.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0.053,0,0.531,0,0,4.337,121,334,1 0.34,0.05,0.58,0,0.63,0.17,0,0,0.75,0.23,0.34,1.27,0.34,0,0,0.58,0.05,0.17,3.01,2.61,1.5,0,0.17,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.201,0,0.127,0.182,0.027,4.225,131,1107,1 0,0,0.63,0,0,1.27,1.27,0.63,0,0,0,0.63,0,0,0,0,0.63,0,4.45,3.18,3.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.983,0.089,0,3.488,59,157,1 0.44,0,0.88,0,0.44,1.32,0.44,0,0,0,0,0,0,0,0,0,0,0.44,1.76,0,2.2,0,2.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.215,0,0,0.43,0.358,0,2.403,16,137,1 1.26,0.42,1.26,0,0,0,0,0,0,0,0,0.42,0,0,0,0.42,0,0.84,3.79,0,1.26,0,0,0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.42,0,0,0,0,0.067,0,0.472,0.472,0,3,19,108,1 0,0,0,0,0,0,2.94,1.47,1.47,1.47,1.47,0,0,0,0,0,2.94,0,0,1.47,4.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.141,0,0.425,0.141,0,140,279,280,1 
0,0.57,0,0,0.57,0,0.57,0,0,0.57,0,0.57,0,0,0,0,0,0.57,4.57,0,1.14,0,0,0,0.57,0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.57,0,0,0,0.072,0,0,0.072,0.289,0.144,7.512,114,293,1 0.89,0,0.89,0,0,0,1.78,0,0,0,0.89,1.78,0,0,0,0,0,0,6.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.89,0,0,0,0,0,0,1.344,0,0,5.25,16,84,1 0,0,0,0,0,0,4.08,0,0,0,0,0,0,0,0,0,0,0,2.04,0,0,0,0,2.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.257,0,0,4.181,26,46,1 0,0,0,0,0,0,2.94,1.47,1.47,1.47,1.47,0,0,0,0,0,1.47,0,0,1.47,4.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0.142,0,0.427,0.142,0,92.333,274,277,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.52,0,0,2.17,0,2.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.149,0,0,9.1,33,91,1 0.13,0.26,0.52,0,0.26,0,0.13,0,0,0.39,0.13,0.13,0.13,0,0.26,0.13,0,0.78,0.39,0.13,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.366,0,0.04,7.138,149,1235,1 0,0,0,0,0,0,1.94,0,0,0,0,0,0,0,0,2.91,3.88,0,1.94,0,1.94,0,0,1.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.97,0,0,0,0,0.97,0,0,0,0,0.13,0,0.52,0.13,0,6.266,26,94,1 0,0.43,0,0,0.43,0,0.86,0,1.3,0.86,0,1.3,0,0,0,0,0,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0.063,0.126,0,0,0.063,0,4.297,30,159,1 0,0.44,0.44,0,0,0,0,0,0,0,0,0.88,1.32,0,0,0,0,0.88,3.96,0,3.08,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.074,0,0.669,0.297,0,3.666,82,165,1 0,0.65,0.98,0,0.32,0,0,0,0,0,0,0.65,0,0,0,0.32,0,1.3,2.61,0,1.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0.053,0,0.477,0,0,4.273,121,312,1 0,0,0,0,0.89,0,0,0,0,0,0,0.89,0,0,0,0,0,0,1.78,0,2.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.121,0,1.336,0,0,6.611,51,238,1 0.33,0,0.33,0,0,0,0.66,0,0,0,0,1.32,0,0,0,0.66,0.99,0,2.64,0,0.99,0,0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.66,0,0,0,0,0,0,0,0,0,0,0.116,0,0.406,0.464,0.348,6.932,43,513,1 
0,0,0.94,0,0,0,0.94,0,0,1.88,0,1.88,0,0,0,0,0,0,4.71,0,0.94,0,0,0.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.163,1.305,0,2.571,20,36,1 0.73,0,0.36,0,0.36,0.36,1.1,0,0,0,0,0.36,0,0,0,0.36,1.84,0.73,2.58,0,1.1,0,0.36,1.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.186,0,0.435,0.062,0,4.411,190,300,1 0,0.66,0.66,0,1.33,0.33,0.33,0,0.33,0,0.33,0.33,0,0,0,0.33,0.66,1,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.66,0,0,0,0,0.266,0,0.372,0.159,0,1.894,14,161,1 0,0.3,0.75,0,0.3,0,0.15,0,0,0.45,0.15,0,0.15,0,0.15,0.15,0,0.75,0.15,0.15,0.6,0,0,0,0,0,0,0.15,0,0,0,0,0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0,0.352,0.02,0.041,5.938,149,1057,1 0.57,0,1.72,0,0,0,0,0.57,0,0,0,0.57,1.72,0,0,0,0.57,0,4.59,0,0,0,0,0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.099,0,0.396,0.099,0,2.333,11,70,1 0,0,0,0,0,0,1.04,0,0,0,0,1.04,0,0,0,0,1.04,0,3.66,0,2.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.062,0.248,0,0.621,0.062,0.062,3.902,59,160,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,1 0,0.28,0.84,0,0.28,0,0.14,0,0,0.42,0.14,0,0.14,0,0.14,0.14,0,0.7,0.42,0.14,0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.14,0,0,0,0,0,0,0,0,0,0,0,0.583,0,0.067,5.415,132,991,1 0.51,0.25,0.49,0,0.04,0.23,0.04,0.32,0.38,0.81,0.21,0.9,0.79,1.24,0.02,0.21,0.36,0.04,3.49,0,1.54,0,1.09,0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0,0.1,0,0,0,0,0.058,0,0.308,0.672,0.128,5.459,193,3243,1 0.7,0,0.35,0,0.7,0.35,0.7,0,0.7,0,0,0,0.7,0,0,0,1.05,0,3.16,0,1.4,0,0,1.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.173,0,0.23,0,0,4.596,60,262,1 0.3,0,1.23,0,1.54,0.92,0.61,0.92,0.3,0.3,0,0.3,0,0,0,0,0.3,0,2.47,0.92,0.92,0,1.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.3,0,0,0,0,0.045,0,0.728,0.182,0,4.339,60,243,1 0,0,0.84,0,0.56,0,0,0.56,0,0,0,0,0,0,0,0.28,0,0,1.13,0,0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.28,0,0,0,0,0,0,0,0,0,0.278,0,0.046,0,0,1.661,6,118,1 
0,0.7,1.05,0,0.35,0,0,0,0,0,0,0.7,0,0,0,0.35,0,1.4,2.46,0,1.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.35,0,0,0,0,0.056,0,0.897,0,0,4.43,121,350,1 0.56,0,0.32,0,1.13,0.08,0,0,0.16,0,0.08,0.72,0.56,0,0,0.24,1.13,0,4.6,0,2.01,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0.121,0,0.337,0.054,0,3.502,79,606,1 0,0.26,0.26,0,0.39,0,0.13,0,0,0.26,0,0.26,0.26,0,0.13,0.26,0,0.13,3.14,0.26,1.44,7.33,0.13,0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.13,0,0,0,0,0,0,0.022,0.022,0.889,12.454,107,1096,1 0,0,1.29,0,0,1.29,0,0,0,0,0,0,0,0,0,2.59,0,0,1.29,0,1.29,1.29,2.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.439,0,1.098,0,0.439,3.571,36,125,1 0.09,0.38,0.57,0,0.48,0.38,0,0,0,0.38,0,1.53,0.19,0,0.09,0,0.09,0,3.55,0,1.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.038,0.038,0,0.246,0.894,0.012,4,70,640,1 0.34,0,1.7,0,1.02,0,0,0,0.68,1.02,0,0,0,0,0,0,0,0,0.34,1.02,0.68,0,0,0,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.902,0.106,0,664,1327,1328,1 0,0.8,0,0,0.8,0,0.8,0,0,0.8,0,0,0,0,0,0.8,0.8,0.8,1.61,0,1.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.134,0,0.269,0,0,3.115,19,81,1 0,0,0,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0,0,6.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.111,0,0,1.409,10,31,1 0,0,0.28,0,0.16,0.18,0,0,0,0,0.02,0.09,0.11,0,0,0,0,0,0.14,0,0.02,0,0,0.04,0,0,0,0,0,0,0,0,0,0.07,0,0,0,0,0.02,0,0,0,0,0,0,0,0,0,0,0.116,0.021,0.03,0,0,16.644,154,9088,1 0.82,0,0,0,0.41,0,0.82,0,0,0,0,0.82,0,0,0,0,0.41,0,2.46,0,1.23,0,0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.41,0,0.171,0,0.513,0.114,0,2.953,44,189,1 0,0.42,0,0,0.42,0.42,0,0,0,0,0,0,0,0,0,0.42,0,0.42,4.2,0,0,0,0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.42,0,0,0,0,0.08,0,0.644,0.161,0,2.522,14,111,1 0.1,0.1,0.7,0,0.6,0.2,0.4,0.1,1.41,0.8,0.1,0.5,0,0,0,0.1,0,1.11,2.22,0.4,1.92,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0.1,0,0,0,0.1,0,0,0,0,0,0,0,0,0,0,0,0.26,0.991,0.39,0.032,3.173,56,1044,1 
0,0,0.28,0,0.16,0.18,0,0,0,0,0.02,0.09,0.11,0,0,0,0,0,0.14,0,0.02,0,0,0.04,0,0,0,0,0,0,0,0,0,0.07,0,0,0,0,0.02,0,0,0,0,0,0,0,0,0,0,0.116,0.021,0.034,0,0,16.587,154,9090,1 0.87,0.17,0.52,0,0,0.32,0,0.04,0.29,0.42,0.39,1.37,0.87,1.69,0,0.32,0.54,0.22,3.47,0.29,1.32,0,0.34,0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0.07,0,0.04,0,0.016,0.058,0,0.64,0.166,0.183,3.697,117,3498,1 0.43,0,0,0,0,0,0,0,0,0,0,1.29,0,0,0,0,0,0.43,1.29,0,1.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0.122,0,0.061,0,0,1.456,13,67,1 0,0.81,0.61,0,0,1.02,0,0.2,0.4,0.61,0,2.25,0,0,0,0,0.61,0,2.86,0,1.02,0,0,0.2,0,0,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.095,0,0.158,0.063,0,2,31,156,1 2.32,0,0.77,0,1.55,0,0,0,0,0,0,0.77,0,0,0,0.77,0,0,2.32,0,0.77,0,0,0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.77,0,0.134,0,0.671,0,0,129.5,515,518,1 0.08,0.08,0.48,0,0.16,0.24,0,0,0.24,0.08,0,0.56,0,0,0,0,0,0.08,0.88,0.08,0.48,4.57,0.4,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.233,0.202,0.326,8.763,102,1481,1 0.07,0,0.55,0,0.63,0.23,0.07,0.23,0,0.23,0.07,0.55,0.63,0,0,0.47,0.31,0.31,2.76,0,1.49,0,0.55,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.012,0,1.132,0.077,0.012,3.382,77,707,1 0.52,0.42,0.35,0,0.14,0.03,0.03,0.1,0.56,0.8,0.28,0.7,0.56,1.19,0.03,0.24,0.45,0,3.19,0,1.43,0,0.38,0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.07,0,0,0,0,0.077,0,0.453,0.543,0.119,6.305,286,2207,1 0.4,0.18,0.32,0,0.25,0.18,0.03,1.01,0.4,0.4,0.1,0.72,0.65,0.36,0.25,0.54,0.36,0.36,3.05,0.14,1.41,0,0.29,0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0,0,0,0,0.07,0,0,0,0.012,0.042,0.073,0.337,0.141,0,3.305,181,1613,1 0,0,2.22,0,0,0,2.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.22,0,0,0,0,0,0,0,0,0,0,0,0,0.439,0,3,11,24,1 0,0,0,0,1.91,0,0.31,0.31,0,0.31,0.63,1.59,0.63,0,0.63,0,0.63,2.23,3.19,0,1.59,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.63,0,0,0,0,0,0,0,0,0,0.092,0,0.323,0,0,2.15,18,86,1 
0,0.75,0.25,0,0,0,0,0.5,0,0.5,0.25,0.75,0,0,0,1.5,0,1.5,4.26,0,4.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.045,0.18,0,0,0,0,1.023,3,86,1 0,0,0,0,0,0,0,0.74,0,0,0,0,0,0,0,2.23,0,0,1.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.109,0,0,2.263,24,86,1 0,0,1.29,0,0,1.29,0,0,0,0,0,0,0,0,0,2.59,0,0,1.29,0,1.29,1.29,2.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.439,0,1.098,0,0.439,3.571,36,125,1 0,0,0,0,0,0,0,0,0,0,0,0,2.58,0,0,0,0,0,2.58,0,1.72,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.142,0,0,3.851,0,0,13.266,70,199,1 0,0,0,0,0.59,0,0.59,0,0,0,0,0,0,0,0,0,0,0,0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.18,0,0,0,0,0,0,0,0,0.59,0,0,0,0,0.052,0,0.052,0.105,0,2.886,27,127,1 0.1,0.2,1.01,0,0.8,0.8,0.5,0,0.8,0.1,0.3,0.7,0.3,0,1.61,0.1,0,1.11,1.31,0.2,0.7,0,0.6,0.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0.1,0.1,0,0,0,0.11,0,0.49,0.158,0.015,8.55,669,1351,1 0,0.33,0.33,0,1.65,0.33,0.66,0,0,0.16,0.16,0.99,0,0,0,0.82,0.33,0.16,2.81,0,0.99,0,0.49,0.33,0,0,0,0,0,0,0,0,0.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.024,0.074,0.248,0.049,0.049,6.161,350,727,1 0.4,0.26,0.93,0,0,0.8,0,0.8,0.8,1.2,0,0.8,0.4,0,1.46,0,0.26,2.26,2.4,0.53,1.06,0,0.8,0.93,0,0,0,0,0,0,0,0,0,0,0.13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.593,0.217,0.039,11.463,525,1112,1 0.1,0.2,1.01,0,0.8,0.8,0.5,0,0.8,0.1,0.3,0.7,0.3,0,1.61,0.1,0,1.11,1.31,0.2,0.7,0,0.6,0.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0.1,0.1,0,0,0,0.11,0,0.49,0.158,0.015,8.55,669,1351,1 0.15,0.21,0.58,0,0.15,0.15,0.05,0.1,0,0.42,0.1,0.95,0.42,0.05,0.05,0,0,0.36,3.16,0,1.58,0,0.52,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.027,0.184,0,0.406,0.388,0.036,4.829,116,1589,1 0.1,0.1,0.71,0,0.51,0.2,0.2,0.1,1.43,0.82,0.1,0.51,0,0,0,0.1,0,1.02,2.15,0.41,1.84,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0.1,0,0,0,0.1,0,0,0,0,0,0,0,0,0,0,0,0.264,0.974,0.396,0.033,3.163,56,1028,1 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.118,17.481,49,472,1 0,0,0.71,0,0.71,0,0,0,0,0,0,0,0,0,0,0.71,0,1.43,2.15,0,2.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.256,0.128,0,0,1.779,11,105,1 0,0.81,1.47,0,1.3,0,0.98,0.98,0.32,1.79,0,0.81,0,0,0.32,0.49,0.65,0,0.98,0.16,1.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0.16,0,0,0,0,0,0,0,0,0,0.247,0,0.179,0.674,0,2.922,113,640,1 0.1,0.1,0.7,0,0.6,0.2,0.4,0.1,1.41,0.8,0.1,0.6,0,0,0,0.1,0,1.01,2.22,0.4,2.02,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0.1,0,0,0,0.1,0,0,0,0,0,0,0,0,0,0,0,0.265,0.977,0.397,0.033,3.16,56,1046,1 0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.166,19,49,1 0,0,0,0,0.84,1.27,0.42,0,0,0.42,0.42,0.42,0,0,0,0,0,0.42,2.11,0,1.27,0,3.38,0,0,0,0,0,0,0,0,0,0,0,0,0.42,0,0,0,0,0,0,0,0,0.42,0,0,0,0.097,0.097,1.171,0.244,0.39,0,26.405,363,977,1 0.11,0.11,0.47,0,0,0.11,0.23,0,0.35,0.35,0.11,0.94,0.11,0,0,0.11,0,0,3.76,0,1.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.23,0,0,0,0,0.233,0,0.287,0.107,0.035,3.439,64,509,1 0,0.37,0,0,0,0.74,1.12,0,0,0,0.74,1.49,0.74,0,0,0.37,0,1.49,4.49,0,1.87,0,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.223,0.223,0,2.021,10,93,1 0.24,0,0.99,0,0.99,0,0.49,0.99,0,0.24,0,0.49,0,0,0,0.49,0.99,0.74,1.98,0.74,0.99,0,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.179,0,0.847,0.077,0,3.219,114,499,1 0.4,0.14,0.32,0,0.25,0.18,0.03,1.01,0.4,0.4,0.1,0.72,0.65,0.36,0.25,0.54,0.36,0.32,3.05,0.14,1.45,0,0.29,0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0,0,0,0,0.07,0,0,0,0.012,0.042,0.073,0.343,0.141,0,3.328,181,1621,1 0.95,0,0.47,0.95,0,0.95,0,0,0.47,0,0.47,0,0,0,1.42,0.47,0.47,2.38,0,0,0.95,0,0,0.47,0,0,0,0,0,0,0,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0.47,0,0,0,0.791,0,0.169,0.452,0.113,9.64,259,723,1 
0,0,0,0,0,0.63,0,1.58,0.31,0.63,0,0.95,0,0,0,0,0,0,1.26,0,0.63,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.103,0,0.206,0.206,0,4.171,76,292,1 0,0,0,0,0.47,0,1.41,0,0,0,0.47,0.47,0,0,0,0.47,0,1.88,1.41,0.47,1.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.145,0.145,0,4.041,54,194,1 0,0.81,1.47,0,1.3,0,0.98,0.98,0.32,1.79,0,0.81,0,0,0.32,0.49,0.65,0,0.98,0.16,1.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0.16,0,0,0,0,0,0,0,0,0,0.246,0,0.179,0.673,0,2.922,113,640,1 0,0,0,0,0.47,0,1.41,0,0,0,0.47,0.47,0,0,0,0.47,0,1.88,1.41,0.47,1.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.144,0.288,0,3.745,54,191,1 0,0,0,0,0,0,1.96,0,0,0,0,0,0,0,0,0,0,0,1.96,0,1.96,0,0,1.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.334,0,0,3.214,22,45,1 0,0,0.47,0,0.7,0,0.7,0.23,0,0,0,0.47,0,0,0,1.65,0.7,0.23,2.12,0,1.65,0,0,0.23,0,0,0,0,0,0,0,0,0,0,0,0.23,0,0,0,0,0,0,0,0,0.23,0,0,0,0,0.037,0.037,1.362,0.037,0,5.236,111,576,1 0,0,0,0,0.38,0.38,0.38,0.38,0,0,0.38,0,0,0,0,0.38,0,0,3.5,0,1.94,0,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.122,0,0,2.08,12,104,1 0.33,0,1.65,0,0.99,0,0.33,0,0.66,1.32,0,0,0,0,0,0,0,0,0.33,0.99,0.66,0,0,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.859,0.101,0,337.25,1146,1349,1 0.32,0,1.64,0,0.98,0,0.32,0,0.65,1.31,0,0,0,0,0,0,0,0,0.32,0.98,0.65,0,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.858,0.101,0,337.25,1146,1349,1 0,0.2,0.61,0,1.03,0,0.41,0.2,0,0.2,0,0.41,0.2,0,2.06,0.2,0,2.47,2.06,0,1.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0,0,0,0.2,0,0,0,0,0,0,0.238,0.034,0,3.632,32,247,1 0.1,0.2,1.01,0,0.8,0.8,0.5,0,0.8,0.1,0.3,0.7,0.3,0,1.61,0.1,0,1.11,1.31,0.2,0.7,0,0.6,0.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0.1,0.1,0,0,0,0.11,0,0.49,0.158,0.015,8.55,669,1351,1 
0.16,0.24,1.24,0,0.41,0.58,0.49,0.33,0.66,0.66,0.24,1.24,0.16,0,0.66,0.82,0.16,1.57,2.32,0.16,1.16,0,0.91,0.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0.08,0,0,0,0,0.132,0,0.25,0.224,0.013,5.872,581,1339,1 0.16,0.24,1.24,0,0.41,0.58,0.49,0.33,0.66,0.66,0.24,1.24,0.16,0,0.66,0.82,0.16,1.57,2.32,0.16,1.16,0,0.91,0.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0.08,0,0,0,0,0.132,0,0.25,0.224,0.026,5.872,581,1339,1 0.93,0,0.93,0,0.93,0.93,0,0.93,0,0,0,0,0.93,0,0,0,0,0,3.73,0,2.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.343,0,0.343,0.171,0,2.235,15,38,1 0,0,1.63,0,0,0.65,0,0,0,0,0.32,0.32,0,0,0,0.32,0,0,1.96,0.98,2.94,0,1.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0.276,0.83,0.166,3.8,33,228,1 0,0,0,0,0.39,0.39,0.39,0.39,0,0,0.39,0,0,0,0,0.39,0,0,3.52,0,1.96,0,0,0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.124,0,0,1.94,12,97,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.055,3,38,1 0,0.36,0,0,0,0.36,1.47,0,0,0.36,0.36,0.73,0,0,0,0.36,0,1.1,2.2,0,0.73,0,0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.183,0,0.367,0.061,0.122,4,36,264,1 0,0,0.24,0,0.72,0,0,0,0.48,0,0,0.48,0,0,0,0,0,0.96,0.96,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.24,0,0,0,0,0,0,0.328,0,0,1.74,48,141,1 0.17,0,0.17,0.17,1.44,0.34,0.05,0.05,0.05,0.05,0.05,0.51,0.28,0.05,0,0,0.69,0.05,4.14,0.23,1.09,0.17,0.74,0.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.05,0,0.01,0.161,0.03,2.051,51,521,1 0.98,0,0.32,0,0.98,0,0,0,0,0,0,0,0.98,0,0,0.65,0,0,3.6,0,3.93,0,0.32,0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.529,0.411,0,3.964,132,222,1 0.07,0.64,0.64,0,0.35,0.71,0.57,0.14,1.14,0.5,0.07,0.35,0.21,0,1,0.14,0.07,1.14,1.5,0,1.14,0,0.35,0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.07,0,0,0,0,0,0.07,0,0,0,0.08,0,0.309,0.103,0,4.923,117,1295,1 
0,0,0.71,0,0.89,0.17,0.17,0,0,1.24,0.17,0,0,0,0.89,0,0.17,0.35,1.24,0.17,1.42,6.41,1.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.278,0.216,0.836,8.523,58,895,1 0,0,0.38,0,1.15,0.38,0,0.19,0.19,0,0,1.72,0,0,0,0,0.19,0,4.03,0,2.3,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0,0,0,0,0,0,0.031,0,0.349,0,0,5.886,105,312,1 0,0,0.72,0,2.91,0,0.72,0,0,0,0,0,0,0,0,1.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.72,0,0,0,0,0,0,0,0,0,0,0.373,0,0.124,0.124,0,1.781,12,114,1 0,0,0.22,0,0.67,0,0,0,0.44,0,0,0.44,0,0,0,0,0,0.89,0.89,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.22,0,0,0,0,0,0,0.305,0,0,1.895,48,163,1 0,0,0,0,0,0,0,0,0,1.35,0,0,0,0,0,1.35,0,0,1.35,0,0,0,2.7,0,1.35,1.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.836,0,0,3.285,19,92,1 0,0.53,0.53,0,0.8,0,0.26,0.26,0,0.26,0,0.53,0.53,0.53,0,0,0,0,2.15,0.26,0.8,0,0,0,0.26,0.26,0,0,0,0,0,0,0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.04,0,2.779,21,164,1 0,0,0.89,0,1.79,0.44,0,0,0,0,0,0.44,0,0,0,1.34,0,0,2.24,0,4.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.075,0.075,0,0,0,1.968,11,124,1 0,0,0.34,0,0,0,0.34,0,0,0.34,0,0.34,0,0,0,0.68,0.34,0.34,0.68,0.34,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0,0,0,0,0,0.34,0,0,0.393,0,0.224,0.056,0,2.257,17,158,1 0,0,0.52,0,1.58,0,1.05,0,0,1.05,0.52,1.58,0,0,0,0.52,0,0,1.05,0,0.52,0,0,0,0.52,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.25,0,0,0,0,2.82,23,110,1 0,0,0,0,0.43,0.43,0.43,0,0,0.43,0,0.43,0,0,0,0,0,0,0.87,0,0,9.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.065,0,1.047,9.269,89,482,1 0,0,0.67,0,0.27,0.27,0.13,0,0.13,0.27,0,0.4,0.4,0,0,0,0.27,0,0,0,0,0,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.109,0,0.021,0,0,1.728,12,204,1 0.24,0,0.24,0,0,0.48,0.24,0,0,0.48,0.24,0.72,1.2,0,0,1.68,0.72,0,1.92,0,1.68,0,0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.677,0.828,0.15,4.333,52,429,1 
0,0.18,1.1,0,0.73,0.73,0.73,0.09,0.83,0.27,0.27,0.64,0.27,0,1.47,0.09,0,1.2,1.38,0.18,0.64,0,0.55,0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.18,0,0.09,0.09,0,0,0,0.094,0,0.432,0.135,0.013,8.445,696,1478,1 0,0,0,0,0,0,0,0,0,0,0,6.25,0,0,0,0,0,0,12.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,2,1 0,0,0.34,0,0,0,0.34,0,0,0.34,0,0.34,0,0,0,0.68,0.34,0.34,0.68,0.34,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0,0,0,0,0,0.34,0,0,0.393,0,0.225,0.056,0,2.257,17,158,1 0.68,0.17,0.51,0,0.34,0,0.51,0,0,0.51,0,0.51,0.51,0.17,0.17,0.34,0.17,1.02,4.96,0,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0,0,0.063,0.095,0,0.126,0,0,2.285,40,224,1 0,1.15,0.86,0,0.57,0.28,0.57,0,0,0.28,0,0.57,0,0,0,1.72,0,0.86,4.32,0,2.01,0,0,0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.142,0,0.19,0,3.423,6.584,56,349,1 0,0,0,0,0,0,7.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.291,0,0,2.444,8,44,1 0,0,0.75,0,0.75,0,0.5,0.25,0,1.01,0,0.25,1.51,0,0.75,0,0,1.51,2.02,0,1.51,0,0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.5,0,0,0,0,0,0,0,0,0,0.078,0,0.432,0.432,0,2.375,19,247,1 0,0,0.32,0,0.65,0.32,0.32,0.32,0,0,0.65,1.3,0,0,0,0.98,0,0.65,2.61,2.61,3.26,0,0,0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.051,0,0.103,0,0.103,5.85,137,234,1 0,0.67,0.67,0,0.5,0,0.16,0.16,0,0,0,0.33,0.67,0.67,0.5,0,0,0,2.52,0.5,1.51,0,0,0.16,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.079,0.105,0.052,2,32,260,1 0,0.66,0.66,0,0.49,0,0.16,0.16,0,0,0,0.33,0.66,0.49,0.66,0,0,0,2.47,0.49,1.48,0,0,0.16,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0,0,0,0.076,0.101,0.05,2.03,32,264,1 0,0.69,0.69,0,0.51,0,0.17,0.17,0,0,0,0.34,0.69,0.69,0.69,0,0,0,2.59,0.51,1.55,0,0,0.17,0,0,0,0,0,0,0,0,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.079,0.106,0.053,2,32,260,1 
0,0,1.47,0,0,1.1,0.36,0,0,0,0.36,0.36,0,0,0,0.36,0,0,2.21,1.1,2.95,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.124,0,0.435,0.932,0.186,2.932,33,173,1 0.46,0.33,0.2,0,0.13,0.53,0.06,0.2,0,1.13,0.33,0.66,0.93,0,0.2,0,0,0,3.6,0,1.13,0,0.13,0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.012,0.038,0,0.41,0.192,0.115,4.754,268,813,1 0,0,0.56,0,0,0,0,0,0,1.12,0,0,0,0,0,0,0,0,1.69,0,0.56,2.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.493,0.197,0.394,0,0.394,3.537,22,237,1 0,0.67,0.67,0,0.5,0,0.16,0.16,0,0,0,0.33,0.67,0.67,0.5,0,0,0,2.37,0.5,1.52,0,0,0.16,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0.107,0.053,2.015,32,258,1 0,0,0,0,0.76,0.38,0.38,0.38,0,0,0.38,0,0,0,0,0.38,0,0,3.46,0,1.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.38,0,0,0,0,0,0,0.117,0,0,2.061,12,101,1 0,0,0,0,0.93,0,0,0,0,0.93,0,0,0,0,0,0,0,0,2.8,0,2.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.066,73,166,1 0,0,0,0,3.69,0.56,0,0.56,0.56,0.56,0,0,0,0,0,0,3.4,0,0.85,1.13,0.56,0,0,0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.092,0,0.371,0.046,0,14.58,97,452,1 0,0,0,0,0.94,0,0,0,0,0.94,0,0,0,0,0,0,0,0,2.83,0,2.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.533,65,158,1 0.26,0.08,0.26,0,0.53,0.08,0.08,0.08,0.97,0.62,0.08,1.15,0.08,0.7,0.17,0.35,0.08,0,4.16,0.26,2.21,0,0.17,1.5,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0.08,0,0,0,0,0,0,0,0,0,0,0,0.044,0,0.339,0.162,0.014,4.137,74,753,1 0,0.47,0.47,0,1.41,0,0.47,0,0,0.47,0.47,0.94,0,0,0,0.94,0,0,1.88,0,0.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.224,0,0,1.102,6,54,1 0,0.89,0,0,0.89,0,0,0,0,0,0,0,0.89,0,0,0.89,0,0,6.25,2.67,1.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.89,0,0,0,0,0.89,0,0,0,0,0,0,1.129,0.483,0,1.826,10,42,1 0,0,0.18,0,0.55,0.37,0.18,0.18,0,0.18,0,0.18,0,0,0,0,0,0,0.74,0,0.37,0,0.18,0,0,0,0,0,0,0,0,0,0.55,0,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0.081,0,0,0.027,1.625,2.326,11,363,1 
0.17,0,0.17,0,1.45,0.34,0.05,0.05,0.05,0.05,0.05,0.52,0.29,0.05,0,0,0.69,0.05,4.24,0.23,1.04,0,0.75,0.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.051,0,0.02,0.163,0,1.796,12,460,1 0,0,0,5.03,0,0,0,0,0,0,0,0,0,0,0,1.16,0,0,0.77,0,0,7.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.431,0,0,0.215,0,0.539,7.552,43,506,1 0,0,0.44,0,1.32,0,0.44,0,0,1.32,0,0,0,0,0,0,0.44,0,4.42,0,3.09,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.301,0,0.301,0,0,2.787,19,131,1 0.49,0,0.74,0,0.24,0.24,0.24,0.49,0,0,0.49,2.24,0,0,0,0.49,0.99,0.24,3.99,0,1.99,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.035,0,0.315,0.035,0,4.071,60,228,1 0,0,0,0,0.52,0,0,0,0,0,0,0,0,0.52,0,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.52,0,0,0,0,0,0,0,0,0,0.52,0,0,1.56,0,0,0,0.077,0,0.077,0,0,1.388,11,75,1 0,0,0,0,0,1.29,0,0.64,0,0,0,0,0,0,0,0,0,0,3.87,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.179,0.117,0,1.8,12,63,1 0.42,0,0.42,0,2.53,0.42,0.42,0,0,0.84,0.42,0.84,0,0,0,1.68,0,0,2.95,0,2.1,0,2.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.291,0,0.072,1.457,0.072,2.632,12,179,1 0,0,0,0,3.98,0.44,0,0.44,0,0.88,0,0,0,0,0,0.88,0,0,0.88,0.44,1.32,0,0,0,0,0,0,0,0,0,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.257,0,0.064,0.128,0,2.215,23,113,1 0,0,0,0,0.49,0,0.98,0,0.49,0,0,0,0,0,0,0,0,0,1.47,0,2.46,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.49,0,0,0,0,0,0,0.466,0,0,0,0,2.373,12,197,1 0.09,0.19,0.98,0,0.78,0.78,0.49,0,0.78,0.19,0.29,0.68,0.29,0,1.57,0.09,0,1.08,1.28,0.19,0.68,0,0.59,0.09,0.09,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.19,0,0.09,0.09,0,0,0,0.102,0,0.393,0.145,0.014,8.323,669,1415,1 0,0,0,0,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.496,0,0,0,0.248,1.985,3.15,12,63,1 0.17,0,0.17,0,1.47,0.35,0,0.05,0.05,0.05,0.05,0.52,0.29,0.05,0,0,0.7,0,4.17,0.23,1.11,0,0.76,0.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.051,0,0.01,0.165,0,1.79,12,453,1 
0,0,0,0,0,0,0,0,0,0,0,6.06,0,0,0,0,0,0,9.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.4,14,24,1 0.09,0,0.27,0,0.36,0.09,0,0.18,0.09,0,0,0.81,0,0.36,0,0,0,0,1.99,0,3.35,0,0.36,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0.015,0.047,0.031,0.253,0.031,0.031,3.771,69,528,1 0,0,0.55,0,0.22,0.22,0.11,0,0.11,0.22,0,0.33,0.33,0,0,0,0.22,0,0,0,0,0,0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0.035,0.124,0,1.98,14,305,1 0,0,0,0,0.64,0,0.64,0,0,0,0,0.64,0.64,0,0,0,0,0,3.89,1.29,1.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0.64,0,0,0,0,0.115,0,0.921,0.345,0,1.833,11,55,1 0,0.95,0.95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.47,0.47,1.9,0,0,0.47,0,0,0,0,0,0,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.066,0.2,0,0.267,0,0,4.18,45,464,1 0,0,0.54,0,0.21,0.21,0.1,0,0.1,0.21,0,0.21,0.32,0,0,0,0.21,0,0,0,0,0,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.152,0,0.033,0.118,0,1.987,14,306,1 0.09,0,0.27,0,0.36,0.09,0,0.18,0.09,0,0,0.72,0,0.36,0,0,0,0,2,0,3.27,0,0.36,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0.015,0.047,0.031,0.252,0.031,0.031,3.816,69,542,1 0.09,0,0.27,0,0.36,0.09,0,0.18,0.09,0,0,0.72,0,0.36,0,0,0,0,2,0,3.27,0,0.36,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0.015,0.047,0.031,0.252,0.031,0.031,3.816,69,542,1 0,0.54,0,0,0,0,1.08,0,0,0.54,0.54,0.54,0,0,0,2.17,0,0.54,3.26,0,1.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.54,0,0,0,0,0,0,0.347,0,0,1.325,5,53,1 0.32,0,0.32,0,0.98,0.32,0.65,0,0,0.32,0,0.98,0.32,0,0,0,0.65,0,2.61,0,2.28,0,0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.288,0.173,0,3.065,28,141,1 1.18,0.39,0.59,0,0,0.98,0.19,0.19,1.38,0.39,0,0.98,0,0.19,0,0.98,0,0,2.56,0.39,1.38,0,0,1.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.238,0,0.766,0.317,0.026,6.652,76,632,1 0,0.31,0,7.18,0,0,0.31,0.62,0,1.25,0,0,0,0,0,0,0,0.62,0.93,0,0.62,0.31,0,0.31,0,0,0,0,0,0,0,0,0,0,0.62,0,0,0,0,0.31,0,0,0,0,0,0,0,0,0,0.183,0,0.61,0,0.122,9.218,51,507,1 
1.01,0.33,0.5,13.63,0,0.67,0,0.16,1.34,0.33,0,0.67,0,0.16,0,0.5,0,0.16,2.02,0.33,0.84,0,0,1.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.204,0,0.613,0.229,0.051,9.652,151,888,1 0,0.56,0,0,0.56,0,0,0,1.01,0.56,0.11,1.79,0.22,0.11,0,0.11,0.22,0.89,1.79,0,2.8,0,0,0,0.11,0.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.186,0,0.056,0.056,0,2.153,53,532,1 0.72,0,0,0,1.45,0.72,0.72,0,0,1.45,0,0,0,0,0,0,0,0,1.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0.404,0,0.134,0,0,3.066,14,92,1 0.09,0,0.27,0,0.36,0.09,0,0.18,0.09,0,0,0.72,0,0.36,0,0,0,0,2,0,3.36,0,0.36,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0.015,0.047,0.031,0.237,0.031,0.031,3.758,69,530,1 0,0.27,0.82,0,1.37,0,0.82,0,0,0.82,0,0.82,0,0,0,0.82,0.27,0,2.75,0,1.1,0,0.82,0.27,0,0,0,0,0,0,0,0,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.235,0,0.235,0.188,0,5.622,124,298,1 0,0,0.74,0,1.85,0.37,0.37,0,0,0.74,0,0.37,0,0,0,1.11,0,0,1.85,0,3.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.264,0,0,0,0,2.492,12,172,1 0,0,0.68,0,0,0,0,1.36,0,0,0.68,0.68,0,0,0,0,0,0,3.4,0,1.36,0,0.68,0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.238,0.238,0,2.232,19,96,1 0,0.5,0.25,0,0.5,0,1.01,0,0,0.76,0.76,0.5,0.25,0,0,1.26,0.25,0.25,1.77,0,1.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.25,0,0,0,0,0.113,0,0.189,0,0.151,8.972,447,646,1 0.07,0.22,0.82,0,0.52,0,0.07,0,0.67,0.59,0.22,0.82,0.07,0,0.14,0,0.07,0,3.29,0.22,1.87,0,0,0.22,0,0,0,0,0,0,0,0,0,0,0.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.095,0,0.119,0.071,0.167,3.429,74,974,1 0.08,0.08,0.76,0,0.85,1.02,0.25,0.17,0.59,0.08,0.17,0.59,0.17,0,2.21,0.25,0.08,0.93,1.61,0.17,0.42,0,0.85,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0.17,0.08,0.08,0.08,0,0,0,0.065,0,0.408,0.118,0.013,7.55,669,1412,1 0,0,0,0,0,0,0,0,1.28,0,0,0,0,0,0,0,0,0,1.28,0,2.56,0,0,0,0,0,1.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.61,0,0,3.809,28,80,1 
0,0.56,0.28,0,0,0,0.56,0,0,0.56,0.28,0.56,0.28,0,0,1.41,0.28,0,1.97,0,1.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.28,0,0,0,0,0.118,0,0.158,0,0.158,9.25,394,555,1 0,0,0,0.81,0,0,0,0,0,1.63,0,0,0,0,0,0,0,0,2.45,0,0,0,0,2.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.249,0,0,0.124,0,0,3.707,14,152,1 0.34,0,1.7,0,1.02,0,0,0,0.68,1.02,0,0,0,0,0,0,0,0,0.34,1.02,0.68,0,0,0,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.898,0.105,0,443.666,1325,1331,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.17,0,0,0,0,0.443,0,0.221,0.665,0,3.812,15,61,1 0,0.51,0,0,0.51,0.51,1.02,0,0,0,0,0,0,0,0,0,0,0.51,0.51,0,0.51,0,0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.142,0,0.071,1.212,0,7.025,130,281,1 0.48,0.97,0.97,0,0.48,0,0,0.48,0,1.95,0,2.43,0,0.48,0,0.48,0,0,1.95,0,5.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.26,0,1.739,0.173,0.086,56.538,636,735,1 0.34,0,1.7,0,1.02,0,0,0,0.68,1.02,0,0,0,0,0,0,0,0,0.34,1.02,0.68,0,0,0,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.898,0.105,0,443.333,1325,1330,1 0.09,0,0.27,0,0.36,0.09,0,0.18,0.09,0,0,0.72,0,0.36,0,0,0,0,2,0,3.36,0,0.36,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0.015,0.047,0.031,0.237,0.031,0.031,3.758,69,530,1 0.08,0.08,0.76,0,0.85,1.02,0.25,0.17,0.59,0.08,0.17,0.59,0.17,0,2.21,0.25,0.08,0.93,1.61,0.17,0.42,0,0.85,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0.17,0.08,0.08,0.08,0,0,0,0.065,0,0.408,0.118,0.013,7.55,669,1412,1 0,0,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.48,0,2.32,0,0,2.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.733,42,131,1 0,0,0,0,0,0,0,0,1.35,0,0,0,0,0,0,0,0,0,2.7,0,2.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.636,0,0,3.809,28,80,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.57,0,0,0,0,0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.184,0,8.161,31,253,1 
0.43,0.43,0.43,0,0.14,0.1,0.03,0.07,0.54,1.01,0.28,0.79,0.47,1.19,0.03,0.25,0.39,0,3,0,1.3,0,0.39,0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.1,0,0,0,0.011,0.077,0,0.458,2.33,0.113,6.601,266,2370,1 0.23,0.34,0.58,0,0.46,0.11,0.11,0.23,1.04,0.93,0,0.46,0,0.23,0.23,0,0.11,0,3.72,0.46,1.74,0,0,0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.46,0,0,0.075,0,0.131,0.056,0.018,4.47,74,675,1 0,0.67,0.33,0,0.33,0.33,0.33,0.33,0,0,0.67,1,0,0,0,1,0.33,0.33,2.68,2.68,3.02,0,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.053,0,0.16,0,0.107,6.111,139,275,1 0.47,0.95,0.95,0,0.47,0,0,0.47,0,1.9,0,2.38,0,0.95,0,0.47,0,0,1.9,0,5.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.253,0,1.687,0.168,0.084,57.076,634,742,1 0.46,0.93,0.93,0,0.46,0,0,0.46,0,1.86,0,2.33,0,0.46,0,0.46,0,0,1.86,0,5.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.252,0,1.683,0.168,0.084,57.076,634,742,1 0.09,0,0.27,0,0.36,0.09,0,0.18,0.09,0,0,0.73,0,0.36,0,0,0,0,2.01,0,3.38,0,0.36,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0.016,0.048,0.032,0.257,0.032,0.032,3.689,69,535,1 0,0,0.73,0,0,0,0.73,0,0,0,0,0,0,0,0,2.2,0,0,1.47,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.73,0,0,0,0,0,0,0,0,0,0,0,0,0.088,0,0,3.048,29,125,1 0,0,0,1.26,0,0,0.63,0,0,1.26,0,0,0,0,0,0.63,0,0,0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.195,0,0,0.979,0,0.293,8.476,68,356,1 0,0,0.74,0,0,0,0.74,0,0,0,0,0,0,0,0,2.22,0,0,1.48,0,1.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0.088,0,0,3.048,29,125,1 0.71,0,0.11,0,0.47,0.11,0,0.59,0.71,2.86,0.23,0.11,0.23,0,0.47,0.11,0.59,0.47,3.21,0,1.66,0,1.9,1.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0.11,0,0,0,0,0.11,0,0,0,0.072,0.127,0,0.418,0.254,0.018,9.705,148,1514,1 0,0,0.73,0,0,0.73,0.73,0,0,0,0,0,0,0,0,0.73,0,0,1.47,0,0,0,0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.875,0.109,13.129,2.08,12,52,1 
0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.36,0,0,2.04,0,0,0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.125,0.375,0,1.001,0,0.25,4.551,32,132,1 0.5,0.4,0.33,0,0.13,0.03,0.13,0.1,0.54,0.78,0.3,0.71,0.54,1.15,0.03,0.27,0.44,0.03,3.19,0,1.42,0,0.37,0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0,0,0,0.074,0,0.445,0.519,0.119,6.029,136,2213,1 0.67,0.16,1.35,0,1.01,0,0,0,0,0.16,0.16,1.69,0.5,0,0,0.33,0,0.16,5.77,0,1.35,0,0,0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.061,0,0.123,0.278,0.03,3.774,46,268,1 0,0,0,0,2.38,0,0,2.38,2.38,2.38,0,0,0,0,0,0,7.14,0,0,2.38,2.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.396,0,0,22.714,149,159,1 0.18,0,0.09,0,0.36,0.09,0,0.36,0.09,0,0,0.63,0.09,0.36,0,0,0.09,0,1.27,0,3.38,0,0.36,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0.03,0.03,0.015,0,4.192,48,566,1 0,0,0.53,0,0.53,0,0,0.53,0,0,0,1.06,0,0,2.12,0,0.53,0.53,2.65,0,2.65,0,1.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.53,0.53,0,0,0,0,0,0,0,0,0,0.191,0,0.095,0.478,0,5.038,60,131,1 0,0,0.52,0,0.52,0,0,0.52,0,0,0,1.05,0,0,2.11,0,0.52,0.52,2.64,0,2.64,0,1.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.52,0.52,0,0,0,0,0,0,0,0,0,0.19,0,0.095,0.475,0,5.038,60,131,1 0,0,0.51,0,0.51,0,0,0.51,0,0,0,1.02,0,0,2.05,0,0,0.51,2.56,0,2.56,0,1.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.51,0.51,0,0,0,0,0.51,0,0,0,0,0.189,0,0.094,0.473,0,5.038,60,131,1 0,0.1,0.31,0.1,1.05,0.42,0,0.31,0,0.1,0.1,0.84,0,0.1,0,2,0.84,0.21,1.69,0.31,0.31,0,0.52,0.1,0,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0,0.1,0,0,0,0,0,0,0,0,0,0.054,0,0.384,0.182,0.201,8.851,299,1726,1 0,0,1.11,0,0,0,1.11,0,0,0,1.11,1.11,0,0,0,2.22,0,0,3.33,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.138,0,0.138,0.138,0,2.7,11,54,1 0.31,0,0.63,0,0.47,0.47,0.15,0.79,0.15,0.63,0.31,1.42,0,0,0,0,1.58,0,2.05,1.58,3.95,0,0.15,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.15,0,0,0,0,0.15,0,0,0,0,0.076,0,1.3,0.127,0,5.241,97,650,1 
0,0,0,0,0,0,0.91,0,0,0,0,0.91,0,0,0,0.91,0,1.83,4.58,0,1.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.151,0,0.455,0,0,2.842,10,54,1 0.31,0,0.63,0,0.47,0.47,0.15,0.79,0.15,0.63,0.31,1.42,0,0,0,0,1.58,0,2.05,1.58,3.95,0,0.15,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.15,0,0,0,0,0.15,0,0,0,0,0.076,0,1.3,0.127,0,5.241,97,650,1 0,0,0.32,0,0.64,0.64,0.64,0.32,0,0,0,0.32,0.32,0,0,0.32,0.32,0.32,2.27,0,3.24,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0.166,0,0,0.166,0,1.688,19,157,1 0,0,0,0,1.26,0,1.26,0,0,0,0,0,0,0,0,0,0,1.26,0,0,1.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.26,0,0,0,0.431,0,0,0,0.215,1.724,3.529,13,60,1 0.1,0,0.1,0,0.4,0.1,0.1,0,0.2,0.2,0.4,0.5,0,0.6,0,0.91,0.2,0,1.72,4.26,1.72,0,0.4,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.096,0,0.336,0.16,0,6.758,494,1426,1 0,0,0,0,0.09,0,0,0,0,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.032,0,0.016,0,0,24.375,135,3315,1 0,0,0,0,0,0,0,1.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.12,0,0,0,0,0.212,0,0.424,0.424,0,4.1,25,82,1 0.59,0,0,0,0,0,1.18,0.59,0.59,1.18,0,1.18,0,0,0,0,2.95,0,4.14,2.36,2.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.421,0,0,6.275,46,182,1 0,0,0,0,0,0,4.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,9,18,1 0.06,0.65,0.71,0,0.39,0.65,0.52,0.19,1.04,0.52,0.06,0.39,0.32,0,1.17,0.13,0.06,1.1,1.3,0,1.04,0,0.52,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0,0,0,0,0.06,0,0,0,0.085,0,0.287,0.106,0,4.742,117,1342,1 1.23,0,0,0,0,0,0,0,0,0,0,2.46,0,0,0,0,0,0,6.17,0,2.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.843,0,0,2,19,106,1 0,1.5,1.5,0,0.75,0,0,0,0.75,3.75,0,2.25,0,0,1.5,0,1.5,0,0.75,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.224,1.223,0,107.4,412,537,1 
0,1.51,1.51,0,0.75,0,0,0,0.75,3.78,0,2.27,0,0,1.51,0,0.75,0,0.75,0,3.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.249,1.237,0,105.8,404,529,1 0.1,0.2,0.52,0,0.31,1.14,0.2,0.62,1.04,0.52,0.2,0.62,0,0,1.66,0,0.2,1.45,2.08,0.2,1.25,0,1.14,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0,0,0,0,0.095,0.143,0,0.334,0.175,0.031,7.439,689,1287,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.46,0,0,0,0,0,0,1.235,0,0,4.466,10,134,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.17,0,0,0,0,0.441,0,0.441,0.662,0,4.066,17,61,1 0,0,0.31,0,0.31,0.31,0.31,0.31,0,0,0.63,0.95,0,0,0,0.95,0.63,0.31,2.54,2.54,3.5,0,0,0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.051,0,0.102,0,0.102,5.708,138,274,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.04,0,0,1.02,1.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.174,0,0.174,0,0,1.787,7,59,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.96,0,0,0.98,0.98,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.173,0,0.173,0,0,1.787,7,59,1 0,0.39,1.17,0,0.39,0,0,0,0,0.78,0.78,0.78,0,0,0.39,3.51,0,0,1.17,0,1.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.39,0,0,0,0,0.066,0,0.864,0.132,0.066,5.87,44,364,1 0,0,0,0,0,0,0,0,0,0,0,1.42,0,0,0,1.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.058,5,35,1 0.09,0,0.09,0,0.39,0.09,0.09,0,0.19,0.29,0.39,0.48,0,0.58,0,0.87,0.19,0,1.66,4.1,1.66,0,0.39,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.14,0,0.326,0.155,0,6.813,494,1458,1 0.1,0,0.41,0,0.1,0.1,0.1,0.52,0.1,0,0,0.62,0,0.2,0,0,0.1,0,0,0,0,0,0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.048,0,0.016,0.064,0,1.915,29,339,1 0.14,0,0.28,0,0.09,0.24,0.04,0.04,0.24,0,0,0.52,0.04,0.09,0,0,0.14,0,0.24,0.04,0.28,0,0.38,0.14,0,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0.04,0,0,0,0.04,0.09,0,0,0,0,0.061,0,0.007,0.099,0,1.867,14,521,1 
0.36,0.27,0.63,0,0.82,0.36,0,0.36,0.27,4.1,0.09,1.27,0.45,0,1.27,1.18,0.27,2.1,2.73,0,2.83,0,0.09,0.27,0,0,0,0,0,0,0,0,0,0,0,0.09,0,0,0,0.09,0,0,0,0,0,0,0,0,0,0.101,0,0.611,0.014,0,3.707,127,875,1 0,0,1.11,0,1.11,0,0.74,0,0,0,0.74,0.37,0,0,0,0,0.37,0,3.35,2.98,2.61,0,0,0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.368,0.552,0,1.58,7,79,1 0,0,0,0,0,0,1.38,0,0,0,0,0,0,0,0,0,0,0,0,0,1.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.47,0,0,0.235,0.235,1.882,6.266,41,94,1 0,0.37,1.11,0,0.37,0,0,0,0,0.74,0.37,0.74,0,0,0.37,3.34,0,0,0.74,0,1.48,0,0,0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.292,0,0.878,0.175,0.058,5.985,58,425,1 0,0.37,1.11,0,0.37,0,0,0,0,0.74,0.37,0.74,0,0,0.37,3.34,0,0,0.74,0,1.48,0,0,0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.292,0,0.878,0.175,0.058,5.985,58,425,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.733,0,0,2.666,11,24,1 0.17,0,0.51,0,0.17,0,0.17,0.34,0.17,0,0,0,0,0.34,0,0,0,0,0,0,0,0,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.026,0,0,0,0,1.559,10,145,1 0.17,0,0.17,0.17,1.43,0.34,0.05,0.05,0.05,0.05,0.05,0.51,0.28,0.05,0,0,0.69,0.05,4.2,0.23,1.03,0.17,0.74,0.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.05,0,0.02,0.161,0.03,2.093,51,536,1 0,0,0,0,1.46,0,0.83,0,0.2,1.04,0,0.41,0,0,0,0,0,1.46,1.04,0,0.2,0,0.2,0,1.46,1.46,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0,0,0,0,0,0,0,0,0.182,0.401,0.109,0.182,0.146,0,3.791,26,364,1 0,0.37,0,0,0.37,0.37,0.37,0.74,0.37,0.37,0,0.74,0.37,0,0,0.37,1.49,0,3.73,2.61,1.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.74,0,0,0,0,0.37,0,0,0,0,0,0,1.199,0.505,0,3.337,64,267,1 0.45,0,0.67,0,0.22,0.67,0,0.67,0.22,0.22,0.22,1.35,0,0,0,0.45,1.35,0.22,1.57,1.57,3.37,0,0.22,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.111,0,1.599,0.148,0,4.947,102,564,1 
0,0.19,0.57,0,0.09,0.28,0.09,0.09,0.38,0.19,0,0.57,0.57,0,0,0.19,0,0,2.01,0,1.43,0,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.035,0.23,0,0.088,0.124,0,2.405,50,368,1 0,0,0.44,0,0.88,0.22,0,0,0,0,0,0.44,0,0.22,0,0,0,0,0.66,0,0.44,0,0,0,0,0,0,0,0,0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0.44,0,0,0,0,0.037,0.224,0,0,0.187,0.149,3.384,21,264,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.843,0,0,1.666,5,15,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,1 0,0,0,0,0,0,0,2.2,0,0,1.47,0.73,0,0,0,2.94,0,0,5.14,0,0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.6,8,48,1 0.46,0.92,0.92,0,0.46,0,0,0.46,0,1.85,0,2.31,0,0.46,0,0.46,0,0,1.85,0,5.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.249,0,1.663,0.249,0.083,49.866,636,748,1 0.47,0.94,0.94,0,0.47,0,0,0.47,0,1.88,0,2.35,0,0.47,0,0.47,0,0,1.88,0,5.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.253,0,1.687,0.168,0.084,57.23,636,744,1 0.9,0,0.9,0,0.9,0,0.9,0,0,0,0,0,0,0,0,0,0,0,5.45,0,0,2.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.153,0,2.143,0.612,0.459,10.125,54,162,1 0.07,0.22,0.82,0,0.52,0,0.07,0,0.67,0.6,0.22,0.82,0.07,0,0.15,0,0.07,0,3.3,0.22,1.87,0,0,0.22,0,0,0,0,0,0,0,0,0,0,0.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.095,0,0.131,0.071,0.167,3.446,74,972,1 0,0,1.28,0,0,0,0,0,0,0,0,1.28,0,0,0,0,0,0,1.28,0,6.41,0,0,1.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.204,0,5.181,25,57,1 0,0,0.98,0.49,0,0,0.49,0,0,0.98,0,0.98,0,0,0,2.94,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.49,0,0,0,0.156,0,0,0.862,0,0,12.148,272,571,1 0.47,0.95,0.95,0,0.47,0,0,0.47,0,1.91,0,2.39,0,0.95,0,0.47,0,0,1.91,0,5.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.252,0,1.683,0.168,0.084,57.23,636,744,1 
0.47,0.94,0.94,0,0.47,0,0,0.47,0,1.88,0,2.35,0,0.47,0,0.47,0,0,1.88,0,5.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.252,0,1.68,0.168,0.084,57.23,636,744,1 0,0,0.89,0,1.79,0.44,0,0,0,0,0,0.44,0,0,0,1.34,0,0,2.24,0,4.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.074,0,0,0,0,2.25,12,144,1 0,0,0.89,0,1.78,0.44,0,0,0,0,0,0.44,0,0,0,1.33,0,0,2.23,0,4.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.074,0,0,0,0,2.25,12,144,1 0.34,0,1.7,0,1.02,0,0,0,0.68,1.02,0,0,0,0,0,0,0,0,0.34,1.02,0.68,0,0,0,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.899,0.105,0,667,1333,1334,1 0,0,0,0,0,0,0,1.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.08,0,0,0,0,0.194,0,0,0.389,0,3.476,16,73,1 0,0,0,0,0,0,0,0,0.91,0,0,0,0,0,0,0,0,0,0.91,0,2.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.143,0,0.572,0,0,2.9,28,87,1 0,0,0,0,0,0,0,0,0,0,0,1.33,0,0,0,1.33,0,0,2.66,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.326,0,0,2.2,11,44,1 0,0,0,0,0.73,0,0,0,0,0,0,0.36,1.1,0,0,0.36,0,0,3.69,0,0.73,0,0,0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.119,0,0.238,0.059,0,2.93,29,211,1 0,0,0,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0.96,0,0,0,0,0,0,0,0,0,0,0,0,2.24,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0,0,0,0,0,0.18,0.27,0.045,0,0,0,2,14,178,1 0.41,0.41,0.41,0,0.13,0.1,0.03,0.06,0.52,0.94,0.27,0.76,0.45,1.15,0.03,0.24,0.41,0.03,2.99,0,1.25,0,0.34,0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.1,0,0,0,0.011,0.076,0,0.425,0.573,0.112,5.761,131,2224,1 0.31,0,0,0,0.94,0,0,0,0,0,0.31,0,0,0,0,0.31,0,0.31,3.76,0.31,0.62,0,0,0.62,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0,0,0,0,0,0,0,1.25,0,0,0,0,0.055,0,0.055,0.111,0,2.358,32,125,1 0,0,1.13,0,1.13,0.56,0.56,0,0,0.56,0,1.13,0,0,0,3.97,0,0,2.84,0,0.56,0,0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.288,0,0.768,0,0,11.685,296,409,1 
0,0,0.14,0,0.29,0,0,0,0,0,0,0.89,0,0,0,0.14,0,0,0,0,0,0,0.14,0,0,0,0,0,0,0,0,0,0.44,0,0,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0.022,0.067,0,0,0.022,0,2.227,11,294,1 0,0,0.55,0,0,0.55,0,0.27,0,0,0.55,0.27,0.27,0,0,1.1,0.27,0.83,2.49,0,3.04,0,0,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0,0.194,0,0.582,0.291,0.582,2.309,35,291,1 0,0,1.31,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0,0,1.31,0,5.26,0,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.21,0,4.5,25,54,1 0,0,1.31,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0,0,1.31,0,5.26,0,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.21,0,4.5,25,54,1 0,0,0,0,0,0,2.32,0,0,0,0,0,0,0,0,0,0,0,4.65,0,4.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.701,0.35,0,1.434,6,33,1 0,0,2.99,0.42,0.42,0,0.85,0,0,0,0.42,0.42,0,0,0,0.42,0,1.28,3.41,0,1.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.204,0,0.819,0,0,4.84,42,363,1 0,2.08,0,0,3.12,0,1.04,0,0,0,0,2.08,0,0,0,0,0,4.16,2.08,0,1.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.263,0,0,1.428,4,20,1 0.29,0.04,0.04,0,0.14,0.04,0,0.29,0.29,0.94,0.14,0.04,0.14,0,0.19,0.04,0.39,0.19,1.6,0.04,0.79,9.53,0.69,0.47,0,0,0,0,0,0,0,0,0,0,0,0.04,0,0,0.04,0,0,0,0,0,0.19,0,0,0,1.117,0.053,0,0.356,0.09,0.011,12.332,1171,9163,1 0,0,0.76,0,0.76,0,0.5,0.5,0,1.01,0,0.25,1.52,0,0.76,0,0,1.52,2.03,0,1.52,0,0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.5,0,0,0,0,0,0,0,0,0,0.078,0,0.433,0.433,0,2.441,19,249,1 0,0,0,0,0,0,0,2.5,0,0,0,0,0,0,0,0,1.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.25,0,0,0,0,0.182,0,0,0.182,0,3.545,21,78,1 0.31,0,0.63,1.91,0.21,0,0,0,0.42,0.1,0,0.31,0,0,0,0.53,0,0,1.7,0,1.06,0,0,0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0.1,0,0,0,0,0.1,0,0,0,0,0,0,0.169,0,0.358,0.188,0.075,2.847,75,447,1 0,0.75,0.37,0,0,0,0.75,0,0,0.37,0,0.75,0,0,0,1.87,0.37,0,2.63,0,1.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.37,0,0,0,0,0,0,0.242,0,0.145,9.584,332,508,1 
0,1.96,0.98,0,0,0,1.96,0,0,0,0,0,0,0,0,0.98,0,0,0.98,0,0.98,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.98,0,0,0,0,0.168,0,1.011,0,0,2.888,12,52,1 0.51,0.43,0.29,0,0.14,0.03,0,0.18,0.54,0.62,0.29,0.65,0.65,1.2,0.03,0.21,0.43,0.03,3,0,1.35,0,0.51,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0,0,0,0.012,0.08,0,0.454,0.523,0.136,6.59,739,2333,1 0.2,0.4,0.4,0,0,0.4,0,0.2,1.43,0.61,0,0.2,0,0,0,0,0,0,2.66,0.2,2.04,0,0,0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0,0,0.029,0.059,0.447,0.298,0.149,0.029,11.96,376,909,1 0,0,0,0,0,0,0,0,0,0,0,0,0,2.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.03,0,0,0,0,0,0,0,0,1.03,0,0,0,0,0.17,0,0.17,0.341,0,3.809,24,80,1 0,0,1.31,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0,0,1.31,0,5.26,0,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.21,0,4.5,25,54,1 0,0,0,0,0,0.54,1.63,0,0,0,0.54,0.54,0,0,0,0.54,2.73,0.54,4.91,0,2.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.171,0,2.592,32,70,1 0,0,0,0,0,0.65,0,1.3,0.65,0.65,0,0,0,0,0,0.65,2.61,0.65,1.3,3.26,1.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.65,0,0,0,0,0,0,0,0,0,0,0,1.154,0.524,0,3.89,78,249,1 2.35,0,0,0,0,0,2.35,0,2.35,0,0,1.17,0,0,0,1.17,0,0,2.35,0,0,0,2.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.168,0.336,0,4.576,17,119,1 0,0,0.32,0,0.65,0.65,0.32,0.32,0,0,0,0.32,0.32,0,0,0.32,0.32,0,2.28,0,3.25,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0.112,0,0,0.169,0,1.494,10,139,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.25,0,0,0,0,0,8.29,0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.218,0.087,0,0.174,0.174,0.437,9.186,126,937,1 0,0,0.24,0,0.49,0,0,0.24,0,0.24,0.24,0.49,0,0,0,0.99,0.24,0,2.47,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.478,0,0,2.868,44,175,1 0,0,0.32,0,0.64,0.64,0.32,0.64,0,0,0,0.32,0.32,0,0,0.32,0.32,0,2.27,0,3.24,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0.112,0,0,0.168,0,1.494,10,139,1 
0,0,0.32,0,0.64,0.64,0.32,0.64,0,0,0,0.32,0.32,0,0,0.32,0.32,0,2.27,0,3.24,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0.112,0,0,0.168,0,1.494,10,139,1 0.78,0,0.78,0,1.56,0,0,0,0,0,0,0,0,0,0,0.78,0.78,0,1.56,0,2.34,0,0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.792,0.339,0,2.627,22,113,1 0,0,0,0,0,0,0,0.4,0,0,0,0,0,0,0,0,0,0.4,0.4,0,0.4,7.63,0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.133,0,0.601,0.133,1.068,10.578,108,603,1 0.78,0,0.78,0,1.57,0,0,0,0,0,0,0,0,0,0,1.57,0.78,0,1.57,0,2.36,0,0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.682,0.341,0,2.555,22,115,1 0,0.75,0.37,0,1.51,0,0,0.37,0,0.37,0.75,1.89,0,0,0,0.75,0.37,1.13,6.06,0,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.37,0,0,0,0,0,0,0,0,0,0,0,0.415,0.138,0,1.937,11,93,1 0,0,0,0,0.96,0,0.96,0,0,0.48,0.48,0.96,0,0,0,1.44,0,0,3.36,0,0.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.149,0,0.149,0.074,0,2.586,44,150,1 0,0,0,0,0.43,0.43,0.43,0.43,0,0,0,0.43,0,0,0,0,0,0,0.87,0,0,9.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.395,0,1.121,7.983,72,495,1 0.78,0,0.78,0,1.57,0,0,0,0,0,0,0,0,0,0,1.57,0.78,0,1.57,0,2.36,0,0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.682,0.341,0,2.555,22,115,1 0,0.81,1.62,0,2.43,0,0,0,0,0.81,0,0,0,0,0,0.81,0,0.81,2.43,0.81,0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.121,0,0.121,0,0,4.035,43,113,1 0.38,0.46,0.31,0,0.15,0.03,0,0.19,0.58,0.66,0.31,0.66,0.58,1.24,0.03,0.23,0.38,0,3.11,0,1.32,0,0.46,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0,0,0,0.006,0.091,0,0.36,0.524,0.137,6.186,122,2227,1 0.43,0,0.87,0,0.87,0,0.87,0,0,0,0,0.43,0,0,0,0,0.43,0.43,4.38,0,1.31,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0.43,0,0,0,0,0,0,0,0,0,0.43,0,0,0,0,0.145,0,1.021,0.218,0,3.35,59,134,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.23,1.23,0,4.93,0,3.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.23,0,0,0,0,0,0,0.234,0,0,4.176,41,71,1 
0,0,1.58,0,1.58,0,1.58,0,0,0,0,1.58,0,0,0,1.58,0,0,3.17,0,1.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.371,0,0,3.538,21,46,1 0,0,0,0,0,0,0,0,0.9,0,0,0,0,0,0,0,0,0,0.9,0,3.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.139,0,0.559,0,0,2.9,28,87,1 0,0,0,40.13,0,0,0,0,0,0.32,0,0,0,0,0,0,0,0.32,0.98,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.185,0,0.185,0.061,0,10.585,124,434,1 0,0.47,0,0,0.94,0,0.94,0,0,0,0,0.47,0.47,0,0.47,0,0,0,1.89,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0.074,0.074,0,0,0,2.125,11,102,1 0,0,0,0,0,0,0,0,0.91,0,0,0,0,0,0,0,0,0,0.91,0,2.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.141,0,0.565,0,0,2.9,28,87,1 0.05,0.05,0.4,0,0.34,0,0,0,0.57,0.05,0,0.28,0.11,0,0,0.17,0,0,1.15,0.05,0.92,0,0,0.05,0,0,0,0,0.05,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0,0.05,0,0,0,0.019,0.099,0,0.089,0.079,0.009,4.913,95,1312,1 0.05,0.05,0.4,0,0.34,0,0,0,0.57,0.05,0,0.28,0.11,0,0,0.17,0,0,1.04,0.05,0.92,0,0,0.05,0,0,0,0,0.05,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0,0.05,0,0,0,0.019,0.099,0,0.089,0.079,0.009,4.924,95,1310,1 0,0,0,0,0,0,0.86,0,0,0,0,0,0,0,0,0.86,0,1.73,3.47,0,1.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.152,0,0.457,0,0,2.75,10,55,1 0,0,0,0,0,0,0.86,0,0,0,0,0,0,0,0,0.86,0,1.73,3.47,0,1.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.151,0,0.759,0,0,2.75,10,55,1 0.38,0,1.9,0,1.14,0,0,0,0.38,0.38,0,0,0,0,0,0,0,0,0.38,0.76,0,0,0,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.008,0.059,0,295,1177,1180,1 0.38,0,1.9,0,1.14,0,0,0,0.38,0.38,0,0,0,0,0,0,0,0,0.38,0.76,0,0,0,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.005,0.059,0,295,1177,1180,1 0.38,0,1.9,0,1.14,0,0,0,0.38,0.38,0,0,0,0,0,0,0,0,0.38,0.76,0,0,0,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.008,0.059,0,589,1177,1178,1 
0,0,0,0,0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.205,0,0.034,0,0,3.168,15,339,1 0,0,0,0,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.204,0,0.034,0,0,2.588,15,277,1 0,0.65,0,0,0.65,0,1.31,0,0,0,0,0.65,0,0,0.65,0,0,0,3.28,0,0.65,0,0,0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.104,0.522,0,0,1.69,11,71,1 0,0,0,0,0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.205,0,0.034,0,0,3.168,15,339,1 0,0,0,0,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.204,0,0.034,0,0,2.588,15,277,1 0,0,0.3,0,0.3,0.3,0.3,0.3,0,0,0.6,0.9,0,0,0,0.9,0.6,0.3,2.4,2.7,3,0,0,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.048,0,0.144,0,0.096,5.403,139,281,1 0,0,0.32,0,0.64,0.64,0.32,0.32,0,0,0,0.32,0.32,0,0,0.32,0.32,0,2.27,0,3.24,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0.111,0,0,0.166,0,1.494,10,139,1 0.08,0.08,0.76,0,0.85,1.02,0.25,0.17,0.59,0.08,0.17,0.59,0.17,0,2.22,0.25,0.08,0.94,1.62,0.17,0.42,0,0.85,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0.17,0.08,0.08,0.08,0,0,0,0.065,0,0.435,0.118,0.013,7.497,669,1402,1 0,0,1.83,0.91,0,0,0.45,0,0,0.91,0,0,0,0,0,2.75,0,0,1.83,0,0.91,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.134,0,0,2.077,0,0.134,12.176,338,621,1 0,0,0,0,3.09,0,1.03,1.03,0,1.03,0,1.03,0,0,0,2.06,0,0,2.06,0,1.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.03,0,0,0,0,0,0,0,0,0,10.692,65,139,1 0,0,0,0,3.12,0,1.04,1.04,0,1.04,0,1.04,0,0,0,2.08,0,0,2.08,0,1.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.04,0,0,0,0,0,0,0,0,0,10.692,65,139,1 0,0,0,0,3.09,0,1.03,1.03,0,1.03,0,1.03,0,0,0,2.06,0,0,2.06,0,1.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.03,0,0,0,0,0,0,0,0,0,10.692,65,139,1 
0,0.46,0.46,0,2.8,0,0,0,0,0,0,1.4,0,0,0,1.4,0,1.4,1.86,0,0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.93,0,0,0,0,0,0,0,0,0,0,0,0,0.148,0,0.74,0,0,2.673,21,139,1 0,0,0,0,0,0,1.21,0,0,1.21,0,1.21,1.21,0,0,1.21,0,0,4.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.676,15,57,1 0,0,1.31,0,2.63,0,0,0.65,0,0,0,0.65,0,0,0,1.97,0,0,1.31,0,2.63,0,0,0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.65,0,0,0,0,0.198,0,0.198,0.099,0,2.195,12,101,1 0,0,0,0,0,0,0,0,0,0,0.26,0.26,0,0,0,0,0,0,1.05,1.32,0.26,10.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.306,0.043,0.087,0.175,0.043,0.35,8.271,69,885,1 0.74,0,0,0,0,0,0.74,0,0,1.49,0.74,0.74,0,0,0,0.74,3.73,0,4.47,0,0.74,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.355,0.118,0.237,2.095,7,44,1 0,0,1.29,0,0,0,0,0,0,0,0,1.29,0,0,0,0,0,0,1.29,0,5.19,0,0,1.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.208,0,4.818,25,53,1 0,0.02,0.05,0,0.02,0,0,0.05,0,0.35,0,0.02,0,0,0,0.05,0.1,0.38,0.07,0.2,0.17,0,0,0,0.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.004,0,0.112,0.018,0.018,3.922,489,3271,1 0,0,1.33,0,0.66,0,0,0,0,0,0,0,0,0,0,0,0,0.66,0.66,0,0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.66,0,0,0,0,0,0,0.355,0.118,0,2.315,12,132,1 0,0,1.35,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0.67,0.67,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.67,0,0,0,0,0,0,0.356,0.118,0,2.315,12,132,1 0,0,0,0,0.53,0,1.07,0,0,0.53,0,0,0,0,0,0,0,1.61,1.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.335,0,0,0,0,2.333,14,119,1 0,0,0,0,0,0,0,0,0,0.27,0,0.27,0,0,0,0.27,0,0,1.09,1.36,0.27,10.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0.333,0.047,0.095,0.142,0.047,0.381,2.353,13,273,1 0,0,0,0,0.53,0,1.07,0,0,0.53,0,0,0,0,0,0,0,1.61,1.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.336,0,0,0,0,2.333,14,119,1 
1.23,0,0,0,0,0.46,0,0.15,0,0.61,0,0.3,1.07,0,0,0,0,0,1.84,0,0.92,0,0.76,1.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.057,0,0.52,0.289,0.144,4.33,84,446,1 0,0,0.71,0,0.23,0,0,0,0.23,0.23,0.23,1.9,0,0,0,0.23,0,0,3.81,0.23,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.112,0,1.046,0.037,0,4.022,97,543,1 0.95,0,0.23,0,0.23,0.23,0.23,0,0,0.23,0,0.23,0,0,0,0,0.71,0,3.8,0,1.9,0,0,0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.238,0,0,3.184,38,242,1 0,0,0.71,0,0.23,0,0,0,0.23,0.23,0.23,1.9,0,0,0,0.23,0,0,3.81,0.23,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.112,0,1.046,0.037,0,4.022,97,543,1 0,0,0.57,0,0.28,0,0,0.57,0,0,0,0.28,0,0,0,0.57,1.15,0,0.86,2.31,2.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.047,0,1.147,0.191,0.191,11.735,489,622,1 0.9,0,0,0,0,0,0.9,0,0,1.8,0.9,0.9,0,0,0,0.9,4.5,0,5.4,0,0.9,0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.391,0.13,0.26,2.15,7,43,1 0.74,0,0,0,0,0,0.74,0,0,1.49,0.74,0.74,0,0,0,0.74,3.73,0,4.47,0,0.74,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.355,0.118,0.236,2.15,7,43,1 0,0.16,0,0,0.16,0.16,0,1.14,1.3,0.32,0.32,0.48,0,0,0,1.95,0,0.32,0.81,0.48,1.46,2.93,0.16,0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.673,0.35,0.053,0.134,0.107,0.026,5.216,57,1038,1 0.27,0.27,0.27,0,0,0,0,0.54,0,0.27,0,0.27,0,0,0,1.08,0,0.27,1.08,0,0.27,0,0.27,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.229,0,0.458,0.504,0,2.934,64,578,1 0,0,0.85,0,0.85,0.21,0.21,0,0,1.5,0,0,0,0,1.07,0,0.21,0,0.64,0.21,1.71,7.08,1.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.465,0.271,0.969,9.052,58,869,1 0.64,0,0.64,0,1.29,0,0.64,0,0,0,0,0.64,0,0,0,0.64,0.64,0,1.29,0,3.22,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.104,0,0.522,0.313,0,2.511,22,113,1 0,0,0,0,0,0.54,1.63,0,0,0,0.54,0.54,0,0,0,0.54,2.73,0.54,4.91,0,2.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.171,0,2.592,32,70,1 
0,0,0,0,0,0,0,0,0.52,0.52,0,2.08,0,0,0,0,0,0,4.16,0,4.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.073,0.073,0,0.367,0.073,0.073,2.34,27,103,1 0,1.32,0.56,0,0,0.94,0,0.18,0.37,0.75,0,2.08,0,0,0,0,0.37,0,2.65,0,0.94,0,0,0.18,0,0,0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.084,0,0.31,0.112,0,2.548,34,237,1 0,0,1.07,0,0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,3.22,0.53,1.07,8.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.265,0.088,1.151,11.066,67,332,1 0,0,1.31,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0,0,1.31,0,5.26,0,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.213,0,4.818,25,53,1 0,0,0,0,0,0.84,0,0,0,0,0,0,0,0,0,0,0,0,3.36,0,0,12.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.587,0,1.468,7,35,273,1 1.36,0,0.68,0,0,0,0.68,0,0,0,0,4.1,0.68,0,0,1.36,0,0,2.73,0,2.05,0,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.68,0,0,0,0,0,0,1.706,0.2,0,4.281,38,137,1 0,1.16,0.38,0,0,0,1.16,0,0,0.77,0.38,0.77,0,0,0,1.93,0,0.38,2.32,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.38,0,0,0,0,0,0,0,0.38,0,0,0,0,0,0,0.198,0,0.148,9.266,332,556,1 0,0,0.27,0,0.27,0.27,0.27,0.27,0,0,0.54,0.82,0,0,0,0.82,0.54,1.09,2.46,2.46,2.73,0,0,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.043,0,0.13,0,0.086,4.6,139,276,1 0,0,0.94,0,0.31,0,0,0,0.31,0,0,0.62,0,0,0,1.25,0.62,0,3.14,0,1.25,0,0.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0,0,0.146,0.048,0.39,0.438,0.097,3.322,61,319,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.09,1.09,0,3.29,0,2.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.09,0,0,0,0,0.371,0,0.371,0,0,3.096,28,96,1 0.1,0.2,1.01,0,0.8,0.8,0.5,0,0.8,0.1,0.3,0.7,0.3,0,1.61,0.1,0,1.11,1.31,0.2,0.7,0,0.6,0.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0.1,0.1,0,0,0,0.111,0,0.491,0.158,0.015,8.55,669,1351,1 0,0,0.81,0,0.81,0,0,0,0,0,0,0,0,0,0,0,0.81,0,3.27,0,0.81,1.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.26,0,0.651,13.5,86,189,1 
1.24,0,0,0,0,0,0,0,0,0.62,0,1.24,0,0,0,0.62,0,0,1.86,0,3.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.24,0,0,0,0,0.1,0,1.105,0.201,0,12.904,155,271,1 0,0,0,0,0,0,0,1.25,0,0.41,0,0,0,0,0,0.41,0,1.67,0.41,0,0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0.125,0,0.312,0.062,0,1.477,8,65,1 0,0.41,0.41,0,2.06,0,1.65,0.82,0,0,0,0,0,0,0,2.47,0,0.82,2.47,0,0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.41,0,0,0,0,0.265,0,0.199,0,0,15.892,226,445,1 0,0.41,0.41,0,2.06,0,1.65,0.82,0,0,0,0,0,0,0,2.47,0,0.82,2.47,0,0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.41,0,0,0,0,0.265,0,0.199,0,0,15.892,226,445,1 0,0.41,0.41,0,2.06,0,1.65,0.82,0,0,0,0,0,0,0,2.47,0,0.82,2.47,0,0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.41,0,0,0,0,0.265,0,0.199,0,0,15.892,226,445,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.29,0,0,3.89,0,3.89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.29,0,0,0,0,0,0,0.236,0,0,7.181,41,79,1 0.45,0.9,0.9,0,0.45,0,0,0.45,0,1.8,0,2.25,0,0.45,0,0.45,0,0,1.8,0,5.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.243,0,1.626,0.162,0.081,69.727,706,767,1 0.45,0.9,0.9,0,0.45,0,0,0.45,0,1.8,0,2.26,0,0.45,0,0.45,0,0,1.8,0,4.97,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.244,0,1.631,0.244,0.081,64.416,708,773,1 0.45,0.91,0.91,0,0.45,0,0,0.45,0,1.83,0,2.29,0,0.91,0,0.45,0,0,1.83,0,5.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.245,0,1.633,0.245,0.081,64.416,708,773,1 0.82,0,0.82,0,0.41,0,0.41,0.82,0.41,1.23,1.65,0.41,0,0,0,2.47,1.65,0,1.23,1.23,2.06,0,0,0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.53,0.132,0.132,6.404,76,301,1 0.09,0.49,0.59,0,0.39,0.19,0,0,0.09,0.39,0,1.57,0.19,0,0,0,0.09,0,3.75,0.09,1.08,0,0,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.768,0.037,0,5.848,1.313,0,5.96,54,757,1 0,0,1.31,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0,0,1.31,0,5.26,0,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.212,0,4.818,25,53,1 
0,0,1.31,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0,0,1.31,0,5.26,0,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.212,0,4.818,25,53,1 0,0,0.6,0,0,0.6,0,0,0.6,0,0,1.8,0,0,0,0.3,0,0,2.7,0,1.2,0,0,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.094,0.047,0.189,0.141,0,1.932,31,201,1 0.47,0,0.94,0,0.94,0,0.94,0,0,0,0,0.47,0,0,0,0,0.47,0,4.24,0,0.94,0,1.41,0,0,0,0,0,0,0,0,0,0,0,0.47,0,0,0,0,0,0,0,0,0,0.47,0,0,0,0,0.073,0,1.254,0.221,0,5.918,91,219,1 0,0.72,1.81,0,0,0.36,0,0.36,0.72,1.08,0.36,0.72,0,0.36,0,0.36,0.36,0.36,1.08,0,2.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.08,0,0,0,0,0,0,0,0.36,0,0,0,0,0.334,0,1.203,0.467,0.066,18.4,393,736,1 1.47,0,0,0,0,0,0,0,0,1.47,0,0,1.47,0,0,7.35,0,0,2.94,0,1.47,0,0,4.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.272,0.254,0,6.294,63,107,1 1.47,0,0,0,0,0,0,0,0,1.47,0,0,1.47,0,0,7.35,0,0,2.94,0,1.47,0,0,4.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.272,0.254,0,6.055,63,109,1 0,0,0.51,0,0.51,0.51,0.51,0,0,0,0,0,0,0,0,0,1.03,1.03,3.1,0,1.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.09,0,0.18,0,0,1.773,17,94,1 0,0,0.5,0,0.5,0.5,0.5,0,0,0,0,0,0,0,0,0,1.01,1.01,3.04,0,1.01,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.089,0,0.178,0,0,1.792,17,95,1 0,0.74,1.85,0,0,0.37,0,0.37,0.74,1.11,0.37,0.74,0,0.37,0,0.37,0.37,0.37,1.48,0,2.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.11,0,0,0,0,0,0,0,0.37,0,0,0,0,0.336,0,1.211,0.471,0.067,18.4,393,736,1 0.09,0.49,0.59,0,0.39,0.19,0,0,0.09,0.39,0,1.57,0.19,0,0,0,0.09,0,3.75,0.09,1.08,0,0,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.767,0.037,0,5.84,1.311,0,5.96,54,757,1 0,0.72,1.81,0,0,0.36,0,0.36,0.72,1.08,0.36,0.72,0,0.36,0,0.36,0.36,0.36,1.08,0,2.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.08,0,0,0,0,0,0,0,0.36,0,0,0,0,0.334,0,1.203,0.467,0.066,18.4,393,736,1 0,0,0.15,0,0.9,0.15,0,0,0.9,0,0,0.75,0.15,0,0,0,0.3,0,2.26,0,0.9,0,0.15,0.3,0,0,0,0,0,0,0,0,0,0,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.1,0,0.376,0.05,0.025,2.271,49,427,1 
0.15,0.15,0.3,0,0.75,0,0,0,0,0,0,0.15,0.15,0,0,0,0,0.75,1.51,0,0.45,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.058,0.145,0,0.087,0,0,1.54,18,208,1 0.12,0.19,0.7,0,0.44,0,0.06,0,0.57,0.5,0.25,0.95,0.06,0,0.19,0,0.06,0,3.82,0.19,2.48,0,0,0.19,0,0,0,0,0,0,0,0,0,0,0.06,0,0,0,0,0,0,0.06,0,0,0,0,0,0,0,0.102,0,0.133,0.041,0.143,3.29,74,1030,1 0,0,0,0,0,0,0,0,0,0,0,0,0.91,0,0,0,0,0,0.91,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.143,0,0.573,0,0,2.884,18,75,1 0.05,0.05,0.4,0,0.34,0,0,0,0.57,0.05,0,0.28,0.11,0,0,0.17,0,0,1.09,0.05,0.92,0,0,0.05,0,0,0,0,0.05,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0,0.05,0,0,0,0.019,0.099,0,0.099,0.079,0.009,4.906,95,1310,1 0,0,0,0,0,0,0,0,0,1.19,0,0,0,0,0,1.19,0,0,3.57,0,3.57,0,0,0,1.19,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.19,0,0,0,0,0,0,0.204,0,0,8.636,41,95,1 0,0,0.44,0,1.34,0,0.44,0,0,0,0,0,0,0,0,0.44,0.89,0,2.24,0,1.34,0,0.44,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.34,0,0,0,0,0.068,0,0.482,0.896,0,6.77,78,325,1 0,0,0,0,0.77,0,0,0,0,0,0,1.55,0,0,0,0.77,0.77,0,2.32,0,2.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.77,0,0,0,0,0.268,0,0.672,0.403,0,2.794,29,109,1 0.28,0.14,0.14,0,0,0,0.14,0,0.42,0,0.84,0.98,0,0,0,0,0.28,0,1.82,2.53,1.12,10.82,0.84,0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.137,0.045,0.342,1.233,14.88,79,1622,1 0.1,0.2,1.01,0,0.8,0.8,0.5,0,0.8,0.1,0.3,0.7,0.3,0,1.61,0.1,0,1.11,1.31,0.2,0.7,0,0.6,0.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0.1,0.1,0,0,0,0.11,0,0.49,0.158,0.015,8.55,669,1351,1 0,0,0,0,0.8,0,0,0,0,0,0,1.6,0,0,0,0.8,0.8,0,2.4,0,2.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.8,0,0,0,0,0.274,0,0.823,0,0,2.815,29,107,1 1.63,0,1.63,0,0,0,0,0,1.63,0,0,0,0,0,0,1.63,0,0,3.27,0,3.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.8,0.266,0,2,16,36,1 0.69,0,0,0,1.39,0,0.69,0,0,0,0,0.69,0,0,0,0,0,0,3.49,0,1.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.196,0,0,6.1,57,183,1 
0,0,0,0,0,0,0,1.04,0,0,0.52,1.04,0.52,0,0,1.04,0,0,3.66,1.04,1.04,0,1.57,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.539,0.269,0,5.787,47,272,1 0,0,0.32,0,0.65,0.65,0.32,0.32,0,0,0,0.32,0.32,0,0,0.32,0.32,0,2.28,0,3.25,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.65,0,0,0,0,0.111,0,0,0.166,0,1.494,10,139,1 0,0,0.32,0,0.65,0.65,0.32,0.32,0,0,0,0.32,0.32,0,0,0.32,0.32,0,2.28,0,3.25,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.65,0,0,0,0,0.111,0,0,0.166,0,1.494,10,139,1 0.08,0.08,0.76,0,0.85,1.02,0.25,0.17,0.59,0.08,0.17,0.59,0.17,0,2.21,0.25,0.08,0.93,1.62,0.17,0.42,0,0.85,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0.17,0.08,0.08,0.08,0,0,0,0.065,0,0.486,0.118,0.013,7.561,669,1414,1 0.17,0,0.17,0,1.52,0.35,0.05,0.05,0.05,0.05,0.05,0.52,0.29,0.05,0,0,0.64,0.05,4.21,0.23,1.11,0,0.82,0.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.05,0,0.05,0,0,0,0.01,0.052,0,0.01,0.167,0,1.818,13,462,1 0.27,0,0.27,0,0,0,0,0,0,0,0,1.62,0.27,0,0,0,0.27,0,4.87,0,0.81,0.27,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0.874,0.051,0.051,5.582,61,374,1 0,1.32,0,0,0,0.44,0,0,1.32,0,0,2.65,0,0.44,0,0.44,0,0.44,3.53,0,1.76,0,0,1.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.066,0,0.663,0.132,0.066,8.666,123,442,1 0,0,0,0,2.29,0,0,0,0,0,0,1.14,0,0,0,4.59,0,0,3.44,0,1.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.14,0,0,0,0,0.646,0,1.939,0,0,8.461,30,110,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.57,0,3.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.606,0.606,0,3.111,10,28,1 0.54,0,0,0,2.16,0,0,0.54,0,1.08,0,0,0,0,0,0,1.08,0,2.7,0,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0.051,0,0,0,0,1.49,19,82,1 0,0,0,0,0,0,0,0,0.91,0,0,0,0,0,0,0,0,0,0.91,0,2.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.141,0,0.567,0,0,2.9,28,87,1 1.06,0,0.7,0,1.06,0,0.7,0,0,0.7,0,0.7,0,0,0.35,0.7,0,0,6,0,1.41,0,0.35,0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.35,0,0,0,0,0,0,0.381,0.063,0,2.021,12,95,1 
0.68,0.68,0.68,0,0.68,0,2.73,0,0,0.68,0,2.05,0,0,0,0,0,0.68,4.1,0,2.73,0,0,0.68,0,0,0,0,0,0,0,0,0,0,0,0,0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.244,0,0,2.472,9,89,1 0,0,0,9.16,0.27,0,0.55,0.27,0.27,0.27,0,0.27,0,0,0,0,1.11,0,0.55,0.27,0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.387,0,0,0.301,0,0.043,19.482,694,1130,1 0,0,0.09,0,0.58,0.29,0.09,0,0.38,0,0.29,0.48,0.38,0,0,0,0.19,0,0.77,0,0.67,0,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0.09,0,0,0,0,0,0.09,0,0.09,0,0,0,0.063,0.047,0,0.559,0.047,0.031,1.694,23,432,1 0,0,1.61,0,0,0,1.61,0,0,0,0,0,0,0,0,1.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.61,0,0,0,0,0.268,0,0.804,0,0,2.466,17,37,1 0,0,0,0,0.97,0,0.97,0,0,0,0,0,0,0,0,0.97,0,0,0,0,0.97,0,0,0,0.97,0.97,0,0,0,0,0,0,0,0,0,0,0.97,0,0,0,0,0,0,0,0,0,0,0,0.503,0.167,0,0,0.167,1.342,3.5,13,77,1 0,0.56,0.56,0,1.12,0.56,2.25,0,0,0.56,0,0.56,0,0,0,0,0,0.56,3.38,0,1.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.56,0,0,0,0,0.083,0,0.503,0,0.083,16.304,148,375,1 0.1,0.1,0.71,0,0.61,0.3,0.4,0.1,1.42,0.81,0.1,0.5,0,0,0,0.1,0,1.11,2.23,0.5,2.03,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0.1,0,0,0,0.1,0,0,0,0,0,0,0,0,0,0,0,0.264,0.976,0.397,0.033,3.186,56,1042,1 0.8,0,0.8,0,1.61,0,0,0,0,0,0,0,0,0,0,0.8,0.8,0,1.61,0,2.41,0,0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.562,0.36,0,2.638,22,124,1 0,0,1.31,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0,0,1.31,0,5.26,0,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.212,0,4.818,25,53,1 0,1.47,0,0,0,0,1.47,0,0,0,0,0,0,0,0,1.47,1.47,0,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0.338,0,0.508,0.169,0.169,10.625,140,170,1 0.05,0.05,0.4,0,0.34,0,0,0,0.57,0.05,0,0.28,0.11,0,0,0.17,0,0,1.04,0.05,0.92,0,0,0.05,0,0,0,0,0.05,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0,0.05,0,0,0,0.019,0.099,0,0.099,0.079,0.009,4.881,95,1313,1 0,0.4,0,0,0.81,0,0.81,0,0,0.4,0,0,0,0,0,0,0,0,1.22,0,0.81,0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.123,0.246,0.061,0,0.123,0.185,4.793,23,302,1 
0.29,0.58,0.58,0,0.87,0,0.58,0,0,1.16,0,0.87,0,0,0,0,0.87,0,2.62,0,1.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.054,0,0,0.271,0,1.67,14,162,1 0.18,0,0.18,0,1.57,0.36,0.06,0.06,0.06,0.06,0.06,0.54,0.3,0.06,0,0,0.72,0.06,4.41,0.24,1.08,0,0.84,0.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0,0,0,0,0.01,0.052,0,0.01,0.169,0,1.766,12,447,1 0.06,0,0.24,0,0.1,0,0,0.17,0.17,0.17,0,0.1,0.03,0,0,0.03,0,0,0.45,0,0.2,0,0.03,0,1.18,1.22,0,0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0,0.06,0,0.45,0,0,0,0.179,0.305,0.029,0.029,0.011,0.023,2.813,26,2510,1 1.24,0.41,1.24,0,0,0,0,0,0,0,0,0.41,0,0,0,0.41,0,0.82,3.73,0,1.24,0,0,0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.41,0,0,0,0,0.068,0,0.481,0.549,0,3.166,19,114,1 0.08,0,0.32,4.31,0.08,0.16,0.08,0.08,0,0,0.08,0.24,0.32,0,0,0.08,0,0.32,1.87,0,0.57,0,0.16,0.24,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0,0,0,0.16,0,0,0,0.344,0.068,0,0.55,0.082,0.151,15.547,339,2923,1 0.1,0.1,0.71,0,0.6,0.3,0.4,0.1,1.42,0.81,0.1,0.5,0,0,0,0.1,0,1.01,2.23,0.5,2.03,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0.1,0,0,0,0.1,0,0,0,0,0,0,0,0,0,0,0,0.264,0.977,0.397,0.033,3.166,56,1045,1 0,0,0,0,0,0,0.45,0,0,0.45,0.22,0.22,0,0,0.22,0.22,0,0.22,1.58,0,1.13,13.34,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.037,0.15,1.584,13.936,114,1324,1 0,0,1.31,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0,0,1.31,0,5.26,0,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.212,0,4.818,25,53,1 0,0,0,0,0,0.4,0,0,0,0.81,0,0,0,0,0,0.4,0,0,1.22,0,0,0,0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.198,0.529,0.33,0.198,0,0,5.019,18,261,1 0,0,0,0,0.38,0.38,0.38,0.38,0,0,0.38,0,0,0,0,0.38,0,0,3.43,0,2.29,0,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.38,0,0,0,0,0,0,0.121,0,0,2.08,12,104,1 0,0,0,0,0,0,1.78,0,0,1.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.544,0,0,1.777,5,16,1 
0,0.06,0.2,0,0.61,0.13,0,0,0.75,0,0.27,0.75,0.27,0,0,0,0.2,0.13,1.16,0,1.23,0,0.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0.06,0,0,0,0.034,0.057,0,0.472,0.092,0.023,2.086,104,703,1 0,1.36,0,0,0,0,1.36,0,0,0,0,0,0,0,0,1.36,1.36,0,0,0,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.17,0,0.17,0.17,0.17,9.411,128,160,1 0,0,0,0,0,2.3,0,0,0,0,0,0.76,0.76,0,0,0,0,0,2.3,0,1.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.868,0,2.894,0.868,0,5.607,25,157,1 1.63,0,0,0,2.45,0,0,0,0,0,0,0,0,0,0,0.81,0,0,3.27,0,1.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.258,0,0,2.826,20,65,1 0.16,0,0.67,0,0.33,0.16,0.33,0.84,0.16,0.5,0.33,1.51,0,0,0,0,1.68,0.33,2.02,1.68,3.87,0,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0,0,0,0,0,0,0.224,0,1.151,0.056,0,4.928,63,621,1 0.09,0.49,0.59,0,0.39,0.19,0,0,0.09,0.39,0,1.58,0.19,0,0,0,0.09,0,3.75,0,1.08,0,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.778,0.037,0,5.213,0.979,0,5.781,54,740,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.597,0,0,4.153,26,54,1 0.17,0,0.17,0,1.45,0.34,0.05,0.05,0.05,0.05,0.05,0.52,0.29,0.05,0,0,0.69,0.05,4.25,0.23,1.04,0,0.75,0.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.05,0,0.01,0.162,0,1.794,12,454,1 0,0,0.27,0,0.54,0.27,0.27,1.08,0,0.81,0,0,0,0,0,0,0,0,2.45,0,1.36,0,0,0.27,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0.04,0,0.489,0.04,0,2.121,19,227,1 1.61,0,0,0,3.22,0,0,0,0,0,0,0,0,0,0,0.8,0,0,3.22,0,1.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.511,0,0,2.909,20,64,1 0,0.55,0.55,0,1.1,0.55,2.2,0,0,0.55,0,0.55,0,0,0,0,0,0.55,3.31,0,1.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.55,0,0,0,0,0.165,0,0.496,0,0.082,16.782,148,386,1 0,0.55,0.55,0,1.1,0.55,2.2,0,0,0.55,0,0.55,0,0,0,0,0,0.55,3.31,0,1.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.55,0,0,0,0,0.165,0,0.496,0,0.082,16.826,148,387,1 
0,0,0.31,0,0.63,0.63,0.31,0.31,0,0,0,0.31,0.31,0,0,0.31,0.31,0,2.55,0,3.19,0,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0,0,0.108,0,0,0.162,0.054,1.515,10,144,1 1.04,0,0.69,0,1.04,0,0.69,0,0,0.69,0,0.69,0,0,0.34,0.69,0,0,5.9,0,1.38,0,0.34,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0,0,0,0,0,0.379,0.063,0,2.042,12,96,1 0,1.56,0,0,0,0,1.56,0,0,1.56,0,0,0,0,0,1.56,1.56,0,0,0,1.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.56,0,0,0,0,0,0,0,0,0,0,0,0,0.183,0,0,0.183,0.183,11.714,140,164,1 0,0.54,0.54,0,1.08,0.54,2.16,0,0,0.54,0,0.54,0,0,0,0,0,0.54,3.24,0,1.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.54,0,0,0,0,0.162,0,0.488,0,0.081,15.16,148,379,1 0.14,0,0.57,0,0.28,0.14,0.28,0.28,0,0.43,0.14,0.28,0,0,0,1.88,0.14,0.14,1.01,0,1.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.14,0,0,0,0,0,0,0,0,0,0.07,0,0.21,0,0,2.293,32,477,1 0,0,0.44,0,0.22,0.22,0,0,0.66,0.44,0.22,0.88,0,0,0,0.22,0,0,1.32,0,1.1,0.22,0.22,0.22,0,0,0,0,0,0,0,0,0,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0.065,0.261,0,0.13,0.196,0,7.4,75,629,1 0,0,0.29,0,0.88,0.14,0,0,0.88,0,0,0.73,0.14,0,0,0,0.29,0,2.2,0,0.88,0,0.14,0.29,0,0,0,0,0,0,0,0,0,0,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.097,0,0.388,0.048,0.024,2.264,49,428,1 0.07,0.37,0.81,0,0.51,0.29,0.07,0,0.07,0.37,0.07,1.48,0.14,0,0.07,0,0.14,0.44,3.55,0,1.85,0,0,0.07,0,0,0,0,0,0,0,0,0,0,0,0,0.14,0,0,0,0,0,0,0,0,0,0,0,0.052,0.073,0,0.167,0.167,0.01,3.412,44,795,1 0,0,0.31,0,0.63,0.63,0.31,0.31,0,0,0,0.31,0.31,0,0,0.31,0.31,0,2.55,0,3.19,0,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0,0,0.108,0,0,0.163,0.054,1.515,10,144,1 0,0,0.31,0,0.63,0.63,0.31,0.31,0,0,0,0.31,0.31,0,0,0.31,0.31,0,2.55,0,3.19,0,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0,0,0.108,0,0,0.163,0.054,1.515,10,144,1 0.17,0,0.17,0,1.52,0.35,0.05,0.05,0.05,0.05,0.05,0.52,0.29,0.05,0,0,0.64,0.05,4.21,0.23,1.11,0,0.81,0.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.05,0,0,0,0,0,0.01,0.052,0,0.01,0.167,0,1.838,13,467,1 
0.48,0,0.97,0,0.48,0,0.97,0,0,0,0,0.48,0,0,0,0,0.48,0.48,4.36,0,1.45,0,1.45,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0,0,1.085,0.232,0.077,5.166,58,186,1 1.24,0.41,1.24,0,0,0,0,0,0,0,0,0.41,0,0,0,0.41,0,0.82,3.73,0,1.24,0,0,0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.41,0,0,0,0,0.068,0,0.48,0.549,0,3.166,19,114,1 0.34,0.42,0.25,0,0.08,0.42,0.08,0.25,0.08,1.62,0.34,0.51,0.94,0,0.17,0.08,0,0,3,0,0.94,0,0.34,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.17,0,0,0,0,0.065,0,0.261,0.294,0.065,3.282,62,535,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,8.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.233,0,0.233,9.5,84,323,1 0,1.63,0,0,0,0,1.63,0,0,1.63,0,0,0,0,0,0,0,0,1.63,0,3.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.2,2,12,1 0.17,0,0.08,0,0.43,0.08,0.08,0.43,0.08,0.08,0,0.6,0.17,0.17,0,0,0.17,0.08,1.2,0,3.14,0,0.34,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.085,0.028,0.092,0.014,0,4.16,48,1140,1 0.17,0,0.08,0,0.43,0.08,0.08,0.43,0.08,0.08,0,0.6,0.17,0.17,0,0,0.17,0.08,1.2,0,3.14,0,0.34,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.085,0.028,0.092,0.014,0,4.16,48,1140,1 0,0.34,0.69,0,0.34,0.69,0.34,0,0,1.04,0.34,1.38,0,0,0,0.69,0,0.69,4.86,0,1.73,0,0,1.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.058,0,0.638,0.174,0,2.17,8,89,1 0,0,0.29,0,0.29,0.29,0.29,0.29,0,0,0.58,0.87,0,0,0,0.87,0.58,0.29,2.61,2.61,2.9,0,0,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.046,0,0.14,0,0.14,4.892,139,274,1 0,0,0,0,0.45,0.45,0.45,0,0,0,0,0.45,0,0,0,0,0,0,0.9,0,0,9.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.072,0,1.083,7.857,45,440,1 0,0,0.4,0,0,0,0.2,0,0.8,0.2,0,0.4,0,1.41,0.2,0.4,0,0,3.44,3.03,2.22,0,0,0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.033,0.066,0,0.133,0.066,0,2.704,30,192,1 0,0,0,0,0,0,1.33,0,0,0,0,0,0,0,0,0,0,0,1.33,0,5.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.213,1.492,0,29.125,223,233,1 
0.22,0.22,0.22,0,1.77,0.22,0.44,0.44,0.22,2.88,0,0.88,0.22,0,1.1,0.44,0,0.44,3.32,0,3.32,0,0.44,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0.588,0.156,0,86.7,1038,1734,1 0,0.9,0,0,0,0,0,0,0,0,0,0.9,0,0,0,0,0,1.81,6.36,0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.146,0,0.292,0,0,3,38,75,1 0.74,0,0,0,0.74,0,0.74,0,0.74,0,0,0,0,0,0,0,0,0,0,0,1.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0.467,0.233,0,1.846,12,72,1 0,0,0,42.81,1.28,0,0.28,0,0,0,0,0.28,0,0,0,0.14,0,0,1.7,0,0.85,0,0,0.42,0,0,0,0,0,0,0,0,0,0,0.14,0,0,0,0,0,0,0,0,0,0,0,0,0.14,0.026,0.078,0,0.13,0,0,7,137,826,1 0,0,0.37,0,1.13,0,0.37,0,0,0.75,0,0.37,0,0,0,0.37,0.75,0,2.65,0,1.13,0,0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.13,0,0,0,0,0.063,0,0.882,0.189,0,4.08,78,253,1 0,0,0,0,0,0,0,0,0,0,0,0,1.33,0,0,0,2.66,5.33,2.66,0,2.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.799,0.514,0,1.823,11,62,1 0,0,0.72,0,1.45,0.36,0,0,0,1.45,0,1.09,0,0,0,0.72,0,0,2.54,1.81,0.72,0,0,0,0.36,0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.056,0,0.963,0.113,0.17,2.622,47,139,1 0.54,0,1.08,0,0.54,0,1.08,0,0,0,0,0.54,0,0,0,0.54,0.54,0,4.32,0,1.08,0,1.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.54,0,0,0,0,0,0,1.218,0.261,0,5.323,68,181,1 0,0,0,0,0,0,0,0,0,1.81,0,0,0,0,1.81,1.81,0,0,1.81,0,0,0,5.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.628,0,0.628,0.943,0,2.944,9,53,1 0,0,0.48,0,0.96,0,0.48,0,0,0,0,0,0,0,0,0.48,0.96,0,1.92,0,1.44,0,0.48,0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.44,0,0,0,0,0.073,0,0.515,0.957,0,6.833,78,328,1 0,0,0,0,0.98,0,0,0,0,0.98,0.98,0.98,0,0,0,0.98,0,0.98,2.94,0,1.96,0,0,0,0.98,0.98,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.98,0,0,0,0,0,0.278,0,0,2.95,18,59,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.833,0,0,1.375,3,11,1 
0,0,0,19.16,0.18,0,0.18,0,0,0,0,0,0,0,0,1.89,0,0,0.56,0,0,9.48,0,0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.294,25.22,180,1261,1 0,0,0.6,0,0,0.6,0,0,0.6,0,0,1.82,0,0,0,0.3,0,0,2.74,0,1.21,0,0,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.143,0.047,0.191,0.143,0,2.041,31,196,1 0,0,0,0,0,0,0,0,0,0,0,1.33,0,0,0,1.33,0,0,2.66,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.326,0,0,2.2,11,44,1 0.37,0.17,0.3,0.06,0.23,0.17,0.03,0.95,0.37,0.37,0.1,0.64,0.61,0.34,0.2,0.51,0.34,0.34,2.75,0.13,1.36,0,0.27,0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0,0,0,0,0.03,0,0,0,0.011,0.041,0.071,0.379,0.136,0,3.341,181,1955,1 0,0,0.6,0,0,0.6,0,0,0.6,0,0,1.81,0,0,0,0.3,0,0,2.72,0,1.21,0,0,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.142,0.047,0.19,0.142,0,2.03,31,199,1 0.58,0,0,35.46,0.58,0,0.58,0.58,0,0,0,0,0,0.58,0,0.58,0.58,0.58,0.58,0,1.74,0,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.239,0.239,0,3.338,123,207,1 0,0,1.4,0,0.46,0,0.46,1.4,0,0.46,0,0,0,0,0,0,0,0,2.8,0,1.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.93,0,0,0,0,0,0,0,0,0,0.123,0,0.37,0,0,6.137,54,313,1 0,0,0.3,0,0.3,0.91,0,0.3,0,0,0,0.3,0.3,0,0,0.3,0.3,0.3,2.12,0,3.03,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.3,0,0,0,0,0.211,0,0,0.211,0.052,1.745,11,185,1 0,0,0.3,0,0.3,0.9,0,0.3,0,0,0,0.3,0.3,0,0,0.3,0.3,0.3,2.11,0,3.02,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.3,0,0,0,0,0.21,0,0,0.21,0.052,1.738,11,186,1 1.19,0.59,0,0,0.59,0,0,0.59,0,0,0,0,0.59,0,0,0,0,0.59,3.57,0,6.54,0,0,0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.103,0,0,1.437,11,161,1 0.5,0.25,0.42,0,0.08,0.23,0.02,0.35,0.35,0.69,0.21,0.9,0.5,0.92,0.02,0.33,0.42,0.02,3.05,0,1.43,0,0.94,0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0,0,0,0,0.02,0,0.02,0,0,0.069,0,0.325,0.523,0.124,6.723,445,4128,1 0.58,0,0,35.46,0.58,0,0.58,0.58,0,0,0,0,0,0.58,0,0.58,0.58,0.58,0.58,0,1.74,0,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.239,0.239,0,3.338,123,207,1 
0,0,0.3,0,0.3,0.91,0,0.3,0,0,0,0.3,0.3,0,0,0.3,0.3,0.3,2.12,0,3.03,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.3,0,0,0,0,0.211,0,0,0.211,0.052,1.752,11,184,1 0,0,0.3,0,0.3,0.91,0,0.3,0,0,0,0.3,0.3,0,0,0.3,0.3,0.3,2.12,0,3.03,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.3,0,0,0,0,0.211,0,0,0.211,0.052,1.752,11,184,1 0.47,0,1.19,0,0.23,0.23,0,0,0,0.47,0,1.43,0,0,0,0.71,1.43,0,5.26,0,2.63,0,0.71,0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.23,0,0,0,0,0,0,0,0,0,0,0.178,0,0.402,0.402,0.089,5.681,49,392,1 0,0,1.79,0,0,0.59,0,0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.103,10,204,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.347,0,0,3,7,9,1 0,0,0.48,0,0.72,0.48,0,0,0.24,0,0.48,0.24,0,0,0,0.48,0,0,1.2,0,1.44,0,0.48,0.24,0,0,0,0,0,0.24,0,0,0,0,0,0,0,0,0,0.24,0,0,0.24,0,0,0,0,0.24,0,0.036,0,0.036,0.184,0,2.336,66,264,1 0,0,0.48,0,0.72,0.48,0,0,0.24,0,0.48,0.24,0,0,0,0.48,0,0,1.2,0,1.44,0,0.48,0.24,0,0,0,0,0,0.24,0,0,0,0,0,0,0,0,0,0.24,0,0,0.24,0,0,0,0,0.24,0,0.036,0,0.036,0.184,0,2.336,66,264,1 0.34,0.25,0.25,0,0.08,0.43,0.08,0.25,0.08,1.47,0.34,0.51,0.95,0,0.17,0.08,0,0,3.03,0,0.77,0,0.34,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.17,0,0,0,0,0.049,0,0.263,0.263,0.065,3.076,62,526,1 0.43,0,0,0,0.87,0.87,0,0,0,0.43,0,2.18,0,0,0,0,1.74,0,0.87,0,0.87,0,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.802,0,0,5.114,107,179,1 0.43,0,0,0,0.87,0.87,0,0,0,0.43,0,2.18,0,0,0,0,1.74,0,0.87,0,0.87,0,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.874,0,0,5.114,107,179,1 0,0,0.29,0,0.29,0.29,0.29,0.29,0,0,0.58,0.87,0,0,0,0.87,0.58,0.29,2.61,2.61,2.9,0,0,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.046,0,0.14,0,0.14,4.892,139,274,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.4,7.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.543,0,0,5,15,50,1 
0,0,0,0,0.36,0.36,0,0.36,0.36,0.36,0,0.36,0,0,0,0,0.73,0,2.94,0,4.04,0,0,0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.36,0,0,0,0,0,0,0,0.653,0.118,0,1.53,13,75,1 0,0,0.97,0,0.38,0.19,0,0,0,0.19,0,1.16,0,0,0,0,0,0,0.58,0,0.38,0,0.77,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0.208,0,0.364,0.312,0,7.541,192,543,1 0.17,0,0.08,0,0.42,0.08,0.08,0.42,0.08,0.08,0,0.6,0.17,0.17,0,0,0.17,0.08,1.2,0,3.17,0,0.34,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.085,0.028,0.099,0.014,0,4.07,48,574,1 0.52,0,2.38,0,0.26,0,0.26,0,0.52,0,0.26,0,0,0,0,0.79,0,0,1.32,0,1.05,0,0,0.52,0,0,0,0,0,0,0,0,0.26,0,0,0.26,0.26,0,0.52,0,0,0,0,0,0,0,0,0,0,0.69,0,0.327,0,0,5.549,71,566,1 0.46,0.31,0.46,0,0.05,0.13,0.05,0.26,0.44,0.75,0.26,0.96,0.57,1.22,0,0.1,0.44,0,3.21,0,1.48,0,1.01,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0,0,0,0,0.068,0,0.346,0.793,0.159,6.05,199,3213,1 0.18,0,0.54,0,1.09,0.18,0.54,0,0.54,0.54,0,0.18,0,0,0.18,0.36,0.18,0.54,1.82,0,2,0,0,0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.18,0,0,0,0,0,0,0,0.166,0,0.249,0.305,0,3.921,59,447,1 0.17,0,0.08,0,0.42,0.08,0.08,0.42,0.08,0.08,0,0.6,0.17,0.17,0,0,0.17,0.08,1.2,0,3.17,0,0.34,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.085,0.028,0.099,0.014,0,4.07,48,574,1 0,0,1.26,0,0,0,0,0,0,0,0,0,1.26,0,0,0,2.53,5.06,2.53,0,3.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.9,0.475,0,1.763,11,67,1 0,0,0,0,0,0,0.36,0,0,0,0,0,0,0,0,0.36,0,0,3.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.73,0,0,0,0,0,0,0.053,0.053,0,18.37,134,496,1 0.37,0.75,1.13,0,0.37,0,0,0.37,0.37,1.88,0.37,2.64,0,0.37,0,0.37,0,0,2.26,0,4.52,0,0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.197,0,1.248,0.197,0.065,58.705,842,998,1 0,0.57,0,0,0,0,0,0,0,0,0.57,0.57,1.15,0,0,0,0,1.73,3.46,0,1.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.107,0,0,1.421,7,54,1 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.17,0,2.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.411,0,0,4.307,15,56,1 0.09,0.49,0.59,0,0.39,0.19,0,0,0.09,0.39,0,1.59,0.19,0,0,0,0.09,0,3.79,0,1.09,0,0,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.778,0.037,0,5.924,1.33,0,5.8,54,725,1 0,0,0,0,0.38,0.38,0.38,0.38,0,0,0.38,0,0,0,0,0.38,0,0,3.87,0,1.93,0,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.123,0,0,2.062,12,99,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.17,0,3.17,0,3.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.298,0.597,0,3.333,12,30,1 0,0.49,1.97,0,2.46,0,0,0,0,0,0,0,0.49,0,0,0.49,1.47,0.49,4.43,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.424,0,0,12.692,152,330,1 0,0,0,0,0.38,0.38,0.38,0.38,0,0,0.38,0,0,0,0,0.38,0,0,3.87,0,1.93,0,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.123,0,0,2.062,12,99,1 0.06,0.12,0.77,0,0.19,0.32,0.38,0,0.06,0,0,0.64,0.25,0,0.12,0,0,0.12,1.67,0.06,0.7,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0,0,0,0.041,0.031,0,0.25,0.073,0,1.764,37,766,1 0.74,0.74,0.74,0,0,0,0.37,0,0.37,1.12,1.12,1.12,0,0,0,0,0,0.74,2.99,0,2.24,0,0,0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.061,0.061,0.122,0,4.727,57,208,1 0,0,0,0,1.58,0,0.39,1.19,0,0.39,0,0.79,0,0,0,0,1.58,0.39,3.96,0,1.98,0,0,0,0,0,0,0,0,0,0,0,0,0,0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.705,0.211,0,1.903,13,118,1 1.24,0,0.82,0,0,0,0.41,0,0,0.41,0,0.41,0,0,0,1.65,0.41,0,2.9,0,0.41,0,0.41,0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.193,0,0.904,5.3,0,7.478,92,344,1 0,0.09,0.14,0,1.04,0.09,0.09,0,0.79,0,0.04,0.29,0.19,0,0,0,0.14,0.04,1.53,0.24,1.23,0,0.29,0.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.04,0,0.04,0,0,0,0.015,0.119,0.007,0.431,0.111,0,3.37,87,1645,1 1.24,0,0.82,0,0,0,0.41,0,0,0.41,0,0.41,0,0,0,1.65,0.41,0,2.9,0,0.41,0,0.41,0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.193,0,0.904,5.3,0,7.478,92,344,1 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.722,57,85,1 0,0,0,0,1.21,0,1.21,1.21,1.21,1.21,1.21,1.21,0,0,0,0,4.87,0,2.43,1.21,4.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.555,0,0,104.666,311,314,1 1.44,0,0,0,0,0,0,0,0,0.48,0,2.4,0,0,0,0.96,0,0,6.73,0,1.92,0,0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0,0,0.174,0.087,0,1.612,12,50,1 0.47,0.31,0.47,0,0.05,0.13,0.05,0.26,0.42,0.76,0.26,0.97,0.57,1.23,0,0.1,0.47,0,3.23,0,1.49,0,0.99,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0,0,0,0,0.067,0,0.328,0.858,0.157,5.928,199,3160,1 0,0,0,0,1.47,1.47,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.562,0,0,5,95,170,1 0.53,0,1.06,0,0.53,0,1.06,0,0,0,0,0.53,0,0,0,1.06,0.53,0,4.25,0,1.06,0,1.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.53,0,0,0,0,0,0,1.208,0.259,0,5.558,76,189,1 1.24,0.41,1.24,0,0,0,0,0,0,0,0,0.41,0,0,0,0.41,0,0.82,3.73,0,1.24,0,0,0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.41,0,0,0,0,0.068,0,0.48,0.549,0,3.166,19,114,1 0,0.55,0.55,0,2.23,0,0.55,0,0,0,0,0.55,0.55,0,0,0.55,2.79,0,3.91,0,1.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.259,0,1.208,0.345,0,4.761,140,200,1 0,0.55,0.55,0,2.23,0,0.55,0,0,0,0,0.55,0.55,0,0,0.55,2.79,0,3.91,0,1.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.258,0,1.207,0.345,0,4.761,140,200,1 0.37,0.75,1.13,0,0.37,0,0,0.37,0.37,1.89,0.37,2.65,0,0.37,0,0.37,0,0,2.27,0,4.54,0,0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0.37,0,0,0,0,0,0,0,0,0,0,0,0,0.196,0,1.246,0.196,0.065,62.5,845,1000,1 0.34,0,0.69,0,0.17,0.51,0,0.51,0.17,0.17,0.17,1.38,0,0,0,0.34,1.03,0.17,1.9,1.55,3.81,0,0.17,0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.17,0,0,0,0,0.17,0,1.275,0.141,0,5.598,78,711,1 0,0.89,1.15,0,0.12,0,0,0.12,0.25,0.12,0.12,0.38,0.12,0,1.15,0,0.12,2.04,2.81,0.12,1.27,0,0,0.12,0,0,0,0,0,0,0,0,0,0,0,0.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0.164,0,0.371,0.061,0,2.89,84,477,1 
0,0.47,0.47,0,1.89,0,1.18,0.23,0,0.47,0.23,0.7,0.23,0,0.47,0.23,1.41,0,2.83,0,1.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,1.844,13,83,1 0.47,0.31,0.47,0,0.05,0.13,0.05,0.26,0.44,0.76,0.26,0.97,0.58,1.26,0,0.26,0.44,0,3.24,0,1.5,0,1.02,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0,0,0,0.009,0.067,0,0.329,0.78,0.162,6.045,193,3059,1 0,0,0,0,1.35,0.45,0,0,0,0,0,0,0.45,0,0,0.45,0.45,0.45,1.8,0,1.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.138,0,0.138,0,0,5.809,46,122,1 0,0,0,0,0,0,1.23,0,0,0,0,0,0,0,0,1.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.939,0,0,1.379,8,40,1 0,0,0,0,0,0,1.23,0,0,0,0,0,0,0,0,1.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.947,0,0,1.379,8,40,1 0,0,0,0,0,0,1.23,0,0,0,0,0,0,0,0,1.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.949,0,0,1.379,8,40,1 0.19,0,0,0,0.09,0.09,0.19,0,0,0.09,0.09,0.69,0.09,0,0,0,0,0.19,1.38,0,0.49,0,0.39,0,0,0,0,0,0,0,0,0,0,0,0,0.09,0,0,0,0,0,0,0,0,0,0,0,0.09,0.017,0.068,0,0.586,0.189,0.017,2.349,31,477,1 1.03,0,0.68,0,1.03,0,0.68,0,0,0.68,0,0.68,0,0,0.34,0.68,0,0,5.86,0,1.37,0,0.34,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0,0,0,0,0,0.503,0.062,0,1.82,12,91,1 0.27,0,0.27,0,0,0,0,0,0,0.27,0.27,0.55,0,0,0,0,0,0,2.2,0,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.136,0.182,0,8.207,30,435,1 0,1.09,0,0,0,0,0,1.09,0,0,0,0,0,0,0,0,0,0,0,0,1.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.09,0,0,0,0,0,0,0,1.09,0,0,0,0.173,0.519,0,0,0.692,0,4.941,25,84,1 0,0.89,1.14,0,0.12,0,0,0.12,0.25,0.12,0.12,0.38,0.12,0,1.14,0,0.12,2.04,2.8,0.12,1.27,0,0,0.12,0,0,0,0,0,0,0,0,0,0,0,0.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0.165,0,0.371,0.061,0,2.878,84,475,1 0,0,0,0,1.2,0,1.2,1.2,1.2,1.2,1.2,1.2,0,0,0,0,4.81,0,2.4,1.2,3.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.331,0,0,50.166,295,301,1 
0.49,0,0.74,0,0.24,0.74,0.24,0.74,0.24,0.24,0.24,1.23,0,0,0,0,1.23,0,1.23,1.73,2.47,0,0.24,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.116,0,1.634,0.155,0,3.975,47,485,1 0,0,0.6,0,0.6,0,0.6,0,0,0,0,0,0.6,0,0,0,0,0.6,1.81,0,1.21,0,0,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.412,0,0.619,0.103,0,6.166,33,259,1 0,0.46,0.46,0,1.38,0,0,1.85,0,0.92,0.46,0,0,0,0,0.92,0,0,0.92,0.46,1.38,0,0,0.46,0,0,0,0,0,0,0,0,0,0,0,0,1.38,0,0,0.46,0,0,0,0,0,0,0,0,0,0.072,0,0.795,0.217,0,4.869,66,224,1 0.67,0,0.67,0,0,0,0,0,0,0.67,0,0,0,0,0,0,0,0,1.35,0,1.35,0,0,0,0,0,0,0,0,0,0,0,0,1.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.362,0,0,3.384,37,132,1 0,0,0,0,0,0,1.47,1.47,1.47,1.47,1.47,0,0,0,0,0,2.94,0,0,1.47,2.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0.147,0,0.294,0.147,0,72,281,288,1 0,0.49,0.49,0,1.49,0,0,0,0,0.99,0.49,0,0,0,0,0.49,0,0,0.99,0.49,1.99,0,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,1.49,0,0,0.49,0,0,0,0,0,0,0,0,0,0.078,0,0.625,0.312,0,4.75,47,190,1 0,0.53,0,0,0,0.53,0.53,0,0,0,0,0,0,0,0,0,0,1.6,2.67,0,1.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.255,0,0,0,0,2.131,12,81,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.55,0,1.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.473,0,0,4.071,29,114,1 0,0.56,0.56,0,2.27,0,0.56,0,0,0,0,0.56,0.56,0,0,0.56,3.4,0,3.97,0,1.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.262,0,1.135,0.349,0,5.105,140,194,1 0.23,0.59,0.23,0,0.23,0.11,0,0,0.82,1.18,0.11,2,0.23,0,0,0,0.11,0,4.84,4.96,1.77,0,0,0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0,0,0,0.186,0,0.13,0.168,0.018,5.76,175,795,1 0,0,0.56,0,1.12,0,0,0,0,0,0,0,0.93,0,0.18,0,0.37,0.37,3.18,0,0.93,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.189,0,0.441,0,0,1.372,4,70,1 0,0,0.47,0,1.42,0,0,0,0,0,0,0.95,0,0,0,0,0.95,0,2.38,0,2.38,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.47,0,0,0,0,0.082,0,0.496,0.248,0,5.187,80,249,1 
0,0,0.56,0,1.12,0,0,0,0,0,0,0,0.93,0,0.18,0,0.37,0.37,3.18,0,0.93,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.189,0,0.441,0,0,1.372,4,70,1 0,0,0.48,0,0.72,0.48,0,0,0.24,0,0.48,0.24,0,0,0,0.48,0,0.24,1.21,0,1.45,0,0.48,0.24,0,0,0,0,0,0.24,0,0,0,0,0,0,0,0,0,0.24,0,0,0.24,0,0,0,0,0.24,0,0.036,0,0.036,0.184,0,2.276,66,255,1 0,0.36,0.72,0,1.44,0,0.36,0,0,1.44,0.72,0.36,0.36,0,0,0,0,0,2.89,0,2.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.517,6.685,60,234,1 0.67,0,0.67,0,0,0,0,0,0,0.67,0,0,0,0,0,0,0,0,1.35,0,1.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.413,0,0,3.384,37,132,1 0,0.47,0,0,0.47,0,0,0,0,0,0.47,0,0,0,0,0.47,0,0.95,1.9,0,1.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.056,0,0,0,0,7.18,182,359,1 0,0.47,0,0,0.47,0,0,0,0,0,0.47,0,0,0,0,0.47,0,0.95,1.9,0,1.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.056,0,0,0,0,7.18,182,359,1 0.43,0.28,0.43,0,0.04,0.11,0.04,0.21,0.4,0.69,0.23,0.88,0.52,1.14,0,0.23,0.4,0,2.93,0,1.36,0,0.97,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0,0,0,0,0.064,0,0.311,0.734,0.145,5.328,144,3016,1 0,0,2.5,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.303,0,0.909,0,0,2.857,11,40,1 0.1,0.2,1.01,0,0.8,0.8,0.5,0,0.8,0.1,0.3,0.7,0.3,0,1.61,0.1,0,1.11,1.31,0.2,0.7,0,0.6,0.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0.1,0.1,0,0,0,0.11,0,0.49,0.158,0.015,8.55,669,1351,1 0,0,0,0,0,0,1.47,1.47,1.47,1.47,1.47,0,0,0,0,0,2.94,0,0,1.47,2.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0.147,0,0.294,0.147,0,71.5,281,286,1 0,0.56,0.56,0,2.25,0,0.56,0,0,0,0,0.56,0.56,0,0,0.56,3.38,0,3.95,0,1.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.26,0,1.217,0.347,0,5.105,140,194,1 0,0,0,0,2.38,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,11,20,1 
0,0,0.55,0,1.11,0,0,0,0,0,0,0,0.92,0,0.18,0,0.37,0.37,3.14,0,0.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.186,0,0.434,0,0,1.377,4,73,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.462,0,0.462,0,0,3.125,6,25,1 0.47,0.31,0.47,0,0.05,0.15,0.05,0.23,0.44,0.76,0.26,0.97,0.58,1.27,0,0.26,0.44,0,3.25,0,1.5,0,1.11,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0,0,0,0,0.068,0,0.344,0.784,0.154,6.094,193,3029,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.81,0,0,4.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.176,0,0.706,0,0,8.411,55,143,1 0,0.47,0,0,0,0.47,0,0,0.23,0.23,0,1.19,0.47,0,0,0.23,0,0.47,2.63,0,0.47,0,0,0.23,0,0,0,0,0,0,0,0,0,0,0.23,0,0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.327,1.357,0.046,5.769,72,450,1 0,0,0,42.73,0,0,0.42,0,0,0.42,0,0.42,0,0,0.42,0,0,1.28,2.99,0,2.13,0,1.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.349,0,7,105,441,1 0,0,0.54,0,1.08,0,0,0,0,0,0,0.18,0.9,0,0.18,0,0.36,0.36,3.06,0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.029,0.178,0,0.416,0,0,1.373,6,92,1 0,0,0.58,0.58,0,0,0,0.29,0,0,0,0,0.29,0,0,0,0.29,0.58,2.91,0.87,1.74,0,0,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.43,0,1.434,0,0.047,3.281,64,361,1 0,0,0.48,0,1.44,0.48,0,0,0,0,0,0.96,0,0,0,0,0.96,0,2.41,0,2.41,0,0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0.083,0,0.502,0.251,0,5.488,80,247,1 0,0,0.48,0,1.45,0,0,0,0,0,0,0.97,0,0,0,0,0.97,0,2.42,0,2.42,0,0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0.084,0,0.421,0.252,0,5.173,80,238,1 0,0,0.49,0,1.47,0,0,0,0,0,0,0.98,0,0,0,0,0.98,0,2.45,0,2.45,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.49,0,0,0,0,0.084,0,0.677,0.254,0,5.2,80,234,1 0.72,0,0,0,0,0,1.45,0,0,0,0,0.72,0,0,0,0,1.45,0,2.18,1.45,5.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.105,0,0,2.689,25,78,1 
0.45,0.28,0.42,0,0.04,0.11,0.04,0.21,0.4,0.69,0.23,0.88,0.52,1.14,0,0.23,0.4,0,2.93,0,1.36,0,1,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0,0,0,0,0.065,0,0.318,0.754,0.152,5.349,144,3033,1 1.17,0,0,0,0,0,0,0,0,0,0,1.17,0,0,0,0,1.17,0,3.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.17,0,0,0,0,0,0,0.924,0,0,1.966,10,59,1 0,0,0,0,0.64,0,0,0,0,0,0.64,0.64,0,0,0,0,1.29,0,1.29,5.19,1.29,0,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.468,0,0.093,0,0,2.755,66,135,1 0,0,0,0,0.64,0,0,0,0,0,0.64,0.64,0,0,0,0,1.29,0,1.29,5.19,1.29,0,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.468,0,0.093,0,0,2.755,66,135,1 0,0,0,0,0.64,0,0,0,0,0,0.64,0.64,0,0,0,0,1.29,0,1.29,5.19,1.29,0,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.468,0,0.093,0,0,2.755,66,135,1 0,0,0.3,0,0,0,0,0,0,0.3,0,0.3,0,0,0.3,0.3,0,0.15,0.15,0,0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.76,0,0,0,0,0,0,0,0,0,0,0.3,0.472,0.067,0,0,0.044,0.067,1.607,16,418,1 0.41,0,1.25,0,0.2,0.2,0,0,0,0.41,0,1.25,0,0,0,0.62,1.25,0,4.6,0,2.3,1.67,0.62,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0,0,0,0,0,0,0,0,0,0.153,0,0.345,0.345,0.306,5.132,37,426,1 0,0,0,0,3.7,0,0,0,0,0,0,0,0,0,0,0,0,0,3.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.166,28,79,1 0.47,0.31,0.47,0,0.07,0.13,0.05,0.26,0.44,0.76,0.26,0.97,0.57,1.26,0,0.26,0.44,0,3.22,0,1.47,0,1.1,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0,0,0,0,0.068,0,0.331,0.79,0.159,6.073,193,3043,1 0,0,0.55,0,1.11,0,0,0,0,0,0,0,0.92,0,0.18,0,0.37,0.37,3.15,0,0.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.185,0,0.464,0,0,1.392,4,78,1 0,0.63,0,0,1.59,0.31,0,0,0.31,0,0,0.63,0,0,1.27,0.63,0.31,3.18,2.22,0,1.91,0,0.31,0.63,0,0,0,0.31,0,0,0,0,0,0,0,0,0,0,0,1.59,0,0,0,0,0,0,0,0,0,0.278,0,0.055,0.501,0,3.509,91,186,1 0,0.56,0.56,0,2.25,0,1.12,0,0,0,0,0.56,0.56,0,0,0.56,3.38,0,3.95,0,2.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.26,0,1.13,0.347,0,4.875,140,195,1 
0,0,0.55,0,1.11,0,0,0,0,0,0,0,0.92,0,0.18,0,0.37,0.37,3.15,0,0.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.185,0,0.464,0,0,1.392,4,78,1 0,0,0,0,0.94,0,0,0,0,0,0,0,0,0,0,0,0,0,1.88,0,2.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.443,0,0,4.652,31,107,1 0.17,0,0.17,0.44,0.17,0,0,0,0,0,0,0.35,0.52,0.17,0,0.08,0.52,0,4.04,0,2.64,1.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.17,0,0,0,0,0,0,0,0,0,0,0,0.709,0.105,0,0,0,0,2.039,18,414,1 0,0,0,0,0,0,0,0,0,0.33,0,0.67,0,0,0,0,0,0,1.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0,0,0,0,0,0.33,0.33,0,0,0.28,0.28,0,0.112,0.336,0,2.96,19,222,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.88,2.65,0,0.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.88,0,0,0,0,0,0,0,0,0,0,0,0,0.14,0,0,0,0,1.512,7,62,1 0,0,0.12,0,0.36,0.24,0,0,0,0,0.12,0.12,0.12,0,0,0,0,0,1.21,0,0.96,0,0,0.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.043,0,0.693,0,0,1.335,11,195,1 0.19,0.19,0.29,0,1.07,0.19,0.19,0.97,0.87,0.58,0.09,1.07,0.19,0.87,0.09,0,0,1.17,3.81,0.68,1.75,0,0.09,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.09,0.09,0,0,0,0,0,0,0,0,0,0,0.202,0.405,0.233,0.031,4.32,49,877,1 0,0,0,0,0,0,0,0,0,0,0,0,0,1.56,0,0,0,0,1.56,6.25,1.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0.754,0.188,0,5.551,119,161,1 0.73,0,0.36,0,0.36,0.36,1.09,0,0,0,0,0.36,0,0,0,0.36,1.83,0.73,2.56,0,1.09,0,0.36,1.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.183,0,0.427,0.061,0,4.42,192,305,1 0,0,0.22,7.07,0,0,0,0.45,0,0,0,0,0,0,0,0.45,0,0,0.22,0,0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.22,0,0,0,0,0,0,0,0,0.22,0,0,0,0.153,0.092,0,0,0,0.03,2.47,27,425,1 0,0.19,0,0,0.68,0.09,0.09,0,0.29,0.09,0.48,0.77,0.09,1.65,0,0.58,0.87,0.19,3.21,0,2.43,0,0,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0,0.06,0.045,0,1.597,20,329,1 0,0.42,0.42,0,0,0,0,0,0,0,0,0.84,0,0,0,0,0,0.84,2.95,0,2.53,0,0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.075,0,0.6,0.3,0,4.02,82,197,1 
0,0.42,0.42,0,0,0,0,0,0,0,0,0.84,0,0,0,0,0,0.84,2.95,0,2.53,0,0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.075,0,0.6,0.3,0,4.02,82,197,1 0,0.4,0.4,0,0.4,0,0.4,0,0,2.4,0,0,0,0,0.4,0.8,0,0,2,0.4,2,0,0,0,0,0,0,0,0,0,0.4,0,0.4,0,0,0,0,0,0,0.4,0,0,0,0,0,0,0,0,0,0.232,0,0.116,0.116,0,4.058,54,207,1 0,0,0,0,0,0.63,0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,1.91,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.613,0,1.251,12,174,1 0,1.25,0,0,0,0,1.25,0,0,0,0,0,0,0,0,1.25,1.25,1.25,1.25,0,3.75,0,0,0,0,0,0,0,0,0,0,0,1.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.199,0,0,0.298,0,0,3.976,32,171,1 0,0,0.79,0,0.26,0,0.26,0.26,0,0,0,1.31,0,0,0,0,0,0.26,1.58,0,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.52,0.26,0,0,0.26,0,0,0,0,0,0,0,0.26,0.038,0.038,0,0.077,0,0,1.8,29,171,1 0,0.7,0,0,2.83,0,0,0,0,0.7,0,0.7,0,0,0,1.41,1.41,0,7.09,0,5.67,0,0,0,0,0,0,0,0,0,0,0,0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.126,4.176,44,142,1 0,0.55,0.55,0,2.22,0,0.55,0,0,0,0,0.55,0.55,0,0,0.55,3.88,0,3.88,0,1.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.255,0,1.191,0.34,0,4.59,140,202,1 0,0,0.72,0,0.72,0,0.72,0,0,0,0,0,0.72,0,0,0,0,0,1.45,0,1.45,0,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.364,0,0.729,0.121,0,7.781,32,249,1 0,0,0.84,0,0.84,0,0.84,0,0,0,0,0,0.84,0,0,0,0,0,2.54,0,1.69,0,0,0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.388,0,0.776,0.129,0,10.375,168,249,1 0,0,0.72,0,0.72,0,0.72,0,0,0,0,0,0.72,0,0,0,0,0,1.45,0,1.45,0,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.364,0,0.729,0.121,0,7.781,32,249,1 0,1.22,0.81,0,0.4,0,0.81,0.4,0,0.81,0,0.4,2.04,0,0,3.27,0,1.22,0.81,0,0.4,0,2.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0.4,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0.64,0.8,0,7.651,181,505,1 0.34,0.05,0.58,0,0.63,0.17,0,0,0.75,0.23,0.34,1.27,0.34,0,0,0.58,0.05,0.17,3.01,2.61,1.5,0,0.17,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.204,0,0.13,0.186,0.027,4.225,131,1107,1 
0.71,0,0,0,5,0,0,0,0,0,0,0,0,0,0,2.85,0,0,2.14,0,0,0,0,0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.417,0,0,3.029,12,103,1 0.65,0,0,0,1.3,0,0,0,0,0,0.65,1.3,0.65,0,0,1.3,1.3,0,2.61,0,3.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.65,0,0,0,0,0,0,0.515,0.103,0,2.04,12,51,1 0,0,0,0,0,0,1.61,0,0,1.61,0,1.61,0,0,0,0,0,0,3.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.23,0,0,4.941,60,84,1 0,0,0,0,0.32,0,0,0.32,0.32,0.64,0,1.28,0,0,0,2.56,0.96,0,3.84,0,0.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.098,0.049,0.492,0,0,2.184,25,166,1 0,0.64,0.64,0,0.64,0,0.64,0,2.59,1.29,1.29,1.94,0,0,0,0.64,0.64,0.64,3.24,0,2.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.233,0,0,1.136,4,25,1 0,0,0,0,0.49,0,0.98,0,0,0,0,0.98,0,0,0,0,0.98,0,2.45,0,1.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.064,0.258,0,0.645,0.064,0.064,3.552,25,135,1 0.44,0,0.88,0,0.44,1.32,0,0,0,0,0,0,0,0,0,0,0,0.44,1.76,0,2.2,0,2.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.222,0,0,0.444,0.37,0,2.413,16,140,1 0,0,0.69,0,0.69,0,0.69,0,0,0,0,0,0.69,0,0,0,0,0,1.38,0,2.08,0,0,0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.35,0,0.701,0.116,0,8.781,34,281,1 0.44,0,0.88,0,0.44,1.32,0,0,0,0,0,0,0,0,0,0,0,0.44,1.76,0,2.2,0,2.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.222,0,0,0.444,0.37,0,2.413,16,140,1 0.44,0,0.88,0,0.44,1.32,0,0,0,0,0,0,0,0,0,0,0,0.44,1.76,0,2.2,0,2.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.222,0,0,0.444,0.37,0,2.413,16,140,1 0.44,0,0.88,0,0.44,1.32,0,0,0,0,0,0,0,0,0,0,0,0.44,1.76,0,2.2,0,2.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.222,0,0,0.444,0.37,0,2.448,16,142,1 0,0,0,0,0,0,0,0,0,0,0,0.59,0,0,0,0.59,0,0,1.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.177,0,0.443,0.088,0,1.693,16,83,1 
0.1,0.2,1.01,0,0.8,0.8,0.5,0,0.8,0.1,0.3,0.7,0.3,0,1.61,0.1,0,1.11,1.31,0.2,0.7,0,0.6,0.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0.1,0.1,0,0,0,0.11,0,0.49,0.158,0.015,8.55,669,1351,1 0,0,1.66,0,1.66,0,1.66,0,0,0,0,1.66,0,0,0,3.33,0,0,1.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.284,0,1.424,0,0,24.333,59,146,1 0.1,0.2,1.01,0,0.8,0.8,0.5,0,0.8,0.1,0.3,0.7,0.3,0,1.61,0.1,0,1.11,1.31,0.2,0.7,0,0.6,0.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0.1,0.1,0,0,0,0.11,0,0.49,0.158,0.015,8.55,669,1351,1 0.17,0.17,0,0,0.52,0,0,0.43,0,0.17,0.17,0.35,0,0,0,0.87,0,0,1.4,0.17,0.87,0,0,0.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.376,0,0.25,0.721,0,2.742,35,617,1 0,0,0.8,0,0.8,1.61,0,0,0,0,0,0.8,1.61,0,0,0,0,0,4.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.875,0,0,6,48,138,1 0,0.52,1.05,0,2.63,0.52,1.05,0,0,0,0.52,1.05,0,0,0,1.05,1.05,1.05,4.21,0,1.57,0,0.52,0,0,0,0,0,0,0,0,0,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.091,1.276,0.729,0.091,3.062,19,98,1 0.17,0,0.17,0,1.45,0.34,0.05,0.05,0.05,0.05,0.05,0.52,0.29,0.05,0,0,0.69,0.05,4.24,0.23,1.04,0,0.75,0.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.051,0,0.02,0.163,0,1.796,12,458,1 0.17,0.17,0,0,0.52,0,0,0.52,0,0.17,0.17,0.34,0,0,0,0.87,0,0,1.39,0.17,0.87,0,0,0.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.373,0,0.342,0.716,0,2.973,35,336,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.46,0,2.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.415,0,0,1.909,9,42,1 0.1,0.3,0.4,0,0.2,0.9,0.2,0.5,0.8,0.8,0.2,0.8,0,0,1.5,0,0.2,1.6,2.2,0.2,1,0,0.1,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0,0,0,0,0,0.175,0,0.307,0.175,0.014,6.937,669,1214,1 0,0,1.04,0,1.04,0,0,1.39,0.34,0,0,0.34,0,0,0,0,0,0,3.83,2.09,1.04,0,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.105,0,0.157,0.105,0,2.366,60,142,1 
0,0.89,1.14,0,0.12,0,0,0.12,0.25,0.12,0.12,0.38,0.12,0,1.14,0,0.12,2.04,2.8,0.12,1.27,0,0,0.12,0,0,0,0,0,0,0,0,0,0,0,0.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0.185,0,0.371,0.061,0,2.878,84,475,1 0,0.89,1.15,0,0.12,0,0,0.12,0.25,0.12,0.12,0.38,0.12,0,1.15,0,0.12,2.04,2.81,0.12,1.27,0,0,0.12,0,0,0,0,0,0,0,0,0,0,0,0.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0.185,0,0.37,0.061,0,2.878,84,475,1 0.29,0.19,0.68,0,0,0.58,0,0.58,0.58,0.77,0,0.58,0.38,0,0.97,0,0.19,1.46,1.75,0.38,0.77,0,0.58,0.68,0,0,0,0,0,0,0,0,0,0,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.034,0,0.569,0.207,0.034,12.064,691,1689,1 0.31,0.2,0.72,0,0,0.62,0,0.62,0.62,0.93,0,0.62,0.41,0,1.04,0,0.2,1.56,1.87,0.41,0.83,0,0.62,0.72,0,0,0,0,0,0,0,0,0,0,0.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.548,0.199,0.033,14.301,685,1516,1 0,0.3,0.3,0,0.61,0.3,0,0,0,0.3,0.3,0.3,0,0,0,0.92,0,0,0.61,0,0,0,0,0,0,0,0,0,0,3.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0.3,0,0,0,0,0,0.094,0,0,0,0,2.141,38,212,1 0,0,0,0,1.13,0,1.13,0,0,0,0,0,0,0,0,1.13,1.13,0,1.13,0,0,0,0,0,0,0,0,0,0,0,0,0,1.13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.319,0,0,0,0.479,3.925,17,106,1 0.17,0,0.17,0,1.45,0.34,0.05,0.05,0.05,0.05,0.05,0.52,0.29,0.05,0,0,0.69,0.05,4.24,0.23,1.04,0,0.75,0.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.051,0,0.02,0.163,0,1.796,12,458,1 0,0,0,0,0.32,0.64,0,0,0,0.64,0,0.32,0,0,0,0,0,0,1.94,0,0,0,0,0,0,0,0,0,0,0.97,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.052,0,0.263,0.105,0,3.418,110,188,1 0.31,0.2,0.72,0,0,0.62,0,0.62,0.62,0.93,0,0.62,0.31,0,1.14,0,0.2,1.56,1.87,0.41,0.83,0,0.62,0.72,0,0,0,0,0,0,0,0,0,0,0.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.548,0.199,0.033,14.283,685,1514,1 0,0.39,0.99,0,0.39,0,0.19,0,0.19,0.19,0,0.39,0,0,0,0,0.19,0.19,0.59,0.59,0.39,0,0.19,0.39,0,0,0,0.59,0,0,0,0,0,0,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0.074,0,0.174,0.548,0,4.965,97,993,1 0,0,0,0,0.43,0.86,0,0,0,0,0,0,0,0,0,0,0,0,2.17,0,0,0,0,0,0,0,0,0,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.068,0,0,0,0,2.923,55,114,1 
0.1,0.5,0.6,0,0.3,0.2,0,0,0.1,0.4,0,1.6,0.2,0,0,0,0.1,0,3.81,0,1.1,0,0,0.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.78,0.037,0,5.942,1.334,0,5.838,54,724,1 0.39,0,0,0,0,0.39,0,0,0,0,0,1.19,0,0,0,0.39,0.39,0,2.39,0,2.78,0,1.19,0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0.256,0,3.5,30,112,1 0,0,0,0,0,1.25,0,0,0,0,0,0,0,0,0,0,0,0,1.25,0,0,0,0,0,0,0,0,0,0,0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.098,0,0.392,0,0,3.965,67,115,1 0,0,0,0,0.54,0.27,0,1.62,0,1.62,0,0,0,0,0.54,0,0,0.27,2.16,0,2.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.35,0,0,0,1.62,0,0,0,0,0,0,0,0,0,0.038,0.038,0.463,0,0,7.941,65,405,1 0,0,0.26,0,0.26,0,0,0,0,0,0.26,1.06,0,0.26,0.26,0.8,0,0.26,1.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.299,0,0.471,0,0,2.088,15,188,1 0,0.9,0,0,0.9,0,0.9,0,0,0.9,0,0,0,0,0,1.81,0,1.81,1.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0.319,0,1.492,0,19.829,5.3,66,106,1 0.44,0.44,0,0,0,0,0,0,0,2.64,0,1.76,0,0,0,0,0,0.44,2.64,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.231,0,0,0.231,0,5.977,70,263,1 0,0.55,0.55,0,0.55,0,0,0.55,0,0,0,1.11,0,0,0,1.11,0,0.55,1.66,0,2.22,0,0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.55,0,0,0,0,0,0,0,0.484,0.08,0,8.375,85,201,1 0,0,0,0,1.21,0,0.8,0,0,0.8,0.4,0.8,0.4,0,0,1.61,0,0,1.61,0,1.21,0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.07,0,0.21,0,0.07,4.49,24,229,1 0,0.53,0,0,1.06,0,1.6,0,0,0.53,0,0,0,0,0.53,0,0,0.53,2.13,0,0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.53,0,0,0,0,0.239,0.079,0.159,0,0,4.555,51,123,1 0,2.35,0,0,1.17,0,0,0,0,2.35,0,1.17,0,0,0,1.17,0,0,2.35,0,3.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.25,20,42,1 0,0,0,0,0,0,0,6.06,0,0,0,0,0,0,0,0,0,0,6.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.48,0.48,0,1.684,10,32,1 0,0.8,0,0,0.8,0,0.8,0,0,0.8,0,0,0,0,0,0.8,0.8,0.8,1.6,0,1.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.135,0,0.27,0,0,3.115,19,81,1 
0,0.8,0,0,0.8,0,0.8,0,0,0.8,0,0,0,0,0,0.8,0.8,0.8,1.6,0,1.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.134,0,0.269,0,0,3.115,19,81,1 0.59,0,0.35,0,1.66,0,0,0,0.23,1.3,0.71,2.49,0.59,0,0,0.59,0.11,0,4.51,0,1.66,0,0.47,0.83,0,0,0,0,0,0,0,0,0,0,0.23,0,0,0,0,0.11,0,0,0,0,0.11,0,0,0,0,0.038,0,0.155,0.233,0.019,3.625,54,504,1 0.17,0.26,1.21,0,0.43,0.6,0.43,0.26,0.69,0.52,0.26,1.3,0.17,0,0.6,0.78,0.17,1.39,2.43,0.17,1.13,0,0.95,0.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0.08,0,0,0,0,0.108,0,0.271,0.243,0.013,6.395,583,1375,1 0.1,0.1,0.7,0,0.6,0.2,0.4,0.1,1.41,0.81,0.1,0.5,0,0,0,0.1,0,1.11,2.22,0.4,1.92,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0.1,0,0,0,0.1,0,0,0,0,0,0,0,0,0,0,0,0.26,0.994,0.391,0.032,3.176,56,1042,1 0.22,0,0,0,0,0.22,0.22,0,0,0.22,0,0.22,0,0,0,0.22,0,0,2.03,0,0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.271,0,0.647,0,0,1.869,21,215,1 0.1,0.3,0.4,0,0.2,0.9,0.2,0.5,0.8,0.8,0.2,0.8,0,0,1.6,0,0.2,1.7,2.2,0.2,1,0,0.1,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0,0,0,0,0,0.174,0,0.334,0.174,0.014,6.896,669,1200,1 0.49,0.49,0.49,0,0,0,0.49,0,0,0,0,1.98,0,0,0,0.49,0,0.49,3.46,0,0.99,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0.195,0,0.845,0.195,0,7.205,47,281,1 0,0,0.65,0,0.65,0,0.65,0,0,0,0,0.65,0,0,0,0.65,0,0,4.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.65,0,0,0,0,0,0,0,0,0,0,0,0.233,0,0,2.5,23,135,1 0,0,1.25,0,1.25,0.62,0,0,0,0,0,0,0,0,0,1.25,0.62,0,0.62,0,1.88,0.62,0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.62,0,0,0,0,0.11,0,0.331,0.11,0.11,3.897,30,152,1 0.9,0,0,0,0,0,0.9,0,0,0,0,0,0,0,0,0,0.9,0.9,1.81,0,2.72,0,0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.564,0,0,2.818,19,62,1 0,0.29,0,0,0,0.29,0.29,2.04,0,0,0.29,1.16,0.29,0,0.29,1.16,2.33,1.16,2.33,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.132,0,0.044,0,0,1.559,24,145,1 
0,0.95,0.95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.47,0.47,1.91,0,0,0.47,0,0,0,0,0,0,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.067,0.202,0,0.269,0,0,4.18,45,464,1 0.54,0.13,0.38,0,0.05,0.16,0,0.05,0.35,0.16,0.24,1.11,0.38,1.19,0.13,0.19,0.43,0.48,3.56,0,0.81,0,1.14,0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.05,0,0.02,0,0,0.086,0,0.268,0.15,0.159,6.761,195,3313,1 0.54,0.13,0.38,0,0.05,0.19,0,0.05,0.35,0.16,0.24,1.11,0.38,1.19,0.13,0.19,0.43,0.48,3.56,0,0.81,0,1.14,0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.05,0,0.02,0,0,0.086,0,0.273,0.15,0.159,6.789,195,3327,1 0.27,0.27,0.55,0,0.27,0.27,0,1.39,0.27,0.83,0.27,0.55,0,0,0,0,1.39,0.55,1.67,1.95,3.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0,0,0,0,0,0,0.279,0,2.001,0.093,0,3.706,63,341,1 0,0,0,0,0,0,0,0,0,0,0,0,0,2.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.05,0,0,0,0,0,0,0,0,1.05,0,0,0,0,0.177,0,0,0.354,0,4.047,29,85,1 0.1,0.1,0.03,0,0.07,0.03,0,0.03,0,0.1,0,0.53,0,0,0,0.17,0.03,0,0.81,0.03,1.35,0,0.1,0.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0,0,0,0,0.03,0,0,0.071,0,0.013,0.065,0,2.11,46,3220,1 0.49,0.33,0.33,0,0.08,0.41,0.08,0.24,0,1.4,0.33,0.57,0.9,0,0.24,0,0,0,2.89,0,0.9,0,0.16,0.41,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.063,0,0.271,0.191,0.095,4.904,264,667,1 0,0.27,0.27,0,1.09,0,0,0,0.82,0.54,0,0.27,0.27,0,0,0.27,0.54,0,2.46,0,2.19,0,0,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.085,0.128,0,2.484,20,164,1 1.18,0.39,0.59,0,0,0.98,0.19,0.19,1.38,0.39,0,0.98,0,0.19,0,0.98,0,0,2.56,0.39,1.38,0,0,1.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.237,0,0.765,0.316,0.026,6.652,76,632,1 0,0,0,0,3.84,0,0,1.28,0,0,0,1.28,0,0,0,0,0,0,2.56,0,1.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.203,0,0,1.956,10,45,1 0.33,0.44,0.37,0,0.14,0.11,0,0.07,0.97,1.16,0.11,1.42,1.76,1.27,0.03,0.03,0.07,0.07,4.38,0,1.49,0,0.33,0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0.03,0,0,0,0.006,0.159,0,0.069,0.221,0.11,3.426,72,819,1 
0,0,0,0,0,0,0,3.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.04,0,0,0,0,0.198,0,0.198,0.198,0,3.857,25,81,1 0,0,0.78,0,1.17,0,0,0,0,0,0,0.39,0,0,0,0.78,0,0,1.56,0,1.96,0,0,0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.094,0,4.111,20,222,1 0,0.34,1.02,0,0.68,0.34,0.34,0,0,0,0,0.34,0,0,0,2.04,0,0.34,4.76,0,2.38,0,0,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.167,0,0.222,0,4.008,6.978,56,328,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.25,0,2.12,0,0,0,0,0,0,0,0,0,0,0,2.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.5,26,45,1 0,0,0.48,0,1.45,0,0,0,0.48,0,0,0,0,0.16,0,0.64,0.32,0,0.8,0,0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0,0,0,0,0,0.16,0,0,0,0,0.198,0,0.594,0,0,5.683,128,557,1 0.28,0.28,0.56,0,0.28,0.28,0,1.4,0.28,0.84,0.28,0.56,0,0,0,0,1.4,0.56,1.69,1.97,3.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.28,0,0,0,0,0,0,0,0,0,0.284,0,1.282,0.094,0,3.725,63,339,1 0.3,0,0,0,0.3,0.3,0.61,0,0.61,0.61,0,0.61,0,0,0,0.3,0.3,0.61,1.84,0,0.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.051,0,0.103,0.051,0,6.125,64,343,1 0,0,0,0,0,0,0,0,0,3.77,0,0,0,0,0,0,0,0,1.88,0,0,0,0,1.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.264,0,0,0,0,0,4.333,13,78,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.66,0,0,0,0,6.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.537,0,0,2.777,12,25,1 0,0,0,0,0,0,0,0,0,3.77,0,0,0,0,0,0,0,0,1.88,0,0,0,0,1.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.264,0,0,0,0,0,4.333,13,78,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.66,0,0,0,0,6.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.537,0,0,2.777,12,25,1 0,0,0,0,0,0,0,0,0,3.77,0,0,0,0,0,0,0,0,1.88,0,0,0,0,1.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.264,0,0,0,0,0,4.333,13,78,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.66,0,0,0,0,6.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.537,0,0,2.777,12,25,1 
0,0,0.53,0,0.21,0.1,0.1,0.53,0.1,0.21,0,0.64,0,0,0,0,0.1,0,0,0,0,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.099,0,0.016,0.066,0,2.104,29,381,1 0,0,0,0,0,0,1.15,0,0,0,1.15,0.76,0.76,0,0,0.38,0,0.38,4.61,0.38,0.76,0,1.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.486,0.347,0,1.608,6,74,1 0,0,0.68,0,0.68,0,0.68,0,0,0.68,0,0.68,0,0,0,0,0,4.1,4.1,0,0.68,0,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0,0,0,0,0,0,0,1.089,0.242,0,3.488,60,157,1 0,0,0.51,0,1.03,0.51,0,0,0,0,0.51,1.03,0,0.51,0,0,0.51,0.51,2.59,0,5.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.074,0,0.373,0.149,0,7.233,71,217,1 0,0.35,0.17,0,0,0,0,0,0.17,1.25,0,0.53,0,0,0,0,0,0.17,3.21,0,1.25,7.32,0,0.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.17,0,0,0,0,0,0,0.066,0,0,0.099,0.63,16.418,158,903,1 0,0,0,1.33,0,0,0,1.33,0,0,0,0,0,0,0,1.33,0,0,0,0,1.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.684,0,0.228,3,12,69,1 0,0,0.27,0,0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0.82,0,1.1,1.93,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.14,0,0.187,6.693,49,328,1 0,0,0,0,0,1.09,0,0,0,0,0,0,0,0,0,0,0,0,2.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.403,0,3.427,0,0,2.678,12,75,1 0.09,0,0.27,0,0.36,0.09,0,0.18,0.09,0,0,0.73,0,0.36,0,0,0,0,2.01,0,3.38,0,0.36,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0.016,0.048,0.032,0.257,0.032,0.032,3.689,69,535,1 0.73,0,0.36,0,1.59,0,0,0,0.24,1.35,0.73,2.58,0.61,0,0,0.61,0.12,0,4.55,0,1.72,0,0.49,0.98,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.12,0,0,0,0,0.041,0,0.104,0.229,0.02,3.705,54,478,1 0.73,0,0.36,0,1.59,0,0,0,0.24,1.35,0.73,2.58,0.61,0,0,0.61,0.12,0,4.55,0,1.72,0,0.49,0.98,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.12,0,0,0,0,0.041,0,0.104,0.229,0.02,3.705,54,478,1 0,0,0,0,0,0,0,0,0,0,0,0,0,2.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0.176,0,0.176,0.352,0,3.857,25,81,1 
0.66,0,0.26,0,0.26,0,0.13,0,0.66,0.26,0,0,0.79,0.13,0,0,0,0,3.98,0,0.53,0,0,1.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.13,0,0,0,0,0.109,0,0.414,0.021,0,5.955,65,667,1 0.18,0,0.09,0,0.36,0.09,0,0.36,0.09,0,0,0.63,0.09,0.36,0,0,0.09,0,1.27,0,3.38,0,0.36,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.059,0.029,0.029,0.014,0,4.192,48,566,1 0,0,1.15,0,0.38,0.38,0,0,0,0,0,0.38,0,0,0,1.54,0,0,5.4,0,2.31,0,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.196,0,0.261,0,0,5.666,56,272,1 0,0,0,0,1.01,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.05,0,0,0,0,0,0,0,0,0,0,0,0,0.088,0,0,0.088,0,6.718,33,215,1 0,0,0.53,0,0.53,0,0,0.53,0,0,0,1.06,0,0,2.12,0,0.53,0.53,2.65,0,2.65,0,1.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.53,0.53,0,0,0,0,0,0,0,0,0,0.191,0,0.095,0.478,0,5.038,60,131,1 0,0.11,0.35,0,1.18,0.47,0.23,0.35,0,0.11,0.11,0.95,0,0.11,0,2.14,0.95,0.23,1.9,0.35,0.35,0,0.59,0.11,0,0,0,0,0,0,0,0,0,0,0,0.35,0,0,0,0.11,0,0,0,0,0,0,0,0,0,0.059,0,0.434,0.197,0.217,8.026,283,1509,1 0,0.35,0.35,0,1.07,0,0,0.35,0,1.07,0,0.71,0,0,0,0,0.71,0.71,2.85,0,2.5,0,1.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.35,0,0,0,0,0.35,0,0,0,0,0.24,0,0.24,0.24,0,3.414,25,140,1 0,0.76,0,0,0,0,0,0,0.57,0.19,0,0,0,0,0,0.57,0,0.19,0.19,0.38,0.57,10.17,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0.099,0,0.232,0.066,0.928,20.432,213,1655,1 0.1,0,0.1,0,0.4,0.1,0.1,0,0.2,0.2,0.4,0.5,0,0.6,0,0.91,0.2,0,1.72,4.26,1.72,0,0.4,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.096,0,0.336,0.16,0,6.758,494,1426,1 0.39,0.46,0.31,0,0.15,0.03,0,0.19,0.58,0.66,0.31,0.7,0.62,1.29,0.03,0.23,0.43,0,3.16,0,1.36,0,0.5,0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0,0,0,0.006,0.078,0,0.381,0.496,0.133,7.192,543,2424,1 0.32,0,0.64,0,0.32,0.32,0,1.61,0.32,0.64,0.32,0.64,0,0,0,0,1.61,0,1.29,2.58,3.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0,0,0,0,0,0.16,0,1.178,0.107,0,3.613,63,318,1 
0.1,0,0.1,0,0.4,0.1,0.1,0,0.2,0.2,0.4,0.5,0,0.6,0,0.91,0.2,0,1.72,4.26,1.72,0,0.4,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.096,0,0.336,0.16,0,6.758,494,1426,1 0.08,0.08,0.76,0,0.85,1.02,0.25,0.17,0.59,0.08,0.17,0.59,0.17,0,2.21,0.25,0.08,0.93,1.61,0.17,0.42,0,0.85,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0.17,0.08,0.08,0.08,0,0,0,0.065,0,0.403,0.117,0.013,7.484,669,1407,1 0.09,0.49,0.59,0,0.29,0.19,0,0,0.09,0.39,0,1.59,0.19,0,0,0,0.09,0,3.67,0.09,1.09,0,0,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.09,0,0,0,0.766,0.037,0,5.836,1.31,0,5.792,54,753,1 0,0,1.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.92,1.92,0,1.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.416,6,17,1 0.1,0,0.1,0,0.4,0.1,0.1,0,0.2,0.2,0.4,0.5,0,0.6,0,0.91,0.2,0,1.72,4.26,1.72,0,0.4,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.096,0,0.352,0.16,0,6.918,494,1439,1 0,0,0,0,1.26,0,0,1.26,0,0,0,0,0,0,0,1.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.411,0,0.926,0,0,3.558,25,121,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.61,0,0,0,0,0,1.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.714,0,0,0.238,0,0,4.333,11,104,1 0,0.08,0.25,0,0.84,0.25,0.08,0.33,0,0.16,0.08,0.76,0,0.08,0,1.6,0.76,0.33,1.6,0.33,0.5,0.84,0.42,0.08,0,0,0,0,0,0,0,0,0,0,0,0.25,0.08,0,0,0.08,0,0,0,0,0,0,0,0,0,0.047,0.015,0.502,0.157,0.329,7.24,292,2049,1 0,0.08,0.25,0,0.84,0.25,0.08,0.33,0,0.16,0.08,0.76,0,0.08,0,1.61,0.76,0.33,1.52,0.33,0.5,0.84,0.42,0.08,0,0,0,0,0,0,0,0,0,0,0,0.25,0.08,0,0,0.08,0,0,0,0,0,0,0,0,0,0.047,0.015,0.518,0.157,0.33,7.277,292,2045,1 0,0,0,0,1.05,2.1,1.05,0,0,0,0,0,0,0,0,0,0,0,3.15,0,1.05,0,2.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.176,0.176,0,2.05,6,41,1 0,0,0,0,1.25,0,0,1.25,0,0,0,0,0,0,0,1.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.409,0,1.023,1.023,0,3.485,25,122,1 
0.09,0,0.09,0,0.39,0.09,0.09,0,0.19,0.29,0.39,0.48,0,0.58,0,0.87,0.19,0,1.66,4.1,1.66,0,0.39,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.139,0,0.31,0.155,0,6.813,494,1458,1 0,0,0,0,0,0,0,1.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.11,0,0,0,0,0.208,0,0.208,0.416,0,3.95,23,79,1 0,0.55,1.11,0,0.55,0.55,0,0,0,0,0.55,0,0,0,0.55,1.11,0,0,1.67,0,1.67,0.55,0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.086,0.086,0.517,4.166,18,125,1 0,0,0.29,0,0.59,0.29,0.29,0,0.29,1.78,0,0.89,0,0,0,0,0.59,0.29,4.16,0,0.89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.137,0,0.045,0.045,0,12.2,163,488,1 0.65,0.49,0.32,0,0.32,0.16,0,0.49,0.65,0.49,0.16,1.3,0,0,0.16,1.14,1.3,0.16,3.6,0.49,1.8,0,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0.231,0,0.925,0.231,2.29,5.833,47,595,1 0,0.64,0.64,0,1.29,0.64,0,0.64,0,0.64,0,1.94,0,0.64,0,3.89,0,0.64,3.24,0,3.89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.728,0.08,0.08,6.612,129,205,1 0,0,0.96,0,0,0,0,0,0,0,0,0.48,0,0,0,0.96,0,0.48,5.79,0,1.93,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0.086,0,0.26,0.086,0,1.117,4,38,1 0,0,3.7,0,0,0,0,0,0,0,0,0,0,0,0,1.85,0,0,1.85,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.291,0,3.79,0,0,4.833,29,87,1 0,0,0.38,0,0.38,0.38,0.38,0,0.38,1.94,0,1.16,0,0,0,0.38,0.77,0.77,2.72,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0.057,0.057,0,7.121,70,235,1 0,0.85,0.42,0,0.42,0,0.42,0,1.27,0.85,0,0.42,0.42,0,0,0,0,0,2.55,0,2.12,0,0,0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0.42,0,0,0,0,0,0,0,0,0,0,0,0.221,0,0.177,0.221,0.177,8.777,54,553,1 0,0.6,0,0,0,0.6,0,0,0,0.3,0,1.21,0,0,0,0,0.3,0,0.3,0,0.3,0,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.146,0,0,0.097,0,3.23,77,210,1 0,0,0.18,0,1.68,0.18,0.37,0.56,0,0,0.37,1.5,0.18,0,0,1.12,0,0.18,3.18,0,0.93,0,0,0,0,0,0,0,0,0,0,0,0,0,0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.066,0,0.563,0.165,0.033,3.106,34,292,1 
0,0,0,0,0.91,0,0,0,0,0.45,0,0.45,0,0,0,0,0,0,3.21,0.45,0.91,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.37,0,0,0,0,0,0,0.164,0,0,1.076,4,42,1 0,0,0,0,1.82,0.36,0.36,0.72,0.36,0.36,0,0,0,0,0,0,0,0.36,2.91,0,2.18,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.36,0.36,0,0,0,0,0,0.297,0.059,0.178,0,0,2.446,11,115,1 0,0,0,0,0,0,0,0,0.93,0,0,0,0,0,0,0,0,0,0.93,0,3.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.14,0,0.561,0,0,2.166,23,65,1 0.42,0.39,0.36,0,0.13,0.09,0.09,0.06,0.49,0.91,0.26,0.55,0.42,1.08,0.03,0.26,0.42,0.03,2.75,0,1.27,0,0.32,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.09,0,0,0,0.032,0.104,0.016,0.345,0.515,0.109,5.632,134,2501,1 0,0.33,1.34,0,0,0,1.34,0.33,0,0.33,0.33,0.33,0,0,0,0.67,0.67,0.33,0.67,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0,0,0,0,0,0.161,0,0.053,0,0.053,2.036,12,167,1 0,0,0,0,0.13,0,0,0,0,0.13,0,0.06,0,0,0,0,0,0,0.2,0,0.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,1.03,0,0,1.611,0.01,7.549,278,3752,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.38,0,0,3.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.872,0,0,2.2,5,11,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.333,0,0,1.666,5,15,1 0.29,0,0.29,0,0,0,0,0,0.44,0.29,0,0.44,0,0,0,0.14,0,0,3.14,0,1.64,0,0,0.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.05,0.066,0,0.083,0.05,0,3.075,60,326,1 0.6,0,0.36,0,1.44,0,0,0,0.24,1.32,0.72,2.53,0.6,0,0,0.6,0.24,0,4.45,0,1.8,0,0.72,0.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.12,0,0,0,0,0.04,0,0.102,0.224,0,3.656,54,479,1 0.43,0.43,0.43,0,0.43,0,0,0,0,1.31,0,0.87,0.43,0,0,2.63,0,0,1.75,0,2.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.201,0,0.941,0.067,0,2.329,28,226,1 0,0.45,0,0,0.45,0.45,0.45,0.45,0,1.8,0,0.45,0,0,0,0,0,0,1.8,0,0.45,0,0,0,0,0,0,0,0,0,0,0,0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.281,0,0.21,0,0,2.368,14,135,1 
0.29,0.29,0,0,0.29,0,0,1.46,0,0,0,0.29,0,0,0,0.58,2.04,0.29,2.04,1.16,1.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.42,0.046,0.046,2.228,34,234,1 0.12,0.12,0.24,0,1.34,0.12,0,0.12,0,0,0.36,0.85,0,0,0,0.24,0.24,0,2.33,0,0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.98,0,0,0,0.12,0,0,0,0,0.12,0,0,0,0.063,0.021,0,0.042,0.042,0,2.351,69,254,1 0,0.33,0.33,0,0.66,0,0,0.33,0,0.33,0,0.33,0,0,0,0.66,1,0,1,0,0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0.109,0,0.054,2.825,34,113,1 0.62,0.62,0,0,0,1.86,0,0,0,0,0,0.62,0.62,0,0,0,0,0.62,2.48,0,1.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.327,0,2.295,0.218,0,5.166,28,155,1 0,0,0.78,0,0.78,0,0.52,0.52,0,1.04,0,0.26,1.56,0,0.78,0,0,1.56,2.08,0,1.56,0,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.52,0,0,0,0,0,0,0,0,0,0.08,0,0.443,0.402,0,2.41,19,241,1 0,0.72,0,0,2.89,0,0,0,0,0,0.72,0.72,0,0,0,0,0,0,2.17,0,1.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.379,7,40,1 0,0,1.63,0,0,0,0,0,0,0,0,0,0,0,0,4.91,0,0,3.27,0,1.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.695,0,0,2.315,12,44,1 0,0,0,0,0.26,0,0.26,0,0,0,0,0,0,0,0,0.26,0,0,0,0,0.52,17.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.26,0,0,0,0,0.462,0.084,0.084,0.378,0,1.051,13.82,104,1078,1 0,0,0,0,0,0,0,0,0,0,0,0,5.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.26,0,2.444,10,44,1 0.25,0,0.25,0,0.5,0,0.25,0,0,0,0.5,0.76,0,0,0,0.5,0,0,1.52,0,1.01,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.25,0,0,0,0,0.25,0,0,0,0,0,0,0,0.041,0,0.082,0.041,0.041,1.89,18,225,1 0.25,0.5,0.5,0,0,0,0,0,0,0.25,0.25,1,0.25,0,0,0,0,0.5,3,0,2.75,0,1.25,0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.181,0,0.407,0.997,0,3.417,49,270,1 0,0,0.35,0,0,0.7,0.35,0.35,0,0,0.35,1.06,0,0,0,0.7,0,1.41,2.12,2.82,2.47,0,0,0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.061,0,0.061,0,0.122,2.302,21,99,1 
0,0,0,0,2.48,0,0,0.62,0,0,0,1.24,0,0,0,0,0,0,2.48,0,3.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.179,0,0.089,3.702,53,174,1 0,0,0.77,0,0.77,0,0.51,0.51,0,1.03,0,0.25,1.54,0,0.77,0,0,1.54,1.8,0,1.54,0,0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.51,0,0,0,0,0,0,0,0,0,0.079,0,0.514,0.434,0,2.441,19,249,1 0,0,0.74,0,0.74,0,0,0.74,1.49,0,0,0,0,0,0,0,0,0,6.71,0,2.98,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.99,0,0,1.666,12,60,1 0,0,0,0,0,0,0,2.5,0,0,0,0,0,0,0,0,1.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.25,0,0,0,0,0.188,0,0,0.188,0,3.545,21,78,1 0.49,0.28,0.4,0,0.09,0.11,0.02,0.21,0.42,0.75,0.23,0.89,0.54,1.06,0,0.16,0.33,0.02,3.23,0,1.46,0,1.03,0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0,0,0,0,0.059,0,0.391,0.868,0.144,5.783,193,3210,1 0,0,1.56,0,0,0,1.56,0,0,0,0,0,0,0,0,1.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.56,0,0,0,0,0.256,0,0.769,0,0,2.125,12,34,1 0,0,1.4,0,0,0,0,0,0,0,0,0,0,0,0,1.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.647,0,0,1,1,13,1 0,0,0,0,0,0,0,0,0,0,0,0,0,2.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.03,0,0,0,0,0,0,0,0,1.03,0,0,0,0,0.171,0,0.171,0.342,0,3.809,24,80,1 0,0.19,0.39,0,0,0,0.19,0,0,0,0,0,0,0,0,0,0,0.19,2.36,0,1.18,0,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0,0.152,0,0,1.357,19,148,1 0,0.57,0.57,0,0.14,0.14,0,0,0.14,0,0,0.43,0.14,0,0,0,0.14,0,3.31,0,1.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.156,0,0,1.394,6,159,1 0,0.17,0,0,0,0,0.17,0.17,0,0.17,0,0,0,0.35,0,0,0,0,0,0,0.17,0,0.17,0,3.37,1.77,0,0,0,0.17,0,0,0,0,0,0,0,0,0.17,0,0,0,0,0,0.35,0,0,0,0.108,0.216,0.061,0.046,0.03,0,4.259,85,3318,1 0.1,0.2,1.01,0,0.8,0.8,0.5,0,0.8,0.1,0.3,0.7,0.3,0,1.61,0.1,0,1.11,1.31,0.2,0.7,0,0.6,0.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0.1,0.1,0,0,0,0.11,0,0.488,0.157,0.015,8.55,669,1351,1 
0,0,0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,1.91,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.265,0,0.797,0.885,0,9.29,75,288,1 0,0,0,1.29,1.29,0,0,0,0,0,0,0,0,0,0,1.29,0,0,2.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.187,0,0,0.936,0,0,4.586,24,133,1 0.84,0,0,0,0,2.54,0,0,0,0,0,0.84,0.84,0,0,0,0,0,2.54,0,1.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.452,0,2.865,0.301,0,5.037,23,136,1 0,0,0.76,0,0.76,0,0.76,0.51,0,1.02,0,0.25,1.53,0,1.02,0,0.25,1.79,1.53,0,1.79,0,0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.51,0,0,0,0,0,0,0,0,0,0.083,0,0.458,0.499,0,2.455,19,248,1 0,0,1.06,0,0,0,0,0,2.12,0,0,0,0,0,0,0,0,0,3.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.06,0,0,0,0,0,0,0,0.353,0,3.904,12,82,1 0.08,0.08,0.76,0,0.85,1.02,0.25,0.17,0.59,0.08,0.17,0.59,0.17,0,2.21,0.25,0.08,0.93,1.61,0.17,0.42,0,0.85,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0.17,0.08,0.08,0.08,0,0,0,0.065,0,0.403,0.117,0.013,7.484,669,1407,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.273,0,0,5.75,25,69,1 0,0,1.16,0,3.48,0,0,0.58,0.58,0,0,0.58,0,0,0,1.74,0,0,1.16,0,3.48,0,0,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.085,0,0.171,0.085,0,2.17,12,102,1 0.74,0.28,0.31,0,0.07,0.21,0,0.14,0.49,0.35,0.17,0.74,0.56,1.48,0,0.17,0.49,0.03,3.24,0,1.23,0,0.56,0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0.07,0,0.03,0,0.006,0.054,0,0.678,1.05,0.162,5.648,154,3084,1 0.32,0,0.64,0,0.32,0.32,0,1.6,0.32,0.64,0.32,0.64,0,0,0,0,1.6,0,1.28,2.57,3.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0,0,0,0,0,0.213,0,1.174,0.106,0,3.584,63,319,1 0.09,0.49,0.59,0,0.29,0.19,0,0,0.09,0.39,0,1.59,0.19,0,0,0,0.09,0,3.67,0.09,1.09,0,0,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.09,0,0,0,0.766,0.037,0,5.836,1.31,0,5.792,54,753,1 0,0,0,0,1.56,0,0,0,0,0.31,0,0.31,0,0,0,0.31,0.62,0,2.82,0,0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.94,0,0,0,0,0.05,0,0,0,0,2.132,22,113,1 
0,0,0,0,0.96,0,0.96,0,0,0,0,0,0,0,0,0.96,0,0,0,0,1.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.96,0,0,0,0,0,0,0,0.824,0,0,3.025,67,118,1 0,0,0.93,0,0,0,0,0,0,2.8,0.93,0,0,0,0,0,2.8,0,4.67,0.93,0.93,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.93,0,0,0,0,0,0,0.464,0.154,0,1.612,10,50,1 0,1.14,1.14,0,0,0,0,0,1.14,0,0,1.14,0,0,0,0,0,0,0,0,3.44,0,0,1.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.197,0,3.681,35,81,1 0,0,0,0,0,0,0,0,0,0,0,0,0,2.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.03,0,0,0,0,0,0,0,0,1.03,0,0,0,0,0.17,0,0.51,0.34,0,3.761,23,79,1 0,0.81,0,0,2.03,0,0,0.4,0,1.21,0,0.81,0,0,0,0.4,0,0,3.65,0,1.62,0,1.62,0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.4,0,0,0,0,0,0,0,0.272,0,3.192,34,166,1 0.6,0,0,0,1.21,0,0.6,0,0,0,0,0.6,0,0,0,0,0,0.6,3.65,0,1.21,0,0,0.6,0,0,0,0,0,0,0,0,0,0,0,0,1.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.105,0.315,0,3.217,30,74,1 0.25,0,0.25,0,0,0,0.25,0,0.77,1.55,0,0.51,0,0,0,0.25,0,0,1.55,0,0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.29,0,0,0,0,0,0,0,0,0,0,0,0,0.404,0,0.161,0.161,0.04,9.633,110,578,1 0.76,0.19,0.38,0,0.19,0.12,0,0.25,0.76,0.31,0.25,1.52,0.31,0.38,0,0.38,0.44,0.06,2.98,0.69,1.26,0,0.44,0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.12,0,0,0.06,0,0,0,0,0,0,0.087,0.054,0.439,0.241,0.065,3.702,45,1070,1 0,0,0.47,0,0.47,0.47,0.47,0,0,2.38,0,0.95,0.47,0,0,0,0.47,0,1.9,0,0,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0.266,0,0.621,0.799,0.088,36.642,148,513,1 0,0,0,0,0,0,0,0.42,0,0.42,0.42,0,0,0,0,0,0,0,2.52,0,2.94,0,0,0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.84,0,0,0,0,0.129,0,0.129,0.194,0,1.859,20,119,1 0.62,0,0,0,1.24,0,0.62,0,0,0,0,0.62,0,0,0,0,0,0.62,3.72,0,1.24,0,0,0.62,0,0,0,0,0,0,0,0,0,0,0,0,1.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.107,0.322,0,3.318,30,73,1 0.33,1.01,0,0,1.35,0,0.33,0,0,0,0.67,0.67,0.33,0,0,1.01,0,1.68,2.36,0,3.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.12,0,0.06,0.06,0,193.5,1013,1161,1 
0,0,0,0,0.97,0,0.97,0,0,0,0,0,0,0,0,0.97,0,0,0,0,1.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.97,0,0,0,0,0,0,0,0.834,0,0,3.052,68,116,1 0.14,0,0.21,0,1.72,0.43,0,0,0.07,0.14,0.07,0.57,0.35,0.07,0,0,0.28,0,4.31,0.28,0.64,0,1,0.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.07,0,0,0,0,0,0.012,0.064,0,0,0.206,0,1.711,10,380,1 0.6,0,0.36,0,1.44,0,0,0,0.24,1.32,0.72,2.52,0.6,0,0,0.6,0.24,0,4.44,0,1.8,0,0.72,0.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.12,0,0,0,0,0.041,0,0.102,0.205,0,3.548,54,479,1 0.2,0.1,0.7,0,1.1,0.2,0,0.3,0,1.2,0.3,1.1,0.1,0,0.1,0.4,0.2,0.1,2.61,0,2.51,0,0,0,0,0,0,0.1,0,0,0,0,0,0,0,0,0,0,0,0.1,0,0,0,0,0,0,0,0,0.017,0.159,0,0.53,0.406,0.123,9.781,84,851,1 0,0,0,0,0,0,0,0,0,0,0,5.26,0,0,0,0,0,0,5.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.048,0,0.349,3.333,10,30,1 0.35,0.46,0.31,0,0.15,0.03,0,0.35,0.58,0.66,0.31,0.7,0.62,1.28,0.03,0.23,0.42,0,3.12,0,1.36,0,0.46,0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0,0,0,0.006,0.09,0,0.324,0.486,0.126,6.11,116,2218,1 0.3,0.2,0.3,0,0.2,0.4,0.2,0.3,0.4,1.71,0.1,1.91,0.2,0,0.5,0.6,0,0.8,3.43,0,1.51,0,0.9,0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.1,0,0,0,0.2,0,0,0,0,0.017,0,0.275,0.206,0.017,4.923,103,1029,1 0,0,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.59,0,2.99,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.093,0.186,0.559,0.279,0,2.297,12,108,1 0.19,0.19,0.29,0,1.07,0.19,0.19,0.97,0.87,0.58,0.09,1.07,0.19,0.87,0.09,0,0,1.17,3.81,0.68,1.75,0,0.09,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.09,0.09,0,0,0,0,0,0,0,0,0,0,0.201,0.402,0.232,0.03,4.295,49,872,1 0,0,0,0,0,0,0,0,0,0,0,1.42,0,0,0,1.42,0,0,2.14,0,0.71,0,0,0.71,0,0,0,0,0,0,0,0,0,0,0,0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0.099,0,0.899,0,0,3.066,36,138,1 0.15,0.3,0.45,0,0.76,0.3,0,0,1.52,1.52,0.15,1.98,0.3,0,0.61,0.3,0,1.52,2.14,0.15,2.44,0,0.76,0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.025,0,1.051,0.225,0.05,6.686,217,896,1 
0,0,0.28,0,0.84,0.84,0.28,0,0.28,0.28,0,0.28,0,0,0,0.56,0,0.56,2.52,0,0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.28,0,0,0,0,0.28,0,0,0,0,0.05,0,0.05,0,0,2.083,34,150,1 0.09,0.09,1.14,0,0.38,0,0,0.09,0,0.19,0.38,0.19,0,0,0,0.66,0,0,1.52,0,1.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.38,0,0,0,0,0,0.044,0.059,0,0.591,0,0,3.28,31,771,1 0,0,0,0,0,0,1.11,0,0,1.11,0,0,0,0,0,0,0,0,2.22,0,1.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.202,0,0.202,0,0,4,16,40,1 0,0.51,0,0,0,0,0,0,0,0.51,1.02,0.51,0,0,0,0.25,0.76,1.27,2.04,0,0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.25,0,0,0,0,0,0,0,0,0,0.457,0,0.29,0,0.124,2.614,66,149,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.169,0,0,3,12,36,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.66,0,0,3.33,0,1.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.653,0,0,8,38,80,1 0.7,0,1.05,0,0,0,0,1.4,0.35,0.35,0,0.35,0,0,0,2.1,0.7,0.35,2.1,3.15,2.1,0,0.35,1.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.116,0,0.348,0,0,1.166,13,189,1 0,0,0,0,0,0,0,1.2,0,0,1.2,0,0,0,0,6.02,0,0,1.2,0,1.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.443,0,0,3.782,32,87,1 0,0,0.53,0,0.53,0,0.53,0,0,0.53,0,0,0,0,0,0,0.53,0,5.85,0,3.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.09,0,0.361,0,0,2.437,19,78,1 0,1.26,0,0,0,0,0,0,0,0,0,0,0,0,0,1.26,0,0,1.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.416,0,0,9.785,42,137,1 0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0.609,0,1.524,0,0.304,1,1,36,1 0.32,0.16,0.56,0,0.32,0.23,0.04,1.24,0.4,0.4,0.11,0.68,0.52,0.36,0.28,0.72,0.4,0.4,3.08,0.16,1.32,0,0.44,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.04,0,0,0,0,0.11,0,0,0,0.019,0.052,0.065,0.413,0.164,0,3.533,181,1643,1 0,0,0,0,0,0,0,1.21,0,0,1.21,0,0,0,0,6.09,0,0,1.21,0,1.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.407,0,0,3.454,32,76,1 
0.28,0,0.28,0,0,0.28,0.28,0.28,0.28,1.15,0,0.86,0.86,0,0,0,0,0,2.89,0,1.44,0.86,0,0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.28,0,0,0,0,0,0,0,0.554,0.221,0.166,5.328,140,341,1 0.09,0,0.67,0,0.29,0,0,0,0.19,0.38,0.09,1.35,1.06,0,0,0.29,0.19,0,2.51,0,1.35,0,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.58,0,0,0,0,0.29,0,0,0.19,0,0.149,0,0.374,0.059,0,9.039,148,1148,1 0,0,0.4,0,0.4,0.2,0,0,0,1.01,0.2,0.4,0,0,0,0.2,0.4,0.2,0.8,0,0.4,0,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0,0,0,0,0,0,0,0,0.03,0,0,0.302,0,1.727,11,190,1 0,3.05,0.38,0,1.14,0.19,0,0,0,1.52,0.19,0.76,0.19,0,0,0,1.72,0.38,3.05,0.38,2.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0,0,0,0.171,0,0.294,0.147,0.024,17.074,430,1144,1 0,0,1.55,0,0,0.77,0,0.38,0,0,0.38,1.16,0,0,0,0.38,0,1.16,1.93,0,0.38,0,1.16,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0.609,0.121,0,2.666,22,160,1 0,0.82,0.32,0,1.14,0.32,0,0.16,0,0.65,0,2.13,0,0,0,0.16,0,0,1.47,0,1.47,0,0.98,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0,0,0,0,0,0.29,0.029,2.257,13,158,1 0,0,0,0,0,0,0,0,0,0,0,0,2.63,0,0,0,0,0,2.63,0,1.75,0,0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.144,0,0,3.907,0,0,13.928,70,195,1 0.1,0,0.7,0,0.2,0,0,0,0.2,0.3,0.1,1.3,1.1,0,0,0.3,0.2,0,2.61,0,1.2,0,0,0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.5,0,0,0,0,0.3,0,0,0.2,0,0.141,0,0.352,0.056,0,9.601,148,1133,1 0.35,0.1,0.55,0,2.15,0.15,0,0,0.1,0.75,0.35,0.85,0.25,0,0,0.15,0.3,0,5,0,1.75,0,0.05,0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0,0,0.008,0.035,0,0.149,0.131,0.008,3.629,127,617,1 0,0.19,1.08,0,0.79,0.79,0.49,0,0.89,0.29,0.29,0.69,0.29,0,1.58,0.09,0,1.08,1.38,0.19,0.69,0,0.59,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.19,0,0.09,0.09,0,0,0,0.092,0,0.417,0.154,0.015,8.323,669,1365,1 0.61,0,0,0,1.22,0.61,0.61,0,0.61,0,0,1.22,0,0,0,1.22,0,0,5.52,0,0.61,0,0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.61,0,0,0,0,0,0,0,0,0,0.184,0,0.829,0,0,4.45,34,89,1 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.22,0,0,2.22,0,2.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.145,0.145,0.291,0,2.95,11,59,1 0,0,0.7,0,0,0,0,0,0,0,0,0,0,0,0,0.7,0,0,0,0,0,0,0,0,0.7,0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.328,0,0,1.333,4,16,1 0,0.26,0.78,0,0.26,0.26,0.08,1.04,0.52,1.56,0.26,0.69,0.17,0.08,0.69,0.86,0.34,0,1.82,0.17,1.3,0,0.08,0.34,0,0,0,0,0,0,0,0,0.08,0,0,0.08,0,0,0,0,0,0,0,0,0.08,0.08,0,0,0.096,0.234,0,0.358,0.261,0.11,3.554,54,981,1 0.17,0.17,0.25,0,0.43,0.08,0.08,0.08,0.69,2.41,0,0.34,0.17,0,1.46,0.34,0.08,0,2.76,0.43,1.55,0,0.17,0.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.43,0,0,0,0,0.34,0.08,0,0,0,0.107,0,0.308,0.067,0.026,4.215,82,1214,1 0.71,0,0.35,0,0.17,0.17,0.35,0,0,0.35,0.17,0.53,0,0,0,0.35,0.71,0.35,3.76,0,1.97,0,0,0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.029,0,0.234,0.029,0,3.519,97,359,1 0,0,0.71,0,0.23,0,0,0,0.23,0.23,0.23,1.9,0,0,0,0.23,0,0,3.81,0.23,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.111,0,1.045,0.037,0,4.022,97,543,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.094,0,0,1.428,5,40,1 0,0.26,0,0,0.26,0,0,0,0,1.88,0,0,0,0,0,0,0,0,0.26,0,0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.407,0.067,0,0.033,0,0,5.009,55,506,1 0.27,0.27,0.27,0,0,0,0,0.54,0,0.27,0,0.27,0,0,0,1.08,0,0.27,1.08,0,0.27,0,0.27,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.225,0,0.451,0.496,0,2.934,64,578,1 0.16,0,0.24,0,1.63,0.49,0,0,0,0.16,0.08,0.65,0.4,0.08,0,0,0.32,0,3.68,0.32,0.65,0,1.14,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0.08,0,0,0,0.014,0.058,0,0,0.232,0,1.725,10,333,1 0,0,1.29,0,0,0,0,0,0,0,0,1.29,0,0,0,0,0,0,1.29,0,5.19,0,0,1.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.208,0,4.23,25,55,1 0.19,0,0.38,0,0,0.19,0,0,0,0,0.19,0.19,0,0,0,0.38,0,0.19,1.14,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.01,0,0,0,0.003,2.383,21,15841,1 
0.19,0,0.19,0,0.87,0.48,0.09,0,0.09,0.39,0.48,0.68,0.19,0,0.09,0.29,1.07,0.39,3.51,0.78,1.56,0,0.09,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.09,0,0,0,0.015,0.18,0,0.045,0.015,0,2.133,40,303,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.219,0,3.875,11,31,1 0,0,0,0,0,0,0,1.25,0,0.41,0,0,0,0,0,0.41,0,1.67,0.41,0,0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0.125,0,0.312,0.062,0,1.477,8,65,1 0.86,0,0.86,0,0,0,0,0,0,0,0,0.43,0,0,0,0.86,0.86,0,3.47,0,1.73,0,0,0,0,0,0,0,0,0,0,0,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.24,0,1.765,0.481,0.08,7.059,159,473,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.05,1.05,0,3.15,0,2.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.05,0,0,0,0,0.365,0,0.365,0.182,0,3.343,28,107,1 0.76,0.38,0,0,0.38,0.38,0,0,0,0.38,0,1.53,0,0,0,0,0,0,1.92,0,3.07,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.124,0,0.124,0,0.062,89.9,735,899,1 0,0,0.94,0,0.31,0,0,0,0.31,0,0,0.62,0,0,0,1.25,0.62,0,3.14,0,1.25,0,0.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0,0,0.145,0.048,0.485,0.388,0.097,3.322,61,319,1 0,0,0,0,1.56,0,1.56,0,0,0,0,0,0,0,0,0,0,0,1.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.767,0.255,0,8.083,81,97,1 0.52,1.31,0.26,0,2.9,0.26,0.79,0.26,0,0.79,1.05,1.58,0.79,0,0,0,0,1.31,3.16,0,0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.138,0,0.046,0,0,2.934,60,135,1 0.47,0,0.95,0,0.95,0,0.95,0,0,0,0,0.47,0,0,0,0.47,0.47,0,4.28,0,0.95,0,1.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.47,0,0,0,0,0.076,0,1.306,0.23,0,6.027,91,217,1 0,0,0.47,0,1.43,0,0,0,0,0,0,0.95,0,0,0,0.47,0.95,0,3.34,0,1.91,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.43,0,0,0,0,0.076,0,0.536,0.306,0,4.653,78,242,1 0.49,0,0.99,0,0.99,0,0.99,0,0,0,0,0.49,0,0,0,0.49,0.49,0,4.45,0,0.99,0,1.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.49,0,0,0,0,0,0,1.118,0.239,0,5.228,69,183,1 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.19,1.19,0,1.19,0,0,0,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.181,0.787,0,3.875,31,93,1 1.63,0,1.63,0,0,0,0,0,1.63,0,0,0,0,0,0,1.63,0,0,3.27,0,3.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.802,0.267,0,2,16,36,1 0.27,0,0.16,0,0.27,0,0,0.05,0,0.21,0.1,0.93,0.1,0,0,0.38,0.1,0,2.85,0,1.2,0,0.21,0.16,0,0,0,0,0,0,0,0,0.05,0,0,0,0.1,0,0,0,0,0,0,0,0,0,0,0,0.068,0.029,0,0.019,0.058,0.009,3.389,56,539,1 0.33,0,0,0,0,0.33,0,0,0,0,0,1.01,0.67,0,0,0,0.67,0,3.05,0,2.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.682,21,69,1 0.23,0.23,0.47,0,0.7,0.23,0.23,1.41,0.23,0.47,0.23,0.47,0,0,0,0,1.41,0.47,0.94,1.89,3.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.47,0,0,0,0,0.23,0,0,0,0,0.075,0,1.289,0.151,0,6.529,276,666,1 0,0,1.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.21,0,0,0,0.212,0.212,0,0,0.212,0,3.272,24,72,1 0,0.17,0,0,0,0,0.17,0.52,0,0.17,0.35,0.52,0,0,0,0,0.17,0.7,0.87,0,0.7,1.92,0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.029,0.147,0.029,0.117,0.058,0.235,3.521,39,419,1 0,0.74,0,0,0,1.49,0.74,0,0,0,0,0,0,0,0,0,0,2.23,1.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.307,0,0,0,0,3.39,45,139,1 0,0.56,0.56,0,1.12,0.56,2.25,0,0,0.56,0,0.56,0,0,0,0,0,0.56,3.38,0,1.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.56,0,0,0,0,0.083,0,0.502,0,0.083,16.304,148,375,1 0.8,0,0.8,0,1.6,0,0,0,0,0,0,0,0,0,0,0.8,0.8,0,1.6,0,2.4,0,0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.566,0.361,0,2.638,22,124,1 0,0,0,0,0.87,0,0,0,0,0,0,0,0,0.87,0,0,0,0,0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.132,0,0,0,0,3.851,51,104,1 0,0,0,0,0,0.8,0,0,0,0,0,0,0,0,0,0,0,0,2.4,0,0,12.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.709,0,1.56,7.82,39,305,1 0,0,0,0,1.52,0,2.29,0,0,0,0,0,0,0,0,0,0.76,0.76,0.76,0,2.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.85,19,77,1 
0,0,0,0,0,1.36,0,0,1.36,0,0,0,0,0,0,1.36,0,0,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,1.36,0,0,0,1.36,0,0,1.36,0,0,0,0,0,0,0,0,0,0,0,1.777,0.222,0,9.727,63,107,1 0.28,0.28,0.28,0,0.57,0.28,0.28,0,0,0,0,0.86,0.28,0,0,0,0.57,0.28,2.88,0,2.01,0,1.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.131,0,0.218,0.218,0,3.694,40,218,1 0,0.5,0,0,1.25,0,0,0.25,0,0.75,0.25,0.75,0,0,0,0.25,0,0,2.01,0,1.76,0,0,0,0,0,0,0,0,0,0,0,1.25,0,0,0,0,0.25,0,0,0,0,0,0.25,0.25,0,0,0,0,0.222,0.095,0.031,0,0,5.5,114,616,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.33,0,0,0,0,0,0,0,0,1.33,0,0,0,0,0.213,0,0.426,0.213,0,4.6,23,69,1 0.16,0.16,0.5,0,0.33,0,0,0,0.5,0.84,0,0.84,0,0.33,0,0,0,0.16,2.37,0,0.5,0,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0.5,0,0,0,0,0.16,0,0,0,0,0,0,0.143,0,0.458,0.143,0.028,6.298,247,781,1 0,0,0,0,0,0.41,0,0,0,0.82,0,0,0,0,0,0.41,0,0,1.23,0,0,0,0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.199,0.53,0.331,0.199,0,0,5.019,18,261,1 0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0.9,0,1.81,3.63,0,2.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.9,0,0,0,0,0,0,0.17,0.17,0,6.266,41,94,1 0,0,0,0,0,0,0,0,0,0,0,1.14,0,0,0,0,0,0,1.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.545,4,17,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0.212,0,0.424,0.212,0,4.125,21,66,1 0.49,0.21,0.56,0,0.28,0.21,0,0.28,0.28,0.98,0.42,0.98,0.14,0,0,1.12,0.7,0.07,2.24,0,0.98,0,0.07,0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.07,0,0,0,0,0.14,0,0,0,0,0.108,0,0.768,0.312,0,3.401,94,966,1 0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0.9,0,1.81,3.63,0,2.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.9,0,0,0,0,0,0,0.17,0.17,0,6.266,41,94,1 0,0,1.78,0,0,0,0,0,0,0,0,1.78,0,0,0,0,0,0,1.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.351,0,0.27,32,75,160,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.413,0,0,4.047,22,85,1 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,1.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.412,0,0.268,20,137,180,1 0,0.33,0.33,0,1.65,0.33,0.66,0,0,0.16,0.16,0.99,0,0,0,0.82,0.33,0.16,2.81,0,0.99,0,0.49,0.33,0,0,0,0,0,0,0,0,0.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.025,0.075,0.252,0.05,0.05,6.269,350,721,1 0,0.55,0.55,0,1.1,0.55,2.2,0,0,0.55,0,0.55,0,0,0,0,0,0.55,3.31,0,1.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.55,0,0,0,0,0.165,0,0.495,0,0.082,16.826,148,387,1 0,0,0,0,0.86,0,0.86,0,0,0,0,0,0,0,0.86,0,0,1.72,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.86,0,0,0,0,0,0,0,0,0.86,0,0,0,0,0.272,0,0,0.136,0,4.541,31,109,1 0.63,0.63,0.63,0,0,0,0.63,0.63,0.63,0,0,0.63,0,0,0.63,1.26,0,0.63,1.89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.398,0,0,2.625,19,126,1 0,0,0,0,0,1.12,0,0,0,1.12,0,0,0,0,0,0,0,1.12,2.24,0,1.12,0,1.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.375,0,0,6.003,0,3.75,14,45,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.33,0,0,0,0,0.208,0,0.417,0.208,0,3.812,16,61,1 0.15,0,1.22,0,0.45,0,0.15,0,0.61,0.61,0,0.76,0.3,0,0.3,0.61,0.61,0,1.83,0.45,2.75,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.112,0,0.698,0.067,0,5.101,63,801,1 0,0,0,0,2.17,0,0,0,0,0,0,2.17,0,0,0,2.17,0,2.17,6.52,0,2.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.333,5,16,1 0.16,0,0.32,0,1.3,0.65,0,0.65,0,0,0,0.16,0,0,0.16,0.32,1.63,2.45,1.79,0,1.14,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.49,0,0,0,0,0,0,0,0,0,0.027,0,0.622,0.027,0,1.25,12,165,1 0,0,0,0,2.17,0,0,0,0,0,0,2.17,0,0,0,2.17,0,2.17,6.52,0,2.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.333,5,16,1 0,0,0,0,0,0,1.96,0,0,0,0,0.98,0,0,0,0,0.98,1.96,2.94,0,0.98,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.163,0.489,0,0.326,0,0,2.3,12,46,1 
0.87,0.17,0.52,0,0,0.32,0,0.04,0.29,0.42,0.39,1.37,0.87,1.69,0,0.32,0.54,0.22,3.47,0.29,1.32,0,0.34,0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0.07,0,0.04,0,0.016,0.058,0,0.639,0.165,0.182,3.697,117,3498,1 0,0,0,0,0,1.88,0,0,0,0,0,0,0,0,0,0,0,0,1.88,0,0,0,0,0,0,0,0,0,0,0,0,0,1.88,0,0,0,1.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.5,15,49,1 0.56,0,0.56,0,2.25,0,0,0,0,0.56,0,0,0,0,0,0.56,0.56,0,1.69,0,1.69,0,0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.91,0.273,0,2.283,22,121,1 0.07,0,0.15,0,0.07,0.15,0,0.07,0.07,0,0,0.46,0,0,0,0,0.15,0,0.15,0,0.07,0,0,0.07,0,0,0,0,0,0,0,0,0,0,0,0,0.15,0,0,0.07,0,0,0.07,0,0,0,0,0,0.011,0.047,0,0,0.023,0,1.263,10,264,1 0.54,0,1.08,0,0.54,0,1.08,0,0,0,0,0.54,0,0,0,0.54,0.54,0,4.32,0,1.08,0,1.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.54,0,0,0,0,0,0,1.216,0.26,0,5.454,68,180,1 0,1.65,0,0,0,0,1.65,0,0,1.65,0.82,0,0,0,0,0.82,0,0,3.3,0,0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.36,0,0,15.5,68,186,1 0.41,0,0.41,0,0,0,0.41,0,0,0,0.41,0,0.41,0,0,0,0,0,2.05,0,1.23,0,0,0.41,0,0,0,0,0,0,0,0,0.41,0,0,0,0.41,0.41,0,0,0,0,0,0,0,0,0,0,0,0.067,0,0.067,0,0,1.863,14,41,1 0.14,0,0.29,0,1.17,0.58,0.14,0.58,0,0.43,0,0.14,0,0,0.14,0.29,1.46,2.05,1.9,0,1.02,0,0,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.43,0,0,0,0,0,0,0,0,0,0.097,0,0.558,0.024,0,1.517,12,217,1 0,0.29,0.29,0,0,0.59,0.29,1.04,1.04,2.22,0.14,1.04,0,0,1.04,0.29,0.74,0,1.63,0.44,0.59,0,1.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0.29,0,0,0.29,0,0,0,0,0,0,0,0,0,0.084,0,0.105,0.21,0.021,10.817,887,1244,1 0.17,0,0.08,0,0.42,0.08,0.08,0.42,0.08,0.08,0,0.6,0.17,0.17,0,0,0.17,0.08,1.2,0,3.17,0,0.34,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.084,0.028,0.098,0.014,0,4.049,48,575,1 0.22,0,0.78,0,0,0.11,0.11,0,0.22,0.11,0.11,0.22,0.89,0,0,0.44,0.44,0,4.68,0,1.56,0,0.11,1.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0,0.44,0,0,0,0,0.142,0,0.775,0.224,0.142,5.782,103,798,1 
0.58,0,0.58,0.58,0.58,0,0,0,0,0,0,1.17,0,0,0,0,0,0,4.11,0,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.098,0,0.197,0,0,3.807,61,297,1 0.26,0.05,1.45,0,0.37,0.1,0,0,0.1,0.1,0.21,1.07,0,0,0,0,0,0,3.38,0,1.39,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.05,0,0,0,0,0.076,0,0.262,0.186,0.025,11.793,289,2288,1 0.44,0,0,0,0.89,0,0,0,0,0.44,0,1.33,0,0,0,0.44,0,0,4.46,0,1.78,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.44,0,0,0,0,0,0,1.131,0.15,0.075,2.428,28,153,1 0.43,0,0,0,0.87,0.87,0,0,0,0.43,0,2.18,0,0,0,0,1.74,0,0.87,0,0.87,0,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.942,0,0,5.114,107,179,1 0,0,0,0,0,0,0,3.57,0,0,0,0,0,0,0,0,0,0,7.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.554,0,0.518,2.111,15,38,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.428,4,10,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0.215,0,0,0.215,0,3.937,18,63,1 0,1.63,0.81,0,0,0,1.63,0,0,1.63,1.63,0,0,0,0,0.81,0,0,4.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.212,0,0,15.916,71,191,1 0.52,0,2.38,0,0.26,0,0.26,0,0.52,0,0.26,0,0,0,0,0.79,0,0,1.32,0,1.05,0,0,0.52,0,0,0,0,0,0,0,0,0.26,0,0,0.26,0.26,0,0.52,0,0,0,0,0,0,0,0,0,0,0.689,0,0.326,0,0,5.549,71,566,1 0.32,0,0.8,0,0.8,0.32,0.16,0,0.64,0,0.32,1.44,0.16,0,0,0,0.32,0,3.37,0,1.28,0,0.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0.05,0,0.05,0.075,0,1.419,15,159,1 0,0,1.2,0,0,0,0,0,0,0,0,2.4,0,0,0,0,0,0,2.4,0,4.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,16,72,1 0.58,0,0.19,0,1.75,0.39,0.58,0,0,0.19,0.39,0.78,0.39,0,0,0.58,0.58,0.58,4.29,0,0.39,0,0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.39,0,0,0,0,0.271,0,0.067,0.135,0,3.015,21,190,1 0,0,0,0,0.73,0,0,0,0,0,0,0,0,0,0,0.73,0,0,2.94,0,2.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.73,0,0,0,0,0.105,0,0.211,0,0,1.333,7,48,1 
0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,4,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.666,12,23,1 0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.317,0,0,0.952,0,0,4.823,13,82,1 0,1.05,0,0,0,0,1.05,0,0,0,0,0,0,0,0,3.15,0,1.05,0,0,1.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.74,0,0,4.947,24,94,1 0,4.76,0,0,0,0,0,0,0,1.19,0,0,0,0,0,0,0,2.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.225,38,162,1 0,0,3.48,0,0,0,0,1.16,1.16,0,0,0,0,0,0,0,0,0,2.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.405,0,0,9,28,72,1 0.5,0.19,0.57,0,0.25,0.38,0,0,0.5,0.06,0.12,0.63,0.19,0,0,0.69,0.5,0.38,3.49,0.06,1.27,0,0.31,1.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.25,0,0.06,0,0,0.067,0,0.435,0.592,0.022,5.335,73,1590,1 0.09,0.09,1.14,0,0.38,0,0,0.09,0,0.19,0.38,0.19,0,0,0,0.66,0,0,1.52,0,1.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.38,0,0,0,0,0,0.044,0.059,0,0.591,0,0,3.28,31,771,1 0,0,1.07,0,3.22,0,0,0,0,0,0,0,0,1.07,0,1.07,0,0,2.15,0,2.15,0,1.07,1.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.07,0,0,0,0,0,0,0,0,0,0,0,0,2.395,0.598,0.998,82.25,295,329,1 0,0,0,0,0.68,0,0,0,0,1.81,0,0.68,0,0,0,0.22,0,0,3.4,0,1.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.45,0,0,0.22,0,0,0,0.159,0.558,0.159,0.199,0,0,6.091,83,530,1 0,0,0,0,0.47,0,1.43,0,0,0,0.47,0.47,0,0,0,0.47,0,1.91,1.91,0.47,1.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.073,0.295,0,3.361,54,158,1 0,0.2,1.83,0,0.81,0.2,0.61,0.4,0,0,1.22,1.01,0.2,0,0,0.2,0.4,0.2,1.83,0,1.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.232,0,0,4.159,142,287,1 0,0,0,0,0.68,0,0,0,0,1.81,0,0.68,0,0,0,0.22,0,0,3.4,0,1.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.45,0,0,0.22,0,0,0,0.159,0.558,0.159,0.199,0,0,6.091,83,530,1 0,0,0,0,0,1.4,0,0,0,0,0,0,0,0,0,2.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.24,0,0.963,0,0,3.8,17,57,1 
0.77,0.38,0.77,0,0,0.57,0,0.57,1.15,1.15,0,0.38,0.38,0,1.15,0.19,0.19,2.12,2.12,1.15,1.15,0,1.35,0.77,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0.027,0.027,0,0.438,0.191,0.054,14.619,525,921,1 0,0,0,0,1.09,0,0,0.54,0,0,0.54,1.63,0,0.27,0,0,0.27,0.54,2.18,0,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0.208,0,0.166,0.083,0,3.521,114,243,1 0.17,0.26,1.24,0,0.53,0.62,0.44,0.17,0.79,0.79,0.26,1.33,0.17,0,0.62,0.62,0.08,1.33,2.66,0.17,1.15,0,0.79,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0.08,0,0,0,0,0.07,0,0.225,0.211,0.014,6.725,583,1345,1 0.13,0.13,0.26,0,0.26,0.26,0,0.13,0.39,0.13,0.13,0.39,0,0,0,0.13,0,0,2.35,0,0.13,0,0,0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.137,0,0,0.068,0,2.736,30,468,1 0,0,0.83,0,1.66,0.41,0,0,0,0,0,0.41,0,0,0,0.41,0,0,2.08,0,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.218,0,0,0,0,2.35,12,134,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.675,0,0,2.23,12,29,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.62,0.62,0,1.25,3.12,3.12,1.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.075,0,1.285,0.075,0.226,6.722,101,363,1 0.58,0,0.19,0,1.75,0.39,0.58,0,0,0.19,0.39,0.78,0.39,0,0,0.58,0.58,0.58,4.29,0,0.39,0,0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.39,0,0,0,0,0.271,0,0.067,0.135,0,3.015,21,190,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0.73,0,0,0,0,0,0,0,0.73,0,0,0,0,0,0,0,0,0,0.36,0,0,0.21,0.21,0,0,0.105,0,1.866,22,112,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.61,0,0,0,0,0,1.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.714,0,0,0.238,0,0,4.333,11,104,1 0,0.38,0.38,0,0,0,0,0.38,0.38,0,0,0,0,0,0,0.38,0,0.38,0.38,2.67,0,0,0.76,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.35,0,0,0.35,0,2.162,8,80,1 0.99,0.49,0,0,0,0,0,0,0,0.49,0,0.49,0,0,0,0,0,0,2.48,0,1.99,2.98,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.356,0,0.446,10.366,64,311,1 
0.52,0,1.05,0,0,1.05,0,0,0,0.52,0,0.52,1.05,0,0,1.05,0.52,0,3.15,0,0.52,0,1.05,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.295,0.698,0,2.016,14,125,1 0.08,0,0.32,0,0.24,0.32,0,0.16,0.16,0,0,0.65,0,0,0,0,0,0,4.67,0,0.65,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0.32,0,0,0.24,0,0,0,0,0.045,0,0.36,0.03,0,1.42,10,196,1 0,0,0,0,1.9,0,0.95,0,0,0.95,0,0.95,0,0,0,0,0,0,5.71,3.8,2.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.147,0,0,1.4,6,21,1 0.85,0,0,0,0,0,0,0,0,0,0,0.85,0,0,0,4.27,0,0,3.41,0,4.27,0,0,5.98,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.146,0,0.881,0,0,5,17,150,1 0.77,0.38,0.77,0,0,0.57,0,0.57,1.15,1.34,0,0.38,0.38,0,1.15,0.19,0.19,1.92,2.11,1.15,1.15,0,1.34,0.77,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0.027,0.027,0,0.438,0.191,0.054,14.619,525,921,1 0,0,3.7,0,0,0,0,0,0,0,0,0,0,0,0,1.85,0,0,1.85,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.289,0,3.768,0,0,4.833,29,87,1 0,2.43,0,0,1.21,0,0.6,0,0.6,0,0,0,0,0,0,0,0,2.43,1.82,0,1.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.031,71,129,1 0.32,0.16,0.56,0,0.32,0.24,0.04,1.16,0.4,0.4,0.12,0.68,0.52,0.4,0.28,0.64,0.36,0.4,3.06,0.16,1.28,0,0.36,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.04,0,0,0,0,0.12,0,0,0,0.019,0.052,0.066,0.37,0.152,0,3.225,181,1500,1 0.28,0,0,0,0,0,0,0.28,0,0,0,0.84,0.56,0,0.84,0.84,0.28,4.51,2.54,0,2.54,0,0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.091,0,1.147,0.045,0,7.178,104,524,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.6,0,0,0,0,0,0,0,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.102,0,0.102,0.716,0,4.512,43,185,1 0.09,0.09,1.14,0,0.38,0,0,0.09,0,0.19,0.38,0.19,0,0,0,0.66,0,0,1.52,0,1.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.38,0,0,0,0,0,0.044,0.059,0,0.591,0,0,3.28,31,771,1 0,0,0.42,0,0.42,0,0.21,0,0,0,0.21,0.21,0,0,0,0,0,0.42,0.42,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0.126,0,0.031,1.269,0.412,13.017,183,1484,1 
0.32,0.09,0.6,0,2.04,0.13,0,0,0.09,0.69,0.32,0.79,0.27,0,0,0.13,0.32,0,4.92,0,1.81,0,0.04,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.04,0,0,0,0,0.18,0,0.09,0,0.008,0.032,0,0.145,0.121,0.008,3.575,127,640,1 0.14,0.28,0.84,0,0.14,0.14,0,0.84,0.42,0.14,0,0.56,0.28,0.14,0.42,0.14,0.14,0.28,4.34,0.14,2.1,0,0.14,0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.42,0,0,0,0,0.025,0,0.381,0.05,0,2.322,15,216,1 0.13,0.27,0.83,0,0.13,0.13,0,0.83,0.41,0.13,0,0.55,0.27,0.13,0.41,0.13,0.13,0.27,4.31,0.13,2.08,0,0.13,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.41,0,0,0,0,0.025,0,0.379,0.05,0,2.329,15,219,1 0.34,0.17,0.17,0,1.38,0.69,0.17,0.17,0,0.17,0,0.86,0,0,0.34,1.55,0.34,0.17,2.94,0,2.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.115,0,0,0.086,0,4.792,38,508,1 0.77,0.38,0.77,0,0,0.57,0,0.57,1.15,1.15,0,0.38,0.38,0,1.15,0.19,0.19,2.12,2.12,1.15,1.15,0,1.35,0.77,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0.027,0.027,0,0.438,0.191,0.054,14.619,525,921,1 0.4,0.18,0.32,0,0.25,0.18,0.03,1.01,0.4,0.4,0.1,0.72,0.65,0.36,0.25,0.54,0.36,0.36,3.05,0.14,1.41,0,0.29,0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0,0,0,0,0.07,0,0,0,0.012,0.042,0.072,0.334,0.139,0,3.305,181,1613,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.6,0,0,0,0,0,0,0,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.102,0,0.102,0.72,0,4.512,43,185,1 0.77,0.38,0.77,0,0,0.57,0,0.57,1.15,1.15,0,0.38,0.38,0,1.15,0.19,0.19,2.11,2.11,1.15,1.15,0,1.34,0.77,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0.027,0.027,0,0.437,0.191,0.054,14.406,525,922,1 0.32,0,0.64,0,0,0,0,0,0,0,0.64,0.97,0,0,0,2.58,0,0,2.58,0.32,1.94,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.233,0.058,0,0.116,0.116,0,2.926,20,240,1 0,0.17,1.03,0,0.68,0.17,0.68,0,0,0.17,0,0.17,0.17,0,0.34,1.03,0.34,0.17,3.44,0,1.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.17,0,0,0,0,0,0,0,0,0,0.084,0,0.056,0.196,0,2.26,53,208,1 
0,0,0,0,0,1.21,0,0,0,0,0,0,0,0,0,0,0,0,1.21,0,1.21,2.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.21,0,0,0,0,0,0,1.627,0,0.465,2.591,31,127,1 0.77,0.38,0.77,0,0,0.57,0,0.57,1.15,1.15,0,0.38,0.38,0,1.15,0.19,0.19,2.12,2.12,1.15,1.15,0,1.35,0.77,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0.027,0.027,0,0.438,0.191,0.054,14.619,525,921,1 0.14,0.29,0.44,0,0.88,0.29,0,0,1.47,1.47,0.14,1.91,0.29,0,0.58,0.29,0,1.62,2.35,0.14,2.35,0,0.73,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.023,0,1.003,0.215,0.047,6.602,217,898,1 0.14,0.29,0.44,0,0.88,0.29,0,0,1.47,1.47,0.14,1.91,0.29,0,0.58,0.29,0,1.62,2.35,0.14,2.35,0,0.73,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.023,0,1.004,0.215,0.047,6.602,217,898,1 0,0.17,0,0,0.34,0.34,0,0,0,0.17,0,0,0.17,0,0,0.17,0.17,0,0.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0,0,0,0.054,0,0,0.027,0,2.073,11,170,1 0,0,0.36,0,0.73,0,0,0,0,0.73,0,0.36,0,0,0,0,0,0,0.73,0,0,0,0,0,0,0,0,0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.36,0,0,0,0,0,0,0,0,2.13,12,228,1 0,0,0.58,0,1.16,0,0,0,0,0.58,0,0,0,0,0,0.58,0,0,0.58,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.101,11,145,1 0,0,0,0,0.23,0,0,0,0,0,0,0.93,0,0,0,0.11,0,0.11,0.35,0,0.23,0,0,0,0,0,0,0.35,0.11,0.11,0,0,0,0,0,0.58,0,0.11,0,0,0,0.35,0,0,0,0.46,0.11,0.11,0,0.381,0,0.016,0,0,2.47,41,504,1 0,0,0,0,0,0.59,0,2.95,0,0,0,0.59,0.59,0,0.59,5.91,2.95,0.59,1.77,0,1.18,0,0,0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.293,0,0,1.69,15,93,1 0.77,0.38,0.77,0,0,0.57,0,0.57,1.15,1.34,0,0.38,0.38,0,1.15,0.19,0.19,1.92,2.11,1.15,1.15,0,1.34,0.77,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0.027,0.027,0,0.438,0.191,0.054,14.619,525,921,1 0.43,0.26,0.43,0,0.78,0.26,0,0.17,0.34,4.09,0.08,1.22,0.43,0,0.78,1.13,0.26,1.91,2.35,0,2.35,0,0.08,0.43,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0.08,0,0,0,0,0,0,0,0,0.056,0.241,0.042,0.709,0.056,0,4.319,126,1123,1 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.1,0,0,0,0,2.5,33,125,1 0.85,0,0,0,0.85,0,0,0,0,0,0,0.85,0.42,0,0,1.28,0,0,3.86,0,0.85,0,0,0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.066,0,0.535,0.133,0,11.592,110,313,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.041,0,0,0,0,1.938,33,95,1 0,0,1.55,0,0,0.77,0,0.38,0,0,0.38,1.16,0,0,0,0.38,0,1.16,1.93,0,0.38,0,1.16,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0.601,0.12,0,2.666,22,160,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.66,0,0,3.33,0,1.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.986,0,0,8,38,80,1 0,0.13,0.13,0,0,0.13,0,0,0.13,1.5,0,0.4,0,0,0.27,0.27,0,0.4,1.09,0,2.32,10.38,0.13,0,0,0,0,0,0,0,0,0,0,0,0,0.13,0,0,0,0,0,0,0,0,0,0,0,0,0.021,0.042,0,0.364,0.064,0.686,13.884,107,1444,1 0.87,0.17,0.52,0,0,0.32,0,0.04,0.29,0.42,0.39,1.37,0.87,1.69,0,0.32,0.54,0.22,3.47,0.29,1.32,0,0.34,0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0.07,0,0.04,0,0.016,0.058,0,0.638,0.165,0.182,3.697,117,3498,1 0,0.27,0.54,0,0.27,1.64,0,0.27,0.54,0.54,0,1.09,0.27,0,0,0,0,0.27,1.37,0,1.09,0,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0,0,0,0.472,0.128,0,10.877,93,533,1 0.4,0,0,0,0.8,0,0.4,2.8,0,1.2,1.2,2.8,0,0,0,0.4,0,0,4,0,0.8,0,0,1.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.052,0,0,0.105,0.052,0.052,1.194,5,129,1 4.54,4.54,0,0,0,0,0,0,0,0,0,0,0,0,0,4.54,0,9.09,0,0,4.54,0,0,9.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.076,0,0,1.428,4,10,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.28,0,1.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.699,0.932,0,5.083,39,122,1 0,0,0,0,2.63,0,0,0,0,0,0,0,0,0,0,1.31,0,0,1.31,0,2.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.818,46,106,1 0,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,1.19,0,0,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.403,0,0,9.785,42,137,1 
0.4,0.34,0.27,0,0.13,0.4,0.06,0.2,0,1.36,0.27,0.68,0.95,0,0.2,0,0,0,3.68,0,0.81,0,0.13,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.013,0.052,0,0.474,0.197,0.065,3.286,53,608,1 0,0,0,0,0.12,0,0,0,0,0,0,0.9,0,0,0,0.12,0,0.12,0.12,0,0.12,0,0,0,0,0,0,0.25,0.12,0.12,0,0,0,0,0,0.64,0,0.12,0,0,0,0.38,0,0,0,0.38,0,0,0,0.391,0,0,0,0,2.417,41,481,1 0,0,0,0,0.12,0,0,0,0,0,0,0.99,0,0,0,0.12,0.12,0.12,0.12,0,0.12,0,0,0,0,0,0,0.24,0.12,0.12,0,0,0,0,0,0.62,0,0.12,0,0,0,0.37,0,0,0.12,0.37,0,0,0,0.365,0,0,0,0,2.376,41,492,1 0,0,0,0,0.12,0,0,0,0,0,0,0.96,0,0,0,0.12,0.12,0.12,0.12,0,0.12,0,0,0,0,0,0,0.24,0.12,0.12,0,0,0,0,0,0.6,0,0.12,0,0,0,0.36,0,0,0.12,0.36,0,0,0,0.352,0,0,0,0,2.337,41,505,1 0.19,0.19,0.19,0,1.08,0.19,0.19,0.98,0.89,0.59,0.09,1.08,0.19,0.89,0.09,0,0,1.18,3.85,0.59,1.78,0,0.09,0.29,0,0,0,0,0,0,0,0,0,0,0.09,0,0,0,0.09,0.09,0,0,0,0,0,0,0,0,0,0,0.19,0.412,0.222,0.015,4.195,49,814,1 0.87,0.17,0.52,0,0,0.32,0,0.04,0.29,0.42,0.39,1.37,0.87,1.69,0,0.32,0.54,0.22,3.47,0.29,1.32,0,0.34,0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0.07,0,0.04,0,0.016,0.058,0,0.639,0.165,0.182,3.697,117,3498,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.862,0,8.5,17,34,1 0,0,0,0,0,0,0,0,0,0,0,2.1,0,0,0,0,0,0,2.1,0,1.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.178,0,0,0,0,1.275,7,51,1 0.21,0.21,0.42,0,0.42,0.21,0,0.42,0.42,0.21,0,0.64,0,0,0,0.85,1.07,0,4.07,1.07,1.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.216,0,0.432,0.18,0.072,4.391,36,303,1 0,0.29,0.29,0,0.58,0,0.58,0,0,0.58,0.29,0.29,0,0,0,1.46,0.29,0.87,1.16,0.87,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.29,0,0,0,0,0.086,0,0.606,0,0.043,3.591,37,352,1 0.22,0.88,0.44,0,0.22,0,0,0,1.32,1.54,0,0.88,0.66,0,1.1,0.66,0,1.54,2.87,0,1.54,0,0,0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.44,0,0,0,0,0,0,0,0,0,0.085,0,0.659,0.114,0.028,9.1,65,728,1 
0,0,0,0,1.63,0,0,0,0,1.63,0,0.81,0,0,0,0,0,0,3.27,0,0,0,0,0,0,0,0,0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.81,0,0,0,0,0,0,0,0,1.558,11,53,1 0,0,0,0,0,0,0,0,0,0.76,0,2.29,0,0,0,0,0,0,3.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.76,1.52,0,0,0,0,0,0,0,0,1.216,4,45,1 0.22,0.88,0.44,0,0.22,0,0,0,1.32,1.54,0,0.88,0.66,0,1.1,0.66,0,1.54,2.87,0,1.54,0,0,0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.44,0,0,0,0,0,0,0,0,0,0.085,0,0.659,0.114,0.028,9.1,65,728,1 0.1,0,0.74,0.21,0.21,0,0.1,0.1,0,0,0.1,0.31,0,0,0,0,0,0.21,0.63,0,0.31,0,0.21,0,0,0,0,0,0,0.1,0,0,0,0,0,0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0.101,0,0.05,0.609,0.253,7.887,126,1609,1 0,0.32,0,0,0,0,0,0,0.32,0,0,1.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.037,0,0,0,0,2.391,36,110,1 0.43,0,0.43,0,0.43,0.43,0,0,0,0.87,0,0.43,0,0,0,0,3.49,0,1.31,0,1.74,0,1.31,0,0,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.298,0.149,0.074,2.955,47,133,1 0.43,0.26,0.43,0,0.78,0.26,0,0.17,0.34,4.09,0.08,1.22,0.43,0,0.78,1.13,0.26,1.91,2.35,0,2.35,0,0.08,0.43,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0.08,0,0,0,0,0,0,0,0,0.056,0.241,0.042,0.709,0.056,0,4.319,126,1123,1 0.7,0,1.06,0,0,0,0,1.41,0.35,0.35,0,0.35,0,0,0,2.12,0.7,0.35,2.12,3.18,2.12,0,0.35,1.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.117,0,0.353,0,0,1.209,13,196,1 0.43,0.4,0.37,0,0.15,0.09,0.06,0.12,0.5,0.97,0.25,0.69,0.4,1.06,0.03,0.15,0.25,0,2.57,0,1.41,1.28,0.31,0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0,0,0.031,0.122,0.01,0.345,0.42,0.266,8.016,178,3303,1 0,0,0.19,0,0,0,0.19,0,0,0,0,0.19,0,0.09,0,0,0,0.09,0.19,0,0.09,0,0,0,0.09,0,0,0,0,0,0,0,0.19,0,0,0,0,0.09,0.19,0,0,0,0,0,0,0,0.09,0,0.015,0.137,0,0.061,0,0,3.626,44,990,1 0,0.24,1.45,0,0.36,0.6,0.6,0,0.6,1.45,0.12,0.85,0.48,0,1.94,0.12,0,0,1.33,0.12,0.6,0,0.48,0.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.24,0,0,0.12,0,0,0,0.117,0,0.234,0.234,0,4.493,39,746,1 
0.35,0.1,0.55,0,2.15,0.15,0,0,0.1,0.75,0.35,0.85,0.25,0,0,0.15,0.3,0,5,0,1.75,0,0.05,0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0,0,0.008,0.035,0,0.149,0.131,0.008,3.629,127,617,1 0,0,0,0,0.45,0,0.45,0,0.9,0.45,0.45,0.9,0.45,0,0,1.81,0,0.45,1.36,0,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.45,0,0,0,0,0.45,0,0,0,0,0.16,0,0.64,0.16,0,3.607,71,184,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,1.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.5,0,0,0.121,0.605,0,0,0,0,2.222,22,100,1 0,0.45,1.35,0,1.35,0,0.9,0.45,0,1.35,0,0.45,2.71,0,0,0,0,0.9,2.26,0,1.8,0,0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.82,0.41,0,2.446,12,137,1 0.4,0.4,0,0,0,0,0,0,1.2,4.81,0.4,0,0,0,4.41,0,0,0,1.2,0,1.2,0,4.01,0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.855,0.794,0,4.152,41,353,1 0.1,0.1,0.03,0,0.07,0.03,0,0.03,0,0.1,0,0.53,0,0,0,0.17,0.03,0,0.81,0.03,1.35,0,0.1,0.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0,0,0,0,0.03,0,0,0.071,0,0.006,0.065,0,2.106,46,3214,1 0.14,0.18,0.79,0,0.04,0.14,0.18,0.28,0.28,0.84,0.18,0.46,0.61,0.09,0.32,0.89,0.37,0.46,3.8,0.04,1.87,0,0.46,0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.28,0,0,0,0,0.28,0.04,0,0,0,0.101,0,0.522,0.109,0.062,5.759,116,2062,1 0,0,0.21,0,0.21,0,0,0,0,0,0,0.84,0,0,0,0.21,0,0,0,0,0,0,0,0,0,0,0,0.42,0,0,0,0,0,0,0,0.42,0,0,0.21,0,0,0,0,0,0,1.48,0,0,0,0.057,0,0,0,0,2.807,39,379,1 0.33,0.42,0.75,0,0,0.25,0,0.08,0.16,1.09,0.33,1.09,0.16,0,0,0.67,0.67,0.08,2.52,0,0.92,0,0,0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0.08,0,0,0,0.014,0.029,0,0.523,0.378,0,3.631,67,897,1 0,0.82,0.32,0,1.14,0.32,0,0.16,0,0.65,0,2.13,0,0,0,0.16,0,0,1.47,0,1.47,0,0.98,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0,0,0,0,0,0.29,0.029,2.257,13,158,1 0,0,0,0,1.21,0,0,0.6,0,0.6,1.21,0,0,0,0,1.82,0,0,4.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.29,0,0.193,0,0,1.861,26,67,1 
0.33,0.16,0.16,0,1.35,0.67,0.16,0.33,0,0.16,0,0.84,0,0,0.33,1.52,0.33,0.16,2.88,0,2.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.113,0,0.028,0.084,0,4.971,40,532,1 0,0,0,19.73,0,0,0,0,0,0,0,0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.53,0,0,0.53,0,0,0,0,0,0,0,0,0,0,0.087,0,0,0,0,4.786,152,292,1 0,1.11,0.55,0,0,0,0,0,0,0,0.55,0,1.11,0,0,3.35,0,0,0.55,0,1.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.55,0,0,0,0,0,0,0,0,0.19,0,0,1.429,0.095,0,2.861,36,186,1 0,0,0.24,0,0.72,0,0,0,1.69,0,0.48,1.21,0,0,0,0.24,0,0,2.91,0,1.21,0,0,0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0.24,0,0,0,0,0,0,0.24,0,0,0,0,0.036,0,1.021,0.291,0.109,7.092,67,461,1 0,0,0,0,0.67,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0.67,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.114,0.114,0,0.228,0.228,0,2.847,16,168,1 0.15,0,0.3,0,1.23,0.61,0,0.61,0,0.15,0,0.3,0,0,0.15,0.3,1.54,2.32,1.85,0,1.08,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.46,0,0,0,0,0,0,0,0,0,0.128,0,0.615,0.025,0,1.377,13,186,1 0.13,0.1,0.55,0,0.02,0.13,0.02,1.11,0.23,0.29,0.05,0.34,0.42,0.07,0.55,0.87,0.45,0.66,3.95,0.05,1.59,0,0.39,0.34,0,0,0,0,0,0,0,0,0.02,0,0,0,0,0,0,0.31,0,0,0,0.05,0.23,0.02,0,0,0.03,0.083,0,0.538,0.145,0.07,5.108,116,3525,1 0,0,0,0,0,1.05,0,0,0,0,0,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.057,0,0,0,0,2.675,36,99,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.47,0,0,0,0.364,0,0,0,0,3.23,38,126,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.4,0,0,2.4,0,2.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.151,0.302,0,2.611,11,47,1 0,0,0.94,0,0,0,0,0,0,0,0,0,0,0,0,0.47,0,0,0,0,0.47,0,0,0,0,0,0,0.94,0.47,0,0,0,0,0,0,0,0,0,0.47,0,0,0,0,0,0,0.94,0,0,0,0.332,0,0,0,0,1.518,15,161,1 0.98,0.16,0.41,0,0.08,0.24,0,0.08,0,0.49,0.08,0.57,0.9,0,0.16,0,0,0.32,2.46,0,1.14,0,0.49,1.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.095,0,0.751,0.255,0.095,4.163,84,712,1 
0,0.22,0.22,0,0,0,0,0,0.22,2.75,0,0.68,0,0,0.68,0.45,0,1.37,2.06,0,4.12,0,0.45,0.22,0,0,0,0,0,0,0,0,0,0,0,0.22,0,0,0,0,0,0,0,0,0,0,0,0,0.028,0.114,0,0.919,0.229,0.028,4.444,138,400,1 0,0,0.68,0,0,0,0,0,0,0,0,0,0,0,0,0.68,0,0,0,0,0,0,0,0,0.68,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.309,0,0,1.6,4,32,1 0.26,0.46,0.99,0,0.53,0,0,0.53,0.19,1.12,0.26,0.73,0.66,0,0.06,0.26,0.13,0.26,3.78,0,3.32,0,0.39,0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.011,0.023,0,0.449,0.265,0.034,13.235,272,1575,1 0,0.26,0.78,0,0.26,0.26,0.08,1.04,0.52,1.56,0.26,0.69,0.17,0.08,0.69,0.86,0.34,0,1.82,0.17,1.3,0,0.08,0.34,0,0,0,0,0,0,0,0,0.08,0,0,0.08,0,0,0,0,0,0,0,0,0.08,0.08,0,0,0.096,0.234,0,0.358,0.261,0.11,3.56,54,979,1 0.14,0,0.29,0,0.14,0,0,0,0,0,0,0.14,0.29,0,0,0.29,0,0,2.19,0,1.02,0,0,0.43,0,0,0,0,0,0,0,0,0,0.14,0,0,0,0,0,0,0,0,0,0,0.29,0,0,0,0.05,0.382,0,0.764,0,0,2.468,28,469,1 0,0.26,0.78,0,0.26,0.26,0.08,1.04,0.52,1.56,0.26,0.69,0.17,0.08,0.69,0.86,0.34,0,1.82,0.17,1.3,0,0.08,0.34,0,0,0,0,0,0,0,0,0.08,0,0,0.08,0,0,0,0,0,0,0,0,0.08,0.08,0,0,0.096,0.234,0,0.358,0.261,0.11,3.554,54,981,1 0,0,0,0,0.53,0,0,0.26,0,0,0,0.26,0.26,0,0,0.53,0,0,1.33,0,0,9.33,0.53,0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.555,0,1.157,19.26,107,886,1 0,0,2.15,0,1.07,0,0,0,0,0,0,0,0,0,0,0,0,0,1.07,0,1.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.192,0,0,2.333,19,49,1 0,0,0,0,0,0,0,0,0,0,0,0,0,2.05,0,0,0,0,0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.73,0,0,0.098,0.589,0,0,0,0,2.044,22,92,1 0,0.18,0.37,0,0.18,0,0,0,0,0,0.18,0.56,0,0.18,0.18,0.56,0.18,0.56,0.56,0,0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.277,0,0.493,0.061,0.03,1.874,13,253,1 0,0,0,0,1.04,1.04,0,0,0,0,0,0,0,0,0,6.25,0,0,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.555,0,0,3.275,14,95,1 
0,0.28,0,0,0,0,0,0,0,0,0,0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.28,0,0,0,0,0,0,0,0.85,0,0,0.57,0,0,0,0,0,0,0.57,0,0,0,0.103,0,0,0,0,2.417,33,162,1 0.09,0.49,0.59,0,0.49,0.19,0,0,0.09,0.39,0,1.57,0.19,0,0,0,0.09,0,3.74,0.09,1.08,0,0,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.765,0.037,0,5.803,1.284,0,5.944,54,755,1 0,0.55,0.55,0,0.55,0.55,0,0.27,1.94,1.67,0,1.39,0.83,0,0.83,0.27,0,1.94,2.5,0,2.22,0,0.55,0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.273,0.364,0.045,6.641,48,352,1 0.58,0,0.34,0,0.11,0.11,0,0,0,0.23,0.23,0.93,0.93,0,0,0.58,0.23,0.11,4.19,0,1.51,0,0.58,0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0,0,0,0,0.11,0,0,0.11,0,0.125,0,0.733,0.104,0.335,8.192,326,1360,1 0,0.18,0.18,0,0.74,0,0.18,0,0,0.55,0.18,0.18,0,0,0.18,0,0,0,1.11,0,0.74,0,0,0,0,0,0,0,0,0.18,0,0,0.37,0,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0.058,0,0,0.029,1.57,2.166,11,208,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.013,0,0,0,0,3.5,28,42,1 0.49,0,0.24,0,0.24,0,0,0.73,0,0,0,0.49,0,0,0,0,0,0,4.9,0,1.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.544,0.077,0,2.055,22,111,1 0,0,0,0,0,0,0.91,0,0,0,0.91,2.75,0,0,0,0,0,0,6.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.941,12,33,1 0,0,0.29,0,0.87,0,0.29,0,0.87,0,0,1.45,0,0,0,0,0,0.29,5.24,0,1.45,0,0,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.042,0,0,0.085,0,5.145,33,247,1 0,0,0.57,0.57,0,0,0,0.28,0,0,0,0,0.28,0,0,0,0.28,0.57,2.89,0.86,1.73,0,0,0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.461,0,1.385,0,0.046,3.535,64,396,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,7,1 1.06,0.16,0.4,0,0.16,0.24,0,0.16,0,0.49,0.08,0.57,0.9,0,0.16,0,0,0.32,2.37,0,1.22,0,0.49,1.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.095,0,0.845,0.255,0.095,4.194,84,713,1 
0,0.26,0.79,0,0.26,0.26,0.08,1.06,0.53,1.59,0.26,0.71,0.17,0.08,0.71,0.88,0.44,0,1.86,0.26,1.24,0,0.08,0.35,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0,0,0,0,0,0,0,0.26,0.08,0,0,0.098,0.226,0,0.353,0.254,0.113,3.591,54,966,1 0.98,0.16,0.41,0,0.16,0.24,0,0.16,0,0.49,0.08,0.57,0.9,0,0.16,0,0,0.32,2.37,0,1.23,0,0.49,1.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.096,0,0.736,0.256,0.096,4.123,84,701,1 0.58,0,0.34,0,0.11,0.11,0,0,0,0.23,0.23,0.93,0.93,0,0,0.58,0.23,0.11,4.19,0,1.51,0,0.58,0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0,0,0,0,0.11,0,0,0.11,0,0.125,0,0.733,0.104,0.335,8.192,326,1360,1 0,0,0,0,0,0,0,0,0,4.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.287,0,0,0,0,0,4.333,13,78,1 0.41,0,0.41,0,0.41,0,0,0,0.41,0.83,0,0,0,0,0,0,0.41,0,1.66,0,1.25,3.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.676,9.444,54,255,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.333,11,13,1 0,0.26,0.79,0,0.26,0.26,0.08,1.06,0.53,1.59,0.26,0.71,0.17,0.08,0.71,0.88,0.44,0,1.86,0.26,1.24,0,0.08,0.35,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0,0,0,0,0,0,0,0.26,0.08,0,0,0.098,0.226,0,0.353,0.254,0.113,3.598,54,968,1 0,0.32,0,0,0,0,0,0,0,0,0,0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.97,0,0,0.65,0,0,0,0,0,0,0,0,0,0,0.075,0,0,0,0,2.269,33,118,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0.32,0,0,0,0,0,0.64,0,0,0,0,0,0,0,0,0,0,0.254,0,0,0,0,1.987,28,153,1 0,0,0,0,0.44,0,0,0,0,0.88,0,0,0,0,0,0.44,0,0,1.32,0,0,0,0,0,0,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.44,0,0,0,0,0,0,0,0,1.841,10,186,1 0.09,0.49,0.59,0,0.39,0.19,0,0,0.09,0.39,0,1.57,0.19,0,0,0,0.09,0,3.74,0.09,1.08,0,0,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.765,0.037,0,5.828,1.308,0,6.047,54,768,1 
0.36,0.29,0.36,0,0,0.58,0.07,0.14,0.66,1.25,0.14,1.39,0.58,1.1,0.14,0.14,0,0,2.35,0,1.25,0.07,0.58,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.07,0,0,0,0,0,0,0,0,0.319,0.266,0.279,4.689,145,1163,1 0,0,0,0,0,0,0,0,0,0,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0,0,0,0.24,0,0,0.48,0,0,0,0,0,0,0,0,0,0,0.186,0,0,0,0,2.823,38,240,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.4,0,0,2.4,0,2.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.145,0.291,0,2.5,11,45,1 0.17,0.22,0.62,0,0.11,0.22,0.05,0.11,0,0.39,0.11,1.02,0.45,0.05,0.05,0,0,0.39,3.46,0,1.76,0,0.56,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.01,0.2,0,0.441,0.421,0.04,4.945,116,1449,1 0,0,0,0,0,0,0,0,0,0,0,0,0,2.32,0,0,1.16,0,1.16,0,0,0,0,0,0,0,0,2.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.16,0,1.16,0,0,0,0.381,0,0,0,0.19,2.652,28,61,1 0,0.26,0.78,0,0.26,0.43,0.08,1.12,0.43,1.47,0.26,0.69,0.17,0.08,0.69,0.86,0.6,0,1.82,0.6,1.39,0,0.08,0.26,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0,0,0,0,0,0,0,0.26,0.08,0,0,0.097,0.222,0,0.444,0.25,0.111,3.138,54,929,1 0,0,0,0,0,0,0,0,0,0,1.23,0,0,0,0,2.46,0,0,2.46,0,2.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.232,0.465,0,2.687,12,43,1 0,0,0,0.6,0.6,0,0,0,0,0,0,0,0.6,0,0,2.42,0,0.6,0,0,0.6,0,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.412,0.206,0.103,2.3,20,237,1 0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,0,0,0.6,0,1.2,0,0,0,0,0,0,0,0,1.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.6,0,1.8,0,0,0,0.299,0,0,0,0.199,2.465,28,106,1 0,0,0,0,0,0,0,0,0,0,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0,0,0,0.24,0,0,0.48,0,0,0,0,0,0,0,0,0,0,0.185,0,0,0,0,2.802,38,241,1 0,0,0,0,0,0.27,0,0,0.82,0,0,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.54,0,0,0,0,0,0,0,0.54,0,0,0.54,0,0,0,0,0,0,0,0,0,0.037,0.226,0,0.037,0,0,2.666,33,208,1 0,0.68,0,0,4.08,0,0.68,0,0,0.68,1.36,1.36,0,0,0,0,0.68,0.68,2.72,0,2.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,6.588,68,112,1 
0,0.68,0,0,4.08,0,0.68,0,0,0.68,1.36,1.36,0,0,0,0,0.68,0.68,2.72,0,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.111,0,6.588,68,112,1 0.7,0,0.7,0,2.83,0,0,0,0,0,0,0,0,0,0,0,0,0.7,3.54,0,2.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0,1.083,3,26,1 0.2,0.41,0.2,0,1.44,0,0,0.41,0.41,0.62,0,1.86,0.2,0.2,0,0.2,0.41,0,2.69,1.03,2.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.249,0,0.996,0.106,0,7.836,116,384,1 0,0,0,0,0,0,0,0,0,0,0,0,1.44,0,0,1.44,0,0,5.79,0,1.44,0,1.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.543,0.271,0,2.157,11,41,1 0,0.28,0,0,1.4,0,0.28,0.28,0,0.56,0,0.84,0,0,0,0.28,0,0,1.68,0,1.96,0,0,0,0,0,0,0,0,0,0,0,1.4,0,0,0,0,0.28,0,0,0,0,0,0.28,0.28,0,0,0,0,0.137,0.068,0.034,0,0,5.635,114,603,1 0,0,0,0,1.03,0,1.03,0,0,0,0,2.06,0,0,0,2.06,0,0,3.09,0,1.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,60,84,1 0,0.49,0,0,0,0,0,0,2.48,0,0,0,0,0,0,0,0,0,0.49,0,0.99,0,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.177,0,0,0.265,0.088,10.159,114,447,1 0.4,0.48,0.37,0,0.14,0.14,0.03,0.07,0.55,0.66,0.29,0.89,0.44,1.04,0.03,0.26,0.37,0.07,3.16,0,1.41,0,0.48,0.59,0,0,0,0,0,0,0,0,0.03,0,0,0,0,0.03,0,0,0,0,0,0,0.07,0,0,0,0,0.082,0,0.433,0.529,0.114,6.482,140,2379,1 0,0,0,0,0,0,0,0,0,3.57,0,1.78,0,0,0,0,0,0,1.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.252,0,0,0.757,0,0,4.157,13,79,1 0,0,0,0,0.64,0,0.64,0,0,0,0,0.64,0,0,0,0,0,0,5.8,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0,0,1.44,0,0,2.875,21,115,1 0,0,0,0,0,0,0,0,0,3.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.261,0,0,0.785,0,0,4.333,13,78,1 0,0,0,0,0.65,0,0.65,0,0,0,0,0,0,0,0,0,0,0,5.22,0,0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.65,0,0,0,0,0,0,1.461,0,0,2.973,21,113,1 
0.1,0.1,0.71,0,0.61,0.3,0.4,0.1,1.42,0.81,0.1,0.5,0,0,0,0.1,0,1.11,2.23,0.5,2.03,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0.1,0,0,0,0.1,0,0,0,0,0,0,0,0,0,0,0,0.264,1.01,0.397,0.033,3.199,56,1043,1 0.15,0,0.3,0,1.23,0.61,0,0.61,0,0.15,0,0.3,0,0,0.15,0.3,1.54,2.32,1.85,0,1.08,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.46,0,0,0,0,0,0,0,0,0,0.128,0,0.615,0.025,0,1.377,13,186,1 0,0,0,0,0,0,0,0,0,3.7,0,0,0,0,0,1.85,0,0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.258,0,0,1.55,0,0,4.555,13,82,1 0,0,0,0,0.65,0,0.65,0,0,0,0,0,0,0,0,0.65,0,0,5.88,0,0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.65,0,0,0,0,0,0,1.765,0,0,3.025,21,118,1 0,0,0,0,0,0,1.43,0,0,0.47,0,0.95,0.47,0,0,0,0,0,2.87,0,0.47,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.694,0,0,7.709,164,239,1 0.19,0.19,0.29,0,1.07,0.19,0.19,0.97,0.87,0.58,0.09,1.07,0.19,0.87,0.09,0,0,1.17,3.81,0.68,1.75,0,0.09,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.09,0.09,0,0,0,0,0,0,0,0,0,0,0.202,0.404,0.233,0.031,4.32,49,877,1 0,0,0,0,0,0,0,0,0,0,0,0,1.44,0,0,1.44,0,0,5.79,0,1.44,0,1.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.542,0.271,0,2.157,11,41,1 0,0,2.43,0,0,0,0,0,0,0,0,0,0,0,0,2.43,0,4.87,2.43,0,2.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.896,0,0,5.538,58,72,1 0,0,1.14,0,0,0,1.14,0,0,0,0,0,0,0,0,0,0,0,2.29,0,2.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.197,0,0,0,0,1.227,6,27,1 0,1.63,0,0,0.81,0,1.63,0,0,0,0,0,0,0,0.81,0,0,0.81,1.63,0,2.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.134,0,0,0,3.294,11,56,1 0,0,0,0,0,0,2.3,0,0,0,0.76,2.3,0,0,0,0.76,0,0.76,3.07,0,2.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.518,0,0,11.312,142,181,1 0,0,1.06,0,0,1.06,1.06,0,0,0,0,1.06,1.06,0,0,0,0,0,2.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.909,78,108,1 0,0,1.03,0,1.03,0,0,0,0,0,0,0,0,0,0,0,2.06,1.03,4.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.149,0,0,1.461,3,19,1 
0.27,0,0.83,0,1.11,1.11,0.27,0,0,0,0,0.83,0,0,0,0.83,1.11,0.27,1.38,0,1.11,0,0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.638,0,0,2.512,17,196,1 0,0,0,0,0,0,0,0,0,3.92,0,0,0,0,0,0,0,0,1.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.55,3,31,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.375,0.525,0.225,0,0,4.906,39,211,1 0,0,0,0.04,0,0,0,0,0,0,0,0,0,0,0,0.02,0,0,0.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.024,9.752,0.003,1.542,1.785,1.998,239.571,9989,10062,1 0.1,0.1,0.71,0,0.61,0.3,0.4,0.1,1.42,0.81,0.1,0.5,0,0,0,0.1,0,1.11,2.23,0.5,2.03,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0.1,0,0,0,0.1,0,0,0,0,0,0,0,0,0,0,0,0.264,0.975,0.396,0.033,3.186,56,1042,1 0,0,1.63,0,0.54,0,0.54,0,0.54,1.09,0,2.18,0,1.09,0,0,0,0,2.73,0,2.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.086,0,1.466,0.258,0.086,31.388,392,565,1 0,1.2,0.4,0,0.4,0,0.8,0.4,0,0,0,0.8,0.4,0,0,0.8,0.4,1.2,3.62,0,1.61,0,0,1.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.4,0,0,0,0,0.069,0,0.552,0.207,0.138,6.652,69,153,1 0.22,0.44,0,0,1.33,0.22,1.33,0,0,0.22,0.44,0.66,0.22,0,0,1.11,0,1.11,2.66,0,1.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.22,0,0,0,0,0,0,0,0,0,0.134,0,0.067,0.067,0,1.946,22,183,1 0.07,0,1,0,0.3,0.46,0.07,0.23,0.23,0,0.3,1.31,0.15,0,0.07,1.39,0.15,0.85,2.24,0,0.77,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.15,0,0,0,0,0,0,0,0,0.024,0.183,0,0,0.183,0,3.211,84,700,1 0,0.5,0,0,0,0,0,0,2.5,0,0,0,0,0,0,0,0,0,0.5,0,1,0,0,0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.178,0,0,0.267,0.089,10.372,114,446,1 0.19,0.76,0.19,0,0.19,0.19,0.19,0,0.95,0.38,0.19,0.57,0,0,2.86,0.19,0,3.43,1.71,0,2.09,0,3.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.38,0,0,0,0,0,0,0,0,0,0.151,0.303,0.212,0.303,0,11.242,132,742,1 0,0.37,0,0,0,0.74,1.12,0,0,0,0.74,1.49,0.74,0,0,0.37,0,1.49,4.49,0,1.87,0,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.223,0.223,0,2.021,10,93,1 
0,0,0,0,0,0,0,11.11,0,0,0,0,0,0,0,0,0,0,0,0,11.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.55,0,0,0,0,0,0,0,0,0,1,1,4,1 0.17,0.26,1.21,0,0.43,0.6,0.43,0.26,0.69,0.52,0.26,1.3,0.17,0,0.6,0.69,0.08,1.47,2.43,0.17,1.04,0,0.95,0.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0.08,0,0,0,0,0.107,0,0.256,0.242,0.013,6.142,583,1339,1 0.96,0,0.48,0,0,0.96,0,0,0.48,0,0.48,0,0,0,1.44,0.48,0.48,2.41,0,0,0.96,0,0,0.48,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0.818,0,0.175,0.467,0.116,9.56,259,717,1 0,0,0,0,0.67,0,2.01,0,0,0,0,0,0,0,0,0,0,2.01,1.34,0.67,2.01,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.305,0,0,2.162,14,93,1 0.09,0.49,0.59,0,0.29,0.19,0,0,0.09,0.39,0,1.58,0.19,0,0,0,0.09,0,3.76,0.09,1.09,0,0,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.765,0.037,0,5.831,1.309,0,6,54,756,1 0,0,0,0,0.68,0,2.04,0,0,0,0,0,0,0,0,0,0,2.04,1.36,0.68,2.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.309,0,0,2.111,14,95,1 0,0,0.37,0,0,0,0.37,0,0,0,0,0.37,0,0,0,0.74,0.37,0.37,0.74,0.37,0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.37,0,0,0,0,0,0,0.37,0,0,0.302,0,0.241,0.06,0,2.166,18,143,1 0.16,0.24,1.23,0,0.41,0.57,0.49,0.32,0.65,0.49,0.24,1.23,0.16,0,0.65,0.9,0.08,1.56,2.38,0.16,1.07,0,0.9,0.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0.08,0,0,0,0,0.114,0,0.241,0.228,0.012,6.544,683,1466,1 0,0,0,0,0,0,0,0,0,1.08,0,0,0,0,0,3.26,0,0,5.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.152,0,0,5.21,36,99,1 0,0.96,0.96,0,1.44,0,0.48,0,0.48,1.92,0.48,0.96,0.48,0,1.92,0,0,0,0.96,0,0.96,0,4.32,0.48,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0.061,0,0.43,0.43,0,25.964,305,727,1 0,0.18,1.1,0,0.73,0.73,0.73,0.09,0.83,0.27,0.27,0.64,0.27,0,1.47,0.09,0,1.2,1.38,0.18,0.64,0,0.55,0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.18,0,0.09,0.09,0,0,0,0.094,0,0.43,0.134,0.013,8.445,696,1478,1 
0,0,0,0,0.4,0.4,0.4,0.4,0,0,0.4,0,0,0,0,0.4,0,0,3.6,0,2,0,0,0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.124,0,0,1.94,12,97,1 0,0,0,0,0,0,0,0,0,0,0,0.84,0,0,0,0,0,0,2.52,0,1.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0.139,0,0,0,0,1.304,6,30,1 0,0,0,0,0,0,0,0.85,0,0,0,0.85,0,0,0,0,0,0,2.56,0,0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0.144,0,0,0,0,1.333,6,28,1 0,0.21,0.43,0,0.65,0,0.21,0.21,0.87,0.65,0.43,0.87,0,0,0,0.43,0,0.87,3.71,0,1.09,0.65,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0.43,0,0,0,0,0,0,0.21,0,0,0,0,0.032,0,0.96,0.128,0.128,8.08,70,501,1 0,0,0,0,1.29,0,0,0,0,0,0,0,0,0,0,0.64,0,1.29,2.58,0.64,1.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0,0,0,0,0,0,0.224,0,2.354,0,0,2.09,13,69,1 0,0.5,0,0,0,0,2,0,0,0.5,0.5,0.5,0,0,0,0.5,0,1.5,3,0,1.5,0,0.5,0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.267,0,0.445,0.979,0,4.685,28,164,1 0.27,0.27,0.55,0,0.27,0.27,0,1.37,0.27,0.82,0.27,0.55,0,0,0,0,1.37,0.55,1.65,2.2,3.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0,0,0,0,0,0,0.28,0,1.029,0.093,0,3.621,63,344,1 0.87,0.17,0.52,0,0,0.32,0,0.04,0.29,0.42,0.39,1.37,0.87,1.69,0,0.32,0.54,0.22,3.47,0.29,1.32,0,0.34,0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.02,0.07,0,0.04,0,0.016,0.058,0,0.639,0.165,0.182,3.697,117,3498,1 0,0.78,2.34,0,0.78,0,1.56,0,0,0,0,1.56,0,0,0,0,0,0.78,7.03,0,2.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.363,0,0,1.348,29,147,1 0.71,0.35,0.71,0,1.79,0,0,0,0,0.35,0,1.43,0,0,0,0.35,0,0,3.94,0,1.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.061,0,0,0,0,8.086,153,186,1 0.33,0.84,0.67,0,0.67,0.33,0.67,0,0.33,0,0.16,0.84,0.16,0,0,0.67,0,0.5,3.03,0.33,2.18,0,0,0,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.183,0,0.156,0.104,0.026,6.5,525,858,1 0.42,0,0.42,0,1.71,0,0.42,0,0,0.21,0.21,0.85,0.21,0,0,0,1.92,0.42,3.21,0,1.49,5.78,0.21,0.21,0,0,0,0,0,0,0,0,0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.075,0.263,0.075,0.639,53.433,494,1603,1 
0,0,1.01,0,0,0,0.5,0,0,2.02,1.51,1.51,0,0,0,0.5,0,0,3.53,0,1.01,0,1.51,1.01,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.089,0,1.431,0.536,0,4.09,23,225,1 0.86,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0.86,3.44,0,4.31,0,0.86,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.928,0.154,0.154,2.409,7,53,1 0.25,0.17,0.34,0,0,0.08,0,0,0.08,0.08,0.08,0.86,0,0,0,0.08,0,0.25,4.66,0,1.2,0,0,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0,0,0.015,0,0.094,0.015,0,2.531,89,319,1 0.27,0.27,0.55,0,0.27,0.27,0,1.37,0.27,0.82,0.27,0.55,0,0,0,0,1.37,0.55,1.65,2.2,3.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0,0,0,0,0,0,0.279,0,1.023,0.093,0,3.621,63,344,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.81,9.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.763,21.428,62,150,1 0,0,0,0,0,0,7.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.297,0,0,2,8,52,1 0.1,0.2,1.01,0,0.8,0.8,0.5,0,0.8,0.1,0.3,0.7,0.3,0,1.61,0.1,0,1.11,1.31,0.2,0.7,0,0.6,0.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0.1,0.1,0,0,0,0.11,0,0.488,0.157,0.015,8.55,669,1351,1 0.2,0,0.1,0,0,0.1,0.2,0,0,0,0,0.72,0,0,0,0.1,0.2,0.1,4.17,0,1.35,0,0.52,0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.057,0,0.095,0,0,2.717,24,318,1 0,0,1.47,0,0,1.1,0.36,0,0,0,0.36,0.36,0,0,0,0.36,0,0,2.21,1.1,2.95,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.123,0,0.433,0.928,0.185,3,33,177,1 0.15,0.15,0.31,0,0.15,0,0.46,0,0,0,0.62,0.62,0.15,0,0,0.31,0.15,0.93,2.63,0,2.01,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.092,0,0.123,0,0,6.268,196,608,1 0.93,0,0,0,0.93,0,1.86,0,0,0,0,2.8,0.93,0,0,0,0,0,8.41,0,1.86,0,0,0.93,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.164,0,2.306,0.164,0,8.312,29,133,1 0,0.22,0.45,0,0.68,0,0.22,0.22,0.9,0.68,0.45,0.9,0,0,0,0.68,0,0.9,3.86,0,1.13,0,0,0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0.45,0,0,0,0,0,0,0.22,0,0,0,0,0.033,0,1.103,0.133,0.033,7.166,54,430,1 
0,0,0.27,0,0.54,0,0.27,0,0,0.27,0,0.54,0,0,0,1.35,0,0,1.08,0,2.44,10.86,0,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.329,0.141,1.41,44.72,252,1118,1 0.76,0,0.38,0,0.12,0.25,0,0.12,0.12,0,0,0.25,0.38,0,0,0.38,0,0.25,2.92,0,2.92,0,0,0.38,0,0,0,0,0,0,0,0,0,0,0.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.022,0,0.661,0.088,0,2.256,21,325,1 0,0,0,0,0,0,0,3.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.06,0,0,0,0,0.207,0,0.207,0.207,0,3.761,25,79,1 0,0,0,0,0,1.29,0,0.64,0,0,0,0,0,0,0,0,0,0,3.87,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.16,0.116,0,1.8,12,63,1 0,0,0,0,0,0,0,3.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.06,0,0,0,0,0.207,0,0.207,0.207,0,3.761,25,79,1 0,0,1.35,1.35,0,0,0,1.35,0,0,0,0,0,0,0,1.35,0,0,2.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.7,0,0,0,0,0,0,3.588,0,0,2.516,17,78,1 0,1.03,0,0,1.03,0,1.03,0.51,0,0.51,0,1.03,0,0,0,0.51,0,0.51,2.07,0,1.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.257,0,0.6,0.429,0,1.447,4,55,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.29,0.29,0,0.29,1.79,0,0.59,0,0.29,0,0,0,0,0,0,0,0,0,0.29,0,0,0,0,0,0,0,0,0,0.89,0,0,0,0,0,0,0.248,0,0,0.049,0,2.47,30,168,1 0,0,0.68,0,0,0,0,1.36,0,0,0.68,0.68,0,0,0,0,0,0,3.4,0,1.36,0,0.68,0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.238,0.238,0,2.232,19,96,1 0.1,0.2,1.01,0,0.8,0.8,0.5,0,0.8,0.1,0.3,0.7,0.3,0,1.61,0.1,0,1.11,1.31,0.2,0.7,0,0.6,0.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0.1,0.1,0,0,0,0.11,0,0.488,0.157,0.015,8.55,669,1351,1 0,0,0.66,0,0.33,0,0.33,0.33,1.33,2,0,0.66,0,0.33,1,0.33,0,0.66,2.67,0,1,0,2,0,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0.23,0,0.057,0.23,0,5.279,82,227,1 0,0,0,0,0,0.23,0,0,0,0,0,0.46,0,0,0,0.46,0.46,0.23,3,0,0.69,0,0,0,0,0,0,0,0,0,0,0,0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.077,0.038,0,0,0,0.038,2.6,42,182,1 
0.39,0,0,0,0,0.39,0.79,0,0,0.39,0,0.79,0,0,0,0,0.39,0,2.37,0,2.76,0,1.18,0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.064,0,0.64,0.192,0,2.74,13,74,1 0,0,0.77,0,0.38,0.38,0.38,0,0,0.77,0.38,0.38,0,0,0,0.77,0.77,0.77,2.31,0,1.15,0,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.063,0.127,0.255,0.51,0,0,3.685,62,258,1 0,0,0,0,0.53,0,0.53,0,0.53,0,0,1.07,0,0,0,0,0,0,2.15,0,3.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.082,0,0,4.391,66,101,1 0,0.31,0.42,0,0,0.1,0,0.52,0.21,0.52,0,0.52,0.63,0.1,0.1,0.21,0.31,0.21,2.53,0.42,1.69,0.31,0,0.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.1,0,0,0,0,0.016,0,0.887,0.032,0.049,3.446,318,1003,1 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.022,0.022,0.019,0.022,0.022,0.022,3.482,5,5902,0 0,0,0,0,0,0,0,0,0,0.85,0,0,0,0,1.7,0,0,0,2.56,0,1.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.299,0,0,0.149,0,0,1.04,2,26,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,33.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.12,0,0,0,0,0,0,0,0,0,0,0,1.28,0,2.56,0,0,0,0,0,0,0,0,0,0,0.131,0,0.262,0,0,1.625,7,65,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.07,0,0.07,0,0,0,0.07,0,0,0,0,0,0,0.07,0,0,0,0,0,0,0,0,0,0.104,0.324,0,0,0.011,4.411,28,1866,0 0,0,0,0,2.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.08,2.04,2.04,2.04,2.04,2.04,2.04,2.04,0,2.04,2.04,2.04,0,0,0,2.04,0,4.08,0,0,0,0,0,0,0,0.671,0,0,0,0,2.5,11,35,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.84,0,0.84,0,0,0,0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.273,0.136,0,0,0.136,3.571,28,150,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.87,0,0,0,0,0,0,0,0,0,0,0,0,0.393,0,0,1.75,7,28,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.729,0,0,2.285,7,16,0 0,0,0,0,0,0,0,0,0,0,0,0.24,0,0,0,0,0,0,0.24,0,0,0,0,0,9.33,3.93,0.24,0,0,0.73,0,0,0,0,0.24,0.24,0,0,0.24,0,0,0.73,0,0.49,0,0,0,0,0,0.037,0,0.149,0,0,10.012,251,791,0 0.9,0,0,0,0.9,0,0,0,0,0,0,1.8,0,0,0,0,0,0,3.6,0,1.8,0,0,0,0.9,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.9,0,0,0,0,0,0,0,0.149,0,0,0,0,2.766,12,83,0 0,0,0,0,0,0,0,0,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.85,0,0,0,0,0,0,0,1.85,0,0,0,0,3.7,0,0,0,0,0,0,0,0.308,0,0,0,0,2,11,26,0 0.08,0,0.08,0,0,0.08,0,0.49,0,0,0.08,1.48,0.08,0.08,0,0,0.08,0,0,0,0,0,0,0,3.3,0,0,0,0,0,0,0,0,0,0,0.41,0.08,0,0,0,0,0,0.08,0,0,0,0,0.16,0.098,0.153,0,0,0.032,0,2.324,18,709,0 0,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.85,0,0,0,0,0,0,0,0.284,0,0,0,0,1.8,5,27,0 0,0,1.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.89,0,0,2.89,0,0,0,0,0,0,0,0.247,0,0,0,0,2.38,8,50,0 0,0,0,0,0,0,0,0,0,0,0,5.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.546,0,0,2,4,16,0 0,0,0,0,0,0,0,0,0,0,0,0.8,0,0,0,0,0,0,0.8,0,0,0,0,0,0.8,0,0,0,0,0,0,0,0,0,0.8,0.8,0,0,0,0,0,1.6,0,1.6,0,0,0,0,0,0.115,0,0.115,0,0,3.388,28,122,0 0,0,0,0,1.51,0,0,0,0,0,0,3.03,0,0,0,0,0,0,1.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.51,0,0,0,0,0,0.547,0,0,0,0,1.75,5,28,0 0,0,0,0,0,0,0,0,0,2.32,0,0,0,0,0,2.32,0,0,2.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.32,0,0,0,0,2.32,0,0,0,0,0,0.375,0,0,1.444,5,13,0 0,0.63,0,0,1.27,0,0,0,0,0,0,1.27,0,0,0,0,0,0.63,3.18,0,0.63,0,0,0,0,0,1.27,1.27,0,0,0,0.63,0,0.63,0,0,0,0,0,0,0,0,0,0,2.54,0,0,0,0,0.218,0,0,0,0,2.215,22,113,0 0,0,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,1.16,0.58,0.58,0,0,0,0,0,1.16,0.58,1.16,1.74,0.58,0.58,0.58,0.58,0,0.58,0.58,0.58,0,0,0,0.58,0,0,0,0,0.58,0,0,0,0,0.658,0,0.282,0,0,1.932,11,114,0 
0.18,0.06,0.24,0,0.18,0,0,0.18,0,0.12,0,0.6,0,0,0,0.24,0.12,0,0.78,0,0.72,0,0.06,0.42,1.93,0.66,0,0.18,0,0.12,0.3,0,0,0,0.42,0,0.18,0,0.24,0,0,0.12,0,0,0.18,0,0,0.12,0,0.196,0,0.044,0.026,0,1.873,29,843,0 0,0,1.88,0,0,0,0,0,0,0,0,1.88,0,0,0,0,0,0,1.88,0,1.88,0,0,0,0,0,1.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.88,0,0,0,0,0,0,0.323,0.323,0,0,0,0,1,1,12,0 0,0,2.12,0,0,0,0,0,0,0,0,2.12,0,0,0,0,0,0,0,0,2.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.647,16,45,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.1,5.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.35,0,0,0,0,0,0,0,0,0,0,1.142,2,8,0 0,0,0,0,0,0,0,0,0,0,0,1.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.78,0,0,0,0,0,0,0.336,0,0,0,0,1.909,5,21,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.315,0,0,1,1,7,0 0,0,0,0,0,0,0,0,0,0,0,0.86,0.86,0,0,0,0,0,0,0,0,0,0,0,3.47,0,0,0,0,0,0,0,0,0,0,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.322,11,72,0 0,0,0,0,0,0,0,0,0,1.44,0,0,0,0,0,0,0,1.44,0,0,0,0,0,0,2.89,1.44,0,1.44,0,1.44,1.44,0,0,0,1.44,1.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0.156,0,0.313,0,0,1.689,10,49,0 0,0,0,0,3.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.538,4,20,0 0,0,0.54,0,0,0,0,0,0,0,0,0.27,0,0,0,0,0,0,3.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0.27,0,0.54,0,0.27,0,0.27,0.27,0,0,0,0.188,0.047,0,0,0,1.745,12,89,0 0,0,0.75,0,0,0,0,0,0,0,0,0.75,0,0,0,0,0,0,3.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.262,0,0,0,0,1.437,3,23,0 0,0,0.79,0,0,0,0,0,0,0,0,0.39,0,0,0,0,0,0.39,3.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.39,0,0,0,0.39,0,0,0,0.39,0.39,0,0,0,0.237,0,0,0,0.059,2.51,12,123,0 0.08,0.16,0.08,0,0.2,0,0.04,0.04,0.04,0.49,0.12,0.32,0.12,0.04,0,0.08,0,0,0.77,0,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.69,0.65,0,0,0.04,0,0.08,0,0.16,0,0.28,0,0.89,0.016,0.243,0,0.033,0,0.016,2.747,86,1995,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.125,17,25,0 0,0,0,0,0,0,0,0.26,0.39,0,0.13,0.52,0.26,0,0,0,0,0,0,0,0,0,0,0,4.22,0.13,0,0,0,0,0,0,0,0,0,0.13,0.13,0,0,0,0,0,0,0.13,0,0,0,0,0.017,0.107,0,0,0.071,0,2.848,26,433,0 0,0,1.58,0,0,0,0,0,0,0,0,1.58,0,0,0,0,0,0,1.58,0,1.58,0,0,0,1.58,3.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.538,4,20,0 0,0,0.21,0,0.42,0,0,0,0.21,0,0,0,0,1.27,0,0,0.21,0,0.21,0,1.06,0,0,0,0.21,0,0,0.21,0,0,0,0,0,0,0.21,0,0,0,0,0,0,2.12,0,0,0,0,0,0,0,0.161,0,0.161,0,0.182,2.813,121,723,0 0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.684,0,0,2,7,16,0 0,0,1.21,0,0,2.43,0,0,0,0,0,0,0,0,0,0,0,0,1.21,0,1.21,0,0,0,0,0,1.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.211,0,0.211,0,0,1,1,11,0 0,0,0,0,0,0,0,0,0,0,0,1.41,0,0,0,0,0,0,0.47,0,0.94,0,0,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0.137,0,0.068,0,0,3.195,21,147,0 0,1.28,0,0,0,0,0,0,0,0,0,1.28,0,0,0,0,0,0,2.56,0,1.28,0,0,0,5.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.196,0,0,0,0,1.952,10,41,0 0.29,0,0.29,0,0.29,0,0,0,0,0,0,0,0.29,0,0,0,0.29,0,0,0,1.75,0,0,0,2.63,0,0,0,0,0,0,0,0,0,0,0.58,0,0,0,0,0,0,0,0.58,0,0,0,0,0,0.108,0,0.072,0,0,2.847,60,242,0 0.26,0,0,0,0,0,0,0,0,0.53,0,3.76,0,0,0,0,0,0,0.26,0,0,0,0,0,3.76,2.68,0,0,0,0.26,0,0,0,0,0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.55,0,0.045,0,0,1.837,11,158,0 0,0,0,0,0,0,0,0,0,0,0,0.91,0,0,0,0,0.91,0,2.75,0,0,0,0,0,1.83,0,0,0,0,0,0,0,0,0,0,0,0.91,0,0,0,0,0.91,0,0,0,0,0,0,0.301,0,0,0.301,0,0,1.942,8,68,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.14,0,0,0,0,7.14,0,0,0,0,0,0,0,0,5.5,10,11,0 0,0,0,0,0,0,0,0,0,0,0,3.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.22,0,0,0,0,3.22,0,0,0,0.526,0,0,0,0,1.571,3,11,0 0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.613,0,0,1,1,14,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.34,0,8.69,0,0,0,0,0,4.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.048,0,0,1,1,8,0 0,0,1.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.02,0,0,0,0,0,0,1.02,0,1.02,0,0,0,0,0,0,0,0,0,0,0,0,0.167,0,0,0,0,2.195,17,90,0 0,0,0,0,0,0,0,0,0,0,0,1.75,0,0,0,0,0,0,2.63,0,3.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.235,5,21,0 0,0,0.78,0,0,0,0,0,0,0,0,0.78,0,0,0,0,0,0,1.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.78,0.145,0,0,0.725,0,0,1.187,4,19,0 0.11,0,0.11,0,0.11,0.11,0,0,1.03,0,0,0.34,0,0,0,0,0,0,0.45,0,0.22,0,0,0,0.57,0.68,0.11,0,0,0,0,0,0.34,0,0,0,0.22,0,0,0,0,0,0,0,0,0,0,0,0.078,0.171,0.031,0,0.031,0,3.407,41,535,0 0.67,0,0,0,1.01,0,0,0,0,0,0,0.67,0.67,0,0,0,0,0,1.35,0,1.68,0,0,0,0.33,0.33,0,0,0,0,0,0,0,0,0,0,0.67,0,0,0,0,0.67,0,0,0.33,0,0,0.33,0.097,0.048,0,0.048,0,0,2.326,22,107,0 0.02,0,0.15,0,0.24,0.31,0,0.04,0.22,0,0.02,0.08,0,0,0.02,0,0,0.02,0.08,0,0.06,0,0,0,0.44,0.47,0.02,0,0,0,0,0,0.11,0,0,0,0,0.02,0,0,0,0.02,0,0,0,0,0,0,0.185,0.15,0.044,0,0.006,0,2.838,52,2078,0 0.51,0,0.51,0,0,0.51,0,0,0,0,0,0.51,0,0,0,0,0,0,0.51,0,1.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.51,0,0.51,0,0.51,0,1.02,0,0,0.51,0,0,0,0.161,0.08,0.08,0,0,1.885,12,66,0 0,0,0.65,0,0.32,0,0,0,0,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.061,0.061,0.061,0,0,1.392,11,71,0 0,0,0.1,0,0.1,0.1,0.2,0.2,0.1,0,0,0.2,0.1,0.2,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0.2,0.013,0.097,0,0,0.027,0,2.214,22,423,0 0,0,0.23,0,0.23,0,0,0,0.23,0,0,0,0,1.43,0,0,0.23,0,0.23,0,2.14,0,0,0,0.23,0,0,0,0,0,0,0,0,0,0,0,0.23,0,0,0,0,1.9,0,0,0,0,0,0,0.117,0.235,0,0.117,0,0.164,2.616,160,683,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.15,0,0,0,0,0,0,0,4.5,1.8,0,0,0,2.7,0,0,0,0,0,0,0,0,0,0.45,0,0,0,0,0,0,0,0,0.557,0,0,0.123,0,0,2.063,34,130,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.751,0,0,0,0,2,4,10,0 
0,0.16,0.32,0,0.16,0,0,0.16,0.16,0,0,0,0,0,0.16,0,0,0,0,0,0,0,0,0,0.96,0.48,0.16,0,0,0,0,0,0,0,3.21,0,0.16,0,0,0,0,0.96,0,0,0.32,0.16,0.16,0,0,0.124,0,0,0,0.11,4.771,63,1064,0 0,0.54,0,0,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.54,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.09,0,0,0,0,0.182,0.091,0.091,0,0,1.212,5,40,0 0,0.37,0,0,0,0,0,0,0,0,0,0,0.37,0,0,0,0,0,1.51,0,0,0,0,0,5.68,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.065,0.261,0,0,0,0,1.114,5,39,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,0,0,0,7.14,3.57,0,3.57,0,3.57,3.57,0,0,0,3.57,3.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0.24,0,0.24,0,0,1.687,10,27,0 0.3,0,0,0,0,0,0,0,0,0,0,0.61,0,0,0,0,0,0,0.3,0,0,0,0,0,0.3,0,0,0,0,0.3,0,0,0.3,0,0,0,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.426,6,97,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.77,0,0,0,0,0,0,5.55,2.77,0,2.77,0,2.77,2.77,0,0,0,2.77,2.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0.218,0,0.218,0,0,1.687,10,27,0 0,0,0,0,0,0,0,0,0,0,0,3.92,0,0,0,0,0,0,0,0,0,0,0,0,1.96,1.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.303,0.303,0,0,0,0,1.6,9,24,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.7,3.7,3.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.645,0,0,0.645,2.888,8,26,0 0,0,0,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0.74,0,0,0,0,0,2.22,0.74,0,0,0,0,0,0,0,0,0,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0.414,0,0.31,0.103,0,2.034,11,59,0 0,0,0.06,0,0.89,0.13,0,0.2,0,0,0,0.13,0.06,0,0,0,0.96,0,0,0,0,0,0,0,1.1,0,0,0,0,0,0,0,0,0,0,0,0.13,0.06,0,0,0,0.06,0,0,0.34,0,0,0,0.018,0.047,0,0,0.085,0,2.924,52,617,0 0,1.35,0.19,0,0,0,0,0,0,1.74,0,0.19,0,0,0,0,0,0,0,0,0.38,0,0,0,2.32,0.96,0,0,0,0,0,0,0,0,0.58,1.16,0.38,0,0,0,0,0,0.19,0,0,0,0,0.58,0,0.337,0,0,0,0,3.937,44,693,0 0.07,0,0.15,0,1.53,0.15,0,0.46,0,0,0.07,0.46,0.46,0,0,0,0.07,0,0.76,0,0.38,0,0,0,0.69,0,0,0,0,0,0,0,0,0,0,0,0.15,0,0,0,0,0.07,0,0,0.61,0,0,0,0,0.022,0,0,0.033,0,1.705,36,220,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.886,0,0,1.375,4,11,0 
0,0,0,0,0.69,0,0,0,0,0,0,1.38,0,0,0,0,0,0,0.69,0,0.69,0,0,0,1.38,0,0,0,0,0,0,0,0,0,0,1.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,2.488,15,112,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,4,0 0,0,0,0,0,0,0,0,0,0,0,1.07,0,0,0,0,0,0,1.07,0,0,0,0,0,0,0,0,0,0,0,0,0,2.15,0,0,0,0,0,0,0,0,0,0,3.22,0,0,0,0,0,0,0,0,0,0,1.416,6,68,0 0,0,0,0,0,0,0,0,0.91,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0.109,0.254,0,0,0,0,3.606,119,357,0 0,0,0,0,0,0,0,0,0,0,0,0.93,0,0,0,0,0,0,2.34,0,0,0,0,0,0.93,0,0,0,0,0.46,0,0,0,0,0,0,0,0,3.75,0,0,0,0,0,0,0,0,0,0,0.438,0,0,0,0,2.448,37,120,0 0,0,0,0,0,0.25,0,1,0,0,0,0.25,0,0,0,0,0,0.25,0,0,0,0,0,0,0.75,0.25,0,0,0,1,0,0,0,0,0,0.25,1.25,0,0,0,0,0,0,0,0,0.5,0,0,0,0.153,0,0,0,0,1.958,26,329,0 0.11,0.05,0.22,0,0.22,0.05,0,0,0.05,0.11,0.11,0.56,0.05,0,0,0.11,0.16,0,1.35,0,0.73,0,0,0,1.69,1.3,0,0.05,0,0.11,0.16,0,0.05,0,0.33,0.05,0.33,0,0,0.05,0,0.11,0,0.11,0.05,0,0,0.05,0.025,0.085,0,0.042,0,0,2.031,22,971,0 0,0,0,0,0,0,0,0,0.14,0,0,0.43,0,0,0,0,0.14,0,0,0,0.14,0,0,0,0.14,0.57,0,0,0,0,0,0,0.14,0,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0.14,0.058,0.156,0,0,0,0,1.687,24,496,0 0,0,0,0,0,0,0,0,0.29,0,0,0,0,1.75,0,0,0.29,0,0.29,0,0.29,0,0,0,0.29,0,0,0,0,0,0,0,0,0,0,0,0.29,0,0,0,0,1.75,0,0,0,0,0,0,0.156,0.052,0,0.052,0,0.235,2.721,38,566,0 0,1.36,0,0,0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0,4.54,0,0,0,0,0,0,0,0,0,0.45,0,0.9,0,0.45,0,0,1.81,0.45,0,0,1.36,0,0,0.069,0.069,0,0,0,0,2.186,15,164,0 0,2.4,0,0,0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,2.4,0,0,0,0,0,1.6,0,0,0,0,0,0,0,0,0,0,0,0.8,0,0,0,0,1.6,0,0,0,0.8,0,0,0.12,0,0,0,0,0,1.696,15,56,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.14,0,0,0,0,7.14,0,0,0,0,0,0,0,0,5.5,10,11,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 
0,0,0.1,0,0,0,0,0,0,0.1,0.1,0.96,0.1,0,0,0,0,0,0,0,0,0,0,0,3.52,0.1,0,0,0,0,0,0,0.74,0,0,0.1,0.21,0.1,0,0,0,0,0,0,0,0,0,0,0.014,0.117,0,0,0,0,2.204,24,496,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.14,0,0,0,0,7.14,0,0,0,0,0,0,0,0,5.5,10,11,0 0,0,0,0,0,0,0,0,0,0,0,1.01,0,0,0,0,0,0,0,0,1.01,0,0,0,1.01,0,0,0,0,0,0,0,0,0,2.02,1.01,0,0,1.01,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.108,25,115,0 0,0,0,0,0.68,0,0,0,0,0,0,0.34,0,0,0,0,0,0,0.34,0,2.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0,0,0,0,0,0.051,0,0,0,0,1.062,4,137,0 0,0,0,0,0.14,0,0,0.14,0,0,0,0.73,0,0,0,0,0,0,0.14,0,0,0,0,0,2.48,1.6,0,0,0,0.14,0,0,0,0,0,0,1.16,0,0.29,0,1.16,0,0,0,0.14,3.07,0,0,0.144,0.433,0.082,0.02,0,0,4.113,52,654,0 0,0,0,0,0,0,0,0,0,0,0,4.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.09,0,0,0,0,0,0,0,0,0,0,0,0,9.09,0,0,0,0,0,0,0,0,0,0,0,0,1.666,4,15,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.17,0,0,0,0,0,0,4.34,2.17,0,4.34,0,2.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.529,0,0,0,0,4,11,68,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.163,0,0,0,0,1.5,5,24,0 0,0,0,0,0.26,0.26,0,0,0,0.26,0,0.26,0,0,0,0.26,0,0,2.08,0,2.6,0,0,0,0.26,0,0,0,0,0,0.26,0,0,0,0,0,0,0,0,0,0,0,0.26,0,0.26,0,0,0,0.037,0,0,0,0,0,2.545,18,168,0 0.31,0,0,0,0,0,0,0,0,0,0,0.62,0,0,0,0,0,0,0.31,0,0,0,0,0,0.31,0,0,0,0,0.31,0,0,0.31,0,0,0,0,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.446,6,94,0 0,0,0,0,0,0,0,0,0,0,0,0.96,0,0,0,0,0,0,1.44,0,0.48,0,0,0,2.89,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0.48,0,0,0,0.48,0,0,0,0,0.48,0,0.371,0.074,0.074,0,0,2.534,18,185,0 0.32,0,0,0,0,0,0,0,0,0.64,0,3.23,0,0,0,0,0,0,0.32,0,0,0,0,0,3.88,2.58,0,0,0,0.32,0,0,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.492,0,0,0,0,1.89,11,138,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.14,0,0,0,0,7.14,0,0,0,0,0,0,0,0,5.5,10,11,0 
0.15,0.1,0,0,0.3,0.15,0,0.3,0,0,0,0.6,0,0,0,0,0,0,2.06,0,0.85,0,0.05,0,0.2,0,0,0,0,0,0,0,0.25,0,0.15,0,0.35,0,0,0,0,0,0,0.05,0,0,0,0,0.073,0.234,0,0.073,0,0,2.206,49,1026,0 0,0,0,0,0,0,0,0,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.142,3,16,0 0,0,0,0,0,0.27,0,0,0,0.27,0,0.27,0,0,0,0.27,0,0,2.18,0,2.73,0,0,0,0.27,0,0,0,0,0,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.593,18,166,0 0.07,0,0.15,0,0.09,0.05,0,0.03,0.05,0.05,0,0.41,0.03,0,0,0.01,0.09,0.13,0.03,0.01,0.07,0,0.01,0,0,0,0.03,0,0.01,0,0,0,1.21,0,0,0.07,0.95,0,0.01,0.11,0.13,0.01,0,0,0,0.39,0.03,0.51,0.042,0.173,0.002,0.008,0.005,0.002,2.145,71,2954,0 0.16,0.08,0,0,0,0,0,0,0.16,0.33,0,0.67,0,0,0.08,0,0.5,0.33,0.58,0.16,0.42,0,0,0.08,1.34,0.58,0,0,0,0.08,0,0,0,0,0,1.09,1.34,0,0.16,0,0,0,0,0.08,0,0,0,0,0.084,0.408,0.06,0,0.012,0,4.179,104,1655,0 1,0,2,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,5,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.17,0,0,0.17,0,0,1.692,4,22,0 0,0,1.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.7,2.35,0,0,0,0,0,0,0,0,0,0,1.17,0,0,0,0,0,1.17,0,0,0,0,0,0,0,0.193,0,0,0,1.974,16,77,0 0,0,1.06,0,0,0.35,0,0,0,0,0,2.13,0,0,0,0,0.71,0,0.71,0,0,0,0,0,4.62,0,0,0,0.35,1.06,0,0,0,0,0,0.35,0.35,0,0.35,0,0,0,0.35,0,0.71,0,0,0,0,0.055,0.055,0,0,0,2.239,17,206,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,0,0,0,7.14,3.57,0,3.57,0,3.57,3.57,0,0,0,3.57,3.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0.239,0,0.239,0,0,1.687,10,27,0 0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,4,2,0,2,0,2,2,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0.18,0,0,0,0,1.611,10,29,0 0.23,0,0,0,0,0.23,0,0,0,0,0,0.92,0.46,0,0,0,0,0,2.76,0,2.76,0,0,0.69,0,0,0,0,0,0,0,0,0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.445,0,0.202,0.121,0,1.945,7,142,0 0,0,0,0,0,0,0,1.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.78,0,0,0,0,0,3.57,0,0,0,0,0,0,0,0,0,0,0,1.666,7,15,0 
0,0,0,0,0,0,0,0,0,0,0,0,0.89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.89,0,0,0,0,0,0,1.78,0,0,0,0.89,0,0,0,0,0.89,0,0,0,0.269,0.269,0,0,0,2.27,16,84,0 0,0,0,0,0,0,0,0,0,0,0,1.72,0,0,0,0,0,0,3.44,0,1.72,0,0,0,0,1.72,0,0,0,0,0,0,0,0,0,0,0,0,1.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.058,5,35,0 0,0,0,0,0,0,0,0,0,0,0,2.32,0,0,0,0,0,0,1.16,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.16,0,0.744,0,0.148,0,0,1.972,18,71,0 0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,1.36,0,0,0,0,0,0,0,0,0,1.36,1.36,0,0,0,0,0,0,1.36,0,0,0,1.36,0,0,0,0,0,1.36,0,0,0,0,0,0,0.404,0.202,0,0,0,3.533,17,53,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.35,0,0,0,0,0,0,0,0,0,0,0,0,1.169,0,0,0,0,2.533,21,76,0 0,0,1.18,0,0,0,0,0,0.59,0,0,0,0,0,0,0,0,0,1.77,0,1.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.313,0.313,0,7.588,66,129,0 0,0,0,0,0,0.57,0,0,0,0,0,1.73,0,0,0,0,1.15,0,0.57,0,0,0,0,0,2.31,0,0,0,0.57,1.73,0,0,0,0,0,0.57,0,0,0,0,0,0,0,0,0.57,0,0,0,0,0.086,0,0,0,0,1.5,5,72,0 0,0,0,0,0,0,0,0,0.62,0,0,0.2,0,0,0,0,0,0,0,0,0,0,0,0,3.34,2.3,0,0,0,0,0,0,0,0,0.2,0,0.62,0,0.2,0,0,0,0.41,0,0,0,0,0,0.085,0.198,0.056,0,0,0.454,3.414,108,536,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.96,0,1.96,0,0,0,0.98,0,0,0,0,0.98,0,0.377,0,0.125,0,0,2.925,27,158,0 0,0,0.25,0,0,0.25,0.5,0.25,0,0,0,0,0,0,0,0,0,0,1.25,0,0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.25,0,0,0,0,0.18,0,0.045,0,0,1.324,5,98,0 0.05,0,0.45,0,0.16,0.11,0,0,0.62,0,0,0.16,0,0,0,0,0,0.05,0.22,0,0.16,0,0,0,0.62,0.67,0.05,0,0,0,0,0,0.9,0,0,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0.121,0.162,0.04,0,0.016,0,2.887,45,875,0 0,0,0.35,0,0.35,0.71,0,0,0,0,0,0.71,0,0,0,0,0,0,0.71,0,0.35,0,0,0,0,0,0,0,0,0,0,0,0.35,0,0,0,0.35,0,0.35,0,0,0,0.35,0,0.35,0,0,0,0,0.124,0,0.372,0,0,1.641,12,110,0 0.25,0.25,0,0,0.25,0,0.25,0,0,0.25,0.25,0,0.25,0,0,0.25,0,1.02,2.05,0,2.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.25,0,0,0,0.51,0.25,0,0,0,0,0,0.413,0,0.165,1.78,13,146,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,1.26,0,0,0,0,0,1.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.26,0,0,0,0,0,0.214,0,0.214,0,0,1.263,4,24,0 0,0,0,0,0,0,0,0,0,0,0,1.38,0,0,0,0,0,0,1.38,0,1.38,0,0,0,1.38,1.38,2.77,0,0,0,0,0,0,0,0,0,1.38,0,1.38,0,0,0,1.38,0,0,0,0,0,0,0,0.224,0.448,0,0,1.451,12,45,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.234,0,0,3,5,15,0 0,0,0,0,0,0,0,0,0,0,0,1.52,0,0,0,0,0,0,0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.76,0,0,0,0,0.76,0,0,0,0.118,0,0,0,0,1.735,10,59,0 0,0,0,0,0,0,0,0,0,0,0,2.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.92,0,0,0,0,0,0,0,0.92,0,0,0,0,0.92,0,0,0,0,0,0,0,0.165,0,0.165,0,0,1.666,7,45,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.5,3,6,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.42,0,1.42,0,0,0,0,0,0,0,0,1.42,0,0.361,0,0,0,0,2.025,7,81,0 0,0,0,0,0,0,0,0,0,0.42,0,0.42,0,0,0,0,0,0,3.4,0,0.42,0,0,0.42,1.27,0.85,0,0.85,0,0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0.42,0,0,0,0,0,0.155,0,0,0,0,2.555,11,92,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.38,2.307,9,30,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0,0,0,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,17,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0.724,0,0,2.285,8,16,0 0,0,0,0,0,0,0,0,0,0,0,1.4,0,0,0,0,0.7,0,1.4,0,1.4,0,0,0,0,0,0.7,0,0,0,0.7,0,0,0,0,0,0,0,0,2.11,0,0,0,0,0,0,0,0,0,0.266,0.066,0,0,0,18,200,378,0 0,0,0,0,0,0,0,0,0.14,0,0,0.43,0,0,0,0,0.14,0,0,0,0.14,0,0,0,0.14,0.57,0,0,0,0,0,0,0.14,0,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0.14,0.058,0.156,0,0,0,0,1.566,13,462,0 0,0,0.13,0,0.26,0,0,0.65,0.13,0,0,0.78,0.26,0,0,0,0.13,0,0,0,0,0,0.13,0,1.69,0,0,0,0,0,0,0,0,0,0,0,0.13,0,0,0,0,0,0,0,0,0,0,0.13,0,0.105,0,0,0.052,0,2.165,20,446,0 
0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0.286,0,0,0,0,1.461,4,38,0 0.67,0,0,0,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0,1.01,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.34,0,0,0,0,0,0,0,0.055,0,0,0,0,1.644,13,74,0 0.42,0,0,0,0,0,0,0,0,0.42,0,0.42,0.42,0,0,0,0,0,0.42,0,0.42,0,0,0,1.28,2.57,0,0,0,0.42,0,0,0.42,0,0,0.42,0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.151,0,0,1.533,9,69,0 0,0,1.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.96,0,0,0,0,0,0,3.92,1.96,0,3.92,0,1.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.475,0,0,0,0,2.95,11,59,0 0,0,0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0,0,0,0.96,1.93,0,0,0,0.48,0,0,0,0,0,0.96,0.48,0,0,0,0,0,0,0,0.48,0,0,0,0,0,0,0,0,0,1.353,7,88,0 0,0,1.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.66,1.66,0,0,0,0,0,3.33,1.66,0,1.66,0,1.66,1.66,0,0,0,1.66,1.66,0,0,3.33,0,0,0,0,0,0,0,0,0,0,0.167,0,0.167,0,0,1.533,10,46,0 0,0,0,0,0,0,0,0,0,0,0,0.81,0,0,0,0,0,0,0.81,0,0,0,0,0,2.45,2.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.12,0.12,0,0,0,0,2.473,11,47,0 0,0,0,0,0,0,0,0,0,0,0,2.15,0,0,0,0,0,0,1.07,0,0,0,0,0,5.37,1.07,0,0,3.22,0,1.07,0,0,0,0,0,1.07,0,0,0,0,3.22,1.07,0,0,0,0,0,0,0.388,0.194,0,0,0.194,2.666,13,104,0 0.06,0,0.19,0,0.06,0.06,0,0,0,0.13,0,0.26,0.06,0,0,0,0,0,0.52,0,0.46,0,0,0,2.57,0.92,0,0.06,0.13,0.52,0.32,0,0.06,0,0.26,0.13,0.32,0,0.26,0,0,0,0,0,0.06,0,0,0.13,0.009,0.152,0,0.047,0.028,0,1.948,23,836,0 0,0,0,0,0,0,0,0,0,0,0,5.12,0,0,0,0,0,0,2.56,0,0,0,0,0,0,0,0,0,5.12,0,0,0,0,0,0,0,0,0,0,0,0,5.12,0,0,0,0,0,0,0,0.5,0,0,0,0,2.266,7,34,0 0,0,0,0,0,0,0,0,0,0,0.38,0.19,0,0,0,0,0,0,1.33,0,0.38,0,0,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0,0,0,0,0,0,0.154,0,0.03,0,0.03,2.852,12,388,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.09,4.54,4.54,4.54,4.54,4.54,4.54,4.54,0,4.54,4.54,4.54,0,0,0,4.54,0,0,0,0,0,0,0,0,0,1.169,0,0,0,0,3.1,11,31,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 
0,0,0,0,2.43,0,0,0,0,0,0,0,0,0,0,0,0,0,2.43,0,0,0,0,0,0,0,2.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.442,0,0,0,0,1.2,3,12,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.5,6,66,0 0,0,0,0,0,0,0,0,0,1.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.4,0,0,0,0,0,1.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.22,0.22,0,0,0,3.875,28,93,0 0.22,0,0.22,0,0.67,0,0,0,0.22,0,0,0,0.22,1.34,0,0,0.44,0,0.67,0,1.56,0,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.23,0,0,0,0,0,0,0,0.156,0,0.134,0,0.156,3.08,121,693,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.161,0,0,1.25,3,15,0 0,0,0,0,0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0.69,0,0,0,0,0,2.08,0.69,0,0,0,0,0,0,0,0,0,0,0.69,0,0,0,0,0,0,0,0,0,0,0,0,0.284,0,0.284,0.094,0,2,11,60,0 0.13,0,0.13,0,0.13,0,0,0,1.18,0,0,0.52,0,0,0,0,0,0,0.52,0,0.26,0,0,0,0.65,0.79,0.13,0,0,0,0,0,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.128,0.164,0.036,0,0.036,0,3.185,32,481,0 0.08,0,0.22,0,0.04,0,0,0,0.44,0.04,0,0.22,0.04,0,0,0,0.04,0,0.17,0,0.08,0,0,0,0.39,0.44,0.04,0,0,0,0,0,0.57,0,0,0,0.08,0.08,0,0,0,0,0.04,0.04,0,0,0,0,0.163,0.197,0.058,0,0.011,0,3.851,64,1583,0 0,0.28,0.28,0,0.86,0,0,0,0,0.28,0.28,0,0,0,0,0,0,0,2.87,0,1.72,0,0,0,0.28,0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.57,0,0,0,0,0.137,0,0,0,0,1.95,15,156,0 0.09,0.04,0.04,0,0.04,0,0,0,0.66,0,0,0.33,0.04,0,0,0,0,0.04,0.19,0,0.14,0,0,0,0.62,0.66,0.04,0,0,0,0,0,0.14,0,0,0.04,0,0,0,0,0,0,0,0.09,0,0,0,0.04,0.145,0.152,0.053,0,0.013,0,3.685,85,1463,0 1.14,0,0,0,1.14,0,0,0,0,0,0,1.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.14,0,2.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.14,0,0,0,0,0,0,0,0,0,1.19,3,25,0 0,0,0,0,3.22,0,0,0,0,0,0,3.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.22,0,0,6.45,0,0,0,0,0.512,0,0,0,0,2.4,11,24,0 0.2,0,0.1,0,0,0,0,0.1,0,0,0,0.4,0.1,0,0,0,0.2,0,0,0,0,0,0.1,0,4.5,0.1,0,0,0,0,0,0,0.1,0,0,0.1,0.1,0.1,0,0,0,0.6,0,0,0,0,0,0,0.092,0.079,0,0,0.013,0,2.361,26,562,0 
0,0,1.94,0,0,0,0,0,0,0.97,0,0.97,0,0,0,0,0,0,1.94,0,0,0,0,0,0.97,0.97,0,1.94,0,0.97,0,0,0,0,1.94,0,0,0,0,0,0,0.97,0,0,0,0,0,0,0,0.379,0,0,0,0,8.125,75,195,0 0,0,0,0,0,0,0,0,0,1.23,0,2.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.23,0,0,0,0,0,0,0,0,0,0,0,0,0.796,0,0,0,0,2.142,8,60,0 0.81,0,0.81,0,0.81,0,0,0,0,0,0,0.81,0,0,0,0,0,0,1.62,0,1.62,0,0,0,0.81,0,0,0,0,0,0,0,0,0,0.81,0,0,0,0,0,0,1.62,0,0,0,0,0,0,0,0.123,0,0.37,0,0,5.375,69,129,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.38,7.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.69,0,0,0,0,0,0,0,0,0,7,17,35,0 0.54,0,0.27,0,0,0,0,0,0.27,0.54,0,0.81,0,0,0,0,0,0.27,1.08,0,0.81,0,0,0,0.81,0.81,0,1.08,0,0.54,0,0,0.27,0,1.08,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0.039,0.318,0.079,0,0,0,4.971,76,517,0 0.56,0,2.24,0,0,0.56,0,0,0,0,0,1.12,0,0,0,0,0,1.12,4.49,0,0,0,0,0,1.12,0,0,0,0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.56,0,0,0,0.111,0.111,0,0.111,0,0,1.23,4,32,0 0.36,0,1.09,0,0,0,0,0,0,0,0,0.72,1.81,0,0,0,0,0,0.72,0,1.09,0,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.063,0.126,0,0.063,0.126,0,2.562,35,123,0 0,0,0,0,0,0,0,0,0,0,0,0,2.38,0,0,0,0,0,2.38,0,0,0,0,0,2.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.2,2,12,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,17,20,0 0.27,0,0.27,0,0,0,0,0,0,0,0,0.81,0,0,0,0,0,0,0.54,0,0.27,0,0,0,9.83,0.81,0.54,0.27,0.27,0.27,0.27,0.27,0,0.27,0.27,0.27,0.54,0,0.27,0.27,0,0.54,0.54,0,0.54,0,0,0,1.411,1.411,0.041,0,0,0,4.891,20,675,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.43,0,0,0,0,0,0,0,0,2.43,0,0,0,0,0,0,0,0,0,0,0,0,1.625,6,13,0 0.84,0,0,0,0,0,0,0,0,0,0,0.84,0,0,0,0,0,0,0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.84,0,0,0,0,0.082,0.414,0,0,0,0,3.34,7,167,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,5,0,0,0,0,0,0,0,0,0,1.125,2,9,0 
0,0,0,0,1.75,0,0,0,0,0.87,0,0,0,0,0,0,0,0,1.75,0,0.87,0,0,0,0.87,0.87,0.87,0,0,0,0,0,0,0,0,0,0.87,0,0.87,0,0,0,0.87,2.63,0.87,0,0,0,0.469,0,0.156,0,0,0,1.466,12,44,0 0,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0,0,0,0,0,0,0,0.48,1.93,0,0,0.48,0,0,0,0,0,0,0,1.44,0,0,0,0,0,0,1.44,0,0,0,0,0,0.304,0,0,0.365,0,3.016,10,187,0 1.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.02,0,0,0,0,0,0,0,1.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.02,0,0,0,0,1.02,0,0,0.352,0,0.176,0,0,1.241,3,36,0 0,0,0,0,0,0,0,0,0,0,0,0.45,0,0,0,0,0,0,0,0,0,0,0,0,1.81,0.45,0,0,0,0.9,0,0,0,0,0,0.45,0,0,0,0,0,0,0,0,0.45,0,0,0,0,0,0,0,0,0,1.444,5,104,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.5,0,0,0,0,0,0,0,0,0,1.333,3,12,0 0,0,0.76,0,0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0.76,0,0,0,0,0,0.76,0,1.52,0.76,0.76,1.52,0.76,0.76,0,0.76,0.76,0.76,0.76,0,0,0.76,0,0.76,0,0,2.29,0,0,0,0,0.254,0,0.127,0,0,1.755,11,79,0 0,0,0,0,2.46,0,0,0,0,1.23,0,0,0,0,0,0,0,0,2.46,0,1.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.46,0,0,0,0,0.245,0,0,0,0,0,1.166,3,14,0 0,0,0,0,0,0.69,0,0,0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.92,0.92,0,0,0,0,0,0,0.46,0,0,0,0.23,0,0.23,0.23,0,0,0.23,0,0,0.69,0,0,0.033,0,0.033,0,0,0,1.554,13,143,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.04,1.04,0,0,0,0,0,1.04,0,0,0,0,0,0,0,0,0,0,0,1.04,0,1.04,0,0,0,1.04,0,0,0,0,0,0.179,0.358,0,0.179,0,0,2.037,13,55,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.08,0,0,0,0,0,2.08,0,0,0,0,0.393,0,0,0,0,1.545,6,17,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.02,0,1.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.02,0,0,0,1.02,0,0,0,0,0,0,0,0,0,0,0.142,0,1.857,10,65,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.6,4,8,0 0.46,0,0,0,0,0,0,0,0,0,0,2.32,0.46,0,0,0,0,0.46,5.11,0,0.46,0,0,0,0.93,1.39,0,0,0.46,0,0,0,0,0,0,0,0,0,1.39,0,0,0.93,0,0,0,0,0,0,0,0,0,0.065,0,0,2,13,86,0 
0,0,0.58,0,0.58,0,0,0,0,2.35,0,0,0,0,0,0,0,0,0.58,0,0,0,0,0,1.76,1.17,1.76,0.58,0.58,0.58,0.58,0.58,0,0.58,0.58,0.58,0.58,0,0,0.58,0,0,0.58,0,0.58,0,0,0,0.188,0.566,0.094,0,0,0,2.246,13,146,0 0,0,0,0,0,0,0,0,0,0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.75,0,0.75,0,0,0,0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.8,0.133,0,0,0.133,4.472,33,161,0 0,0,0,0,0,0,0,0,0,0.51,0,0,0,0,0,0,0,0,0.51,0,0.51,0,0,0,3.06,2.04,0.51,2.04,0.51,1.02,0.51,0.51,0,0.51,1.02,0.51,0,0,0.51,0.51,0,1.02,0,0,0.51,0,0,0,0.158,0.553,0,0,0,0,4,37,216,0 0.85,0.85,0,0,1.7,0,0,0,0,0.85,0,0.85,0,0,0,0,0,0,0,0,0.85,0,0,0,0,0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.275,0,0,0,0,1.55,5,31,0 0,0,0.52,0,0.52,0,0,0,0,2.11,0,0,0,0,0,0,0,0,0.52,0,0.52,0,0,0,1.58,1.05,1.05,0.52,0.52,0.52,0.52,0.52,0,0.52,0.52,0.52,0.52,0,0.52,0.52,0,0,0.52,0,0.52,0,0,0,0.171,0.513,0.085,0,0,0,2.225,13,158,0 0.39,0.39,0,0,0.39,0,0,0,0,0.39,0,1.19,0,0.39,0,0,0,0,1.19,0,0,0,0,0,2.77,1.98,1.19,1.19,0.39,0.39,0.39,0.39,0,0.39,1.19,0.39,0.39,0,0,0.39,0,0.39,0.39,0,0.39,0,0,0,0.125,0.377,0.439,0,0,0,2.238,13,141,0 0,0,0,0,0,0,0,0.86,0,0,0,2.58,0,0,0,0,0.86,0,2.58,0,0,0,0,0,1.72,0,0,0,0,0.86,0,0,0,0,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.141,0,0,1.535,8,43,0 0,0,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.58,0,0,0,0,0,0.58,1.75,1.16,0,0.58,0,0.58,0.58,0,0,0,0.58,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0.059,0,0,0,0.178,2.506,11,183,0 0,0.68,0.34,0,0,0,0,0.34,0,0,0.34,0,0,0,0,1.72,0,0,1.03,0,2.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0,0,0,0.34,0,0,0,0,0.046,0,0,0,0,3.344,107,194,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.58,0,0,0,0,0,0,0,0,0,0,1.46,0.29,0,0.58,0,0,0,0,0,0,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0.018,0,0,0,0,251,1488,1506,0 0.3,0,0.3,0,0,0,0,0,0,0,0,0.15,0,0,0,0,0,0.15,1.38,0,0.61,0,0,0,1.38,0.3,0.61,0.15,0.15,0.15,0.15,0.15,0.3,0.15,0.15,0.15,0.3,0,0.15,0.15,0,0,0.3,0,0.61,0,0,0,0.131,0.183,0.052,0,0,0,1.837,13,305,0 
0,0,0.29,0,0.29,0,0,0,0.29,0,0,0.29,0,1.19,0,0,0.29,0,0.29,0,0.29,0,0,0,0,0,0,0.29,0,0,0.29,0,0,0,0.29,0,0.29,0,0,0,0,0.89,0,0,0,0,0,0,0,0.156,0.031,0.376,0,0.125,3.338,157,611,0 0,0,0,0,0,0,0,0,0.43,0,0,2.17,0,0,0,0,0,0,0.86,0,0,0,0,0,0.86,0.43,0,1.3,0,0,0,0,0,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.248,0,0,0,0,2.119,15,142,0 0,0,0,0,0,0,0,0,0,0.55,0,0.55,0,0,0,0,0,0,1.11,0,0.55,0,0,0,2.77,2.22,1.11,0.55,0.55,0.55,0.55,0.55,0.55,0.55,0.55,0.55,1.66,0,0.55,0.55,0,0,1.11,0,1.11,0,0,0,0,0.603,0.086,0,0,0,2.113,13,167,0 0,0,0,0,0,0,0,0,0,0,0,1.49,0,0,0,0,0,0,0,0,0,0,0,0,1.49,1.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.49,0,0,0,0,0,0,0,0,2,8,36,0 0,0,0.89,0,0.44,0.44,0,0,0,0,0,0.89,0,0.44,0,0,0.89,0,0,0,0,0,0,0,1.79,0,0,0,0,0,0,0,0,0,0,0.89,0,0,0,0,0,0,0,0,0.44,0,0,0,0,0.131,0,0,0,0,1.61,13,95,0 0,0,0,0,0,0,0,0,0,0,0,0.9,0,0,0,0,0,0.6,0.6,0,0.6,0,0,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.173,0,0,0.129,0,3.266,31,196,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.666,3,15,0 0.35,0,0.35,0,0,0,0,0,0,0,0,0.35,0,0,0,0,0,0,1.42,0,0.71,0,0,0,0,0,0.35,0,0,0,0,0,0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.292,4,84,0 0,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.85,0,0,0,0,0,0,0,0,1,1,19,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.61,0,0,0,0,0,1.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.61,0,0,0,0,0.294,0,0,0,0,1.25,2,15,0 0,0,0,0,0,0,0,0,0,0.65,0,0,0,0,0,0,0,0,0,0,0.65,0,0,0,0,0,0.65,0,0.65,0,0,0,0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.327,0.109,0,0,0.109,3.857,28,162,0 0.36,0,1.47,0,0.36,0,0,0,0.36,0,0.36,3.32,0,0,0,0,0,0,1.1,0,0.36,0,0,0,0.36,0.36,0,0,0,0,0,0,0,0,0,0,1.1,0,0,0,0,1.1,0,0,0,0,0,0,0,0.051,0,0,0,0,2.293,45,172,0 0,0,0,0,0.83,0.41,0,0.83,0,0,0,1.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0.41,0,0,0,0,0,0,0,0,0,0,0.41,0,0,0,0,0,0,0,0,0,0,0,0,0.068,0,0,0,0,1.673,5,82,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,16,0 0,0,0.87,0,0.87,0,0,0,0,0.87,0,0,0,0,0,0,0,0,0,0,0.87,0,0,0,1.75,0.87,2.63,0,0,0,0,0,0,0,0,0,0.87,0,0,0,0,0,0.87,0,0.87,0,0,0,0,0.283,0.141,0,0,0,1.785,15,75,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,6,0 0,0,0,0,0,0,0,0,0,1.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.63,0,0,0,0,0,1.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.272,0.272,0,0,0,4.19,26,88,0 0,0,0,0,0,0,0,0,0,0,0,1.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.58,0,0,0,1.58,0,0,0,0,0,0,0,0,0,0,0,0,1.182,0,0,0,0,2.057,13,72,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.22,2.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.22,0,0,0,0,0,0,0,0,0,1.75,5,21,0 0,0,0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.4,0,0.56,0,0,0,3.37,1.4,0.28,0.28,0.28,0.28,0.28,0.28,0,0.28,0.28,0.28,0.56,0,0,0.28,0,0.28,0.56,0,0.28,0,0,0,0,0.14,0.093,0,0,0,2.464,15,207,0 0,0,0,0,0,0,0,0,0,0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.5,0,0.5,0,0,0,0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.505,0.168,0,0,0.084,4.068,28,236,0 0,0,1.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.33,0,0,0,0,0,0,2.66,1.33,0,1.33,0,1.33,1.33,0,0,0,1.33,1.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0.288,0,0.144,0,0,1.857,10,39,0 0,0,0,0,0.35,0,0,0,0,0.35,0,0,0,0,0,0,0,0,2.1,0,0.7,0,0,0,2.8,1.05,1.4,0.35,0.35,0.35,0.35,0.35,0,0.35,0.35,0.35,0.7,0,0,0.35,0,0,0.7,0,0.7,0,0,0,0,0.233,0.116,0,0,0,1.746,13,145,0 0,2.07,0,0,0,0,0,0,1.55,0,0,0.51,0,0,0,0,0,0,1.03,0,0,0,0,0,0.51,0,2.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.07,0,0,0.141,0.211,25.812,104,413,0 0,1.36,0.9,0,0,0,0,0,0,1.81,0,0.45,0,0,0,0,0,1.81,0,0,3.18,0,0,0,0.45,0,0,0,0,0,0,0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.199,0,0,0,0,3.382,53,159,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.14,0,0,0,0,0,2.29,1.14,0,3.44,0,0,0,0,0,0,2.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.163,0,0,0,0,3.28,15,82,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.08,0,1.08,0,0,0,0,0,1.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.263,4,24,0 0.1,0,0.1,0,0.1,0.1,0,0.4,0,0,0.1,0.8,0,0,0,0,0,0.1,0.1,0,0,0,0,0,0.1,0,0,0.1,0,0,0,0,0.1,0,0,0,0.3,0,0.1,0,0,0.4,0.2,0.2,0,0.8,0,0,0.015,0.136,0.015,0,0.015,0,1.636,18,527,0 0,0,0,0,0.67,0,0,0,0,0.67,0,0.67,0,0,0,0,0,0,0.67,0,0,0,0,0,4.05,4.05,0,2.02,0,0,0,0,0,0,0.67,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0.613,0,0,0,0,2.976,24,128,0 0.9,0,0,0,0,0,0,0,0,0,0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0.9,0.9,0,0,0,0,0,0,0,0.9,0,0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0.15,0,0,0,0,3.225,22,129,0 1.19,0,0,0,0,0,0,0,0,1.19,0,0,0,0,0,0,0,0,1.19,0,0,0,0,0,1.19,1.19,0,2.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.4,0,0,0,0,3.09,11,68,0 0,0,0,0,0.34,0,0,0,0,0,0,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0,0,3.06,0,0,0,0.34,0,0,0,0.34,0,0,0,0,0,0,0.34,0.088,0.132,0,0,0,0,1.25,7,85,0 0,0,0,0,0,0.32,0,0.64,0,0,0,1.6,0,0.32,0,0,0,0.32,0.32,0,0,0,0,0,0.32,0.32,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0,4.295,87,262,0 0,0,0,0,0,0,0,0,0,1.85,0,0,0,0,0,0,0,0,1.85,0,3.7,0,0,0,1.85,0,0,3.7,0,0,0,0,1.85,0,1.85,0,0,0,0,0,0,0,0,0,1.85,0,0,0,0,0.636,0,0.318,0,0,2.695,15,62,0 0,0,0,0,0,0,0,0,0,0.76,0,0,0,0,0,0,0,0,0,0,0.76,0,0,0,0,0,0.76,0,0.76,0,0,0,0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.123,0.123,0,0,0.123,3.7,28,148,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,6,0 1.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.22,0,3.33,0,0,0,2.22,1.11,2.22,1.11,1.11,1.11,1.11,1.11,0,1.11,1.11,1.11,1.11,0,1.11,1.11,0,0,1.11,3.33,1.11,0,0,0,0,0.353,0,0.176,0,0,2.1,12,63,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.14,3.57,3.57,3.57,3.57,3.57,3.57,3.57,0,3.57,3.57,3.57,0,0,0,3.57,0,0,0,0,0,0,0,0,0,0.956,0,0,0,0,3.6,11,36,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.44,0,4.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.22,0,0,0,0,0,0,0.383,0,0,1.333,3,8,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.66,0,0.33,0,0,0,5.66,2.66,2,0.66,0.66,0.66,0.66,0.66,0,0.66,0.66,0.66,0.66,0,0.33,0.66,0,0,0.66,0,0.66,0,0,0,0.101,0.254,0.101,0.05,0.05,0,2.725,15,248,0 0.2,0,0.2,0,0.2,0,0,0,0,0,0,1,0.2,0,0,0,0,0.2,0.4,0,0,0,0,0,2.61,1.2,0,0.4,0,0,0,0,0.8,0,0.4,0,0.8,0,0,0,0,0,0,0,0,0.2,0,0,0.061,0.462,0.061,0,0,0,2.61,24,308,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.08,0,4.16,0,0,0,0,0,2.08,0,2.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.08,0,0,0,0,0,0,0,0,1.181,3,13,0 0,0,0,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0,0,1.31,0,0,0,0,0,2.63,2.63,2.63,1.31,1.31,1.31,1.31,1.31,0,1.31,1.31,1.31,1.31,0,1.31,1.31,0,0,1.31,0,2.63,0,0,0,0,0.407,0.203,0,0,0,2.151,12,71,0 0,1.32,0,0,0,0,0,0,0.66,0.66,0,0.22,0,0,0,0,0,0.88,0.66,0,0.88,0,0,0,1.76,0,1.54,0,0,0.44,0,0,0.44,0,0,0,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0.023,0,0.023,0.047,0.094,8.76,161,876,0 0,2.07,0,0,0,0,0,0,1.55,0,0,0.51,0,0,0,0,0,0,1.03,0,0,0,0,0,0.51,0,2.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.07,0,0,0.141,0.211,25.812,104,413,0 0.34,0,0,0,0,0,0,0,0.34,0.68,0,1.02,0,0,0,0,0,0,1.36,0,0.68,0,0,0,2.38,1.7,0.68,1.7,0.68,0.34,0.34,0.34,0,0.34,0.34,0.34,0.68,0,0.68,0.34,0,0,0.68,0,0.34,0,0,0,0.052,0.42,0.052,0,0,0.052,2.604,13,250,0 0,0,0,0,0,0,0,0,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.47,0,0.47,0,0,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.074,0.297,0,0,0.074,4.308,28,293,0 0,0,0,0,4.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.54,0,4.54,0,0,0,0,0,0,0,0,0,0,2,5,16,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,1.428,3,10,0 0,1.86,0,0,0,0,0,0,0,0.93,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.8,1.86,1.86,0.93,0.93,0.93,0.93,0.93,0,0.93,0.93,0.93,0.93,0,0.93,0.93,0,0.93,0.93,0,0.93,0,0,0,0,0.457,0.152,0,0,0,2.097,13,86,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.47,0,0.47,0,0,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.074,0.298,0,0,0.074,4.268,28,286,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.74,0,0,1.625,6,13,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.27,0,0,0,0,0,2.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.052,2,20,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.2,4,11,0 0.66,0,0,0,0,0,0,0,0.66,0.66,0,2,0,0,0,0,0,0,2,0,1.33,0,0,0,0.66,0.66,0,1.33,0,0,0,0,0,0,0,0,0,0,0.66,0,0,0,0,0,0,0,0,0,0,0.35,0,0,0,0,2.529,11,86,0 0,0,0,0,0,0,0,0,0,1.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.04,1.02,2.04,2.04,0,0,0,0,0,0,0,0,1.02,0,0,0,0,0,1.02,0,2.04,0,0,0,0,0.323,0,0,0,0,2.682,13,110,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.68,0,0.34,0,0,0,5.86,2.75,1.37,0.68,0.68,0.68,0.68,0.68,0,0.68,0.68,0.68,0.68,0,0.34,0.68,0,0,0.68,0,0.68,0,0,0,0.11,0.276,0.11,0.055,0.055,0,2.87,15,244,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.92,0,0,0,0,0,0,3.84,0,0,0,0,0,0,0,0,0,0.26,0,0,2.2,10,44,0 0,0,3.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.727,5,19,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.38,0,0,0,2.38,0,0,4.76,0,0,0,0,2.38,0,2.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.749,0,0.374,0,0,2.85,15,57,0 2.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.38,0,0,0,1.38,0,0,0,0,0,0,0.213,0,0,1.75,6,49,0 0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.733,9,26,0 0,0,0,0,0,0,0,0,0,0,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.592,8,43,0 0,0,0.17,0,0.17,0,0,0.17,0.08,0,0.08,0.25,0,0,0,0.08,0,0,0.94,0,0.6,0,0.25,0,1.89,0.43,0,0.08,0,0.25,0.34,0,0,0,0.25,0,0.17,0,0,0,0,0,0,0,0,0,0,0.08,0,0.127,0,0.051,0.038,0,1.838,24,605,0 
1.05,0,0,0,1.05,0,0,0,0,0,0,2.1,0,0,0,0,0,0,4.21,0,2.1,0,0,0,1.05,1.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.05,0,0,0,0,0,0,0,0.171,0,0,0,0,2.541,12,61,0 0,0,0,0,0.59,0,0,0,0,0,0,0.59,0,0,0,0,0,0,0,0,0,0,0,0,1.79,1.49,0,0.59,0,0.89,0,0,0,0,0.29,0,0,0,0,0,0,0,0,0.29,0,0,0,0,0,0.141,0,0,0,0,1.87,24,174,0 0,0,0,0,0,0,0,0,0.27,0,0,0.82,0.27,0,0,0,1.64,0,1.36,0,0.54,0,0,0,0.27,0,0,0,0,0,0,0,0,0,0,0.54,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0.09,0,0.045,0,0,1.465,8,85,0 0,0,0,0,0,0.8,0,0,0,0,0,1.61,0,0,0,0,0,0,0.8,0,0.8,0,0,0,0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.417,0,0.139,0,0,1.411,5,24,0 0,0,0,0,1.69,0,0,0,0,0,0,1.69,0,0,0,0,0,0,1.69,0,1.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.69,0,0,0,0,0,0,0.552,0,0,1.461,4,19,0 0,0,0,0,0,0,0,0,0,0,0,1.6,0,0,0,0,0,0,0,0,0,0,0,0,9.62,0,0.53,0,0,0,0,0,2.13,0,0,0,1.06,0,1.6,0,0,0.53,1.06,0,1.06,0,0,0,0.425,0,0.17,0,0,0,2.567,15,172,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.93,0,0.46,0,0,0,4.2,2.8,1.4,0.46,0.46,0.46,0.46,0.46,0,0.46,0.46,0.46,0.46,0,0,0.46,0,0,0.46,0,0.46,0,0,0,0.151,0.227,0.075,0.075,0.075,0,2.482,12,139,0 0.35,0,0,0,1.41,0,0,0,0,0,0,1.76,0,0,0,0,0.35,0,0,0,0,0,0,0,1.06,1.06,0,0.7,0,0,0.35,0,0,0,0.7,0,1.06,0,0.7,0,0,0,0,1.41,0,0,0,0.35,0,0.104,0,0,0,0,2.108,24,213,0 0,0,0,0,0,2.94,0,0,0,0,0,0,0,0,0,0,0,0,2.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0.68,0,0,0,0,0,0,0,0,0,0,0.68,0,0,0,0.68,0,0,5.47,0,0,0,0,0,0.68,0,0.68,0,0,0,0,0,0,0,0,0.68,0,0,0,0,0,0,0,0,0.68,0,0,0,0,0.138,0,0,0,0,1.1,3,22,0 0,0,0,0,0.59,0,0,0,0,1.18,0,0.59,0,0,0,0,0,0,1.77,0,0,0,0,0,0.59,0.59,0,1.18,0,0,0,0,0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.215,0,0.107,0,0,2.741,11,85,0 0,0.55,0,0,0,0,0,0,0,0,0,1.67,0,0,0,0,0,0,2.79,0,1.67,0,0,0,0,0,0,0,0,0,0,0,0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.083,0,0,0,0,1.392,4,39,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.098,0,0,0,2.375,5,19,0 
0,0,0,0,0,0,0,0,1.81,0,0,0,0,0,0,0,0,0,7.27,0,1.81,0,0,0,0,0,1.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.316,0,0,0,0,1.125,2,9,0 0,0,0.16,0,0.83,0,0.16,0,0.16,0,0,0.66,0,0.66,0,0,0.16,0,0,0,0.16,0,0,0,3,0.83,0.33,0.5,0.16,0.16,0.5,0.16,0,0.16,0.5,0.16,0.5,0,0.16,0.16,0,0.66,0.33,0.16,0,0,0,0,0,0.162,0.04,0.02,0,0.02,2.604,28,758,0 0.33,0.33,0.99,0,0,0.66,0,0,0,0,0,0.33,0,0,0,0,0,0,2.65,0,0.33,0,0,0,1.99,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.33,0,0,0,0.33,0,0,0,0,0,0,0.051,0,0,1.786,28,134,0 0,0,0,0,0,0,0,0.08,0,0,0,0.08,0,0,0,0,0.08,0,0,0,0,0,0,0,0.08,0.08,0.08,0,0,0,0,0,0,0,0,0.42,0,0,0,0,0.08,0,0,0,0,0,0,0.34,0.081,0.451,0,0,0,0,1.833,18,935,0 0,0,0.2,0,0.6,0.2,0,0,0.1,0,0,0.5,0,0,0,0,0.1,0,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.1,0,0,0,0,0,0,0,0,0,0,0,0,0.049,0,0,0,0,1.133,10,263,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.7,0,0.85,0,0,0,0.85,0,0,0,0,0,0,0,0.85,0,0,0,0,0,0,0,0,0.85,0,0.85,0,0,0,0,0,0,0.138,0,0,0,1.228,4,43,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.25,2,5,0 0,0,0.23,0,0.92,0,0,0,0.23,0,0,0.92,0,0.92,0,0,0.23,0,0,0,0.23,0,0,0,0.23,0,0,0.23,0,0,0.23,0,0,0,0.23,0,0.23,0,0,0,0,0.92,0,0,0,0,0,0,0,0.13,0.026,0.026,0,0.026,2.222,23,480,0 0,0,0.33,0,0.08,0,0,0.16,0,0,0,1,0.08,0,0,0,0.25,0,0.16,0,0,0,0,0,2.68,0,0,0,0,0,0,0,0.08,0,0,0.08,0.08,0,0,0,0,0.25,0,0,0.16,0,0,0,0.134,0.089,0,0,0,0,2.432,24,557,0 0,0,0,0,0,0,0,0,0,0.54,0,0,0,0,0,0,0,0,1.62,0,0,0,0,0,1.62,1.08,1.08,1.62,0.54,0.54,0.54,0.54,0,0.54,0.54,0.54,0.54,0,0,0.54,0,0,0.54,0,0.54,0,0,0,0,0.559,0,0,0,0,3.039,13,155,0 0,0,0,0,0,0,0,0,0,0,0,3.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.5,0,0,1.538,8,20,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.33,0,0,0,0,0,0,0,0,0,3.714,16,26,0 0,0,0,0,0,0,0,0,0,2.56,0,0,0,0,1.28,0,0,0,0,0,0,0,0,0,3.84,3.84,0,0,0,0,0,0,2.56,0,0,0,1.28,0,0,0,0,0,1.28,0,0,0,0,0,0,0.194,0.194,0,0,0,3.631,17,69,0 
0,0,0,0,0,0,0,0,0,2.56,0,0,0,0,1.28,0,0,0,0,0,0,0,0,0,3.84,3.84,0,0,0,0,0,0,2.56,0,0,0,1.28,0,0,0,0,0,1.28,0,0,0,0,0,0,0.194,0.194,0,0,0,3.631,17,69,0 0,0,0,0,0,0,0,0,0,0,0,0.89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.583,8,38,0 0,0,0,0,0,0,0,0,0,4.34,0,2.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,7,0 0,0,0.23,0,0.46,0,0,0,0.23,0,0,0,0,1.39,0,0,0.23,0,0,0,0.69,0,0,0,0.46,0,0,0,0,0,0,0,0,0,0,0,0.23,0,0,0,0,1.86,0,0,0,0,0,0,0,0.113,0,0.09,0,0.203,2.43,121,666,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.333,11,13,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.16,0,0,0,0,0,0,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.16,0,0,0,0,0,0,1.458,0,0,1.066,2,16,0 0.08,0,0,0,0.08,0,0,0,0,0.08,0,0,0.08,0,0,0,0.08,0,0.08,0,0.08,0,0,0,0.16,0,0,0,0,0,0,0,0.16,0,0.24,0.16,0.08,0,0,0,0,0,0,0.24,0,0,0,0,0,0.085,0,0,0,0.007,4.858,60,2026,0 0.09,0,0.09,0,0,0.09,0,0.09,0.87,0,0,0.29,0,0,0,0,0,0,0.38,0,0.19,0,0,0,0.58,0.68,0.09,0,0,0,0,0,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.173,0.173,0.053,0,0.026,0,3.704,48,726,0 0,0,0.09,0,0.09,0,0,0.55,0,0.09,0,0.73,0.09,0,0,0,0.55,0,0.09,0,0,0,0.36,0.09,3.48,0,0,0,0,0,0.09,0,0,0,0,0.09,0.09,0,0,0,0,0,0,0.55,0,0,0,0,0.012,0.1,0,0,0.1,0,2.188,22,510,0 0.05,0,0.15,0,0.05,0.05,0,0,0.52,0,0,0.15,0,0,0.05,0,0,0.05,0.31,0,0.15,0,0,0,0.78,0.83,0.05,0,0,0,0,0,0.47,0,0,0,0,0,0,0,0,0,0.05,0.1,0.1,0,0,0,0.223,0.162,0.084,0,0.015,0,2.725,38,1150,0 0,0.24,0,0,0.24,0,0,0.24,0,0.49,0,0,0,1.49,0,0,0,0,0.99,0,0,0,0,0,0.49,0,0.24,0,0,0,0.24,0,0,0,0.24,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.163,0,9.876,235,1116,0 0,0,0.29,0,0.59,0,0,0,0.29,0,0,0,0,1.79,0,0,0.29,0,0,0,0.59,0,0,0,0.59,0,0,0,0,0,0,0,0,0,0,0,0.29,0,0,0,0,2.69,0,0,0,0,0,0,0,0.052,0,0.078,0,0.235,3.153,121,618,0 
0,0,0,0,0,0,0,0,0,0.46,0,1.84,0,0,0,0,0,0.46,1.38,0,0.46,0,0,0,1.84,1.38,0.92,0.92,0.46,0.46,0.92,1.38,0,1.38,0.92,0.46,0,0,0,0.92,0,1.38,0,0,0.46,0,0,0.92,0,0.362,0,0,0,0,4.153,34,162,0 0.67,0,0.22,0,0.45,0,0,0,0,0.22,0.45,1.12,0.22,0.22,0,0,0,0,1.12,0,1.35,0,0,0,2.03,0,0,0.45,0,0,0.22,0,0,0,0.45,0,0.22,0,0,0,0,0,0,0,0,0,0,0,0,0.072,0,0.072,0.072,0.036,3.242,38,347,0 0,0.33,0.16,0,1.15,0.33,0.16,0,0,1.32,0,0.16,0,0.16,0.16,0.99,0,0,2.8,0,2.31,0,0.33,0,0,0,0.16,0,0,0,0,0,0,0,0,0,0.49,0,0,0,0,0,0,0,0.33,0,0,0,0,0.126,0,0.076,0.076,0.025,3.401,37,364,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.75,4,14,0 0.53,0,0,0,0.88,0,0,0,0,0,0,0.71,0.35,0,0,0,0,0,1.06,0,1.06,0,0,0,2.13,1.06,0.17,0.17,0.17,0.17,0.17,0.17,0,0.17,0.17,0.17,0.53,0,0,0.17,0,0.71,0.17,0,0.53,0,0,0.35,0.052,0.131,0.026,0.026,0,0,2.941,34,353,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.25,2,5,0 0,0,0,0,0,0,0,0.79,0,0,0,0.79,0,0,0,0,0,0,0.39,0,0,0,0,0,0,0,0,0,0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.309,8,55,0 0.29,0,0.51,0,1.62,0,0,0,0,0,0,0.73,0.14,0,0,0.07,0.81,0,1.54,0,0.07,0,0,0,0.95,0,0,0,0,0.07,0,0,0,0,0,0.14,0.07,0.07,0,0,0,0.07,0,0,0.07,0,0,0,0,0.032,0,0,0.01,0,1.588,51,243,0 0,0,0,0,0,0,0,0,0,1.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.05,1.05,0,2.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.563,0,0,0,0,3.571,11,75,0 0,0,0,0,0,0,0,0,0,1.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.72,1.72,0,3.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.588,0,0.294,0,0,3.714,11,78,0 0.26,0,0.26,0,0,0,0,0,0,0,0,0.26,0.52,0,0,0,0,0,1.56,0,0,0,0,0,0.78,0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0.26,0,0,0,0,0,0,0,0,0.123,0.041,0,0.041,0,0,1.517,4,44,0 0.6,0,0,0,0.91,0,0,0,0,0,0,0.91,0.6,0,0,0,0,0,1.21,0,1.82,0,0,0,0.3,0.3,0,0,0,0,0,0,0,0,0,0,0.6,0,0,0,0,0.91,0,0,0.3,0,0,0.3,0.088,0.044,0,0.044,0,0,2.222,22,120,0 
0,0,0,0,0,0,0,0,0,0.81,0,0,0,0,0,0,0,0,0.81,0,0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.43,0,0,0,3.25,0,0,0,0,2.43,0,0,0,0.142,0,0.285,0,0,2.136,7,47,0 0,0,0.76,0,0.15,0,0,0,0,0.15,0,1.07,0,0,0,0,0,0,1.99,0,0.46,0,0,0,0.92,0.15,0,0.3,0,0,0,0,0,0,0,0,0.15,0,0,0,0,0,0,0,0,0,0,0,0,0.264,0,0,0,0.026,2.891,28,347,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.69,0,1.69,0,0,0,0,0,0,3.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.69,0,0,0,0.296,0,0,0,0,3.315,13,63,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.29,0,0,0,0,0,1.29,1.29,1.29,0,0,0,1.29,0,0,0,0,0,0,0,1.29,0,0,0,0,0,1.29,0,0,0,0,0.234,0,0,0,0,1.857,8,39,0 0,0,0,0,0,0,0,0,0,1.01,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.01,0,0,0,0,0,1.01,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.312,0,0,0,4.03,28,133,0 0,0,0,0,0,0,3.07,0,0,0,0,0,0,0,0,0,0,0,3.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.251,0,0,3.214,12,45,0 1.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.04,0,0,0,0,0,0,0,3.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.04,0,0,0,0,0,0,0,0.191,0,0,0,0,1,1,17,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,0,0,0,0,0,0,0,0,0,0.6,0,0,0,0.088,0,0,0,0,1.607,4,45,0 0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,0,0,0,0,0,0,0,0,0,3.57,3.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.545,3,17,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.09,0,0.19,0,0,0,0.09,0,0,0,0,0,0,0.09,0,0,0,0,0,0,0,0,0,0.046,0.341,0,0,0.031,4.413,28,1399,0 0,0,0,0,0,0.32,0,0.65,0,0,0,1.62,0,0.32,0,0,0,0.32,0,0,0,0,0,0,0.32,0.32,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.081,0,0,0,0,4.093,87,262,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.84,0,0,0,0,0,0,2.54,2.54,0,1.69,0,0,0,0,0,0,1.69,0,0,0,0.84,0,0,0,0,0,0.84,0,0,0.84,0,0.123,0,0.123,0,0.371,5.515,34,182,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.66,0,0,0,0,0,0,1.66,1.66,0,3.33,0,0,0,0,0,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0,1.66,0,0,0,0,0,0.53,4.052,22,77,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.333,3,8,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.666,3,10,0 0,0,0,0,0.78,0,0,0,0.78,0.78,0,0.78,0,0,0,0.78,0,0,1.56,0,0,0,0,0,0.78,0.78,0,1.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.544,0,0,0.136,0,2.62,11,76,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.42,0,0.94,0,0,0,0.47,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.94,0,0,0,0,0.079,0,0,0,0,2.315,17,88,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.85,0,0,0,0,0,3.7,1.85,1.85,1.85,1.85,1.85,1.85,1.85,0,1.85,1.85,1.85,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0.6,0,0,0,0,2.526,11,48,0 0,0,0,0,4.76,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,7.14,0,0,0,0,0,0,0,0,0,0,0,0,1.23,3,16,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.71,2.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.25,2,10,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.22,0,0,4.44,2.22,0,0,0,0,0,4.44,0,0,0,0,0,0,2.22,0,0,0,0,0,0,0,0,0,0,0,0,3.578,20,68,0 0.09,0.09,0.36,0,0.91,0.18,0,0,0,0,0,3.66,0.09,0,0,0,0.82,0,0.82,0,0.45,0,0,0,1.37,0.09,0,0,0,0.82,0,0,0,0,0,0.18,0,0,0.09,0,0,0,0,0,0,0,0,0,0.027,0,0,0,0,0,1.263,4,192,0 0,0,1.96,0,0,0,0,0,0,0,0,3.92,0,0,0,0,0,0,0,0,0,0,0,0,1.96,3.92,0,0,0,1.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.476,0,0,0,0,2.318,25,51,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.11,0,11.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.34,10.86,0,0,0,2.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.798,0,0,2.615,13,34,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.69,0,0,0,0,0,0,0,0,0,0,0,4.34,0,0,0,0,1.44,1.44,0,1.44,0,0,0,0,0,0.222,0,0,0,5.357,28,150,0 0.08,0.17,0.17,0,0.8,0.08,0,0.26,0,0,0,3.39,0.17,0,0,0.08,0,0,0,0,0,0,0,0,2.68,0,0,0,0,0,0,0,0,0,0,0.35,0.08,0,0,0,0,0.08,0.08,0,0,0,0,0,0.023,0.046,0,0,0.023,0,2.658,57,436,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.84,0,0,0,0,0,0,0,0,0,3.84,0,0,0,0,0,3.84,0,0,0,0,0,0,3.84,0,0,0,0,0,0,0,0,0,0,0,0.751,2.333,13,21,0 0,0,0.69,0,0,0,0,0,0,0,0,1.39,0,0,0,0,0,0,0,0,0,0,0,0,2.79,0,0,0,0,0,0,0,0.69,0,0,0,0,0,0,0,0,0,0,1.39,0,0,0,0,0,0,0,0,0,0,1.268,4,52,0 0,0,0,0,0,0,0,0.82,0,0,0,0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.41,0,0,0,0,0,0,0,0.41,0,0,0,0,0,0,0,0.41,0,0,0.41,0,0,0,0,0,0,1.394,12,53,0 0,0,0,0,0.31,0,0.31,0,0.31,0,0.31,0.31,0,0,0,0,0,0.31,0.63,0,0.63,0,0,0,0,0,0.95,0,0,0,0,0.31,0,0.63,0,0,0.31,0,0,0,0,0,0,0,0.63,0,0,0,0,0.255,0.102,0,0,0.255,3.547,46,259,0 0.07,0.07,0.07,0,0.14,0,0,0.43,0,0,0.14,1.43,0.07,0,0,0,0.93,0,0,0,0,0,0,0,4.3,0,0,0,0,0.07,0,0,0,0,0,0.43,0.14,0,0,0,0,0,0,0,0,0,0,0.14,0.056,0.094,0,0,0.028,0,2.394,24,881,0 0,0,0,0,0,0,0,0,0,0.72,0,0.72,0,0,0,0,0,0,4.37,0,0,0,0,0,1.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.107,0,0,0,1.48,9,37,0 0,0,0.32,0,0,0,0.32,0,0.32,0,0,0.65,0,0,0,0,0,0.32,0.98,0,2.63,0,0,0,0,0,0.32,0,0,0,0,0,0,0,0.98,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.128,5.633,118,338,0 0.9,0,0.9,0,0,0,0,0,0,0,0.9,0,0,0,0,0,0,0.9,0.9,0,0,0,0,0,1.81,1.81,0,0.9,0,0.9,0.9,0,0,0,0.9,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0.112,0,0.225,0,0,1.807,10,47,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.7,0,0,0,0,0,0,0,0,0,0,0,0,8.1,0,0,0,0,0,0,0,0,0,0.473,2.25,14,27,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.7,0,0,0,0,0,2.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.432,0,0,2,16,60,0 0,0.1,0,0,0,0,0,0.2,0,0,0,0.3,0,0,0,0,0.05,0.05,0.05,0,0,0,0,0,0.3,0.2,0,0.05,0,0.05,0,0,0.05,0,0,0.2,0.41,0,0,0,0,0,0,0.1,0.05,0.2,0,0.35,0,0.141,0,0,0,0,1.997,87,1620,0 0,0,0,0,0,0,0,0,0.6,0,0,0,0,0,0,0,0,0,0.6,0,1.21,0,0,0,3.63,1.21,1.21,0.6,0.6,1.81,0.6,0.6,0,0.6,0.6,0.6,0,0,0,0.6,0,0,0,0,0.6,0,0,0,0,0.132,0,0,0,0,4.536,52,186,0 0,0,0,0,0,0,0,0,0,0,0,0.74,0,0,0,0,0,0.74,1.49,0,0,0,0,0,4.47,2.23,0,0.74,0,0.74,0.74,0,0,0,0.74,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0.102,0.204,0.102,0,0.204,2.121,10,87,0 
0,0,0.91,0,0,0,0,0,0,0,0,1.83,0,0,0,0,0,0.91,1.83,0,0,0,0,0,1.83,0.91,0,0.91,0,0.91,0.91,0,0,0,0.91,0.91,0,0,0,0,0,0,0,0,0,0,0,0,0,0.232,0,0.116,0,0,1.619,10,68,0 0,0,0,0,0.57,0,0,0,0,0,0,1.71,0,0,0,0,0,0.57,0,0,0,0,0,0,1.71,0.57,0,0.57,0,0.57,0,0,0,0,0.57,0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0.077,0,0,0,0,1.947,12,111,0 0.22,0,0.22,0,0.45,0,0,0,0.22,0,0,0,0,1.35,0,0,0.22,0,0,0,0.67,0,0,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0.45,0,0,0,0,2.02,0,0,0.22,0,0,0,0,0.042,0,0.063,0,0.232,3.133,121,749,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,0,0,0,0,0,0,0,0,0,0,1.4,3,7,0 0,0,0.12,0,0.12,0,0,0.29,0.08,0.04,0,0.8,0.04,0,0,0,0.08,0,0.88,0,0.63,0,0.08,0,1.9,0.5,0,0.08,0,0.12,0.21,0,0,0,0.08,0,0.21,0,0.21,0,0,0,0,0.08,0,0,0,0.04,0.038,0.115,0,0.044,0.051,0,1.664,27,1263,0 0,0,0.24,0,0.49,0,0,0,0.24,0,0,0,0,1.49,0,0,0.24,0,0,0,0.74,0,0,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0.24,0,0,0,0,2.23,0,0,0,0,0,0,0,0.046,0,0.069,0,0.255,2.776,121,622,0 0,0,0,0,0.51,0,0,0,0,0,0,3.09,0,1.03,0,0,0.51,0,0,0,0,0,0,0,1.03,0.51,0,0,0,0.51,0,0,2.06,0,0,0,0,0,0,0,0,0,0,2.57,0,0,0,0,0,0,0,0,0,0,1.586,6,92,0 0,0,1.5,0,0,0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.116,0,0,0,0,1.833,8,22,0 0,0,1,0,1.5,0,0,0,0,1,0.5,2,0,0,0,0,0,0,6.5,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.145,0,0,1.342,14,51,0 0,0,0.77,0,0,0,0,0,0,0,0,0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.77,0,0,0,2.32,0,0,0,0,0,0,0,0,0,0,0.77,0,0.102,0,0.102,0,0,4.771,26,167,0 0,0,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.29,1.17,0,0,0,0,0,0.58,0,0.29,0.29,0,0,0,0,0.178,0,0.044,0,0,1.666,10,180,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.222,2,11,0 
0.01,0.01,0.13,0,0.13,0.01,0,0,0.01,0.13,0.03,0.45,0.03,0.07,0,0.11,0.53,0.07,0.07,0,0.03,0,0.01,0,0,0,0,0,0.01,0,0,0,1.57,0,0,0.11,0.86,0,0,0.03,0,0.03,0.03,0.01,0.01,0.23,0,0.15,0.008,0.111,0,0.002,0,0.01,2.106,58,3027,0 0,0,0,0,0,0,0,0,0,0,0,0.59,0,0,0,0,0,0,2.38,0,1.19,0,0,0,1.19,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.103,0,0,0,0,3.086,55,142,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,33.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0,0.78,0,0,0,0,0,0,0,0,1.57,0,0,0,0,0,0,3.14,0,0,0,0,0,0,0,0,1.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0.78,0,0,0,0.78,0,0,0,0.437,0,0.087,0,0,2.812,13,90,0 0,0.44,0,0,0,0,0,0,0,0.29,0,0.29,0,0,0,0,0.14,0,0,0,0.29,0,0,0,0.44,0,0,0,0,0.89,0,0,0,0,0,0,0.89,0,0,0,0.59,0,0.14,0,0,0.89,0,0.44,0.101,0.135,0.016,0,0,0,2.297,46,680,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.888,5,17,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.72,0,0,0,0,0,0,3.44,0,0,0,0,0,0,0,0,0,0.25,0,0,2.619,9,55,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.75,3,7,0 0,0,0,0,0.57,0,0,0,0,0,0,1.72,0,0,0,0,0,0.57,0,0,0,0,0,0,1.72,0.57,0,0.57,0,0.57,0,0,0,0,0.57,0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0.077,0,0,0,0,1.964,12,110,0 0,0,0,0,0,0,0,0,0,0,0,4.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,5,16,0 0,0,0,0,0,0,0,0,0,0,0,0.99,0,0,0,0.99,0,0,2.97,0,1.98,0,0,0,0.99,0.99,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.262,0,0,1.565,14,36,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.63,0,0,0,0,0,0,0,0,0,1.666,7,25,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.33,8.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.333,2,4,0 
0,0,0,0,0,0,0,0,0,0,0,0.72,0,0,0,0,0,0,1.45,0,2.18,0,0,0,1.45,0.72,1.45,0.72,0.72,0.72,0.72,0.72,0,0.72,0.72,0.72,0.72,0,0.72,0.72,0,0,0.72,0,0.72,0,0,0,0,0.367,0,0,0,0,1.897,12,74,0 0,0,0,0,0,0,0,0,0,0,0,0.58,0.58,0,0,0,0,0,1.17,0,2.35,0,0,0,1.17,0.58,1.17,0.58,0.58,0.58,0.58,0.58,0,0.58,0.58,0.58,0.58,0,0.58,0.58,0,0,0.58,0.58,0.58,0,0,0,0,0.301,0,0,0,0,1.76,12,81,0 0,0,1.47,0,0,0,0,0,0,0,0,0,0.73,0,0,0,0,0,3.67,0,0.73,0,0,0,1.47,0.73,0.73,0.73,1.47,0.73,0.73,0.73,0,0.73,0.73,0.73,0.73,0,0,0.73,0,0,0.73,0,0,0,0,0,0,0.363,0.121,0,0,0,2.171,12,76,0 0,0,0,0,0,0,0,0,0,1.41,0,0,1.41,0,0,0,0,0,1.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.7,0,0,0,0,0.246,0,0,0,0,1.56,6,39,0 0,0,2.5,0,0,0,0,0,0,0,0,0,2.5,0,0,0,0,0,2.5,0,2.5,0,0,0,0,0,0,0,2.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.222,3,11,0 1.04,0,0.52,0,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,2.09,0,0.52,0,0,0,2.09,2.61,1.04,0.52,0.52,0.52,0.52,0.52,0,0.52,0.52,0.52,0,0,0,0.52,0,0,0,0,1.04,0,0,0,0,0.309,0,0.309,0,0,3.973,34,151,0 0,0,0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.66,0,0,0,0,0,2,2.66,0,0.66,0,0,0,0,0,0,0.66,0,1.33,0,0.66,0,0,0,0.66,0,0,0,0,0,0,0.104,0.209,0.104,0,0,2.152,17,127,0 0,0,1.29,0,0,0,0,0,0,1.29,0,1.29,0,0,0,0,0,0,2.59,0,0,0,0,0,2.59,2.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.22,0,0,0,0,1.35,4,27,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0,0,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.67,0,0,0,2.71,0,0.67,0,0,0,0,0,0.67,0,0,0,0,4.4,0,0,0,0,0,0,0,0,0,0.555,3,14,348,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,6,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,0.4,0,0,0,0,0,0,0,0,0.4,0,0,0,0,0,0,0,0,0.4,0,0,0,13.93,0.81,0,0,0,0,0,0,0,0,0,0,0.4,0,0,0,0,0,0,0,0,0,0,0,2.053,1.932,0.06,0,0,0,6.113,20,593,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.01,0,0.5,0,0,0,4.02,2.01,1,0.5,0.5,0.5,0.5,0.5,0,0.5,0.5,0.5,0.5,0,0.5,0.5,0,0,0.5,0,0.5,0,0,0,0,0.176,0.088,0,0,0,2.319,12,109,0 0,0,0,0,0,0,0,0,0,0.37,0,0.75,0,0,0,0,0,0,2.63,0,0.75,0,0,0,0,0,0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.066,0,0,0,0,1.433,5,86,0 0,0,0.28,0,1.73,0,0,0,0,0,0,0.28,0.57,0.28,0,0,0,0,1.15,0,0.57,0,0,0,0.28,0,0.57,0,0,0.28,0,0,0,0,0,0,0,0,0,0,0,0.28,0,0.28,0.57,0,0,0,0,0.051,0,0.103,0,0,1.411,4,24,0 2.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.1,2,11,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.408,0,0,0,0,0,2.6,6,13,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.204,0,0,0,0,0,1.285,2,9,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.96,2.97,1.98,2.97,0.99,0.99,0.99,0.99,0,0.99,0.99,0.99,0,0,0,0.99,0,0,0.99,0,0.99,0.99,0,0,0,0.479,0,0.239,0,0,2.688,13,121,0 0,0,0,0,0,0,0,0,0.27,0,0,0.27,0,1.36,0,0,0.27,0,0.81,0,0.54,0,0,0,0.27,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0,0,0,1.09,0,0,0,0,1.91,0,0,0,0,0,0,0.23,2.521,31,517,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.123,0,0,2.6,16,26,0 0.48,0,0,0,0,0,0,0,0,0,0,0.96,0,0,0,0,0,0.48,0.96,0,0,0,0,0,2.88,0.96,0.96,0.96,0.48,0.96,0.96,0.48,0,0.48,0.96,0.96,0,0,0,0.48,0,0,0,0,0.48,0,0,0,0,0.276,0,0.138,0,0,1.986,11,147,0 0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,0,0,0,0,0,0,0,2.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.32,2.32,0,4.65,0,2.32,0,0,0,0,4.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.453,0,0,0,0,11.687,75,187,0 0.43,0,0.43,0,0,0.21,0,0,0,0.21,0,0.21,0.21,0,0,0,0,0,1.08,0,0.43,0,0,0,0.43,0.43,0,0.43,0,0.21,0,0,0,0,0.43,0,0,0,0,0.21,0,0,0,0,0,0,0.65,0,0.034,0.238,0.136,0,0,0,3.372,75,344,0 
0,0,0.93,0,0.93,0,0,0,0,0,0,0,0,0,0,0.93,0,0,2.8,0,0,0,0,0,0,0.93,0,0,0.93,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.93,0,0,0,0,0,0,1.771,5,62,0 0.42,0,0,0,0,0,0,0,0,0,0,0,0.85,0,0,0,0,0,4.25,0,0.85,0,0,0,0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.374,0,0,0.124,0,1.772,18,78,0 0,0,0.11,0,0.11,0,0,0.11,0,0,0,0,0.11,0.23,0,0,0.11,0,0,0,0,0,0.11,0,3.45,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0,0,0,0,0,0,0,0,0,0,0.047,0.157,0,0,0.078,0,2.351,28,508,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.75,0,1.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.35,0,0,0,0,0,0,0,0,0,1.913,6,44,0 0.39,0,0,0,0,0,0,0,0,0.39,0,0,0,0,0,0,0,0,2.39,0,0,0,0,0,5.57,1.59,1.19,0.39,0.39,1.19,0.39,0.39,0,0.39,0.39,0.39,0.39,0,0.79,0.39,0,0,0.39,0,0.39,0,0,0,0,0.104,0.052,0,0,0.052,3.153,57,246,0 0,0,0.15,0,0.3,0,0,0.15,0.15,0,0.15,2.76,0,0,0,0,0,0.46,1.69,0,0,0,0,0,0.46,0.15,0,0,0,0,0,0,0.15,0,0,0.15,0.15,0,0,0,0,0,0,0.15,0,0,0,0,0.023,0.023,0,0,0,0,2.677,58,415,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.08,0,0,0,0,0,7.6,2.17,2.17,1.08,1.08,1.08,1.08,1.08,0,1.08,1.08,1.08,1.08,0,0,1.08,0,0,1.08,0,0,0,0,0,0,0.364,0.182,0,0,0,2.421,13,92,0 0,2.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.23,1.23,0,1.23,0,0,0,0,0,3.7,2.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.23,0,0,0,0,0.392,0,0,0,0,2.142,10,75,0 0,0,0,0,0,0,0,0,0,0,0,2.4,0,0,0,0,0,0,1.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.2,0,0,0,0,0,0,2.4,0,0,0,0,0,0,0,0.166,0,0,0,0,2.2,22,55,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.35,0,0,0,0,0,1.17,0,1.17,2.35,0,0,0,0,1.17,0,1.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.61,0,0.203,0,0,2.541,15,61,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.25,4,18,0 0,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,2.916,7,35,0 0,0,0,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0,0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.97,0,0,0,0,0,0,0,0,0,0,0.97,0,0.1,0,0,0,0,2.59,69,386,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.272,0,0,1.75,7,14,0 0,0,0,0,0,0,0,0.13,0,0.13,0,1.1,0.13,0,0,0,1.24,0,0,0,0,0,0,0,3.17,0,0,0,0,0,0,0,0.69,0,0,0.27,0.41,0,0,0,0,0,0,0,0,0,0,0.13,0.07,0.07,0,0,0,0,2.064,23,322,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.337,0,0,0,0.337,2.95,7,59,0 0,0,0,0,0,0,0,0,0,0,0,1.44,0,0,0,0,0.48,0,0,0,0,0,0,0,0.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.058,0,0,0.058,0.058,1.755,9,79,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.04,0,0,0,0,0,0,0,0,0,0,0,0,0,2.04,0,2.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.286,0,0,2.434,17,56,0 0,0,3.79,0,0,0,0,0,0,0,0,3.79,0,0,0,0,0,1.26,0,0,0,0,0,0,3.79,2.53,0,1.26,0,1.26,1.26,0,0,0,1.26,1.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0.147,0,0.147,0,0,1.962,10,53,0 0,0,0,0,0.42,0,0,0,0,0,0,0.42,0,0,0,0,0,0,0.42,0,0,0,0,0,0,0,0,0,1.28,0,0,0,0,0,0,0,0.85,0,0,0,0,0.85,0,0,0,0,0,0,0,0,0,0,0,0,2.161,5,294,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.88,0,0,0,0,0,0,1.88,0,0,0,0,0,0,0,0,0,0.277,0,0,3,17,51,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.26,0,0.65,0,0,0,0,0,1.3,0,0,0,0,4.57,0,0,0,0,0,0,0,0,0,0.657,3.041,14,219,0 0,0.31,0,0,0,0,0,0.31,0,0,0,0.62,0,0,0,0,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.62,0,0.31,0,1.24,0,0.31,0,0,1.24,0,0,0,0.088,0.044,0,0,0,3.086,34,250,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,2,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.38,0,0,1.38,0,1.38,0,0,0,2.77,1.38,1.38,1.38,1.38,1.38,1.38,1.38,0,1.38,1.38,1.38,0,0,0,1.38,0,0,0,0,0,0,0,0,0,0.464,0,0,0,0,2.333,11,42,0 0,0,0,0,0,0,0,0,0,2.5,0,0,0,0,0,0,0,0,2.5,0,2.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.416,3,17,0 
0.09,0,0.19,0,0.09,0,0,0.39,0,0,0,1.27,0.19,0.09,0,0,0.49,0,0.29,0,0,0,0.29,0,2.74,0,0,0,0,0,0,0,0,0,0,0.29,0.19,0,0,0,0,0,0.09,0.09,0,0,0,0,0.067,0.067,0,0,0.026,0,2.247,18,481,0 0.44,0.22,0.22,0,0.44,0,0,0.22,0,0.22,0,0.44,0,0,0,0,0,0,1.57,0,0,0,0,0,0.44,0.22,1.12,0.22,0.22,0.22,0.22,0.22,0,0.22,0.22,0.22,0.22,0,0,0.22,0,0.22,0.22,0,0.67,0.44,0,0,0.033,0.169,0.033,0.033,0.033,0,2.28,12,203,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.342,0,0,0,0.342,2.75,7,55,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0.19,0,0,0,0.09,0,0.09,0.59,0,0,0,0.09,0.39,0,1.77,0,0.98,0,0.09,0,1.57,0.78,0,0,0,0.09,0.19,0,0.09,0,0.19,0.09,0.39,0,0.29,0.09,0,0,0,0.09,0,0,0,0.19,0,0.096,0.027,0.068,0,0,2.059,25,593,0 0,0,0.32,0,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0,0,0.055,0.334,0,0.055,0,0.055,1.685,6,59,0 0,0,0.91,0,0,0.45,0,0,0,0,0,0.45,0,0,0,0,0,0.45,2.28,0,1.36,0,0,0,0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.416,0,0.486,0,0,3.782,31,87,0 0.76,0,0,0,0,0,0,0,0,0,0,0.76,0,0,0,0,0,0,1.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.76,0,0,0.76,0,0,0,0.135,0,0,0,0,0,1.411,5,24,0 0,0.44,0.44,0,0.44,0,0.22,0,0,2.43,1.1,0.44,0,0,0,0,0,1.55,2.88,0,2.21,0,0,0,0,0,0.22,0,0,0,0,0,0,0,0,0,0,0,0,0.22,0,0,0,0,0,0,0,0,0,0.036,0,0.073,0.146,0.036,2.574,22,224,0 0,0,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.29,0,0,0,0,0,0,0.29,0,0,0,0.1,0.353,0.05,0,0,0,1.227,4,27,0 0.37,0.18,0.18,0,0.37,0,0,0.18,0,0.18,0,0.55,0,0,0,0,0,0,0.92,0,0,0,0,0,0,0,0.55,0,0,0,0,0,0,0,0,0,0.55,0,0.18,0,0,0.37,0,0,0.74,1.48,0,0,0.116,0.29,0.029,0.029,0.029,0,3.455,24,387,0 0.17,0.11,0.05,0,0.4,0.11,0,0.4,0,0,0,0.34,0.11,0,0,0,0,0,1.15,0,0.57,0,0.05,0,0.52,0,0,0,0,0,0,0,0.23,0,0.17,0,0.63,0,0,0,0,0,0,0.05,0,0,0,0,0.007,0.304,0,0.053,0.03,0,2.548,49,1134,0 
0,0,0,0,0.93,0,0,0,0,0,0,0.93,0,0,0,0,0,0,0,0,0,0,0,0,3.73,0,0,0,0,0,0,0,3.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.533,7,46,0 0,0,0,0,0,0,0,0,0,0,0,1.02,0,0,0,0,0,0,0,0,0,0,0,0,3.06,4.08,0,0,0,0,0,0,0,0,0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.217,0,0,0,0,1.718,12,122,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.19,0,1.19,0,0,0,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.082,0,0,0,0.216,3.478,7,80,0 0,0,0.85,0,0,0,0,0,0,0,0,0.85,0,0,0,0,0,0,2.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.85,0,0,0,0,0.85,0,0,0,0,0,0,0,0.331,0,0,1.842,6,35,0 0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,0,1.78,0,0,0,0,0,0,0,1.78,1.78,0,0,0,0,0,0,0,0,0,0,1.78,0,0,0,0,0,0,3.57,0,0,0,0,0,0,0,0,0,0,1.72,11,43,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.66,0,1.66,0,0,0,1.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.884,0,0,0,0.294,3.368,7,64,0 0,0.19,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0,0,0,0,2.86,0,0,0.38,0.19,0,0,0,0,0,0,0,0,0,0.19,0.19,0,0.201,0,0,0,0,2.217,9,204,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.33,4.16,4.16,4.16,4.16,4.16,4.16,4.16,0,4.16,4.16,4.16,0,0,0,4.16,0,0,0,0,0,0,0,0,0,1.092,0,0,0,0,3.333,11,30,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.341,0,0,0,0.341,3.166,7,57,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.337,0,0,0,0.337,2.95,7,59,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.19,0,1.19,0,0,0,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.082,0,0,0,0.216,3.478,7,80,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.5,4,9,0 0,0,0,0,0,0,0,0,0.55,0,0,0,0,0,0,0,0,0.55,1.65,0,1.65,0,0,0,0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.55,0,0,0.55,0,0,0,0,0,0,0.104,0.314,0,0.052,0,6.894,97,393,0 
0,0,0,0,0,0,0,0,0,0.29,0,0,0,0,0,0,0,0,0.58,0,0.58,0,0,0,3.51,2.34,0.87,2.34,0.58,1.17,0.58,0.58,0,0.58,1.17,0.58,0.29,0,0.87,0.58,0,0.87,0.29,0,0.58,0,0,0,0.091,0.637,0.045,0,0,0,3.552,37,373,0 2,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,2,0,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.888,29,53,0 0,0,1.58,0,0,0,0,0,0,0,0,0.79,0,0,0,0,0,0,1.58,0,0,0,0,0,0.79,0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.149,0,0.149,0,0,1.482,10,43,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.375,4,11,0 0,0,0,0,0.33,0,0,0,0,0,0,0.33,0,0,0,0.33,0,0.33,0.33,0,0.33,0,0,0,0.99,0.33,0,0.66,0,0.33,0,0,0,0,0.33,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0,0.13,0.043,0,0,0,2.016,19,125,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.325,0,0,0,0,0,1,1,14,0 0,0,0,0,0,0,0,0,0,0,0,0.24,0,0,0.24,0,0,0,0.49,0,0.49,0,0,0,1.72,1.23,0.24,0.24,0.24,0.24,0.24,0.24,0,0.24,0.24,0.24,0.24,0,0,0.24,0,0,0.24,0,0.24,0,0,0,0,0.312,0.039,0,0.117,0,1.89,13,189,0 0,0,0,0,0,0,0,0,0,0,0,0.73,0,0,0,0,0,0,0,0,0,0,0,0,8.08,5.88,0.73,0.73,0.73,0.73,0.73,0.73,0,0.73,0.73,0.73,0.73,0,0.73,0.73,0,0,0.73,0,0.73,0,0,0,0.388,0.259,0.129,0,0,0,2.666,13,96,0 0,0,0.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.15,0.62,0,0.31,0,0,0,1.56,0.31,0.93,0.15,0.15,0.15,0.15,0.15,0.46,0.15,0.15,0.15,0.31,0,0.31,0.15,0,0,0.31,0,0.31,0,0,0,0.078,0.235,0.052,0,0,0,1.945,12,323,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.03,0,0,0,0,0,0,0,3.03,0,0,0,0,0,0,0,0,0,3.03,0,3.03,0,0,6.06,3.03,0,0,0,0,0,0,0,0,0,0,0,2,12,42,0 0.12,0,0.12,0,0,0,0,0,1.11,0,0,0.37,0,0,0,0,0,0,0.49,0,0.24,0,0,0,0.61,0.74,0.12,0,0,0,0,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.119,0.17,0.034,0,0.034,0,3.237,32,505,0 0,0,0,0,0,0,0,0,0,0.69,0,0,0,0,0,0,0,0,0,0,0.69,0,0,0,0,0,0.69,0,0.69,0,0,0,0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.228,0.114,0,0,0.114,3.651,28,157,0 
0,0,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.56,0,0,0,0,0,1.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.18,0,0,0,0,1.5,4,63,0 0,0,0.31,0,0.31,0,0,0,0,1.27,0,0,0,0,0,0,0,0,0.31,0,0,0,0,0,2.87,1.27,1.91,0.63,0.63,0.63,0.63,0.63,0,0.63,0.63,0.63,0.95,0,0.95,0.63,0,0,0.95,0,0.95,0,0,0,0.097,0.534,0.242,0,0.048,0,2.23,13,261,0 0,0.16,0,0,0,0,0,0.16,0.16,0,0,0,0,0,0.16,0,0,0,0.48,0,0.16,0,0,0,0.81,0.48,0.16,0.32,0,0,0,0,0,0,3.4,0,0.16,0,0,0,0,0.48,0,0,0,0.32,0.16,0,0,0.123,0,0,0,0.095,4.438,50,932,0 0.18,0.14,0.25,0,0,0,0,0.07,0,0.14,0.03,0.77,0.07,0.03,0,0,0.03,0.18,0.11,0,0.25,0.07,0,0,0,0,0,0,0.03,0.11,0,0,0.03,0,0,0.37,0.62,0,0,0,0.18,0,0.03,0,0,0.22,0,0.18,0.019,0.414,0,0.004,0,0,2.393,40,1795,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,6.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.3,3,13,0 0.04,0.02,0.14,0,0.25,0.08,0,0.08,0.02,0.12,0,0.27,0,0,0.02,0,0.08,0.23,0.17,0,0.06,0.29,0,0,0,0,0,0.04,0,0,0,0,1.4,0,0,0.12,1.04,0,0,0,0.17,0.04,0,0.06,0.06,0.27,0,0.02,0.046,0.149,0.005,0.014,0,0.002,2.35,46,3006,0 0,0,0,0,0,0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.12,1.3,0,0,0,0,0,0,0,0,0,0.18,0.93,0,0.18,0,1.3,0,0,0,0,1.49,0,0,0.182,0.339,0.13,0,0,0,3.628,44,479,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,4,6,0 0,0,0,0,0,0,0,0,0,0,0,1.03,0,0,0,0,0,0,3.09,0,0,0,0,0,1.03,0,0,0,0,1.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.03,0,0,0,0,0,0,0,0,0,1.666,7,25,0 0.09,0,0.09,0,0.56,0.09,0,0,0,0.18,0,0.46,0,0.09,0,0,0.37,0,0.56,0,0.65,0,0,0,1.86,0.46,0,0.09,0,0.09,0.28,0,0,0,0.37,0,0.28,0,0.09,0,0,0.28,0,0.18,0,0,0,0,0,0.081,0,0,0,0,1.983,25,601,0 0,0,1.23,0,0,0,0,0,0,0,0,2.46,0,0,0,0,0,2.46,1.23,0,1.23,0,0,0,2.46,1.23,0,1.23,0,1.23,1.23,0,0,0,1.23,1.23,2.46,0,0,0,0,0,0,0,0,0,0,0,0,0.139,0,0.279,0,0,1.736,10,66,0 0,0,0.57,0,0,0,0,0,0,0,0,0.57,0,0,0,0,0,0,0.57,0,0.57,0,0,0,0.57,0,0,0,0,0,0,0,1.15,0,0,0,0,0,0,0,0,0,0,1.73,0,0,0,0,0,0.093,0,0,0,0,1.136,3,25,0 
0,0,0,0,0,0,0,0,0,0,0,0,1.08,0,0,0,0,0,2.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.08,0,0,0,3.26,0,0,0,0,3.26,0,0,0,0,0,0,0,0,3.066,10,46,0 0,4.16,0,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0.709,0,0,2.09,6,23,0 0,0,0.74,0,0,0,0,0,0,0,0,2.22,0,0,0,0,0,0,1.48,0,0,0,0,0,0,0.74,0,0,0,0,0,0,0,0,0,0,0.74,0,0,0,0,0,0,0,0,0,0,2.22,0,0,0,0,0,0,2.595,31,122,0 0,0,0,0,0,0,0,0,0.48,0,0.48,0,0,0,0,0.48,0.48,0,1.44,0,2.88,0,0.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0.076,0,0.305,0.381,0,1.884,9,98,0 0,0,0,0,0,0,0,0,0.48,0,0.48,0,0,0,0,0.48,0.48,0,1.44,0,2.88,0,0.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0.076,0,0.305,0.381,0,1.884,9,98,0 0,0,0,0,0,0,1.78,0,0,1.78,0,0,0,0,0,1.78,0,1.78,5.35,0,1.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7,35,63,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,2,0 0.15,0.31,0,0,0,0,0,0,0,0.63,0.31,0.31,0,0,0,0,0,0.63,0.95,0,0.47,0,0,0,3.34,0.63,0.47,0.15,0.15,0.15,0.15,0.15,0,0.15,0.15,0.15,0.47,0,0.47,0.15,0,0,0.31,0,0.15,0,0,0,0.149,0.199,0.049,0.174,0,0,4.026,100,608,0 0,0,0.43,0,0,0,0,0,0,0,0,0.43,1.29,0,0,0,0,0,1.29,0,0.43,0,0,0,0.86,0,0,0,0,0,0,0,0.43,0,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0.22,0,0.146,0,0,1.341,6,55,0 0,0.25,0.12,0,0.37,0,0,0.12,0,0.37,0.25,0.37,0.12,0,0,0,0.12,0,0.37,0,0.12,0,0.12,0,2.51,0,0,0,0,0.25,0,0,0.12,0,0,0,0.12,0,0,0,0,0,0,0,0,0,0,0,0.016,0.05,0,0.05,0,0,2.414,25,367,0 0,0,0.61,0,0,0,0,0,0,0,0,0.61,0,0,0,0,0,0,0.61,0,0,0,0,0,5.52,1.22,1.22,0.61,0.61,1.84,0.61,0.61,0,0.61,0.61,0.61,0,0,1.22,0.61,0,0,0,0,0.61,0,0,0,0,0.143,0,0,0,0,3.682,51,151,0 0,2.59,1.29,0,1.29,0,0,0,0,0,0,1.29,0,0,0,0,0,0,2.59,0,0,0,0,0,0,0,1.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.29,0,0,0,0,0,0,0,0,0,1,1,13,0 
0.33,0.33,0,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0,0.99,0.33,0,0.66,0,0,0,4.98,0,0,0,0,0,0,0,0,0,0,0,0.33,0,0.66,0,0,0,0,0,0,0,0,0,0.306,0.204,0,0.306,0,0,5.525,100,431,0 0,0,2.41,0,0,0,0,0,0.26,0,0,2.14,0,0,0,0,0,0,0.26,0,1.6,0,0,0,0.26,0.53,0,0,0.26,0,0,0,0.26,0,0,0,0,0,0,0.26,0,0,0,0,0,0,0,0,0,0.339,0,0,0,0,2.36,12,177,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.48,0,1.48,0,0.74,0,0,0,2.96,0,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.105,0,0,0.105,0.105,2.555,12,69,0 0.04,0.14,0.29,0,0.04,0.04,0,0.09,0,0.19,0.09,1.04,0,0,0,0,0,0.24,0.09,0,0.04,0,0,0,0.04,0,0,0,0,0.09,0,0,0,0,0,0.09,0.24,0,0,0,0,0,0.04,0,0,0,0,0,0.02,0.16,0.006,0,0,0,2.667,185,1763,0 0,0,0,0,0,0,0,0,0,0,0,7.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.666,4,16,0 0,0,0,0,0.82,0,0,0,0,0,0,1.65,0,0,0,0,0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.82,0,0,0,0,0,0,3.3,0,0,0,0,0,0,0,0,0,0,2.06,8,68,0 0.18,0,0.55,0,0.18,0,0,0,0,0,0,0.37,0,0,0,0,0,0,1.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.18,0,0,0,0,0,0,0,0.18,0,0,0,0.031,0.127,0.031,0,0,0,1.428,5,80,0 0,0,0,0,0,0.57,0,0,0,0,0,0,0,0,0,0,0,0,0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.57,0,0,0,0,0,0,0.57,0,2.31,0,0,0,0,0.089,0.179,0,0.089,0,0,2.204,10,97,0 0.37,0,0.63,0,0.25,0.12,0,0,0,0,0,0.12,0.12,0,0,0,0,0.12,1.51,0,0.25,0,0,0,0,0,0,0,0.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.37,0,0,0,0,0.107,0,0.193,0,0,1.181,4,104,0 0,0,0.1,0,0.1,0,0,0,0,0,0,0.1,0,0.1,0,0,0,0,0,0,0,0,0,0,0.4,0.1,0,0.1,0.2,0.2,0,0.1,0.7,0,0.1,0.1,0,0,0,0.1,0,0,0,0.1,0,0,0,0.6,0,0.096,0,0,0,0.012,2.037,18,913,0 0,0,0,0,1.38,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.94,0,0,0,0,0,0,0,0,0,0,0,0,6.94,0,0,0,0,0,0,0,0.238,0,0,0,0,1.578,4,30,0 0.51,0,0,0,0,0,0,0,0,0,0,0.25,0.51,0,0,0,0,0,2.3,0,1.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.25,0,0,0,0,0,0,0,0.25,0,0,0,0,0.333,0.047,0,0,0,1.196,5,67,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.67,0,1.35,0,0,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.087,0,0,0.087,0.087,4.23,24,110,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.191,0,0,0.095,0.095,1.688,11,103,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.09,2.27,2.27,2.27,2.27,2.27,2.27,2.27,0,2.27,2.27,2.27,0,0,0,2.27,0,0,0,0,0,0,0,0,0,0.664,0,0,0,0,3.157,11,60,0 0,0,0.74,0,0,0,0,0,0,0,0,2.22,0,0,0,0,0,0,1.48,0,0,0,0,0,0,0.74,0,0,0,0,0,0,0,0,0,0,0.74,0,0,0,0,0,0,0,0,0,0,2.22,0,0,0,0,0,0,2.425,23,114,0 0.12,0,0.12,0,0,0,0,0,1.12,0,0,0.37,0,0,0,0,0,0,0.49,0,0.24,0,0,0,0.62,0.74,0.12,0,0,0,0,0,0.37,0,0,0,0.12,0,0,0,0,0,0,0,0,0,0,0,0.12,0.189,0.034,0,0.034,0,3.302,41,535,0 0.08,0,0.16,0,0,0,0,0,0.82,0,0,0.24,0,0,0,0.08,0,0,0.32,0,0.16,0,0,0,0.49,0.57,0.08,0,0,0,0,0,0.74,0,0,0,0.16,0,0,0,0,0,0,0,0.08,0,0,0,0.221,0.188,0.044,0,0.033,0,2.816,32,628,0 0,0,0,0,0,0,0,0,0,0,0,1.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.2,0,0,0,0,0,0,0,1.2,0,0,0,0,0.202,0,0,0,0,1.533,5,23,0 0.12,0,0.12,0,0,0.06,0,0,0.56,0,0,0.31,0,0,0,0.06,0,0.06,0.25,0,0.18,0,0,0,0.63,0.69,0.06,0,0,0,0,0,0.82,0,0,0,0.63,0,0,0.06,0,0,0,0.06,0,0,0,0,0.187,0.16,0.035,0,0.017,0,2.829,47,815,0 0,0,0,0,0,0,0.49,0.99,0,2.48,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,3.48,2.48,0.49,0,0,0,0,0,0,0,0,0,1.99,0,0,0,0,0,0,0,0,0,0,0,0.336,0.588,0.168,0,0,0,5.61,42,331,0 0,0,0,0,0,0,0.49,0.99,0,2.48,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,3.48,2.48,0.49,0,0,0,0,0,0,0,0,0,1.99,0,0,0,0,0,0,0,0,0,0,0,0.336,0.588,0.168,0,0,0,5.61,42,331,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,1.5,3,6,0 0.31,0,0.31,0,0,0,0,0,0,0,0,0,0.31,0,0,0,0,0,1.24,0,0,0,0,0,0,0,0,0,0.31,0,0,0,0,0,0,0,0.31,0,0,0,0.31,0,0.31,0,0.31,0.31,0,0,0,0,0.051,0,0,0,1.409,12,62,0 0,0,0,0,0,0,0,0,0,0,0,1.11,0,0,0,0,0,0,0,0,0,0,0,0,1.11,1.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.183,0,0,0,0,1.8,4,36,0 0.4,0,0.4,0,0,0,0,0,0,0,0,0,0.4,0,0,0,0,0,1.61,0,0,0,0,0,0,0,0,0,0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.148,3,31,0 
0.69,0,0.69,0,0,0,0,0,0,0.69,0,0,0,0,0,1.38,0,0,1.38,0,1.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.378,0,0,3.315,37,126,0 0,0,0,0,0,0,0,0,0,2.38,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.442,0,0,2.125,10,17,0 0,0,0.73,0,0,0,0,0,0,0,0,1.47,0,0,0,0,0,0,1.47,0,0.73,0,0,0,0,0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.47,0,0,0,0,0,0,0.238,0,0,0,0,1.827,5,53,0 0,0,0.17,0,0,0.08,0,0,0,0,0.08,0.87,0.08,0.08,0,0,0.78,0,0,0,0,0,0,0,3.05,0,0.08,0,0,0,0,0,0.61,0,0,0.08,0.08,0,0,0,0,0,0,0,0,0,0,0,0.079,0.068,0,0,0.022,0,2.432,24,540,0 0,0,0,0,0,0,0,0,0,0,0,3.7,0,0,0,0,0,0,3.7,0,0,0,0,0,0,7.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.7,0,0,0,0.371,0,0,2.25,8,27,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.75,0,0,0,0,0,1.75,0.87,1.75,1.75,1.75,0.87,0.87,0.87,0,0.87,1.75,0.87,0,0,0,0.87,0,0,0,0,0.87,1.75,0,0,0,0.749,0,0.107,0,0,2.454,11,81,0 0.03,0.01,0.15,0,0.09,0.03,0,0.03,0.03,0.11,0,0.25,0.11,0.05,0.01,0.03,0.05,0.03,0.13,0,0.15,0,0.07,0,0,0,0,0,0,0,0,0,1.84,0,0,0.11,0.91,0,0,0.05,0.19,0.01,0.03,0.03,0,0.09,0,0.23,0.038,0.19,0,0.002,0.005,0,2.143,107,3168,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.086,0,0,1,1,3,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.13,0,2.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.13,0,0,0,0,0,0.194,0,0,0,1.909,5,42,0 0,0,0,0,1.47,0,0,0,0,0,0,0.73,0.73,0,0,0,0,0,0.73,0,0,0,0,0,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.94,0,0,0,0,0,0.73,0,0.276,0,0,0,0,1.379,4,40,0 0,0,1.61,0,0,0,0,0,1.61,0,0,0,0,0,0,0,0,0,4.83,0,0,0,0,0,0,0,0,0,0,3.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.769,8,23,0 0,0,1.31,0,0,0,0,0,0,0,0,2.63,0,0,0,0,0,0,5.26,0,1.31,0,0,0,1.31,0,0,0,0,1.31,0,0,0,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0,0,0,0.242,0,0,0,0,1.266,3,19,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.72,0,0.72,0,0,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.047,0,0,0,0.13,3.475,7,139,0 
0,0,0,0,0.44,0,0,0,0,0,0,0.88,0,0,0,0,0,0,1.32,0,0.44,0,0,0,1.76,1.32,0.88,0.44,2.64,0.44,0.44,0.44,0,0.44,0.44,0.44,0.88,0,0.88,0.44,0,2.64,0.88,0,0.88,0,0,0,0,0.146,0.073,0,0,0,1.955,13,133,0 0,0,0,0,1.75,0,0,0,0,0,0,5.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.75,0,0,0,0,0,0,0,0,0,0,0,0,1.75,0,0,0,0,0,0,0,0,0,0.955,0,0,1.5,5,24,0 0,0,0.94,0,0,0,0,0,0,0,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0.94,0,0,0,0,1.42,0,0,0,0,0,0.94,0,0,0,0,0,0,1.766,4,53,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.558,0,0,2,7,28,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,0,0,0,0.9,0,0,0,0,0,0,1.8,0,0,0,0,0,0,1.8,0,0.9,0,0,0,0,0,0,0,2.7,0,0,0,0,0,0,0,0,0,0,0,0,2.7,0,0,0,0,0,0,0,0,0,0,0,0,1.631,8,31,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0,2.77,2.77,1.38,2.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.38,0,0,0,0,0.355,0,0.355,0,0,2.666,12,64,0 0,0,0,0,0.96,0,0,0,0,0.48,0,0.48,0,0,0,0,0.48,0,1.93,0,0,0,0,0,0.96,0.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.223,0,0,0,0,0,1.375,5,55,0 0,0.22,0.33,0,0.22,0.11,0,0,0,0,0.11,0.44,0,0,0,0,0,0,0.44,0,0.11,0.11,0,0,0.11,0.11,0,0,0,0,0,0,0.11,0,0,0,0,0,0.11,0,0,0,0,0,0.66,0,0,0,0.019,0.253,0,0,0,0,2.068,11,395,0 0,0,2.43,0,0,0,0,0,0.27,0,0,2.16,0,0,0,0,0,0,0.27,0,1.62,0,0,0,0.27,0.54,0,0,0.27,0,0,0,0.27,0,0,0,0,0,0,0.27,0,0,0,0,0,0,0,0,0,0.344,0,0,0,0,2.319,12,167,0 0,0,0,0,0,0,0,0,0,0,0,1.48,0,0,0,0,0,0,0,0,0,0,0,0,2.22,0.74,0,0,0,0.74,0,0,0,0,0,0,1.48,0,1.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.228,53,148,0 0,0.18,0,0,0,0,0,0,0,0,0,0.18,0,0,0,0,0,0,0,0,0,0,0,0,0.18,0,0,0,0,0,0,0,2.8,0,0,0.37,0.18,0,0,0,0,0,0,0,0,0,0.18,0.18,0,0.187,0,0,0,0,2.141,9,212,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.787,0,0,1.875,7,15,0 
0,0,1.81,0,0,0,0,0,0,0,0,3.63,1.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.81,0,0,0,0,0,0,0,0,0,0,0,0,1.81,0,0,0,0,0,0,0,0.3,0,0,0,0,1.652,8,38,0 0,0,0,0,4.16,0,0,0,0,0,0,4.16,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0,0.689,0,0.689,0,0,1.3,4,13,0 0,0,0,0,1.43,0,0,0,0,0,0,0,0,0,0,0,0,0,1.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0.71,0,0,0,0,0,0,0,0,2.15,0,0,0,0,0,0,0,0.138,0,0,0,0,1.863,5,41,0 0,0,0,0,0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,1.55,0,0.77,0,0,0.77,0,0,0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.55,0.77,0,0,0,0.49,0,0.196,0,0,3.16,10,79,0 0,0,0,0,3.07,0,0,0,0,0,0,4.61,0,0,0,0,0,0,0,0,1.53,0,0,0,0,0,0,0,6.15,0,0,0,0,0,0,0,0,0,0,0,0,6.15,0,0,0,0,0,0,0,0,0,0,0,0,1.529,4,26,0 0.29,0.58,0.29,0,0.29,0,0,0.29,3.23,0.88,0.29,0.88,0,0,0,0,0,0.88,1.76,0.29,2.64,0,0,0,0.29,0.29,0.29,0,0,0,0,0,0,0,0,0,0.29,0,0,0,0,0,0,0,0,0,0,0,0.075,0.113,0,0.113,0.265,0.113,2.285,16,208,0 0,0,0,0,0,0,0,0,0,0,0,3.44,0,0,0,0,0,0,1.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.062,8,33,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.777,14,25,0 0,0,0,0,0,0,0,0,0,0,0.61,0.61,0,0,0,0,0,0,0,0,0,0,0,0,1.85,0,0,0,0,0.61,0,0,0.61,0,0,0.61,0.61,0,0,0,0,0.61,0,0,0,0,0,0,0.179,0,0,0,0,0,1.24,6,67,0 0,0,0.26,0,0,0,0,0,0,0,0.26,0,0,0,0,0,0,0.26,0,0,0,0,0,0,0.26,0,0,0,0,0,0,0,0,0,0,0,0.53,0,0,0,0,1.06,0,0.26,0,2.4,0,0,0.036,0.109,0,0,0.036,0,1.632,11,307,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.027,0,0,0,0,1.567,6,428,0 1.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.29,2.19,0,1.09,0,0,0,0,0,0,1.09,0,0,0,0,0,0,0,0,0,1.09,0,0,0,0,0.353,0,0,0,0,2.304,10,53,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.386,0,0,1.6,4,16,0 
0,0,1.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.41,0.97,0.48,0.48,0,0,0,0,0,0,0.48,0,0.48,0,0,0,0,0,0.48,0,0.97,0,0,0,0.471,0.55,0,0.078,0,0,2.552,16,171,0 0,0,0.08,0,0.17,0,0,0.08,0.08,0,0,0.43,0.08,0,0,0,0,0,0,0,0,0,0.08,0,3.54,0,0,0,0,0,0,0,0,0,0,0.77,0.17,0,0,0,0,0.08,0,0.17,0,0,0,0.17,0.08,0.045,0,0,0.011,0,2.45,25,566,0 0,0,2.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.25,1.5,0,0.75,0,0,0,0,0,0,0.75,0,0,0,0,0,0,0,0,0,0.75,0,0,0,0,0.369,0,0,0,0,2.032,10,63,0 0,0,0,0,0,0,0,0,0,0,0,0,2.56,0,0,0,0,0,2.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.56,0,0,0,0,0,2.56,0,0,0,0,0,0,0.473,0,0,2.454,15,27,0 0,0,0,0,0,0,0,0,0,0,0,0,1.01,0,0,0,0,0,2.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.01,0,2.02,0,0,0,1.01,0,2.02,1.01,0,0,0,0,0.188,0.376,0,0,2.31,15,67,0 0,0,1.06,0,1.06,0,0,0,0,0,0,1.06,0,0,0,0,0,0,4.25,0,0,0,0,0,0,0,1.06,0,0,0,0,0,1.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.186,0,0,1.25,4,25,0 0,0,0,0,0.54,0,0,0,0,1.63,0.54,0.54,0.54,0,0,0,0,0,2.18,0,1.09,0,0,0,1.09,0.54,0,0,0,0,0,0,0,0,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.09,0.09,0,0,0,1.969,16,65,0 0,0,0,0,0,0,0,0,0,0,0,1.09,0,0,0,0,0,0,2.19,0,0,0,0,0,0,0,2.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.409,11,53,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.33,0,0,0,0,0,0.68,0.68,0,0,2.9,18,29,0 0,0,0,0,0,0,0,0.56,0,0,0,0,0,0,0,0,0,0,1.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.12,0,0,0,0,0,1.12,0.56,0,0,0,0.181,0.09,0.181,0,0,4.5,34,153,0 0,2.12,0,0,0,0,0,0,0,2.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.3,4,13,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.88,0,0,0,0,0,1.88,0,0,0,0,0,0,0.366,0,0,2,15,28,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.29,0,0,0,0,0.229,0,0.114,0,0,1.8,17,36,0 
0.39,0,0,0,0.78,0.39,0,0,0,0,0,0.39,0,0,0,0,0,0,0.39,0,0.39,0,0,0,3.14,0.39,1.18,0.39,0.39,0.39,0.39,0.39,0.39,0.39,0.39,0.39,0.78,0,0.78,0.39,0,1.96,0.78,0,0.78,0,0,0,0.645,0.581,0,0.129,0,0,2.895,16,249,0 0.05,0,0,0,0,0.1,0,0,0,0.1,0.05,0.48,0,0,0.05,0.21,0.1,0,1.62,0.05,1.08,0,0.21,0.05,2.05,0.48,0.05,0.16,0,0.16,0.27,0,0,0,0.21,0,0.27,0,0.16,0,0,0,0,0,0.05,0,0,0.1,0,0.289,0.015,0.062,0.046,0,2.007,32,1026,0 0.06,0,0,0,0,0.12,0,0,0,0.12,0,0.19,0,0,0.06,0.19,0.12,0,1.74,0.06,1.23,0,0.25,0.06,2.26,0.38,0.06,0.19,0,0.19,0.32,0,0,0,0.25,0,0.32,0,0.19,0,0,0,0,0,0.06,0,0,0.12,0,0.33,0.018,0.064,0.055,0,2.024,25,897,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.97,0,0.97,0,0,0,1.94,0.97,0,2.91,0,0,0,0,0,0,1.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.141,0,0,0,0,3.178,15,89,0 0,0,0.85,0,1.36,0,0,0,0,0.17,0,0.34,0.17,0,0,0,0,0,0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.034,0,0,0,0,0,1.085,3,89,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.7,0,0,3.7,0,0,3.7,0,0,0,0,0,0.689,0,0,0,1.888,5,17,0 0,0,0,0,0,0,0,0,0,0,0,1.49,0,0,0,0,0,0,0,0,0,0,0,0,2.23,0.74,0,0,0,0.74,0,0,0,0,0,0,1.49,0,1.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,45,140,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.2,17,26,0 0,0,0,0,1.92,0,0,0,0,0,0,2.88,0,0,0,0,0,0,0,0,0,0,0,0,0.96,0.96,0,0,0,0,1.92,0,0,0,0.96,0,0.96,0,0.96,0,0,0,0,3.84,0,0,0,0,0,0,0,0,0,0.161,2.307,14,90,0 0,0,0,0,0,0,0,0,0,0.45,0,0,0,0,0,0,0,0,0.45,0,1.35,0,0,0,1.35,1.35,1.35,1.35,0.9,0.45,0.45,0.45,0,0.45,1.35,0.45,0.45,0,0.45,0.45,0,0.45,0.45,0,0.45,0,0,0,0,0.358,0.43,0,0,0.071,2.236,12,161,0 0,0,0.36,0,0.73,0,0,0,0,0,0,0.36,0.18,0,0,0.36,0,0,1.28,0,0.36,0,0,0,0.36,1.28,0,0,0,0,0,0,0,0,0,0,0.36,0,0,0,0,0,0.18,0,0.18,0,0,0,0.027,0,0,0.055,0,0,3.176,51,270,0 1.03,0,0,0,1.03,0,0,0,0,0,0,0,0,0,0,0,0,0,1.03,0,1.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.09,0,0,0,0,0,0,0,0.185,0,0.37,0,0,2.277,11,41,0 
0.72,0,0,0,0,0,0,0,0,0,0,1.45,0,0,0,0,0,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.72,0,0,0,0,0,0,1.407,6,38,0 0,0,0,0,0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0.82,0,0,0,0,0,4.13,2.47,1.65,0.82,0.82,0.82,0.82,0.82,0,0.82,0.82,0.82,0,0,0,0.82,0,0,0,0,0.82,0,0,0,0,0.361,0,0.24,0,0,4.666,34,126,0 0,0,0.34,0,0.34,0,0,0,0,0,0,0.34,0.34,0,0,0,0,0,0.34,0,0.34,0,0,0,0.34,0.69,0,0,0,0,0,0,0,0,0,0.34,1.04,0,0,0,0,0,0.34,0,0,0,0,0,0,0.149,0,0,0,0,2.35,14,188,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.78,0,0,0,0,0,0,1.78,0,0,0,0,0,0,0,0,0,0.24,0,0,2.833,12,68,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.78,0,0,0,0,0,1.78,1.78,0,0,0,0,0,0,3.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.5,9,30,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.9,0,0,0,0,0,0.9,0,0,0,0,0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.14,0,0,0,3.472,28,125,0 0,0,0,0,0,0,0,0,2.29,0,0,0.76,0,0,0,0,0,0,0,0,0,0,0,0,5.34,1.52,1.52,0.76,0.76,2.29,0.76,0.76,0,0.76,0.76,0.76,0,0,0.76,0.76,0,0,0,0,0.76,0,0,0,0,0.157,0,0,0,0,4.242,52,140,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.22,0,0,0,0,0,0,0,0,0,1.5,4,18,0 0.97,0,0.97,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.91,0,0,0,0,0,0,0,0.97,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.166,0,0,2.185,11,59,0 0,0,0,0,0,0,0.46,0,0,0,0,0.46,0,0,0,0,0,0,0.46,0,0,0,0,0,0.46,0,0.92,0,0,0,0,0,2.3,0,0,0,0.92,0,0.92,0,0,0,0.92,0,0.46,0,0,0,0.163,0.163,0,0.163,0,0.081,2.343,13,150,0 0,0,0,0,0.54,0.54,0,0,0,0,0,1.09,0,0,0,0,0,0,1.63,0,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.54,0,0,0,0,0,0,0.299,0.199,0,0,0,0,1,1,14,0 0,0.07,0.14,0,0.14,0.07,0,0,0,0,0,1.34,0.07,0.14,0,0,0.63,0,0.14,0,0,0,0.07,0,3.03,0,0,0,0,0,0,0,0,0,0,0.07,0.21,0,0,0,0,0,0,0,0,0,0,0,0.084,0.177,0,0,0,0,2.25,26,855,0 0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.5,9,21,0 
0,0,0,0,0,0,0,0,0,0,0,0.99,0,0,0,0,0,0,0,0,0,0,0,0,2.97,3.96,0,0,0,0,0,0,0,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.21,0,0,0,0,1.736,12,125,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.49,0,4.47,0,0,0,0,0,1.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,16,0 0.53,0,0,0,0,0,0,0,0,0,0,1.07,0,0,0,0,0,0,0.26,0,0.26,0,0,0,1.61,0.8,1.88,0.53,0.53,0.53,0.53,0.53,1.88,0.53,0.53,0.53,0.8,0,0.8,0.53,0,0,0.8,0,0.8,0,0,0,0,0.412,0,0.091,0,0,2.225,12,227,0 0,0,0,0,0,0,0,0,0,0.93,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.93,0,0,0,0,0,0.93,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.475,0.158,0,0,0,4.393,33,145,0 0.58,0,0,0,0,0,0,0,0,0,0,0.58,0,0,0,0,0,0,0.58,0,0,0,0,0,1.76,1.17,1.76,0.58,0.58,0.58,0.58,0.58,1.76,0.58,0.58,0.58,0.58,0,0.58,0.58,0,0,0.58,0,0.58,0,0,0,0,0.414,0,0.103,0,0,2,12,94,0 0.31,0.31,0.94,0,0,0.62,0,0,0,0,0,0.31,0,0,0,0,0,0,2.83,0,0.31,0,0,0,1.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0,0.31,0,0,0,0,0,0,0.096,0,0,2.368,45,180,0 0.12,0,0.12,0,0.12,0.12,0,0,1.08,0,0,0.36,0,0,0,0,0,0,0.48,0,0.24,0,0,0,0.6,0.72,0.12,0,0,0,0,0,0.36,0,0,0,0.12,0,0,0,0,0,0,0,0,0,0,0,0.117,0.151,0.033,0,0.033,0,4.134,78,645,0 0.05,0,0.1,0,0,0.1,0,0.05,0.49,0,0,0.27,0,0,0,0,0.38,0,0.21,0,0.1,0,0,0,0.49,0.54,0.05,0,0,0,0,0,0.38,0,0,0.38,0.21,0,0,0,0,0,0,0,0,0,0,0,0.308,0.136,0.078,0,0.014,0,3.715,107,1386,0 0,0,0,0,0,0,0,0,0,0,0,1.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.216,0,0.216,0,0.216,2.166,6,39,0 0.75,0,0.37,0,0,0,0,0,0,0,0,0,0.37,0,0,0,0,0,1.12,0,1.87,0,0,0,0.75,0.37,1.87,0.37,0.37,0.37,0.37,0.37,0,0.37,0.37,0.37,0.75,0,0.37,0.37,0,0,2.63,0,0.75,0,0,0,0,0.305,0,0.061,0,0,1.903,13,118,0 0,0,0,0,0,0,0,0,0,0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.2,2.2,1.47,2.2,1.47,0.73,0.73,0.73,0,0.73,2.2,0.73,0.73,0,0.73,0.73,0,0.73,0.73,0,0.73,0,0,0,0,0.555,0.666,0,0,0.111,2.351,12,127,0 0.68,0,0,0,0,0,0,0,0,0,0,0,0.68,0,0,0,0,0,1.37,0,2.06,0,0,0,0,0,1.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.44,0,0,0,0,0,0,0.332,0,0,0,0,1.125,2,18,0 
0,0,0,0,0,0,0,0,0,1.12,0,0,0,0,0,0,0,0,1.12,0,1.12,0,0,0,0,0,1.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.24,0,0,0,0,0,0,0.203,0,0.203,2.222,20,40,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.34,0,0,0,8.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.125,6,17,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,4,0 0,0,0,0,0,0,0,0,0,0.57,0,0,0,0,0,0.57,0,0,2.87,0,4.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.14,0,0,0,0,0.84,0,0,0,0.076,3.583,31,129,0 0.24,0,0.49,0,0,0,0,0,0,0.24,0,0.24,0.24,0,0,0,0,0,1.23,0,0.24,0,0,0,0.24,0.24,0,0.49,0,0.24,0,0,0,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0.74,0,0.029,0.119,0.119,0,0,0,3.574,75,336,0 0,0,0,0,0,0,0,0,0,0,0,3.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.22,0,0,0,0,0,0,0,0,0,0,0,0,0.564,0,0,1.818,9,20,0 0,0,0,0,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.49,0,0,0,0.49,0,0.49,0,0,0,0,0.195,0,0.097,0,0,2.3,18,69,0 0,0,0,0,0,0,0,0,0,0,0,0.68,0,0,0,0,0.68,0,0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.04,0,0,0,0,0,0,0,3.4,0,0,0,0.68,0,0.086,0,0,0,0,1.41,5,79,0 0,0,2.91,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.389,0,0.389,0,0,1.26,3,29,0 0,0,0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.66,0,0,0,0,0.231,0,0.231,0,0,1.761,17,37,0 0,0,0.79,0,0,0,0,0,0,0,0,1.58,0,0,0,0,0,0,2.38,0,0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.137,0,0,0,0,0,1.09,3,24,0 0,0,0,0,0,0,0,0,0,0,0,1.04,0,0,0,0,0,0,5.2,0,0,0,0,0,0,0,1.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.195,0,0,0,0,1.071,2,15,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,1.28,0,0,0,1.28,0,0,0,0,1.28,0,0,0,0,1.28,0,0,0,0,0,2.56,1.28,1.28,1.28,1.28,1.28,1.28,1.28,0,1.28,1.28,1.28,0,0,0,1.28,0,0,0,0,0,0,0,0,0,0.398,0,0,0,0,2.21,11,42,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.84,0,0,2.54,0,0,0,1.69,0.84,1.69,1.69,0,0.84,0,0,0,0,0.84,0,0,0,0,0,0,0,0,0,0.84,0,0,0,0,0,0,0,0,0,1.777,11,64,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.51,0,0,0,0,0,1.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.51,0,0,0,0.3,0,0,0,0,0,1.611,5,29,0 0.48,0,0,0,0.48,0,0,0,0,0,0,0,0.48,0,0,0,0,0,4.39,0,0,0,0,0,0.48,0,0.48,0,2.92,0,0,0,0,0,0,0,0,0,0,0,0,0.97,0,0,0,0,0,0,0,0.085,0,0,0,0,1.275,3,37,0 0.12,0,0.25,0,0,0,0,0.38,1.28,0,0,0.38,0,0,0,0,0,0,0.51,0,0.25,0,0,0,0.64,0.76,0.12,0,0,0,0,0,0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.054,0.162,0.036,0,0.036,0,3.167,32,491,0 0.08,0.08,0.25,0,0,0.25,0,0,0.76,0,0,0.25,0,0,0,0,0,0,0.33,0,0.16,0,0,0,0.5,0.59,0.08,0,0,0,0,0,0.42,0,0,0.25,0.08,0,0,0,0,0.08,0,0,0,0,0,0,0.148,0.136,0.045,0,0.022,0,3.995,55,807,0 0,0,0,0,0,0,0,0,0,0,0,0.89,0,0,0,0,0,0,3.57,0,2.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.375,23,38,0 0.24,0,0.12,0,0,0.12,0.24,0,0,0,0,0.37,0,0,0,0,0,0,0.86,0,0.24,0,0,0,1.24,0.62,0.49,0.24,0.24,0.24,0.24,0.24,0.37,0.24,0.24,0.24,0.24,0,0.24,0.24,0,0.12,0.24,0.86,0.24,0,0,0,0.018,0.297,0.055,0,0,0,1.801,13,227,0 0,0,0,0,0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,1.5,0,0,0,0,0,3.75,3,2.25,0.75,0.75,0.75,0.75,0.75,0,0.75,0.75,0.75,0.75,0,0.75,0.75,0,0.75,0.75,0,0.75,0,0,0,0,0.222,0,0,0,0,1.833,12,77,0 0.1,0,0.21,0,0,0,0,0.21,0.31,0.1,0,1.06,0.21,0,0,0.1,0.21,0,0,0,0,0,0.21,0,3.5,0.1,0,0,0.1,0.1,0,0,0,0,0,0.21,0.21,0,0,0.1,0,0,0,0.21,0,0,0,0,0.043,0.143,0,0,0.057,0,2.409,23,571,0 0,3.68,0,0,0,0,0,0,0,0,0,0.61,0,0,0,0,0,0,0.61,0,1.22,0,0,0,1.22,0.61,3.06,0.61,0.61,0.61,0.61,0.61,0,0.61,0.61,0.61,1.84,0,0.61,0.61,0,0,1.84,0,1.84,0,0,0,0,0.189,0.094,0,0,0.094,2.283,13,169,0 0,0,0,0,0,0,0,0,0,0,0,3.12,0,0,0,0,0,0,1.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.56,0,0,0,0,0,0,0,0,0.27,0,0,1.5,4,30,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.33,0,1.33,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.211,0,0,0.211,0,0,1.38,4,29,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.94,0,0,0,0.48,0,0.48,0,0,0,0,0,0,0,0,0,0,0.057,0,0,0,0,6.526,83,248,0 0.51,0,0,0,0,0,0,0,0,0,0,1.54,0,0,0,0,2.06,0,0.51,0,0,0,0,0,3.6,2.06,0,0,0,0.51,0,0,0,0,0,0.51,0,0,0,0,0,0,0,0,0.51,0,0,0,0,0,0,0,0,0,1.574,4,74,0 0,0,0,0,0,0,0,0,0,0,0,0.89,0,0,0,0,0,0,0.89,0,0,0,0,0,1.78,1.78,0,0.89,0,0,0,0,0,0,0.89,0.89,0,0,0,0,0,0,0,0,0,0,0,0.89,0,0.537,0,0,0,0.268,2.292,12,94,0 0,0,0,0,0,0.78,0,0,0,0,0,0.78,0,0,0,0,0,0,0.78,0,0,0,0,0,0.78,0.78,0,0.78,0,0,0,0,0,0,0.78,0.78,0,0,0,0,0,0,0,0.78,0,0,0,0,0,0.451,0,0,0,0.112,2.714,22,133,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.29,0,0,0,0,0,1.29,6.49,0,0,0,1.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.19,0,0.19,0,0,1.857,4,26,0 0,7.01,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.75,0,1.75,0,0,0,0,0,3.5,0,0,0,0,0,0,0,0,0,1.75,0,0,0,0,0,1.75,0,1.75,0,0,0,0,0,0,0,0,0.286,1.826,13,42,0 0,5.47,0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0,0,1.36,0,2.73,0,0,0,0,0,2.73,0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0,1.36,0,1.36,0,0,0,0,0,0,0,0,0.232,2.035,13,57,0 0,0,0,0,0.87,0,0.87,0,0,0,0,0,0,0,0,2.63,0,0.87,2.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.798,0.159,0,18.454,136,203,0 0,0,0,0,0,0,0,0,0,0,0,1.4,0,0,0,0,0.7,0,1.4,0,1.4,0,0,0,0,0,0.7,0,0,0,0.7,0,0,0,0,0,0,0,0,2.11,0,0,0,0,0,0,0,0,0,0.266,0.066,0,0,0,18,200,378,0 0.3,0,0.15,0,0,0.15,0.3,0,0,0,0,0.3,0,0,0,0,0,0,0.75,0,0.3,0,0,0,0.75,0.3,0.3,0.15,0.15,0.15,0.15,0.15,0.45,0.15,0.15,0.15,0.15,0,0.15,0.15,0,0,0.15,0.75,0.15,0,0,0,0,0.328,0.046,0,0,0,1.703,12,155,0 0.41,0,0.41,0,1.25,0,0.41,0,0,0.2,0,1.04,0.2,0,0,0.41,0.41,0,3.96,0,2.29,0,0.2,1.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0,0.83,0,0,0,0,0.069,0,0.866,0.103,0,5.052,214,485,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.32,0,0.32,0,0,0,0.32,0,0,0,0,0,0,0.32,0,0,0,0,0,0,0,0,0,0.54,0.108,0,0,0.054,3.787,28,375,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.92,0,0,0,0,0,0,0,5.88,0,0,0,0,0,0,0,0,0,1.96,0,1.96,0,0,0,0,0,1.96,0,0,0,0,0,0,0,0,0,1.785,6,25,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.87,0,0,0,0,0,0,0,7.31,0,0,0,0,0,0,0,0,0,2.43,0,0,0,0,0,0,0,2.43,0,0,0,0,0,0,0,0,0,1.461,5,19,0 0,0,0,0,0,0,0,0,0,0,0,0,2.22,0,0,0,0,0,4.44,0,0,0,0,0,0,0,8.88,0,0,0,0,0,0,0,0,0,2.22,0,0,0,0,0,0,0,2.22,0,0,0,0,0,0,0,0,0,1.3,5,26,0 0,0,0.7,0,0,0.14,0,0,0.28,0,0,3.08,0.14,0.28,0,0,0.14,0,0,0,0,0,0,0,0.98,0,0,0,0.14,0.14,0,0,0,0,0,0.7,0.28,0,0,0,0,0,0,0,0,0,0,0,0.054,0.199,0,0,0,0,1.82,18,304,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.94,0,0,0,0,0,0,0,5.88,0,0,0,0,0,0,0,0,0,2.94,0,0,0,0,0,0,0,2.94,0,0,0,0,0,0,0,0,0,1.562,5,25,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.09,0,0,0,0,0,0,0,9.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.69,0,0,0,0,0,0,0,7.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.285,2,9,0 0,0,0,0,0,0,0,0,0,0,0,0,5.55,0,0,0,0,0,5.55,0,0,0,0,0,0,0,5.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,10,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.69,0,0,0,0,0,0,0,7.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.285,2,9,0 0,0,0,0,0,0,0,0,0,1.4,0,0,0,0,1.4,0,0,0,0,0,0,0,0,0,1.4,1.4,0,0,0,0,0,0,0,0,0,0,1.4,0,0,0,0,0,1.4,0,0,0,0,0,0,0.205,0.205,0,0,0,4.533,21,68,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.69,0,0,0,0,0,0,0,7.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.285,2,9,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.56,2.56,0,1.28,0,0,0,0,0,0,1.28,1.28,0,0,0,0,0,0,0,2.56,0,0,0,0,0,0.552,0,0,0,0,2.093,11,90,0 0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0.64,0,0,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.314,0.209,0,0,0.104,4.062,28,195,0 0,0,0,0,1.26,0,0,0,0,0,0,1.26,0,0,0,1.26,0,0,2.53,0,0,0,0,0,0,0,1.26,0,1.26,0,0,0,0,0,0,0,0,0,0,0,0,1.26,0,0,1.26,0,0,0,0,0,0,0,0,0,1.285,5,18,0 0,0.25,0,0,0,0,0,0,0,0.51,0.77,0.25,0,0,0,0,0,0,1.02,0,0.51,0,0,0,0.25,0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.124,0,0.207,0,0,10.409,343,635,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.857,18,41,0 0,0,0.38,0,0,0,0,0,0,0,0,1.53,0,0.38,0,0,0.76,0,0.76,0,0,0,0,0,3.84,1.53,0.38,0.38,1.53,0.38,0.38,0.38,0,0.38,0.38,1.15,0.38,0,0,0.38,0,0,0.38,0,0.76,0,0,0,0,0.163,0.054,0,0,0,2.297,17,193,0 2,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,2,0,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.888,29,53,0 0,0,0,0,0,0,0,0,0,0.29,0,0,0,0,0,0,0,0,0.58,0,0.58,0,0,0,3.51,2.34,0.87,2.34,0.58,1.17,0.58,0.58,0,0.58,1.17,0.58,0.29,0,0.87,0.58,0,0.87,0.29,0,0.58,0,0,0,0.091,0.637,0.045,0,0,0,3.552,37,373,0 2,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,2,0,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.888,29,53,0 0,0,1.58,0,0,0,0,0,0,0,0,0.79,0,0,0,0,0,0,1.58,0,0,0,0,0,0.79,0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.149,0,0.149,0,0,1.482,10,43,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.375,4,11,0 0,0,0,0,0.33,0,0,0,0,0,0,0.33,0,0,0,0.33,0,0.33,0.33,0,0.33,0,0,0,0.99,0.33,0,0.66,0,0.33,0,0,0,0,0.33,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0,0.13,0.043,0,0,0,2.016,19,125,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.325,0,0,0,0,0,1,1,14,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,6.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.3,3,13,0 0.34,0,0,0,0,0,0,0,0.34,0.68,0,1.02,0,0,0,0,0,0,1.36,0,0.68,0,0,0,2.38,1.7,0.68,1.7,0.68,0.34,0.34,0.34,0,0.34,0.34,0.34,0.68,0,0.68,0.34,0,0,0.68,0,0.34,0,0,0,0.052,0.42,0.052,0,0,0.052,2.604,13,250,0 0,0,0,0,4.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.54,0,4.54,0,0,0,0,0,0,0,0,0,0,2,5,16,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,1.428,3,10,0 
0,1.86,0,0,0,0,0,0,0,0.93,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.8,1.86,1.86,0.93,0.93,0.93,0.93,0.93,0,0.93,0.93,0.93,0.93,0,0.93,0.93,0,0.93,0.93,0,0.93,0,0,0,0,0.457,0.152,0,0,0,2.097,13,86,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.74,0,0,1.625,6,13,0 0.26,0,0.26,0,0,0,0,0,0.53,0,0.53,2.94,0,0,0,0,0,0.26,4.27,0,2.4,0,0,0,0,0.26,0.53,0,0,0,0,0,0,0,0,0,0.26,0,0.53,0,0,0.8,0,0,0,0,0,0.53,0,0.03,0,0,0,0,1.58,8,128,0 0,0,0,0,0.13,0,0,0.55,0,0,0,0.13,0.13,0,0,0,0.27,0,0,0,0,0,0.41,0,2.79,0,0,0,0,0,0,0,0,0,0,0.13,0.27,0,0,0,0,0,0,0,0,0,0,0,0.071,0.143,0,0,0.053,0,2.662,22,418,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,2,2,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0.176,0,0,1.758,7,51,0 0,0,1.23,0,0,0,0,0,0,0,0,2.46,0,0,0,0,0,2.46,1.23,0,1.23,0,0,0,2.46,1.23,0,1.23,0,1.23,1.23,0,0,0,1.23,1.23,2.46,0,0,0,0,0,0,0,0,0,0,0,0,0.139,0,0.278,0,0,1.736,10,66,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.7,0,0,3.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.166,2,7,0 0.17,0.35,0,0,0,0,0,0,0,0.35,0,0.17,0,0,0,0,0,0,1.94,0,0.7,0,0,0.17,0.17,0.17,0.88,0,0,0.17,0,0.17,0,0.17,0,0,0.35,0,0,0,0,0,0,0,0.53,0.17,0,0,0,0.031,0,0.031,0,0,1.564,21,194,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,0,0,0,0,0,1.75,3,14,0 0,0,0,0,0.1,0,0,0,0,0,0,0.1,0,0,0,0,0,0,0,0,0,0,0,0,2.06,1.19,0,0,0,0.1,0,0,1.3,0,0,0.1,1.08,0,0,0,0.65,0,0,0,0,2.6,0,0.1,0.14,0.5,0.093,0,0,0,4.06,51,1003,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,0,0,0,0,0,1.6,7,16,0 0.17,0,0.51,0,0.17,0,0,0,0,1.36,0,0.17,0,0,0,0.17,0.34,0,1.19,0,0.85,0,0,0,1.53,0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.027,0.111,0,0.167,0,0,1.894,22,216,0 0,0,0,0,0,0.44,0,0,0,0,0,0.44,0.44,0,0,0,0,0,1.32,0,0,0,0,0,0,0,0.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.44,0.44,0,0,0,0.15,0,0,0,0,1.613,11,71,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,1.142,2,8,0 0,0.24,0,0,0.24,0,0,0.24,0,0.49,0,0,0,1.48,0,0,0,0,0.99,0,0,0,0,0,0.49,0,0.24,0,0,0,0.24,0,0,0,0.24,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,9.31,235,1108,0 0,0,0,0,0.44,0,0,0,0,0,0.44,0.89,0,0,0,0,0,0,1.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.89,0,0,0,0,0,0,0,0,1.33,0,0,0,0.139,0,0,0,0,1.731,16,116,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.54,0,9.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0.86,0,0,0,0,0,0,0,0.86,0.86,0,0,0,0,0,0.86,6.95,0,4.34,0,0,0,0,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.047,2,22,0 0.25,0,0,0,0.25,0.25,0,0,0,0,0,0.51,0,0.25,0,0,0,0.25,0.51,0,0.25,0,0,0,0,0.25,0,0,0,0,0,0,0.25,0,0,0,0,0,0,0,0.25,0,0,0,0,0.25,0,0.25,0,0.082,0,0,0,0.041,1.287,4,85,0 0,0,1.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.56,0,0,0,0,1.56,0,0,0,0,0,0,0,0,1.75,3,21,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.12,0,0,0,0,0,0,0,3.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.502,0,0,0,0,1,1,8,0 0,1.61,3.22,0,0,0,0,0,0,0,0,1.61,0,0,0,0,0,3.22,3.22,0,0,0,0,0,0,0,1.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.61,1.61,0,0,0,0,0,0,0,0,1.083,2,13,0 0,0,0,0,0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,4.63,0,3.31,0,0,0,2.64,1.98,1.32,0.66,0.66,0.66,0.66,0.66,0,0.66,0.66,0.66,0,0,0.66,0.66,0,0,0,0,0.66,0,0,0,0,0.293,0,0,0,0,3.968,34,127,0 0,0,0,0,0,0,0,0.77,0,0,0,0,0,0,0,0,0,1.55,2.32,0,0,0,0,0,3.1,3.87,3.1,0.77,0,0.77,0.77,0,0,0,1.55,0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0.198,0,0.099,0,0,2.325,30,93,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.5,0,0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.501,0.167,0,0,0.083,3.983,28,239,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.125,17,25,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.54,0,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.453,0.181,0,0,0.09,4.037,28,214,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.666,3,5,0 0,0,0,0,0,0,0,0,0,0,0,0.68,0,0,0,0,0,0,0.34,0,0,0,0,0,0.68,0.34,0,0.68,0,0.34,0,0,0.34,0,0.34,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0.048,0,0,0,0,0,2.147,11,131,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.72,0,0,0,0,0,0,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.395,2.333,8,119,0 0,0,0,0,1.04,0,0,0,0,0,0,0.52,0,0,0,0,0,0,0,0,0,0,0,0,1.56,1.04,0,0.52,0,0,0,0,2.08,0,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.274,0,0,0,0,1.848,10,61,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0.8,0,0,0,0,5.6,0,4,0,0,1.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.122,0.244,0,0,0,0,1.909,6,21,0 0,0,0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0,0,1.36,0,5.47,0,0,0,0,0,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.307,8,30,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.396,0,0.396,2.533,10,38,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.63,0,0,0,0,0,2.63,0,0,0,0,0,2.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.248,0,0,0,0,4.166,14,50,0 0,0.28,0,0,0.56,0,0,0,0.28,0,0,0.56,0,0,0,0,0,0.56,3.41,0,1.13,0,0,0,0.56,0.56,1.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.56,0,0.85,0,0,0,0.046,0.281,0.046,0,0,0,1.834,15,200,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,50,99,100,0 0,0,0,0,0.32,0.32,0,0,0,0,0,0.32,0,0,0,0,0,0,1.3,0,0.98,0,0,0,0,0,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.65,0,0,0,0,0,0.257,0,0,0,0,1.3,7,104,0 0,0,0,0,0,0,0,0,0,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.19,1.19,3.57,0,0,0,0,0,0,0,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.791,71,115,0 
0,0,0,0,2.25,0,0,0,0,0.75,0,0,0,0,0,0,0,0,1.5,0,0,0,0,0,0.75,0.75,1.5,1.5,0,0,0,0,0,0,0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.128,0,0,0.128,0.128,3.657,28,128,0 0,1.96,0,0,0,0,0,0,0,0,0,0,0,0.98,0,0,0,0,1.96,0,0.98,0,0,0,1.96,1.96,1.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.98,0,0,3.92,0,0,0,0,0,0,0,0,3.129,17,97,0 0,0,0,0,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0.87,0,0.29,0,0,0,0.29,0.29,0.29,0.58,0,0,0,0,0,0.29,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.22,0,0,2.038,0,13.562,351,434,0 0,0,0,0,0,0,0,0,0,1.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.615,4,21,0 0,0,0.59,0.11,0,0,0,0,0.11,0.23,0,0.11,0,0,0,0.11,0,0,0.95,0,0.47,0,0,0,0.23,0,0.71,0,0,0,0,0,0,0.11,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0.227,0.322,0.113,0.056,0.075,0,2.546,38,601,0 0.39,0,0,0,1.17,0,0,0,0,0.39,0,1.17,0,0,0,0,0,0.39,3.12,0.39,1.17,0,0,0,0,0,0.39,0.78,0,0,0,0,0,0,0,0,0,0,0,0.39,0,0,0,0,0,0,0,0,0.07,0.07,0,0.07,0,0,2.069,13,89,0 0,0,0,0,1.17,0,0,0,0,1.17,0,0,0,0,0,0,0,0,2.35,0,0,0,0,0,0,0,1.17,1.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.551,10,45,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.69,0,0,0,0,0,0,0,0,0,4.8,19,24,0 0,0,0,0,0,0,0,0,0,0,0,0.52,0,0,0,0,0,0,1.05,0,1.05,0,0,0,0.52,2.11,1.58,1.05,0,0.52,0,0,0,0,0.52,0,0,0,0,0,0,0,0,0,0.52,0,0,0,0,0.164,0,0,0,0,2.173,11,113,0 0,0,1.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,1.58,1.58,3.17,0,1.58,0,0,0,0,1.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.4,0,0,0.601,0,3.36,13,84,0 0,0,0,0,0.56,0,0,0,0,0,0,2.27,0,0,0,0,0,0,1.98,0,0.28,0,0,0,0.85,0.85,0.85,0,0,0.28,0,0,0,0,0,0,0,0,0,0,0,0,0.28,0,0.28,0,0,0,0,0.09,0.135,0,0,0,1.962,15,155,0 0,0,0.16,0,0.64,0,0,0.16,0,0,0,1.91,0,0,0,0.16,0,0,3.04,0,1.76,0,0,0,0,0.32,0.32,0,0,0,0.16,0,0,0,0,0.16,0,0,0,0.16,0,0,0,0,0.32,0,0,0,0,0.055,0,0,0.055,0,1.798,7,196,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.49,0,0,2.98,0,0,0,0,0,0,0,1.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.761,5,37,0 
0,0,0,0,0,0.65,0,0.65,0,0.65,0,1.97,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.65,0,0,0,0.108,0.108,0,0,0.108,0,1.924,9,102,0 0,0,0,0,0,0,0,0,0,0,0,1.74,0,0,0,0,0,0,2.9,0,0.58,0,0,0,0,0,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.201,0,0,0,0,1.794,6,70,0 0,0,1.49,0,0,0.37,0,0,0,0,0,0.74,0.37,0,0,0,0,0,2.24,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.49,0,0.37,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0,0,0,1.79,5,111,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.344,0,0,0,0,1.88,13,47,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20.83,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.555,18,64,0 0.87,0.43,0,0,0,0,0,0,0,0.43,0,0.87,0,0,0,0,0,0,3.5,0,1.31,0,0,0,1.31,0.43,0,0,0,0,0,0,0,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0,2.085,25,73,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.29,0,0,0,0,0,2.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.76,0,0,0,0,0.258,0,0,0,0,3.74,53,101,0 0,6.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.58,0,0,0,0,0,1.58,1.58,1.58,1.58,1.58,1.58,1.58,3.17,0,3.17,1.58,1.58,0,0,0,1.58,0,0,0,0,0,0,0,0,0,0.431,0,0,0,0.215,3.461,12,90,0 0.32,0,0,0,0.32,0,0,0,0,0,0,0.32,0,0,0,0,0,0,0.96,0,2.56,0,0,0,0,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.234,0,0.058,0,0,1.068,3,47,0 0,1.23,0,0,0,0,0,0,0,1.23,0,0.61,2.46,0,0,0,0,0,3.08,0,1.23,0,0,0,0,0,0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.224,0,0,0,0,1,1,20,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,4,6,0 0.28,0,0,0,0.28,0,0,0,0.28,0,0.28,3.97,0,0,0,0,0,0,3.97,0,0.85,0,0,0,0.28,1.13,0,0,0,0,0,0,0,0,0,0,0.28,0,0.28,0,0,0,0,0.28,0,0,0,0.28,0,0,0,0.08,0,0,2.396,16,139,0 0,0,0,0,0,0,0,0,0,0,0,0,1.88,0,0,0,0,1.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.88,0,0,0,0,0,0,0,0,0,0,1.533,5,23,0 0,0,0,0,0,0,0,0,0,0,0,5.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.166,2,14,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.72,6.89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.466,13,37,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.53,6.15,0,0,0,0,0,0,0,0,0,0,1.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.714,6,36,0 0,0,0,0,0,0,0,0,0,0,0,1.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.56,0,1.56,0,3.12,0,1.56,0,1.56,1.56,0,0,0,0.215,0.215,0,0,0,1.666,12,30,0 0,0,2.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.5,0,0,0,0,0,0,0,0,0,0,0,0,1,1,15,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.552,0,0,0,0,1,1,4,0 0,0,0.97,0,0,0,0,0,0,1.94,0,0.97,0,0,0,0,0,0.97,5.82,0,0.97,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.293,0,0,0,0,2.187,14,70,0 0,0,0,0,0,0,0,0,0,1.19,0,2.38,0,0,0,0,0,0,1.19,0,0,0,0,0,1.19,1.19,0,2.38,0,0,0,0,0,0,0,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0.621,0,0,0,0,2.617,11,89,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0.34,0,0,0.34,0,1.7,0,1.36,0.34,0,0,0,0.34,0,1.36,0,0,0,0,0,0.34,0.34,1.02,0,0,0,0,0,0,0,0.34,0,0.34,0,0,0,0,0,0,2.38,0,0,0,0,0,0.055,0.11,0,0,0,1.421,8,91,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,0,0,0,0,0,0,0,0,7.14,0,0,0,0,0,0,0,0,0,0.578,1.734,0,0,0,0,3.083,24,37,0 0,0,1.33,0,0,0,0,0,0,1.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.33,1.33,0,2.66,0,0,0,0,0,0,0,1.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0.413,0,0,0,0,4.36,46,109,0 0.23,0,0.46,0,0,0,0,0.23,0,0.23,0,0,0,0,0,0,0,0,3.69,0,0.69,0,0,0,1.84,0.23,0,0,0,0.23,0,0,0,0,0,0,0.23,0,0,0,0,0,0,0,0,0,0,0,0,0.253,0,0,0.031,0,2.016,19,244,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.52,4.76,4.76,4.76,4.76,4.76,4.76,4.76,0,4.76,4.76,4.76,0,0,0,4.76,0,0,0,0,0,0,0,0,0,1.257,0,0,0,0,3.333,11,30,0 
0,0,0,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0,0.22,0,0,0.33,0.11,0,0,0,0,0.11,0,0,0,0,0,0,0.053,0.16,0,0,0,0,2.367,24,651,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.552,0,0,0,0,1.6,4,8,0 0,0,0,0,0,0,0,0,0,0,0,3.38,0,0,0,0,1.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.204,0,0.408,0,0,6.187,47,99,0 0,0,0.32,0,0.32,0,0,0,0,0,0,0.64,0,0,0,0,0,0,0.32,0,0,0,0,0,0.64,0.64,0,0,0,0,0,0,0,0,0,0.32,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0.04,0,0,0,0,1.677,10,156,0 0.23,0,0.23,0,1.17,0,0,0,0,0,0,1.41,0,0,0,0,0.11,0,0.47,0,0.7,0,0.11,0,1.29,0.11,0,0,0.11,0.23,0,0,0,0,0,0,0.11,0,0,0,0,0.11,0,0,0.23,0,0,0,0,0.015,0,0,0,0.015,1.486,7,162,0 2,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,2,0,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.888,29,53,0 0,0,0,0,0,0,0,0,0,0,0,1.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.6,7,64,0 0,0,0,0,0,0,0,0,0,0,0,0.98,0,0,0,0,0,0,0,0,0,0,0,0,0.49,0.98,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.49,0,0,0,0.159,0,0,1.45,7,74,0 0.29,0,0.44,0,0.73,0,0,0,0,0,0,0.58,0,0,0,0.14,0,0,0.73,0.14,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.29,0,0,0,0,0.14,0,0.14,0,0,0,1.32,0.02,0.321,0.18,0.14,0,0,1.891,24,522,0 0,0,0.91,0,0,0,0,0.45,0,0,0,0,0,0,0,0,0,0,2.28,0,0,0,0,0,0.91,0.91,0,0,0,0,0,0,0,0,0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.32,7,103,0 0,0,1.09,0,0,0,0,0,0,1.09,0,2.19,0,0,0,0,0,0,0,0,0,0,0,0,1.09,1.09,0,2.19,0,0,0,0,0,0,0,1.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0.325,0,0,0,0,4.586,51,133,0 0,0.51,0,0,1.02,0,0,0.51,0,0,0,0,0,0,0,0.51,0.51,0,0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.51,0,0,0,0.51,0,0,0,0.51,0,0,0,0,0,0.071,0,0,0,0,2.076,9,108,0 0,0.61,0,0,1.22,0,0,0,0,3.68,0,0,0,0,0.61,0,0,0,1.84,0,1.84,0,0,0,0.61,0.61,0,0,0,2.45,0,0,0,0,0,0,0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.476,8,62,0 
0,0,0,0,0,0,0,0,0,0.91,0,0,0,0,0,0,0,0,1.83,0,0,0,0,0,1.83,0.91,2.75,0.91,0.91,0.91,0.91,0.91,0,0.91,0.91,0.91,0.91,0,0.91,0.91,0,0,0.91,0,0.91,0,0,0,0,0.46,0,0,0,0,1.918,13,71,0 0,0,0,0,0,0,0,0,0,0,0,4.58,0,0,0,0,0.91,0,0,0,0.91,0,0,0,1.83,0,0,0,0,0.91,0,0,0,0,0,0.91,0,0,0.91,0,0,0,0,0,0,0,0,0,0,0.12,0,0.241,0,0,3.541,26,85,0 0,0,0.36,0,0.36,0,0,0,0,0,0,0.36,0,0,0,0,0,0,0,0,0,0,0,0,6.25,5.51,0,0,0,0,0,0,0,0,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0.279,0.767,0.139,0,0,0,3.722,20,268,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,0.18,0,0,0.09,0,0,0,0,0,0.94,0.37,0,0,0,0.28,0,0,0,0,0,0,0,1.41,0,0,0,0,0,0,0,0.84,0,0,0.47,0.09,0.09,0,0,0,0,0,0,0,0,0,0,0.052,0.065,0,0,0,0,2.022,19,451,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.44,0,0,0,1.44,0,0.72,0,1.44,1.44,0,0,0,0,0.114,0.114,0,0.114,1.645,12,51,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.81,0,0,0,1.62,0,0.81,0,1.62,1.62,0,0,0,0,0.137,0,0,0.137,1.636,12,36,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0,0,0,0,0,1,1,8,0 0,0,0,0,0,0,0,0,0,0,0,0.85,0,0,0,0,0,0.85,2.56,0,0,0,0,0,0.85,0.85,0,0.85,0,0,0,0,0,0,0.85,0,1.7,0,0,0,0,0,0.85,0,0.85,0,0,0,0.142,0,0.142,0,0,0,1.717,12,67,0 0,0,0,0,0,0,0,0,0,1.22,0,0,0,0,0,0,0,0.61,1.84,0,0,0,0,0,2.45,1.84,1.22,1.22,0.61,0.61,0.61,0.61,0,0.61,1.22,0.61,0.61,0,0,0.61,0,0,0.61,0,0.61,0,0,0,0.095,0.38,0.19,0.19,0,0,1.857,12,104,0 0,0,0,0,0,0,0,0,0,0,0,2.81,0,0,0,0.35,0,0.35,0.35,0,0,0,0,0,0.35,0,0,0,0,0,0,0,0,0,0,0,0.35,0,0,0,0,0,0,0,0,0,0,0,0,0.293,0,0,0,0,1.226,5,146,0 0,0,0,0,0,0,0,0,0,0,0,0.54,0.54,0,0,0,0,0,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.54,0,0,0.54,0,0,2.7,0,0.54,0,0,0,0,0,0.087,0,0.087,0,0,2.363,5,52,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.22,0,0,0,0,0,2.22,2.22,0,0,0,0,0,0,0,0,2.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.769,8,23,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.81,0,0,0,0,0,3.63,0,0,0,0,0,0,0,0,0,0,0,1.81,0,0,0,0,0,0,0,0,0,1.181,3,13,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.36,2.04,0,0.68,0,0,0,1.36,0.68,0,0.68,0,0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.241,0,0,0,0,2.461,17,96,0 0,0,0,0,6.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,6,10,0 0,0,0,0,0,0,0,0,0,0,0,1.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.87,0,0,0,0,0,0,0,0,0,1.75,0,0,0,0,0,0,0,0,0.87,0,0,0,0,0,0,1.795,11,79,0 0,0,0,0,0,0,0,0,0,0.9,0,1.81,0,0,0,0,0,0,0,0,0,0,0,0,0.9,0.9,0,1.81,0,0,0,0,0,0,0,0.9,0,0,0.9,0,0,0,0,0,0,0,0,0,0,1.208,0,0,0,0,5.111,58,138,0 0,0,0,0,0,0,0,0,0,0,0,0.35,0,0,0,0,0,0,0.35,0,1.06,0,0,0,1.41,1.06,0.7,0.35,0.35,0.7,0.35,0.35,0.35,0.35,0.35,0.35,0.35,0,0,0.35,0,0,0.35,0,0.7,0,0,0,0,0.222,0.055,0,0,0,1.506,12,119,0 0,0,0,0,0,0,0,0,0,0,0,2.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,2,7,26,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.5,0,0,0,0,0,1.01,0,0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.5,0,0,0,0.08,0.564,0,0,0.161,0,1.712,20,137,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.67,0,0.33,0,0.042,0,0,0,0,2.519,46,131,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.56,0,1.12,0,0,0,0,0,0,0,0,0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.56,0,0,0,0,0.096,0,0,0,0,1.15,4,46,0 0,0,0.18,0,0.56,0,0,0,0,0,0,0.75,0.37,0,0,0,0,0,0,0,0,0,0,0,3.03,0,0,0,0,0,0,0,0,0,0,0.18,0.18,0,0,0,0,0,0.18,0,0.18,0,0,0,0,0.056,0,0,0.112,0,2.188,19,232,0 0,0,0.8,0,2.42,0,0,0,0,0,0,0.4,0,0,0,0,0,0.4,5.26,0,1.61,0,0,0,0.4,0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.61,0,0,0,0,0,0,0.27,0,0,2.36,35,59,0 0,0,0,0,0,0,0,0,0,0,0,7.69,0,0,0,0,0,0,0,0,0,0,0,0,3.84,3.84,0,0,7.69,0,0,0,0,0,0,0,0,0,0,0,0,7.69,0,0,0,0,0,0,0,0,0,0.581,0,0,1.615,4,21,0 
0.21,0,0.21,0.21,0.63,0,0,0,0,0,0.42,0,0.21,0,0,0.84,0,0.42,1.9,0,0,0,0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0.21,0,0,0,0,0,0,0,0,0,0,0,0,0.031,0,0.374,0.062,0,2.892,71,405,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.17,0,0,0,0,0,4.7,2.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.882,21,49,0 0,0,0.22,0,0,0,0,0,0,0,0,0.68,0.9,0,0,0.22,0.22,0,0,0,0,0,0,0,1.36,0,0,0,0,0,0.22,0,0,0,0,0,0.45,0,0,0,0,0,0,0,0,0,0,0,0,0.094,0,0,0,0,2.182,42,203,0 0,0.08,0.08,0,0,0.08,0,0.16,0,0,0,0.81,0.16,0,0,0,0.08,0,0,0,0,0,0.08,0,3.49,0.48,0,0,0.32,0.24,0,0,0,0,0,0.32,0.08,0,0,0,0,0.08,0,0,0,0,0,0.08,0.022,0.111,0,0,0.055,0,2.145,21,693,0 0.22,0,0.22,0,0.45,0,0,0,0,0,0,0,0.68,0,0,0.22,0,0,0.68,0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.22,0,0,0,0.68,0,0.22,1.83,0.22,0.91,0,0,0,0.267,0.038,0,0,0,1.649,13,94,0 0,0,0,0,0,0,0,0,0,0,0,1.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.6,7,64,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.66,0,0,0,0,0,0,1.149,0,0,1.5,3,12,0 0,0,0.81,0,1.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0.81,0,0,0,0,0,0,0,0.81,0,0,0,0,0,0,0,0,0,0,0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.09,2,12,0 0,1.17,0,0,0,0,0,0,1.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.17,2.35,0,0,0,0,0,0,0,0,0,1.17,0,0,0,0,0,0,0,0,0,0,1.17,0.376,0,0,0,0,0,2.925,16,117,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.34,0,0,0,0,0,0,0,0,0,1,1,8,0 0,0,0,0,0,1.34,0,0,0,0,0,0,0,0,0,0,0,0.67,1.34,0,0,0,0,0,0,0,0.67,0,0,0,0,0,1.34,0,0,0,0,0,0,0,0,0,0,0.67,0.67,0,0,0,0,0.111,0,0,0,0,1.285,5,27,0 0,0,0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.65,0,0.55,0,0,0,0,0,0.55,0,0,0,0,0,0,0,0,0,0,0.55,0,0,0,0,0,1.1,0.55,0,0,0,0,0.092,0,0,0,0,1.84,5,46,0 0,0,0,0,0,0,0,0,0,0,0,1.35,0,0,0,0,0,0,2.7,0,1.35,0,0,0,0,1.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.35,0,0,0,0,0,0,0,0,0,0,0,1.88,5,47,0 
0,0,1.56,0,1.56,0,0,0,0,1.56,0,6.25,0,0,0,0,0,1.56,1.56,0,1.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.278,0,0,0,0,1,1,11,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.687,0,0,0,0,1.903,17,59,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.52,4.76,4.76,4.76,4.76,4.76,4.76,4.76,0,4.76,4.76,4.76,0,0,0,4.76,0,0,0,0,0,0,0,0,0,1.257,0,0,0,0,3.333,11,30,0 0,0,0,0,0.27,0,0,0,0,0.27,0,0.54,0,0.54,0,0,0.54,0,1.63,0,0,0,0,0,4.89,1.35,0.27,0.27,0.27,0.27,0.27,0.27,0,0.27,0.27,0.27,0,0,0.27,0.27,0,0,0.27,0,0.81,0,0,0,0,0.192,0.153,0,0,0,4.608,35,424,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.63,0,0,0,0,0,0,2.63,0,0,0,0,0,0,0,0,0,0.414,0,0,2.705,13,46,0 0,0,0.2,0,0,0,0,0.2,0,0.2,0,0,0,0,0,0,0,0.2,0.2,0,0,0,0,0,0.2,0.2,0,0.41,0,0,0,0,0.2,0,0.2,0,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0.148,0,0,0,0,1.669,15,187,0 0,0.22,0,0,0.66,0.22,0,0.44,0.44,0.89,0,0,0.22,0.22,0,1.33,0,0,0.89,0,0.44,0,0,0.22,3.34,3.56,0.66,0.22,0.22,0.22,0.22,0.22,0,0.22,0.22,0.22,1.11,0,0,0.22,0,0,0.22,0,0.22,0,0,0,0.148,0.372,0.111,0.372,0.223,0,3.425,42,411,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.66,0,0,0,0,0,1.33,0,0,1.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.478,0,0,0,0,2.166,18,52,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.333,5,7,0 0,0,0.62,0,0,0,0,0,0,0,0,0.62,0,0,0,0,0,0.62,1.25,0,0,0,0,0,1.25,0.62,0,0.62,0,0.62,0.62,0,0.62,0,0.62,0.62,0,0,0,0,0,0,0,0,0.62,0,0,0,0,0.084,0,0.169,0,0,1.863,10,82,0 0,0.04,0.25,0,0.04,0.04,0,0,0.16,0.08,0.2,0.62,0,0,0,0.16,0.04,0,0.71,0,0.41,0,0.12,0,2.01,0.41,0,0.12,0,0.08,0.12,0,0,0,0.04,0,0.2,0,0,0,0,0,0,0.08,0.08,0,0,0.04,0.012,0.274,0.012,0.031,0.056,0,1.83,23,1479,0 0,0,0,0,0,0,0,0,0,0,0,4.34,0,0,0,0,0.62,0,0,0,0.62,0,0,0,3.1,0,0,0,0,0.62,0,0,0,0,0,0.62,0,0,0.62,0,0,0,0,0,0.62,0,0,0,0,0.166,0,0.333,0,0,4.255,34,200,0 
0,0.39,0.19,0,0.19,0.09,0,0,0,0,0,0.29,0,0,0.29,0,0,0.29,0.89,0,0.29,0,0,0,0.49,0.49,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,4.75,0,0.09,0,0.09,5.74,0,0,1.352,0.08,0,0.016,0,0,1.679,17,178,0 0,0.39,0.19,0,0.19,0.09,0,0,0,0,0,0.29,0,0,0.29,0,0,0.29,0.89,0,0.29,0,0,0,0.49,0.49,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,4.75,0,0.09,0,0.09,5.74,0,0,1.353,0.08,0,0.016,0,0,1.679,17,178,0 0,0.39,0.19,0,0.19,0.09,0,0,0,0,0,0.29,0,0,0.29,0,0,0.29,0.89,0,0.29,0,0,0,0.49,0.49,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,4.75,0,0.09,0,0.09,5.74,0,0,1.353,0.08,0,0.016,0,0,1.679,17,178,0 0,0,0.93,0,0.31,0,0,0,0.31,0,0.31,0.93,0,0,0,0,0.62,0,3.75,0,3.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0.054,0.108,0,0.054,0,0.054,2.735,14,145,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.729,0,0,1.875,4,15,0 0,0,0,0,0,0,0,0,0,0.84,0,0.84,0,0,0,0,0,0,4.2,0,0,0,0,0,1.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.119,0,0,0,1.454,9,32,0 0,0,0,0,0,0,0,0,0,0,0,4.58,0,0,0,0,0.91,0,0,0,0.91,0,0,0,1.83,0,0,0,0,0.91,0,0,0,0,0,0.91,0,0,0.91,0,0,0,0,0,0,0,0,0,0,0.124,0,0.249,0,0,2.576,14,67,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.333,3,12,0 0,0,0.86,0,0,0,0,0.86,0,0,0,1.73,0,0,0,0,0,0,0,0,0,0,0,0,3.47,5.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.86,0,0,0.125,0,0,0,0,1.8,9,72,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.41,4.41,0,1.47,0,0,0,0,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.565,10,59,0 0.06,0,0.19,0,0.26,0.06,0,0.19,0,0.06,0,1.12,0.06,0.19,0,0,0.52,0,0,0.59,0.06,0,0.39,0,3.23,0,0,0,0,0,0,0,0.06,0,0,0.19,0.13,0,0,0,0,0,0,0.06,0,0,0,0,0.072,0.117,0,0,0.063,0,2.121,25,751,0 0,0,1.09,0,0,0,0,0,0,0,0,1.09,0,0,0,0,0,0,3.29,0,0,0,0,0,0,0,0,0,0,2.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.111,2,20,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.44,0,1.44,0,2.89,1.44,0,0,0,0,0.227,0,0,0,1.64,12,41,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.166,3,14,0 0,0,0,0.14,0.42,0,0,0.14,0,0,0,0.98,0,0.14,0,0,0.7,0,0,0,0,0,0,0,1.82,0.28,0,0,0.28,0.7,0,0,0,0,0,0.28,0.14,0,0,0,0,0,0,0,0.14,0,0,0,0,0.077,0,0,0,0,1.502,6,257,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.69,2.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,6,24,0 0,0,0,0,0,0,0,0,0,0,0,4.25,0,0,0,0,0,0,6.38,0,0,0,0,0,0,0,2.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.133,3,17,0 0,0,0,0,0.68,0,0,0,0,0.68,0,0.68,0,0,0.68,0,0,0.68,1.36,0,0.68,0,0,0,2.72,1.36,1.36,0.68,0.68,0.68,0.68,0.68,0,0.68,0.68,0.68,0.68,0,0.68,0.68,0,0,0.68,0.68,0.68,0,0,0,0.104,0.418,0.104,0,0,0,2.102,12,82,0 0,0,0,0,0,0,0,0,0,0.39,0,0,0,0,0,0,0,0,0.39,0,0,0,0,0,2.35,0.39,1.17,0.39,0.39,0.78,0.39,0.39,0,0.39,0.39,1.56,0.39,0,0,0.39,0,0.39,0.39,0,0.39,0,0,0.39,0,0.314,0,0.125,0,0,1.955,13,133,0 0,0,0,0.15,0.46,0,0,0.15,0,0,0,0.92,0,0.15,0,0,0.46,0,0,0,0,0,0,0,2.15,0.3,0,0,0.3,0.92,0,0,0,0,0,0.3,0.15,0,0,0,0,0,0,0,0.15,0,0,0,0,0.085,0,0,0,0,1.535,6,238,0 0,0,0,0,0.68,0,0,0,0,0.68,0,0,0,0,0.68,0,0,0,0.68,0,0,0,0,0,2.72,2.72,2.04,2.04,0.68,0.68,0.68,0.68,0,0.68,2.04,0.68,0.68,0,0.68,0.68,0,0,0.68,0.68,0.68,0,0,0,0,0.828,0.621,0,0,0,2.277,12,123,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.92,0,0.64,0,0,0,0,3.2,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0.213,0,0,0.106,0,0,2.714,47,95,0 0,0,0.2,0,0.2,0,0,0,0,0.8,0,1,0,0,0,0,0,0,0.2,0,0.2,0,0,0,1.4,1.6,0.2,0.2,0.2,0.2,0.2,0.2,0,0.2,0.4,0.2,1,0,0.2,0.2,0,0,0.2,0.8,0,0,0,0.2,0,0.429,0.03,0,0,0,2.703,50,346,0 0.87,0,0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.493,0,0,0,0,1.344,4,39,0 0,1.12,0,0,0,0,0,0,1.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.12,2.24,0,0,0,0,0,0,0,0,0,1.12,0,0,0,0,0,0,0,0,0,0,1.12,0.361,0,0,0,0,0,2.875,16,115,0 
0,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.43,0,0,0,0,0,6.92,3.89,0,0,0,0,0,0,0,0,0,0,2.16,0,0,0,1.29,0,0,0.43,0,0,0,0,0.318,0.717,0.159,0.079,0,0,4.411,19,300,0 0.05,0,0.1,0,0.15,0.05,0,0,0.57,0,0,0.26,0,0,0,0,0,0.05,0.21,0,0.15,0,0,0,0.63,0.68,0.05,0,0,0,0,0,0.73,0,0,0,0.42,0,0,0,0,0,0,0,0,0,0.05,0,0.193,0.17,0.059,0,0.014,0,3.461,66,1170,0 0.07,0,0.14,0,0.07,0,0,0,0.74,0,0,0.22,0,0.07,0,0,0,0.07,0.29,0,0.22,0,0,0,0.74,0.81,0.07,0,0,0,0,0,0.22,0,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0.116,0.2,0.042,0,0.021,0,2.79,36,681,0 0.12,0,0.6,0,0.6,0,0,0,0,0,0,0.12,0.12,0,0.12,0,0.73,0,0.6,0,0.48,0,0,0,1.58,0,0,0.24,0,0,0.48,0,0,0,0.36,0,0.12,0,0,0,0,1.33,0,0.12,0.12,0,0,0.12,0.016,0.148,0,0.033,0.016,0,2.056,65,364,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.369,0,0,0,0,1.25,2,5,0 0,0,0.42,0,0.42,0.14,0,0,0,0,0,1.56,0.14,0,0,0,0.28,0,0.14,0,0.14,0,0,0,3.12,0,0.14,0,1.27,0.42,0,0,0,0,0,0.56,0.28,0,0.14,0,0,0,0.14,0,0.14,0,0,0,0.058,0.019,0.019,0,0,0,2.345,17,333,0 0,0,2.04,0,2.04,0,0,0,0,0,0,0,0,0,0,0,0,0,2.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.08,0,0,0,0,0,0,0,0.722,0,0,0,0,1.1,2,11,0 0,0,0,0,0,0,0,0,0,0,0,1.13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.13,0,0,3.4,0,0,0,0,0,0,0.198,0.396,0,0,0,0,2.076,5,27,0 0,0,0,0,0,0,0,0,0,0,0,2.77,0,0,0,0,0,0,2.77,0,0,0,0,0,5.55,2.77,2.77,2.77,5.55,2.77,2.77,2.77,0,2.77,2.77,2.77,0,0,0,2.77,0,0,0,0,0,0,0,0,0,1.229,0,0,0,0,3.25,11,39,0 0,0,0,0,0,0,0,0,0,0,0,1.42,0,0,0,0,0,0,0,0,4.28,0,0,0,2.85,1.42,4.28,1.42,1.42,1.42,1.42,1.42,0,1.42,1.42,1.42,1.42,0,1.42,1.42,0,0,1.42,0,1.42,0,0,0,0,0.419,0,0,0,0,2.133,12,64,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.46,0,0,0,0.185,0,0,0,0,1.904,11,80,0 0.2,0.06,0.2,0,0.4,0,0,0,0,0,0,0.95,0.27,0,0,0.06,0.06,0,0,0,0,0,0,0,3.47,0,0,0,0,0,0,0,0.06,0,0,0.34,0.06,0,0,0,0,0,0.13,0.06,0.06,0,0,0.13,0.028,0.093,0,0,0.018,0,2.423,26,693,0 
0,0,0,0,0.38,0,0,0,0,0,0,2.28,0,0,0,0,0,0,0.76,0,0,0,0,0,1.14,0.76,0,0,0.38,1.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.352,3,46,0 0.06,0.04,0.29,0,0.08,0.06,0,0.13,0.02,0.11,0,0.47,0,0.02,0.02,0.13,0.13,0.08,0.24,0,0.17,0,0,0,0,0,0,0.02,0.02,0,0,0,1.7,0,0,0.22,0.83,0.02,0,0,0.06,0.04,0.02,0.06,0,0.29,0.02,0.15,0.032,0.176,0,0.003,0.003,0,2.201,79,2631,0 0,0,0,0,0,0,0,0,0,2.5,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.436,0,0,1.7,8,17,0 0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.769,0,0,1.428,4,10,0 0.2,0.06,0.2,0,0.4,0,0,0,0,0,0,0.95,0.27,0,0,0.06,0.06,0,0,0,0,0,0,0,3.47,0,0,0,0,0,0,0,0.06,0,0,0.34,0.06,0,0,0,0,0,0.13,0.06,0.06,0,0,0.13,0.027,0.09,0,0,0.018,0,2.423,26,693,0 0.79,0,0.79,0,0.79,0,0,0,0,0,0,0.79,0,0,0,0,0,0,0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.147,1.166,4,42,0 0,0,0,0,0,0,0,0,0,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.19,1.19,0,2.38,0,0,0,0,0,0,0,1.19,0,0,1.19,0,0,0,0,0,0,0,0,0,0,0.404,0,0,0,0,3.379,11,98,0 0,0,0.32,0,0.49,0.16,0,0,0,0,0,1.48,0,0,0,0,0.32,0,0.16,0,0.16,0,0,0,1.31,0,0,0,1.31,0.49,0,0,0,0,0,0.65,0.16,0,0,0,0,0,0,0,0.16,0,0,0,0,0.022,0,0,0,0,1.638,6,154,0 0,0,0,0,0.31,0,0,0,0,0,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0.63,0,0.63,0,0,0.63,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.045,0,0,0,0,1.813,11,107,0 0,0,0,0,0,0,0,0.67,0,0,0,1.35,0,0,0,0.67,0,0,4.05,0,2.02,0,0,0,0.67,0,0,0,0,0,0,0,0,0,0.67,0,0,0,0,0,0,0,0,0,0.67,0,0,0,0,0.386,0,0,0,0,3.27,19,121,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.222,9,29,0 0,0,1.63,0,0,0,0,0,0,0,0,1.63,0,0,0,1.63,0,0,1.63,0,1.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.181,3,13,0 0,0,3.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.44,0,0,3.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.727,4,19,0 
0,0,0,0,0.33,0,0,0,0,0.66,0,0,0,0,0,0,0,0,0.66,0,0.33,0,0,0,7.61,2.64,0,0,0,0,0,0,0.33,0,0,0,1.32,0,0,0,2.31,0.33,0,0.33,0,0,0,0,0.349,0.524,0.116,0,0,0,3.627,19,341,0 0,0,0,0,1.4,0,0,0,0,0,0,2.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.239,0,0,1.923,7,50,0 0,0,0,0,0,0,0,0,0,0.53,0,2.68,0,0,0,0,0,0,0.53,0,0,0,0,0,0.53,0.53,0,1.07,0,0,0,0,0,0,0,0.53,0,0,1.07,0,0,0,0,0,0,0,0,0,0,0.194,0,0,0,0,3.731,21,153,0 0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,0,0,0,0,0,10.71,0,0,0,0,0,3.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,7,0 0,0,0,0,0.51,0,0,0,0,0,0,0,0.51,0,0,0,0,0,1.55,0,0,0,0,0,0,0,0.51,0,0,0,0,0,1.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.281,0,0,1.363,5,45,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.66,0,0,0,0,0,0,0.925,0,0,1.833,6,11,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.52,4.76,4.76,4.76,4.76,4.76,4.76,4.76,0,4.76,4.76,4.76,0,0,0,4.76,0,0,0,0,0,0,0,0,0,1.257,0,0,0,0,3.333,11,30,0 0,0,0,0,0,0,0,0,0,0,0,0.85,0,0,0,0,0,0,0,0,0,0,0,0,0.42,0.42,0,0,0,0,0,0,0,0,0,1.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.536,8,106,0 0,0,0,0,7.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.14,0,0,0,0,0,0,0,0,0,1.6,4,8,0 0,0,1.96,0,0,0,0,0,0,0,0,0,0,0,0,1.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.92,0,0,3.92,0,0,0,0,0,0,0,0,0,1.647,4,28,0 0,0,0,0,0,0,0,0,0,0,0,1.06,0,0,0,0,0,0,1.06,0,0,0,0,0,3.19,1.06,0,0,0,0,0,0,0,0,0,0,1.06,0,2.12,0,0,0,0,0,0,0,0,0,0,0.168,0,0.168,0,0,1.75,7,63,0 0,0,0,0,0,0,0,0,0,0,0,3.19,0,0,0,0,1.06,0,0,0,0,0,0,0,0,1.06,0,0,0,0,0,0,0,0,0,0,0,0,1.06,0,0,0,0,0,0,0,0,0,0,0.143,0,0,0,0,2.714,13,76,0 0.64,0,0.64,0,0,0,0,0,0,0,0,0.64,0,0,0,0.64,0,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0.64,0,0.64,0,0,0,0,0.309,0.619,0,0,0,0,1.727,5,57,0 0,0,0.47,0,1.91,0,0,0,0,0,0,1.91,0,0,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.43,0,0,0.95,0,0,0,0,0,0,0,0,0,1.233,4,37,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.69,0,0,0,0,0,0,0,0,0,1.333,2,4,0 0,0,0,0,0.76,0.25,0,1.27,0,0,0,0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.14,5,65,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.38,0,0,1.38,0,0,0,0,0,0,0,2.77,0,0,0,0,0,0,0,0,0,1.38,0,0,0,0,0,0,0,1.38,0,0,1.38,0,0,0,0,0,0,1.666,9,35,0 0,0,0,0,0,0,0,0,0,1.85,0,0,0,0,0,0,0,0,3.7,0,0,0,0,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.349,0,0,1.47,4,25,0 0,0,0.59,0,0.29,0.59,0.59,0.29,0,0.29,0.29,0,0,0,0,0,0,0.89,3.58,0,1.49,0,0,0,0.29,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.29,0,0,0,0.089,0,0,0.044,0.134,0,1.6,15,120,0 0,0,0,0,0,0,0,0,0,0.28,0,0.56,0,0,0,0,0,0,0,0,0,0,0,0,1.13,0.84,0,0.56,0,0.84,0,0,0,0,0.56,0,0.56,0,0,0,0,0,0,0,0,0,0,0.28,0,0.262,0,0,0,0,3.25,75,286,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.7,0,0,0,0,0,0,0,0,0,0.523,0,0,2.571,10,36,0 0,0,0.08,0,0,0.25,0,0.25,0.08,0,0,1.17,0.08,0.25,0,0,0.42,0,0,0,0,0,0,0,3.11,0,0,0,0,0,0,0,0.16,0,0,0.25,0.25,0,0,0,0,0,0,0,0,0,0,0,0.034,0.08,0,0,0,0,2.023,27,694,0 0,0,1.36,0,0.68,0.68,0,0,0,0,0,0,0,0,0,0,0,0,2.05,0,0,0,0,0,0,0,1.36,0,0,0,0,0,0,0,0,0,0.68,0,0,0,0,0,0,0,0,0,0,0,0,0.357,0,0,0,0,1.294,5,44,0 0,0,0,0,0,0.14,0,0,0,0.14,0,0,0,0,0,0,0,0.29,0.74,0,0.14,0,0,0,0.14,0.14,0.59,0,0,0,0,0,0.14,0,0,0,0.59,0,0,0,0,0,0,0,0.44,0,0,0,0,0.297,0,0,0,0,1.803,27,238,0 0.03,0.03,0,0,0,0,0,0,0.06,0.09,0.03,0.15,0,0,0,0,0.03,0.12,0.03,0,0,0,0,0,0.46,0.27,0,0,0.03,0.06,0,0,0,0,0,0.03,0.15,0,0,0,0.36,0,0.03,0,0.12,1.19,0,0,0.024,0.178,0,0.128,0,0,3.427,49,1827,0 0,0,0.27,0,0,0,0,0,0,0.83,0,0.55,0,0,0,0,0,0,0,0,0,0,0,0,0.55,0.27,0,0.55,0,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0.082,0.164,0,0,0,0,2.235,51,199,0 0,0,2.27,0,0,0,0,0,0,0,0,4.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.27,0,0,0,0,0,0,0,0,0,0,0,0,0.296,0,0,5.25,32,63,0 
0,0,0.7,0,0,0.7,0,0,0,0,0,0,0,0,0,0,0,0,3.54,0,0.7,0,0,0,0,0,1.41,0,0,0,0,0,0,0,0,0,0.7,0,0,0,0,0,0,0,0.7,0,0,0,0.126,0.252,0,0,0,0,1.375,5,55,0 0,0,0.64,0,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,3.84,0,0.64,0,0,0,1.28,0.64,1.92,0.64,0.64,0.64,0.64,0.64,0,0.64,0.64,0.64,0.64,0,0.64,0.64,0,0,0.64,0,1.28,0,0,0,0,0.225,0,0.225,0,0,1.902,12,78,0 0,0,0,0,0,0,0,0,0,0,0,1.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,0,6.34,0,0,0,0,0,0,0,0,0,0,1.259,3,34,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.09,0,0,0,0,0,0,0,9.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.09,0,0,0,0,0,0,0,0,0,1,1,6,0 0,0,0,0,0,0,0,0,0,2.98,0,0,0,0,1.49,0,0,0,0,0,0,0,0,0,1.49,1.49,0,0,0,0,0,0,0,0,0,0,1.49,0,0,0,0,0,1.49,0,0,0,0,0,0,0.209,0.209,0,0,0,3.5,17,49,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,14.28,0,0,0,0,0,0,0,0,0,1.123,0,0,1.3,4,13,0 0,0,0,0,0,0,0,0,0,0,0,2.18,0,0,0,0,0,0,1.45,0,0,0,0,0,2.18,0,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.45,0,0,0,0,0,0.122,0,0,0,0,1.785,18,75,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.63,0,0,0,0,0,0,0,0,0,0,1.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.187,5,35,0 0,0.99,0,0,0.49,0,0,0.49,0,0,0,0.49,0,0,0,0,0,1.98,2.97,0,1.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.081,0,0,1.348,4,58,0 0,0,0,0,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0.52,0,0,0.52,0,0,0,0.52,0.52,0.52,1.05,0,0.52,0,0,0,0,0,0,0,0,0,0,0,0.52,0,0,0,0,0,0,0,0.166,0,0,0,0,3.888,55,140,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.7,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.7,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.7,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.7,0,0,0,0,0,0,0,0,0,1,1,5,0 
0,0,0,0,0.76,0,0,0,0,0,0,0,0,0,0,0,0.76,0,0,0,0,0,0,0,2.3,0.76,0,0,0,1.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.3,7,138,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.7,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,0.2,0.2,0,0,0,0,0,0,0,0,1.03,0,0,0,0,0,0.62,0.41,0,0,0,0,0,2.49,0.62,0,0,0,0,0,0,0,0,0,0.2,0.2,0,0,0,0,0,0,0,0,0,0,0.2,0,0.087,0,0,0,0,2.797,127,512,0 0.04,0.09,0.31,0,0.04,0.22,0.04,0,0,0.58,0.09,1.17,0,0,0,0.13,0.04,0,1.3,0,1.17,0,0.04,0,0.9,0.54,0,0.04,0,0.18,0.18,0,0,0,0.18,0.04,0.31,0,0.22,0,0.04,0,0,0,0.13,0.04,0,0.09,0.013,0.224,0,0.027,0.006,0,1.784,29,1192,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.99,0,0.99,0,0,0,0,0,1.98,0,0,0,0,0,0,0,0,0,0.99,0,0,0,0,0,0,0.99,0.99,0,0,0,0,0,0,0,0,0,1.478,5,34,0 0,0,0,0,0,0,0,0,0,0,0,1.4,0,0,0,0,0.7,0,1.4,0,1.4,0,0,0,0,0,0.7,0,0,0,0.7,0,0,0,1.4,0,0,0,0,2.11,0,0,0,0,0,0,0,0,0,0.267,0.066,0,0,0,17.952,200,377,0 0,0,0.59,0,0.59,0,0,0,0,0,0,2.38,0,0,0,0,0,1.19,0.59,0,0,0,0,0,1.78,1.19,0,0.59,0,0.59,0.59,0,0,0,0.59,0.59,0,0,1.19,0,0,0,0,0,0,0,0,0,0,0.177,0,0.088,0,0,1.8,10,81,0 0,0.26,0.26,0,0.26,0,0,0.26,0,0,0.26,1.07,0,0,0,0,0.53,0,1.07,0,1.07,0,0,0,1.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.111,0,0,0.037,0,1.564,8,194,0 0,0,5.1,0,2.04,0,0,0,0,0,0,1.02,0,0,0,0,0,0,1.02,0,0,0,0,0,0,0,1.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.094,0,0,0,0,1.29,5,40,0 0.27,0,0.13,0,0.82,0,0,0,0,0,0,0.55,0.41,0,0,0,0,0,1.24,0,1.1,0,0,0,1.65,0.82,0.13,0.13,0.13,0.13,0.13,0.13,0,0.13,0.13,0.13,0.41,0,0,0.13,0,0.41,0.13,0,0.41,0,0,0.27,0.041,0.102,0.02,0.02,0,0,2.78,34,367,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,33.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0,0,0,0,0,0,0,0,0,0,4.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.454,5,27,0 0,0,0,0,0,0,0,0.39,0,0,0,0.39,0,0,0,0,0,0,0,0,0,0,0,0,0.39,0,0,0,0.39,0.39,0,0,0.39,0,0,0.39,0.39,0,0,0,0,0,0,0,0,0,0,0,0,0.049,0,0,0,0,2,38,124,0 
0,0,0,0,1.58,0.79,0,0,0,0,0,3.17,0,0,0,0,0,0.79,0,0,0,0,0,0,1.58,1.58,0,1.58,0,0,0.79,0,0,0,0.79,0,0,0,0,0,0,0,0,3.17,0,0,0,0,0,0.263,0,0,0,0,2.575,15,103,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0,0,0,0,0.68,0,0.68,0,0.136,0,0,0,0,4.341,46,178,0 0,0,0,0,3.27,0,0,0,0,0,0,0,0,0,0,0,0,0,1.63,0,0,0,0,0,0,0,1.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.91,0,0,0,0,0,0,0,0,0,0,0,0,1.545,5,17,0 0,0,0,0,0,0,0,0.4,0,0,0,0.81,0.4,0,0,0,0,0,1.22,0,0,0,0,0,2.86,2.45,0,0,0.4,0.4,0,0,0,0,0,0,0.4,0,0.4,0,0,0,0,0.4,0,0,0,2.45,0.126,0.063,0.063,0.063,0,0,1.611,12,116,0 0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,3.33,0,0,0,0,0,0,0,0,0,1.3,4,13,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.21,0,0,1.21,0,0,0,0,0,0,0,0,0,0,0,0,1.166,2,21,0 0,0,1.16,0,0,0,0,0,0,0,0,2.32,0,0,0,0,0,3.48,0,0,1.16,0,0,0,2.32,1.16,0,1.16,0,1.16,1.16,0,0,0,1.16,1.16,0,0,1.16,0,0,0,0,0,0,0,0,0,0,0.139,0,0.139,0,0,1.515,10,50,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.2,0,0,0,0.163,0,0,0,0,1.153,3,15,0 0,0.24,0,0,0.24,0,0,0.24,0,0.49,0,0,0,1.48,0,0,0,0,0.99,0,0,0,0,0,0.49,0,0.24,0,0,0,0.24,0,0,0,0.24,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,9.31,235,1108,0 0,0,0,0,0,0,0,0.5,0,0,0,1,0.5,0,0,0,0,0,1.5,0,0,0,0,0,1,0.5,0,0,0.5,0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0.5,0,0,0,2.5,0,0.075,0,0.075,0,0,1.483,6,89,0 0,0,0,0,2.08,0,0,0,0,0,0.83,0.83,0,0,0,0.83,0,1.66,2.91,0,0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.131,0,0.329,0,0.065,2.962,11,157,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.12,1.12,0,1.12,0,0,0,1.12,1.12,0,2.24,0,1.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.326,0,0,0,0,4.689,55,136,0 
0,0,0,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0,0.32,1.28,0,0.32,0,0,0,4.48,3.52,0.96,0.96,0.64,0.32,0.32,0.32,0,0.32,0.64,0.32,0.32,0,0,0.32,0,0,0.32,0,0.96,0,0,0,0.264,0.211,0.105,0.052,0,0.105,2.258,15,192,0 0,0,0,0,0,0,0,0,0,0,0,0.5,0,0,0,0,0,0.5,1.5,0,0.5,0,0,0,2.01,1.5,1,1,0,0,0,0,0,0,0.5,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.168,0.084,0.084,0,0.168,2.303,15,129,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.17,0,0,0,0,0,0,0,0,0,2.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.682,0,0,0,0,4.208,15,101,0 0,0,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.19,0,0,1.19,0,0,0,4.76,2.38,0,1.19,0,1.19,1.19,0,0,0,1.19,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0.286,0,0.286,0,0.143,2.724,13,79,0 0,0,0,0,0.73,0.24,0,0,0,0.24,0,0.49,0,0,0,0,0,0,2.46,0,0.49,0,0,0,1.23,0.73,1.47,0.49,0.49,0.49,0.49,0.49,0,0.49,0.49,0.49,0,0,0.49,0.49,0,0,0.73,0,0.73,0,0,0,0,0.287,0.041,0.041,0,0.041,1.792,12,224,0 0,0,0,0,0,0,0,0,0,1.56,0,0,0,0,0,0,0,0,1.56,0,0,0,0,0,1.56,1.56,0,3.12,0,0,0,0,0,0,0,1.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0.484,0,0,0,0,3,11,81,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,4.76,0,0,0,0,0,0,1.8,5,9,0 0.01,0,0.03,0,0.33,0.03,0,0,0.23,0.01,0,0.09,0,0,0,0.13,0,0.01,0.07,0,0.05,0,0,0,0.53,0.55,0.01,0,0,0,0,0,0.47,0,0.01,0.01,0.45,0.01,0,0,0,0,0.01,0,0,0,0.05,0,0.2,0.127,0.064,0,0.005,0,2.589,38,2349,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.32,0,0,0,0,0,0,0,0,0,0,0,2.32,0,0,0,0,0,0,0,0,0,2,4,16,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.6,4,8,0 0.05,0,0.1,0,0.16,0.05,0,0,0.48,0,0,0.16,0,0,0,0,0,0.05,0.21,0,0.16,0,0,0,0.64,0.69,0.05,0,0,0,0,0,0.26,0,0,0.16,0.75,0,0,0,0,0,0,0,0.05,0,0,0,0.172,0.195,0.062,0,0.015,0,2.758,47,1073,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 
0,0,0,0,0,0,0,0,0,1.36,0,2.73,0,0,0,0,0,0,1.36,0,0,0,0,0,1.36,1.36,0,2.73,0,0,0,0,0,0,0,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0.68,0,0,0,0,3.142,11,88,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.37,0,1.01,0,0,0,0.67,1.69,1.01,0.33,0,0.67,0,0,0,0,0.33,0,0.33,0,0,0,1.01,0,0.33,0,1.01,1.01,0,0,0,0.108,0,0,0,0,1.851,13,100,0 0,0,0.38,0,0.38,0,0,0,0,0,0,0.38,0.38,0,0,0,0,0,1.14,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.38,0,0,0,0,0.414,0,0.276,0,0,1.104,2,53,0 0.26,0,0,0,0,0.26,0,0,0.26,0,0,0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.35,0,0,0,0,0,0,0,0.26,0,0,0,0,0.52,0,0,0,0.033,0,0,0,0,2.921,61,111,0 0,0,3.44,0,3.44,0,0,0,0,0,0,0,0,0,0,0,0,0,3.44,0,0,0,0,0,0,0,3.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.609,0,0,0,0,1.5,4,15,0 0,0,0,0,0,0.13,0,0.27,0,0,0,0.54,0.13,0,0,0,0.68,0,0,0,0,0,0,0,1.9,0.13,0,0,0,0,0,0,0.13,0,0,0.54,0.27,0,0,0,0,0,0,0,0,0,0,0,0.161,0.143,0,0,0,0,2.296,21,473,0 0,0,0,0,0,0,0,0,0,0.84,0,0,0,0,0,0,0.84,0,1.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.84,0,0.84,0,0,0,0.84,0,0.84,0.84,0,0,0,0,0.137,0.413,0,0.137,3.052,13,116,0 0,0,0,0,0,0,0,0,0,0,0,0.42,0,0,0,0.42,0,0.42,2.12,0,0.42,0,0,0,1.7,0.42,0.85,0.85,0.42,1.7,0.42,0.85,0,0.85,0.42,0.42,0.85,0,0.85,0.42,0,0.42,0.85,0,0.85,0,0,0,0,0.403,0.134,0.134,0,0,2.202,17,163,0 0,0,0.26,0,0,0,0,0,0,1.05,0,1.31,0,0,0,0,0,0,0.26,0,0.26,0,0,0,0.26,1.05,0,0,0,0,0,0,0,0,0.26,0,1.05,0,0,0,0,0,0,1.05,0,0,0,0.26,0,0.439,0,0,0,0,2.724,50,237,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.14,0,0,0,0,0,0,0,0,0,1.333,3,8,0 0,0,0,0,0,0,0,0,0,1.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.363,0,0,0,0,1,1,10,0 0,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.542,0,0.217,0,0,1.34,14,67,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.5,4,10,0 
1.17,0,0,0,0,0,0,0,0,0,0,0,0,1.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.607,8,45,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.32,0,1.16,0,0,0,0,0,0,1.16,0,2.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.408,0,0,0,0,2.125,17,51,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.38,7.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.8,17,34,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.125,2,9,0 0,0,0,0,0,0,0,0,0,0,0,1.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.655,8,48,0 1.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.27,0,0,1.63,0,0,0,0,0.571,0,0,0,0,1.181,3,13,0 0,0,0,0,1.13,0,0,0,0,1.13,0,0,0,0,0,0,0,1.13,2.27,0,3.4,0,0,0,0,0,1.13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.13,0,0,0,0,0,0,0,0,0,0,0,0,1,1,11,0 0,0,0.87,0,0,0,0,0,0,0.87,0,1.75,0,0,0,0,0,0,0,0,0,0,0,0,0.87,0.87,0,1.75,0,0,0,0,0,0,0,0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0.608,0,0,0,0,2.941,11,100,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.26,0,0,0,0,0,0,0,0,0,1,1,6,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.34,0,0,0,0,0,0,0,0,4.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.735,0,0.735,0,0,2.571,10,18,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.5,0,0,0,0,0.465,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.555,3,14,0 0,0,1.33,0,0,0,0,0,0.66,0,0,0,0,0,0,0,0,0,0,0,1.33,0,0,0,2,1.33,2,0.66,0.66,0.66,0.66,0.66,0,0.66,0.66,0.66,0,0,0.66,0.66,0,0,0.66,0,0.66,0,0,0,0.3,0.2,0.1,0,0,0,1.979,12,97,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.81,0,0,0,0,0,0,0,0,0,0,0,0.175,0,0,0,0,0,1.873,8,118,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.46,0,0,0,0.202,0,0,0,0,2,11,82,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.34,0,2.17,0,0,0,0,0,2.17,0,0,0,0,0,0,0,0,0,0,0,4.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,8,0 0,0,3.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.222,3,11,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.25,6,9,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.666,6,8,0 0,0,0,0,1.31,0.65,0,0,0,0.65,0,1.31,0,0,0,0,0,0,3.28,0,0,0,0,0,1.31,0.65,1.97,0.65,0.65,0.65,0.65,0.65,0,0.65,0.65,0.65,0,0,0,0.65,0,0,0.65,0,0.65,0,0,0,0,0.35,0,0.116,0,0,2,12,88,0 2,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,2,0,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.888,29,53,0 0,0,0,0,0,0,0,0,0,0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.83,0,0,0,0,0,0.83,0,0,0,0,0,0,0,0,0,0,0,0.83,0,0,0,0,0.131,0.262,0,0,0,4.128,28,161,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,4,6,0 0,0,1.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.58,0,0,1.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.464,7,41,0 0,0,0,0,0,0,0,0,0,0,0,0,0,1.61,0,0,0,0,0,0,1.61,0,0,0,0,0,1.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.22,0,0,0,0,0,0,0.253,1.518,0,0.506,0,0,2.047,6,43,0 0,0,0,0,0,0,0,0,0,0,0,1.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.275,8,91,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.34,0.272,0,0,0,4.382,28,298,0 0,0,0,0,0,0.23,0,0,0,0.23,0,1.18,0,0,0,0,0.23,0,1.18,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.71,0,0,0,0,0,0,0,0,0,0,0.71,0,0,0,0,0.069,0,2.216,44,215,0 0,0,0,0,0,0,0.67,0,0,0,0,0,0,0,0,0,0,0,3.37,0,0.67,0,0,0,0,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.35,0,0,0,0,0,0,0,0,0,1.87,7,58,0 
0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.86,0,0,0,0,0.86,0,0,0,0,0,1.72,0,0,0,0,0,0,0,0,0,0,1.564,7,61,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.333,5,7,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.333,5,7,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.333,5,7,0 0,0,0,0,0,0,0,0,0,0.87,0,2.63,0,0,0,0,0,0,0.87,0,0,0,0,0,0.87,0.87,0,1.75,0,0,0,0,0,0,0,0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0.308,0,0,0,0,3.029,11,103,0 0,0,0.2,0,0,0.1,0,0.51,0,0.1,0,1.33,0.1,0.2,0,0,0.82,0,0,0,0,0,0,0,2.97,0,0,0,0,0,0,0,0.1,0,0,0.2,0.1,0,0,0,0,0,0,0,0,0,0,0,0.08,0.16,0,0,0.053,0,2.224,19,574,0 0,0,0.87,0.87,0.87,0.43,0,0,0,0,0,0,0,0,0,0,0,0.43,0.43,0,0,0,0,0,1.74,1.74,0,1.74,0,0,0,0,0,0,0.43,0,1.31,0,0.43,0,0,0,0.43,0,0.43,0,0,0,0,0.298,0.059,0.059,0,0,2.554,15,212,0 0,0,1.58,0,0,0,0,0,0,0,0,1.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.58,0,0,1.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.482,7,43,0 0,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0.72,0,0.36,0,0.36,0,0,0,0,0.36,0,0,0,0,0,0,0,0,1.646,12,107,0 0.39,0,0.39,0,0.59,0,0,0,0,0,0,0.19,0,0,0,0,0,0.19,0.59,0,0.19,0,0,0,1.39,0,0,0.39,0,0,0,0,0.59,0,0.39,0,0.19,0,0,0,0,0,0,0,0.39,0.19,0,0,0,0.191,0,0,0,0,2.566,34,349,0 0,0,0,0,0,1.01,0,0,0,0,0,0,0,0,0,0,0,0,2.02,0,1.01,0,0,0,2.02,1.01,3.03,1.01,1.01,1.01,1.01,1.01,0,1.01,1.01,1.01,0,0,0,1.01,0,0,0,0,1.01,0,0,0,0,0.476,0,0,0,0,1.875,11,45,0 0,0,0,0,0,0,0,0,0,0,0,0.28,0.28,0,0,0,0,0,0.57,0,0,0,0,0,0.86,0,0,0,0,0,0,0,0,0,0,0.28,0,0,0,0,0,0.28,0,0,0,0.28,0,0,0,0.216,0,0.043,0,0,1.3,7,52,0 0,0,0,0,1.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.072,0,0,0,0,1.486,10,55,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.61,0,0,4.83,0,0,0,0,0,0,0,3.22,0,0,0,0,0,0,0,0,0,1.61,0,0,0,0,0,0,0,1.61,0,0,0,0,0,0,0,0,0,1.705,7,29,0 1.16,0,0,0,0,0,0,0,0,0,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,1.16,1.16,0,0,0,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.176,0.176,0,0,0,0,1.476,7,31,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.32,0,0,0,0,0,0,2.32,0,0,0,0,0,0,0,0,0,0.344,0,0,3.25,17,52,0 2.27,0,2.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.401,0,0,0,0,1,1,5,0 0,0,0.55,0,0,0,0,0,0,0,0,0,0.55,0,0,0,0,0.55,3.31,0,0,0,0,0,2.2,1.65,0.55,0,0,0,0,0,0.55,0,0,0,0,0.55,0,0,0,0,0,0,0,0,0,0.55,0,0.27,0.18,0,0,0,3.596,34,187,0 0,0.77,0.77,0,0.77,0.38,0,0,0,0,0,1.16,0,0,1.16,0,0,0.38,3.48,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0.067,0,0,0,0,1.225,5,49,0 0.1,0.05,0.1,0,0.31,0.1,0,0,0,0.05,0,0.31,0.05,0,0,0.1,0.1,0,0.84,0.05,0.63,0,0,0.05,1.47,0.36,0,0.05,0,0.21,0.1,0,0,0,0.1,0.15,0.21,0,0.36,0,0,0,0,0,0.1,0,0,0.15,0.007,0.168,0,0.038,0.061,0.007,1.704,25,939,0 0,2.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,2.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.5,0,0,0,0,0,0,0,0,0,1.142,2,8,0 0,0,1.58,0,0,0,0,0,0,1.58,0,0,0,1.58,0,0,0,0,3.17,0,1.58,0,0,0,1.58,0,3.17,0,1.58,1.58,0,0,0,0,0,1.58,0,0,0,0,0,0,0,0,1.58,0,0,0,0,0,0,0,0,0,1.4,5,35,0 0,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0,0,0,0,1.31,0,1.31,0,0,0,1.31,0,2.63,0,1.31,1.31,0,0,0,0,0,1.31,1.31,0,0,0,0,0,0,0,1.31,0,0,0,0,0,0,0,0,0,1.75,15,42,0 0,0,0,0,0,0,0,0,0,0,0,1.4,0,0,0,0,0.7,0,1.4,0,1.4,0,0,0,0,0,0.7,0,0,0,0.7,0,0,0,0,0,0,0,0,2.11,0,0,0,0,0,0,0,0,0,0.266,0.066,0,0,0,18,200,378,0 0,0,0.65,0,0,0,0,0,0,0,0,2.61,0,0,0,0,0,0,0.65,0,0.65,0,0,0,1.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.65,0,0,0,0,0.109,0,0,0,0,0,1.411,4,48,0 0,0,0,0,2.17,0,0,0,0,0,0,0,0,4.34,0,0,0,0,2.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.367,0,0,0,0,1,1,8,0 
0,0,0,0,0,0.49,0,0,0.99,0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.49,0.99,0,0,0,0.49,0,2.48,0,0.49,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,0.062,0,0,0,0,2.824,29,161,0 0,0,0.53,0,0.53,0,0,0.53,0,1.07,1.07,0,0,0,0,1.07,0,0,3.76,0,1.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.53,0,0,0,0,0.294,0,0.367,0,0,2.161,21,67,0 0,0,0,0,0,0,0,0,0,0,0.71,0,0,0,0,0.71,0,0,5,0,0,0,0,0,0,0,1.42,0,0,0,0,0,0,0,0,0,0.71,0,0,0,0,0.71,0,0,0.71,0,0,0,0,0.121,0,0,0,0,1.387,5,43,0 0,0,0,0,0,0,0,0,0,0,0,0.96,0,0,0,1.92,0,0,3.84,0,0,0,0,0,0,0,2.88,0,0,0,0,0,0,0,0,0,0.96,0,0,0,0,0,0,0,0.96,0,0,0,0.343,0,0,0.171,0,0,1.291,5,31,0 0,0.56,0,0,0.56,0,0,0,0,0,0,1.7,0,0,0,0,1.7,0,1.13,0,0,0,0,0,0,0,0,0,1.13,0.56,0,0,0,0,0,0,0,0,0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.028,13,71,0 0.19,0,0,0,0,0,0,0,0,0,0,0.96,0.38,0,0,0,0.58,0,0,0,0,0,0.38,0,3.48,0,0,0,0,0,0,0,0,0,0,0.19,0.19,0.19,0,0,0,0,0,0,0,0,0,0,0.027,0.108,0,0,0.108,0,2.634,23,303,0 0,0,0,0,0,0,0,0,0,0,0,3.17,0,0,0,0,0,0,0,0,0,0,0,0,1.58,1.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.216,0,0,0,0,1.92,6,48,0 0,0,0,0,0,0,0,0,0,0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.227,0.227,0,0,0,4.043,28,186,0 0,0,0,0,0,0,0,0,0,0,0,5.88,0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.65,0,0,0,0,0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.211,9,115,0 0,0,1.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.03,3.03,0,3.03,0,0,0,0,0,0,0,0,0,0,1.51,0,0,0,0,0,0,0,0,0,0,0.428,0,0,0,0,2.321,22,65,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.14,0,0,0,0,0,0,0,0,0,1.428,4,10,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.69,4.34,4.34,4.34,4.34,4.34,4.34,4.34,0,4.34,4.34,4.34,0,0,0,4.34,0,0,0,0,0,0,0,0,0,1.162,0,0,0,0,3.333,11,30,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.32,0,0,2.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.32,0,0,0,0,0,0,0.416,0.416,0,0,0,0,1,1,9,0 
0,0,1.58,0,1.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.05,0,0,0,0,2.11,0,0,0,0,1.58,0,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0.209,0,0,0,0,1.78,7,73,0 0.16,0,0,0,0.66,0,0,0,0,0,0,0.66,0,0,0,0,0,0,0.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0.16,0,0.16,0.33,0,0,0,0,0.118,0.047,0.023,0,0,0,1.983,19,240,0 0.12,0.12,0.12,0,0.12,0.12,0,0.37,0.12,0,0.12,0.74,0,0,0,0,0.24,0.12,0,0.24,0,0,0,0,0.49,0,0,0.12,0.12,0,0,0,0,0,0,0.98,0.24,0,0.12,0,0,0.49,0,0,0,0.74,0,0,0.017,0.089,0,0,0.017,0,1.403,18,456,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.77,0,0,0,0.442,0,0,0,0,0,1.363,3,15,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.32,2.32,0,0,0,0,0.409,0,0,0,0,1,1,10,0 0.24,0,0.12,0,0,0,0,0.36,0,0.12,0,1.09,0.12,0,0,0,0.6,0,0,0,0,0,0.12,0.12,3.63,0,0,0,0,0,0,0,0,0,0,0.12,0.12,0,0,0,0,0,0,0,0,0,0,0,0.016,0.05,0,0,0.016,0,2.309,25,425,0 0,0,0,0,0,0,0,0,0.66,0,0.66,0,0,0,0,1.98,0,1.98,1.98,0,0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.44,0,0,1.382,5,47,0 0,0,0,0,0.27,0,0,0.27,0,0,0,0.27,1.91,0,0.27,0.27,0,0.54,0,0,0,0,0,0,0.27,0.27,0,0.54,0,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0.039,0.117,0,0,0,0,2.52,55,189,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.186,0.186,0,0,0,3.862,28,112,0 0,0,0,0,0,0,0,0,0,0,0.56,0.56,1.12,0,0,0,0,0,2.82,0,0,0,0,0,1.12,0.56,0,0,0,0.56,0.56,0,0,0,0,0,0.56,0,0,0,0,0,0,0,0,0,0,0,0.183,0.367,0,0,0,0,1.583,7,57,0 0,0,0,0,0,0,0,0,0,0,0,1.4,0,0,0,1.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.4,0,1.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.393,9,46,0 0,0,0,0,0,0,0,0,0,0.29,0,0.59,0,0,0,0,0,0,0,0,0,0,0,0,1.18,0.59,0,0.59,0,0.88,0,0,0,0,0.59,0,0.59,0,0,0,0,0,0,0,0,0,0,0.29,0,0.273,0,0,0,0,3.317,75,282,0 0,0,0,0,0,0,0,0,0,0.78,0,0,0,0,0,0,0,0,1.56,0,0,0,0,0,0.78,0.78,0,1.56,0,0,0,0,0,0,0,0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0.278,0,0,0,0,2.472,11,89,0 
0,0,0,0,0,0.76,0,0,0,0,0,0.76,0,0.76,0,0,0,0,1.53,0,0.76,0,0,0,0,0,0.76,0,0,0,0,0,0.76,0,0,0,0,0,0,0,0,0.76,0,0,0,0,0,0,0.133,0.133,0,0,0,0,1.269,4,33,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.16,0,0,4.16,0,0,0,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0.704,0,0,0,0,1.428,4,10,0 0,0,0,0,0,0,0,0,0,0,0,0.86,0,0,0,0,0,0,1.73,0,0.86,0,0,0,6.08,3.47,0.86,0.86,0.86,0.86,0.86,0.86,0,0.86,0.86,0.86,0.86,0,0,0.86,0,0,0.86,0,0.86,0,0,0,0,0.267,0.133,0.133,0,0,2.607,13,73,0 0,0,0,0,2.85,0,0,0,0,0,0,0,0,2.85,0,0,0,2.85,0,0,2.85,0,0,0,0,0,2.85,0,0,0,0,0,2.85,0,0,2.85,0,0,0,0,0,0,0,0,2.85,0,0,0,0,0,0,0,0,0,3.8,29,38,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0.925,0,0,1.3,4,13,0 0,0,0,0,0.82,0,0,0,0,0.82,0,0,0,0,0,0,0,0,1.65,0,0.82,0,0,0,0,0,0.82,0,0.82,0.82,0,0,3.3,0,0,0,0,0,0,0,0,0.82,0,0,1.65,0,0,0,0,0.301,0.15,0,0,0,1.678,5,47,0 0.07,0,0.31,0,0,0,0,0,0.71,0,0,0.31,0,0,0,0,0,0,0.31,0,0.15,0,0,0,0.55,0.63,0.07,0,0,0,0,0,0.79,0,0,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0.096,0.234,0.064,0,0.021,0,3.617,42,890,0 0.05,0,0.11,0,0.05,0.02,0,0,0.35,0,0,0.14,0,0,0.02,0,0,0.02,0.11,0,0.08,0,0,0,0.5,0.53,0.02,0,0,0,0,0,0.14,0,0,0,0.38,0,0,0,0,0,0,0,0,0,0,0.02,0.203,0.182,0.049,0,0.008,0,2.95,52,1617,0 0,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0,0,0,0,0,0,0,2.18,0,0,0,0,0,0,0,0,0,0,0,0.24,0,0,0,0,0,0,0,0.24,0,0,0,0.067,0.067,0,0,0,0,1.98,59,204,0 0.03,0,0.03,0.13,0.06,0.03,0,0,0.32,0,0,0.09,0,0,0,0,0,0.03,0.13,0,0.09,0,0,0,1.4,1.44,0.03,0,0,0,0,0,0.09,0,0,0.03,1.27,0.03,0,0,0,0,0,0.06,0,0,0,0,0.226,0.235,0.181,0,0.009,0,2.754,34,2688,0 0.06,0,0.06,0,0,0.06,0,0,0.54,0,0,0.18,0,0.12,0,0,0,0.06,0.24,0,0.18,0,0,0,1.14,1.2,0.06,0,0,0,0,0,0.18,0,0,0,0.9,0,0,0,0,0,0,0,0.12,0,0,0,0.115,0.221,0.115,0,0.017,0,3.015,38,1345,0 0,0,0,0,0,0,0,0,0,1.05,0,1.05,0,0,0,1.05,0,0,1.05,0,0,0,0,0,0,1.05,3.15,0,0,0,0,0,0,0,0,0,1.05,0,1.05,0,0,0,1.05,2.1,1.05,0,0,0,0,0.677,0,0.338,0,0,1.468,12,47,0 
0,0,0,0,0,0,0,0,0,0,0,3.22,0,0,0,0,0,0,0,0,0,0,0,0,1.61,1.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.222,0,0,0,0,1.958,6,47,0 0,0,0.26,0,0,0,0,0,0,0,0,0.26,0.53,0,0,0,0.53,0,0.53,0,0,0,0,0,0.8,0,0,0,0,0,0,0,0,0,0,0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0.043,0,0,0.043,0,1.487,4,61,0 0.01,0.03,0.2,0,0.09,0.03,0,0,0.05,0.05,0,0.47,0.03,0.17,0,0.09,0.18,0.13,0.35,0.03,0.15,0,0,0.03,0,0,0.05,0.03,0.01,0,0,0,1.47,0,0,0.11,0.9,0,0,0.03,0,0.07,0,0.13,0.05,0.18,0,0.15,0.038,0.263,0.005,0.016,0,0.005,2.23,102,3168,0 0,0,0,0,0,0,0,0,0,1.01,0,0,0,0,0,0,1.01,0,0,0,0,0,0,0,2.02,1.01,2.02,1.01,1.01,1.01,1.01,1.01,0,1.01,1.01,1.01,1.01,0,1.01,1.01,0,0,1.01,4.04,1.01,0,0,0,0,0.814,0,0.162,0,0,2.125,12,68,0 0,0,0,0,0,0,0,0,0,3.33,0,3.33,0,0,0,0,0,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.156,0,0,2.333,10,21,0 0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,1,1,6,0 0,0,0,0,0,0,0,0,0,0,0,1.07,0,0,0,0,0,1.07,2.15,0,0,0,0,0,2.15,3.22,0,2.15,0,0,0,0,0,0,2.15,0,0,0,0,0,0,2.15,0,0,0,0,0,0,0,0,0,0,0,0,1.718,11,55,0 0,0,1.47,0,0,0,0,0,0,0,0,2.94,0,0,0,0,0,0,0,0,1.47,0,0,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.928,16,41,0 0,0,0,0,0,0,0,0,0,0,0,3.03,0,0,0,0,0,0,1.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.595,0,0,1.5,4,15,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.763,0,0,2.222,8,20,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.22,6.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.44,0,0,0,0,0,0,0,0,0,0,0,0,1.764,6,30,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.11,0,0,0,0,0,0,0,0,0,0,0,0,1,1,7,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.11,0,0,0,0,0,0,0,0,0,0,0,0,1,1,6,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 
0,0,0.86,0,0,0,0,0,0,0,0,0.86,0,0,0,0,0,0.86,0.86,0,2.58,0,0,0,1.72,0.86,0,0.86,0,0.86,0.86,0,0,0,0.86,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0.11,0,0,1.812,10,58,0 0,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.74,0,0,0,0,0,0,2.99,1.87,0,1.87,0,0.74,0.74,0,0,0,1.49,0.74,0.37,0,0,0,0,0,0.37,0,0,0,0,0,0,0.131,0.043,0.043,0,0,2.468,15,195,0 0,0,2.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.04,0,0,0,0,0,0,4.08,2.04,0,2.04,0,2.04,2.04,0,0,0,2.04,2.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0.189,0,0,0,0,1.681,10,37,0 0,0,0,0,0,0,0,0,0,1.23,0,0,0,0,0,1.23,0,0,1.23,0,0,0,0,0,0,1.23,2.46,0,0,0,0,0,0,0,0,0,1.23,0,1.23,0,0,0,1.23,2.46,1.23,0,0,0,0,0.77,0,0.192,0,0,1.535,12,43,0 0,0,0,0,0,0,0,0,0,0,0,0,0,2.94,0,0,0,0,2.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.476,0,0,0,0,1,1,7,0 0,0.66,0,0,0,0,0.66,0,0,1.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,6,1.33,0,0,2,0,0,0,0,0.66,0,0.66,0,0,0,2,0,0,0,0,0,0,0,0,0.228,0,0,0,0,2.673,18,139,0 0,0,0,0,0,0,0,0,0,0,0,0.81,1.62,0,0,0,0,0,1.62,0,0,0,0,0,0,0,0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.81,0,0,0,0,0,0,0,0,0,1.125,3,18,0 0,0,0.68,0,0,0,0,0,0,0.68,0,2.06,0,0,0,0,0,0,0.68,0,0,0,0,0,1.37,0.68,0,1.37,0,0,0,0,0,0,0,0.68,0,0,0,0,0,0,0,0,0,0,0,0,0.114,0.342,0,0,0,0,2.727,11,90,0 0,0,0,0,0,0,0,0,0,0,0,1.15,0,0,0,0,0,0,2.89,0,0.57,0,0,0,0,0,0,0,0,0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0.57,0,0,0,0,0,0,0,0,0,0,1.379,5,40,0 0,0,1.03,0,2.06,0,0,0,0,0,0,0,0,0,0,0,0,0,1.03,0,0,0,0,0,0,0,0,0,1.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.03,1.03,0,0,0,0.373,0.186,0,0,0,0,1.529,5,26,0 0,1.25,0,0,0,0,0,0,0.62,1.25,0,0,0,0,0,0,0,0,1.87,0,1.25,0,0,0,1.87,1.87,1.25,1.87,0.62,1.87,0.62,0.62,0,0.62,1.87,0.62,1.87,0,0.62,0.62,0,0,0.62,0,1.87,0,0,0,0,0.475,0.57,0,0,0,2.238,12,141,0 0,0,0.94,0,0,0,0,0,0,0,0,0.94,0,0,0,0,0,0,1.89,0,0.94,0,0,0,1.42,0.94,0,0,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.24,13,112,0 0,0,0.18,0,0.09,0,0,0,0,0,0.27,2.31,0.27,0,0,0,0.27,0,0,0,0.18,0,0,0,3.06,0,0,0,0,0,0,0,0.27,0,0,0,0.18,0,0,0,0,0,0,0,0,0,0,0,0.143,0.117,0,0,0.039,0,2.313,24,590,0 
0,0,0,0,0,0,0,0,0,0,0,1.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.342,8,89,0 0.84,0,0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.69,0,1.69,0,0,0,1.69,0,0.84,0,0,0,0,0.136,0,0,0,0,1.619,12,68,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,18.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.25,0,0,0,0,0,0,0,0,0,1,1,6,0 0,0,0,0,1.11,0,0,0.55,0,3.91,0,0,0,0,0,0.55,0,0,1.67,0,2.23,0,0,0,0.55,0.55,0,0,0,2.79,0,0,0,0,0,0,0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.085,0,0,1.142,5,48,0 0,0,1.04,0,1.04,0,0,0,0,0,0,1.04,0,0,0,0,0,0,0,0,0,0,0,0,2.08,2.08,0,2.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.313,0,0,0,0,2.108,22,78,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.54,0,0,0,0,0,0,0,0,0,4.54,0,4.54,0,0,0,0,0,0,0,0,0,0.675,1.351,0,0,0,0,3.7,26,37,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.523,0,0,2.272,9,25,0 0,0,0.5,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,1.25,0,0,0,0.5,0.25,0,0,0,0,0,0,0,0,0,0,0.25,0,0,0,0,0,0,0,0,0,0,0,0,0.083,0,0,0,0.041,1.732,11,227,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0.06,0.03,0.46,0,0.13,0.06,0,0.03,0.03,0.16,0.19,0.59,0.06,0.03,0,0.19,0,0,1.23,0.19,1.06,0,0,0,1.53,0.23,0,0.06,0,0.06,0.36,0,0,0,0.13,0.09,0.13,0.16,0.19,0,0,0,0,0.06,0.03,0,0,0.13,0.024,0.231,0,0.019,0.009,0.004,1.885,25,1738,0 0,0,0.28,0,0.28,0,0,0,0,0,0,1.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.28,0,0.56,0,0,0,0,0,0,0,0,0,0,0,0,0.18,0,0,0,0,1.523,11,160,0 0.52,0,1.05,0,0.52,0,0,0,0,0,0,3.17,0,0,0,0,0,0,0.52,0,0,0,0,0,2.64,2.64,0.52,0,0,0,0,0,0,0,0,0.52,1.05,0,2.64,0,0,0.52,0.52,1.58,0.52,0,0,0.52,0.084,0.169,0.084,0,0,0,1.577,12,112,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.12,0,0,0,0,0,6.25,3.12,3.12,3.12,3.12,3.12,3.12,3.12,0,3.12,3.12,3.12,0,0,0,3.12,0,0,0,0,0,0,0,0,0,0.913,0,0,0,0,3.454,11,38,0 0,0,0,0,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0.58,0,0,0,0,0,0.58,0.58,0,0,0,0,0,0,0,0,0,1.16,1.74,0,0.58,0,0,0,0.58,0,0,0,0,1.74,0,0,0.118,0,0,0,6.428,98,315,0 0.16,0,0.67,0,0.33,0.16,0.33,0.83,0.33,0.67,0.33,1.51,0,0,0,0,1.67,0.33,2.01,1.67,3.85,0,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0,0,0,0,0,0,0.205,0,1.027,0.051,0,4.984,70,638,0 0,1.93,0.77,0,0.77,0,0,0.38,0,0,0.38,0,1.54,0,0,0.38,0.38,1.15,4.24,0,6.17,0,0,0,0,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.109,0,0,1.043,4,95,0 0,0,0.5,0,0,0,0,0,0,0,0,0,0,0,0,0.5,0,0,2.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.01,0,0,0,1.01,0,1.01,0,0,1.01,0,0,0.271,0.09,0.181,0.181,0,0,2,12,122,0 0,0,0.29,0,0.29,0,0,0,0,0,0,2.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0.187,0,0,0,0,1.484,11,147,0 0,0,0.13,0,0,0,0,0,0.13,0,0,0.27,0,0,0,0,0,0,0,0,0,0,0,0.13,0,0,0,0,0,0.13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.41,0,0,0,0,0.072,0,0.024,0,0,1.666,8,190,0 0,0,0,0,0,0,0,0,0.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.94,0,0,0,0.94,0.94,0,0,0,0.203,0,0,0,0,1.416,6,34,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.4,0,0,0,1.4,1.4,0,0,0,0.133,0,0,0,0,1.5,6,30,0 0,0,0.88,0,0.88,0,0,0,0,0,0,4.42,0,0,0,0,0,0,0.88,0,0,0,0,0,1.76,1.76,0.88,0,0,0,0,0,0,0,0,0.88,0.88,0,3.53,0,0,0.88,0,1.76,0,0,0,0.88,0.139,0.279,0,0,0,0,1.326,6,61,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,8,0 0,0,0.28,0,0,0,0,0.28,0.28,0.57,0,0.57,0,0,0,0,0,0,0,0,0.57,0,0,0,5.2,6.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.28,0,0.28,0,0,0.04,0,0,0,0,1.883,9,211,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,2,0 
0,0,0,0,0.9,0,0,0,0,0,0,2.7,0,0,0,0,0,0,4.5,0,0,0,0,0,0.9,0,0.9,0,1.8,0,0,0,0,0,0,0,0,0,0,0,0,1.8,0,0,0,0,0,0,0,0,0,0,0,0,1.45,4,29,0 0,0,0.52,0,0.13,0,0,0,0,0,0,2.22,0.65,0.13,0,0,0.13,0,0.13,0,0.13,0,0.13,0,2.09,0,0,0,0,0,0,0,0.78,0,0,0.26,0.26,0,0,0,0,0,0,0,0,0,0,0,0.018,0.073,0,0,0,0,2.556,23,317,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.12,0,0,0,0,0,0,0,0,2.56,2.56,0,0,0,0,0,0,0,2.56,0,0,2.56,0,0,0,0.375,0,0,2.25,7,36,0 0,0,0,0,0.3,0,0,0,0,0,0,0.3,0.91,0,0,0,0.6,0,1.21,0,0.3,0,0,0,0,0,1.21,0,0,0,0,0,0,0,0,0,0.3,0,0.3,0,0,0.6,0.3,0,0.6,0,0,0,0,0.042,0.042,0.042,0.042,0,1.183,13,168,0 0.43,0,0,0,0,0,0,0,0,0,0,0,0.86,0,0,0,0,0,4.34,0,0.86,0,0,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.374,0,0,0.124,0,1.974,18,77,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.336,0,0,0,0,3.38,7,71,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,6,0 0,0,0,0,0,0,0,0,0,0,0,3.52,0,0,0,0,0,0,0,0,2.35,0,0,0,0,1.17,0,0,0,1.17,0,0,0,0,0,0,0,0,1.17,0,0,2.35,0,0,0,0,0,0,0,0,0,0,0,0,2,12,54,0 0.18,0,0.18,0,0,0,0,0,0.94,0,0,0.37,0,0,0,0.09,0,0,0.37,0,0.18,0,0,0,0.56,0.66,0.09,0,0,0,0,0,0.37,0,0,0,0.28,0,0,0,0,0,0,0,0,0,0,0,0.18,0.167,0.051,0,0.025,0,4.434,87,909,0 0.11,0,0.22,0,0,0,0,0,1.02,0,0,0.34,0,0,0,0,0,0,0.45,0,0.22,0,0,0,0.56,0.68,0.11,0,0,0,0,0,0.34,0,0,0.11,0.22,0,0,0,0,0,0,0,0,0.11,0,0,0.076,0.198,0.03,0,0.03,0,4.211,81,678,0 0.06,0,0.06,0,0.19,0,0,0,0.73,0,0,0.19,0,0,0,0,0,0.06,0.26,0,0.19,0,0,0,0.79,0.86,0.06,0,0,0.06,0,0,1.06,0,0,0,0,0,0,0,0.13,0,0,0,0,0,0.06,0,0.363,0.143,0.057,0,0.019,0,2.716,37,880,0 0.05,0,0.45,0,0.15,0.1,0,0,0.55,0,0,0.15,0,0,0,0,0,0.05,0.2,0,0.15,0,0,0,0.65,0.7,0.05,0,0,0,0,0,1.16,0,0,0,0.81,0.05,0,0,0,0,0,0,0,0,0,0,0.203,0.195,0.05,0,0.014,0,2.88,45,1080,0 
0,0,0,0,0,0,0,0,0,1.21,0,1.21,0,0,0,0,0,0,0,0,0,0,0,0,1.21,1.21,0,2.43,0,0,0,0,0,0,0,1.21,0,0,1.21,0,0,0,0,0,0,0,0,0,0,0.441,0,0,0,0,3.193,11,99,0 0,0,0,0,5.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,7,12,0 0.1,0.1,0.1,0,0.21,0.1,0,0,0.1,0.31,0,0.84,0.21,0,0,0.1,0,0.21,1.78,0,0.63,0,0,0.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.21,0,0,0.035,0.177,0.035,0.07,0.053,0,1.744,29,417,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.88,0,0.88,0,0,0,0,0,1.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.88,0,0,0,0,0.139,0.139,0,0,0,1.763,9,67,0 0,0,0,0,0.37,0,0,0,0,0,0,0.37,0.37,0,0,0,0.75,0,1.12,0,0.37,0,0,0,0,0,0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.75,0,0,0.37,0,0,0,0,0.054,0,0,0.054,0,1.066,4,128,0 0.1,0,0,0,0,0.1,0,0,0,0,0,1.66,0.1,0.31,0,0,0.41,0,0,0,0,0,0,0,2.07,0,0,0,0,0.1,0,0,0,0,0,0.1,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0.117,0,0,0.043,0,2.272,24,525,0 0,0,3.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,0,0.595,0,0,0,0,1.25,2,10,0 0,0,0.24,0,0,0,0,0,0,0.48,0,0.24,0,0,0,0,0.48,0.24,0.72,0.48,0.72,0,0,0,1.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.034,0,0,0,0,3.202,87,285,0 0.29,0,0,0,0,0,0,0,0,0.29,0,0,0,0,0,0.29,0.29,0,2.38,0,0.29,0,0,0,1.19,0.59,2.38,0.29,0.29,0.29,0.29,0.29,0,0.29,0.29,0.29,0.89,0,0.89,0.29,0.29,0,0.89,0,0.59,0.29,0,0,0,0.196,0.049,0.344,0,0.049,1.843,17,212,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.25,2,10,0 0,0,0,0,0,0,0,0,0,2.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.04,0,0,0,0,4.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.896,2.142,8,60,0 0,0,0.42,0,0,0,0.21,0,0,0.21,0,0.42,0,0,0,0,0,0.21,1.49,0,0.42,0,0,0,0.21,0.21,0,0,0,0,0,0,0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.034,0.139,0.034,0,0.069,0,3.151,37,312,0 0,0,0,0,0,0,0,0,0,2.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.04,0,0,0,0,4.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.896,2.142,8,60,0 
0,0,0,0,0.15,0,0,0.07,0.07,0.07,0,0.83,0.15,0,0,0,0.15,0,0,0,0,0,0.07,0,4.42,0,0,0,0,0,0,0,0.07,0,0,0.22,0.07,0,0,0,0,0,0,0,0,0,0,0.07,0.068,0.049,0,0,0.009,0,2.356,27,707,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.735,0,0,0,0,3,7,48,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0,0,0,0,0,0.054,0.353,0,0,0,4.438,28,1589,0 0,0,0,0,0,0,0,0,0,18.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,39,40,0 0.08,0,0.16,0,0.58,0.08,0,0,0.08,0,0,2.24,0.08,0.16,0,0,0.08,0,0.99,0,0.74,0,0.08,0,0.74,0.66,0,0,0.82,0.99,0,0,0,0,0,0.08,0.08,0,0.16,0,0,0.24,0,0,0.08,0,0,0.08,0.08,0.011,0,0,0,0,2.1,60,500,0 0,0,0,0,1.56,0,0,0,0,0,0,0,0,0,0,0,0,0,3.12,0,0,0,0,0,3.12,1.56,1.56,1.56,1.56,1.56,1.56,1.56,0,1.56,1.56,1.56,0,0,0,1.56,0,0,0,1.56,0,0,0,0,0,0.53,0,0,0,0,2.533,11,38,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.555,0,0,0,0,1.647,4,28,0 0,0,0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.28,0,0,0,0,0,0.28,0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.28,0,0,0.28,0.28,0.28,0,0.28,0,0.043,0,0,0,0,1.641,8,110,0 0,0,0,0,0,0,0,1.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.7,1.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.196,0,0,0,0,1.294,3,22,0 0,0,0.21,0,0,0.21,0,0,0,0.21,0.21,1.28,0,0,0,0,0.21,0,1.28,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0.097,0,2.451,55,255,0 0,0,0,0,1.16,0,0,0,0,1.16,0,2.32,0,0,0,0,0,0,0,0,0,0,0,0,1.16,1.16,0,2.32,0,0,0,0,0,0,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0.39,0,0,0,0,3.379,11,98,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.25,0,0,0,0,0,0,0,0,0,0,2.714,10,38,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.54,0,0,0,0,0,0,0,0,0,0,0,0,4.54,0,0,0,0,0,0,0,0,0,0.925,5.857,16,41,0 
0.86,0,0,0,0,0,0,0,0,0,0,2.6,0,0,0,0,0,0,2.6,0,0,0,0,1.73,0,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.86,0,0,0,0.86,0.561,0.14,0,0,0,0,1.352,6,23,0 0,0,0.24,0,0,0,0,0,0,0.48,0,0.24,0,0,0,0,0.48,0.24,0.72,0.48,0.72,0,0,0,1.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.035,0,0,0,0,3.179,87,283,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0.62,0,0.31,0,0,0,0,0.31,1.24,0,0,0,0,0.31,0,0,0.31,0,0,0,0,0,0,0,0,0,0,2.607,11,219,0 0,0,0,0,1.19,0,0,0,0,0,0,1.19,0,0,0,0,0,0,0,0,0,0,0,0,3.57,3.57,0,0,3.57,0,0,0,0,0,0,0,1.19,0,0,0,0,3.57,0,0,1.19,0,0,0,0,0,0,0,0,0,1.733,14,52,0 0,0,0.71,0,0,0,0,0,0,0,0,1.43,0,0,0,0,0,1.43,0.71,0,2.87,0,0,0,2.15,0.71,1.43,0,0,1.43,0,0,0,0,2.15,0,0,0,0,0,0.71,0,0,0,0,0,0,0,0.08,0.322,0,0,0,0,3.9,27,156,0 0,0,1.31,0,0,0,0,0,0,0,0,2.63,0,0,0,0,0,1.31,0,0,0,0,0,0,2.63,1.31,0,1.31,0,1.31,1.31,0,0,0,1.31,1.31,3.94,0,0,0,0,0,0,0,0,0,0,0,0,0.15,0,0.15,0,0,1.906,10,61,0 0.1,0,0.21,0,0.31,0,0,0.1,0,0,0,0.63,0.21,0,0,0,0.53,0,0,0,0,0,0,0,3.82,0,0.1,0.1,0,0,0,0,0.42,0,0,0,0.1,0,0,0,0,0,0,0,0,0,0,0,0.215,0.043,0,0,0,0,2.221,18,511,0 0,0,0,0,2.63,0,0,0,0,0,0,0,0,0,0,0,0,0,2.63,0,2.63,0,0,0,5.26,2.63,2.63,2.63,2.63,2.63,2.63,2.63,0,2.63,2.63,2.63,0,0,0,2.63,0,0,0,2.63,0,0,0,0,0,0.793,0,0,0,0,3.076,11,40,0 0,0,0,0,5.55,0,0,0,0,0,0,5.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.11,0,0,0,0,0,0,0,0,0,0,0,0,11.11,0,0,0,0,0,0,0,0,0,0,0,0,1.222,3,11,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.63,0,0,0,0,0,0.24,0,0,0,0,2,7,48,0 0,0,0,0,0,0,0.62,0,0,0,0,0,0,0,0,0,0,0.62,1.25,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.62,0,0,0,0,0.049,0,0.247,0.049,0,3.732,90,321,0 0,0,1.78,0,0.25,0.51,0,0,0,0.25,0,0.76,0.25,0,0,0,0,0,1.27,0,0.76,0,0,0,1.27,1.02,0,0.25,0.51,0.51,0,0,0,0,0.25,0,0,0,0,0,0,0,0,0,0,0,0,0.76,0.186,0.26,0,0,0,0.037,1.794,10,183,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.77,0,2.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.77,0,1.55,0,0,0,0,0.247,0.247,0,0,0,0,1.611,12,29,0 
0,0,0.25,0,0,0,0,0,0,0,0,0.25,0.25,0,0,0,0,0,4.02,0,4.02,0,0,0,0.75,0.75,0,0,0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.25,0,0,0.111,0,0,0,0,4.446,29,209,0 0,0,1.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.61,0,0,0,0,0,1.61,0,0,0,1.61,1.61,0,0,0,0,0,0,0,0,0,0,0,4.83,0,0,0,0,0,0,0,0.283,0,0,0,0,1.666,4,20,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.86,0,0,0,0,0,3.73,0,1.86,0,0.93,3.73,0,0,0,0,0,0,0,0.93,0,0,0,0,0,0,0,0,0,0,0,0.173,0,0,0,0,1.9,5,38,0 0,0,1.96,0,0,0,0,0,0,0,0,0,0,1.96,0,0,0,0,1.96,0,1.96,0,0,0,0,0,0,0,1.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.142,11,30,0 0,0,0.59,0,0,0,0,0.59,0,0,0,2.99,0,0,0,0,0,0,1.19,0,0,0,0,0,1.19,0.59,0,0,0,0,0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.391,0,0,0,0,1.836,7,90,0 0,0,1.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.26,0,0,0,0,0,0,2.53,1.26,0,1.26,0,1.26,1.26,0,0,0,1.26,1.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0.141,0,0,0,0,2.28,10,57,0 0.1,0.05,0.35,0,0.15,0,0.05,0.05,0.05,0.2,0.15,0.61,0,0,0,0.1,0.05,0,0.71,0.05,0.46,0,0.05,0,1.84,0.3,0,0.1,0,0.15,0.15,0,0,0,0.1,0.25,0.15,0,0,0,0,0,0,0.05,0.05,0,0,0.15,0,0.153,0,0.029,0.021,0,1.871,25,1123,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.85,0,0,0,0,0,0,2.85,0,0,0,0,0,0,0,0,0,0.436,0,0,3.071,13,43,0 0,0,0,0,0,0,0,0,0,0,0,0,0.6,0,0,0,0,0,0.6,0,1.21,0,0,0,0,0,3.03,0,0,0,0,0,0,0,0,0,1.21,0,1.21,0,0,0,1.21,0,1.21,0,0,0,0,0,0.1,0,0,0,1.535,13,86,0 0.04,0.14,0.29,0,0.04,0.04,0,0.09,0,0.19,0.09,1.03,0,0,0,0,0,0.24,0.09,0,0.04,0,0,0,0.04,0,0,0,0,0.14,0,0,0,0,0,0.09,0.24,0,0,0,0,0,0.04,0,0,0,0,0,0.02,0.157,0.013,0,0,0.006,2.655,185,1787,0 0,0,0,0,0,0,0,0,0,0,0,1.25,0,0,0,0.62,0.62,0,0.62,0,0,0,0,0,1.87,0.62,0,0,0,0,0,0,0,0,0,0.62,0,0,0,0,0,0,0,0.62,0,0,0,0,0,0.103,0,0.103,0,0,1.347,4,31,0 0,0,0,0,0,0,0,0,0,0,0,0.76,0,0,0,0,0,0,0.76,0,1.53,0,0,0,10.76,0,0,0,0,0,0,0,0,0,0,0,0.76,0,0,0,0,0,0.76,0,0,0,0,0,0.377,0,0.094,0,0,0,4.807,31,274,0 
0,0,0,0,0.36,0,0,0,0,0,0,1.09,0,0,0,0,0.36,0.36,0,0,0,0,0,0,1.81,0.72,0,0.72,0,0.72,0,0,0,0,0.36,0,0.36,0,0,0,0,0,0,0.36,0,0,0,0.36,0,0.201,0,0.05,0,0,2.293,11,211,0 0,0,0,0,0,0,0,0,0,0,0,0.86,0,0.51,0,0,0,0,1.55,0,3.79,0,0,0,0.69,0.69,0,0,2.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.2,0,0.078,0.052,0,0,0,1.203,5,183,0 0,0.39,0,0,0,0,0,0,0.39,0,0,1.97,0.79,0,0,0,1.18,0,0.79,0.39,0,0,0,0,0.39,0,0,0,0,0,0,0,0,0,0,1.97,0,0,0,0,0,0,0,0,0,0,0.39,0,0,0.192,0,0.128,0,0,1.229,6,75,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.53,0,0,0,3.07,0,1.53,0,1.53,3.07,0,0,0,0.253,0.253,0,0,0,2.235,12,38,0 0,0,0.39,0,0,0,0,0.39,0,0.79,0,0.79,0,0,0,0,0,0,0,0,0,0,0,0,6.74,7.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.39,0,0,0.053,0,0,0,0,1.8,9,153,0 0,0,0,0,0,0,0,0,0,0.85,0,0.85,0,0,0,1.28,0,0,0.85,0,0.42,0,0,0,1.7,1.28,0.85,1.28,0.42,0.42,0.42,0.42,0,0.42,0.42,0.85,0.42,0,0,0.42,0,0,0.42,0,0.42,0,0,0,0,0.369,0.073,0,0,0,2.44,12,144,0 0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.345,0,0,0,0,1.666,6,55,0 0.27,0,0,0.55,0.13,0,0,0,0.13,0,0,1.1,0.55,0,0,0,0,0,0.13,0,0,0,0,0,0.13,0.13,0,0,0,0,0,0,0,0,0,0.13,0.27,0,0,0,0,0,0,0.27,0,0,0,0.13,0,0.04,0,0,0,0,2.496,16,322,0 0,0,0,0,0.62,0.62,0,0,0,0,0,2.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.25,0,0,0,0,0,0,0.224,0.224,0,0,0,0,2,5,54,0 0,0,0,0,2.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0.74,0,0.09,0,0,0,0,1.357,6,38,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.613,0,0,0,0,2,3,6,0 0.47,0,0.62,0,0,0,0,0,0.15,0,0,0.15,0,0,0,0,0,0,0.15,0.15,0,0,0.15,0,0,0,0,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.052,0,0.105,0.079,0.026,1.962,13,155,0 0,0,0.83,0,0.41,0,0,0,0,0,0,0,0.41,0,0,0,0,0,3.33,0,0,0,0,0,0,0,1.25,0,0,0,0,0.41,0,0.41,0,0,0,0,0,0,0,0,0,0,0.41,0,0,0,0,0.065,0,0.195,0,0,1.444,22,91,0 
2,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,2,0,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.888,29,53,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,4,0,0,0,0,0,0,0,0,0,1.111,2,10,0 0.05,0,0.15,0,0.1,0,0,0,0.52,0,0,0.15,0,0,0,0,0.1,0,0.21,0,0.1,0,0,0,0.47,0.52,0.05,0,0,0,0,0,0.15,0,0,0.05,0.36,0,0,0,0,0,0,0.1,0,0,0,0.05,0.164,0.171,0.068,0,0.013,0,3.591,35,1329,0 0,0,0,0,0.13,0.26,0,0,0,0.13,0,1.17,0.13,0.13,0,0,0.52,0,0,0,0,0,0,0,3.64,0.65,0,0,0.13,0.52,0,0,0,0,0,0.39,0.13,0,0,0,0,0,0,0,0,0,0,0,0.135,0.101,0,0,0,0,1.915,19,387,0 0.07,0,0.07,0,0,0,0,0.46,0.69,0,0,0.23,0,0,0,0,0.07,0.07,0.3,0,0.23,0,0,0,0.69,0.76,0.07,0,0,0,0,0,0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.142,0.175,0.032,0,0.021,0,3.007,60,791,0 0.05,0,0.05,0,0,0,0,0,0.53,0,0,0.23,0,0,0,0,0,0.05,0.23,0,0.17,0,0,0,0.65,0.71,0.05,0,0,0,0,0,0.53,0,0,0,0.71,0,0,0,0,0,0,0,0,0,0,0,0.115,0.173,0.041,0,0.016,0,2.853,47,896,0 0.03,0.05,0.03,0,0.09,0.05,0,0.01,0.16,0,0,0.09,0.01,0,0,0.01,0.01,0.01,0.07,0.01,0.05,0,0,0,0.56,0.58,0.01,0,0,0,0,0,1.43,0,0,0.05,0.49,0.03,0,0,0.03,0.01,0.01,0.07,0,0,0.01,0,0.221,0.129,0.063,0,0.005,0,3.364,66,3334,0 0,0.19,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0,0,0,0,2.86,0,0,0.38,0.19,0,0,0,0,0,0,0,0,0,0.19,0.19,0,0.199,0,0,0,0,2.204,9,205,0 0,0,0,0,0,0,0,0,0,0,0,9.67,0,0,0,0,0,0,3.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.564,0,0,0,0,1.692,5,22,0 0.41,0,0,0,0,0.82,0,0,0,0,0,1.23,0,0,0,0,0,0,0,0,0,0,0,0,3.3,0,0,0,0,1.65,0,0,0,0,0,0.82,0,0,0,0,0,0,0,0.41,0,0,0,0.41,0,0.198,0,0,0,0,1.569,7,113,0 0,0,0,0,0,0.63,0,0,0,0,0,0,0,0,0,0,0,0,0.63,0,0,0,0,0,3.16,0,0,0,0,0,0,0,0,0,0.63,0,0,0,0,0,0,0,0,0.63,0,0,0,0,0,0,0,0,0,0,1.56,9,64,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.16,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.714,6,24,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.125,17,25,0 
0,0,1.81,0,2.01,0,0,0,0,0,0.2,0,0,0,0,0.4,0,0.2,3.62,0,0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.4,0,0,0,0,0,0,0.301,0,0,1.576,17,164,0 0,0,0,0,0,0,0,0,0,0,0,1.72,0,0,0,0,0,0,0,0,0,0,0,0,3.44,1.72,1.72,1.72,1.72,1.72,1.72,1.72,0,1.72,1.72,1.72,0,0,0,1.72,0,0,0,0,0,0,0,0,0,0.487,0,0,0,0,2.533,11,38,0 0,0,0,0,0,0,0,0,0,0,0,1.72,0,0,0,0,0,0,0,0,0,0,0,0,3.44,1.72,1.72,1.72,1.72,1.72,1.72,1.72,0,1.72,1.72,1.72,0,0,0,1.72,0,0,0,0,0,0,0,0,0,0.487,0,0,0,0,2.533,11,38,0 0,0,0,0,0,0,0,0,0,0,0,1.67,0,0,0,0,0.41,0,0.83,0,0,0,0,0,1.25,0,0,0,0.83,0,0,0,0,0,0,0,0,0,0,0,0,0.41,0,0,0,0,0,0,0,0.139,0,0,0.069,0,1.804,6,74,0 0,0,1.19,0,0,0,0,0.59,0,0,0,0,0,0,1.19,0,0,0,2.97,0,1.19,0,0,0,1.78,1.19,2.38,0.59,0.59,0.59,0.59,0.59,0,0.59,0.59,0.59,0,0,0.59,0.59,0,0,0.59,0,1.19,0,0,0,0,0.197,0.098,0,0,0,2.203,12,119,0 0,0,0.36,0,0,0.09,0,0.09,0,0,0.09,0.36,0.09,0,0,0,0.27,0,0,0,0,0,0.09,0,3.2,0,0,0,0,0,0,0,0.27,0,0,0,0,0,0,0,0,0,0.09,0,0,0,0,0,0.1,0.176,0,0,0.125,0,2.356,21,641,0 0,0,1.12,0,0,0,0,1.12,0,0,0,0,0,0,2.24,0,0,0,3.37,0,2.24,0,0,0,0,0,1.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.12,0,0,0,0,0,0,0,0,0,2,5,44,0 0,0,0,0,0,0.74,0,0,0,0.74,0,0.37,0,0,0,0,0,0,2.61,0,1.49,0,0,0,0.37,0.37,0,0.74,0,0,0,0,0.37,0,0,0.37,0.37,0,0.37,0,0,0.37,0,0.74,0.37,0,0,0,0,0.405,0,0,0,0,2.28,11,130,0 0,1.52,0,0,0.76,0,0,0,0,0.76,0,0,0,0,0,0,0,0,0,0,0.76,0,0,0,0.76,0.76,1.52,0,0,0,0,0.76,0,0.76,0,0,1.52,0,0.76,0,0,0.76,0.76,0,0.76,0,0,0,0.121,0.365,0.121,0.487,0,0,1.956,22,90,0 0,0,1.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.6,3.2,0,2.4,0,0,0,1.6,0.8,2.4,0.8,0.8,0.8,0.8,0.8,0,0.8,0.8,0.8,0,0,1.6,0.8,0,0,1.6,0,1.6,0,1.6,0,0,0.25,0,0,0,0,2.065,12,95,0 0,0,0.56,0,0,0,0,0,0,1.12,0,0.56,0,0,0,0,0,0,1.12,0,1.12,0,0,0,0.56,0.56,0,1.12,0,0,0,0,0,0,0,0.56,0,0,0,0,0,0,0,0,1.12,0,0,0,0,0.311,0,0,0,0,2.486,11,92,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,2,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.769,0,0,0,0,3.75,9,15,0 0.3,0,0,0,0,0,0,0,0,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0,0,1.81,2.11,0,0,0,0,0,0,0.3,0,0,0.3,1.51,0,0,0,2.11,0,0,0,0,2.11,0,0,0.358,0.666,0.256,0,0,0,3.923,18,408,0 0,0,2.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.17,8.69,0,6.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.17,0,0,0,0,0,0,0,1.333,3,8,0 0,0,2.04,0,0,0,0,0,0,0,0,0,0,2.04,0,0,0,0,2.04,0,2.04,0,0,0,0,0,0,0,2.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.428,3,20,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.54,0,0,0,0,0,0,0,0,0,4.54,0,4.54,0,0,0,0,0,0,0,0,0,0.675,1.351,0,0,0,0,3.7,26,37,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.77,0,2.32,0,0,0,0,0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.77,0,0,0,0,0,0,0,0,0,0,0,0,1.156,3,37,0 0,0,0,0,0,0,0,0,0,0,0,1.61,0,0,0,0,0,1.61,3.22,0,1.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.181,3,13,0 0,0,0,0,3.84,0,0,0,0,0,0,0,0,0,0,0,0,0,3.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,7,0 0,0,0,0,0,0,0,0,0,0,0,1.58,0,0,0,0,0,0,0.79,0,0.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.79,0,0,0,0,0,0.124,0.124,0,0,0,0,1.8,8,45,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0,3.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.526,7,87,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14.28,0,7.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.14,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0.8,0,0,0.6,0,0,0.2,0,0.2,0,0,0,0,0,1.8,0,2.2,1.8,0,2.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0,0,0.2,0.2,0,0,0,0,0,0.06,0,0,2.55,43,227,0 0.05,0.02,0.18,0,0.02,0.02,0,0.02,0,0.07,0,0.38,0.02,0.2,0.02,0,0.33,0.12,0.31,0,0.12,0,0,0,0,0,0,0.02,0,0,0,0,2.17,0,0,0.2,0.59,0,0,0.1,0,0.07,0.02,0.28,0,0.15,0.05,0.05,0.011,0.144,0.003,0,0,0,2.255,55,1825,0 0,0,0,0,0,0,0,0,0,1.07,0,1.07,0,0,0,0,0,0,0.53,0,0,0,0,0,1.07,0,0,0,0,0,0,0,0,0,0,0,0.53,0,0,0,0,0,0.53,0,0.53,0,0,1.07,0,0.18,0,0.09,0,0,1.825,7,73,0 
0,0,0.24,0,0.24,0,0,0,0,0,0,0.72,0,0,0,0.48,0.48,0,1.44,0.24,0.48,0,0,0,1.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.823,143,464,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.7,5.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.4,0,0,0,0,0,0,0,0,0,0,0,0,1.4,3,7,0 0,0,0,0,0,0,0,0,0,1.27,0,1.27,0,0,0,0,0,0,0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.27,0,0.111,0,0.111,0,0,1.73,7,45,0 0,0,0,0,0,0,0,0,0,0,0,0,0,3.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.84,0,0,0,0,0.719,0,0,0,0,1.571,3,11,0 0,0,0,0,1.81,0,0,0,0,0,0,1.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.615,0,0,0,0,1.388,5,25,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,6,0 0,0,0,0,0,0,0,0,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.72,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.497,0.124,0,0,0.124,3.904,28,164,0 0,0,0,0,0,0,0,0,0,0,0,1.22,0,0,0,0,0,0,1.84,0,0.61,0,0,0,0,0,0,0,0,0.61,0,0,0,0,0,0,0,0,0,0,0.61,0,0,0,0,0.61,0,0,0,0.109,0.109,0.327,0,0,1.068,2,47,0 0,0,0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.28,0,0,0,0,0,0.28,0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.28,0,0,0.28,0,0.28,0,0.28,0,0.043,0,0,0,0,1.651,8,109,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.12,0,2.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.769,0,0,0,0.384,3.187,7,51,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.17,0,0.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.026,0.343,0,0,0.026,4.326,28,822,0 0,4.1,0,0,1.36,0,0,0,0,0,0,0,0,0,0,0,0,1.36,6.84,0,1.36,0,0,0,0,0,2.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0.21,0,0.42,0,0,1.387,7,43,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.54,0,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.176,0.264,0,0,0.088,4.25,28,238,0 0,0,0,0,0,0,0,0,0,0,0,1.56,0,0,0,0,0,0,0,0,0,0,0,0,3.12,3.12,1.56,3.12,3.12,0,0,0,0,0,1.56,0,0,0,0,0,0,3.12,0,0,0,0,0,0,0,0.515,0,0,0,0,3.285,24,69,0 
0,0,1.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.58,0,0,0,0,1.58,0,1.58,0,0,0,0,0,0,2.227,7,49,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.67,0,0,0,0,0,3.91,2.23,0.55,0.55,0.55,0.55,0.55,0.55,0,0.55,0.55,0.55,0.55,0,0,0.55,0,0,0.55,0,0.55,0,0,0,0,0.275,0.091,0.367,0,0,2.208,13,106,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.689,0,0,0,0,1.666,3,10,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.7,7.4,0,0,0,0,0,0,0,0,0,0,0,0,7.4,0,0,0,0,0,0,0,0,0,0,0,1.503,3.875,16,31,0 0.17,0,0.35,0,0.53,0,0,0.35,0,0,0.17,1.78,0.17,0.17,0,0,0.53,0,0,0,0,0,0,0,3.2,0,0,0,0,0,0,0,0,0,0,0,0.17,0,0,0,0,0,0,0,0,0,0,0,0.071,0.143,0,0,0,0,2.464,80,244,0 0,0,0,0,0,0.37,0,0,0,0,0,0.75,0.37,0,0,0,0.37,0,0,0,0,0,0,0,2.26,0,0,0,0,0,0,0,0,0,0,0,0.37,0,0,0,0,0,0,0,0,0,0,0,0,0.387,0,0,0.331,0,2.287,14,167,0 0,0,0,0,0,0,0,0,0,0,0,1.4,0,0,0,0,0.7,0,1.4,0,1.4,0,0,0,0,0,0.7,0,0,0,0.7,0,0,0,0,0,0,0,0,2.11,0,0,0,0,0,0,0,0,0,0.266,0.066,0,0,0,18,200,378,0 0,1.07,0,0,1.79,0.35,0,0,0,0.35,0,0,0.35,0,0,0,0,0,0.71,0,1.07,0,0,0,0.35,0.35,0.71,0,0,0,0,0.35,0,0.35,0,0,0.71,0,0.71,0,0,0,0.71,0,0.35,0,0,0,0,0.244,0.061,0.244,0,0,1.974,22,152,0 0,0,0,0,0,0,0,0,0,0,0,0,1.4,0,0,0,0,0,0,0,2.81,0,0,0,0,0,1.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.125,2,18,0 0.59,0,1.19,0,0.59,1.19,0,0,0,0,0,0,0,0,0,0,0,0,1.19,0,0.59,0,0,0,0.59,0,1.19,0,0,0.59,0,0.59,0,0.59,0,0,0.59,0,0,0,0,0,0,0,0.59,0,0,0,0,0.312,0,0.312,0,0,1.818,22,80,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.95,0,0,0,0,0,0,0,1.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.49,0,0.262,0,0,0,0,1.625,7,26,0 0.02,0.02,0,0,0,0,0,0,0.05,0.08,0.02,0.17,0,0,0,0,0.02,0.11,0.02,0,0,0,0,0,0.44,0.26,0,0,0.02,0.05,0,0,0,0,0,0.02,0.14,0,0,0,0.35,0,0.02,0,0.11,1.15,0,0,0.024,0.17,0,0.126,0,0,3.637,49,2015,0 0,0,0,0,0.4,0,0,0,0,0,0,2.04,0,0,0,0,0,0,2.44,0,0.81,0,0,0,1.22,3.26,0,0,0,0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0.4,0,0,0,0,0,0.137,0,0.068,0,0,2.282,21,89,0 
0,1.1,0,0,0.55,0,0,0,0,1.1,0,0,0,0,0,0,0,0.55,0,0,1.1,0,0,0,0.55,0.55,2.2,0,0,0,0,0.55,0,0.55,0,0,1.65,0,0.55,0,0,0,1.1,0,0.55,0,0,0,0.088,0.355,0.088,0.177,0,0,1.867,22,127,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.7,0,0,0,0,0,0,0.666,0,0,1.222,3,11,0 0,1.28,0,0,0.64,0,0,0,0,1.28,0,0,0,0,0,0,0,0,0,0,1.28,0,0,0,0.64,0.64,1.92,0,0,0,0,0.64,0,0.64,0,0,1.28,0,0.64,0,0,0,0.64,0,0.64,0,0,0,0.104,0.418,0,0.209,0,0,1.888,22,102,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.714,4,12,0 0,0,0.37,0,0.37,0,0,0,0,0,0,0.37,0,0,0,0.37,0,0,0.37,0,0.37,0,0,0,0.37,0.74,0,0,0,0,0,0,0,0,0,0.37,0.74,0,0,0,0,0,0.37,0,0,0,0,0,0,0.162,0,0,0,0,2.643,34,193,0 0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.37,0,2.23,0,0.37,0,0,0,1.11,0.37,1.86,0.37,0.37,0.37,0.37,0.37,0,0.37,0.37,0.37,0.74,0,0.74,0.37,0.37,0,0.74,0,0.37,0.37,0,0,0,0.192,0.064,0.32,0,0.064,1.923,17,177,0 0,0,0,0,0,0,0,0,0,0,0,0,0,1.19,0,0,0,1.19,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.38,0,0,0,0,0,0,0,0.22,0,0,0,0,2,12,34,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.06,0,0,0,0,0,2.06,1.37,1.37,0.68,0.68,0.68,0.68,0.68,0,0.68,0.68,0.68,0.68,0,0,0.68,0,0,0.68,0,0.68,0.68,0,0,0,0.216,0.108,0.216,0,0.108,2.754,25,157,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 
0,0,0,0,0,0,0,0,0,0,0,1.14,0,0,0,0,0,0,2.29,0,0,0,0,0,1.14,1.14,0,0,0,0,1.14,0,0,0,0,0,0,0,0,0,0,0,0,2.29,0,0,0,0,0,0,0,0.596,0,0.198,2.133,14,64,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.32,0,0,0,0,0,0,2.32,0,0,0,0,0,0,0,0,0,0.314,0,0,2.473,10,47,0 0,0,2.63,0,0,0,0,0,0,0,0,2.63,0,0,0,0,0,0,2.63,0,0,0,0,0,2.63,2.63,2.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,8,0 1.02,0,0,0,0,0,0,0,0,1.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.02,1.02,0,2.04,0,0,0,0,0,0,0,1.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0.531,0,0,0,0,2.967,11,92,0 0,0,0,0,0.6,0,0,0,0,0,0,0.6,0,0,0,0,0,0,3.01,0,0,0,0,0,0,0,1.8,0,0,0,0,0.6,0,0.6,0,0,0.6,0,0.6,0,0,0,0.6,0,1.2,0,0,0,0,0.085,0.085,0.085,0,0,1.735,22,92,0 1,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,2,0,0,1,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0.324,0,0.487,0,0,2.291,22,55,0 0.06,0,0.36,0,0.12,0,0,0.06,0.06,0,0.12,0.66,0,0,0,0.06,0.18,0.06,0.6,0,0.78,0,0,0,1.99,0.42,0,0,0,0.18,0.18,0,0,0,0.06,0,0.18,0,0,0,0,0.06,0,0,0,0,0,0.24,0.008,0.099,0,0,0.008,0.016,1.972,27,941,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,2.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.833,0,0.416,1.937,8,31,0 0,0,0,0,1.49,0,0,0,0,0,0,0,0,0,0,0,0,0,1.49,0,0,0,0,0,4.47,2.98,0,1.49,0,0,0,0,0,0,1.49,0,0,0,0,0,0,0,0,0,1.49,0,0,0,0,0.229,0,0,0,0,2.333,10,49,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,1.33,0,0,1.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.33,0,0,0,0,0,1.33,0,0,0,0,1.33,0,1.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.681,0,0.227,0,0,2.037,22,55,0 0,3.27,0,0,1.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.27,0,0,0,0,0,1.63,0,0,0,0,1.63,0,1.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.527,0,0.263,0,0,2.12,22,53,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.62,0,0.81,0,0,0,0.81,0,2.43,0,0,0,0,0,0,0,0,0,0.81,0,0.81,0,0,0,0.81,0,0,0,0,0,0,0.135,0,0.406,0,0.135,1.958,17,94,0 
0,0,0,0,0.7,0,0,0,0,0,0,0.7,0,0,0,0,0,0,3.52,0,0,0,0,0,0,0,1.4,0,0,0,0,0.7,0,0.7,0,0,0,0,0,0,0,0,0,0,1.4,0,0,0,0,0.1,0,0.1,0,0,1.682,22,69,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.52,0,0,0,0,0.793,0,0,0,0,1.25,2,5,0 0,0,0,0,0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0.61,0,0,0,0,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0.61,0,0,0,0,0,0,0,0.61,0,0,0,0.196,0.098,0,0.098,0,0,1.312,6,63,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 1.07,0,1.07,0,1.07,1.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.07,0,1.07,0,0,1.07,0,1.07,0,1.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.37,0,0.185,0,0,2.24,22,56,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,5,8,0 0,0.25,0.75,0,1,0.25,0,0,0,0,0.25,0.25,1.25,0,0,0.25,0,1.25,2.51,0,1.75,0,0.25,0,0,0,0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.042,0,0,1.204,7,118,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,5,8,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,0.34,0,0,0.68,0,0.68,0,0,0.34,0.34,0,0,0,0,0.34,0,1.36,3.42,0,2.73,0,0,0,0.34,0.34,0,0,0,0,0,0,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.048,0.048,0,1.411,15,96,0 0,0,0,0,0,0,0,0,0,0,0,1.25,0,0,0,0,0,0,1.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.25,0,0,0,2.5,1.25,1.25,0,1.25,2.5,0,0,0,0,0.209,0,0,0,3.3,13,66,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.56,0,0,0,2.56,2.56,0,0,0,0,0,0,0,0,3.333,7,20,0 
0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.85,0,0,0,0,0,0,0,0,0,1.736,8,33,0 0,0,0,0,0,0,0,0,0,0.72,0,0.72,0,0,0,0,0,0,4.37,0,0,0,0,0,1.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.107,0,0,0,1.48,9,37,0 0,0,0.36,0,0.72,0,0,0,0,0,0,0.36,0.18,0,0,0.36,0,0,1.44,0,0.36,0,0,0,0.36,0.9,0,0,0,0,0,0,0,0,0,0,0.36,0,0,0,0,0,0.18,0,0.18,0,0,0,0.026,0,0,0.107,0,0,2.988,51,263,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.47,0,2.94,0,0,0,1.47,0,1.47,2.94,0,0,0,0,1.47,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.484,0,0.484,0,0,2.5,15,65,0 0,0,0,0,0.09,0,0,0,0,0.09,0,0.18,0,0,0,0,0,0,0.37,0,0,0,0,0,2.43,1.21,0.28,0.09,0.09,0.18,0.09,0.09,1.12,0.09,0.09,0.18,1.12,0,0,0.09,0.56,0,0.18,0,0.09,2.24,0,0.09,0.123,0.479,0.095,0.013,0,0,3.625,51,1131,0 0,0,0,0,0.24,0,0,0,0,0,0,0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.7,0,0,0.24,0,0,0,0,0,0,0,0,0,0.24,0,0.24,0,0.195,0,0,0,0,2.192,35,239,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,0,0,0,0,0,0,0,0,0,1.78,1.78,0,0,0,0,1.78,0,0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,0,0,0,0,0,0,0.307,2.227,14,49,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,5,8,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,5,8,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.333,5,7,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 
0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,9,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.5,4,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.166,4,13,0 0,0,0,0,0,0,0,0,0,0,0,0,0,2.5,0,0,0,0,7.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.5,0,0,0,0,0,0,0,0,0,2.142,5,15,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0.23,0,0,0.23,0,0,0,0,0.47,0,0.23,0,1.67,0,0,0,0,1.19,0,0,0,0,0,0.47,0,0.23,0,0,0,0.23,0,0,0,0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.158,0,10.036,235,1094,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,7,0 0,0.12,0.12,0,0,0,0,0.12,0,0,0,0.12,0,0,0.12,0,0,0,0.12,0,0,0,0,0,0.9,0.38,0.38,0,0,0,0,0,0,0,4.11,0,0,0,0,0,0,0.9,0,0,0,0.12,0.12,0,0,0.149,0,0,0,0.074,5.264,53,1232,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.33,0,0,0,0,0,0,1.5,3,12,0 0,0.14,0.14,0,0,0,0.14,0.14,0,0,0,0.14,0,0,0.14,0,0,0,0.28,0,0,0,0,0,1.13,0.42,0.28,0,0,0,0,0,0,0,2.69,0,0,0,0,0,0,0.84,0,0,0,0.14,0.14,0,0,0.16,0,0,0,0.072,5.331,80,1029,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,4.4,16,22,0 
0,0,0.56,0,0.08,0.16,0,0,0,0.16,0,0,0,0.24,0,0,0,0,0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0.08,0.08,0,0,0,0,0,0,0,0,0,0,0,1.54,0.164,0.505,0,0.01,0.021,0,2.729,55,1122,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.04,0,0,0,2.04,2.04,2.04,2.04,2.04,2.04,2.04,2.04,0,2.04,2.04,2.04,0,0,0,2.04,0,0,0,0,0,0,0,0,0,0.536,0,0,0,0.268,2.529,11,43,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0 0.38,0,0.64,0,0.12,0.25,0,0,0,0,0,0.25,0,0,0,0,0.25,0.12,1.03,0,0.38,0,0,0,0.9,0.38,0.25,0.25,0.64,0.25,0,0,0,0,0.12,0.51,0,0,0,0,0,0.12,0,0.25,0,0,0,0.25,0,0.082,0,0.02,0,0,1.491,11,267,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.97,0,0,0,0.97,0.97,0.97,1.94,0,0.97,0,0,0,0,0.97,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.261,0,0,0,0,2.03,11,67,0 0.44,0,0,0,1.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.44,0,0,0,2.22,0,0,0,0,0.44,0,0,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.265,0,0,0,0,1.48,7,74,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.86,0,0,0,0.86,0.86,0.86,1.73,0.86,0.86,0,0,0,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.131,0,0,0,0,1.74,11,47,0 0,0,0.64,0,0.32,0.32,0,0,0,0,0,0,1.29,0,0,0,0.32,0,0.97,0,0.32,0,0,0,0.32,0.32,0.32,0.64,0,0.32,0,0,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.371,0,0,0,0,2.05,11,82,0 0.13,0,0.13,0,0,0,0,0,0.13,0.13,0,0.66,0,0.66,0,0,0.13,0,1.06,0,0.66,0,0,0,0.13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.8,0,0,0.13,0,0.93,0,0.014,0.042,0,0,0,0.183,5.603,57,1160,0 0,0,0,0,9.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.33,0,0,0,0,0,0,0,0,0,3.333,14,20,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,6,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.754,0,0,0,0,1,1,7,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.333,14,20,0 0,0,0,0,0,0,0,0,0.3,0,0,0.3,0,1.82,0,0,0.3,0,0.6,0,0.91,0,0,0,0,0,1.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.21,0,0,0,0,1.51,0,0,0.057,0,0,0,0.231,2.011,28,358,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,0,0,0,0,0,0,0,0,1.857,5,13,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.09,0,0,0,0,0,0,0,0,2.428,5,17,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.09,9.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.09,0,0,0,0,0,0,0,0,0,0,0,0,1.25,2,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.09,0,0,0,0,0,0,0,0,3,5,15,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.754,0,0,0,0,1,1,7,0 0.21,0,0.42,0,2.54,0,0,0,0,0,0,1.05,0,0,0,0,0.21,0,0,0,0,0,0,0,0.21,0,0.63,0.21,0,0,0,0,0.21,0,0,0,0,0,0.21,0,0,1.27,0,0,0,0,0,0.21,0.028,0.115,0,0,0,0,2.457,45,258,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,18.18,9.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,4,0 0,0,0,0,10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.333,2,4,0 0,0.35,0.35,0,0.17,0,0,0,0.17,0.35,0,1.23,0,0.88,0,0,0.17,0,1.41,0,0.7,0,0,0,0.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.7,0,0,0.17,0,0.88,0,0,0.038,0,0.019,0,0.095,2.059,28,447,0 0,0,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,2.94,0,0,0,0,0,5.88,0,1.47,0,0,1.47,0,0,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.037,15,82,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.33,0,0,0,0,0,0,0,0,0,4.333,20,26,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.353,0.353,0,0,0,0,1,1,21,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.12,0,0.289,0,0,0.289,0,0,1.076,2,14,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.666,15,20,0 0.01,0.01,0.07,0,0.05,0,0,0.01,0.03,0.13,0.05,0,0,0.05,0,0,0.01,0.07,0.01,0,0.01,0,0,0,0,0,0,0,0,0,0,0,0.07,0,0,0.13,0,0,0,0.01,0.46,0,0,0.03,0,0.8,0.01,0.07,0.05,0.301,0.131,0.002,0.09,0.002,2.577,82,5395,0 3.94,0,0,0,0,0,0,0,0,0,0,2.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,5,36,0 0,0,1.13,0,0,0,0,0,0,0,0,1.13,0,0,0,0,0,1.13,1.13,0,1.13,0,0,0,2.27,1.13,0,1.13,0,1.13,1.13,0,0,0,1.13,1.13,1.13,0,0,0,0,0,0,0,0,0,0,0,0,0.136,0,0.136,0,0,1.812,10,58,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.44,3.44,0,0,0,0,0,0,0,0,0,0,6.89,0,0,0,0,0,0,0,3.44,0,0,0,0,0,0,0,0,0,2.818,8,31,0 0,0,0,0,0,0,0,0,0,0,0,3.17,0,0,0,0,0,0,3.17,0,0,0,0,0,1.58,0,0,0,1.58,0,0,0,0,0,0,0,0,0,0,0,0,6.34,0,0,0,0,0,0,0,0,0,0,0,0,1.384,4,18,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.476,0,0,2.642,9,37,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0.17,0,0,0,0,0,0,0.17,0,0.17,0,0,0,0,0,0,0.17,0,0,0,0,0,1.57,1.4,0,0,0,0.17,0,0,0.17,0,0,0,1.92,0,0,0,2.8,0,0,0,0,2.8,0,0,0.267,0.802,0.118,0,0,0,4.808,20,601,0 0.19,0,0.39,0,1.24,0.13,0,0.06,0.32,0,0,0.45,0.26,0,0,0,0.13,0,1.24,0,0.39,0,0.06,0,1.04,0,0,0,0,0,0,0,0,0,0,0.06,0,0,0,0,0,0,0,0,0.39,0,0,0,0,0.03,0,0,0.03,0,1.571,42,297,0 0,0,0,0,0,0,1.78,0,0,0,0,1.78,0,0,0,0,0,0,3.57,0,0,0,0,0,0,0,0,0,0,1.78,0,0,0,0,0,1.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.586,0,0,1.307,4,17,0 0,0,0,0,0,0,0,0,0,0,0,0.58,0,0,0,0,0,0,0,0,0.58,0,0,0,0,0,0.58,0,0,0,0,0,1.76,0,0,0,0,0,0,0,0,0,0,0,0.58,0,0,0,0,0.107,0,0.107,0,0,1.531,6,49,0 
0.13,0,0.2,0,0.54,0.13,0,0.13,0.4,0,0,0.06,0.06,0,0,0,1.01,0,0,0,0,0,0,0,1.08,0,0,0.06,0,0,0,0,0,0,0,0,0.4,0,0,0,0,0,0,0,0.06,0,0,0,0.009,0.068,0,0,0.166,0,2.804,45,617,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.373,0.373,0,0.373,0,0,1.714,4,12,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.03,0,0,0,0,0,0,0,0,0,1,1,6,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,2,0 0.26,0,0.53,0,0,0.26,0,0,0,0,0,1.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0.26,0,0,0,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0.26,0,0,0,0,0,0,0.039,0,0,0,0,2.646,77,172,0 0.26,0,0.53,0,0,0.26,0,0,0,0,0,1.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0.26,0,0,0,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0.26,0,0,0,0,0,0,0.039,0,0,0,0,2.646,77,172,0 0,0,0,0,0,0,0,0,0,0,0,0.55,0,0,0,0,0,0,0,0,0.55,0,0,0,0.55,0,0,0,0.55,0,0,0,0,0,0,0,0.55,0,0,0,0,0.55,0,0,0,0,0,0,0,0.25,0,0,0,0,1.601,8,173,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.28,0,0,0,0,0,2.56,1.28,2.56,1.28,1.28,1.28,1.28,1.28,0,1.28,1.28,1.28,0,0,0,1.28,0,0,0,0,0,0,0,0,0,0.632,0,0,0,0,2.142,11,45,0 0,0,0,0,0,0,0,0,0,0,0,1.75,0,0,0,0,0,0,0.29,0,0,0,0,0,1.75,0.29,0,0.58,0,0,0,0,0,0,0.29,0.29,0.58,0,0,0,0,0,0,0,0.29,0,0,0,0,0.091,0,0.045,0,0,2.333,15,175,0 0,0,0.6,0,0.6,0,0,0,0,0,0,0,0,0,0,0.6,0,0,2.4,0,0,0,0,0,0,0,1.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.6,0,0,0,0,0,0,0,0.11,0,0,0,0,1.074,3,29,0 0,0.32,0.32,0,0.16,0.16,0,0,0.16,0.32,0,0,0,0,0.32,0,0.32,0.32,0.8,0.32,2.08,0,0,0.16,0,0,0.16,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0,0,1.309,0,0.022,0.377,0,3.918,157,772,0 0.04,0.08,0.24,0,0.04,0.16,0,0.04,0.16,0.52,0.08,1.04,0,0,0,0.24,0.04,0,0.68,0,0.88,0,0.04,0,1.76,0.6,0,0.16,0,0,0.16,0,0,0,0.36,0,0.24,0.04,0,0,0,0.04,0,0,0.04,0,0,0,0.029,0.142,0,0.071,0.071,0.011,1.983,23,1361,0 
0,0,0,0,0.7,0,0,0.88,0,0.17,0,0.52,0.17,0,0,0,0,0,2.46,0,1.93,0,0,0,0.52,0.35,0.35,0.17,0.17,0.17,0.17,0.17,0.17,0.17,0.17,0.17,0.17,0,0,0.17,0,0,0,0,0.17,0,0,0,0,0.086,0,0.057,0,0,1.472,15,162,0 0.09,0.09,0.09,0,0.29,0,0,0,0.87,0,0,0.29,0.09,0,0.19,0,0,0,0.39,0,0.19,0,0,0,0.58,0.68,0.09,0,0,0,0,0,0.29,0,0,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0.111,0.153,0.069,0,0.041,0,3.298,41,686,0 0.02,0.08,0.1,0,0.27,0.07,0,0,0.14,0,0,0.05,0,0.02,0.01,0.02,0,0.01,0.05,0,0.04,0,0,0,0.48,0.49,0.04,0,0,0,0,0,0.36,0,0,0.01,0.45,0,0,0.01,0,0,0.04,0,0.01,0,0,0.02,0.221,0.152,0.056,0,0.004,0,2.63,38,3086,0 0.05,0,0.17,0,0.28,0,0,0,0.51,0,0,0.17,0,0.05,0,0,0,0.05,0.22,0,0.17,0,0,0,0.96,1.02,0.05,0,0,0,0,0,0.28,0,0,0.11,0.73,0,0,0,0,0,0,0,0,0,0,0,0.165,0.182,0.091,0,0.016,0,2.777,38,1161,0 0.05,0,0.11,0,0.16,0.05,0,0,0.5,0,0,0.16,0,0,0,0,0,0.05,0.22,0,0.16,0,0,0,0.62,0.67,0.05,0,0,0,0,0,0.56,0,0,0,0.73,0,0,0,0,0,0,0.05,0,0,0,0,0.073,0.211,0.04,0,0.016,0,2.787,47,1090,0 0,0,0,0,0,0.05,0,0.34,0,0,0.11,0.81,0.05,0.11,0,0,0.75,0,0,0,0,0,0.05,0,1.16,0,0,0,0,0,0,0,0.05,0,0,0.23,0.05,0,0,0,0,0,0,0,0,0,0,0,0.283,0.107,0,0,0.053,0,1.864,32,910,0 0,0,0,0,1.05,0,0,0,0,0,0,0,0,0,0,0,0,0,2.11,0,2.81,0,0,0,0,0,0.7,0,0,0,0,0.35,0,0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.047,0,0,0,0.047,2.232,12,163,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,7,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.72,0,0,0,0,0,0,0,0,0,0,0,1.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.481,5,40,0 0.27,0.27,0,0,0.83,0,0,0,0,0,0,0.27,0.27,0,0,0,0,0,1.1,0,0.27,0,0,0,1.93,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0,0,0,0,0,0,0,0.084,0,1.231,6,101,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.38,0,1.69,0,0,0,1.69,1.69,1.69,0,0,0,0,0,0,0,0,0,0,0,1.69,0,0,0,0,0,1.69,0,0,0,0,0.315,0,0,0.63,0,3.083,12,37,0 0,0,0.87,0,0,2.63,0,0,0,0,0,0.87,0,0,0,0,0,0.87,0.87,0,0,0,0,0,0,0,0.87,0,0,0,0,0,0,0,0,0,0.87,0,0,0,0,0,0,0,0.87,0,0,0,0.317,0.317,0,0,0,0,1.269,5,33,0 
0,0,0,0,0,0,0,0,0,0.97,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.97,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.491,0.163,0,0,0,4.312,33,138,0 0,0,0,0,0,0,0,0,0,0,0,0.85,0,0,0,0,0,0,0,0,0,0,0,0,0.42,0.42,0,0,0,0,0,0,0,0,0,1.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.652,9,114,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0,0,0,1.333,4,12,0 0,0,0.73,0,0,0,0,0.73,0,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,2.94,4.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.73,0,0,0.107,0,0,0,0,1.695,9,78,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.55,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.16,0,0,0,0,0,0,0,0,0.07,0,0,0,0,1.541,4,37,0 2,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,2,0,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.888,29,53,0 0,0,0.28,0,0,0,0,0,0,0.57,0,1.43,0,0,0,0,0,0,0,0,0.28,0,0,0,0.28,1.14,0,0,0,0,0,0,0,0,0.28,0,1.14,0,0,0,0,0,0,1.14,0,0,0,0.28,0,0.43,0,0,0,0,2.902,55,238,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.49,0,0,0,0,0,7.46,2.98,0,0,0,2.98,0,0,0,0,1.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.961,11,51,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,0,0,0,0,0,0,0,0,0,0,0,0,1.666,3,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,4,6,0 0,0,0,0,0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.61,0,0,0,0,0,0,0,0,0,0,0,1.61,0,0,0,0,0.8,0,0,0,0.8,0,0,0,0,0,0,0,0,0,0,0,0,1.35,3,54,0 0,0,0,0,0,0,0,0,0,0,0,1.12,0,0,0,0,0,0,2.24,0,0.56,0,0,0.56,0,0,1.12,0,0,0,0,0,0,0,0,0,0.56,0,0,0.56,0,0,0.56,0,0.56,0,0,0,0,0.299,0,0,0,0,2.236,13,85,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.439,0,0,0.219,0,1.911,11,65,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.333,8,10,0 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0.74,0,0,0,0,0,0,0,0.74,0,0,0,0.134,0.672,0,0,0,0,1.863,5,41,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.2,0,0,0,0,0,0,0,0,0,1.59,5,35,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.081,0,0,0,1,1,3,0 0.97,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.97,0,0,0,0,0,0,0,0,0,0,0,0,0,0.97,0,0,0,0,0,0,0,0.97,0,0,0,0,0.76,0,0,0,0,2,5,38,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.12,0,0,0,2.12,0,0,4.25,0,0,0,0,2.12,0,2.12,0,0,0,0,0,0,0,0,0,2.12,0,0,0,0,0.714,0,0,0,0,2.708,15,65,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.98,0,1.96,0,0,0,0,0,0,0,0,0,0,0,0.98,0,0,0,0,0,0,0,0,0,0,1.96,0,0.98,0,0,0,0,0,0,0,0,2,13,42,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.239,0,0,0,0,2.166,5,13,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.714,3,12,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.863,0.143,0,0,0,4.484,33,148,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.52,4.76,4.76,4.76,4.76,4.76,4.76,4.76,0,4.76,4.76,4.76,0,0,0,4.76,0,0,0,0,0,0,0,0,0,1.257,0,0,0,0,3.333,11,30,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0.34,1.36,0,0,0,0,0,0,0.34,1.7,0,0,0,0,0,0,0,0,1.7,0,0.34,0,0,0,1.36,0.68,1.02,0.34,0.34,0.34,0.34,0.34,0,0.34,0.34,0.34,0.34,0,0.34,0.34,0,0,0.34,0,0.34,0,0,0,0,0.244,0,0,0,0,1.696,13,112,0 0,0,0,0,0,0,0,1.57,0,1.57,0,1.57,0,0,0,0,0,0,1.57,0,1.57,0,0,0,3.14,2.36,0.78,0.78,0.78,0.78,0.78,0.78,0,0.78,0.78,0.78,0,0,0.78,0.78,0,0,0,0,0.78,0,0,0,0,0.372,0,0,0,0,3.971,34,139,0 
0,0,0.88,0,0,0,0,0,0.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.315,0,0,0,0,1.166,3,21,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.88,5.88,0,5.88,0,0,0,0,5.88,0,0,0,0,0,5.88,0,0,0,0,5.88,0,0,0,0.763,0,0,0,0,2.285,10,16,0 0,0,0,0,0,0,0,0,0,0.31,0,0.31,0,0,0,0,0,0,0.95,0,0.95,0,0,0,1.27,0.63,1.91,0.63,0.63,0.63,0.63,0.63,0,0.63,0.63,0.63,0.95,0,0.63,0.63,2.22,0,0.63,0,0.63,1.91,0,0,0.05,0.304,0.101,0,0,0,2.186,15,164,0 0,0.18,0,0,0.18,0,0,0.37,0,0,0,0.94,0,0,0,0,1.89,0,0.18,0,0,0,0,0,0.37,0.18,0,0,0,0.18,0,0,0,0,0,0.37,0,0,0,0,0,0,0,0,0.56,0,0,0,0,0.244,0,0,0,0,1.663,10,168,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.176,0,0,0,2.142,8,15,0 0,0,0,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0,0,0.97,0,0.97,0,0,0,1.29,0.64,1.94,0.64,0.64,0.64,0.64,0.64,0,0.64,0.64,0.64,0.97,0,0.64,0.64,2.26,0,0.64,0,0.32,1.94,0,0,0.051,0.255,0.102,0,0,0,2.197,15,156,0 0,0.46,0,0,0,0,0,0,0,0,0,0.46,0,0,0,0,0,0,1.38,0,1.85,0,0,0,2.31,0.46,0,0,0,0.46,0,0,0,0,0,0,0.46,0,0.46,0,0,1.38,0,0,0,0,0,0,0,0.155,0,0,0,0,2.982,28,167,0 0.1,0,0.3,0,0.05,0.15,0,0.25,0.3,0.1,0.2,0.65,0,0,0,0.25,0.05,0,0.55,0,0.65,0,0.05,0,1.3,0.35,0,0.15,0,0.25,0.2,0,0,0,0.2,0.05,0.25,0,0,0.05,0,0,0,0.3,0.15,0,0.05,0,0.014,0.139,0,0.022,0.058,0,1.979,23,1081,0 0,0,0,0,0.81,0,0,0,0,0,0,0.81,0.81,0,0,0,0,0,1.62,0,0,0,0,0,0.81,0,1.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.81,0,0,0,0,0.143,0,0.143,0,0,1.055,2,19,0 0,0,0,0,0,0,0,0,0,0,0,1.4,0,0,0,0,0.7,0,1.4,0,1.4,0,0,0,0,0,0.7,0,0,0,0.7,0,0,0,0,0,0,0,0,2.11,0,0,0,0,0,0,0,0,0,0.267,0.066,0,0,0,17.952,200,377,0 0,0,0.61,0,0,0,0,0,0,0,0,1.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.61,0,0.079,0.158,0,0,0,0,2.508,17,143,0 0.78,0,0,0,0.39,0,0,0,0,0.39,0,0,0,0,0,0,0,0,0.78,0,0.39,0,0,0.39,0,0,0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.39,0,0,0,0,0.073,0.146,0,0,0,0,1.354,8,42,0 
0,0,0,0,0,0,0,0,0,0,0,0,2.94,2.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.6,5,16,0 0.22,0,0.07,0,0.07,0.07,0,0.14,0,0.36,0,0.51,0.44,0.07,0,0,0.29,0.07,0.07,0,0.07,0,0,0,1.99,0,0,0,0.29,0.29,0,0,0,0,0,0.14,0.07,0.07,0,0,0,0,0,0,0,0,0,0,0.041,0.031,0,0.031,0,0,1.912,22,568,0 0,0,0,0,0,0,0,0,0,0,0,0.91,0,0,0,0,0,0.22,0,0,0,0,0,0,0.22,0.22,0,0.45,0,0,0,0,0,0,0.22,0,0,0,0,0,0,0,0,0.22,0,0,0,0.22,0,0.154,0,0,0,0,1.768,15,122,0 0,0.33,0,0,0.33,0,0,0,0,0,0,0.33,0,0,0,0,0,0.33,0,0,0,0,0,0,0.33,0.33,0,0.67,0,0,0,0,0,0,0.33,0,0,0,0,0,0,0,0,0.33,0,0,0,0.33,0,0.088,0,0,0,0,1.87,15,116,0 0.49,0,0,0,0.49,0.49,0,0.49,0,0,0,0.49,0.99,0,0,0,0,0,0.49,0,0,0,0,0,2.48,0.99,0,0,0.99,0.99,0,0,0,0,0,0.49,0.49,0,0,0,0,0,0,0,0,0,0,0.49,0,0.145,0,0,0,0,1.641,10,87,0 0,0,0,0,0,0,0,0,0,0,0,0.42,0,0,0,0,0,0,1.28,0,1.28,0,0,0,0.85,0.42,1.7,0.42,0.42,0.42,0.42,0.42,0,0.42,0.42,0.42,0.85,0,0.42,0.42,1.7,0,0.42,0,0.42,1.28,0,0,0,0.204,0.068,0,0,0,2.108,15,97,0 0,0,0.51,0,1.54,0,0,0,0.25,0,0,1.28,0,0,0,0,0,0,0,0,0,0,0,0,0.77,0.25,0,0,1.03,1.8,0,0,0,0,0,0,0.25,0.25,0,0,0,0,0,0,0.25,0,0,0,0,0.039,0,0,0,0,1.767,7,99,0 0,0,0,0,0.5,0,0,0,0,0.5,0,1.01,0,0,0,0,0,0,2.53,0,1.01,0,0,0,1.52,1.01,1.52,0.5,0.5,0.5,0.5,1.01,0,1.01,0.5,0.5,0.5,0,0.5,0.5,0,0,0.5,0,1.01,0,0,0,0.09,0.272,0.09,0,0,0,1.836,13,101,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.78,0,0.89,0,0,0,0,0,2.67,0,0,0,0,0,0,0,0,0,0.89,0,0,0,0.89,0,0,0,0,0.89,0,0,0,0.15,0,0,0,0,1.85,15,37,0 0,0,0,0,1.63,0,0,0,0,0,0,0,0,0,0,0,0,0,4.91,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.769,5,46,0 0,0,0,0,0,0,0,0,0,0,0,3.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.709,0,0.709,0,0,2.3,9,23,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.52,4.76,4.76,4.76,4.76,4.76,4.76,4.76,0,4.76,4.76,4.76,0,0,0,4.76,0,0,0,0,0,0,0,0,0,1.257,0,0,0,0,3.333,11,30,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0.602,4.7,23,47,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.63,4.91,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.48,6,37,0 0,0,0,0,0,0,2.22,2.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.665,0,0,0.665,0,4.571,25,64,0 0,0,0,0,0,0,2.22,2.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.665,0,0,0.665,0,4.571,25,64,0 0,0,0.33,0,0,0.49,0,1.32,0.16,5.12,0,0,0,0.66,0,0,0.33,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0,0,0,0.16,0,0,0,0.33,0,0,0,0.07,0.023,0,0,0.023,1.552,10,149,0 0,0,0,0,1.06,0,0,0,0,0,0,1.06,0,0,0,0,0,0,1.06,0,1.06,0,0,0,1.06,1.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.19,0,0,0,0,0,0.181,0,0,0,0,1.4,4,28,0 0,0,0,0,0,0,0,0,0,0,0,0.44,0,0,0,0,0,0,0.88,0,0.88,0,0,0,1.32,0.88,0.88,0.88,0.44,0.44,0.44,0.44,0,0.44,0.88,0.44,0,0,0,0.44,0,0,0,0,0.44,0,0,0,0,0.207,0,0,0,0.207,2.588,40,132,0 0,0,0,0,0,0,0,0,0,0,0,0.51,0,0,0,0,0,0,1.03,0,0.51,0,0,0,1.54,1.03,1.54,1.03,0.51,0.51,0.51,0.51,0,0.51,1.03,0.51,0,0,0,0.51,0,0,0,0,0.51,0,0,0,0,0.24,0,0,0,0.48,2.6,40,130,0 0,0,0,0,0,0,0,0,0,0,0,0.91,0,0,0,0,0,0.91,0.91,0,0.45,0,0,0,2.73,3.19,0.91,0.45,0,0,0,0,0,0,0.45,0,0.45,0,0,0,0,0,0,0,0,0,0,0,0.075,0.151,0,0,0,0,2.158,20,136,0 0.05,0,0.31,0,0,0.05,0,0.05,0.47,0,0,0.15,0,0,0,0,0.26,0.05,0.21,0,0.15,0,0,0,0.79,0.85,0.05,0,0,0,0,0,0.47,0,0,0,0.85,0,0,0,0,0,0,0,0,0,0,0,0.112,0.202,0.067,0,0.014,0,3.117,52,1303,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.7,0,0,0,0,0,0.35,0.35,0,0.7,0.35,0.35,0,0,0,0,0.35,0,0,0,0.7,0,0,0,0,0.35,0,0,0,0,0,0,0,0,0,0,1.506,11,113,0 0.23,0,0.23,0,0.69,0,0,0,0,0,0,1.39,0,0,0,0,0,0,0.23,0,0,0,0,0,0.23,0.23,0,0,0,0,0,0,0.23,0,0,0,0,0,0,0,0,0,0,0.23,0,0,0,0,0,0.067,0,0,0,0,1.433,9,86,0 
0.23,0,0,0,0.23,0.23,0,0,0,0,0,0.23,0,0,0,0.23,0,0,0.47,0,0,0,0,0,0.47,0.23,0,0,0,0.47,0.23,0,0.47,0,0,0,0.23,0,0,0,0,0,0,0,0,0,0,0,0.106,0.106,0,0,0,0,1.588,8,143,0 0,0,0,0,0,0,0,0,0,1.31,0,1.31,0,0,0,0,0,0,6.57,0,0,0,0,0,2.63,1.31,2.63,1.31,1.31,1.31,1.31,1.31,0,1.31,1.31,1.31,1.31,0,0,1.31,0,0,1.31,0,1.31,0,0,0,0,0.649,0,0,0,0,2.214,13,62,0 0.05,0,0.05,0,0.05,0.05,0,0,0.5,0,0,0.16,0,0.05,0,0,0,0.05,0.22,0,0.16,0,0,0,0.62,0.67,0.05,0,0,0,0,0,0.45,0,0,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0.185,0.233,0,0,0.016,0,2.972,46,963,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.09,0,0,0,0.719,0,0,0,0,1,1,4,0 0.13,0.4,0,0,0,0,0,0,0,0.53,0,0,0,0,0,0,0.13,0,0.8,0,0.53,0,0,0,0,0,0,0,0,0,0,0,0,0.13,0,0,1.2,0,0,0,0,0,0,0,0,0.53,0,0.13,0,0.25,0,0.014,0.427,0.044,5.473,143,1538,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.909,0,0,0,0,1,1,1,0 0.1,0,0.1,0,0.1,0,0,0,0.94,0,0,0.31,0,0,0,0,0,0,0.41,0,0.2,0,0,0,0.52,0.62,0.1,0,0,0,0,0,1.15,0,0,0,0.1,0,0,0,0,0,0,0,0,0,0,0,0.132,0.251,0.029,0,0.029,0,2.784,32,490,0 0.11,0,0.22,0,0,0.11,0,0,1.01,0,0,0.33,0,0,0,0,0,0,0.44,0,0.22,0,0,0,0.78,0.67,0.11,0,0,0,0,0,0.56,0,0,0,0.22,0,0,0,0,0,0,0,0,0,0,0,0.171,0.233,0.031,0,0.031,0,3.189,32,571,0 0,0,0.09,0,0,0,0,0.09,0,0,0.09,1.49,0.27,0.09,0,0,0.37,0,0,0.09,0,0,0,0,2.51,0,0,0,0.09,0.27,0,0,0,0,0,0.37,0.18,0,0,0,0,0,0,0.09,0,0,0,0,0,0.106,0,0,0,0,2.277,27,558,0 0.02,0,0.1,0,0.05,0.05,0,0.13,0.3,0,0,0.13,0,0,0,0,0,0.02,0.1,0,0.08,0,0,0,0.46,0.49,0.02,0,0,0,0,0,0.27,0,0,0,0.41,0,0.13,0,0,0,0,0,0,0,0,0,0.166,0.158,0.047,0,0.007,0,2.984,52,1758,0 0,0,1.06,0,0,0,0,0,0,0,0,1.06,0,0,0,0,0,1.06,0,0,0,0,0,0,2.12,1.06,0,1.06,0,1.06,1.06,0,1.06,0,1.06,1.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0.121,0,0,0,0,2.151,10,71,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.88,0,0,0,0,0.35,0,0,0,0,1.461,6,19,0 
0,0,3.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.74,0,0.74,0,0,2.166,7,26,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0,0.26,0,0.26,0.13,0,0,0,0,0,0.52,0,0,0,0,0.39,0,1.05,0,1.05,0,0,0,0.39,0,0,0,0,0,0,0,0,0,0,0.13,0,0,0,0,0,0,0,0,0,0,0,0,0.017,0.089,0.017,0.035,0.053,0.053,5.189,107,685,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14.28,0,0,0,0,0,0,0,0,0,1.25,2,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,2.94,0,0,0,0,2.94,0,2.94,0,0,0,0,0,2.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.94,0,0,0,0,0,0,0,0,0,1,1,10,0 0,0,0.48,0,2.18,0,0,0,0.48,0,0,1.69,0,0,0,0,0.24,0,0.48,0,0.48,0,0,0,1.69,0.24,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0,0,0,0.24,0,0,0,0,0.036,0,0,0,0,2.364,73,227,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.06,3.03,3.03,3.03,3.03,3.03,3.03,3.03,0,3.03,3.03,3.03,0,0,0,3.03,0,0,0,0,0,0,0,0,0,0.9,0,0,0,0,2.75,11,33,0 0,0,0.95,0,0.31,0,0,0,0,0,0,0.31,0,0,0,0,0,0,1.26,0,0,0,0,0,0.63,0.95,0,0,0,0,0,0,0,0,0,0.31,0.31,0,0.31,0,0,0,0.31,0,0,0,0,0,0.048,0.339,0,0.048,0,0,1.99,14,215,0 0,0,0,0,0,0,0,1,0,0,0,0.5,0,0,0,0,0,0,0,0,0,0,0,0,2,0.5,0,0.5,0.5,1,0,0,0,0,0.5,0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0.12,0,0,0,0,2.017,13,117,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.888,5,17,0 0,0,2.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.505,0,0,2.375,4,19,0 0,0,0,0,0,0,0,0,0.83,1.66,0,0,0,0,0,0,0,0,0.83,0,2.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0,0,0,0.118,0,0,0,0,1.475,11,59,0 0,0,0.57,0,0.85,0,0,0,0.28,0,0.57,0.28,0,0,0,0.85,0,0.57,1.42,0,0.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0.282,0.242,0,2.46,26,278,0 
0.71,0.14,0.42,0,1,0.14,0.14,0,0,3,0.14,0.85,0,0,0,0,0,0.28,0.85,0,0.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.14,0,0,0,0,0,0,0,0,0,0.022,0,0,0,0.022,1.931,9,168,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.4,3,7,0 0,0,0,0,0,0,0,0,5.26,0,0,0,0,0,0,0,2.63,0,2.63,0,5.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.058,11,35,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,0,4,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.38,0,0,0,0,1.806,11,56,0 0,0,0,0,0,0,0,1.08,0,0,0,1.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.54,0,0,0,0,0,0,0,0.54,0.54,0,0,0,0,0,0,0,0,0,0.54,0,0.166,0,0.083,0,0,1.528,13,81,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.8,0,0,2.41,0,0.8,0,0,0,0,0,0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.8,0,0,0.8,0,0,0,0,0.45,0,0,0,0,1.2,4,30,0 0,0,0,0,0.27,0,0,0.55,0,0.55,0,0.55,0,0,0,0,0,0,0,0,0,0,0,0,6.64,4.15,0.83,0.27,1.66,0.27,0.27,0.27,0,0.27,0.27,0.27,1.38,0,0,0.27,0,0.27,0.27,0,0.55,0,0,0,0.183,0.549,0.137,0,0,0,4.257,57,430,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.543,0,0.271,0,0,3,18,72,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.43,0,0.28,0,0,0,1.43,1.15,0.86,0.28,0.28,0.28,0.28,0.28,0.14,0.28,0.28,0.28,0.28,0,0.14,0.28,0,0,0.43,0,0.57,0.28,0,0,0.023,0.324,0.046,0,0,0,2.24,12,372,0 0,0,0,0,0,0,0,0,0,0,0,0,0.97,0,0,0,0,0,0,0,0,0,0,0,6.79,3.88,0.97,0.97,0.97,0.97,0.97,0.97,0,0.97,0.97,0.97,0.97,0,0,0.97,0,0,0.97,0,1.94,0,0,0,0,0.299,0.149,0,0,0,2.666,13,72,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.35,0,1.35,0,1.35,0,1.35,0,1.35,1.35,0,0,0.205,0,0.205,0,0,0,1.722,12,31,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.666,5,20,0 0,0,1.19,0,0,0,0,0,0,0,0,2.38,0,0,0,1.19,0,0,2.38,0,0,0,0,0,0,0,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.19,0,0,0,0,0,0,0.2,0.4,0,0.2,0,0,1.461,4,19,0 
0,0,0,0,1.81,0,0,0,0,1.81,0,0,1.81,0,0,0,0,0,0,0,0,0,0,0,1.81,1.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.3,3,13,0 0,0,0,0,0,0,0,0,0,0.45,0,0.45,0,0,0,0,0,0,0.9,0,0,0,0,0,0,0.45,0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.082,0.165,0,0,0,0,1.666,4,40,0 0,0,0.26,0,0.53,0,0,0,0,0,0,0.26,0,0,0,0,0.26,0,0,0,0,0,0,0,1.61,0.8,0,0,0.53,0.8,0,0,0,0,0,0.8,0.26,0,0,0,0,0,0,0,0.26,0,0,0,0.128,0.042,0,0,0,0,1.635,6,139,0 0,0,0.32,0,0,0.16,0,0,0,0,0,0.64,0,0,0,0,0.48,0,0.96,0,0.96,0,0,0,0.48,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0,0,0,0,0,0,0,0,0,0.021,0.105,0,0.021,0.063,0.063,3.789,39,432,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.24,0,0,0,1.93,1.69,0.72,0.24,0.24,0.24,0.24,0.24,0.24,0.24,0.24,0.24,0.24,0,0,0.24,0,0,0.24,0,0.24,0.48,0,0,0,0.148,0.074,0,0,0,2.386,12,210,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.97,1.72,0.49,0.24,0.24,0.24,0.24,0.24,0.24,0.24,0.24,0.24,0.24,0,0,0.24,0,0,0.24,0,0.24,0.49,0,0,0,0.15,0.075,0.037,0,0,2.367,12,206,0 0,0,0.09,0,0,0.09,0,0.27,0,0,0.18,1.49,0.09,0.09,0,0,0.46,0,0,1.49,0,0,0.09,0,2.42,0,0,0,0,0,0,0,0,0,0,0.09,0.18,0,0,0,0,0,0,0,0,0,0,0,0.066,0.118,0,0,0.066,0,2.156,26,552,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.56,2.56,2.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.56,0,0,0,0,0,0,0,0,0,1.5,4,24,0 0,0,0,0,0,0,0,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.73,2.73,1.36,0,0,0,0,0,0,0,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.541,7,37,0 0,0,0,0,0,0,0,0,0,1.25,0,1.25,0,0,0,0,0,0,6.25,0,0,0,0,0,0,0,1.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.246,0,0,0,0,1.076,2,14,0 0,0.08,0.17,0,0,0.08,0,0.34,0,0.17,0.08,0.34,0,0,0,0,0.87,0,0.26,0,0,0,0,0,2.79,0.69,0,0.08,0,0,0,0,2.35,0,0,0.26,0.78,0,0.17,0,0,0,0,0,0,0,0,0,0.133,0.306,0.053,0,0.013,0,3.205,57,904,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.625,9,26,0 0,0,0,0,0,0,0,0,0,4.54,0,4.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.51,0,0,0,0,0,0,1.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.286,0,0,0,0,2.277,12,41,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.7,0,0,0,0,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.5,5,18,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.545,4,17,0 0.35,0,0.35,0,0.71,0,0,0,0,0,0,0.35,0,0,0,0,0,0,0.35,0,0.17,0,0,0,0.53,0.35,0.53,0.17,0.17,0.17,0.17,0.17,0.35,0.17,0.17,0.17,0,0,0,0.17,0,0.71,0.17,0.17,0.35,0,0,0,0.123,0.309,0.03,0,0,0,2.241,13,204,0 0,0.13,0.55,0,0.27,0.13,0,0,0,0.27,0,1.38,0,0,0,0.13,0,0,1.94,0,0.97,0,0,0,0.13,1.11,0,0,0,0,0,0,0,0,0,0,0.55,0,0,0,0,0,0.13,0,0,0,0,0,0.075,0.025,0,0.025,0,0,5.695,82,598,0 0,0,0.48,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0,0,0,0,0,0,0.48,0,1.44,1.93,0,0,1.44,1.44,0,0,0,0,0,0,1.44,0,0,0,0,0,0,0.48,0,0,0,0,0,0,0,0.119,0.059,0,6.145,115,338,0 0,0,0.24,0,0.09,0.04,0,0,0.04,0,0.04,0.69,0,0,0,0.14,0.19,0,0.69,0,0.64,0,0,0,2.04,1.09,0,0.04,0,0.19,0.14,0,0.04,0,0.29,0.09,0.34,0,0,0,0,0,0,0,0.04,0,0,0,0.014,0.148,0,0.014,0.044,0.007,2.112,26,1223,0 0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0,0,1.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.92,0,1.92,0,1.92,0,1.92,0,1.92,1.92,0,0,0.394,0.098,0.295,0,0,0,1.813,13,107,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.09,7,23,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.97,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.94,0,1.94,0,1.94,0,1.94,0,1.94,1.94,0,0,0.147,0.147,0.294,0,0,0,1.789,12,68,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.72,0,1.72,0,1.72,0,1.72,0,1.72,1.72,0,0,0,0,0.265,0,0,0,1.65,12,33,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.09,7,23,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,2,0,0,0,0.687,0,0,0,0,1.888,9,17,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.777,0,0,0,2,4,6,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.777,0,0,0,2,4,6,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0,4.16,0,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.751,0,0,0,1.428,4,10,0 0,0,0.08,0,0,0.16,0,0.08,0.08,0,0.08,0.92,0.08,0.08,0,0,0.16,0,0,0,0,0,0,0,3.53,0,0,0,0,0,0,0,0.25,0,0,0,0.08,0,0,0,0,0,0,0.16,0,0,0,0,0.069,0.103,0,0,0.011,0,2.44,18,598,0 0,0,0,0,2.38,0,0,0,0,0,0,0,0,0,0,0,0,0,2.38,0,2.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.5,4,15,0 0,0,0,0,0.84,0,0,0,0,0,0,1.68,0,0,0,0.42,0,0.42,1.68,0,0.42,0,0,0,0,0,0,0,0.84,0,0,0,0,0,0,0,0,0,0,0,0,2.95,0,1.26,0,0,0,0,0.145,0.217,0,0,0,0,1.487,8,61,0 0,0,0,0,0,0,0,0,0,0.84,0,0.84,0,0,0,0,0,0,4.2,0,0,0,0,0,1.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.119,0,0,0,1.454,9,32,0 0.08,0.08,0.57,0,0.48,0,0,0.08,0,0,0,0.81,0.08,0,0.08,0,0.81,0,0.65,0,0.4,0,0,0,1.38,0,0,0,0,0,0.16,0,0.16,0,0.08,0,0.08,0,0,0.08,0,0.89,0,0.24,0.08,0,0,0.08,0.011,0.034,0,0.057,0.022,0,1.875,65,542,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.66,0,0,0,0,0,0,0,1.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.304,0,0,2.125,9,34,0 0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,0,0,1.78,1.78,0,0,0,0,0,1.78,0,0,1.78,0,0,0,0,1.78,0,1.78,0,1.78,0,0,0,0,0,0,0,0,0,0,0,0,0.319,0,0,0,0,2.391,10,55,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.86,0,0,0,0.86,0,0,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.944,8,35,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.09,7,23,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.85,0,0,0,0,2.85,0,0,0,0,0,0,2.85,0,0,0,0,0.543,0,0,0,0,1,1,10,0 0,0,0,0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0.9,4.5,0,0.9,0,0,0,0,0,0.9,0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.318,0,0,0,0,1.772,4,39,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.52,4.76,4.76,4.76,4.76,4.76,4.76,4.76,0,4.76,4.76,4.76,0,0,0,4.76,0,0,0,0,0,0,0,0,0,1.257,0,0,0,0,3.333,11,30,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.375,6,44,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.03,0,0,0,0,0,2.53,2.53,1.52,0,0,0,0,0,1.01,0,0,0,0.5,0,0.5,0,0,0,0.5,3.04,0.5,0,0,0,0.094,0,0.094,0.094,0,0,1.26,12,63,0 0,0,0,0,0,0,0,0,0,1.2,0,2.4,0,0,0,0,0,0,4.81,0,1.2,0,0,0,0,0,1.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.2,0,0,0,0,0,0,0.227,0,0,0,0,1.062,2,17,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.09,0,0,0,0,0,1.03,1.03,2.06,0,0,0,0,0,1.03,0,0,0,0,0,0,0,0,0,0,3.09,0,0,0,0,0,0,0,0.193,0,0,1,1,23,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.2,0,1.2,0,0,0,0,0,1.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.237,0,0,2.583,8,62,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.73,0.86,3.47,0.86,0.86,0.86,0.86,0.86,0,0.86,0.86,0.86,1.73,0,1.73,0.86,0,0,1.73,0,1.73,0,0,0,0,0.289,0,0,0,0,1.978,12,91,0 0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0.645,0,0,0,0,1,1,9,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.66,0,0,0,0,0,1.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.837,0,0,0,0,3.789,10,72,0 0.23,0.23,0,0,0.23,0.23,0,0.47,0.23,0.23,0.23,0.23,0,0,0,0.23,0,0,2.87,0,1.91,0,0.23,0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.23,0,0,0,0,0,0,0.37,0.205,0.041,2.281,24,146,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,7,0 0.11,0,0.11,0,0.34,0.22,0,0,1.02,0,0,0.45,0.11,0,0,0,0,0,0.45,0,0.22,0,0,0,0.68,0.79,0.11,0,0,0,0,0,0.34,0,0,0.11,0.22,0,0,0,0,0,0,0,0,0,0,0,0.096,0.192,0.08,0,0.032,0,2.829,32,549,0 0.11,0,0.11,0,0,0,0,0,1.15,0,0,0.34,0,0,0,0,0,0,0.46,0,0.23,0,0,0,0.57,0.69,0.11,0,0,0,0,0,0.34,0,0,0.11,0,0,0,0,0,0,0,0,0,0,0,0,0.047,0.159,0.031,0,0.031,0,3.196,32,505,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,12,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.272,3,14,0 1.19,0,0.59,0,0,0,0,0,0,0,0,1.19,0,0,0,0,0,0,2.97,0,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.102,0,0,1.52,7,38,0 0.08,0,0.08,0,0,0,0,0,0.79,0,0,0.26,0,0,0,0,0,0.08,0.35,0,0.26,0,0,0,0.88,0.97,0.08,0,0,0,0,0,0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.149,0.162,0.049,0,0.024,0,2.9,40,673,0 0.05,0,0.05,0,0.3,0,0,0,0.51,0,0,0.15,0,0,0,0,0,0.05,0.2,0,0.15,0,0,0,0.67,0.72,0.05,0,0,0,0,0,0.2,0,0,0,0.46,0,0,0,0,0,0,0.1,0,0,0,0,0.209,0.158,0.05,0,0.014,0,3.764,85,1423,0 0.17,0.08,0.08,0,0.17,0.08,0,0,0.76,0,0,0.25,0,0,0,0.08,0,0.08,0.34,0,0.25,0,0,0,0.76,0.85,0.08,0,0,0,0,0,0.34,0,0,0,0.68,0,0,0,0,0,0,0,0,0,0,0,0.157,0.205,0.036,0,0.024,0,2.883,47,715,0 0,0,1.16,0,0,0,0,0,0,0,0.58,4.09,0,0,0,0,0,0,0,0,0,0,0,0,1.16,0,0,0,0,0,0,0,0,0,0,0.58,0,0,1.16,0,0,0,0,0,0,0,0,0,0.091,0,0,0,0,0,1.21,4,46,0 0,0.15,0.07,0,0.23,0,0,0.07,0.07,0.07,0,1.48,0.15,0.23,0,0.07,1.01,0,0.15,0.07,0,0,0.15,0.07,3.11,0,0,0,0,0,0,0,0.15,0,0,0,0.15,0,0,0,0,0.46,0,0,0.23,0,0,0,0.185,0.098,0,0,0.043,0,2.013,24,576,0 0,0,0,0,0,0,0,0,0,0,0,5.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.098,0,0,2.142,9,15,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.08,0,0,0,0,0,0,0,0,0,0,2.08,2.08,0,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0.173,0,0,1.777,6,48,0 0,0,0,0,0.74,0,0,0,0,1.49,0,0,0,0,0,0,0,0,1.49,0,0.74,0,0,0,2.98,2.23,1.49,2.23,0.74,0.74,1.49,0.74,0,0.74,0.74,1.49,0.74,0,0,0.74,0,0,0.74,0,0.74,0,0,0,0,0.557,0.111,0,0,0,2.607,12,133,0 0,0,0,0,0,0,0,0,0,0,0,0,0,10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.75,8,19,0 
1.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.26,0,1.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.178,0,0,1.272,3,14,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.55,2.77,2.77,2.77,0,2.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.857,11,26,0 0,0.13,0.52,0,0,0.13,0,0.79,0,0,0,0.13,0,0,0,0.13,0,0,0.26,0,0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.13,0,0,0,0,0,0,0.02,0.061,0,0.04,0.04,0,1.823,26,372,0 0,0,0,0,0,0,0,0,0,0.8,0,1.07,0,0,0,0.26,0,0,1.07,0,1.88,0,0,0,2.15,1.61,1.07,0.26,0.26,0.26,0.26,0.26,0,0.26,0.26,0.26,0.53,0,0,0.26,0,0,0.53,0.53,0.53,0,0,0,0.174,0.437,0,0.043,0,0,2.879,19,262,0 0,0,0.36,0,0.36,0.72,0,0,0,0,0,1.09,0,0,0,0,0.36,0,0.36,0,0.72,0,0,0,1.09,1.09,0,0,0,0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.114,0.114,0,0,0,0,2.075,7,110,0 0.68,0,1.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0,0.34,0.34,0.68,0.34,0,0.34,0,0,0,0,0.34,0,0,0,0.34,0,0.34,0,0,0,1.02,0.34,0,0,0,0.172,0,0.387,0,0,1.5,15,84,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.43,0,0.86,0,0,0,6.03,3.44,0.86,0.43,0.43,0.43,0.43,0.43,0,0.43,0.43,0.43,0.86,0,0.43,0.43,0,0,0.43,0,0.43,0,0,0,0,0.13,0.065,0.065,0,1.043,2.983,40,179,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.5,4,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.391,0,0,1.333,4,20,0 0,0,0,0,0,0,1.23,0,0,0,0,1.23,0,0,0,0,1.23,0,0,0,0,0,0,0,2.46,0,0,0,0,1.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.403,0,0,2.045,6,45,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,0.628,0,0,0,0,1.5,5,15,0 0,0,0,0,0,0,0,0,0,0,0,1.41,0,0,0,0,0.7,0,1.41,0,1.41,0,0,0,0,0,0.7,0,0,0,0.7,0,0,0,0,0,0,0,0,2.12,0,0,0,0,0,0,0,0,0,0.267,0.066,0,0,0,17.857,199,375,0 
0,0.07,0.15,0,0.12,0.02,0,0.02,0,0.12,0,0.3,0.12,0,0,0.02,0.17,0.12,0.22,0.02,0.12,0,0,0,0,0,0,0,0,0,0,0,2.16,0,0,0.15,0.68,0,0,0.02,0.1,0.02,0.02,0.02,0,0.33,0,0.43,0.037,0.225,0.007,0.015,0.041,0.003,2.198,25,2458,0 1.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.23,0,0.61,3.7,0,2.46,0,0,0,0,0,1.23,0,0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.61,0,0.226,0,0,0,0,1.3,3,26,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.166,2,7,0 0,0,0,0,1.54,0,0,0.51,0,0.51,0,0.51,0.51,0,0,0.51,0,0,1.54,0,1.03,0,0,0,0.51,0.51,1.54,0.51,0,0.51,0,0,0,0,0.51,0,0.51,0,0,0,0.51,0,0,0,0,0.51,0,0,0,0.158,0,0.079,0,0,1.711,15,77,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.166,2,7,0 1.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.75,0,1.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.75,0,0,0,0,0.317,0,0,0,0,1.125,2,9,0 0,0,0,0,0,0,0,0,0,2.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,13.04,0,0,0,0,4.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.407,3.555,19,96,0 0,0,0,0,0,0,0,0,0,0,0,3.7,0,0,0,0,0,3.7,0,0,0,0,0,0,3.7,0,0,0,0,3.7,0,0,0,0,0,0,3.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.466,6,22,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0,0,0,1.05,0,0,0,0,0,0,0,0,0,0,0,0,0,1.05,0,0,0,0,0,0,0,1.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.181,0,0,2,7,22,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.319,0,0,0,0,2.636,9,29,0 0,0,0,0,0.22,0.22,0,0,0,0.45,0,0.9,0,0,0,0,0,0,0.67,0,0.22,0,0,0,0.67,0,0,0.67,0,0,0.45,0,0,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.024,0,4.223,157,359,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.166,2,7,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.166,2,7,0 0,0,0,0,0,0.49,0,0.49,0,0,0,0,0,0,0,0,0,0,0.49,0,0,0,0,0,0,0,0.49,0,0,0,0,0,0,0,0,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,0.228,0,0,0,0,1.962,5,106,0 0,0,0.32,0,0.32,0,0,0,0,0,0,0.64,0,0,0,0,0,0,0.32,0,0,0,0,0,0.64,0.64,0,0,0,0,0,0,0,0,0,0.32,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0.04,0,0,0,0,1.902,10,175,0 0,0,2.5,0,0,0,0,0,0,0,0,2.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.5,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,1,1,15,0 0,0,0,0,0,0,0,0,0,0,0,5.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.961,0,0,2.333,9,14,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.431,0,0,0,0,2.733,7,41,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.96,0,0,0,0,1.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.613,0.306,0,0,0,1.611,7,29,0 0,0,0,0,0,0,0,0,0,0,0,0,0,1.19,0,0,0,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.178,21,61,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.12,2.56,2.56,2.56,2.56,2.56,2.56,2.56,0,2.56,2.56,2.56,0,0,0,2.56,0,0,0,0,0,0,0,0,0,0.881,0,0,0,0,2.5,11,40,0 0.77,0,0,0,0.25,0,0,0,0,0,0,1.28,0,0,0,0,0,0,2.05,0,2.31,0,0,0,0.25,2.57,0,0,0.51,0,0,0,0,0,0,0,0.25,0,0,0,0,0,0,0,0,0,0,0,0,0.117,0,0.039,0,0,4.016,45,237,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.45,0,0,0,0,0,0.72,6.56,0,0,0,0,0,0,0,0,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.124,0,0,2.361,30,85,0 0.07,0,0.22,0,0.03,0.07,0,0,0.03,0.22,0,0.71,0.03,0,0,0.03,0,0,0.9,0,0.56,0,0,0,1.58,0.26,0,0.11,0.11,0.11,0.18,0,0.03,0,0.22,0.07,0.18,0,0,0.03,0,0,0,0,0,0,0,0.03,0.028,0.078,0,0.028,0.016,0,1.765,20,1356,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.84,0,0,0,0,0,0,0,0,0,0,0.719,0,1.25,2,10,0 0,0,0.34,0,0.34,0,0,0,0.34,0,0,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.78,0,0,0,0,0,0,0,0,0,0,1.583,6,95,0 
0.89,0,0,0,1.49,0.29,0,0,0,0,0,1.19,0,0,0,0,0,0,0.89,0,0.89,0,0,0,0,0,0.29,0,0,0,0,0,0.29,0,0,0,0,0,0.29,0,0,0.59,0,0.59,0,0,0,0,0.325,0.162,0,0,0,0,1.583,9,76,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.84,0,0,0,0,0,0,0,0,0,0,0.724,0,1.25,2,10,0 0,0,0,0,0,0,0,0,0,0,0,3.72,0,0,0,0,0,0,3.1,0,0,0,0,0,0.62,0,0,0,1.24,0,0,0,0,0,0,0,0,0,0,0,0,1.24,0,0,0,0,0,0,0,0.11,0,0,0,0,1.47,8,50,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.23,4,16,0 0,0,0,0,0,0,0,0,0,0,0,0,0,1.07,0,0,0,0,0.35,0,1.43,0,0,0,3.95,0.71,0,0,0,0,0,0,0,0,0.71,0,0.35,0,0,0,0,0,0.35,0,0.35,0,0,0,0,0.113,0.113,0.056,0,0,2.969,16,193,0 0,0,0,0,0,0,0,0,0,1.37,0,2.75,0,0,0.68,0,0,0,0,0,0,0,0,0,2.06,2.06,1.37,0,0.68,0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,1.37,0,0,0,0,0,0.235,0,0,0,0,1.531,11,49,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.66,0,1.88,0,1.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.22,0,0,0,0,2.15,8,43,0 0,0,0.25,0,0.12,0.12,0,0.12,0.25,0,0.12,1.14,0.25,0,0,0,0.25,0,0,0,0,0,0.25,0,3.04,0,0,0,0,0,0,0,0,0,0,0,0.12,0,0,0,0,0,0,0,0,0,0,0,0,0.12,0,0,0.017,0,2.444,24,418,0 2,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,2,0,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.888,29,53,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.56,0,0,0,0,0.465,0,0,0,0,1.769,6,23,0 0,0,0,0,0,0,0,0,0,0,0,0.9,0,0,0,0,0,0.9,0.9,0,0,0,0,0,0.9,0.9,0,0,0.9,0,0.9,0,0.9,0,0,0.9,0,0,0,0,0,0.9,0,0,0,0,0,0,0,0.471,0,0,0,0,1.424,8,47,0 0,0,0,0,0,0,0,0,0,0.58,0,2.33,0,0,0,0.58,0,0,1.75,0,3.5,0,0,0,0,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.58,0,0,0,0,0,0.195,0,0.097,0,0,2.157,11,41,0 0,0,0.2,0,0,0,0,0,0.2,0.41,0,0,0,0,0,0,0,0,1.45,0,0.2,0,0,0,0,0.2,0.2,0,0.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.45,0,0,0,0,0,0.329,0,0.109,0,0.365,1.187,11,114,0 0,1.16,0,0,0,0,0,1.16,0,1.16,0,0,0,0,0,0,0,0,0,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.16,0,0,0,0,0,0,0,0,0,1.25,3,20,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.34,0,0,0.21,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0.06,0,0,0,0,2.426,76,199,0 0.23,0,0,0,0.23,0.23,0,0,0,0,0,0.23,0,0,0,0.23,0,0,0.47,0,0,0,0,0,0.47,0.23,0,0,0,0.47,0.23,0,0.47,0,0,0,0.23,0,0,0,0,0,0,0,0,0,0,0,0.107,0.107,0,0,0,0,1.595,8,142,0 0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0.684,0,0,0,0,1,1,8,0 0,0,0,0,0,0,0,0,0,0,0,3.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.74,0,0,1.5,4,18,0 0,0,0,0,0,0,0,0,0,0,0,0.46,0.46,0,0,0,0,0,1.38,0,0.46,0,0,0,0.46,0,0,0,0,0,0,0,0,0,0,0,0.92,0,1.38,0,0.92,0.46,1.38,0,1.38,0.92,0,0,0.149,0.074,0.149,0,0,0,1.76,12,132,0 0,0,0.8,0,0,0,0,0,0,0,0,0.8,0,0,0,0,0,0,4.8,0,1.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.8,0,0,0,0,0,0,0,0,0,0,1,1,11,0 0,0,2.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.854,0,0,0,0,1.823,8,31,0 0,0,0,0,0,0,0,0,0,0,0,0,1.4,0,0,0,0,1.4,2.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.125,2,9,0 0.21,0,0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.42,0,2.11,0,0,0,2.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.084,0,0.028,0.084,0.084,4.11,62,411,0 0,0,0,0,0,0,0,0,0.68,0,0.68,0,0,0,0,0.68,0,2.04,4.08,0,0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.523,0,0,1.218,5,39,0 0.11,0,0.22,0,0.55,0,0,0,0,0,0,0.55,0,0,0,0.11,0.11,0,1.22,0,0,0,0,0,1.22,0.44,0,0,0,0.11,0,0,1.89,0,0,0,0,1.22,0.11,0,0,0,0,0,0.22,0,0,0.11,0.052,0.156,0.034,0.017,0,0.052,3.061,38,600,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.35,0,0,0.21,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0.06,0,0,0,0,2.444,76,198,0 0.75,0,0,0,0,0,0.37,0,0,0.37,0,0,0,0,0,0,0.75,0,3.75,0,3.38,0,0,0,0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.099,0.597,0,0,0,2.125,13,85,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.72,3.44,0,0,0,0,0,0,0,0,0,0,1.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.611,7,29,0 
0,0,0,0,0,0,0,0,0,0,0,2.7,0,0,0,0,0,0,2.7,0,2.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.444,0,0,2.8,7,28,0 0,0,0,0,0,0.67,0,0.67,0,0,0,2.02,0,0,0,0,0,0,0,0,0,0,0,0,2.02,1.35,0,1.35,0,0,0,0,0,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.205,0,0,0,0,2.84,24,142,0 0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,9.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.8,0,0,1.5,4,9,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.55,0,0,0,0,0,0,0,0,0,1.714,6,12,0 0,0,0,0,0,1.03,0,2.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.144,0.072,0,0,0,1.523,11,64,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.5,2.63,3.5,2.63,1.75,1.75,0.87,0.87,0,0.87,1.75,0.87,0.87,0,2.63,0.87,0,0,0.87,0,1.75,0,0,0,0,0.49,0.122,0.122,0,0,2.203,12,130,0 0.06,0,0.4,0,0.13,0.13,0,0.13,0,0,0,1.4,0.2,0.06,0,0,0.2,0,0.06,0,0,0,0,0,2.54,0,0,0,0,0,0,0,0,0,0,0.06,0.06,0,0,0,0,0,0,0,0.06,0,0,0,0.028,0.085,0,0,0,0,2.341,22,665,0 0,0,0,0,0,0,0,0,0,0,0,1.02,0,0,0,0,0,1.02,2.04,0,0,0,0,0,2.04,1.02,0,0,0,0,0,0,0,0,0,0,0,0,1.02,0,0,0,0,0,0,0,0,0,0,0.188,0,0,0,0,3.9,13,78,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.8,12,28,0 0.26,0.26,0,0,0.52,0.26,0,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.83,1.57,0,0,0.26,0,0.26,0,0,0,0.26,0.26,0.26,0,0,0,0,0,0.52,0,0,0,0,0,0.065,0,0.032,0,0,0,1.455,12,115,0 0.13,0,0.26,0,0.65,0,0,0,0,0,0,0.52,0,0,0,0.13,0.13,0,1.18,0,0,0,0,0,0.52,0.26,0,0,0,0.13,0,0,2.1,0,0,0,0,1.44,0.13,0,0,0,0,0,0.26,0,0,0.13,0,0.188,0.041,0,0,0.062,2.876,38,420,0 0,0,0,0,0,0,0,0,0,0,0,3.63,0,0,0,0,0,0,0,0,0,0,0,0,0.9,3.63,0,0,0,0,0,0,0,0,0,0,1.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.153,0,0,1.933,7,58,0 0,0,0,0,0,0,0,0,0,0,0,6.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.645,0,0,1.666,7,15,0 
1.17,3.52,0,0,0,0,0,0,0,1.17,0,1.17,0,0,0,0,0,3.52,2.35,0,3.52,0,0,0,3.52,2.35,0,0,0,0,0,0,0,0,0,0,0,0,0,1.17,0,0,0,0,0,0,0,0,0,0,0,0.414,0,0,1,1,14,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,0,0,0,0,0,0,1.4,3,7,0 0,0,0,0,0,0,0,0,0,0,0,0,0.8,0,0,0,0,0,2.4,0,0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.8,0,1.6,0,0.8,0.8,1.6,0,1.6,0.8,0,0,0.128,0,0.128,0,0,0,1.596,12,83,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.25,0,0,0,0,0,0,0,6.25,0,0,0,0,0,6.25,0,0,0,0,0,0,0,0,0,0,0,6.25,0,0,0,0,0,0,0,0,0,1.285,3,9,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.11,0,0,0,0,0,11.11,0,0,0,0,1.492,0,0,0,0,1.571,4,11,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.25,2,5,0 0,0,0.44,0,0,0,0,0,0,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,1.76,1.76,0,0,0,0,0,0,0,0,0,0,0.88,0,0.88,0,0,0,0.44,0,0,0,0,0.44,0,0,0.061,0,0,0,1.949,17,230,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.55,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.25,0,0,0,0,0,0,0,2.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.12,0,0,0,0,0,0,0,0,0,1.142,2,8,0 3.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.03,0,0,0,0,0,0,0,3.03,0,0,0,0,0,0,0,0,0,0,0,3.03,0,0,0,0,0,3.03,0,0,0,0,0.609,0,0,0,0,1.181,3,13,0 0,0,0,0,0,0,0,0,0,0,0,1.69,0,0,0,0,0.42,0,0,0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.42,0,0,0,0,0,0,0,0.84,0,0,0,0,0,0.061,0,0,0,0,2.288,11,103,0 0,0,0.32,0,0,0,0,0,0.32,0,0,1.3,0,0,0,0,0,0,0.97,0,0.32,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0,0,0.32,0,0.32,0,0.65,0,0.32,0.32,0,1.3,0,0,0.047,0.094,0.047,0,0,0,1.973,17,148,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.88,0,0.47,0,0,0,2.83,2.35,1.88,2.35,1.41,1.41,0.47,0.47,0,0.47,1.41,0.47,0.47,0,0,0.47,0,0,0.47,0,1.41,0,0,0,0,0.144,0.072,0.072,0,0,2,13,168,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.53,0,0,0,0,0,1.26,1.26,1.26,2.53,1.26,1.26,0,0,0,0,1.26,0,0,0,0,0,0,0,0,0,1.26,0,0,0,0,0,0,0.208,0,0,1.758,11,51,0 
0.11,0.11,0.34,0,0.11,0,0,0,1.02,0,0,0.45,0,0,0,0.11,0,0,0.45,0,0.22,0,0,0,0.56,0.68,0.11,0,0,0,0,0,0.34,0,0,0,0.22,0,0,0.11,0,0.11,0,0,0,0,0,0,0.103,0.177,0.029,0,0.029,0,4.296,81,653,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.25,10,17,0 0,0,0.58,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0.58,0,2.33,0,0,0,0,0,2.33,0,0.58,0,0,0,0,0,0.58,0,0,0,0,0,0,0,0,0,0,0.58,0.58,0,0,0,0,0.203,0,0.407,0.407,0,3.294,17,112,0 0,0,0,0,0,0,0,0,0,0,0,4.65,0,2.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.448,0,0,2,4,28,0 0,0,0,0,0,0,0.88,0,0,0,0,0.88,0,0,0,0,0,0,0.88,0,0,0,0,0,0,0,0,0,0,0,0,0,1.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.133,0,0,0,0,1.629,9,44,0 0,0,0,0,0,0,0,0,0,0.63,0,0.63,0,0,0,0,0,0,0,0,0.63,0,0,0,2.54,1.91,1.91,0.63,0.63,0.63,0.63,0.63,0,0.63,0.63,0.63,0.63,0,0.63,0.63,0,0,0.63,0,0.63,0,0,0,0,0.279,0.093,0,0,0,1.981,12,105,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.69,3.84,3.84,3.84,3.84,3.84,3.84,3.84,0,3.84,3.84,3.84,0,0,0,3.84,0,0,0,0,0,0,0,0,0,1.092,0,0,0,0,2.909,11,32,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.75,4,7,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,6,0 0,0,0,0,0,0,0,0,0,0,0,0.54,0,0.54,0,0,0,0,0,0,0.54,0,0,0,2.71,1.63,0.54,0.54,0.54,0.54,0.54,0.54,0,0.54,0.54,0.54,0,0,0,0.54,0,0,0,0.54,0.54,0,0,0,0,0.531,0,0,0,0,4.114,35,251,0 0,0,0,0,0,0,0,0,0,0,0,2.38,0,0,0,0,0,0,2.38,0,0,0,0,0,0,0,2.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.38,0,0,0,0,0,0,0,0,0,1.666,9,25,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.33,0,0,0.21,0,0,0,0,0,0,0,0,0,0,0,0,0.03,0.06,0,0,0,0,2.481,76,201,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.92,0,0,0,0,0,0,0,0,0,1,1,6,0 
0,0,0,0,0,0,0,0,0,0,0,0.65,0,0,0,0,0,0,0,0,1.31,0,0,0,2.63,1.97,1.31,0.65,0.65,0.65,0.65,0.65,0,0.65,0.65,0.65,0,0,0,0.65,0,0,0,0.65,0.65,0,0,0,0,0.507,0,0,0,0,3.041,34,146,0 0,0,0.32,0,0.32,0,0,0,0,0,0,0.64,0,0,0,0,0,0,0.32,0,0,0,0,0,0.64,0.64,0,0,0,0,0,0,0,0,0,0.32,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0.04,0,0,0,0,1.677,10,156,0 0,0,0,0,0,0,0,0,0,1.96,0,0,0,0,0,0,0,1.96,1.96,0,0,0,0,0,0,0,0,1.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.373,0,0,0,0,1.857,11,26,0 0,0,0,0,0,0,0,0,0,1.96,0,0,0,0,0,0,0,1.96,1.96,0,0,0,0,0,0,0,0,1.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.373,0,0,0,0,1.857,11,26,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.2,0,0,0,0,0,2.4,1.2,1.2,1.2,1.2,1.2,1.2,1.2,0,1.2,1.2,1.2,0,0,0,1.2,0,0,0,0,1.2,0,0,0,0,0.57,0,0,0,0,2.312,11,37,0 0,0,0,0,0,0,0,0,0,1.11,0,3.33,0,0,0,0,0,0,1.11,0,0,0,0,0,2.22,1.11,0,0,0,3.33,0,0,0,0,0,1.11,0,0,0,0,0,0,0,0,0,0,0,1.11,0,0.191,0,0,0,0,1.454,7,48,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.343,0,0,0,0,4.226,8,317,0 0,0,0.33,0,0.66,0,0,0.33,0,1.32,0,0,0,0.33,0,0,0.33,0,1.32,0,0.33,0,0,0,1.98,0.66,0.66,0,0,0,0,0,0.33,0,0,0,0.99,0,0,0,0,0,0.33,0.33,0.33,0,0,0,0.168,0.392,0,0.224,0.336,0,4.115,42,321,0 0.51,0,0,0,0.17,0.17,0,0,0.34,0.17,0,2.07,0,0,0,0.17,0,0,2.24,0,1.03,0,0,0,0.34,0.69,0.17,0,0,0,0,0,0,0,0,0,0.34,0,0,0,0,0,0.17,0.34,0,0,0,0,0.466,0.248,0,0,0,0.062,2.926,48,319,0 0,0.1,0,0,0.1,0.21,0,0.1,0,0,0,1.19,0,0,0,0,0,0,0.1,0,0,0,0,0,0.87,0,0,0,0,0.1,0,0,0.1,0,0,0.43,0,0,0,0,0,0,0,0.1,0,0,0,0,0,0.047,0,0,0.031,0,1.793,12,391,0 0.09,0,0,0,0,0.09,0,0.28,0,0,0,0.76,0.09,0,0,0,0.38,0,0,0,0,0,0,0,2.66,0,0,0,0,0,0,0,0.38,0,0,0,0.09,0,0,0.47,0,0.09,0,0,0,0,0,0,0.026,0.093,0,0.013,0.12,0,2.658,24,577,0 0,0,0,0,0,0,0,0,0,0,0,2.89,0,0,0,0.57,0,0,0,0,1.73,0,0,0,2.31,0,0,0,0,3.46,0,0,0,0,0,0,0.57,0,0,0,0,0,0,0,0,0,0,0,0,0.163,0,0,0,0,1.9,12,76,0 
0.3,0.3,0,0,0.6,0.3,0,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.6,0.3,0,0,0.3,0,0.3,0,0,0,0.3,0.3,0,0,0,0,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0,1.389,8,82,0 0,0,0,0,0,0,0,0,0,0,0,1.4,0,0,0,0,0.7,0,1.4,0,1.4,0,0,0,0,0,0.7,0,0,0,0.7,0,0,0,0,0,0,0,0,2.11,0,0,0,0,0,0,0,0,0,0.267,0.066,0,0,0,17.952,200,377,0 0,0,0,0,0,0.24,0,0,0,0,0,0.72,0,0,0,0,0,0,0.24,0,0,0,0,0,2.65,1.2,0,0,0,0,0,0,0.24,0,0,0,0.96,0,0,0,0,0,0,0,0,0.48,0,0.24,0.067,0.371,0.067,0,0,0,3.322,44,319,0 0.23,0,0.23,0,0.69,0,0,0,0,0,0,1.39,0,0,0,0,0,0,0.23,0,0,0,0,0,0.23,0.23,0,0,0,0,0,0,0.23,0,0,0,0,0,0,0,0,0,0,0.23,0,0,0,0,0,0.068,0,0,0,0,1.483,9,89,0 0,0,0,0,0,0,0.68,0,0,0.68,0,0,0,0,0,0,0,0,1.37,0,2.06,0,0,0,0,0,0.68,0,0,0,0,0.68,0,0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.144,0,0,0,0.072,3.369,19,155,0 0.58,0,0,0,0.19,0.19,0,0,0.38,0.19,0,2.32,0,0,0,0.19,0,0,2.51,0,1.16,0,0,0,0.19,0.58,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0,0,0,0.38,0,0,0,0,0,0.251,0,0,0,0.071,2.08,11,156,0 0,0,0,0,0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0,0,0,0,1.26,0,0,0,0,0,0,0,0.31,0,0,0,0.31,0,0,0,0,0.14,0,0,0,0,1.592,7,129,0 0,0,0,0,0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0,0,0,0,1.26,0,0,0,0,0,0,0,0.31,0,0,0,0.31,0,0,0,0,0.14,0,0,0,0,1.592,7,129,0 0,0,0.42,0,0.64,0,0,0,0,0,0,0.21,0,0,0,0,0,0,0.85,0,0.21,0,0,0,2.13,0.21,0.21,0,0,0,0,0,2.13,0,0,0,0.42,0,0.21,0.21,0,0,0.42,0.21,0.64,0,0,0,0.238,0.443,0.068,0,0,0,2.524,18,260,0 0,0,0,0,0.24,0.49,0,0,0,0.49,0,0.24,0,0,0,0,0,0,0.99,0,0.49,0,0,0,0.74,0,0,0.74,0,0,0.49,0,0,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.054,0,0,0.027,0,4.634,157,380,0 0,0.23,0,0,0.47,0,0.23,0,0,0,0.23,0,0,0,0,0,0,0.23,0.23,0,0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.23,0,0,0,0,0,0,0.23,0,0,0,0.298,0,0.149,0,0,1.533,18,184,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.03,0,0,0,0,0,0,0,3.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.03,0,0,0,0,0,0,0,0,0,1.272,4,14,0 0,0,0,0,0,0,0,0,0,0,0,3.75,0,0,0,0,0,0,0,0,0,0,0,0,1.25,1.25,0,1.25,0,0,0,0,0,0,0,0,0,0,1.25,0,0,0,0,0,0,0,0,0,0,0.224,0,0,0,0,2.379,18,69,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0.39,0,0.19,0,0,0,0,0,0,0.19,0.19,1.98,0,0.19,0,0,0,0.19,0.19,0,0.19,0,0,0,1.58,1.19,0,0.19,0,0.39,0.19,0,0.59,0,0.39,0.39,1.19,0,0.19,0,0,0.19,0.19,0,0,0,0,0.39,0.28,0.14,0.028,0.112,0,0,2.101,17,311,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0.88,0,0,0.88,0.88,2.65,0,1.76,0,0,0,0.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.37,3,37,0 0.13,0.06,0,0,0.13,0.13,0,0,0.13,0.27,0.06,0.41,0.06,0,0,0.27,0.06,0,1.04,0.13,0.83,0,0,0.06,1.46,0.48,0,0.13,0,0.06,0.27,0,0,0,0.13,0,0.2,0,0,0,0,0,0,0,0.06,0,0,0.48,0,0.194,0,0.029,0.048,0.009,1.793,23,888,0 0.09,0.09,0.28,0,0.28,0,0,0.28,0,0,0,0.09,0.18,0,0,0,0.18,0.28,1.22,0,0.37,0,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0.09,0.28,0,0,0.09,0,0,0,0.28,0.37,0.09,0,0,0.014,0.084,0,0.042,0,0.042,1.877,18,552,0 0,0,0,0,0,0,0,0,0,0.68,0,0,0,0,0,0,0,0,1.37,0,0,0,0,0,0,0,0.68,0,0,0,0,0,0,0,0,0,0.68,0,0.68,0,0,0,0.68,0,0,0,0,0,0,0,0,0,0,0,1.488,12,64,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.5,4,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.934,0,0,0,0,3.2,7,16,0 0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,2,2,2,2,4,2,2,0,2,2,2,0,0,0,2,0,0,0,0,0,0,0,0,0,0.682,0,0,0,0,2.705,11,46,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.354,0,0,0,0,2.187,5,35,0 0.9,0,0,0,0,0,0,0,0,0,0,2.7,0,0,0,0,0,0,3.6,0,0,0,0,0,0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.9,0,0,0,0.9,0,0.479,0,0,0,0,2.166,8,52,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.333,8,10,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.35,0,0,0,1.17,1.17,2.35,0,0,0,0,4.7,0,4.7,0,0,0,0,1.17,0,0,0,0,0,2.35,0,0,0,0.185,0.743,0,0,0,0,4.476,14,94,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.84,0,0,3.84,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,1.85,0,0,0,0,0,1.85,3.7,0,3.7,0,0,0,0,0,0,3.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.544,1.634,0,0,0,2.352,11,40,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,6,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.42,0.42,0.85,0,0,0,0,2.14,0,2.14,0,0,0,0,0,0,0,0,0,0,0.85,0,0,0,0.332,0.73,0,0,0,0,5,14,270,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.84,1.27,0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.84,0,0,0,0.325,0.781,0,0,0,0,4.758,14,276,0 0,0,0.24,0,0,0,0,0.12,0.12,0,0,0.6,0.12,0.12,0,0,0.72,0,0,0,0,0,0,0,1.81,0,0,0,0,0,0,0,0,0,0,0.12,0.12,0,0,0,0,0,0,0,0,0,0,0,0.105,0.06,0,0,0,0,1.827,23,466,0 0.67,0,0,0,0,0,0,0,0.33,0.33,0.33,0.33,0.33,0,0,0,0,0.33,1.35,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.056,0.28,0.168,0.056,0,0,1.866,17,112,0 0.1,0,0.1,0,0,0,0,0,0.92,0,0,0.4,0,0,0,0,0.1,0,0.4,0,0.2,0,0,0,0.51,0.61,0.1,0,0,0,0,0,0.3,0,0,0,0.1,0,0,0,0,0,0,0.1,0,0,0,0,0.014,0.154,0.028,0,0.028,0,2.785,32,507,0 0.04,0.02,0.12,0,0.08,0.02,0,0.08,0,0.06,0.02,0.5,0.06,0,0.02,0.02,0.14,0.12,0.25,0,0.19,0,0.04,0,0,0,0.1,0,0.02,0,0,0,1.97,0,0,0.19,0.97,0.02,0,0.02,0.1,0.02,0,0.14,0,0.33,0.02,0.1,0.024,0.198,0,0,0.018,0.003,2.43,81,3337,0 0,0,0,0,0,0,0,0,0,0,0,1.66,0,0,0,0,0,0,3.33,0,1.66,0,0,1.66,1.66,0,1.66,0,0,0,0,0,0,0,0,0,1.66,0,0,0,0,0,0,0,0,0,0,0,0,0.29,0,0,0,0.29,1.722,7,31,0 0,0,0.5,0,0,0,0,0,0,0,0,0.5,0,0,0,0,0,0,0,0,0,0,0.5,0,1.52,2.03,0,0,1.52,1.52,0,0,0,0,0,0,1.01,0,0,0,0,0,0,0.5,0,0,0,0,0,0,0,0.122,0.061,0,4.309,38,237,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,6,18,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.98,0,0.99,0,0,0,2.97,1.98,0,0.99,0,0,0,0,0,0,0.99,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.51,0,0,0,0,2.347,10,54,0 0,0.11,0,0,0.11,0.22,0,0.11,0,0,0,1.32,0,0,0,0,0,0,0.22,0,0,0,0,0,0.99,0,0,0,0,0.22,0,0,0.11,0,0.11,0.44,0,0,0,0,0,0,0,0.11,0,0,0,0,0,0.047,0,0,0.031,0,1.614,12,339,0 0,0,0.21,0,0,0,0,0.21,0,0.21,0,0,0,0,0,0,0,0.21,0,0,0,0,0,0,0.21,0.21,0,0.43,0,0,0,0,0.21,0,0.21,0,0.21,0,0,0,0,0,0,0,0,0,0,0,0,0.149,0,0,0,0,1.79,15,188,0 0,0,0,0,0,0.3,0,0,0,0,0.3,2.42,0,0,0,0.3,0,0.9,3.63,0,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.087,0,0,2.74,64,148,0 0,0,0,0,0,0,0,0,0,0,0,2.9,0,0,0,0.58,0,0,0,0,1.74,0,0,0,2.32,0,0,0,0,3.48,0,0,0,0,0,0,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0.165,0,0,0,0,1.7,12,68,0 0,0,0,0,0,0,0,0,0,5.26,0,5.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.952,0,0,3.2,12,16,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.22,0,1.4,0,0,0,1.4,0,1.4,2.81,0,0,0,0,1.4,0,1.4,0,0,0,0,0,0,0,0,0,0,0,0,2.81,0,0.458,0,0.229,0,0,2.653,15,69,0 0,0,0,0,0,0,0,0,0,1.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.265,0,0,0,3.85,26,77,0 0,0,1.28,0,0,0,0,0,0,0,0,1.28,0,0,0,0,0,0,2.56,0,0,0,0,0,2.56,1.28,0,0,0,0,0,0,2.56,0,0,0,1.28,0,0,0,0,3.84,0,0,0,0,0,0,0,0.148,0.148,0,0,0,2.034,13,59,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.96,0,1.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.74,0,0.268,0,0.134,0,0,2.75,8,22,0 0,0,0,0,0.19,0,0,0,0,0,0.19,0.77,0.19,0,0,0.19,0,0.19,0.38,0.19,0,0,0,0,0.19,0,0,0.38,0,0,0,0,0,0,0,0.19,0.38,0,0.19,0,0,0.38,0,0,0,0,0,0,0.068,0.113,0,0.022,0.045,0,1.74,21,395,0 0,0,2.12,0,1.06,0,0,0,0,1.06,0,1.06,0,0,0,0,0,0,4.25,0,0,0,0,0,0,0,1.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.06,0,0,0,0,0,0,0,0,0,1.785,6,25,0 0,0,0,0,0,0,0,0,0,0,0,0.68,0,0,0,0,0,0,0.68,0,0,0,0,0,0,0,0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.68,0,0,0,0,0.237,0,0,0,0,1.8,9,36,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,4,4,4,4,4,4,4,0,4,4,4,0,0,0,4,0,0,0,0,0,0,0,0,0,1.117,0,0,0,0,3.333,11,30,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.33,4.16,4.16,4.16,4.16,4.16,4.16,4.16,0,4.16,4.16,4.16,0,0,0,4.16,0,0,0,0,0,0,0,0,0,1.142,0,0,0,0,3.333,11,30,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.2,4.05,0,0,0,0,0,0,0.9,0,0,0,2.25,0,0,0,1.35,0.9,0,0,0.9,0,0,0,0.332,0.747,0.166,0,0,0,4.054,19,296,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.22,0,0,0,0,0,0,0,0,0,1.214,4,17,0 0,0,0.36,0,0,0,0,0,0.36,0,0,0,0,0,0,0,0,0.36,1.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.236,0,0,0,0,1.277,3,69,0 0,0,1.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.21,0,0,0,0,0,1.21,0,0,0,0.238,0,0,0.238,0,0,1,1,16,0 2,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,2,0,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.888,29,53,0 0,0,0,0,1.21,0,0,0,0,0,0,2.43,0,0,0,0,0,0,0,0,1.21,0,0,0,0,0,1.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.43,0,1.21,0,0,0,0,0.567,0.378,0,0,0,0,1.333,3,24,0 0,0.5,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0.5,0,0,0,0,0,0,0,1.5,1,0,0,0.5,0,0,0,0,0,0,0,0,0,0,0,0,0.5,0,0,0,0,0,0,0,0,0,0,0,0,1.468,5,69,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.17,0,0,0,0,0,5.43,6.52,2.17,1.08,1.08,1.08,1.08,1.08,0,1.08,1.08,1.08,0,0,1.08,1.08,0,0,0,0,1.08,0,0,0,0,0.472,0,0,0,0,5.291,34,127,0 0.06,0,0.25,0,0.25,0.25,0,0.5,0,0,0,0.56,0.12,0.06,0,0,0.5,0,0.12,0,0,0,0,0,2.06,0,0,0,0,0,0,0,0.06,0,0,0.75,0.06,0,0,0,0,0.06,0,0.06,0,0,0,0.06,0.104,0.069,0,0,0.043,0,2.148,23,623,0 0,0,0,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0,0,1.31,0,0,0,0,0,0,2.63,0,0,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0,0.431,0,0,0,0,2.176,8,37,0 0,0,0,0,0.19,0,0,0,0,0,0.19,0.76,0.19,0,0,0.19,0,0.19,0.38,0.19,0,0,0,0,0.38,0,0,0.38,0,0,0,0,0,0,0,0.19,0.38,0,0.19,0,0,0.38,0,0,0,0,0,0,0.066,0.111,0,0.022,0.044,0,1.759,21,403,0 
0.75,0,0,0,0,0,0,0,0.75,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.5,0,0,0,0,0,0,1.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.846,39,100,0 0,0,1.69,0,0,0,0,0,0,0,0,1.69,0,0,0,0,0,0,1.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.69,0,0,1.69,0,0,0,0,0,0,0,0,0,0,0,0,1,1,12,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.714,5,12,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.12,0,0,0,0,0,0.33,0,0,0,1.444,5,13,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,6,0 0,0,0,0,0,0,0,0,0,0,0,0,1.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.384,4,18,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0.99,0,0,0,0,0,0,0,0,0,0,0,0,0.49,0,0,0,0,0,0,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.07,0.28,0,0,0,0,1.363,5,30,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,7,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.526,0,0,0,0,1.529,6,26,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,6,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.285,7,32,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,6,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.5,3,6,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.833,5,11,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.5,3,6,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.5,7,10,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.25,6,9,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.5,3,6,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.66,0,0,0,0,0,0,0,0,0,0,1.5,3,6,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,6,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,6,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,6,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.4,2,7,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.6,4,8,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14.28,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.333,8,10,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,4,6,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.69,0,0,0,0,0,0,0,0,0,0,7.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,7,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.142,2,8,0 0,0,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.34,0,0,0,0,0,0,0,0.44,0,0,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,3.901,33,398,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.941,0,0,0,0,1,1,4,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,4,6,0 0,0,0,0,0,0,0,0,0,0,0,5.26,0,0,0,0,0,0,0,0,0,0,0,0,2.63,2.63,0,0,5.26,0,0,0,0,0,0,0,0,0,0,0,0,7.89,0,0,0,0,0,0,0,0,0,0,0,0,1.4,3,14,0 0,0.15,0,0,0.15,0,0,0,0.15,0.15,0.3,0.46,0,0,0,0,0,0.15,0.3,0,1.07,0,0,0,0,0,0,0,0,0.15,0,0,0.61,0,0,0.15,1.22,0,0,0,0,0,0,0,0,0.61,0,0.15,0.019,0.137,0,0,0,0,2.276,20,485,0 0.36,0.36,0,0,1.8,0,0,0,0,0,0,1.44,0,0,0,0,0.72,0,0.36,0,1.08,0,0,0,1.8,0,0,0,0.72,0.36,0,0,0,0,0,0,0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.636,12,54,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.88,0,0,0,0,0.28,0,0,0.28,0,0,0,0.14,0,0.28,0,0,0,0,0,0,0,0,0,0,0,0,0.037,0,0,12.43,30,2051,0 0,0,0,0,2.02,0,0,0,0,0,0,0,1.01,0,0,0,0,0,1.01,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.02,0,0,0,0,0.166,0.166,0.166,0,0,0,1.428,6,40,0 0,0,0,0,0.3,0,0,0,0,0,0,0.76,0,0,0,0,0,0,0.15,0,0,0,0,0,0.3,0.15,0,0,0.6,0,0,0,0,0,0,1.21,0.15,0,0,0,0,0,0,0,0,0.15,0,0,0,0.022,0,0,0,0,1.59,37,272,0 0,0,0,0,0,0,0,0,0,1.08,0,1.08,0,0,0,0,0,0,2.17,0,2.17,0,0,0,0,2.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.08,0,0,0,0,0,0.173,0,0,0,0,2.1,18,42,0 0,0,0.61,0,0,0,0,0,0,0,0,1.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.61,0,0.079,0.158,0,0,0,0,2.508,17,143,0 0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0,1.29,0,0,0,0,0,0,4.51,3.22,3.22,1.29,0,1.29,1.29,0,0,0,1.29,1.29,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0.324,0.194,0.129,0,0.194,2.142,10,150,0 0,0,0,0,0.53,0,0,0,0,0,0,0.53,0.53,0,0,0,0,0,0.53,0,1.06,0,0,0,0,0,1.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.53,0,0,0,0,0.188,0,0,0,0,1.142,3,40,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.869,0,1.739,0,0,1,1,6,0 0,0,0,0,0,0,0,0,0,0,0,0.14,0,0,0,0,0,0,0,0,0.14,0,0,0,5.16,0,0,0,0.14,0.44,0,0,0.14,0,0,0,1.47,0,0.59,0,0,0,0,0,0.29,0,0,0,0.186,0.538,0.124,0,0,0,4.454,55,931,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.92,0,0,0,0,0,0,0,3.84,0,0,0,1.92,0,3.84,0,0,0,0,0,0,0,0,0,0,0,0,0.178,0,0,1.666,7,50,0 0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0.636,0,0,2,10,18,0 0,0,0,0,0,0,0,0,0,0,0,2.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.85,0,0,0,0,0,0,0,0,0,0.444,0,0,2.333,12,28,0 0,0,0,0,0,0,0,0,0,0,0,1.35,0,0,0,0,0,0,1.35,0,0,0,0,0,0,0,1.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.35,0,0,0,0,0,0,1.533,6,23,0 0.11,0,0.23,0,0.23,0.11,0,0,1.15,0,0,0.34,0,0,0,0.11,0,0,0.46,0,0.23,0,0,0,0.57,0.69,0.11,0,0,0,0,0,0.34,0,0,0.34,0.23,0,0,0,0,0,0,0,0,0,0,0,0.048,0.194,0.032,0,0.032,0,3.275,33,511,0 0.17,0,0.17,0,0,0,0,0,0.8,0,0,0.26,0,0,0.08,0,0,0,0.35,0,0.17,0,0,0,0.62,0.71,0.08,0,0,0,0,0,0.26,0,0,0.08,0.44,0,0,0,0,0,0,0,0,0,0,0,0.253,0.168,0.084,0,0.024,0,4.665,81,1031,0 0.07,0,0.29,0,0.07,0.07,0,0,0.74,0,0,0.22,0,0.07,0,0,0,0.07,0.29,0,0.22,0,0,0,0.67,0.74,0.07,0,0,0,0,0,1.63,0,0,0,0.59,0,0,0,0,0,0.07,0,0,0,0,0,0.163,0.228,0.032,0,0.021,0,3.03,45,706,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,4,2,2,4,0,2,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2.095,11,44,0 0,0,0,0,0,0,0,0,0.75,0,0,0,0,0,0,0,0,0,1.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.75,0,0,0,0,0.255,0,0,0,0,1.842,6,35,0 0.83,0,0.41,0,0,0,0,0,0,0,0.41,0.83,0,0,0,0,0,0,2.91,0,1.66,0,0,0,0.41,0.41,0,0,0,0,0,0,0,0,0,0,0.41,0,0,0,0,0,0.41,0,0,0,0,0,0,0,0,0.283,0,0,2.022,13,91,0 0,0,0.06,0,0,0,0,0.06,0.13,0.13,0.13,1.67,0.26,0.33,0,0.13,0.13,0,0,0.06,0.06,0,0,0,2.54,0.13,0,0,0.2,0.26,0.13,0,0,0,0.06,0.2,0.13,0.06,0,0.06,0,0,0,0,0,0,0,0,0.028,0.131,0,0,0,0,1.997,20,787,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.32,0,0,0,0,0,0,0,2.32,0,0,0,0,0,4.65,0,0,0,0,0,0,0,0,0,0,0,2.32,0,0,0,0,0,0,0,0,0,1,1,11,0 0,0,0.38,0,0.38,0.38,0,0,0.38,0,0,1.55,0,0,0,0,0,0,1.16,0,0.38,0,0,0,0.77,0.77,0.38,0,0,0,0,0,1.93,0,0,0,0,0,0.38,0,0,0,0,0,1.16,0,0,0,0,0.061,0,0,0,0,2.953,34,127,0 
0,0,0,0,0,0.47,0,0,0,0.23,0,0,0,0,0,0,0,0,2.6,0,0,0,0,0,5.45,0,0.23,0,0,0,0,0,0,0,0,0,0.71,0,0,0,0,0,0.94,0,1.18,0,0,0,0.119,0.158,0.119,0,0,0,2.565,19,295,0 0,0,0,0,0,0,0,0,0,0,0,0.88,0,0,0,0,0,0.22,0,0,0,0,0,0,0.22,0.22,0,0.44,0,0,0,0,0,0,0.22,0,0,0,0,0,0,0,0,0.22,0,0,0,0.22,0,0.172,0,0,0,0,1.729,15,128,0 0,0,0,0,0,0.57,0,0,0,0.28,0,0,0,0,0,0,0,0,2.86,0,0,0,0,0,4.58,0,0.28,0,0,0,0,0,0,0,0,0,0.57,0,0,0,0,0,0.85,0,0.85,0,0,0,0.144,0.192,0.096,0,0,0,2.306,19,203,0 0.41,0,0.83,0,0,0.41,0,0,0,0,0,0.83,0,0,0,0,0,0,1.67,0,0.41,0,0,0,0,0,0.83,0,0,0.41,0,0,0,0,0,0,0,0,0,0,0,0,0.41,0,0,0,0,0,0,0,0,0,0,0,1.12,3,56,0 0,0,0.15,0,0.13,0.03,0,0.08,0,0.06,0.03,0.64,0.08,0.01,0,0.05,0.22,0.01,0.15,0.03,0.33,0,0,0,0,0,0.01,0,0.03,0.01,0,0,1.33,0,0,0.1,0.76,0,0.01,0.05,0.06,0.03,0,0.05,0,0.1,0,0.37,0.024,0.254,0.002,0.002,0.007,0,2.128,36,3467,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,0.675,0,0,0,0,0,1,1,3,0 0,0.33,0,0,0.33,0,0,0,0,0,0,0.33,0,0,0,0,0,0.33,0,0,0,0,0,0,0.33,0.33,0,0.67,0,0,0,0,0,0,0.33,0,0,0,0,0,0,0,0,0.33,0,0,0,0.33,0,0.132,0,0,0,0,1.857,15,117,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.55,0,0,0,0,0,0,0,2.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,7,0 0,0,0,0,0,0,0,0,0,0,0,1.63,0,0,0,0,0,0,1.63,0,0,0,0,0,0,0,1.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.63,0,0,0,0,0,0,0,0,0,2.333,8,28,0 0,0,0.52,0,0,0,0,0,0,0,0,1.56,0,0,0,0.52,0,0,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,5.72,0,0,0,1.56,0,0,0,0.52,1.04,0,0,0,0.52,0,0,0,0.075,0.151,0,0,0,2.416,18,116,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,18.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.449,0,0,0,2,5,14,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,11.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.25,2,5,0 
0,0,0,0,0,0,0,0,0,0,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.86,0,0,0,4.74,0,0,0.86,0,0,0,0,0,0,0,0.43,0,0,0,0,0,0.062,0,0,0,0,1.21,6,69,0 0,0.13,0.54,0,0.27,0.13,0,0,0,0.27,0,1.21,0,0,0,0.13,0,0,1.89,0,0.94,0,0,0,0.13,0.94,0,0,0,0,0,0,0,0,0,0,0.54,0,0,0,0,0,0.13,0,0,0,0,0,0.073,0.048,0,0.024,0,0,5.15,82,582,0 1.26,0,0,0,0,0,0,0,0,1.26,0,1.26,0,0,0,0,0,1.26,2.53,0,0,0,0,0,0,0,0,2.53,0,0,0,0,0,0,0,0,0,0,0,0,1.26,0,0,0,1.26,2.53,0,0,0,0,0,0,0,0,2.842,11,54,0 0,0,0,0,0.64,0,0,0,1.28,0,0,0.64,0,0,0,0,0,0,0,0,1.28,0,0,0,0,0,1.28,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0,0,0,0.64,0,0,0,0,0,0,0.197,0,0,2.35,13,94,0 0,0,0,0,0.5,0,0,0,0,0,0,0.5,0,0,0,0,0,0,1.01,0,0,0,0,0,1.01,0.5,5.55,0.5,0.5,0.5,0.5,0.5,0,0.5,0.5,0.5,0.5,0,0.5,0.5,0,0,0.5,0,0.5,0,0,0,0.083,0.167,0,0.502,0,0,1.547,11,113,0 0,0,0,0,0,0,0,0,0,0,0,1.33,0,0,0,0,0,0,0.44,0,0,0,0,0,0,0.44,0,0,0,0,0,0,1.33,0,0.44,0,0.89,0,0,0,0,0,0,0,0,0,0,0,0,0.397,0,0,0,0,1.936,10,122,0 0,0,0,0,0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0,0,0,0,0,1.37,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0,0,0,0.143,0,0,0,0,1.784,18,141,0 0,0,0,0,0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0,0,0,0,0,1.37,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0,0,0,0.143,0,0,0,0,1.784,18,141,0 0,0,0,0,0.9,0,0,0,0,0,0,0,1.8,0,0,0.9,0,0,0.9,0,0,0,0,0,2.7,0.9,0.9,0.9,0.9,0.9,0.9,0.9,0,0.9,0.9,0.9,0,0,0,0.9,0,0,0,0,0,0,0,0,0,0.449,0,0,0,0,2.15,11,43,0 0,0,0,0,0,0,0,0.99,0,0,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,1.98,0.49,0,0.49,0.49,0.99,0,0,0,0,0.49,0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0.119,0,0,0,0,2.135,13,126,0 0,0,0,0,0,0.23,0.23,0.23,0,0,0,0.46,0,0.46,0,0,0,0,0.23,0,0,0,0.23,0,0,0,0,0,0,0,0,0,0.23,0,0,0,0.23,0,0,0,0,0,0,0,0,0,0,0,0.073,0,0,0,0,0,3.184,74,207,0 0,0,0,0,0,0,0,0,0,0,0,0,0.86,0.86,0,0,0,0,0,0,0,0,0,0,3.44,2.58,1.72,0.86,0.86,0.86,0.86,0.86,0,0.86,0.86,0.86,1.72,0,1.72,0.86,0,0,1.72,0,1.72,0,0,0,0,0.27,0.135,0.135,0,0,2.288,13,103,0 
0.1,0,0,0,0,0.1,0,0.52,0,0.1,0,1.9,0.1,0.1,0,0.1,0.21,0,0,0,0,0,0,0,3.17,0,0,0,0,0,0,0,0,0,0,0.1,0.1,0,0,0,0,0,0,0.1,0,0,0,0,0.027,0.138,0,0.041,0.041,0,2.321,31,469,0 0,0,0,0,0,0,0,0,0,0,0,1.61,0,0,0,0,0,0,0.53,0,0,0,0,0,1.61,0.53,0,0,0.53,0,0,0,0,0,0,0.53,0,0,0,0,0,0.53,0,1.07,0,0,0,0.53,0,0,0,0,0,0,1.375,5,99,0 0,0,0.41,0,0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.41,0,0,0,0.41,0,0,0,0,0,0,0.41,0,0.41,0,0,0,0,0,0,0,0,1.522,11,67,0 0,0,0,0,0.43,0,0,0,0,0,0,0.43,0,0,0,0,0,0,2.19,0,0,0,0,0,0,0,0.43,0,0,0,0,0,0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.079,0.158,0,0,0,0,1.115,2,29,0 0.23,0,0.23,0,0.69,0,0,0,0,0,0,1.38,0,0,0,0,0,0,0.23,0,0,0,0,0,0.23,0.23,0,0,0,0,0,0,0.23,0,0,0,0,0,0,0,0,0,0,0.23,0,0,0,0,0,0.066,0,0,0,0,1.412,9,89,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.5,9,15,0 0,0,1.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.02,0,0,0,0,0,0.395,0,0,0,1.523,6,32,0 0,0,0,0,0,0,0,0,0.75,0,0,0.75,0,0,0,0,0,0,2.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.75,0,0,0,0,0.263,0,0,0,0,1.176,3,20,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,4,6,0 0,0,0.19,0,0.19,0.19,0,0,0,0.19,0,0.38,0,0,0,0,0,0.38,1.54,0,0.96,0,0,0,2.69,1.54,0.77,0.57,0.19,1.15,0.19,0.19,0,0.19,0.57,0.38,0.38,0,0,0.19,0.38,0,0.38,0,0.38,0,0,0.19,0.026,0.404,0.053,0.026,0,0,2.894,45,411,0 0,0,0,0,0,0,0,0.65,0,1.3,0,0,0,0,0,0,0,0.32,0.32,0,0.65,0,0,0,4.9,4.24,0.32,0,0,0.65,0,0,0,0,0,0,1.63,0,0,0,0.98,0,0,0,0.65,0,0,0,0.153,0.562,0.102,0,0,0,5.555,42,500,0 0.25,0,0,0,0,0,0,0,0.25,0,0,0,0,0,0,0.25,0,0,0.25,0,0,0,0,0,2.06,1.03,0.25,0.25,0.25,0.25,0.25,0.25,2.83,0.25,0.25,0.25,0.25,0,0,0.25,0,0,0.25,0,0.25,0,0,0,0.301,0.473,0.043,0.043,0,0,2.111,17,190,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.44,0,0,0.44,0,0,0,0,0,0,0.44,0.44,0,0.88,0,0,0,0,0,0,0.44,0,0,0,0,0,0,0,0,0.44,0,0,0,0,0,0.123,0,0,0,0,1.857,15,104,0 
0,0,0.44,0,0.44,0,0,0,0,0.44,0,0.88,0,0,0,0,0,0.88,2.22,0,2.22,0,0,0,1.33,0.44,0.88,0.88,0,0.88,0,0,0,0,0.88,0,0,0,0,0,0,0,0,0,0,0,0,0.44,0,0.506,0,0.05,0,0,3.772,45,249,0 0.33,0,0,0,0,0,0,0,0.33,0,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,3.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.448,0,0.056,0,0,1.788,6,93,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.166,2,7,0 0,0,1.1,0,0,0,0,0,0,0.27,0.27,0.55,0,0,0,0,0,0,1.1,0,0.83,0,0,0,1.1,0.27,0,0,0.55,0.27,0,0,0,0,0,0,0.83,0,0,0,0,0,0,0,0,0,0,1.1,0.543,0.349,0,0,0,0,2.724,79,316,0 0,0.29,0.29,0,0.29,0,0,0.29,0,0,0.29,1.45,0,0,0,0,0.58,0,1.16,0,1.45,0,0,0,0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.037,0.113,0,0,0.037,0,1.531,7,147,0 0,0,2.56,0,0,0,0,0,0,0,0,0,0,5.12,0,0,0,0,2.56,0,0,0,0,0,0,0,2.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.56,0,0,0,0,0.485,0,0,0,0,1,1,11,0 0,0,0,0,0,0,0,0,0,0,0,2.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.22,0,0,0,0,0,0,0.374,0,0,1.375,5,22,0 0,0,0,0,0,5.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.666,5,16,0 0,0,0,0,2.22,0,0,0,0,0,0,3.33,0,0,0,0,0,0,1.11,0,1.11,0,0,0,1.11,1.11,0,0,1.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,22,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.26,0,0,5.26,0,0,0,0,0,0,0,0,0,1.25,2,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,2.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,7,18,0 0,0,0,0,0,0,0,0,0,0,0,1.81,0,0,0,0,0,0,1.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.81,0,0,0,0,0,0,0,0.286,0,0,0,0,2.277,9,41,0 2,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,2,0,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.888,29,53,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.69,0,0,0,0,0,1.69,0,0,0,0,0,0,1.629,7,44,0 0,0,0,0,0.93,0,0,0,0,0.93,0,0.46,0,0,0,0,0,0,1.4,0,0,0,0,0,4.22,1.87,0.93,0.46,0.93,0.46,0.46,0.46,0,0.46,0.46,0.46,0.46,0,0,0.46,0,0,0.46,0,0.93,0,0,0,0,0.2,0.066,0,0,0,5.593,42,330,0 0,0,0,0,0,0,0,0,0,0,0,1.23,0,0,0,0,0,0,3.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.23,0,0,1.23,0,0,0,0,0.404,0,0,0,0,1.187,4,19,0 0,0,1.49,0,0,0,0,0,0,0,0,1.49,0,0,0,0,0,0,1.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.49,0,0,0,0,0,0,0,0.238,0,0.238,0,0,2,8,50,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.32,0,0,0,0,0,1.98,3.97,0,0,0,0.66,0,0,0,0,0,0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.102,0,0,2.531,30,81,0 0,0.23,0,0,0,0.23,0,0.46,0,0,0,0.92,0,0,0.23,0,0,0.23,0.23,0,0,0,0,0,1.15,0.92,0,0,0,0.23,0,0,0.23,0,0,0.23,0.23,0,0,0,0,0.23,0.23,0,0,0.23,0,0,0.063,0.063,0,0.159,0,0,1.616,13,173,0 0,0,0,0,1.23,0,0,0,0,0,0,0,0,0,0,0,0,0,1.23,0,0,0,0,0,0,0,1.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.7,0,1.23,1.23,0,0,0,0,0.468,0,0,0,0,1.058,2,18,0 0,0.8,0,0,0,0,0,0,0,1.6,0,0,0,0,0,2.4,0,0,5.6,0,1.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.235,0,0,1.38,4,29,0 0.07,0,0.07,0,0,0.07,0,0,0,0,0.15,1.07,0.15,0.07,0,0,0.53,0,0,0,0,0,0.22,0,1.83,0,0,0,0,0,0,0,0,0,0,0.22,0.07,0,0,0,0,0,0,0,0,0,0,0,0.127,0.174,0,0,0.023,0,2.182,24,659,0 0.2,0,0.2,0,0.4,0,0,0,0,0,0.3,1.71,0,0.1,0,0,0.1,0,1.01,0.3,0.5,0,0,0,2.93,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.042,0,0.014,0,0,4.325,63,545,0 0,0,0,0,1.11,0,0,0,0,0,1.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.11,0,0,0,0,0,1.11,0,0,0,0,1.11,0,0,0,2.22,0,0,0,0,0,0,0,0.363,0,0.181,0,0,1.285,4,27,0 0,0,0,0,0,0,0,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0.336,0,0,1.555,4,42,0 0.07,0,0.07,0,0,0.07,0,0,0,0,0.14,1.04,0.14,0.07,0,0,0.52,0,0,0,0,0,0.22,0,2.23,0.07,0,0,0,0,0,0,0,0,0,0.22,0.14,0,0.07,0,0,0,0.07,0,0,0,0,0,0.111,0.151,0.01,0,0.02,0,2.25,24,720,0 
0,0.27,0,0,0,0,0,0,0,0,0,1.94,0,0,0,0,0.27,0,1.39,0,0,0,0,0,0.83,0.55,0,0,0,0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0,0.128,0,0,0,0,0,1.197,6,109,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.34,0,0,0,0,0,0,0,0,0,4.34,0,8.69,0,0,0,0,0,0,0,0,0,0.636,1.273,0,0,0,0,3.5,24,35,0 1.06,0,0,0,1.06,0,0,0,0,0,0,1.06,0,0,0,0,0,0,1.06,0,1.06,0,0,0,0,0,1.06,0,0,0,0,0,0,0,0,0,0,1.06,0,0,0,0,0,0,0,0,0,0,0,0.386,0,0,0,0,1.705,6,29,0 0,0,0,0,3.44,0,0,0,0,0,0,3.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.574,0,0,0,0,1.714,4,12,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.8,0,0,0,0,0,0.8,0,0,0,0,0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.125,0,0,0,0,1.428,5,50,0 0,0,0,0,0,0,0,0,0,0,0,0.55,0.55,0,0,0,0,0,1.65,0,0.55,0,0,0,1.1,0.55,0,0,0,0.55,0.55,0,0,0,0,0,0.55,0,0,0,0,0,0,0,0,0,0,0,0.087,0,0,0,0,0,1.657,8,58,0 0,0,0,0,0,0,0,0,1.16,0,0,1.16,1.16,0,0,0,0,0,1.16,0,1.16,0,0,0,0,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.2,3,12,0 0,0,0,0,1.85,0,0,0,0,0,0,1.85,1.85,0,0,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.714,4,12,0 0,0,0,0,8.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.33,0,0,0,8.33,0,0,0,0,0,0,0,0,0,0,0,0,1,1,2,0 0,0,0,0,0,0.17,0,0,0,0,0,0.52,0.17,0,0,0,0.69,0,0,0,0.17,0,0,0,1.04,0,0,0,0.34,0.34,0,0,0,0,0,1.04,0,0,0,0.17,0,0,0,0.52,0,0,0,0,0,0.055,0,0,0,0,1.685,7,204,0 0,0,0,0,1.61,0,0,0,0,0,0,0.8,0.8,0,0,0.8,0,0,0.8,0,0,0,0,0,1.61,1.61,0,0,0,0,0,0,0,0,0,0,0.8,0,0.8,0,0,0,0,0,0,0,0,0,0,0,0.144,0,0,0,1.913,13,44,0 0,0,0,0,2.04,0,0,0,0,0,0,1.02,1.02,0,0,1.02,0,0,1.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.642,4,23,0 0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.5,0,0.5,0,0.5,0,0,0,0,0,0,0,0,0,0,0,0,0.411,0,0,0,0,1.866,10,112,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.4,1.2,1.2,1.2,1.2,1.2,1.2,1.2,1.2,1.2,1.2,1.2,0,0,0,1.2,0,0,0,1.2,0,0,0,0,0,0.446,0,0,0,0,2.166,11,39,0 
0,0,0.28,0,0.28,0,0,0,0,0,0,0.85,0,0,0,0,0,0,0.28,0,0,0,0,0,1.7,0,0,0.56,0,0,0,0,0,0,0.56,2.55,0.28,0,0.28,0,0,0,0,0.28,0,0,0,0,0.223,0.074,0,0,0,0,1.958,55,190,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.333,4,12,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.88,1.88,0,1.88,0,0,0,0,0,1.88,0,0,0,0,0,3.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.366,0,0,0,0,1.307,3,17,0 0,0,0.5,0,0,0,0,0.5,0,0,0,0.5,0,0,0,0.5,0,0,0.5,0,0,0,0,0,0.5,1,0,0,0,0,0,0,0,0,0,0,0.5,0,0,0,0,0,0,0,0,0,0,0,0,0.062,0,0.188,0,0,3.461,47,180,0 0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.43,0,0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.041,26,73,0 0,0,0.36,0,0,0.73,0,0,0,0,0,1.46,0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.36,0,0,0,0,0,0,0,0,0,0,0,0,0.049,0,0.049,0,0,1.919,54,167,0 0,0,0,0,0,0,0,0.42,0,0,0,1.28,0.42,0,0,0,0.42,0,0,0,0,0,0,0,2.57,0,0,0,0.14,0,0,0,0.14,0,0,0.28,0.28,0.14,0,0,0,0,0,0,0,0,0,0.14,0.08,0.242,0,0,0.04,0,2.275,20,421,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.6,3,8,0 0,0,0.5,0,0.5,0,0,0,0,0.5,0,1.01,0,0,0,0,0.5,1.01,2.03,0,3.04,0,0,0,1.52,0.5,1.01,1.01,0,1.01,0,0,0,0,1.01,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.551,0,0.055,0,0,4.275,45,248,0 0,0,0,0,0,0,0,0,0,0,0,0,0,5.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.888,13,35,0 0,0,1.31,0,0,0,0,1.31,0,0,0,0,0,0,0,0,0,0,3.94,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0.279,0,0.139,0,0,2.13,15,49,0 0,0,2.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.27,0,0,0,0,0.404,0,0.404,0,0,2.076,15,27,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.862,0,0.862,0,0,1,1,3,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.03,0,0,0,0,0,3.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.03,0,0,0,0,0.507,0,0,0,0,1.133,3,17,0 
0,0,0.65,0,0.65,0,0,0,0,0,0,0.65,0,0,0,0,0.65,0,0,0,0,0,0,0,0.65,3.26,0,0,0,0.65,0,0,0,0,0,0,0.65,0,0.65,0,0,0,0.65,0,0.65,0,0,0,0.093,0,0,0.093,0,0,1.705,17,87,0 0,0,0,0,0,0,0,2.63,0,0,0,0,0,0,0,0,0,0,0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.37,0,0.37,0,0.75,0,0.37,0,0.75,1.12,0,0,0,0,0.063,0,0,0,2.023,14,85,0 0,0,0,0,0,0,0,3.97,0,0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.32,0,0,0,1.98,0,0,0,0.66,1.98,0,0,0.11,0.11,0,0,0,0,2.857,19,120,0 0,0,0,0,0,0,0,5.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,10,0 0,0,0,0,0,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0.6,0,0,0,0,0,3.03,0,0,0,0,0,0,0,0,0,0.6,0,0,0,0,0,0,0,0,0.6,0,0,0,0,0,0.092,0,0,0,0,1.568,9,69,0 0.46,0,0,0,0,0,0,0,0,0,0,1.85,0,0,0,0,0,0,0.92,0,0.46,0,0,0,0.92,0,0,0,0,0,0,0,0,0,0.46,0,0.92,0,0,0,0,0,0,0,0,0,0,0,0,0.125,0,0,0,0,1.51,10,74,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.26,0,0,0,0,0,0,0,0,0,0,0,0,1,1,7,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.5,3,6,0 0,0,0,0,0,0,0,0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.83,0.41,0,0.41,0.41,0,0,0,0,0,0.41,0.41,0.41,0,0,0,0,0,0,0,0,0,0,0,0,0.158,0,0,0,0,1.969,13,130,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.85,0,0,0,0,0,4.27,3.41,2.56,0.85,0.85,0.85,0.85,0.85,0,0.85,0.85,0.85,0.85,0,0.85,0.85,0,0,0.85,0,0.85,0,0,0,0,0.278,0.139,0,0,0,2.138,12,77,0 0,0,0,0,0.67,0,0,0,0,0,0,2.01,0,0,0,0,0,0,1.34,0.67,1.34,0,0,0,0,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.67,0.67,0,0,0,0.117,0.117,0,0,0,0,1.222,5,33,0 0,0.25,0,0,0,0.25,0,0.5,0,0,0,1.01,0,0,0.25,0,0,0.25,0.25,0,0,0,0,0,0.5,0.25,0,0,0,0.25,0,0,0.25,0,0,0.25,0,0,0,0,0,0.25,0,0,0,0.25,0,0,0,0.073,0,0,0,0,1.545,7,136,0 0,0,1.33,0,1.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.33,0,0,0,8,8,0,0,0,0,0,0,0,0,0,0,1.33,4,1.33,0,0,4,0,0,0,0,0,0,0.865,0,0.216,0,0,0,1.647,12,28,0 
0,0.04,0.23,0,0.09,0,0,0.04,0.04,0.04,0.04,0.74,0,0,0,0.13,0.04,0.04,0.93,0,0.65,0,0,0,1.49,0.32,0,0.23,0,0.18,0.18,0,0,0,0.23,0,0.32,0,0.04,0.04,0,0.18,0,0.13,0,0,0,0.04,0.027,0.184,0,0.047,0.061,0,1.686,20,1184,0 0,0,3.22,0,3.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.45,0,0,0,6.45,0,0,0,0,0,0,0,0,0,0,0,0,1,1,8,0 0,0,0.1,0,0.2,0.1,0,0,0,0,0,2.04,0.2,0.1,0,0,0.81,0,0,0,0,0,0.2,0,2.75,0,0,0,0,0,0,0,0,0,0,0.3,0.3,0,0,0,0,0,0,0,0,0,0,0,0.03,0.091,0,0,0,0,2.161,27,575,0 0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.36,1.36,0,2.73,0,0,0,0,0,0,0,1.36,0,0,1.36,0,0,0,0,0,0,0,0,0,0,0.475,0,0,0,0,3.478,11,80,0 0,0,0,0,0,0,0,0,0,0,0,1.11,0,0,0,0,0,0,0,0,0,0,0,0,4.44,1.66,0,1.11,0,0,0,0,0,0,1.11,0,0.55,0,0,0,0,0,0,0.55,0,0,0,0,0,0,0,0,0,0,2.018,12,107,0 0,0,0.31,0,1.04,0.1,0,0,0,0,0,0.1,0,0,0,0,0,0,0.2,0,0,0,0,0,0.41,0.2,0.52,0.2,0.2,0.2,0.2,0.2,0.41,0.2,0.2,0.2,0.1,1.57,0.1,0.2,0,0.41,0.1,0.1,0.1,0,0,0.1,0.067,0.523,0.016,0,0.016,0.033,2.232,47,393,0 0,0,0,0,2.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.7,0,0,0,0,0,0,0,0,0,0,1.4,5,14,0 0,0,0,0,2.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.94,0,0,0,0,0,0,0,0,0,1.333,3,16,0 0,0,0.28,0,0.84,0,0,0,0,0,0,1.96,0,0,0,0,0,0,0.28,0,0,0,0,0,1.4,0.84,0,0,0,0.84,0,0,0,0,0,0,0.56,0,0,0,0,0,0,0,0.28,0,0,0,0,0,0,0,0,0,1.426,7,97,0 0.55,0,0,0,0,0,0,0,0,0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.11,0.55,1.66,0.55,0.55,0.55,0.55,0.55,0,0.55,0.55,0.55,0.55,0,0.55,0.55,0,0,0.55,0,0.55,0,0,0,0,0.367,0.091,0,0,0,2.117,12,108,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.34,0,0,0,0,0,0,0.86,2.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.86,0,0,0,0,0.295,0,0,0,0,3.26,42,75,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.32,0,0,0,0.218,0.218,0,0.054,0,0,2.16,9,108,0 0,0,0.78,0,0,0,0,0,0,0,0,0,0.78,0,0,0,0,0,0,0,0.78,0,0,0,0,0,0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.78,0,0,0,0,0.401,0,0.133,0,0,1.565,4,36,0 
0,0,0,0,6.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.5,2,3,0 0,0,0.71,0,0.71,0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,1.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.71,0,0,0,0,0,0,0,0.055,0,0.055,0,0,15.333,54,138,0 0,0,0.82,0,0.82,0,0,0,0,0,0,0.82,0,0,0,0,0.82,0,0,0,0,0,0,0,0,1.65,0,0,0,0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.82,0,0,0,0,0,0,0.119,0,0,1.272,6,42,0 0,0,0,0,0,0,0,0,0,2.43,0,2.43,0,0,0,0,0,0,0,0,2.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.43,0,0,0,0,0,0,5.3,40,53,0 0,0,0,0,3.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.96,0,1.96,1.96,0,0,0,0,0,0,0.348,0,0,1.312,4,21,0 0,0,0.52,0,1.04,0,0,0,0,0,0,1.04,0,0,0,0,0,0,0.52,0,0.52,0,0,0,1.83,1.57,0.52,0.26,0.26,0.26,0.26,0.26,1.3,0.26,0.26,0.26,0.26,0,0.26,0.26,0,0.78,0.26,0.26,0.78,0,0,0.52,0.136,0.182,0.091,0,0.045,0,1.823,13,155,0 0,0,0.62,0,0.62,0,0,0,0,0,0,1.25,0,0,0,0,0,0,0.62,0,0.62,0,0,0,1.57,1.57,0.31,0,0,0,0,0,1.57,0,0,0,0.31,0,0.31,0,0,0.94,0,0,0.62,0,0,0.62,0.164,0.109,0.109,0,0.054,0,1.671,13,107,0 0,0,0.31,0,0,0,0,0,0,0,0,0.63,0,0,0,0,0,0.31,0,0,0.31,0,0,0,0.63,0.63,0,0.63,0,0.63,0,0,0,0,0.31,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0.588,0,0,0,0,3.183,55,191,0 0,0,0.11,0,0.11,0,0,0,0,0,0.11,1.02,0,0,0,0,0,0.11,0.11,0,0,0,0,0,0,0,0,0.22,0,0,0,0,0,0,0.22,0,0.22,0,0.11,0.11,0,0.34,0,0,0,1.02,0,0,0.049,0.149,0,0,0,0,1.637,18,511,0 0,0,0.71,0,0.71,0,0,0,0,0,0,1.43,0,0,0,0,0,0,0.71,0,0.71,0,0,0,0,0,0.35,0,0,0,0,0,1.79,0,0,0,0,0,0,0,0,0.71,0,0,0.71,0,0,0.71,0,0.125,0.062,0,0.062,0,1.574,6,85,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,15.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14,53,56,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,1.75,3,7,0 0,0,0.1,0,0,0,0,0.1,0,0,0.31,0.52,0.1,0,0,0.1,0.1,0,0.1,0,0,0,0.1,0,3.14,0,0,0,0,0,0,0,0,0,0,0.52,0.31,0,0,0.1,0,0,0,0,0,0,0,0.1,0.079,0.142,0,0,0.063,0,2.542,26,605,0 
0,0,0,0,0,0,0,0,0,0,0,1.4,0,0,0,0,0.7,0,1.4,0,1.4,0,0,0,0,0,0.7,0,0,0,0.7,0,0,0,0,0,0,0,0,2.11,0,0,0,0,0,0,0,0,0,0.267,0.066,0,0,0,17.904,200,376,0 0,0,0,0,0,0,0,0,0,0,0,1.16,0,0,0,0,0,0,0,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.866,6,28,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.81,0,0,0,0,0,3.63,1.81,0,0,0,3.63,0,0,0,0,1.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.227,11,49,0 0,0,0,0,0,0,0,0,0,0,0,0,0.62,0,0,0.62,0,0,1.88,0,0.62,0,0,0,1.25,0.62,0,0,0,0,0,0,0,0,0,0,1.25,0,1.25,0,0,0,1.25,0,0,0,0,0,0.895,0.179,0.358,0,0,0,1.712,13,149,0 0,0,0,0,0,1.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.63,0,0,0,0,0,0,0,0,0,1.25,4,15,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.8,0,0,0,0,0.8,0,0,0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.8,0.8,0,0,0,0,0,0,0,0.265,0,1.347,3,31,0 0,0,0.1,0,0,0,0,0.1,0,0,0.2,0.41,0.1,0,0,0.1,0.1,0,0.1,0,0,0,0.1,0,3.02,0,0,0,0,0,0,0,0,0,0,0.52,0.31,0,0,0.1,0,0,0,0,0,0,0,0.1,0.074,0.134,0,0,0.059,0,2.529,26,597,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.72,0,0,0,0,0,6.89,3.44,0,0,0,3.44,0,0,0,0,1.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.16,11,54,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.65,2.32,2.32,2.32,2.32,2.32,2.32,2.32,0,2.32,2.32,2.32,0,0,0,2.32,0,0,0,0,0,2.32,0,0,0,0.692,0,0,0,0,3.312,11,53,0 0,0,0,0,0,0,0,0,0,1.57,0,4.72,0,0,0,0,0,0,1.57,0,0,0,0,0,0.78,0.78,0,1.57,0,0,0,0,0,0,0,0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0.268,0,0,0,0,2.885,11,101,0 0,0,2.56,0,0,0,0,0,0,0,0,1.28,0,0,0,0,0,0,1.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.227,0,0,0,0,1.647,7,28,0 0,0,0,0,0,0,0,0,0,0,0,1.22,0,0,0,0,0,0.61,0,0,0,0,0,0,0.61,0.61,0,1.22,0,0,0,0,0.61,0,0.61,0,0.61,0,0,0,0,0,0,0.61,0,0.61,0,0,0,0.412,0,0,0,0,2.206,19,128,0 0,0.16,0.32,0,0.16,0.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.97,1.13,0,0,0,0,0,0,0.8,0,0,0,1.29,0,0,0,0.32,0,0,0,0,1.61,0,0,0.184,0.394,0.131,0,0,0,3.666,20,506,0 
1.12,0,0,0,0,1.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.12,0,0,0,0,0,0,0.204,0,0,1.214,3,34,0 0.19,0,0.59,0,0,0,0,0,0,0.39,0,0,0,0,0,0,0,0,2.59,0,0.39,0,0,0,0.79,0.39,0.59,0.39,0.39,0.39,0.39,0.39,0,0.39,0.39,0.39,0.19,0,0,0.39,0,0,0.19,0,1.19,0,0,0,0.093,0.657,0.062,0,0,0.062,2.156,13,207,0 0,0,0.87,0,0,0,0,0,0,2.63,0.87,0.87,0,0,0,0,0,0,1.75,0,0,0,0,0,1.75,0.87,2.63,0.87,0.87,0.87,0.87,0.87,0,0.87,0.87,0.87,0.87,0,0.87,0.87,0,0,0.87,0,0.87,0,0,0,0.139,0.976,0,0.139,0,0,1.767,12,76,0 0,0,0.6,0,0,0,0,3.04,0,0,0,0.6,0,0,0,0.6,0,0,0.6,0,1.21,0,0,0,1.21,1.82,0,0.6,0,0.6,0,0,0,0,0.6,0.6,1.21,0,1.21,0,0,0,0,0,0,0,0,0,0,0,0,0.077,0,0,3.277,33,177,0 0,0,0,0,0,0,0,0,0,0.82,0,0.82,0,0,0,0,0,0,1.65,0,0.82,0,0,0,0,1.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.82,0,0,0,0,0.122,0,0,0,0,2.111,19,76,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.12,6.38,0,0,0,0,0,0,0,0,0,0,2.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.722,7,31,0 0,0,0,0,0,0,0,0,0,0,0,1.47,0,0,0,0,0,0,2.2,0,0.73,0,0,0,0.73,0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.111,0.223,0,1.76,6,88,0 0,0,0,0,0.87,0,0,0,0,0,1.31,0.43,0,0,0,1.75,0,1.31,2.63,0,0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0.361,0.18,0,1.72,6,86,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,0,0,0,0,0,0,0,0,0,2.94,0,0,0,2.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.285,3,9,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.32,1.16,0,0,0,0,0.391,0,0,0,0,1.384,4,18,0 0,0,0.47,0,0.95,0.47,0,0,0,0,0,0.47,0,0,0,0,0,0,0.95,0,0,0,0,0,0,0.47,0.47,0,0,0,0,0,0,0,0,0,0,0.95,0,0,0,0.47,0,0,0,0,0,0,0,0.073,0,0,0,0,1.884,8,98,0 0,0,0,0,0,0,0,0,0,0,0,1.75,0,0,0,0,0,0,3.5,0,3.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.75,0,0,0,0.325,0,0,0,0.651,0,1.125,3,18,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.44,0,0,0,0,0,0,0,0,0,1,1,8,0 0,0,0,0,0.81,0,0,0,0,1.22,0,0.4,0,0,0,0,0,0,0.4,0,0.4,0,0,0,4.08,4.08,0,0,0,1.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.754,8,93,0 0,0,0,0,0.88,0,0,0,0,2.65,0.88,0,0,0,0,0,0,0,1.76,0,0,0,0,0,1.76,0.88,1.76,0.88,0.88,0.88,0.88,0.88,0,0.88,0.88,0.88,0.88,0,0.88,0.88,0,0,0.88,0,2.65,0,0,0,0.142,0.855,0,0.285,0,0,1.777,12,80,0 0,0,0,0,0,0,0,0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.83,0.41,0,0.41,0.41,0,0,0,0,0,0.41,0.41,0.41,0,0,0,0,0,0,0,0,0,0,0,0,0.159,0,0,0,0,1.848,13,122,0 0,0,0.51,0,0.51,0,0,0,0,0,0,0.51,0,0,0,0,0,0,0,0,0,0,0,0,2.07,2.07,0,0,0,0,0,0,0,0,0,0,1.55,0,0,0,0,0.51,0,0,0,0,0,0.51,0.165,0.497,0,0.082,0,0,3.525,20,208,0 0,0,0,0,0,0,0,0,0.13,0,0,0.27,0,0,0,0,0,0,0,0,0,0,0,0,1.38,1.52,0,0,0,0,0,0,1.38,0,0,0,1.25,0,0.27,0,0.69,0,0,0,0,2.63,0.27,0,0.125,0.438,0.146,0,0,0,3.657,35,534,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.65,2.32,2.32,2.32,2.32,2.32,2.32,2.32,0,2.32,2.32,2.32,0,0,0,2.32,0,0,0,0,0,0,0,0,0,0.757,0,0,0,0,2.5,11,50,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.75,0,0,0,0,0,3.5,3.5,0,0,0,0,0,0,0,0,1.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.846,11,48,0 0,0,0,0,0.28,0,0,0,0,0,0,0.57,0,0,0,0,0,0.85,0,0,0,0,0,0,5.14,4,2.28,1.14,0.28,1.14,1.14,0.28,0.57,0.28,1.14,1.14,0.28,0,0,0.28,0,0,0.28,0,0.57,0,0,0,0.064,0.292,0.194,0.097,0,0.097,2.291,12,307,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.83,5.5,0,0,0,0,0,0,0,0.91,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.91,0,0,0.91,0,0.175,0,0,0,0,1,1,18,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,3.33,0,0,0,0,0,0,1,1,6,0 0,0.19,0.59,0,0.19,0,0,0,0,0.59,0.39,0.19,0,0.19,0,0,0,0.79,2.79,0,1.99,0,0,0,1.79,0.19,0.39,0.19,0,0,0.59,0.19,0.79,0.19,0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.203,0.018,0.018,0,0,3.716,47,472,0 
0,0,0,0,1.15,0.28,0,0,0,0,0,0,0,0,0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.072,0,0,0,0,1.517,8,88,0 0,0,0,0,0,0,0,0,0,0,0,1.29,0,0,0,0,0,0,0,0,0,0,0,0,1.29,3.89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.625,6,26,0 0,0,0,0,0.14,0,0,0,0,0,0,1.75,0,0,0,0,0,0,0.29,0,0,0,0,0,0.14,0,0,0.29,0,0.14,0,0,0.14,0,0.14,0,0.14,0.14,0,0,0,0,0,0.29,0,0.14,0,0,0,0.064,0,0.021,0,0,1.715,11,187,0 0,0,0,0,1.28,0,0,0,0,2.56,0,0.64,0,0,0,0,0,0,1.92,0,0.64,0,0,0,0.64,0.64,0,0,0,1.92,0,0,0,0,0,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.536,8,63,0 0,0.22,0.22,0,0.45,0,0.22,0,0,1.82,0,0.68,0,0,0,0.68,0.22,0,2.05,0.45,1.59,0,0,0,0,0,0.22,0,0,0,0,0,0,0,0,0,0.91,0,0,0,0,0,0,0,0,0,0,0,0,0.101,0,0.135,0.067,0,2.5,27,210,0 0,0,0,0,0,0,0,0,0,1.44,0,0,0,0,0,0,0,0,1.44,0,1.44,0,0,0,2.89,1.44,4.34,1.44,1.44,1.44,1.44,1.44,0,1.44,1.44,1.44,0,0,0,1.44,0,0,0,0,1.44,0,0,0,0,0.417,0,0,0,0,2.166,11,39,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.78,0,0,0,0,0,0,0,3.57,0,0,0,1.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.406,7,45,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.846,17,76,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.846,0,0,0,0,0,6.333,17,19,0 0,0,0,0,0,0,0,0,0,0.58,0,0,0,0,0,0,0,0,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.58,0,0,0,0.083,0,0,0,0,6.096,21,189,0 0.24,0,0.24,0,0.24,0,0,0,0,0,0,0,0,0,0,0,0.24,0.24,0.24,0,0,0,0,0.24,0.98,0.73,0,0.49,0,0.24,0,0,0,0,0.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.831,13,152,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.22,0,0,2.22,2.22,0,0,0,0,0,0,2.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.374,0,0,0,1.583,8,19,0 0,0.25,0.5,0,0,0,0,0,0,0.5,0,0.63,0,0,0,0,0,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.018,0.129,0.092,0.018,0,0,8.021,66,746,0 
0,0,1.16,0,1.16,0,0,0,0,0,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.16,0,0,0,0,0,1.16,0,0.368,0,0.184,0,0,2.833,11,51,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.5,4,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.888,8,35,0 0,0,0,0,0,0.4,0,0,0.4,0.4,0,0,0,0,0.4,0,0,0,1.22,1.22,0.4,0,0,0,0,0.4,0.4,0,0,0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0.81,0,0.4,0,0,0,0.065,0,0,0,0,1.84,8,81,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,5,6,0 0,0,0,0,0,0,0,0,0,0,0,2.08,0,0,0,0,1.04,0,0,0,0,0,0,0,1.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.478,0,0,0,0,3.291,21,79,0 0.17,0,0.26,0,0.08,0.08,0,0.08,0.08,0.08,0.17,0.17,0.08,0,0,0.08,0.26,0,1.75,0,1.14,0,0,0,1.93,0.52,0,0.17,0,0,0.26,0,0.17,0,0.26,0.08,0.79,0,0,0,0,0,0,0,0.08,0,0,0,0,0.063,0,0.038,0,0,1.66,20,646,0 0,0.18,0.72,0,0.18,0,0,0,0,0,0,0.54,0,0,0,0,0,0.18,0.9,0,0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.36,0,0,0,0,0,0.18,0.54,0,0,0,0.177,0.059,0.148,0.029,0,1.6,18,256,0 2,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,2,0,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.888,29,53,0 0,0,0.11,0,0.22,0.11,0,0,0,0,0,0.99,0.11,0.11,0,0,0.22,0,0,0,0,0,0.11,0,3.21,0.11,0,0,0.33,0,0,0,0.11,0,0,0.88,0.44,0,0.11,0,0,0,0.11,0,0,0,0,0,0.044,0.149,0.014,0,0,0,2.419,27,559,0 0,0,0.33,0,0.33,0,0,0,0,0,0,0.33,0,0,0,0,0,0,1.01,0,0.67,0,0,0,1.35,1.01,0.67,0.33,0.33,0.33,0.33,0.33,0.33,0.33,0.33,0.33,0.33,0,0.33,0.33,0,0,0.33,0,1.35,0,0,0,0,0.175,0.058,0,0,0,2.068,12,120,0 0,0,0.59,0,0.59,0,0,0,0,0,0,0.59,0,0,0,0,0,0,0.59,0,0.59,0,0,0,0,0,0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.59,0,0,0,0,0.105,0,0,0,0,1.826,8,42,0 0,0,0.3,0,0.61,0,0,0,0,0,0,0,0,0,0,0,0.3,0,0.91,0,0.3,0,0,0,2.44,0.61,0,0,0,0,0,0,0,0,0,0,0.3,1.52,0,0,0,0,0.61,1.22,0,0,0,0,0.301,0.043,0.043,0,0.086,0,2.161,19,227,0 
0.4,0,0.81,0,0,0.4,0,0,0,0,0,0.81,0,0,0,0,0,0,1.63,0,0.4,0,0,0,0,0,0.81,0,0,0.4,0,0,0,0,0,0,0,0,0,0,0,0,0.4,0,0.4,0,0,0,0,0.071,0,0,0,0,1.156,3,59,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,33.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0.16,0,0,0,0,0,0,0,0,0.76,0.028,0,0,0,3.989,33,738,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,7,0 0,0,0,0,0,0,0,0,0,0.4,0,0.4,0,0,0,0,0,0,0,0,1.22,0,0,0,0.4,0.4,0,0.81,0,0,0,0,0.81,0,0,0.4,0,0,0,0,0,0,0,0.81,0,0,0,0,0,0.199,0,0,0,0,2.386,11,105,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.47,0,1.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.49,0,0,0,0,0,0,0,0,1.49,0,0,0,0,0,0,1.785,6,25,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.186,0,0,0,3.677,28,114,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.44,2.22,2.22,2.22,2.22,2.22,2.22,2.22,0,2.22,2.22,2.22,0,0,0,2.22,0,0,0,0,0,0,0,0,0,0.735,0,0,0,0,2.45,11,49,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.428,4,10,0 0,0,0,0,0,0,0,0,0,1.07,0,0,0,0,0,0,0,0,1.07,0,0,0,0,0,1.07,1.07,2.15,2.15,0,0,0,0,0,0,0,1.07,1.07,0,1.07,0,0,0,1.07,0,2.15,0,0,0,0,0.326,0,0,0,0,2.7,12,108,0 0,0,1.14,0,0,0,0,0,0,0,0,2.29,0,0,0,0,0,0,1.14,0,0,0,0,0,0,0,1.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.209,0,0,0,0,1.833,5,22,0 0.08,0,0.16,0,0,0.08,0,0.08,0.73,0,0,0.24,0,0,0,0,0,0,0.32,0,0.16,0,0,0,0.49,0.57,0.08,0,0,0,0,0,0.57,0,0,0,0.16,0,0,0,0,0,0,0,0,0,0,0,0.126,0.172,0.057,0,0.022,0,3.212,44,665,0 0.12,0,0.12,0,0.12,0,0,0,1.11,0,0,0.37,0,0,0,0,0,0,0.49,0,0.24,0,0,0,0.62,0.74,0.12,0,0,0,0,0,0.49,0,0,0,0.12,0,0,0,0,0,0,0,0,0,0,0,0.083,0.167,0.033,0,0.033,0,3.211,32,485,0 
0.06,0,0.06,0,0,0,0,0,0.61,0,0,0.2,0,0,0,0,0,0.06,0.27,0,0.2,0,0,0,0.75,0.81,0.06,0,0,0,0,0,0.27,0,0,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0.173,0.183,0.048,0,0.019,0,2.738,36,827,0 0.08,0,0.08,0,0,0,0,0,0.77,0,0,0.25,0,0,0,0,0,0.08,0.34,0,0.25,0,0,0,0.77,0.86,0.08,0,0,0,0,0,0.25,0,0,0,0.43,0,0,0.17,0,0,0,0,0,0,0,0,0.098,0.16,0.037,0,0.024,0,2.634,36,598,0 0.07,0.03,0.18,0,0.1,0.03,0,0,0.4,0,0,0.1,0,0,0,0,0,0.03,0.14,0,0.1,0,0,0,0.47,0.5,0.03,0,0,0,0,0,0.76,0,0,0,0.32,0,0,0,0.07,0,0,0,0,0,0,0,0.188,0.148,0.035,0,0.01,0,3.233,66,1387,0 0,0,0,0,0,0,0,0,0,0,0,3.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.81,0,0.232,0.116,0,0,0,0,1.976,9,83,0 0.23,0,0.47,0,0,0,0.23,0,0,0.47,0,0,0,0,0,0,0,0,1.17,0,0.23,0,0,0,1.64,0.7,0.7,1.17,0.23,0.23,0.23,0.23,0,0.23,0.23,0.7,0.47,0,0.23,0.23,0,0,0.47,0,0.7,0,0,0,0,0.237,0,0,0,0,2.42,12,334,0 0,0,0,0,0,0,0,0,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.18,1.45,0,1.45,0,0,0,0,0,0,0,0.72,0.72,0,0.72,0,0,0,0.72,0,0.72,0,0,0,0,0.467,0.116,0,0,0,2.431,12,124,0 0,0,0,0,0,0,0,0,0,0,0,0.54,0.54,0,0,0,0,0,1.09,0,0,0,0,0,0.54,0.54,0.54,0.54,0,0,0,0,0,0,0,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0.102,0.308,0,0,0,0,1.4,10,77,0 2.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.85,0,0,0,0,0,0,0,0,0,2.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.85,0,0,0,0,0,0,0,0.465,0,0,0,0,1.25,3,10,0 0,0,0,0,0,0,0,0,0,0,0,2.23,0,0,0,0,0,0,0.74,0,0,0,0,0.74,0,0.74,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.49,0,0,0,0,0,0,0,0.124,0,0,2.333,31,77,0 0,0,0,0,0,0,0,0,0,0,0,0.8,0,0,0,0,0,0,1.61,0,0,0,0,0,1.61,0.8,2.41,0.8,0.8,0.8,0.8,0.8,0,0.8,0.8,0.8,0.8,0,0,0.8,0,0,0.8,0,0.8,0,0,0,0.122,0.366,0,0,0,0,1.853,13,76,0 0,0,0,0,0,0,0,0,0,0,0,1.38,0,0,0,0,0,0,2.77,0,0,0,0,0,0,0,1.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.228,0,0,0,0,1,1,12,0 0.04,0.08,0.15,0,0.04,0.04,0,0.04,0.04,0.08,0,0.41,0.06,0,0,0,0.06,0.15,0.6,0,0.34,0,0.02,0,0,0,0,0,0.02,0,0,0,1.67,0,0,0.19,0.82,0.02,0.04,0,0.02,0.02,0.08,0.02,0,0.26,0.04,0.54,0.005,0.213,0.002,0.031,0.039,0.008,2.246,54,3003,0 
0,0,0.86,0,0,0,0,0,0,0,0,0.86,0,0,0,0,0,0,2.6,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.86,0,0,0,0,0,0,0,0,0.167,0,0,1.5,4,24,0 0,0,0,0,0,0,0,0,0,0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.217,0.29,0,0,0,4.461,28,290,0 0,0,0,0,0,0,0,0,0,1.86,0,1.24,0,0,0,0,0,0,0,0,0,0,0,0,2.48,1.24,1.24,1.86,0.62,0.62,0.62,0.62,0,0.62,0.62,1.24,0,0,0.62,0.62,0,0,0.62,0,0.62,0,0,0,0.189,0.757,0,0,0,0,2.63,16,171,0 0,0,0,0,0,3.44,0,0,0,0,0,0,0,3.44,0,0,0,0,0,0,6.89,0,0,0,0,0,3.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.52,0,0,0,0,1,1,7,0 0,0,0.87,0,0,0.14,0,0,0,0,0.14,1.46,0.14,0,0,0.14,0.58,0.43,0.14,0,0.43,0,0,0,1.9,0.58,0,0.29,0.14,0,0,0,0,0,0.29,0,0.29,0,0,0.14,0,0.43,0.14,0,0.14,0,0,0.29,0.019,0.019,0.019,0,0,0,2.174,35,461,0 0,0,0.74,0,0,0,0,0,0,0.74,0,0,0.37,0.74,0,0,0.37,0,0.37,0,0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.11,0,0,0,0,0,0,0.37,0,0,0,0,0,0.245,0,0,0,0,4.666,64,196,0 0,2.35,0,0,3.52,1.17,0,1.17,0,4.7,0,0,0,0,0,1.17,0,0,1.17,0,1.17,0,0,0,0,0,1.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.17,0,0,0,0,0.192,0,0,0,0,1,1,14,0 0,0.17,0,0,0.17,0,0,0.35,0,0,0,0.88,0,0,0,0,1.95,0,0.17,0,0,0,0,0,0.35,0.17,0,0,0,0.17,0,0,0,0,0,0.35,0,0,0,0,0,0,0,0,0.53,0,0,0,0,0.256,0,0,0,0,2.097,14,237,0 0,0,0,0,0,0,0,0,0,0.62,0.31,0,0,0,0,0,0,0,0.31,0,0,0,0,0,0.93,0.62,0,0.93,0,0,0,0,0,0,0.31,0,0.93,0,0,0,0.93,0,0.31,0,0,0.62,0,1.86,0,0.122,0.122,0,0.214,0,2.904,20,363,0 0,0,0,0,0,0,0,1.78,0,0,0,0,0,1.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,0,0,0,0,0,0,0,0,1.444,5,13,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.28,1.44,0,0,0,0,0,0,1.44,0,0,0,1.6,0,0,0,2.56,0,0,0,0,3.52,0,0,0.208,0.671,0.092,0,0,0,4.122,20,540,0 0,0,1.81,0,0,0,0,0,0,0,0,1.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.81,0,0,0,0,0,0,0,0,3.63,0,0,0,0,0,0,0,0,0.849,0,0,0,2.294,8,39,0 0,0,0,0,0,0,0,0,0,0,0,4.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.17,0,0,0,0,0,0,0,0,0,0,0,0,1.928,15,54,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.88,0,0,0,1.88,0,0,0,0,0,0,0.647,0,0,0,0,2.8,18,42,0 0,0,2.08,0,0,0,0,0,0,0,0,2.08,0,2.08,0,0,0,0,2.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,11,0 0,0,0,0,0,0,0,0,0,1.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.335,0,0,0,4.133,26,124,0 0.09,0,0.36,0,0,0,0,0.09,0,0,0.18,1.01,0.18,0,0,0,0.64,0,0,0,0,0,0,0,2.49,0,0,0,0,0,0,0,0,0,0,0.09,0.18,0,0,0,0,0,0,0,0,0,0,0,0.131,0.209,0,0,0.039,0,2.278,24,629,0 0,0,0,0,2.32,0,0,0,0,0,0,0,0,0,0,0.77,0,0,0,0,0,0,0,0,1.55,0.77,0.77,0.77,0.77,0.77,0.77,0.77,0,0.77,0.77,0.77,0,0,0,0.77,0,0,0,0,0,0,0,0,0,0.376,0.125,0,0,0,2.4,11,48,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.17,0,0,0,0,2.094,26,111,0 0,0,0,0,0,0,0,0,0,0,0,0.53,0,0,0,0,0,0,0.17,0,0,0,0,0,1.41,1.59,0,0,0,0,0,0,0.17,0,0,0,2.83,0,0,0,2.83,0,0,0,0,3,0,0.17,0.271,0.753,0.12,0,0,0,4.84,20,576,0 0,0,0,0,2.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.38,0,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,2.111,6,19,0 0,0,0.49,0,0.49,0.49,0,0,0,0.49,0,2.94,0,0,0,0,0,0,0.98,0,0,0,0,0,1.47,0.98,0,0.98,0.49,0,0,0,0.49,0,0,0.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0.166,0,0,0,0,2.234,11,105,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.12,6.38,0,0,0,0,0,0,0,0,0,0,2.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.666,6,30,0 0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.16,0,0,0,0,0,0,0,1.16,0,0,0,0,0,1.16,0,0,0,0,0,0,0,0,0,0,0,1.16,0,0,0,0.196,0.393,0,0,0,0,1.058,2,18,0 0,0,0,0,0.47,0,0,0,0,0,0,0.47,0,0,0,0,1.9,0,0,0,0,0,0,0,1.9,0.95,0,0,0,1.42,0,0,0,0,0,0.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0.217,0,0,0,0,1.677,5,99,0 0,0,0,0,0,0,0,0,0,0,0,4.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.333,0,0,1.666,4,25,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.66,0,0,0,0,0,0,8.333,0,0,2,3,8,0 
0,0,0.28,0,0.28,0,0,0,0,0,0,0.84,0,0,0,0,0,0,0.28,0,0,0,0,0,1.69,0,0,0.56,0,0,0,0,0,0,0.56,2.54,0.28,0,0.28,0,0,0,0,0.28,0,0,0,0,0.217,0.072,0,0,0,0,1.948,55,191,0 0,0,0,0,0.32,0,0,0,0.32,0.96,0,1.29,0,0,0.32,0.32,0,0,1.29,0,0,0,0,0,0.64,0.64,0,0,0.32,0,0,0,0,0,0,0.32,0.64,0,0.32,0,0,0,0.32,1.29,0.32,0,0,0,0,0.145,0.048,0,0,0,1.967,18,120,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0.632,0,0,1,1,4,0 0.33,0,0,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0.33,2.01,0,0.33,0,0,0,1.34,1,1.34,0.33,0.33,0.33,0.33,0.33,1.34,0.33,0.33,0.33,0.33,0,0.33,0.33,0,0,0.33,0,0.33,0,0,0,0,0.296,0.059,0,0,0,1.742,12,122,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.46,0,1.23,0,0,0,0,0,1.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.062,2,17,0 0,0,1,0,0,0,0,0,0,0.25,0.25,0.5,0,0,0,0,0,0,1,0,0.75,0,0,0,1,0.5,0,0,0.5,0.25,0,0,0,0,0,0,0.75,0,0,0,0,0,0,0,0,0,0,1,0.457,0.294,0,0,0,0,4.379,208,508,0 0,0,0,0,0,0,0,0,0,0,0,2.32,0,0,0,0,0,0,2.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.207,0.207,0,0,0,0,1.466,4,22,0 0.54,0,0,0,0,0.27,0,0,0,0,0,0,0.54,0,0,0,0,0,3.79,0,0.54,0,0,0,0.27,0,0,0,0,0,0.54,0,0,0,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.433,0,0,0.078,0,1.859,18,106,0 0.09,0,0.57,0,0,0.09,0,0,0,0,0.09,1.33,0.19,0,0,0.09,0.38,0.28,0.38,0,0.19,0,0,0,4.37,0.57,0.19,0.28,0.19,0.09,0.09,0.09,0,0.09,0.28,0.09,0.19,0,0,0.19,0,0.28,0.09,0,0.28,0,0,0.19,0.21,0.052,0.013,0,0,0,2.731,34,885,0 0,0.17,0,0,0.17,0,0,0.35,0,0,0,0.88,0,0,0,0,1.95,0,0.17,0,0,0,0,0,0.35,0.17,0,0,0,0.17,0,0,0,0,0,0.35,0,0,0,0,0,0,0,0,0.53,0,0,0,0,0.256,0,0,0,0,2.053,13,232,0 0,0,0,0,0,0,0,0,0,0.58,0,0,0,0,0,0,0,0,1.17,0,1.17,0,0,0,0,0,0,0,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.817,0,0,0,0,1.64,5,146,0 0,0,0,0,0,0,0,0,0,0,0,1.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.578,5,60,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.476,0,0,0,0,1.285,3,18,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.95,0,0,0,0,0,0.95,0,0,0,0,0,0,0,0.95,0,0,0,0,0,0,0,0,0,0,1.9,0,0,0,0,0.263,0.394,0,0,0,0,2.142,5,45,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.222,2,11,0 0,0,4.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.342,0,0,1.2,2,12,0 0,0,0.87,0,0,0.17,0,0,0,0,0.17,1.74,0.17,0,0,0.17,0.69,0.52,0.17,0,0.17,0,0,0,1.21,0.52,0,0.34,0.17,0,0,0,0,0,0.34,0,0.17,0,0,0.17,0,0.52,0,0,0.17,0,0,0.34,0.022,0.022,0,0,0,0,1.601,11,277,0 0.06,0,0.18,0,0.12,0.12,0,0,0.06,0.18,0,0.55,0.06,0,0,0.06,0.12,0.06,0.93,0.06,1.05,0,0,0,0.93,0.43,0,0,0,0.18,0.18,0,0,0,0.31,0,0.49,0,0,0.06,0,0,0,0.12,0,0,0,0.24,0,0.182,0,0.1,0.109,0,2.062,21,1056,0 0,0,1.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.26,0,0,0,0,0,0,2.53,1.26,0,1.26,0,1.26,1.26,0,0,0,1.26,1.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0.149,0,0.149,0,0,1.423,10,37,0 0,0,0,0,0,0,0,0,0,0.8,0,0,0,0,0,1.61,0,0,0.8,0,0.8,0,0,0,0.8,0,0,0,0,0,0.8,0,0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.089,0,0,0,0,2.405,28,89,0 0,0.85,0.42,0,0,0,0,1.28,0,0,0,0.42,0,0,0,0,0,0.42,1.28,0,0,0,0,0,2.14,1.28,0,0.42,0,0.42,0.42,0,0,0,0.42,0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0.112,0,0.056,0,0,1.602,14,125,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.44,0,0,0.44,0,0,0,0,0,0,0.44,0.44,0,0.88,0,0,0,0,0,0,0.44,0,0,0,0,0,0,0,0,0.44,0,0,0,0,0,0.119,0,0,0,0,1.842,15,105,0 0,0,0.51,0,0.17,0.17,0,0,0,0,0,0,0.17,0,0,0,0,0,1.19,0,1.02,0,0,0,2.9,0,0,0,0,0,0.34,0,0,0,0,0,0.34,0,0,0,0,0,0.17,0,0,0,0,0,0.026,0.156,0,0.078,0,0,1.748,13,299,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.166,2,7,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.166,2,7,0 
0,0.22,0,0,0.22,0,0,0.22,0,0.45,0,0.22,0,1.59,0,0,0.22,0,1.36,0,0,0,0,0,0.68,0,0.22,0,0,0,0.22,0,0,0,0.22,0,0.45,0,0,0,0,0,0,0,0,0,0,0,0,0.053,0,0,0,0,4.964,152,705,0 0,0,0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.6,0,0,0,0,0,2.4,1.6,0,0.8,0,0,0,0,1.6,0,0.8,0,0,0,0,0,0,0,0,0,0.8,0,0,0,0,0.371,0.123,0,0,0,2.44,10,61,0 0,0,1.09,0,1.09,0,0,0,0,0,0,1.09,0,0,0,0,0,0,3.29,0,0,0,0,0,0,0,1.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.434,0.217,0,0,0,0,1,1,18,0 0,0,0,0,0,0,0,0,0,0,0,2.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.56,0,0,0,0.485,0,0,3.444,15,31,0 0,0,0,0,0,0,0,0,0,0.74,0,0,0,0,0,0,0,0,0.74,0,0,0,0,0,0.74,0.74,0,1.48,0,0,0,0,0,0,0,0.74,0,0,0,0,0,0,0,0,0.74,0,0,0,0,0.257,0,0,0,0,2.638,11,95,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.545,6,17,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0.87,0,0,0,0,0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.87,0,0,0,0.327,0.327,0,0,0,0,1.3,3,26,0 0,0,0,0,0,0,0,0,0,0,0,4.22,0,0,0,0,0,0,0,0,1.4,0,0,0,0,2.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.81,0,0,0,0,0,0,0,0,0,0,0,0,3.153,38,82,0 0,0,0,0,0,0,0,4.23,0,0,0,0,0,0,0,0,0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.84,0,0.84,0,1.69,0,0.84,0,0.84,1.69,0,0,0,0,0.126,0,0,0,1.605,12,61,0 0,0,0,0,0,0,0,4.68,0,0,0,0,0,0,0,0,1.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.153,3,15,0 0.07,0,0.23,0,0.15,0,0,0.07,0,0.07,0.15,1.84,0.07,0,0,0,0.15,0,0.23,0.23,0,0,0.23,0,2.61,0,0,0,0,0,0,0,0,0,0,0.07,0.07,0.07,0,0,0,0,0,0.15,0,0,0,0,0.011,0.143,0,0,0.044,0,2.442,26,591,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,4,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0.343,0,0.171,0,0,0,1.725,13,69,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0.33,0.33,0,0,0,0,0,1,0,0.33,0,0,0,8.69,4.68,0,0,0,0.33,0.33,0,0,0,0,0,0.66,0,0.33,0,1.33,0,0,0,0,0,0,0,1.001,0,0,0,0,0,2.701,20,181,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,5,8,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.31,0,0,0,0,0,9.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.875,12,46,0 0,0,0,0,0.92,0,0,0,0,0,0,0.92,0,0,0,0,0,0,0.92,0,0.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.5,7,33,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.44,1.72,0,0,0,0,0,0,0,1.72,0,0,0,0,0,0,0,0,0,0,3.44,0,0,0,1.72,0,0,1.72,0,0,1.72,0,0,0,0,0,0,1.2,4,18,0 0,0,0.66,0,0,0,0,0,0,0.33,0,0,0,0,0,0,0,0,1.98,0,0.66,0,0,0,0.99,0.66,0.66,0.99,0.33,0.33,0.33,0.33,0,0.33,0.33,0.66,0.33,0,0,0.33,0,0,0.33,0,0.33,0,0,0,0,0.282,0,0,0,0,2.238,13,188,0 0,0,0.38,0,0.38,0,0,0,0,0,0,1.15,0,0,0,0,0,0,0,0,0.38,0,0,0,0.38,0.38,0,0,1.93,0,0,0,0,0,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0.129,0,0,0,0,1.8,5,108,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.72,0,0,0,0,0,6.89,3.44,0,0,0,3.44,0,0,0,0,1.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.16,11,54,0 0,0,2.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.12,0,0,0,0,0,0,0,0,0,0,0,0,4.368,52,83,0 0,0,0,0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,2.7,0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,0,0,0,0,0,1.8,0,0.9,0,0,0,0,0,0,0.281,0,0,1.551,13,76,0 0,0,0.13,0,0.2,0,0,0,0,0,0,0.6,0.06,0,0,0.13,0,0,0.73,0.06,0.73,0,0,0,1.6,0.33,0,0.13,0,0,0.26,0,0,0,0.33,0.13,0.4,0,0,0,0,0,0,0,0.13,0.06,0,0.2,0,0.208,0,0.028,0.075,0,2.068,29,871,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.43,0,0,0,0,0,4.87,4.87,0,2.43,0,0,0,0,0,0,2.43,1.21,0,0,0,0,0,0,0,0,0,0,0,0,0.182,0.365,0,0,0,0,2.25,10,63,0 0,0,0.4,0,0,0,0,0,0,0.8,0,0,0,0,0,0,0,0,0.4,0,0.4,0,0,0,1.2,0.8,0,0,0,0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.099,0,0.049,0,0,2.288,9,135,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.79,1.79,0,0.89,0,0,0,0,0,0,0.44,0,0,0,0,0,0,0,0,0.89,0,0,0,0,0,0.136,0,0,0,0,1.988,24,179,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.29,2.19,0,3.29,0,0,0,0,0,0,1.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.155,0,0,0,0,2.862,15,83,0 0,0,0,0,1.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.96,0,0,0,0,0,0.666,0,0,0,0,2.111,7,19,0 0.19,0,0,0,0,0,0,0,0,0.59,0,0.19,0.19,0,0,0,0,0.19,0.59,0,0.19,0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0.59,0,0,0,0,0,0,0,0,0,0,0,0,0.127,0.095,0,0,0.031,0,1.411,7,120,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.33,4.16,4.16,4.16,4.16,4.16,4.16,4.16,0,4.16,4.16,4.16,0,0,0,4.16,0,0,0,0,0,0,0,0,0,1.176,0,0,0,0,3.444,11,31,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.69,4.34,4.34,4.34,4.34,4.34,4.34,4.34,0,4.34,4.34,4.34,0,0,0,4.34,0,0,0,0,0,0,0,0,0,1.19,0,0,0,0,3.333,11,30,0 0,0,0,0,0,0,0,0,0,0,0,0,1.09,0,0,0,0,0,1.63,0,0.54,0,0,0,1.09,0.54,0.54,0.54,0.54,0.54,0.54,0.54,0,0.54,0.54,0.54,0,0,0,0.54,0,0,0,0,0,0,0,0,0,0.17,0,0,0,0,1.373,11,169,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.25,2,5,0 0,0,0,0,0,0,0,0,0,0.58,0,0,0,0,0,0,0,0,2.9,0,0,0,0,0,1.74,1.16,1.16,1.74,0.58,1.16,0.58,0.58,0,0.58,0.58,1.16,0.58,0,0.58,0.58,0,0,0.58,0,0.58,0,0,0,0,0.379,0,0,0,0,2.222,12,140,0 0,0,0,0,0,0,0,0,0,0.67,0,0.67,0.67,0,0,0,0,0,2.68,0,0,0,0,0,2.68,1.34,2.01,0.67,0.67,0.67,0.67,0.67,0,0.67,0.67,0.67,0.67,0,0.67,0.67,0,0,0.67,0,1.34,0,0,0,0.107,0.537,0,0,0,0,2.604,17,112,0 0.34,0,0.34,0,0.34,0.34,0,0,0,0,0,0,0,0,0,0,0,0,2.41,0,1.03,0,0,0,2.06,1.03,1.03,0.68,0,0.68,0,0,0,0,0.68,0,1.03,0,0,0,0,0,0.34,0,0.68,0.34,0,0,0.116,0.292,0.058,0,0,0,2.333,15,182,0 0,0,1.2,0,0,0,0,0,0,0,0,2.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.2,0,0,0,0,0,0,0,0.666,1.111,0.222,0,0,2.826,8,65,0 0.08,0,0.16,0,0,0.08,0,0.08,0.08,0,0.16,0.74,0.57,0.16,0,0,0.41,0,0,0,0,0,0.24,0,3.3,0,0,0,0,0,0,0,0,0,0,0.24,0.24,0,0,0,0,0,0,0,0,0,0,0,0.199,0.105,0,0,0.023,0,1.878,24,740,0 
0.89,0,0,0,0.89,0.89,0,0,0,0,0,0,0,0,0,0,0,0,2.67,0,1.78,0,0,0,1.78,0.89,1.78,0.89,0,0.89,0,0,0,0,0.89,0,0.89,0,0,0,0,0,0,0,0.89,0,0,0,0.149,0.298,0,0,0,0,2.259,15,61,0 0,0,0,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0,2.63,2.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.208,10,53,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.55,0,2.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.25,10,13,0 0,0,0,0,0.32,0,0,0,0,0,0,0.32,0,0,0,0,0,0.32,0.64,0,0.32,0,0,0,1.28,1.28,0.64,0.32,0.32,0.32,0.32,0.32,0.64,0.32,0.32,0.32,0.96,0,0.32,0.32,0,0,0.64,0.32,0.32,0.64,0,0,0,0.094,0.047,0.094,0,0,1.919,13,167,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.76,0,0,0,1.53,0.76,2.3,0.76,0.76,0.76,0.76,0.76,0,0.76,0.76,0.76,0.76,0,0.76,0.76,0,0,0.76,0,0.76,0,0,0,0,0.339,0,0.339,0,0,1.813,12,78,0 0,0,0,0,0,0,0,0,0,0,0,1.6,0,0,0,0,0,0,0,1.6,0,0,0,0,8,0,0,0,0,0,0,0,0,0,0,0,0.8,0,0,0,0,0,0,0,0,0,0,0,0,0.136,0,0.273,0,0,2.588,29,88,0 0.51,0,0.51,0,1.53,0.51,0,0,0,0,0,0.51,0,0,0,0,0,0,3.58,0,0,0,0,0,2.56,0,2.05,0.51,0.51,2.05,0.51,0.51,0,0.51,0.51,1.02,0,0,0,0.51,0,0,0,0,1.02,0.51,0,0,0,0.27,0,0,0,0,1.983,24,121,0 0,0,0,0,0.51,0,0,0,0,0,0,0.51,0,0,0,0,0,0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.03,0,0,0,0.51,0,0,0,0,0,0,0.51,0,1.03,0,0,0,0,0,0,0,0,1.681,11,74,0 0,0,1.05,0,0,0,0,0,0,0,0,1.05,0,0,0,0,0,0,0,0,0,0,0,0,4.21,3.15,0,0,0,0,0,0,1.05,0,0,0,0,0,1.05,0,0,2.1,1.05,0,0,0,0,0,0.169,0,0.679,0,0,0,2.096,12,65,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.282,0,0,1,1,8,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.44,2.22,0,2.22,0,0,0,0,0,4.44,0,0,0,0,0,0,0,0,0,2.22,0,2.22,0,0,0,2.22,0,4.44,0,0,0,0,0,0,0,0,0,1.947,12,37,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.69,4.34,4.34,4.34,4.34,4.34,4.34,4.34,0,4.34,4.34,4.34,0,0,0,4.34,0,0,0,0,0,0,0,0,0,1.111,0,0,0,0,3.1,11,31,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.5,9,11,0 
0,0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.85,0,0,0,0.398,0,0,0,0.199,3.055,11,55,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.69,5.93,0,0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.84,0,0,0.84,0,0.84,0,0,0,0,0,0,0,0,1.285,4,36,0 0.34,0,0,0,0,0,0,0,0,0,0,0.69,0,0,0,0,0,0,3.12,0,0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0,0,0.432,0,0,0,0,1.526,11,87,0 0,0,0,0,0,0,0,0,0,0,0,1.58,0,0,0,0,0,0,0,0,1.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.58,0,0,0,0.287,0,0.287,0,0,1.076,2,14,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,0,0,0,0,0,3.26,0,0,1,1,5,0 0,0,0.9,0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0,3.63,0,0.9,0,0,0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.45,0.45,0,0,0,0.155,0,0.077,0,0,1.545,15,68,0 0,0,1.4,0,0,0,0,0,0,0,0,1.4,0,0,0,0,0,0,1.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.81,0,0,0,0,1.4,0,0,0,0,0,0.497,0,0,1.722,10,31,0 0.26,0,0.52,0,0.52,0,0,0,0,0.26,0,0.26,0,0,0,0,0,0.26,1.31,0,0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.26,0.52,0.26,0,0,0.047,0.047,0,0.047,0,0,1.081,3,53,0 0,0,0.27,0,0,0.27,0,0,0,0,0,0.27,1.39,0,0,0.27,0,0.27,2.79,0,0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0.27,0,0,0.051,0,0,0,0,0,1.195,6,55,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.23,0,1.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.23,0,0,0,0.202,0,0,0,0,1,1,14,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.45,0,3.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.81,0,0,0,0,0,0,0,0,1.125,2,9,0 0,0,0,0,0,0,0,0,0,0,0,0.97,0,0,0,0,0,0,1.94,0,0.97,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.97,0,0,0,0.255,0,0,0,0.127,2.344,11,68,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.32,2.32,0,0,0,0,0,0,0,0,1.666,5,25,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.22,0,2.15,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.15,0,0,0,0,1.07,0,0,0,0.197,0,0,0,0,2.315,7,44,0 
0,0,0,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,1.73,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.86,0,0,0.86,0,0.86,0,0,0,0.152,0,0.457,0,0,1.192,3,31,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.44,0,0,0,1.44,1.44,0,0,0,0,0,0.247,0,0,1.684,5,32,0 0,0,0,0,0,0.34,0,0,0,0,0,0.69,0,0,0,0,0,0,4.19,0,1.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0,0,0.34,0.34,0,0,0,0,0,0,0,0,1.206,5,70,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.9,0,0,0,0.174,0,0,0,0,1.222,4,22,0 0,0,0.49,0,0,0.49,0,0,0,0,0,0.99,0,0,0,0,0,0,2.47,0,0.99,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.49,0,0,0,0.093,0,0.093,0,0,1.275,4,51,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.943,0,0.943,0,0,2.166,5,13,0 0,0,0,0,0.96,0.48,0,0,0.48,0,0.48,0.48,0,0,0,1.44,0,1.92,0.96,0,1.44,0,0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0,0,0,0,0.666,0,0,4.437,27,142,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.55,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,2.01,0,0,0,0,0,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.67,0,0,0,0,0,0,0,0.67,0,0,0,0,0,0,0,0.26,0,1.592,5,43,0 0,0,0.59,0,0.19,0,0,0,0,0,0,0.39,0.19,0,0,0.19,0.19,0.19,2.19,0,0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.39,0.19,0,0,0,0.232,0,0,0.038,0,1.129,4,96,0 3.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.84,0,0,0,0.645,0,0,0,0,1,1,6,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,1.724,0,0,1,1,6,0 0,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.16,0,2.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.16,0,0,0,0,0,0,0,1.16,1.16,0,0,0,0,0,0.578,0,0,1.36,5,34,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0.684,0,0,0,0,1.125,2,9,0 
0,0,0,0,0,0.57,0,0,0,0,0,0,0,0,0,0,0,0,2.31,0,2.89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.57,0,0,0,0,0,0,0,1.73,0.57,0,0,0,0,0,0,0,0,1.645,5,51,0 0.54,0,0,0,0,0,0,0,0,0,0,2.18,0.54,0,0,0,0,0,3.82,0,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.54,0,0,0,0,0,0,0,1.09,0,0,0,0,0.294,0,0.392,0,0,1.829,7,75,0 0,0,0,0,0,0,0,0,0,1.38,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.38,0,0,0,0,0,0,0,0,1.5,4,24,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.19,0,1.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.06,0,0,0,1.06,1.06,0,0,0,0,0,0.398,0,0,1.181,5,26,0 0,0,0,0,0,0,0,0,0,0,0,1.29,0,0,0,0,0,0,1.94,0,2.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0.105,0.105,0,0,0,1,1,30,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.89,0,0.44,0,0,0,0,0,1.34,2.69,0,0,0,0,0,0,0,0,2.362,15,137,0 0,0,0,0,0,0,0,0,0,0,0,3.84,0,0,0,0,0,0,5.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.92,1.92,0,0,0,0,0,0,0,0,1.166,3,14,0 0,0,0.67,0,0,0,0,0,0,0,0,1.34,0,0,0,0,0,0,4.69,0,1.34,0,0,0,0,0,0,0,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.67,0,0,0,0.493,0,0,0,0,1.24,3,31,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.4,0,3.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.7,0,0,0,0,0,0.613,0,0,1,1,8,0 0,0,0,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.16,4.16,0,0,0,0,0,0,0,0,1,1,9,0 0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1.428,3,20,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.12,0,0,0,2.12,0,0,0,0.344,0,0,0,0,1.4,5,14,0 0,0,0,0,0,3.57,0,0,0,0,0,0,0,0,0,0,0,0,7.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,0,0,0,0,0,1,1,7,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.85,0,0,0,0,0,0,0,2.85,2.85,0,0,0,0.473,0,2.843,0,0,1.294,5,22,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,0,0,0,0,3.57,3.57,0,0,0,0.564,0,0,0,0,1.454,5,16,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,3.33,0,0,0,0.537,0,1.075,0,0,1.2,3,12,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.86,0,0.28,0,0,0,0,0,0.86,1.72,0,0,0,0,0,0,0,0,2.557,16,179,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.68,4.08,0,0.68,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.68,0,0,0,0,0.68,0,0,1.36,0.68,0,0,0,0.38,0,0,0,0,1.607,6,45,0 0.49,0,0.49,0,0.49,0,0,0,0,0,0,0.99,0,0,0,0,0,0,0.99,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.49,0,0,0,0,0,0,0,0.99,0.49,0,0,0,0,0,0.091,0,0,1.214,5,51,0 0,0,0,0,0,1.21,0,0,0,0,0,0,0,0,0,0,0,0,1.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.21,0,0,0,0,0,0,0,1.21,0,0,0,0,0.212,0,0,0,0,1.406,5,45,0 0,0,0,0,0,0,0,0,0,0,0,2.38,0,0,0,0,0,1.19,2.38,0,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,0,0,0,0,0.395,0,0,0.197,0,1.428,4,30,0 0,0,0,0,0,0,0,0,0,0,0,3.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.438,0,0,0,0,1,1,9,0 0,0,0,0,0,0,0,0,0,0,0,1.81,0,0,0,0,0,0,1.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.9,0,0,0,0,0,0,0,0.159,0,0,0.159,0,1.515,5,50,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5,5,0,0,0,0,0,1.438,0,0,1,1,7,0 0.08,0,0.17,0,0,0.08,0,0,0.08,0,0,0,0.08,0,0,0,0,0.08,4.19,0,1.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.26,0,0,0,0.031,0.078,0,0.078,0,0,1.114,9,272,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.85,2.85,0,0,0,0,0,0,0,0,1.111,3,20,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.17,0,2.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.17,2.17,0,0,0,0.743,0,0.371,0,0.371,1.714,11,24,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.69,0,0,0,0,0,0,0,0,0,1.142,2,8,0 
1.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.31,1.31,0,0,0,0,0,0,0,0,1.25,3,30,0 0,0,0,0,0,0,0,0,0,0,0,0.84,0,0,0,0,0,0,5.04,0,0.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.84,0,0,0,0,0.143,0,0.143,0,0,1.37,4,37,0 0,0,0,0,0,0,0,0,0,0,0,1.86,0,0,0,0,0,0,1.86,0,0.93,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.93,0,0,0,0,0,0,0.165,0,0,1.238,4,26,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.17,0,0,0,0,0,0.704,0,0,1,1,10,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.68,0,0,1,1,11,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.69,0,0,7.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,6,0 0,0,1.04,0,0,0,0,0,0,1.04,0,0,0,0,0,0,0,0,5.2,0,0,0,0,0,1.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.08,0,0,0,0,0.211,0,0.422,0,0,1.16,4,29,0 0,0,0,0,0,0,0,0,0,0,0,1.53,0,0,0,0,0,0,4.61,0,0,0,0,0,0,0,0,0,1.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.222,5,20,0 0,0,0,0,0.79,0.79,0,0,0,0,0,0,0,0,0,0,0,0,3.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.58,0,0,0,0.79,0,0,0,0,0,0,0,0,0,1.076,2,28,0 0.13,0,0.41,0,0,0,0,0.27,0,0,0.27,1.93,0.13,0,0,0,0,0.27,1.65,0,0.13,0,0,0,0,0,0,0,0,0,0,0,0.13,0,0,0,0,0,0,0,0,0,0,0,0.82,0,0,0.13,0,0.023,0.046,0.164,0,0,1.279,11,183,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.61,0,0,0,0,0,0,0,0.61,0,0,0,0,0.118,0,0,0.118,0,1.59,5,35,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0,0,0,2.666,7,24,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.22,0,0,0,0,0,0,0,0.546,0,0,1.75,7,14,0 0,0,0,0,0,0,0,0,0,0.95,0,0,0,0,0,0,0,0,2.85,0,0.95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.95,0,0,0,0.172,0.172,0,0,0,0,1.263,5,24,0 
0,0,1.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.49,0,2.98,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.49,0,0,0,0,0,0,1.069,0,0,1,1,13,0 0,0,0.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.41,7.88,0,0,0.109,0,0,0.054,0,0,1.786,14,134,0 0,0,0,0,0,0.6,0,0,0,0.6,0,0.6,0.6,0,0,0,0,0,3.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.6,0,0,0,0,0,0,0,0.6,3.04,0,0,0.094,0,0,0.094,0.189,0,1.976,15,83,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.89,0,0,0,0.188,0,0.564,0,0,1,1,14,0 0,0,0,0,0,0,0,0,0,0,0,1.28,0,0,0,0,0,0,3.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.56,0,0,0.182,0.182,0,0,0,0,1,1,9,0 0,0,0,0,0,0,0,0,0,0,0,0,0.47,0,0,0,0,0,1.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.47,0,0,0,0,0.47,0.47,1.91,0,0,0,0.076,0,0.076,0,0,1.833,12,77,0 0,0,0,0,0,0,0,0,0,0,0,1.75,0,0,0,0,0,0,3.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.87,1.75,0,0,0,0,0,0.259,0,0,1.681,12,37,0 0.66,0.66,0.66,0,0,0,0,0,0,1.33,0,0,0,0,0,0.66,0,0,3.33,0,2.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.117,0,0,2.487,17,97,0 0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,5,0,0,0,0,0,0,0,0,2.413,15,70,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.88,0,0,0,0,0,0,0,0,1,1,8,0 0,0,0,0,0,0,0,0,0,0,0,0,0.68,0,0,0,0,0,4.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.68,0,0,0,0,0,0,0,0.68,3.42,0,0,0,0,0,0.109,0.218,0,1.897,15,74,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.12,0,0,0,0,0,0.248,0,0,1.1,2,11,0 0,0,0,0,0,0,0,0,0,1.44,0,0,0,0,0,0,0,0,4.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.89,0,0,0,0,0,0.954,0,0,9.125,63,73,0 0,0,0,0,0,0,0,0,0,0,0,0.45,0,0,0,0,0,0,4.1,0,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.45,0.91,0,0,0,0.219,0,0,0,0,1.225,5,49,0 
0,0,2.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.08,0,2.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.04,4.08,0,0,0.226,0,0,0,0,0,1,1,8,0 0,0,0,0,0,0,0,0,0,2.55,0,0,0,0,0,0,0,0,3.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.02,1.02,0,0,0,0.253,0,0.169,0.169,0,1.677,7,52,0 0,0,0,0,0,0,0,0,0,0.84,0,0.84,0,0,0,0,0,0,2.54,0,1.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.69,0,0,0,0.134,0,0,0,0,1.285,5,27,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.81,0,1.16,0,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.32,0,0,0,0.163,0,0.49,0,0,2.125,7,34,0 0,0,0.35,0,0.35,0,0,0,0.35,0,0,0,0,0,0,0,0,0,1.4,0,3.5,1.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.35,0,0,0,0,0,0,0,0,0.35,0,0,0,0.65,0,0,0,0.05,2.483,17,226,0 0,0,0.52,0,0,1.04,0,0,0,0.52,0,1.57,0,0,0,0,0,0,3.66,0,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.04,0,0,0,0,0,0.09,0,0,1.466,6,44,0 0,0,0,0,0,0,0,0,0,1.02,0,0,1.02,0,0,0,0,0,4.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.04,0,0,0,0,0,0.147,0,0,1.333,4,24,0 0.63,0.63,0,0,0,0,0,0,0,0.63,0,0,0,0,0.63,0,0,0,4.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.27,1.91,0,0,0,0.204,0,0.102,0,0,1.361,4,49,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.25,0,1.25,2.5,0,1.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.176,55,71,0 0.1,0.72,0.62,0,0.62,0.1,0.2,0.2,0,0,0.1,0.51,0,0,0,0,0,0.82,3.61,0,0.93,0,0.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0,0.41,0,0,0,0.122,0,0.157,0,0,2.213,29,425,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.25,0,0,0,0,0,0,0,0,0,1,1,7,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.47,1.47,0,0,0,1.066,0,0.213,0,0,1.333,3,36,0 0,0,0,0,0,0,0,0,0,0,0,0,1.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.29,0,0,0,0.246,0,0,0,0.246,0,1.363,4,30,0 0,0,0,0,0,0,0,0,0,0,0,0,1.13,0,0,0,0,0,1.13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.13,0,0,0,0.634,0,0.211,0,0.211,0,1.347,4,31,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.253,0.253,0,0,0,2.352,17,40,0 0,0,1.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.17,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.23,4,16,0 0.34,0,0.69,0,0,0,0,0,0,0,0,0.69,0,0,0,0,0,0,2.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.69,0,1.39,0,0.34,0,0,0,0.374,0,0,0,0,1.775,5,71,0 0,0,0,0,0,0,0,0,0,0,0,0.6,0,0,0,0,0,0,1.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.21,0,0,1.454,5,32,0 0.9,0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.9,3.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.9,0,0,0,0,0,1.8,0.9,0,0,0,0,0,0,0,0,0,1.727,5,19,0 0,0,0.4,0,0,0,0,0,0.4,0.4,0,0,0,0,0,0,0,0.4,1.63,0,0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.4,0,0,0.81,0,0,0,0,3.68,0,0,0.139,0,0,0.069,0,0,2.525,15,101,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.8,0,0,0,0.9,4.5,0,0,0.145,0,0,0,0,0,2.638,20,124,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0,2.263,0,0,0,0,3.149,9,1310,0 0,0,0,0,0.66,0,0,0,0,0,0,0.66,0,0,0,0,0.66,0,3.33,0,0,0,0,0,0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.66,0,0,0,0,0,0.254,0,0,0,0,1.458,7,35,0 1.08,0,1.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.08,0,0,0,0,0,0,0,0,0,1.523,5,32,0 0,0,0,0,0,0,0,0,0,0.44,0,0,0.44,0,0,0,0,0,3.53,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.44,0.44,0,0,0,0,0,0,0,0,2.063,47,97,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.06,0,0,0,0,0,0,0,1.06,2.65,0,0,0,0.322,0,0,0,0.129,2.6,18,182,0 0,0.78,1.56,0,0,0,0,0,0,0,0,0.78,0,0,0,0,0,1.56,5.46,0,3.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.137,0,0.275,0,0,1.625,9,39,0 
0,0,0,0,0,1.63,0,0,0,0,0,0,0.81,0,0,0,0,0,3.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.81,0,0,0,0,0,0.81,2.45,0,0,0,0,0,0,0,0,2.829,47,116,0 0,0,0.55,0,0,0,0,0,0,0,0,0.55,0.55,0,0,0,0,0,0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.55,0,0,0,0.087,0,0,0,0,2.54,47,94,0 0,0,0,0,0,0,0,0,0,0,0,0,1.26,0,0,0,0,0,2.53,0,1.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.26,0,0,0,0,0,0,0,0,4.352,47,74,0 0,0,0,0,0,0,0,0,0,0,0,0,0.62,0,0,0,0,0,3.75,0,0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.25,1.87,0,0,0,0,0,0,0,0,2.704,47,119,0 0,0,0.81,0,0.27,0,0,0,0,0.27,0,0.27,0.27,0,0,0,0,0,2.16,0,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.54,0.27,0,0,0.045,0.091,0,0.045,0,0,2.078,47,106,0 0,0,0.78,0,0,0.78,0,0,0,0.78,0,0,0.78,0,0,0,0,0,1.56,0,0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.78,0,0,0,0.12,0,0.12,0,0,2.862,47,83,0 0,0,0,0,0,0,0,0,0,0,0,0,1.47,0,0,0,0,0,2.94,0,2.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.47,0,0,0,0,0,0,0,0,4.312,47,69,0 0,0,0,0,0,0,0,0,0,0.54,0,0,0.54,0,0,0,0,0,5.43,0,1.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.54,1.63,0.54,0,0,0,0.083,0,0,0,0,2.827,47,82,0 0,0,0,0,0,0.33,0,0,0,0,0,0,0.82,0.16,0,0,0,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.49,0.16,0,0,0.019,0.039,0,0.059,0,0,1.632,47,191,0 0,0,0,0,0,0.65,0,0,0,0,0,0,0.65,0,0,0,0,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.65,0,0,0,0,0,0,0,0,2.555,47,92,0 0,0,0.43,0,0,0,0,0,0,0,0,3.94,0,0,0,0,0,0,2.63,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.314,5,46,0 0,0,0.5,0,0,0.5,0,0,0,0,0,0,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0.5,0,0,0,0,0,0,0,0,2.527,47,91,0 0,0,0,0,0,0,0,0,0,0,0,0,1.09,0,0,0,0,0,2.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.09,0,0,0,0,0,0,0,0,3.304,47,76,0 0.32,0,0.16,0,0,0,0,0,0,0,0,1.29,0.48,0,0,0.16,0,0,2.43,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0,0,0.48,0.16,0,0,0,0,0,0.082,0,0,1.704,47,167,0 
0.43,0,1.31,0,0,0.43,0,0,0,0,0,0,0.87,0,0,0,0,0,0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.43,0,0,0,0,0,0,0,0,2.137,47,109,0 0,0,0,0,0,0,0,0,0,0,0,0,0.97,0,0,0,0,0,4.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.97,0,0,0,0,0,0,0,0,3.391,47,78,0 0,0,0.67,0,0,0,0,0,0,0,0,1.01,0.33,0,0,0,0,0,1.35,0,0.33,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.33,0.33,0,0,0,0,0,0.174,0,0,2.071,47,116,0 0.15,0,0.15,0,0,0,0,0,0.07,0,0,0.07,0.15,0,0,0.07,0,0.07,3.6,0,1.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.15,0,0,0,0.013,0.123,0,0.082,0,0,1.111,9,328,0 0.09,0,0.54,0,0,0.09,0,0,0.09,0,0,0.09,0.09,0,0,0.09,0,0,0.09,0,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.09,0,0,0,0.017,0,0.034,0,0,1.429,47,306,0 0,0,0.38,0,0.19,0.29,0,0,0,0,0,0,0.87,0,0,0.09,0,0,0.19,0,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.09,0,0,0,0,0,0.09,0,0,0,0,0,0,0,0,1.508,47,187,0 0,0,0.09,0,0,0,0,0,0,0,0,0.47,0.66,0,0,0.09,0,0,1.23,0,0.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.37,0.09,0,0,0,0.033,0,0,0,0,1.536,47,192,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.68,0,1.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.67,0,0,0,0,0,0,0.471,0,0,1.033,2,31,0 0,0,1.57,0,0.22,0.22,0,0,0,0,0,0,0.22,0,0,0,0,0,2.02,0,0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.22,0,0,0,0,0,0.89,0,0,0,0,0.091,0,0.045,0,0,1.276,16,97,0 0,0,0.66,0,0,0.66,0,0,0,0,0,0.66,0,0,0,0,0,0,1.66,0,0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.33,0.33,0,0,0,0,0,0,0,0,0,1.142,4,56,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.86,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.86,0,0,0,0,0,0,0,0,3.103,51,90,0 0,0,0,0,0,0,0,0,0,0.86,0,1.72,0.86,0,0,0,0,0,2.58,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.86,0,0,0,0.321,0,0.214,0,0,3.956,51,91,0 0,0,0,0,0,0,0,0,0,0,0,1.28,0,0,0,0,0,0,3.84,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.56,1.28,0,0,0,0,0,0,0,0,3.772,51,83,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.57,0,0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.78,0.78,0.78,0,0,0,0,0,0,0,0,2.848,51,94,0 0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0,0,0,0,4.05,51,81,0 0,0,0,0,0,0,0,0,0,0.75,0,0,0,0,0,0,0,0,2.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.75,0,0,0,0,0,0,0,0,3.333,51,90,0 0,0,0.25,0,0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,2.05,0,0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.76,0.25,0,0,0,0.094,0,0.047,0,0,1.884,51,147,0 0,0,0.48,0,0.32,0.16,0,0,0.32,0,0,0,0.16,0,0,0,0,0,2.26,0,0.48,0,0,0.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0.16,0,0,0,0.086,0,0.057,0,0,1.698,51,158,0 0,0,1.88,0,0.94,0,0,0,0,0,0,0,0,0,0,0,0,0,2.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.94,0,0,0,0,0,0,0.756,0,0,1,1,22,0 0.38,0,1.16,0,0,0,0,0,0,0,0,1.16,0,0,0,0.77,0,0,0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.77,0.38,0,0,0,0,0,0,0,0,2,51,114,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.58,0,0,0,0,0,0,0,0,4.368,51,83,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.67,0,0,0,0,0,0,0,2.01,2.68,0,0,0,0.102,0,0,0,0,3.4,51,119,0 0,0,0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.76,0,0,0,0,0,0,0,1.52,2.29,0,0,0,0.139,0,0,0,0,2.29,16,71,0 0,0,0.53,0,0,0.53,0,0,0,0.53,0,0,0.53,0,0,0,0,0,2.15,0,0.53,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.07,0,0,0,0,0.101,0,0,0,0,1.857,16,52,0 2.32,0,0,0,0,0.77,0,0,0,0,0,0.77,0,0,0,0,0,0,4.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.55,0,0,0,0,0,0,0.159,0,0,1.346,4,35,0 0,0,0,0,0,0,0,0,0,1.43,0,0,0,0,0,0,0,0,2.15,0,0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.71,0.71,0,0,0,0,0,0,0,0,2.939,51,97,0 0,0,0,0,0.64,1.29,0,0,0,0,0,0,0,0,0,0,0,0,1.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,1.29,0,0,0,0,0,1.29,1.94,0,0,0,0,0,0.188,0,0,2.686,51,137,0 
0,0,0.27,0,0,0,0,0,0.27,0.55,0,0,0,0,0,0,0,0,3.3,0,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0.27,0,0,0,0,0,0.048,0,0,1.873,47,118,0 0,0,1.39,0,0,0,0,0,0,0,0,0.34,0,0,0,1.04,0,0,4.52,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.04,0.34,0,0,0,0.122,0,0,0,0,1.963,47,108,0 0,0,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0.6,0,0,1.8,0,1.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.817,0,0,1.857,15,39,0 0,2.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.12,0,2.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.12,0,0,0,0,0,0,0,0,4.117,47,70,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.7,0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.92,0.92,0,0,0,0,0,0,0.857,0,2.918,47,108,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.06,0,0,1.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.12,1.06,0,0,0,0.14,0,0,0,0,2.625,47,84,0 0.7,0,0.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.41,1.41,0,0,0,0,0,0.105,0,0,2.342,47,89,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.03,0,0,3.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.03,1.03,0,0,0,0,0,0,0,0,2.843,47,91,0 0,0,0,0,0,0,0,0,0,0,0,1.53,0.76,0,0,0,0,0,3.07,0,0.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.444,6,26,0 0,0,0.91,0,0,0,0,0,0,0,0,0.91,0.91,0,0,0,0,0,5.5,0,0.91,0,0,1.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.91,0.91,0,0,0,0,0,0.13,0,0,2.457,47,86,0 0,0,0,0,0,0,0,0,0,0,0,0.83,0,0,0,0,0,0,3.33,0,0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.83,0,0,0,0.83,0,0,0,0.12,0,0,0,0,3.137,47,91,0 0,0,1.17,0,0,0,0,0,0,1.17,0,2.35,1.17,0,0,0,0,0,1.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.17,0,0,0,0,0,0,0,0.361,0,0.361,0.18,0,1.652,4,38,0 0,0,0,0,0,0,0,0,0,0.96,0,0,0.96,0,0,0,0,0,1.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.96,0,0,2.88,0,0,0,0,0.327,0,0.327,0.327,0,1.482,4,43,0 0,0,0,0,0,0.78,0,0,0,0,0,0,0,0,0,0,0,0.78,0,0,0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.277,0,0.263,0,0,1.047,2,22,0 
0,0,1.17,0,1.17,0,0,0,0,0,0,3.52,0,0,0,0,0,0,1.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.35,0,0,2.35,0,0,0,0,0.192,0,1.156,0.192,0,1.7,6,34,0 0,0,1.17,0,0,0,0,0,0,0,0,2.35,0.78,0,0,0,0,0,3.13,0,0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.39,0,0,0,0.284,0,0.284,0.213,0.071,1.565,12,72,0 0,0,1.5,0,0.75,0,0,0,0,0,0,0.75,1.5,0,0,0.75,0,0,1.5,0,0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.75,0,0,0,0,0.147,0,0.441,0,0,2,6,54,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.331,0,0.331,0,0,1.714,4,24,0 0,0,0,0,0,0,0,0,0,0,0,1.88,0,0,0,0,0,0,1.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.33,0,0.33,0,0,1.769,4,23,0 0.36,0,0.36,0,0.36,0,0,0,0,0,0,0.72,0,0,0,0,0,0,1.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.72,0,0,0,0,0,0.36,0,1.08,0.72,0,0,0.124,0,0.062,0.062,0,0,1.414,13,116,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.75,0,1.75,5.26,0,1.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.238,0,0,4.375,55,70,0 0,0,0.39,0,0.39,0.39,0,0,0,0,0,0,0.39,0,0,0.39,0,0.39,1.17,0,0.78,0,0.39,0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.39,0,0,0,0.065,0.065,0.261,0.065,0,2.89,55,159,0 0,0,0,0,0,0,0,0,0,0,0,1.31,0,0,0,1.31,0,1.31,0,0,3.94,0,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0.194,0,0,0,5.2,55,104,0 0,0,1.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.05,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.36,0.68,0,0,0,0.113,0,0,0,0,1.315,4,25,0 0,0,0.71,0,0,0,0,0,0,0,0,0.71,0,0,0,0,0,0,2.15,0,0.71,0,0,0,0,0,0,0,0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.342,0,0,1,1,31,0 0,0,0.9,0,0,0,0,0,0,0.45,0,0,0,0,0,0.45,0,0.45,0,0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.45,0,0,0,0,0,0,0,0.45,0.45,0,0,0.056,0.227,0,0.056,0,0.056,5.8,70,290,0 0,0,1.25,0,0.62,0,0,0,0,0,0,1.25,0,0,0,1.88,0,0,4.4,0,0,0,0,1.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.327,0,0,0.109,0.109,1.705,9,58,0 
0.31,0,0.31,0,0,0,0,0,0,0,0,0.31,0.31,0,0,0,0,0,2.84,0,0.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.059,0,0.709,0,0,1.119,4,47,0 0,0,0.21,0,0.21,0,0,0.21,0,0,0,0,0,0,0,0,0,0,1.94,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.079,0,0.039,0.119,0,0.039,1.086,3,101,0 0,0,1.85,0,0,0,0,0,0,1.85,0,1.85,1.85,0,0,0,0,0,5.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.692,0,0,1.727,5,19,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.216,0,0,1,1,18,0 0,0,0.35,0,0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,2.47,0,0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.35,0,0,0,0,0,0.7,0,0,0,0,0.064,0,0.324,0,0,1.12,3,56,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.653,0,0,1.666,5,10,0 0,0,0.58,0,0,0.58,0,0,0,0,0,0,0,0,0,0,0,0,1.76,0,1.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.108,0.108,0.432,0,0,1,1,35,0 0.28,0,0.28,0,0.57,0,0,0,0,0,0,0.28,0,0,0,0,0,0,2.87,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.154,0,0.308,0,0,1.148,4,54,0 0,0,0,0,0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,1.89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.63,0,0,0,0,0,0.103,0,0.62,0,0,1,1,26,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,8,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.757,0,0,1.222,4,22,0 0.39,0,0.13,0,0.13,0,0,0,0.13,0,0.13,0.13,0,0,0,0.13,0,0,3.85,0,1.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.066,0,0,0,0.022,1.514,21,159,0 0,0.49,0,0,0,0,0,0,0,0,0,0.49,0,0,0,0,0,0,2.94,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.241,0,0,0,0.08,1.77,21,85,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.66,0,0,0,1.66,3.33,0,0,0,0.8,0,0,0,0,1.5,4,33,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.87,0,0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.478,0,0,0,0,1.333,4,28,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.69,0,0,0,0,0,0,0,0,0,0,0,0,1.4,4,14,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.69,0,0,0,0,1.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.69,0,0,1.69,0,0,0,0,0,0,0,0,0,1.071,2,15,0 0,0,0,0,0,0,0,0,0,0,0,1.25,0,0,0,0,0,0,1.25,0,3.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.25,0,0,0,0,0.715,0,0,0,0,1.411,4,24,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.63,0,2.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.523,0,0,0,0,1.6,4,16,0 0,0.52,0.52,0,0,1.57,0,0,0,0,0,0,0,0,0,0.52,0,0.52,1.04,0,0.52,0,0,0,0,0,0,0,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0.52,0,0.52,0,0,0,0,0,0.087,0,0.175,0,0,1.093,3,35,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.94,0,2.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,6,0 0,0,0,0,0,0,0,0,0,0,0,1.92,0,0,0,0,0,0,1.92,0,1.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.5,21,42,0 0,0,0.19,0,0,0,0,0,0,0,0,0.79,0,0,0,0.39,0,0,0.99,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0,0.19,1.19,0,0,0,0,0,0.029,0,0,1.131,11,155,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.69,0,0,0,0,0,0,9.575,0,0,1.387,5,43,0 0.28,0,0.28,0,0,0,0,0,0,0,0,0.28,0.28,0,0,0.28,0,0.28,1.97,0,0,0,0,0.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.28,0,0,0,0.103,0,5.054,0,0,1.403,18,80,0 0,0,0.73,0,0.36,0.36,0,0,0,0,0,0,0,0,0,0,0,0,2.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.387,0,0,1.131,4,69,0 0,0,0.48,0,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,2.43,0,0.97,0,0,0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0,0,0.45,0,0,1.138,4,41,0 0,0,0,0,0,0.61,0,0,0,0,0,0.61,0,0,0,0,0,0,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.61,0,0,0,0,0,0,0,0,0,1,1,35,0 
0,0.35,0.35,0,0,0.35,0,0,0,0.35,0,0.71,0,0,0,0,0,0,3.58,0,1.07,0,0,0,0,0,0,0,0.35,0,0,0,0,0,0,0,0,0,0.35,0,0,0,0,0,0,0,0,0,0,0.12,0.06,0,0,0,1.787,11,118,0 0,0,0,0,0,0,0,0,0,0,0,1.29,0,0,0,0,0,0,2.59,0,2.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0,0.64,0,0,0,0,0,0,0,0,0,1.5,5,51,0 0,0,0.51,0,0,0.51,0,0,0,0,0,0,0.51,0,0,0.51,0,0,1.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.087,0,0,1.218,6,39,0 0,0.38,0.38,0,0,0.38,0,0,0,0.38,0,0.77,0,0,0,0,0,0,3.5,0,1.16,0,0,0,0,0,0,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.131,0.065,0,0,0,1.843,11,118,0 0.95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.85,0,0.95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.95,0,0,0,0,0,0,0,0,0,0,0,0,0.35,0,0,0,0,2.608,14,60,0 0.76,0,0,0,0,0.76,0,0,0,0,0,0,0,0,0,0,0,0,3.07,0,3.07,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.253,0,0.253,0,0,2.172,9,63,0 0,0,0,0,0,0,0,0,0,0,0,1.69,0,0,0,0,0,1.69,0,0,1.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.69,0,0,0,0,0,0,0.278,0,0,1.777,4,32,0 0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,0,0,0,0,3.33,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,0.558,0,0,0,0,1,1,6,0 1.47,1.47,0,0,0,0,0,0,0,1.47,0,0,0,0,0,0,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.391,21,55,0 0,0.87,0.87,0,0,0,0,0,0,0.87,0,0.87,0,0,0,0,0,0,3.5,0,0.87,0,0,0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.87,0,0,0,0,0,0,0,0.138,0,2.136,21,47,0 0,3.03,0,0,0,0,0,0,0,3.03,0,0,0,0,0,0,0,0,3.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.03,0,0,0,0,0,3.03,0,0,0,0,0,0,0,0,0,2.769,21,36,0 0,1.08,0,0,0,0,0,0,0,1.08,0,3.26,0,0,0,0,0,0,5.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.08,0,0,0,0,0,0,0.169,0,0,2.052,21,39,0 0,2.7,0,0,0,0,0,0,0,2.7,0,0,0,0,0,0,0,0,8.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.7,0,0,0,0,0,0,0,0,0,2.538,21,33,0 0.58,0,0,0,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,2.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.58,0,0,0.58,0,0,0.58,1.16,0,0,0,0.165,0,0.082,0,1.403,2.674,17,115,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,1.75,0,0,0,0,1.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.25,0,0,1.285,3,18,0 0,1.28,0,0,0,0,0,0,0,1.28,0,0,0,0,0,0,0,0,5.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.28,0,0,1.28,1.28,0,0,0,0,0,0,0,0,0,2.105,21,40,0 0,0.36,0.36,0,0,0,0,0,0,0.36,0,0,0,0,0,0,0,0,1.47,0,0.36,8.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.121,0,0,0.063,0,0.507,7.326,43,359,0 0,0.42,0.21,0,0,0,0,0,0,0.42,0,0,0,0,0,0,0,0,1.26,0,0.21,8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.21,0,0,0.42,0,0,4.385,0,0,0.071,0,0.503,6.822,43,614,0 0,0.36,0,0,0.36,0,0,0,0,0.36,0,0.36,0,0,0,0,0,0,1.08,0,0,7.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.972,0,0,0.063,0,0.504,6.423,43,334,0 0,0.44,0,0,0.44,0,0,0,0,0.44,0,0.44,0,0,0,0,0,0,0.44,0,0,8.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.367,0,0,0.074,0,0.592,7.288,43,328,0 0,0.41,0,0,0,0,0,0,0,0.41,0,0.41,0,0,0,0,0,0,0.41,0,0,8.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.096,0,0,0.07,0,0.776,7.531,43,354,0 0,1.35,1.35,0,0,0,0,0,0,1.35,0,0,0,0,0,0,0,0,2.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.35,0,0,0,0,0.221,0,0,0,0,2.222,21,40,0 0,1.38,1.38,0,0,0,0,0,0,1.38,0,0,0,0,0,0,0,0,9.72,0,1.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.38,0,0,0,0,0,0,0,0,0,2.052,21,39,0 0,2.12,0,0,0,0,0,0,0,2.12,0,2.12,0,0,0,0,0,0,6.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.12,0,0,0,0,0,0,0,0,0,2.692,21,35,0 0.35,0.35,0,0,0,0,0,0,0,0.35,0,0.35,0,0,0,0,0,0,1.42,0,0,11.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.672,0,0,0.06,0,0.481,7.464,43,418,0 0,0,0,0,0,0,0,0,0,0,0,1.01,0,0,0,1.01,0,0,2.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.327,0,0,1.263,6,24,0 0,0.36,0,0,0,0,0,0,0,0.73,0,0,0,0,0,0,0,0,1.46,0,0.36,10.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.838,0,0,0.062,0,0.503,6.912,43,394,0 
0,1.42,0,0,0,0,0,0,0,1.42,0,0,0,0,0,0,0,0,4.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.42,1.42,0,0,0,0,0,0,0,0,3.555,21,96,0 0,1.78,0,0,0,0,0,0,0,1.78,0,3.57,0,0,0,0,0,0,8.92,0,1.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.78,0,0,0,0,0,1.78,0,0,0,0,0,0,0,0,0,2.388,21,43,0 0.36,0,0.73,0,0,0,0,0,0,0.73,0,0.73,0,0,0,0,0,0,3.3,0,0,0,0,0,0.73,1.1,0,0.73,0.36,0.36,0,0,0,0,0.36,0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0.231,0,0,0,0,2.482,16,144,0 1.49,0,0,0,0,0,0,0,0,0,0,1.49,0,0,0,0,0,0,4.47,0,1.49,0,0,0,0,1.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.49,0,0,0,0,0,0,0,0,0,0,1.933,8,29,0 0,0,0,0,0,0,0,0,0,0.69,0,2.09,0,0,0,0,0,0,4.19,0,0.69,0,0,0,1.39,3.49,0,1.39,0.69,0.69,0,0,0,0,0.69,1.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0.281,0,0,0.093,0,2.744,12,129,0 0.16,0,0.32,0,0,0.16,0,0,0,0.16,0,1.44,0,0,0,0.16,0,0,3.21,0,0.96,0,0,0,0.16,0.16,0,0,0.16,0.16,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0.64,0,0,0,0.32,0.185,0.318,0,0.079,0,0.053,1.695,36,290,0 0,0,0,0,0,0,0,0,0,1.02,0,1.02,0,0,0,0,0,0,5.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.04,0,0,0,0,0.309,0.154,0,0.154,0,0,3.304,48,76,0 0,0,2.32,0,0,0,0,0,0,2.32,0,0,0,0,1.16,0,0,0,2.32,0,0,0,0,0,0,2.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.16,0,1.16,0,0,0,0,0.204,0,0,0,0,1.75,11,35,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.72,0,0,0,0,0,0.72,0.72,0,2.17,0,0,0,0,0,0,1.44,0,0,0,0,0,0,0,0,0.72,0,0,0.72,0,0,0.204,0,0.306,0.102,0,2.538,22,99,0 0,0.56,0,0,0,0,0,0,0,0,0,0.56,0,0,0,0,0,0,2.27,0,0,0,0,0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.56,0.56,0,0,0,0,0.099,0,0,0,0.099,1.035,2,29,0 0,0,0,0,0,0,0,0,0,0,0,0.67,0,0,0,0,0,0,4.05,0,2.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.67,0,0.67,0,0,0,0,0.679,0,0,0,0,1.636,6,72,0 0,0,0,0,3.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.307,5,17,0 0,0.8,0,0,0.6,0,0,0.2,0,0.2,0,0,0,0,0,1.8,0,2.2,1.8,0,2.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0.2,0,0,0,0,0,0.06,0,0,2.533,43,228,0 
0,0.37,0.37,0,0.09,0.09,0,0.37,0,0,0,0.28,0.28,0,0,0.84,0.09,0.56,2.72,0,2.16,0,0.18,0,0,0,0,0,0,0,0,0,0,0,0,0,0.09,0,0,0,0,0,0,0,0.18,0,0,0,0,0.056,0,0.142,0.071,0.014,1.934,19,383,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,1.571,3,11,0 2.27,0,0,0,0,0,0,0,0,2.27,0,0,0,0,0,0,0,0,2.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.27,0,0,0,0,0,2.27,0,0,0,0,0,0,0,0,0,1.2,3,12,0 4,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.168,0,0.168,0,0,1.459,10,54,0 0,0,0,0,0.48,0,0,0,0,0,0,0.48,0,0,0,0,0,0,0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.084,9,123,0 0,0,0.37,0,1.13,0,0,0,0,0.75,0,1.13,0,0,0,0,0,0,2.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.37,0,0,0,0,0,0,0,0,0,0,1.264,4,43,0 0,0,1.98,0,0.99,0,0,0,0,0,0,1.98,0,0,0,0,0,0,4.95,0,0.99,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.99,0,0,1.98,0,0,0,0,0,0,0,0,0,0,0,0,1.222,4,22,0 0,0,0,0,0,0.5,0,0,0,0,0,0,0,0,0,0,0,0.5,2,0,0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.271,0,0,1.057,2,37,0 0,0,0,0,0,0,0,0,0,0.88,0,0,0,0,0,0,0,0,1.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.88,0,0.88,0,0,0,0,0,0,1.76,0,0,0,0.157,0,0.157,0,0,2,15,84,0 0,0,0.51,0,0.17,0,0,0,0,0,0,0.34,0,0,0,0,0,0,2.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.064,0,0.064,0,0,3.587,55,226,0 0,0,0.46,0,0,0,0,0,0,0,0,0.46,0,0,0,0,0,0,3.7,0,0,0,0,0.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.46,0,0,0,0,0.094,0,0.473,0,0,2.5,24,40,0 0,0,0.36,0,0.09,0,0,0,0,0,0,0,0.09,0,0,0,0,0.18,4.24,0,1.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.09,0.99,0,0,0.072,0.116,0,0.188,0,0,1.302,9,297,0 0,0,3.61,0,0,0.6,0,0,0,0,0,0,0,0,0,0,0,0,3.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.6,0,0,0,0,0,0,0.12,0,0.12,1.96,9,49,0 0,0.82,0,0,0,0,0,0,0,1.24,0,0,0,0,0,0,0,0,1.65,0,0,9.95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.149,0,0,0.07,0,0.562,7.416,43,356,0 
0,0,0,0,0,0,0,0,0,2.77,0,0,0,0,0,0,0,0,2.77,0,5.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.77,0,0,0,0,0,0,0.438,0,0,1.214,3,17,0 0,9.52,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0,0,0,0,0,0,1,1,10,0 0,0.27,0,0,0,0,0,0,0,0.27,0,0,0,0,0,0,0,0,1.94,0,0,8.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.23,0,0,0.048,0,0.482,5.802,43,412,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.63,0,0,0,0,0,0,0,0,0,1,1,12,0 0,0,0.71,0,0,0,0,0,0,0,0,0.71,0.71,0,0,0,0,0,0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.121,0,0.243,0,0,1,1,31,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.04,0,0,9.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.187,0,0,0.141,0,0.425,6.51,43,319,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.86,0,0,6.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.43,0,0,0,0,0,0,0,0,0.43,0,0,3.885,0,0,0.073,0,0.439,5.754,43,328,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.37,0,0,2.24,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.37,0,0,3.024,0.059,0,0.059,0,0.237,5.016,43,311,0 0,0,0.22,0,0,0,0,0,0,0,0,0.22,0,0,0,0,0,0,1.11,0,0.22,7.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.22,0,0,0,0,0,0,0,0,0,3.125,0,0,0.24,0,0.28,5.397,43,448,0 0,0,0,0,0,0.42,0,0,0,0,0,0.84,0,0,0,0,0,0,2.1,0,0,6.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.42,0,0,0,0,0,0,0,0,0,4.123,0,0,0.073,0,0.441,6.186,43,266,0 0,0,0,0,0,0,0,0,0,0,0,1.63,0,0,0,0,0,0,6.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.63,0,0,0,0,0,0,0,0,0,1.333,4,20,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0.95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.95,0,0,0,0,0,0,0,0,0,0,0,0,1.076,3,28,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.37,0,0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.116,0,1.419,5,44,0 0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,9,0 
0,0,3.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.7,0,0,0,0,0,0,0,0,0,1,1,9,0 0,0,0,0,0,0,0,0,0,0,0,0.76,0,0,0,0,0,0.76,1.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.133,0,0.266,0,0,1,1,23,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,12.19,0,4.87,0,0,9.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,7,0 0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,0,0,3.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,3.33,0,0,0,0,0,0,0,0,0,1.142,3,16,0 0,0,0,0,0,0,0,0,0,0,0.24,0.72,0.24,0,0,0,0.24,0,0.72,0.24,2.16,0,0.48,0,0,0,0,0,0,0,0,0,0,0,0.24,0,0,0,0,0,0,0,0,0,0,0.24,0,0,0,0.447,0,0.122,0.285,0,3.714,19,286,0 0,0,0.91,0,0.3,0,0,0,0,0,0,0.3,0.3,0,0,0,0,0,1.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.3,0.3,0,0,0,0,0,0,0,0,1.505,14,128,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.38,0,0,0,0,0,0,0,0,1.38,0,0,0,0,0,0.208,0,0,2.655,15,77,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,10,0 0,1.25,0,0,0,0,0,0,0,0,0,2.81,0,0,0,0,0,1.56,0.93,0,0.31,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0.164,0,0.109,0.054,0,2.193,18,136,0 0,0.22,0,0,0.22,0,0,0,0,0,0,1.36,0,0,0,0,0,1.59,0.91,0,0,0,0,0,0,0,0,0,0.22,0,0,0,0.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.326,0,0.285,0,0,2.043,31,141,0 0.51,0,0.51,0,1.53,0,0,0,0,0.51,0,0.51,0,0,0,0,0,0,1.02,0,0,0,0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.51,0.51,0,0,0.079,0,0,0,0,1.442,8,75,0 0,0,0.34,0,0.34,0,0,0,0,0,0,1.37,1.37,0,0,0,0,0.34,2.74,0,1.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0,0,0.232,0,0.406,0,0,1.425,6,77,0 0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.96,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.377,0,0,1,1,33,0 0,0,0,0,0,0,0,0,0,1.28,0,0,0,0,0,0,0,0,2.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.28,0,0,0,0,0,0,0,0,0,0,0,0,0.232,0,0,1.296,8,35,0 
0,0,2.12,0,1.06,0,0,0,0,0,0,2.12,0,0,0,0,0,0,5.31,0,1.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.06,0,0,2.12,0,0,0,0,0,0,0,0,0,0,0,0,1.238,4,26,0 0.26,0,0.26,0,0.52,0,0,0,0,0.26,0,0.26,0,0,0,0.26,0,0,1.31,0,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.038,0,0.038,1.541,12,202,0 0,0,0,0,0,0,0,0,0,0,0,0.69,0,0,0,0.69,0,0,2.79,0,0.69,0,0,0,2.09,0,0,0,0,1.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.69,1.39,0,0,0,0.221,0,0,0,0,2.184,12,83,0 0,0,0,0,0.54,0,0,0,0,0,0.54,1.09,0,0,0,0,0,0,3.82,0,0,0,0,0,2.18,2.18,0,0.54,0,1.09,0,0,0,0,0.54,0,0,0,0,0,0,0,0.54,0,0.54,0,0,0,0,0.087,0,0,0,0,3.533,34,159,0 0,0,0,0,0,0,0,0,0,1.25,0,1.25,0,0,0,0,0,0,2.5,0,1.25,0,0,0,1.25,1.25,0,0,0,0,0,0,0,0,0,0,0,0,1.25,0,0,0,0,0,1.25,1.25,0,0,0,0,0,0,0.204,0,2.45,15,49,0 0,0,0.55,0,0,0,0,0,0.55,0,0,0,0.55,0,0,0.55,0,0.55,0,0,0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.093,0,0.563,0,0,2.928,55,82,0 0,0,0,0,0,0,0,0,0,0,0,0,0.54,0,0,0.54,0.54,0,1.63,0,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.54,0,0.54,0,0,0,0,0,0.407,0,0,2.038,14,53,0 0,0,2.27,0,0,0,0,0,0,0,0,2.27,0,0,0,1.13,0,1.13,2.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.13,0,0,0,0.143,0,0,0,0,8.761,77,184,0 0,0,0,0,0,0,0,0,0,0,0,0.92,0,0,0,0,0,0.92,3.7,0,0.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.92,1.85,0,0,0,0.295,0,0,0,0,2.535,12,71,0 0,0,0,0,0,0,0,0,0,0,0,0.99,0,0,0,0,0,0,5.94,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.99,0,0,0,0.191,0,0,0.766,0,0,1,1,18,0 0.12,0,0.12,0,0,0,0,0,0,0,0.12,0.38,0,0,0,0,0.12,0,1.78,0,0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.12,0,0,0,0,0.25,0,0,0.12,0.63,0,0,0.018,0.074,0,0.055,0,0.018,3.08,63,419,0 0.11,0,0.33,0,0,0,0,0,0,0.11,0,0.45,0,0,0,0.11,0.11,0,2.81,0,0.9,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0,0,0,0,0.22,0,0,0.33,0.56,0,0,0.017,0.136,0,0.051,0,0.017,2.944,63,427,0 0,0.6,0,0,0.6,0,0,0,0,2.43,0,0.6,0,0,0,0,0,0,1.82,0,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.21,0,0,0,1.82,0,0,0,0.271,0,0,0,0.09,6.09,71,201,0 
0,0.6,0,0,0.6,0,0,0,0,2.43,0,0.6,0,0,0,0,0,0,1.82,0,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.21,0,0,0,1.82,0,0,0,0.271,0,0,0,0.09,6.09,71,201,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.63,5.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.625,3,13,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14.28,0,7.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.14,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0,0,0,0,2.34,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.33,0.33,0,0,0,0.06,0,0.302,0,0,1.562,14,100,0 0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.2,0,0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.1,0,0,0,0,0,0,0.633,0,0,1.386,11,61,0 0,0,0,0,0,0.77,0,0,0,0,0,0,0,0,0,0,0,0,7.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.156,0,0.312,0,0,1.08,2,27,0 0,0,0.26,0,0.52,0,0,0,0,0,0,0,0,0,0,0,0,0.52,1.56,0,1.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.52,0,0,0,0.26,0,0,0,0,0.26,0,0,0,0.753,0.113,0,0.037,0.037,0,1.797,20,169,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0.34,0,0,0.68,0,0.68,0,0,0.34,0.34,0,0,0,0,0.34,0,1.36,3.42,0,2.73,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0.34,0,0,0,0,0,0.048,0.048,0,1.405,15,97,0 0,0,0.59,0,0.29,0.59,0.59,0.29,0,0.29,0.29,0,0,0,0,0,0,0.89,3.58,0,1.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.29,0.29,0,0,0.088,0,0,0.044,0.132,0,1.592,15,121,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.66,0,0,0,0.675,0,0,0,0,1,1,4,0 0.06,0,0.32,0,0,0,0,0,0,0.06,0,0.06,0.06,0,0,0,0,0.06,2.79,0,1.1,0.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.06,0,0,0.06,0.19,0,0,0.317,0.035,0,0.093,0,0,1.11,9,261,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.69,7.69,0,0,0,0.775,0,0,0,0,1,1,5,0 0,0,0.6,0,0,0,0,0,0,0,0,0.43,0.08,0,0,0,0,0,3.02,0,1.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0,0,0.51,0,0,0,0,0.083,0,0.099,0,0,1.329,18,214,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.56,0,0,0,0,0,0,0,0,7.69,0,0,0,0.395,0,0,0,0,3,18,39,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.88,0,0,0,0,0,0,0,0,1,1,7,0 0,1.57,1.18,0,0,0,0,0,0,2.36,0,0.78,0,0,0,0,0,0,0.39,0,0,6.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.151,0.203,0,0.271,0,0.067,5.689,30,330,0 0,0,0,0,0,1.42,0,0,0,0,0,0,0,0,0,0,0,0,7.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.42,0,0,0,0,0.267,0,0,0,0,1,1,17,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,1.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.82,0,0.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.21,0,0,0,0,0.371,0,0,0,0,1.967,13,61,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,21.42,0,0,0,0,0,0,0,0,0,1.125,2,9,0 0,2.6,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.47,0,1.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.154,0,0.773,0,0,1,1,17,0 0,0,0.21,0,0,0.21,0,0,0,0,0,0,0,0,0,0,0,0,1.95,0,0.21,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.131,0.175,0,0,0,0,1,1,68,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,3,0 0,0,1.01,0,0,0,0,0,0,0,0,3.03,0,0,0,0,0,0,5.05,0,2.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.187,0,0,1.166,3,21,0 0,0,0.81,0,0,0,0,0,0,0,0,3.25,0,0,0,0,0,0,4.06,0,1.62,0,0,0,0.81,0,0,0,0,0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.81,0,0,0,0,0,0,0.134,0,0,1.366,5,41,0 0,0,1.81,0,0,0,0,0,0,0,0,0.9,0.9,0,0,0,0,0,4.54,0,2.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.164,0,0,1.391,8,32,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.71,0,1.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.85,0,0,0,0,0,0,0,0,0,2.125,5,17,0 1.39,0,2.09,0,0,0,0,0,0,0,0,6.29,0,0,0,0.69,0,0,4.19,0.69,0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.69,0,0,0,0,0,0,0,0,0,0,0,0,0.254,0,0,2,13,64,0 
0.97,0,0,0,0.48,0,0,0,0,0,0,0,0,0,0,0.48,0,0,2.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.48,0,0,0.48,0.97,0,0,0,0.15,0,0,0,0.075,3.367,21,165,0 0.15,0,0.63,0,0.07,0.11,0,0,0,0.03,0,0.07,0.37,0,0,0,0.03,0.03,1.16,0,0.22,0,0,0,0.03,0,0,0,0,0,0,0,0,0,0,0,0,0.07,0,0,0,0.03,0,0,0.22,0.03,0,0,0.014,0.05,0,0.014,0,0,1.111,7,389,0 0,0,0,0,0,0,0,0,0,0,0,3.03,0,0,0,0,0,0,3.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.473,0,0,1.687,5,27,0 0,0,0,0,0,0,0,0,0,0,0,3.44,0,0,0,0,0,0,3.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,5,26,0 0,0,2.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.214,4,17,0 4.34,0,0,0,0,0,0,0,0,0,0,4.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.75,4,14,0 0,0,4.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.645,0,0,0,0,1,1,7,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.02,0,0,0,0.344,0,0.344,0.172,0,2.166,11,39,0 0,0,1.66,0,0,0,0,0,0,0,0,1.66,0,0,0,0,0,0.83,2.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.151,0,0,1.518,8,41,0 0,1.08,0,0,0,0,0,0,0,0,0,1.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.181,0,0,0,0,1.612,11,50,0 0,0,0,0,0,0,0,0,0,0,0,1.61,0,0,0,0,0,0,0,0,1.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.61,0,0,0,0,0,0,0,0,0,0,0,0,0.266,0,0.533,0,0,4.5,31,63,0 0,0,0,0,0,0,0,0,0,0.38,0,0,0,0,0,0.19,0,0.19,0,0,0,0,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0.58,0,0,0,0,0,0,0,0,22.05,0,0,0.135,0.339,0.067,0,0,0,4.13,81,285,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.17,0,3.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.58,1.58,0,0,0,0,0,0,0,0,1,1,12,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.934,0,0,0,0,3,9,18,0 
0,0.36,0.36,0,0,0.36,0,0.73,0,0.36,0.36,1.46,0,0,0,0.36,0,2.56,2.93,0,0.36,0,0,0.73,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.36,0,0,0,0,0,0,0,0,0,0.123,0,2.854,68,157,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.68,0,0,0,0,0.112,0,0,0.903,0,2.285,14,80,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.33,0,0,0,0,0,0,0,0,0,1,1,4,0 0,0,1.72,0,0,0,0,0,0,0,0,2.58,0,0,0,0,0,0,2.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.86,0,0.86,2.58,0,0,0.86,0,0,0,0,0.303,0,0.91,0,0,2.171,9,76,0 0,0,0,0,0,0,0,0,0,0,0,1.78,0,0,0,0,0,0,3.57,0,1.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.78,0,0,1.78,0,0,0,0,0,0,1.194,0,0,2.23,8,29,0 0,0,0,0.31,0.94,0,0,0.31,0,0.63,0,1.26,0,0,0,0,0,0,0.94,0,1.26,0,0,0,0,0,0,0.63,0,0,0,0,0,0,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0.037,0,0.074,0,0,3.904,39,246,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.571,0,0,1,1,4,0 0,0,0,0,0,0,0,0,0,0.79,0,0.79,0,0,0,0,0,0.79,1.58,0,2.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.79,0,0,0,0,0,0,0,1.58,0,0,0,0.135,0.405,0,0.27,0,0,1.608,13,37,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.225,0,0,1,1,4,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.32,0,0,0,0,0,0,0.763,0,2.181,6,24,0 0,0.15,0.3,0,0.15,0,0,0,0,0,0,1.38,0.15,0,0,0,0.15,0,2.6,0,1.68,0,0.15,0,0,0,0,0,0,0,0,0,0.46,0,0,0,0.3,0,0,0,0,0,0,0,0.61,2.91,0,0,0.023,0.093,0,0.069,0,0,2.05,23,326,0 0.32,0.32,0.32,0,0,0,0,0,0,0,0,1.29,0.32,0,0,0,0,0,2.92,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.32,0,0.32,0,0,0,0,0,1.29,0,0,0,0.058,0.174,0,0.291,0,0,1.833,15,121,0 0,0,1.18,0,0.16,0,0,0,0,0.16,0,0.16,0.16,0,0,0,0.16,0,2.88,0,0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.33,0,0,0,0.031,0.374,0,0.561,0,0,1.462,10,136,0 
0,0,1.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.94,0,1.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.656,0,0.656,0,0,1.488,5,67,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.571,5,11,0 0.13,0,0.13,0,0.27,0.27,0,0,0,0,0,0.41,0.27,0,0,0,0,0,1.25,0,0.27,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.13,0,0,0,0,0,0.27,0.13,0,0,0,0.294,0,0.514,0,0,1.409,17,172,0 0,0.16,0.49,0,0,0.16,0,0,0,0.49,0,0.16,0.32,0,0,0,0,0,1.3,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0,0,0,0,0,0,0,0.16,0.16,0,0,0,0.119,0,0.149,0,0,2.178,107,244,0 0,3.36,1.92,0,0,0,0,0,0,4.32,0,1.44,0,0,0,0,0,0,0.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.695,0,0.347,0,0,6.137,107,178,0 0,0,0.21,0,0,0,0,0,0,0.21,0.21,0,0.42,0,0,0,0,0,0,0,0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.5,0,0,0,0.058,0,0,0,0,1.203,8,195,0 0,0,0.23,0,0,0.23,0,0,0,0,0,0,0,0,0,0,0,0,0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.23,0,0,0,0.23,1.4,0,0,0,0.064,0,0.161,0,0,1.065,7,146,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.22,4.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.22,2.22,0,0,0,0,0,0,0,0,1.75,5,14,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.01,0,1.01,5.05,0,1.01,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10,55,60,0 0,0,0,0,0,0,0,0,0.58,0,0,1.16,0,0,0,0,0,0.58,1.75,0,1.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.58,0,0,0,0,0,0,0.58,0,0,0,0.282,0,0.376,0,0,1.702,16,80,0 0.99,0,0.99,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.98,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.98,2.97,0,0,0,0,0,0.186,0,0,1.937,15,62,0 0,0,0,0,0,0,0,0,0,0.74,0,0,0,0,0,0.74,0,0,1.49,0,1.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.74,4.47,0,0,0,0.124,0,0,0,0,1.966,15,59,0 0.71,0,0.71,0,0,0,0,0,0,0.71,0,1.43,0,0,0,1.43,0,0,1.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.43,0,0,0,0,0,0,0,0,1.032,2,32,0 0,0,0,0,0,0,0,0,0,1.19,0,0,0,0,0,0,0,0,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.19,0,0,0,0,0,0,0,0,9.52,0,0,0,0,0,0,0,0,2.074,15,56,0 
0,0,1.01,0,0,1.01,0,0,0,1.01,0,0,0,0,0,0,0,0,1.01,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.01,0,0,0,0,0,1.01,3.03,0,0,0,0,0,0.475,0,0,1.576,15,41,0 0,0,0,0,0,0,0,0,0,0.33,0,0,0.33,0,0,0,0,0,2,0,0.33,0,0,0.33,0,0,0,0,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0,0,0,0,4.33,0,0,0,0.112,0,0.224,0.224,0,1.542,15,108,0 0,1.62,0.54,0,0,0,0,0,0,0.54,0,1.62,0,0,0,0,0,0,1.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.08,0,0,0,0.484,0,0,0,0,1.769,27,69,0 0,0,0,0,0,0,0,0,0,11.11,0,0,0,0,0,0,0,0,5.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,2,0 0.59,0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.19,0,0.59,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.59,1.19,0,0,0,0.212,0,0.212,0,0.106,1.7,11,68,0 0,0.32,0.96,0,0,0,0,0,0,0.64,0,1.28,0,0,0,0,0,0,3.52,0,1.6,0,0,0,0.96,1.6,0,0,0,0.64,0,0,0,0,0,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0.064,0,0.128,0,0,1.653,5,86,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.81,0,0,2.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.81,0,0,0,0,0.81,0,0,0.81,0.81,0,0,0,0,0,0,0,0,1.684,5,64,0 0,0,0,0,0,0,0,0,0,0,0,1.23,0,0,0,0,0,0,1.85,0,0.61,0,0,0.61,0,0,0,0,0,0,0,0,0,0,0,0,0.61,0,0,0,0,1.23,0,0,1.23,1.85,0,0,0,0.098,0,0.098,0,0,1.627,15,70,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,20,0,0,0,0,0,0,0,0,0,1,1,5,0 0,0,0.41,0,0,0,0,0,0,0.41,0,1.25,0,0,0,0,0,0,2.91,0,0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.184,0,0,0,0,1.538,10,40,0 0.4,0,0.81,0,0,0,0,0,0,0,0,0,0,0,0,0.4,0,0.81,1.22,0,0.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.22,0,0,0,0,0.223,0,0,0,0.055,4.75,70,266,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.38,0,0.38,2.31,0,0.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.38,0,0,0,0,0.216,0,0.162,0,0.054,5.07,70,289,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0.27,0.55,0,0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.55,0.27,0,0,0,0.122,0.081,0,0,0.04,3.891,70,323,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.03,0,3.03,3.03,0,3.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.153,55,67,0 0,0,1.13,0,0.37,0,0,0,0,0,0,0,0,0,0,0.37,0,0.37,1.13,0,0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.145,0,0.436,0,0,1.792,55,147,0 0,0,2.06,0,0,0,0,0,0,0,0,1.03,0,0,0,0,0,0,4.12,0,1.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.03,0,0,0,0,0,0,0,0,1,1,16,0 0,0.31,0.31,0,0,0,0,0,0,0.31,0,0,0.31,0,0,0.63,0,0.31,4.73,0,0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.63,0,0,0,0,0.228,0,0.045,0,0.045,8.117,97,414,0 0,0,0.4,0,0,0.4,0,0,0,0,0,0,0,0,0,0.4,0,0.4,0.4,0,0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.81,0,0,0,0,0,0,0,0.4,0.4,0,0,0,0.323,0.053,0,0,0.053,5.263,70,300,0 0,0,0,0,0,0.44,0,0,0,0,0,0,0,0,0,0.44,0,0.44,0.44,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.44,0,0,0,0,0,0,0,0.44,0.44,0,0,0,0.175,0.058,0,0,0.058,8.478,122,390,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.41,0,0.41,1.23,0,0.41,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.82,0,0,0,0,0.229,0,0.114,0,0.057,5.196,70,265,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.72,0,0.72,2.18,0,0.72,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.72,0,0,0,0,0.298,0,0.198,0,0.099,4,59,128,0 0,0,0.59,0,0,0,0,0,0,0.29,0,0.59,0,0,0,0.29,0,0.29,1.47,0,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.29,0,0,0,0,0,0,0,0,0,0,0,0.039,0.235,0,0.471,0,0.039,3.659,70,333,0 0,0.13,0.66,0,0,0,0,0,0,0.13,0,0.13,0,0,0,0.26,0,0.13,2.65,0,0.39,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.26,0,0,0,0,0,0,0,0.26,0,0,0,0.019,0.367,0,0.193,0,0.038,3.122,70,559,0 0,0,0.92,0,0,0,0,0,0,0,0,0,0.61,0,0,0.3,0,0.3,0,0,0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.044,0.222,0,0.178,0,0.044,4.757,70,314,0 0,0,0.74,0,0,0,0,0,0,0,0,0.24,0,0,0,0.49,0,0.49,2.71,0,1.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.24,0,0,0,0,0,0,0,0.49,0,0,0,0.036,0.147,0,0.147,0,0,2.587,55,282,0 0,0,0.74,0,0,0,0,0,0,0,0,0.24,0,0,0,0.49,0,0.49,2.71,0,1.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.24,0,0,0,0,0,0,0,0.49,0,0,0,0.036,0.147,0,0.147,0,0,2.587,55,282,0 
0,0,0,0,0.43,0,0,0,0,0,0,0,0,0,0,0,0,0.43,2.19,0,0.87,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.87,0.87,0.43,0,0,0,0.079,0,0,0,0,1.292,5,53,0 0,0,0.74,0,0,0,0,0,0,0,0,0.24,0,0,0,0.49,0,0.49,2.71,0,1.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.24,0,0,0,0,0,0,0,0.49,0,0,0,0.036,0.147,0,0.147,0,0,2.587,55,282,0 0,0,0,0,0,0,0,0,0,0,0,0.61,0.61,0,0,0.61,0,0.3,3.09,0,0.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.61,0,0,0,0,0.179,0,0.448,0,0,5.277,70,285,0 0,0.28,0.42,0,0,0,0,0,0,0,0,0.28,0,0,0,0.14,0,0.14,0.14,0,0.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.14,0.14,0.14,0,0,0,0,0.132,0,0.022,0,0,2.621,70,422,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.44,0,0.44,0,0,0.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.44,0,0,0,0,0,0,0,0,0.88,0,0,0,0.178,0.059,0,0,0.059,7.046,70,303,0 0,0,0.08,0,0,0.17,0,0,0,0,0,0.17,0,0,0,0.08,0,0.08,0.17,0,0.25,0,0,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0.08,0,0,0,0.08,0,0,0,0,0.59,0,0,0,0.075,0,0.012,0.012,0,2.057,70,605,0 0,0,0.68,0,0.68,0,0,0,0,0,0,0,0.34,0,0,0,0,0.34,1.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.68,0,0,0,0,0.173,0,0.463,0,0,1.538,11,80,0 0,0,0,0,0,0,0,0,0,2.11,0,0,0.7,0,0,0.7,0,0.7,2.11,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.7,0,0.7,0.7,0,0,0,0,0,0.336,0,0,2.97,68,101,0 0,0,0,0,0,0,0,0,0,0,0,1.28,0,0,0,0,0,0,3.84,0,0,0,0,0,1.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.28,1.28,0,0,0,0,0,0,0,0,0,1.428,2,10,0 0,0,0.62,0,0,0,0,0,0,0,0,0.31,0,0,0,0,0,0,3.41,0,0,0,0,0,0.31,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0,0,0.62,0,0,0,1.24,0,0,0,0,0.112,0,0.225,0,0,1.866,4,28,0 0.3,0,0.3,0,0,0,0,0,0,0,0,0.3,0.6,0,0,0,0,0,3.03,0,1.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.6,0,0,0,1.21,0,0,0,0.055,0.11,0,0.055,0,0,1.947,7,74,0 0.12,0,0.12,0,0,0.25,0,0,0,0,0,0.12,0.25,0,0,0.12,0,0,2.19,0,0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.25,0,0,0.64,0.25,0.12,0,0,0,0.093,0,0.023,0,0,1.247,5,131,0 0,0,0,0,0,0,0,0,0.64,0,0,0,0,0,0,0,0,0,1.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0,0,0,0.116,0,0.232,0,0,1.551,6,45,0 
0,0,0,0,0,0,0,0,0.74,0,0,0,0,0,0,0,0,0,2.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.74,0,0,0,0,0,0.74,0,0,0,0,0.276,0,0.552,0,0,2.666,16,72,0 0,0,1.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.12,0,2.12,0,0,0,0,0,0,0,0,1.06,0,0,0,0,0,0,0,0,0,0,1.06,0,0,0,0,0,0,0,0,0,0,0,0,0,13.333,73,160,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.77,2.77,0,0,0,0,0,0,0,0,0,1,1,8,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.12,3.12,0,0,0,0,0,0,0.467,0,0,1,1,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.44,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.44,0,0,0,0,0,0,0,0,0,1.6,4,8,0 0.25,0,0.51,0,0,0.25,0,0,0,0.12,0,0,0.25,0,0,0.25,0.25,0.38,1.78,0,0.51,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.38,4.34,0,0,0.019,0.019,0,0,0.038,0,1.642,17,207,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.46,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.23,0,0,0,0,0,0,19.131,0,0,13.25,48,53,0 0.16,0.16,0.16,0,0.83,0.16,0,0.16,0,0,0,0.5,0.16,0,0,0,0,0,2.34,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.16,0.33,0.16,0,0,0.087,0.058,0,0,0,0,1.901,16,135,0 0.95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.9,0,0.95,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.95,0.95,0,0,0,0.144,0,5.78,0,0,2.13,15,49,0 0,0,0,0,0,1.2,0,0,0,0,0,0,0,0,0,0,0,0,4.81,0,3.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.2,0,0,0,0,0,0,0,0,1.3,3,13,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.66,0,0,0,0,0,0,32.478,0,0,1.666,3,5,0 0,0,1.2,0,0,0,0,0,0,0,0,1.2,0,0,0,0,0,0,6.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.4,0,1.2,0,0,0,1.2,1.2,0,0,0.197,0,0,7.707,0,0,3.4,15,51,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.92,0,1.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.92,0,0,0,0,0,0,0,0,5.76,0,0,0,0.336,0,0,0,0,2.352,15,40,0 0,0,2.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.05,0,1.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.17,0,1.17,0,0,0,1.17,1.17,0,0,0,0,0,0,0,0,3,15,45,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.704,0,0,0,0,1.75,3,7,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.33,0,0,0,0,0,1.66,0,0,0,0,0,0,0,0,0,0,0,1.66,0,0,0,0,0,0,0,0,5,0,0,0,0.554,0,0,0,0,2.294,15,39,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.45,0,0,0,0,0,1.81,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.5,2,3,0 0,0,0.44,0,0,0.44,0,0,0,0,0,0,0.44,0,0,0,0,0,2.67,0,0.89,0,0,0,0.89,0,0,0,0,0,0,0,0,0,0,0.44,0,0,0,0,0.44,0,0,0,0.44,0,0,0,0,0.074,0,0.149,0,0,1.115,2,29,0 1.42,0,0,0,0,0,0,0,0,0,0,4.28,0,0,0,0,0,0,1.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.28,0,0,0,0,0,0,0,0,0.35,0,0.175,0,0,1.826,7,42,0 0.76,0,0.76,0,0,0.38,0,0,0,0,0,1.15,0.38,0,0,0,0,0,2.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.38,0,2.69,0,0,0,0.38,0.38,0,0,0,0.18,0,0.54,0,0,2.285,15,144,0 0.26,0,0,0,0,0.26,0,0,0,0,0,0,0.26,0,0,0,0,0,2.66,0,0.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.53,0,0,0.26,0.53,0.26,0,0,0,0.046,0,0,0,0,1.222,5,77,0 0,0,0,0,0,0,0,0,0,0,0,2.85,0,0,0,0,0,0,8.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.85,0,0,0,0,0,0,0,0,0,1,1,7,0 0,0,0,0,0,0,0,0,0,0,0,0.94,0,0,0,0,0,0,2.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.94,0,0,0,1.88,0,0,0,0.94,8.49,0,0,0,0.267,0,0,0,0,2.241,15,65,0 0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,6,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.684,0,0.684,0,0,1,1,1,0 0,0,0.37,0,0,0,0,0,0,0,0,0.37,0.37,0,0,0,0,0,3.33,0,0.37,0,0,0.37,1.48,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.37,0,0,0,0,0.067,0,0.135,0.135,0,1.437,4,23,0 0,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0.57,0,0,3.17,0,0.28,0,0,0,0.57,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.86,0,0,0,0,0.196,0,0.049,0.147,0,1.1,2,55,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.25,0,4.16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.359,0.359,0,0,0,0,1,1,1,0 
1.88,0,0,0,0,0,0,0,0,0,0,0.31,0,0,0,0,0,0,0.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.168,0,0.112,0,0.056,2.933,23,311,0 0,0.11,0.11,0,0.34,0,0,0,0.11,0.69,0.34,0.23,0.11,0,0,0,0,0.11,0.81,0,0.46,0,0.34,0,0,0,0,0,0,0,0,0,0,0,0,0.11,0.23,0,0.11,0,0,0,0,0,0,0.92,0,0,0.017,0.153,0,0.017,0.068,0.017,3.441,35,499,0 0.08,0.08,0.61,0,0,0,0,0,0,0.43,0,0,0.08,0,0,0,0,0.08,0.87,0,0.08,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.26,0,0,0,0,0.08,0,0,0,0.78,0,0,0.027,0.208,0.013,0.027,0,0,4.696,124,1315,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.47,0,0,0,1.47,0,0,0,0.335,0,0,0,0.167,2.652,11,61,0 0,0,0,0,0,0,0,0,0,0,0,0,0.8,0,0,0,0,0,4.8,0,0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.8,0.8,0,0,0,0,0,0,0,0,1,1,18,0 0,0.62,0.62,0,0,0,0,0,0,1.24,0,0,0,0,0,0.62,0,0.62,0,0,3.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.24,0,1.86,0,0,0,0,0,0,1.24,0,0,0,0.384,0,0.288,0,0.096,6,116,294,0 0.39,0,0.98,0,0,0.19,0,0,0,0,0,0.58,0.19,0,0,0.78,0,0.39,5.09,0,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.19,0,0,0.39,0,0.19,0,0,0.239,0,0.444,0,0,1.626,8,122,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.31,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.65,0,0,0,0.127,0,0,0,0,0,1.137,3,33,0 0.35,0,0.71,0,0,0,0,0,0,0.35,0,0.71,0,0,0,0,0,0,7.47,0,1.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.35,0,0,0,0,0.067,0,0,0,0,1,1,40,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.29,0,0,4.38,0,0.58,0,0,0,0,0,0,0,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.58,0,0,0,0.055,0.167,0,0,0,0,1.122,3,55,0 0,0,0,0,0,0,0,0,0,0,0,1.25,0,0,0,0,0,0,2.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.62,0,0,0,0,0.62,0.62,0,0,0,0,0.356,0,0.594,0,0,2.125,16,34,0 0,0,1.09,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.29,0,0,0,0.191,0,0,0,0,3,15,51,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,0,0,0,0,0,0,0,0,3.176,15,54,0 
0,0,0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0,0,4.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.36,0,0,0,0,0,1.36,1.36,0,0,0,0,0,0.234,0,0,2.076,15,27,0 0,0,0,0,0,0,0,0,0,0,0,0.95,0,0,0,0,0,0,3.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.95,0,0,0,0,0,0,0,0,2.85,0,0,0,0,0,0.175,0,0,3.125,15,50,0 0,0,0,0,0.35,0.35,0,0,0,0,0,0,0,0,0,0,0,0,1.79,0,0.71,0,0,0,0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.35,0,0,0,0.064,0,0,0,0,1.27,8,61,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.17,0,0,0,0,0,3.17,0,0,0,0,0,0,0,0,0,0,0,0,0,1.58,0,0,0,0,0,1.58,1.58,0,0,0,0,0,0,0,0,2.071,14,29,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.61,0,1.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.61,0,0,0,0,0,0,0,0,4.83,0,0,0,0,0,0,0,0,3.117,15,53,0 0,0,0,0,0,0.74,0,0,0,0,0,0.74,1.49,0,0,0,0,0,1.49,0,1.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.74,0,0,0,0,0,0,0,0,0,1.36,3,34,0 0.78,0,0,0,0,0,0,0,0,0,0,0.78,0,0,0,0,0,0,2.36,0,0.78,0,0,0,0,0.78,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.78,0,0,0,0,0,0,0,0,0,0,1.875,8,30,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.84,0,1.28,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.28,0,0,0,0,0,0,0,1.28,0,0,0,0,0,0,1.548,0,0,3.222,14,58,0 0,0,0,0,0,0,0,0,0,0,0,2.38,0,0,0,0,0,0,2.38,0,2.38,0,0,0,2.38,2.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.404,0,0.809,0.809,0,3,11,27,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.08,0,0,0,0,0,0,0,0,6.25,0,0,0,0,0,0,0,0,3.125,15,50,0 0,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.22,0,0,0,0,0,0.64,0,0,0,0,0,0,0,0,0,0,0,0.64,0,0.64,0,0,0,0,0,1.29,2.58,0,0,0,0.348,0,1.16,0,0,3.121,15,103,0 0,0,0,0,0,0,0,0,0,0,0,2.22,0,0,0,0,0,0,0.74,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.74,0,0,0,0,0,0,0,0,2.22,0,0,0,0,0,0,0.277,0,2.72,15,68,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.01,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.01,0,0,0,0,0,0,0,0,5.05,0,0,0,0,0,0,0,0,3.043,15,70,0 0.23,0,0,0,0,0.11,0,0,0,0.11,0,0.11,0.11,0,0,0,0,0.23,2.15,0,0.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.23,0,0,0,0,0,0,0.71,0.11,0,0,0,0.126,0,0.021,0,0,1.198,5,145,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.13,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.85,0,0.42,0,0,0,0,0,0.85,3.84,0,0,0,0,0,0,0,0,2.769,15,180,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.63,0,0.31,0,0,0,0,0,0.95,2.22,0,0,0,0,0,0,0,0,2.603,16,164,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.45,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.81,0,0,0,0,0,0,0,0,2.45,0,0,0,0.306,0,0,0.46,0.153,3.173,15,73,0 0,0,0,0,0,0,0,0,0,0,0,0,1.08,0,0,0,0,0,2.17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.17,0,0,0,0,0,0,0,0,7.6,0,0,0,0,0,0,0,0,3.387,15,105,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.79,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.66,0,0,0,0,0,0,1.66,0,5,0,0,0,0,0,0,0,0,3.125,15,50,0 0.88,0,0,0,0,0,0,0,0,1.76,0,0,0,0,0,0,0,0,1.76,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.88,0,0,0,0,0.88,0,0,0.88,1.76,0,0,0,0.125,0,0.125,0,0,1.681,5,37,0 0,0,0,0,0,0,0,0,0,0,0,0.86,0.86,0,0,0,0,0,2.58,0,0.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.86,0,0,0,0,0,0,0,0.86,0,0,0,0,0,0,0.152,0,0,2.166,14,52,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.91,0,0,0,0,0,0,0,0,0,0.22,0,0,0,0,0,0,0,0.45,0,0,0,0,0,0,0,0,16.7,0,0,0,0.066,0,0,0,0,2.284,19,329,0 0,0.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.39,0.98,0.19,0.98,0,0,0,0.19,0,0,0,0,0.19,0,0,0,0,0,0,0.39,0,0,0,0,0,0,0,0.19,15.35,0,0,0.086,0,0,0.028,0,0,3.377,15,537,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,1,1,9,0 0,2.01,0,0,0,0,0,0,0,2.68,0,0.67,0,0,0,0,0,0,4.02,0,3.35,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.67,0,0,0,0,0,0,0,0,2.01,0,0,0.112,0.112,0,0.112,0,0,2.484,15,82,0 0.09,0,0.48,0,0,0.29,0,0,0,0.09,0,0,0.19,0,0,0.09,0.19,0.58,1.35,0,0.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.38,3.1,0,0,0.015,0.03,0,0,0.046,0,1.722,17,267,0 0,0,0,0,0,0,0,0,0,0,0,0.63,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.63,0,0,0,0,0,0,0,0.63,13.37,0,0,0,0.158,0,0,0.079,0.079,1.719,15,98,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.13,0,0,0,0,0,0,0,0,0,1.13,0,0,0,0.136,0,0,0,0.409,1.837,12,68,0 0.42,0,0.42,0,0.21,0,0,0,0,0,0,0.21,0,0,0,0,0,0,1.91,0,0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.21,0,0,0,0.04,0.04,0,0,0,0,2,3,14,0 0,0,0,0,0,0.37,0,0,0,0,0,0,0,0,0,0,0,0,3.73,0,0.37,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.11,0,0,0,0,0.066,0,0.066,0,0,1.555,4,14,0 0,0,0,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0,0,4.29,0,2.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.66,0,0,0,0,0.058,0,0,0,0,1.153,3,15,0 0,0,0.4,0,0.2,0.1,0,0,0,0,0,0.1,0.2,0,0,0,0,0,1.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.2,0,0,0,0,0.055,0,0.018,0,0,1.666,4,25,0 0,0,0.36,0,0.12,0.24,0,0,0,0.24,0,0,0.24,0,0,0,0,0,1.58,0,0.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.12,0,0.12,0.24,0,0,0,0.067,0.022,0,0,0,1.433,12,76,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.72,0,0,0,0,0,0,0,0,0,0.123,1.75,4,21,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.33,0,1.86,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.46,0,0,0,0,0.46,0,0.46,0,0,0,0.082,0,0,0,0,1.117,3,38,0 0,0,0,0,0,0,0,0,0,5.26,0,0,0,0,0,0,0,0,5.26,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.666,3,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.26,5.06,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.26,2.53,0,0,0,0,0.263,0,0,0,0,2,5,32,0 0,0,0,0,0,0,0,0,0,0,0,1.92,0,0,0,1.92,0,0,3.84,0,1.92,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.92,0,0,0,1.92,1.92,0,0,0,0,0,0,0,0,1.611,5,29,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,9,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,5.55,0,0,0,0,0,0,0,0,0,0,0,0,1.375,4,11,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.33,0,0,0,0,0,0,0,0,0,5.333,18,32,0 
0,0,0,0,0,0,0,0,0,0,0,1.25,0,0,0,0,0,0,1.25,0,0,0,0,0,0,0,0,0,1.25,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.746,0,0,0,0,1.687,4,27,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,6,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.03,0,0,0,0,3.03,0,0,3.03,3.03,0,0,0,0,0,0,0,0,1.47,5,25,0 0,0,0,0,0,0,0,0,0,3.7,0,0,0,0,0,0,0,0,3.7,0,7.4,0,0,0,0,0,0,0,3.7,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,4,8,0 0,0,0.42,0,0,0,0,0,0,0,0,0,0.21,0,0,0.21,0,0.21,2.14,0,0.42,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.21,0.21,0,0,0.42,0.21,0,0,0,0.078,0.039,0.039,0,0,1.292,6,106,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8.33,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,2,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.272,4,25,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.16,0,0,0,0,0,0,0,0,0,0,1.666,3,10,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.93,0,0,0.93,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.93,0,0,0,0,0,0,0.93,0.93,0.93,0,0,0,0.163,0,0,0,0,1.911,15,65,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.8,0,0,0,0,0,0,0.8,0.8,0.8,0,0,0,0.149,0,0,0,0,1.9,15,57,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.25,2,5,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.333,5,7,0 0,0,0.97,0,0,0,0,0,0,0.97,0,0,0,0,0,0,0,0,2.91,0,0.97,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.97,0,0,0,0,0,0,0,0,0,0,1.714,6,12,0 0,0,0,0,0,0.8,0,0,0.8,0,0,0,0,0,0,0,0,0.8,1.6,0,0,0,0,0,0,0,0,0,0.8,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.8,0,0,0.8,0,0,0.294,0,0,0,0,1.166,2,14,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,3,4,0 
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.43,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.47,0.47,0,0,0,0.252,0.168,0.168,0,0,1.228,5,43,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.66,0,0,0,0.334,0,0,0,0,3.333,18,60,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16.66,0,0,0,0,0,0,0,0,0,0,2,3,4,0 0.33,0,0,0,0,0,0,0,0,0,0,0.33,0,0,0,0,0,0,0.66,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.33,0,0,0.99,0.33,0,0,0,0.175,0.058,0.116,0,0,1.271,5,75,0 0.17,0,0.68,0,0.34,0.34,0,0,0,0,0,0,0,0,0,0,0,0.34,4.8,0,1.88,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.51,0.17,0,0,0,0.032,0,0.065,0,0,1.189,5,69,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.77,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.77,0,0,0,0,0,0,0,0,1,1,10,0 0.69,0,0,0,0.69,0,0,0,0,0,0,0,0,0,0,0,0,0,1.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.69,0,0,1.38,0,0,1.38,1.38,0,0,0,0.302,0,0,0,0.1,2.447,15,93,0 0.16,0,0.32,0,0.1,0.1,0,0,0,0,0,0.21,0.96,0,0,0.05,0.05,0,0.64,0,0,0,0.1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.1,0,0,0,0.025,0.017,0.008,0,0.008,0.008,1.318,12,244,0 0,0,0.55,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.11,4.45,0,0.83,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0.27,0,0,0,0.052,0,0,0,0,1.2,4,54,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.12,0,0,0,0,0,0,0,0,1,1,7,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,10.63,0,2.12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2.12,2.12,0,0,0.374,0,0,0,0,0,1,1,7,0 0,0,1.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.06,0,2.04,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.02,0,0,0,0,1.02,0,0,0,0.55,0,0,0,0,1.333,5,28,0 0.54,0,0.54,0,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,4.39,0,1.64,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.54,0,1.09,0,0,0,0,0.097,0,0,0,1.512,11,59,0 0,0,0.37,0,0.28,0.28,0,0,0.09,0,0,0.18,0.28,0,0,0,0,0.46,2.71,0,0.93,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.56,0.09,0.09,0,0,0.017,0,0,0,0,1.024,3,128,0 
0,0,0,0,0,0,0,0,0,0,0.6,0,0,0,0,0,0,0,1.82,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.6,1.21,0,0,0.112,0,0,0,0,0,1.617,11,55,0 0,0,0.45,0,0.45,0,0,0,0,0,0,0,0.22,0,0,0,0,0,1.35,0,0.22,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.22,0.22,0.22,0,0,0,0,0,0,0,0,1.13,3,78,0 0.14,0,0.14,0,0,0.56,0,0,0,0,0,0.14,0,0,0,0,0,0.28,2.41,0,0.14,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.42,0,0,0,0.7,0.14,0,0,0,0.053,0,0,0,0,1.136,5,108,0 0.67,0,0,0,0.67,0,0,0,0,0,0,0,0,0,0,0,0,0,1.34,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.67,0,0,1.34,0,0,2.01,1.34,0,0,0,0.29,0,0,0,0.096,2.432,15,90,0 0.25,0,0.5,0,0.25,0,0,0,0,0,0,0.5,0,0,0,0,0,0.75,6.28,0,0.75,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.5,0.25,0,0,0.048,0,0,0,0,0,1,1,42,0 0,0,0,0,0,0,0,0,0,0,0,0.5,1.01,0,0,0.5,0,0.5,2.53,0,0.5,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.5,0,0,0.5,0.5,0,0,0,0.087,0,0,0.087,0,1.225,3,38,0 0,0,0.46,0,0.23,0.23,0,0,0,0,0,0,0,0,0,0.23,0,0,1.63,0,0.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.46,0,0,0,0,0.23,0,0,0,0.082,0,0.082,0,0,1.256,5,98,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.4,0,0,0,0.254,0,0,0,0,1,1,13,0 0,0,0.18,0,0.18,0.18,0,0,0,0,0,0,0,0,0,0,0,0,2.06,0,0.56,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.37,0.56,0.37,0,0,0.033,0.033,0,0.099,0,0,1.489,11,137,0 0.29,0,0.29,0,0,0,0,0,0,0.29,0,0.29,0.59,0,0,0.29,0,0,3.86,0,0.29,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.29,0,0,0,0.107,0,0,0,0,1.22,6,61,0 0,0,0,0,0,0,0,0,0,1.38,0,0,0,0,0,0,0,1.38,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.38,2.77,0,0,0,0.213,0,0,0,0,1.72,11,43,0 0,0,0,0,0,0,0,0,0,0,0,0.37,0.37,0,0,0,0,0,1.49,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.37,0,0,0,0,0.37,0,0,0,1.11,0.37,0,0,0,0.131,0,0,0,0,1.488,5,64,0 0,0,1.2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.61,0,2.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.2,0,0,0,0,0,0,0,0,1.2,3,24,0 
0,0,0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.8,0,0,0,0,0.4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.4,0.4,0,0,0,0,0.145,0,0,0,1.372,5,70,0 0.27,0.05,0.1,0,0,0,0,0,0,0,0,0.48,0,0,0,0,0,0.1,0.97,0,0.1,3.47,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.27,0,0,0,0,0,0,0,0,0.76,0,0,0.607,0.064,0.036,0.055,0,0.202,3.766,43,1789,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0,4.76,0,0,0,0,0,0,0,0,1.571,5,11,0 0,0,0,0,0,0.51,0,0,0,0,0,0,0,0,0,0,0,0.51,3.06,0,1.02,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.51,0,0,0,0.091,0,0.091,0,0,1.586,4,46,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.89,0.89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.89,0,0,0,0,0,0,0,0,1.266,3,19,0 0,0,1.23,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.61,1.85,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.61,0.61,0,0,1.23,0.61,0,0,0,0,0.406,0,0,0,1.666,13,70,0 0,0,0.45,0,0,0.22,0,0,0,0,0,0,0.45,0,0,0,0,0,1.83,0,0.45,0,0,0,0,0,0,0,0.22,0,0,0,0,0,0,0,0,0,0,0,0.68,0,0,0.45,0.22,0.22,0,0,0,0.082,0,0.041,0,0,1.5,7,123,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,9.52,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.76,0,0,0,0.625,0,0,0,0,1.375,4,11,0 0,0,0,0,0.36,0,0,0,0,0,0,3.3,0,0,0,0,0.36,0.36,1.47,0,0.36,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.47,0,0,0,0,0,0,0,0,0,0,0.112,0,0,0,0.056,1.793,21,174,0 0,0,0,0,0,0,0,0,0,0,0,0.71,0.71,0,0,0,0,0,0.71,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.71,0,0,0,0.125,0,0,0.125,0,1.272,4,28,0 0,0,3.03,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.03,3.03,0,0,0,0,0,0,0,0,1.111,2,10,0 0,0,0,0,0.54,0,0,0,0,0,0,0.54,0,0,0,0,0,0,0.54,0,0.54,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.09,0,0.54,0,0,0,0,0,0,0,0,1,1,22,0 0,0,0,0,0,0,0,0,0,0,0,0.58,0,0,0,0.58,0,0,2.9,0,0.58,0.58,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.58,0,0,0,0.185,0,0,0,0.092,2.468,11,79,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6.89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3.44,0,0,0,0,0,0,0,0,1,1,8,0 
0,0,1.25,0,2.5,0,0,0,0,0,0,0,0.62,0,0,0,0,0,0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.62,0,0,1.25,0.62,0.62,0,0,0,0.111,0,0,0,0,1.285,4,27,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,7.69,0,0,0,0,0,1.052,0,0,1,1,6,0 0,0,0,0,0,0,0,0,0,0,0,1.61,0,0,0,0,0,0,6.45,0,0,0,0,0,0,0,0,0,0,1.61,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.61,0,0,0,0.63,0,0,0,0,1.727,5,19,0 0,0,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.59,3.57,0,1.19,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.59,0,0,0,0,0,0,0,0,1,1,24,0 0.31,0,0.62,0,0,0.31,0,0,0,0,0,1.88,0,0,0,0,0,0,0.62,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.31,0.31,0.31,0,0,0,0.232,0,0,0,0,1.142,3,88,0 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,6,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0.353,0,0,1.555,4,14,0 0.3,0,0.3,0,0,0,0,0,0,0,0,1.8,0.3,0,0,0,0,0.9,1.5,0,0.3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.2,0,0,0.102,0.718,0,0,0,0,1.404,6,118,0 0.96,0,0,0,0.32,0,0,0,0,0,0,0.32,0,0,0,0,0,0,1.93,0,0.32,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0.32,0,0.32,0,0,0,0.057,0,0,0,0,1.147,5,78,0 0,0,0.65,0,0,0,0,0,0,0,0,0,0.65,0,0,0,0,0,4.6,0,0.65,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1.97,0.65,0,0,0,0,0,0.125,0,0,1.25,5,40,0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/gp/spambase.py0000644000076500000240000000771314456461441016274 0ustar00runnerstaff# This file is part of EAP. # # EAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # EAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. 
# # You should have received a copy of the GNU Lesser General Public # License along with EAP. If not, see . import random import operator import csv import itertools import numpy from functools import partial from deap import algorithms from deap import base from deap import creator from deap import tools from deap import gp # Read the spam list features and put it in a list of lists. # The dataset is from http://archive.ics.uci.edu/ml/datasets/Spambase # This example is a copy of the OpenBEAGLE example : # http://beagle.gel.ulaval.ca/refmanual/beagle/html/d2/dbe/group__Spambase.html with open("spambase.csv") as spambase: spamReader = csv.reader(spambase) spam = list(list(float(elem) for elem in row) for row in spamReader) # defined a new primitive set for strongly typed GP pset = gp.PrimitiveSetTyped("MAIN", itertools.repeat(float, 57), bool, "IN") # boolean operators pset.addPrimitive(operator.and_, [bool, bool], bool) pset.addPrimitive(operator.or_, [bool, bool], bool) pset.addPrimitive(operator.not_, [bool], bool) # floating point operators # Define a protected division function def protectedDiv(left, right): try: return left / right except ZeroDivisionError: return 1 pset.addPrimitive(operator.add, [float,float], float) pset.addPrimitive(operator.sub, [float,float], float) pset.addPrimitive(operator.mul, [float,float], float) pset.addPrimitive(protectedDiv, [float,float], float) # logic operators # Define a new if-then-else function def if_then_else(input, output1, output2): if input: return output1 else: return output2 pset.addPrimitive(operator.lt, [float, float], bool) pset.addPrimitive(operator.eq, [float, float], bool) pset.addPrimitive(if_then_else, [bool, float, float], float) # terminals pset.addEphemeralConstant("rand100", partial(random.uniform, 0, 100), float) pset.addTerminal(False, bool) pset.addTerminal(True, bool) creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", gp.PrimitiveTree, 
fitness=creator.FitnessMax) toolbox = base.Toolbox() toolbox.register("expr", gp.genHalfAndHalf, pset=pset, min_=1, max_=2) toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.expr) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("compile", gp.compile, pset=pset) def evalSpambase(individual): # Transform the tree expression in a callable function func = toolbox.compile(expr=individual) # Randomly sample 400 mails in the spam database spam_samp = random.sample(spam, 400) # Evaluate the sum of correctly identified mail as spam result = sum(bool(func(*mail[:57])) is bool(mail[57]) for mail in spam_samp) return result, toolbox.register("evaluate", evalSpambase) toolbox.register("select", tools.selTournament, tournsize=3) toolbox.register("mate", gp.cxOnePoint) toolbox.register("expr_mut", gp.genFull, min_=0, max_=2) toolbox.register("mutate", gp.mutUniform, expr=toolbox.expr_mut, pset=pset) def main(): random.seed(10) pop = toolbox.population(n=100) hof = tools.HallOfFame(1) stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) algorithms.eaSimple(pop, toolbox, 0.5, 0.2, 40, stats, halloffame=hof) return pop, stats, hof if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/gp/symbreg.py0000644000076500000240000000645614456461441016154 0ustar00runnerstaff# This file is part of EAP. # # EAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. 
# # EAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with EAP. If not, see . import operator import math import random import numpy from functools import partial from deap import algorithms from deap import base from deap import creator from deap import tools from deap import gp # Define new functions def protectedDiv(left, right): try: return left / right except ZeroDivisionError: return 1 pset = gp.PrimitiveSet("MAIN", 1) pset.addPrimitive(operator.add, 2) pset.addPrimitive(operator.sub, 2) pset.addPrimitive(operator.mul, 2) pset.addPrimitive(protectedDiv, 2) pset.addPrimitive(operator.neg, 1) pset.addPrimitive(math.cos, 1) pset.addPrimitive(math.sin, 1) pset.addEphemeralConstant("rand101", partial(random.randint, -1, 1)) pset.renameArguments(ARG0='x') creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", gp.PrimitiveTree, fitness=creator.FitnessMin) toolbox = base.Toolbox() toolbox.register("expr", gp.genHalfAndHalf, pset=pset, min_=1, max_=2) toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.expr) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("compile", gp.compile, pset=pset) def evalSymbReg(individual, points): # Transform the tree expression in a callable function func = toolbox.compile(expr=individual) # Evaluate the mean squared error between the expression # and the real function : x**4 + x**3 + x**2 + x sqerrors = ((func(x) - x**4 - x**3 - x**2 - x)**2 for x in points) return math.fsum(sqerrors) / len(points), toolbox.register("evaluate", evalSymbReg, points=[x/10. 
for x in range(-10,10)]) toolbox.register("select", tools.selTournament, tournsize=3) toolbox.register("mate", gp.cxOnePoint) toolbox.register("expr_mut", gp.genFull, min_=0, max_=2) toolbox.register("mutate", gp.mutUniform, expr=toolbox.expr_mut, pset=pset) toolbox.decorate("mate", gp.staticLimit(key=operator.attrgetter("height"), max_value=17)) toolbox.decorate("mutate", gp.staticLimit(key=operator.attrgetter("height"), max_value=17)) def main(): random.seed(318) pop = toolbox.population(n=300) hof = tools.HallOfFame(1) stats_fit = tools.Statistics(lambda ind: ind.fitness.values) stats_size = tools.Statistics(len) mstats = tools.MultiStatistics(fitness=stats_fit, size=stats_size) mstats.register("avg", numpy.mean) mstats.register("std", numpy.std) mstats.register("min", numpy.min) mstats.register("max", numpy.max) pop, log = algorithms.eaSimple(pop, toolbox, 0.5, 0.1, 40, stats=mstats, halloffame=hof, verbose=True) # print log return pop, log, hof if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/gp/symbreg_epsilon_lexicase.py0000644000076500000240000000641714456461441021557 0ustar00runnerstaff# This file is part of EAP. # # EAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # EAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with EAP. If not, see . 
import operator import math import random import numpy from deap import algorithms from deap import base from deap import creator from deap import tools from deap import gp # Define new functions def protectedDiv(left, right): try: return left / right except ZeroDivisionError: return 1 pset = gp.PrimitiveSet("MAIN", 1) pset.addPrimitive(operator.add, 2) pset.addPrimitive(operator.sub, 2) pset.addPrimitive(operator.mul, 2) pset.addPrimitive(protectedDiv, 2) pset.addPrimitive(operator.neg, 1) pset.addPrimitive(math.cos, 1) pset.addPrimitive(math.sin, 1) pset.addEphemeralConstant("rand101", lambda: random.randint(-1,1)) pset.renameArguments(ARG0='x') creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", gp.PrimitiveTree, fitness=creator.FitnessMin) toolbox = base.Toolbox() toolbox.register("expr", gp.genHalfAndHalf, pset=pset, min_=1, max_=2) toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.expr) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("compile", gp.compile, pset=pset) def evalSymbReg(individual, points): # Transform the tree expression in a callable function func = toolbox.compile(expr=individual) # Evaluate the mean squared error between the expression # and the real function : x**4 + x**3 + x**2 + x sqerrors = ((func(x) - x**4 - x**3 - x**2 - x)**2 for x in points) return math.fsum(sqerrors) / len(points), toolbox.register("evaluate", evalSymbReg, points=[x/10. 
for x in range(-10,10)]) toolbox.register("select", tools.selAutomaticEpsilonLexicase) toolbox.register("mate", gp.cxOnePoint) toolbox.register("expr_mut", gp.genFull, min_=0, max_=2) toolbox.register("mutate", gp.mutUniform, expr=toolbox.expr_mut, pset=pset) toolbox.decorate("mate", gp.staticLimit(key=operator.attrgetter("height"), max_value=17)) toolbox.decorate("mutate", gp.staticLimit(key=operator.attrgetter("height"), max_value=17)) def main(): #random.seed(318) pop = toolbox.population(n=300) hof = tools.HallOfFame(1) stats_fit = tools.Statistics(lambda ind: ind.fitness.values) stats_size = tools.Statistics(len) mstats = tools.MultiStatistics(fitness=stats_fit, size=stats_size) mstats.register("avg", numpy.mean) mstats.register("std", numpy.std) mstats.register("min", numpy.min) mstats.register("max", numpy.max) pop, log = algorithms.eaSimple(pop, toolbox, 0.5, 0.1, 40, stats=mstats, halloffame=hof, verbose=True) # print log return pop, log, hof if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/gp/symbreg_harm.py0000644000076500000240000000645314456461441017160 0ustar00runnerstaff# This file is part of EAP. # # EAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # EAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with EAP. If not, see . 
import operator import math import random import numpy from deap import algorithms from deap import base from deap import creator from deap import tools from deap import gp # Define new functions def protectedDiv(left, right): try: return left / right except ZeroDivisionError: return 1 pset = gp.PrimitiveSet("MAIN", 1) pset.addPrimitive(operator.add, 2) pset.addPrimitive(operator.sub, 2) pset.addPrimitive(operator.mul, 2) pset.addPrimitive(protectedDiv, 2) pset.addPrimitive(operator.neg, 1) pset.addPrimitive(math.cos, 1) pset.addPrimitive(math.sin, 1) pset.addEphemeralConstant("rand101", lambda: random.randint(-1,1)) pset.renameArguments(ARG0='x') creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", gp.PrimitiveTree, fitness=creator.FitnessMin) toolbox = base.Toolbox() toolbox.register("expr", gp.genHalfAndHalf, pset=pset, min_=1, max_=2) toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.expr) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("compile", gp.compile, pset=pset) def evalSymbReg(individual, points): # Transform the tree expression in a callable function func = toolbox.compile(expr=individual) # Evaluate the mean squared error between the expression # and the real function : x**4 + x**3 + x**2 + x sqerrors = ((func(x) - x**4 - x**3 - x**2 - x)**2 for x in points) return math.fsum(sqerrors) / len(points), toolbox.register("evaluate", evalSymbReg, points=[x/10. 
for x in range(-10,10)]) toolbox.register("select", tools.selTournament, tournsize=3) toolbox.register("mate", gp.cxOnePoint) toolbox.register("expr_mut", gp.genFull, min_=0, max_=2) toolbox.register("mutate", gp.mutUniform, expr=toolbox.expr_mut, pset=pset) toolbox.decorate("mate", gp.staticLimit(key=operator.attrgetter("height"), max_value=17)) toolbox.decorate("mutate", gp.staticLimit(key=operator.attrgetter("height"), max_value=17)) def main(): random.seed(318) pop = toolbox.population(n=300) hof = tools.HallOfFame(1) stats_fit = tools.Statistics(lambda ind: ind.fitness.values) stats_size = tools.Statistics(len) mstats = tools.MultiStatistics(fitness=stats_fit, size=stats_size) mstats.register("avg", numpy.mean) mstats.register("std", numpy.std) mstats.register("min", numpy.min) mstats.register("max", numpy.max) pop, log = gp.harm(pop, toolbox, 0.5, 0.1, 40, alpha=0.05, beta=10, gamma=0.25, rho=0.9, stats=mstats, halloffame=hof, verbose=True) # print log return pop, log, hof if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/gp/symbreg_numpy.py0000644000076500000240000000633114456461441017374 0ustar00runnerstaff# This file is part of EAP. # # EAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # EAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with EAP. If not, see . 
import operator import math import random import numpy from functools import partial from deap import algorithms from deap import base from deap import creator from deap import tools from deap import gp # Define new functions def protectedDiv(left, right): with numpy.errstate(divide='ignore',invalid='ignore'): x = numpy.divide(left, right) if isinstance(x, numpy.ndarray): x[numpy.isinf(x)] = 1 x[numpy.isnan(x)] = 1 elif numpy.isinf(x) or numpy.isnan(x): x = 1 return x pset = gp.PrimitiveSet("MAIN", 1) pset.addPrimitive(numpy.add, 2, name="vadd") pset.addPrimitive(numpy.subtract, 2, name="vsub") pset.addPrimitive(numpy.multiply, 2, name="vmul") pset.addPrimitive(protectedDiv, 2) pset.addPrimitive(numpy.negative, 1, name="vneg") pset.addPrimitive(numpy.cos, 1, name="vcos") pset.addPrimitive(numpy.sin, 1, name="vsin") pset.addEphemeralConstant("rand101", partial(random.randint, -1, 1)) pset.renameArguments(ARG0='x') creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", gp.PrimitiveTree, fitness=creator.FitnessMin) toolbox = base.Toolbox() toolbox.register("expr", gp.genHalfAndHalf, pset=pset, min_=1, max_=2) toolbox.register("individual", tools.initIterate, creator.Individual, toolbox.expr) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("compile", gp.compile, pset=pset) samples = numpy.linspace(-1, 1, 10000) values = samples**4 + samples**3 + samples**2 + samples def evalSymbReg(individual): # Transform the tree expression in a callable function func = toolbox.compile(expr=individual) # Evaluate the sum of squared difference between the expression # and the real function values : x**4 + x**3 + x**2 + x diff = numpy.sum((func(samples) - values)**2) return diff, toolbox.register("evaluate", evalSymbReg) toolbox.register("select", tools.selTournament, tournsize=3) toolbox.register("mate", gp.cxOnePoint) toolbox.register("expr_mut", gp.genFull, min_=0, max_=2) toolbox.register('mutate', 
gp.mutUniform, expr=toolbox.expr_mut, pset=pset) def main(): random.seed(318) pop = toolbox.population(n=300) hof = tools.HallOfFame(1) stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) algorithms.eaSimple(pop, toolbox, 0.5, 0.1, 40, stats, halloffame=hof) return pop, stats, hof if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000003200000000000010210 xustar0026 mtime=1689936700.73395 deap-1.4.1/examples/pso/0000755000076500000240000000000014456461475014321 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/pso/basic.py0000644000076500000240000000643014456461441015750 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . 
def updateParticle(part, best, phi1, phi2):
    """Apply one PSO velocity/position update to *part*.

    The new speed combines the pull toward the particle's own best
    position (weighted by a random factor in [0, phi1]) and toward the
    swarm best (weighted by a random factor in [0, phi2]).  Each speed
    component is then kept within the [smin, smax] magnitude band
    before being added to the position.
    """
    updated_speed = []
    for i in range(len(part)):
        r1 = random.uniform(0, phi1)
        r2 = random.uniform(0, phi2)
        cognitive = r1 * (part.best[i] - part[i])
        social = r2 * (best[i] - part[i])
        updated_speed.append(part.speed[i] + (cognitive + social))
    part.speed = updated_speed

    # Clamp the magnitude of every speed component into [smin, smax].
    for i, component in enumerate(part.speed):
        if abs(component) < part.smin:
            part.speed[i] = math.copysign(part.smin, component)
        elif abs(component) > part.smax:
            part.speed[i] = math.copysign(part.smax, component)

    part[:] = [position + velocity
               for position, velocity in zip(part, part.speed)]
best.fitness < part.fitness: best = creator.Particle(part) best.fitness.values = part.fitness.values for part in pop: toolbox.update(part, best) # Gather all the fitnesses in one list and print the stats logbook.record(gen=g, evals=len(pop), **stats.compile(pop)) print(logbook.stream) return pop, logbook, best if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/pso/basic_numpy.py0000644000076500000240000000613014456461441017175 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . 
def updateParticle(part, best, phi1, phi2):
    """Apply one numpy PSO velocity/position update to *part*.

    Adds to the particle's speed the attraction toward its personal best
    (random weights in [0, phi1]) and toward the swarm best (random
    weights in [0, phi2]), clamps each speed component's magnitude into
    [part.smin, part.smax], then moves the particle in place.
    """
    u1 = numpy.random.uniform(0, phi1, len(part))
    u2 = numpy.random.uniform(0, phi2, len(part))
    v_u1 = u1 * (part.best - part)
    v_u2 = u2 * (best - part)
    part.speed += v_u1 + v_u2
    for i, speed in enumerate(part.speed):
        # Bug fix: the original called math.copysign, but this module
        # never imports math, raising NameError whenever clamping
        # triggers.  numpy.copysign is equivalent and already in scope.
        if abs(speed) < part.smin:
            part.speed[i] = numpy.copysign(part.smin, speed)
        elif abs(speed) > part.smax:
            part.speed[i] = numpy.copysign(part.smax, speed)
    part += part.speed
print the stats logbook.record(gen=g, evals=len(pop), **stats.compile(pop)) print(logbook.stream) return pop, logbook, best if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/pso/multiswarm.py0000644000076500000240000002213514456461441017073 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . """Implementation of the Multiswarm Particle Swarm Optimization algorithm as presented in *Blackwell, Branke, and Li, 2008, Particle Swarms for Dynamic Optimization Problems.* """ import itertools import math import operator import random import numpy try: from itertools import imap except: # Python 3 nothing to do pass else: map = imap from deap import base from deap.benchmarks import movingpeaks from deap import creator from deap import tools scenario = movingpeaks.SCENARIO_2 NDIM = 5 BOUNDS = [scenario["min_coord"], scenario["max_coord"]] mpb = movingpeaks.MovingPeaks(dim=NDIM, **scenario) creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Particle", list, fitness=creator.FitnessMax, speed=list, best=None, bestfit=creator.FitnessMax) creator.create("Swarm", list, best=None, bestfit=creator.FitnessMax) def generate(pclass, dim, pmin, pmax, smin, smax): part = pclass(random.uniform(pmin, pmax) for _ in range(dim)) part.speed = [random.uniform(smin, smax) for _ in 
def convertQuantum(swarm, rcloud, centre, dist):
    """Scatter every particle of *swarm* inside a quantum cloud of
    radius *rcloud* around *centre* and reset its memory.

    *dist* selects the radial distribution of the cloud: "gaussian",
    "uvd" (uniform volumetric) or "nuvd" (non-uniform volumetric), as
    in Blackwell, Branke and Li (2008).
    """
    dim = len(swarm[0])
    for part in swarm:
        # Random direction on the unit sphere (before normalization).
        position = [random.gauss(0, 1) for _ in range(dim)]
        # Bug fix: the original stored this norm in a variable named
        # `dist`, shadowing the distribution-name parameter, so every
        # string comparison below was dead and particles never moved.
        norm = math.sqrt(sum(x**2 for x in position))
        if dist == "gaussian":
            u = abs(random.gauss(0, 1.0/3.0))
            part[:] = [(rcloud * x * u**(1.0/dim) / norm) + c
                       for x, c in zip(position, centre)]
        elif dist == "uvd":
            u = random.random()
            part[:] = [(rcloud * x * u**(1.0/dim) / norm) + c
                       for x, c in zip(position, centre)]
        elif dist == "nuvd":
            u = abs(random.gauss(0, 1.0/3.0))
            part[:] = [(rcloud * x * u / norm) + c
                       for x, c in zip(position, centre)]
        # Invalidate previous evaluations: the particle starts fresh.
        del part.fitness.values
        del part.bestfit.values
        part.best = None
    return swarm
"gen", "nswarm", "evals", "error", "offline_error", "avg", "max" # Generate the initial population population = [toolbox.swarm(n=NPARTICLES) for _ in range(NSWARMS)] # Evaluate each particle for swarm in population: for part in swarm: part.fitness.values = toolbox.evaluate(part) # Update swarm's attractors personal best and global best if not part.best or part.fitness > part.bestfit: part.best = toolbox.clone(part[:]) # Get the position part.bestfit.values = part.fitness.values # Get the fitness if not swarm.best or part.fitness > swarm.bestfit: swarm.best = toolbox.clone(part[:]) # Get the position swarm.bestfit.values = part.fitness.values # Get the fitness record = stats.compile(itertools.chain(*population)) logbook.record(gen=0, evals=mpb.nevals, nswarm=len(population), error=mpb.currentError(), offline_error=mpb.offlineError(), **record) if verbose: print(logbook.stream) generation = 1 while mpb.nevals < 5e5: # Check for convergence rexcl = (BOUNDS[1] - BOUNDS[0]) / (2 * len(population)**(1.0/NDIM)) not_converged = 0 worst_swarm_idx = None worst_swarm = None for i, swarm in enumerate(population): # Compute the diameter of the swarm for p1, p2 in itertools.combinations(swarm, 2): d = math.sqrt(sum((x1 - x2)**2. 
for x1, x2 in zip(p1, p2))) if d > 2*rexcl: not_converged += 1 # Search for the worst swarm according to its global best if not worst_swarm or swarm.bestfit < worst_swarm.bestfit: worst_swarm_idx = i worst_swarm = swarm break # If all swarms have converged, add a swarm if not_converged == 0: population.append(toolbox.swarm(n=NPARTICLES)) # If too many swarms are roaming, remove the worst swarm elif not_converged > NEXCESS: population.pop(worst_swarm_idx) # Update and evaluate the swarm for swarm in population: # Check for change if swarm.best and toolbox.evaluate(swarm.best) != swarm.bestfit.values: # Convert particles to quantum particles swarm[:] = toolbox.convert(swarm, rcloud=RCLOUD, centre=swarm.best) swarm.best = None del swarm.bestfit.values for part in swarm: # Not necessary to update if it is a new swarm # or a swarm just converted to quantum if swarm.best and part.best: toolbox.update(part, swarm.best) part.fitness.values = toolbox.evaluate(part) # Update swarm's attractors personal best and global best if not part.best or part.fitness > part.bestfit: part.best = toolbox.clone(part[:]) part.bestfit.values = part.fitness.values if not swarm.best or part.fitness > swarm.bestfit: swarm.best = toolbox.clone(part[:]) swarm.bestfit.values = part.fitness.values record = stats.compile(itertools.chain(*population)) logbook.record(gen=generation, evals=mpb.nevals, nswarm=len(population), error=mpb.currentError(), offline_error=mpb.offlineError(), **record) if verbose: print(logbook.stream) # Apply exclusion reinit_swarms = set() for s1, s2 in itertools.combinations(range(len(population)), 2): # Swarms must have a best and not already be set to reinitialize if population[s1].best and population[s2].best and not (s1 in reinit_swarms or s2 in reinit_swarms): dist = 0 for x1, x2 in zip(population[s1].best, population[s2].best): dist += (x1 - x2)**2. 
dist = math.sqrt(dist) if dist < rexcl: if population[s1].bestfit <= population[s2].bestfit: reinit_swarms.add(s1) else: reinit_swarms.add(s2) # Reinitialize and evaluate swarms for s in reinit_swarms: population[s] = toolbox.swarm(n=NPARTICLES) for part in population[s]: part.fitness.values = toolbox.evaluate(part) # Update swarm's attractors personal best and global best if not part.best or part.fitness > part.bestfit: part.best = toolbox.clone(part[:]) part.bestfit.values = part.fitness.values if not population[s].best or part.fitness > population[s].bestfit: population[s].best = toolbox.clone(part[:]) population[s].bestfit.values = part.fitness.values generation += 1 if __name__ == "__main__": main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/pso/speciation.py0000644000076500000240000001466414456461441017035 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . 
"""Implementation of the Speciation Particle Swarm Optimization algorithm as presented in *Li, Blackwell, and Branke, 2006, Particle Swarm with Speciation and Adaptation in a Dynamic Environment.* """ import itertools import math import operator import random import numpy try: from itertools import imap except: # Python 3 nothing to do pass else: map = imap from deap import base from deap.benchmarks import movingpeaks from deap import creator from deap import tools scenario = movingpeaks.SCENARIO_2 NDIM = 5 BOUNDS = [scenario["min_coord"], scenario["max_coord"]] mpb = movingpeaks.MovingPeaks(dim=NDIM, **scenario) creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Particle", list, fitness=creator.FitnessMax, speed=list, best=None, bestfit=creator.FitnessMax) def generate(pclass, dim, pmin, pmax, smin, smax): part = pclass(random.uniform(pmin, pmax) for _ in range(dim)) part.speed = [random.uniform(smin, smax) for _ in range(dim)] return part def convert_quantum(swarm, rcloud, centre): dim = len(swarm[0]) for part in swarm: position = [random.gauss(0, 1) for _ in range(dim)] dist = math.sqrt(sum(x**2 for x in position)) # Gaussian distribution # u = abs(random.gauss(0, 1.0/3.0)) # part[:] = [(rcloud * x * u**(1.0/dim) / dist) + c for x, c in zip(position, centre)] # UVD distribution # u = random.random() # part[:] = [(rcloud * x * u**(1.0/dim) / dist) + c for x, c in zip(position, centre)] # NUVD distribution u = abs(random.gauss(0, 1.0/3.0)) part[:] = [(rcloud * x * u / dist) + c for x, c in zip(position, centre)] del part.fitness.values del part.bestfit.values part.best = None return swarm def updateParticle(part, best, chi, c): ce1 = (c*random.uniform(0, 1) for _ in range(len(part))) ce2 = (c*random.uniform(0, 1) for _ in range(len(part))) ce1_p = map(operator.mul, ce1, map(operator.sub, best, part)) ce2_g = map(operator.mul, ce2, map(operator.sub, part.best, part)) a = map(operator.sub, map(operator.mul, itertools.repeat(chi), 
map(operator.add, ce1_p, ce2_g)), map(operator.mul, itertools.repeat(1-chi), part.speed)) part.speed = list(map(operator.add, part.speed, a)) part[:] = list(map(operator.add, part, part.speed)) toolbox = base.Toolbox() toolbox.register("particle", generate, creator.Particle, dim=NDIM, pmin=BOUNDS[0], pmax=BOUNDS[1], smin=-(BOUNDS[1] - BOUNDS[0])/2.0, smax=(BOUNDS[1] - BOUNDS[0])/2.0) toolbox.register("swarm", tools.initRepeat, list, toolbox.particle) toolbox.register("update", updateParticle, chi=0.729843788, c=2.05) toolbox.register("convert", convert_quantum) toolbox.register("evaluate", mpb) def main(verbose=True): NPARTICLES = 100 RS = (BOUNDS[1] - BOUNDS[0]) / (50**(1.0/NDIM)) # between 1/20 and 1/10 of the domain's range PMAX = 10 RCLOUD = 1.0 # 0.5 times the move severity stats = tools.Statistics(lambda ind: ind.fitness.values) stats.register("avg", numpy.mean) stats.register("std", numpy.std) stats.register("min", numpy.min) stats.register("max", numpy.max) logbook = tools.Logbook() logbook.header = "gen", "nswarm", "evals", "error", "offline_error", "avg", "max" swarm = toolbox.swarm(n=NPARTICLES) generation = 0 while mpb.nevals < 5e5: # Evaluate each particle in the swarm for part in swarm: part.fitness.values = toolbox.evaluate(part) if not part.best or part.bestfit < part.fitness: part.best = toolbox.clone(part[:]) # Get the position part.bestfit.values = part.fitness.values # Get the fitness # Sort swarm into species, best individual comes first sorted_swarm = sorted(swarm, key=lambda ind: ind.bestfit, reverse=True) species = [] while sorted_swarm: found = False for s in species: dist = math.sqrt(sum((x1 - x2)**2 for x1, x2 in zip(sorted_swarm[0].best, s[0].best))) if dist <= RS: found = True s.append(sorted_swarm[0]) break if not found: species.append([sorted_swarm[0]]) sorted_swarm.pop(0) record = stats.compile(swarm) logbook.record(gen=generation, evals=mpb.nevals, nswarm=len(species), error=mpb.currentError(), offline_error=mpb.offlineError(), 
**record) if verbose: print(logbook.stream) # Detect change if any(s[0].bestfit.values != toolbox.evaluate(s[0].best) for s in species): # Convert particles to quantum particles for s in species: s[:] = toolbox.convert(s, rcloud=RCLOUD, centre=s[0].best) else: # Replace exceeding particles in a species with new particles for s in species: if len(s) > PMAX: n = len(s) - PMAX del s[PMAX:] s.extend(toolbox.swarm(n=n)) # Update particles that have not been reinitialized for s in species[:-1]: for part in s[:PMAX]: toolbox.update(part, s[0].best) del part.fitness.values # Return all but the worst species' updated particles to the swarm # The worst species is replaced by new particles swarm = list(itertools.chain(toolbox.swarm(n=len(species[-1])), *species[:-1])) generation += 1 if __name__ == '__main__': main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/examples/speed.txt0000644000076500000240000000077414456461441015362 0ustar00runnerstaffga/evosn ga/knapsack ga/evoknn_jmlr ga/evoknn ga/kursawefct ga/onemax_multidemic ga/onemax ga/onemax_numpy ga/onemax_short ga/tsp ga/nsga2 ga/onemax_mp ga/onemax_island ga/nqueens gp/adf_symbreg gp/ant gp/parity gp/spambase gp/symbreg gp/symbreg_harm gp/symbreg_numpy es/fctmin es/onefifth es/cma_minfct es/cma_1+l_minfct es/cma_bipop es/cma_mo coev/hillis coev/symbreg coev/coop_evol coev/coop_gen coev/coop_niche coev/coop_adapt de/basic de/sphere eda/emna eda/pbil pso/basic pso/speciation pso/multiswarm ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.7402704 deap-1.4.1/setup.cfg0000644000076500000240000000004614456461475013523 0ustar00runnerstaff[egg_info] tag_build = tag_date = 0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/setup.py0000644000076500000240000000735214456461441013414 0ustar00runnerstaff#!/usr/bin/env python import sys warnings = list() try: from setuptools 
import setup, Extension, find_packages modules = find_packages(exclude=['examples']) except ImportError: warnings.append("warning: using distutils.core.setup, cannot use \"develop\" option") from distutils.core import setup, Extension modules = ['deap', 'deap.benchmarks', 'deap.tests', 'deap.tools', 'deap.tools._hypervolume'] from setuptools.command.build_ext import build_ext from distutils.errors import CCompilerError, DistutilsExecError, \ DistutilsPlatformError # read the contents of README file from os import path import codecs this_directory = path.abspath(path.dirname(__file__)) long_description = codecs.open(path.join(this_directory, 'README.md'), 'r', 'utf-8').read() import deap if sys.platform == 'win32' and sys.version_info > (2, 6): # 2.6's distutils.msvc9compiler can raise an IOError when failing to # find the compiler # It can also raise ValueError http://bugs.python.org/issue7511 ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError, IOError, ValueError) else: ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError) class BuildFailed(Exception): pass class ve_build_ext(build_ext): # This class allows C extension building to fail. 
def run(self): try: build_ext.run(self) except DistutilsPlatformError as e: print(e) raise BuildFailed() def build_extension(self, ext): try: build_ext.build_extension(self, ext) except ext_errors as e: print(e) raise BuildFailed() def run_setup(build_ext): extra_modules = None if build_ext: extra_modules = list() hv_module = Extension("deap.tools._hypervolume.hv", sources=["deap/tools/_hypervolume/_hv.c", "deap/tools/_hypervolume/hv.cpp"]) extra_modules.append(hv_module) setup(name='deap', version=deap.__revision__, description='Distributed Evolutionary Algorithms in Python', long_description=long_description, long_description_content_type="text/markdown", author='deap Development Team', author_email='deap-users@googlegroups.com', url='https://www.github.com/deap', packages=find_packages(exclude=['examples', 'tests']), # packages=['deap', 'deap.tools', 'deap.tools._hypervolume', 'deap.benchmarks', 'deap.tests'], platforms=['any'], keywords=['evolutionary algorithms', 'genetic algorithms', 'genetic programming', 'cma-es', 'ga', 'gp', 'es', 'pso'], license='LGPL', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Intended Audience :: Education', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Topic :: Scientific/Engineering', 'Topic :: Software Development', ], ext_modules=extra_modules, cmdclass={"build_ext": ve_build_ext}, install_requires=['numpy'], ) try: run_setup(True) except BuildFailed: print("*" * 75) print("WARNING: The C extensions could not be compiled, " "speedups won't be available.") print("Now building without C extensions.") print("*" * 75) run_setup(False) print("*" * 75) print("WARNING: The C extensions could not be compiled, " "speedups won't be available.") print("Plain-Python installation succeeded.") print("*" * 75) print("\n".join(warnings)) 
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1689936700.7390878 deap-1.4.1/tests/0000755000076500000240000000000014456461475013044 5ustar00runnerstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/tests/test_algorithms.py0000644000076500000240000002104314456461441016617 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . 
import random import numpy import pytest from deap import algorithms from deap import base from deap import benchmarks from deap.benchmarks.tools import hypervolume from deap import cma from deap import creator from deap import tools FITCLSNAME = "FIT_TYPE" INDCLSNAME = "IND_TYPE" HV_THRESHOLD = 116.0 # 120.777 is Optimal value def teardown_(): # Messy way to remove a class from the creator del creator.__dict__[FITCLSNAME] del creator.__dict__[INDCLSNAME] @pytest.fixture def setup_teardown_single_obj(): creator.create(FITCLSNAME, base.Fitness, weights=(-1.0,)) creator.create(INDCLSNAME, list, fitness=creator.__dict__[FITCLSNAME]) yield teardown_() @pytest.fixture def setup_teardown_multi_obj(): creator.create(FITCLSNAME, base.Fitness, weights=(-1.0, -1.0)) creator.create(INDCLSNAME, list, fitness=creator.__dict__[FITCLSNAME]) yield teardown_() @pytest.fixture def setup_teardown_multi_obj_numpy(): creator.create(FITCLSNAME, base.Fitness, weights=(-1.0, -1.0)) creator.create(INDCLSNAME, numpy.ndarray, fitness=creator.__dict__[FITCLSNAME]) yield teardown_() def test_cma(setup_teardown_single_obj): NDIM = 5 strategy = cma.Strategy(centroid=[0.0]*NDIM, sigma=1.0) toolbox = base.Toolbox() toolbox.register("evaluate", benchmarks.sphere) toolbox.register("generate", strategy.generate, creator.__dict__[INDCLSNAME]) toolbox.register("update", strategy.update) pop, _ = algorithms.eaGenerateUpdate(toolbox, ngen=100) best, = tools.selBest(pop, k=1) assert best.fitness.values < (1e-8,), "CMA algorithm did not converged properly." 
def test_nsga2(setup_teardown_multi_obj): NDIM = 5 BOUND_LOW, BOUND_UP = 0.0, 1.0 MU = 16 NGEN = 100 toolbox = base.Toolbox() toolbox.register("attr_float", random.uniform, BOUND_LOW, BOUND_UP) toolbox.register("individual", tools.initRepeat, creator.__dict__[INDCLSNAME], toolbox.attr_float, NDIM) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("evaluate", benchmarks.zdt1) toolbox.register("mate", tools.cxSimulatedBinaryBounded, low=BOUND_LOW, up=BOUND_UP, eta=20.0) toolbox.register("mutate", tools.mutPolynomialBounded, low=BOUND_LOW, up=BOUND_UP, eta=20.0, indpb=1.0/NDIM) toolbox.register("select", tools.selNSGA2) pop = toolbox.population(n=MU) fitnesses = toolbox.map(toolbox.evaluate, pop) for ind, fit in zip(pop, fitnesses): ind.fitness.values = fit pop = toolbox.select(pop, len(pop)) for gen in range(1, NGEN): offspring = tools.selTournamentDCD(pop, len(pop)) offspring = [toolbox.clone(ind) for ind in offspring] for ind1, ind2 in zip(offspring[::2], offspring[1::2]): if random.random() <= 0.9: toolbox.mate(ind1, ind2) toolbox.mutate(ind1) toolbox.mutate(ind2) del ind1.fitness.values, ind2.fitness.values invalid_ind = [ind for ind in offspring if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit pop = toolbox.select(pop + offspring, MU) hv = hypervolume(pop, [11.0, 11.0]) # hv = 120.777 # Optimal value assert hv > HV_THRESHOLD, "Hypervolume is lower than expected %f < %f" % (hv, HV_THRESHOLD) for ind in pop: assert not (any(numpy.asarray(ind) < BOUND_LOW) or any(numpy.asarray(ind) > BOUND_UP)) def test_mo_cma_es(setup_teardown_multi_obj_numpy): def distance(feasible_ind, original_ind): """A distance function to the feasibility region.""" return sum((f - o)**2 for f, o in zip(feasible_ind, original_ind)) def closest_feasible(individual): """A function returning a valid individual from an invalid one.""" feasible_ind = 
numpy.array(individual) feasible_ind = numpy.maximum(BOUND_LOW, feasible_ind) feasible_ind = numpy.minimum(BOUND_UP, feasible_ind) return feasible_ind def valid(individual): """Determines if the individual is valid or not.""" if any(individual < BOUND_LOW) or any(individual > BOUND_UP): return False return True NDIM = 5 BOUND_LOW, BOUND_UP = 0.0, 1.0 MU, LAMBDA = 10, 10 NGEN = 500 numpy.random.seed(128) # The MO-CMA-ES algorithm takes a full population as argument population = [creator.__dict__[INDCLSNAME](x) for x in numpy.random.uniform(BOUND_LOW, BOUND_UP, (MU, NDIM))] toolbox = base.Toolbox() toolbox.register("evaluate", benchmarks.zdt1) toolbox.decorate("evaluate", tools.ClosestValidPenalty(valid, closest_feasible, 1.0e+6, distance)) for ind in population: ind.fitness.values = toolbox.evaluate(ind) strategy = cma.StrategyMultiObjective(population, sigma=1.0, mu=MU, lambda_=LAMBDA) toolbox.register("generate", strategy.generate, creator.__dict__[INDCLSNAME]) toolbox.register("update", strategy.update) for gen in range(NGEN): # Generate a new population population = toolbox.generate() # Evaluate the individuals fitnesses = toolbox.map(toolbox.evaluate, population) for ind, fit in zip(population, fitnesses): ind.fitness.values = fit # Update the strategy with the evaluated individuals toolbox.update(population) # Note that we use a penalty to guide the search to feasible solutions, # but there is no guarantee that individuals are valid. # We expect the best individuals will be within bounds or very close. num_valid = 0 for ind in strategy.parents: dist = distance(closest_feasible(ind), ind) if numpy.isclose(dist, 0.0, rtol=1.e-5, atol=1.e-5): num_valid += 1 assert num_valid >= len(strategy.parents) # Note that NGEN=500 is enough to get consistent hypervolume > 116, # but not 119. More generations would help but would slow down testing. 
hv = hypervolume(strategy.parents, [11.0, 11.0]) assert hv > HV_THRESHOLD, "Hypervolume is lower than expected %f < %f" % (hv, HV_THRESHOLD) def test_nsga3(setup_teardown_multi_obj): NDIM = 5 BOUND_LOW, BOUND_UP = 0.0, 1.0 MU = 16 NGEN = 100 ref_points = tools.uniform_reference_points(2, p=12) toolbox = base.Toolbox() toolbox.register("attr_float", random.uniform, BOUND_LOW, BOUND_UP) toolbox.register("individual", tools.initRepeat, creator.__dict__[INDCLSNAME], toolbox.attr_float, NDIM) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("evaluate", benchmarks.zdt1) toolbox.register("mate", tools.cxSimulatedBinaryBounded, low=BOUND_LOW, up=BOUND_UP, eta=20.0) toolbox.register("mutate", tools.mutPolynomialBounded, low=BOUND_LOW, up=BOUND_UP, eta=20.0, indpb=1.0/NDIM) toolbox.register("select", tools.selNSGA3, ref_points=ref_points) pop = toolbox.population(n=MU) fitnesses = toolbox.map(toolbox.evaluate, pop) for ind, fit in zip(pop, fitnesses): ind.fitness.values = fit pop = toolbox.select(pop, len(pop)) # Begin the generational process for gen in range(1, NGEN): offspring = algorithms.varAnd(pop, toolbox, 1.0, 1.0) # Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in offspring if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit # Select the next generation population pop = toolbox.select(pop + offspring, MU) hv = hypervolume(pop, [11.0, 11.0]) # hv = 120.777 # Optimal value assert hv > HV_THRESHOLD, "Hypervolume is lower than expected %f < %f" % (hv, HV_THRESHOLD) for ind in pop: assert not (any(numpy.asarray(ind) < BOUND_LOW) or any(numpy.asarray(ind) > BOUND_UP)) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/tests/test_benchmarks.py0000644000076500000240000000446214456461441016571 0ustar00runnerstaff"""Test functions from 
deap/benchmarks.""" import sys import unittest from deap import base from deap import creator from deap.benchmarks import binary class BenchmarkTest(unittest.TestCase): """Test object for unittest of deap/benchmarks.""" def setUp(self): @binary.bin2float(0, 1023, 10) def evaluate(individual): """Simplest evaluation function.""" return individual creator.create("FitnessMin", base.Fitness, weights=(-1.0,)) creator.create("Individual", list, fitness=creator.FitnessMin) self.toolbox = base.Toolbox() self.toolbox.register("evaluate", evaluate) def tearDown(self): del creator.FitnessMin def test_bin2float(self): # Correct evaluation of bin2float. zero_individual = creator.Individual([0] * 10) full_individual = creator.Individual([1] * 10) two_individiual = creator.Individual(8*[0] + [1, 0]) population = [zero_individual, full_individual, two_individiual] fitnesses = map(self.toolbox.evaluate, population) for ind, fit in zip(population, fitnesses): ind.fitness.values = fit assert population[0].fitness.values == (0.0, ) assert population[1].fitness.values == (1023.0, ) assert population[2].fitness.values == (2.0, ) # Incorrect evaluation of bin2float. wrong_size_individual = creator.Individual([0, 1, 0, 1, 0, 1, 0, 1, 1]) wrong_population = [wrong_size_individual] # It is up the user to make sure that bin2float gets an individual with # an adequate length; no exceptions are raised. fitnesses = map(self.toolbox.evaluate, wrong_population) for ind, fit in zip(wrong_population, fitnesses): # In python 2.7 operator.mul works in a different way than in # python3. Thus an error occurs in python2.7 but an assignment is # correctly executed in python3. 
if sys.version_info < (3, ): with self.assertRaises(AssertionError): ind.fitness.values = fit else: assert wrong_population[0].fitness.values == () if __name__ == "__main__": suite = unittest.TestLoader().loadTestsFromTestCase(BenchmarkTest) unittest.TextTestRunner(verbosity=2).run(suite) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/tests/test_convergence.py0000644000076500000240000003720614456461441016754 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . 
from itertools import islice import random import unittest try: import numpy except ImportError: numpy = False from deap import algorithms from deap import base from deap import benchmarks from deap.benchmarks.tools import hypervolume from deap import cma from deap import creator from deap import tools FITCLSNAME = "FIT_TYPE" INDCLSNAME = "IND_TYPE" HV_THRESHOLD = 116.0 # 120.777 is Optimal value class TearDownCreatorTestCase(unittest.TestCase): def tearDown(self): # Messy way to remove a class from the creator del creator.__dict__[FITCLSNAME] del creator.__dict__[INDCLSNAME] class TestSingleObjective(TearDownCreatorTestCase): def setUp(self): creator.create(FITCLSNAME, base.Fitness, weights=(-1.0,)) creator.create(INDCLSNAME, list, fitness=creator.__dict__[FITCLSNAME]) def test_cma(self): NDIM = 5 NGEN = 100 strategy = cma.Strategy(centroid=[0.0]*NDIM, sigma=1.0) toolbox = base.Toolbox() toolbox.register("evaluate", benchmarks.sphere) toolbox.register("generate", strategy.generate, creator.__dict__[INDCLSNAME]) toolbox.register("update", strategy.update) # Consume the algorithm until NGEN population, _ = algorithms.eaGenerateUpdate(toolbox, NGEN) best, = tools.selBest(population, k=1) self.assertLess(best.fitness.values[0], 1e-8) def test_cma_mixed_integer_1_p_1_no_constraint(self): N = 3 NGEN = 15000 toolbox = base.Toolbox() toolbox.register("evaluate", benchmarks.sphere) parent = (numpy.random.rand(N) * 2) + 1 strategy = cma.StrategyActiveOnePlusLambda(parent, 0.5, [0, 0, 0.1], lambda_=1) toolbox.register("generate", strategy.generate, ind_init=creator.__dict__[INDCLSNAME]) toolbox.register("update", strategy.update) best = None for gen in range(NGEN): # Generate a new population population = toolbox.generate() # Evaluate the individuals for individual in population: individual.fitness.values = toolbox.evaluate(individual) if best is None or individual.fitness >= best.fitness: best = individual # We must stop CMA-ES before the update becomes unstable if 
best.fitness.values[0] < 1e-12: break # Update the strategy with the evaluated individuals toolbox.update(population) self.assertLess(best.fitness.values[0], 1e-12) def test_cma_mixed_integer_1_p_20_no_constraint(self): N = 3 NGEN = 15000 toolbox = base.Toolbox() toolbox.register("evaluate", benchmarks.sphere) parent = (numpy.random.rand(N) * 2) + 1 strategy = cma.StrategyActiveOnePlusLambda(parent, 0.5, [0, 0, 0.1], lambda_=20) toolbox.register("generate", strategy.generate, ind_init=creator.__dict__[INDCLSNAME]) toolbox.register("update", strategy.update) best = None for gen in range(NGEN): # Generate a new population population = toolbox.generate() # Evaluate the individuals for individual in population: individual.fitness.values = toolbox.evaluate(individual) if best is None or individual.fitness >= best.fitness: best = individual # Stop when we've reached some kind of optimum if best.fitness.values[0] < 1e-12: break # Update the strategy with the evaluated individuals toolbox.update(population) self.assertLess(best.fitness.values[0], 1e-12) class TestSingleObjectiveConstrained(TearDownCreatorTestCase): def setUp(self): creator.create(FITCLSNAME, base.ConstrainedFitness, weights=(-1.0,)) creator.create(INDCLSNAME, list, fitness=creator.__dict__[FITCLSNAME]) def test_cma_mixed_integer_1_p_1_with_constraint(self): def c1(individual): if individual[0] + individual[1] < 0.1: return True return False def c2(individual): if individual[1] < 0.1: return True return False N = 5 NGEN = 15000 optimum = 0.015 toolbox = base.Toolbox() toolbox.register("evaluate", benchmarks.sphere) restarts = 10 # Allow a couple of restarts while restarts > 0: parent = (numpy.random.rand(N) * 2) + 1 strategy = cma.StrategyActiveOnePlusLambda(parent, 0.5, [0, 0, 0.1, 0, 0], lambda_=1) toolbox.register("generate", strategy.generate, ind_init=creator.__dict__[INDCLSNAME]) toolbox.register("update", strategy.update) best = None for gen in range(NGEN): # Generate a new population population = 
toolbox.generate() # Evaluate the individuals for individual in population: constraint_violation = c1(individual), c2(individual) if not any(constraint_violation): individual.fitness.values = toolbox.evaluate(individual) individual.fitness.constraint_violation = constraint_violation if best is None or individual.fitness >= best.fitness: best = individual # Stop when we've reached some kind of optimum if best.fitness.values[0] - optimum < 1e-7: restarts = 0 break # Update the strategy with the evaluated individuals toolbox.update(population) if strategy.condition_number > 10e12: # We've become unstable break restarts -= 1 self.assertLess(best.fitness.values[0] - optimum, 1e-7) def test_cma_mixed_integer_1_p_20_with_constraint(self): def c1(individual): if individual[0] + individual[1] < 0.1: return True return False def c2(individual): if individual[3] < 0.1: return True return False N = 5 NGEN = 15000 optimum = 0.015 toolbox = base.Toolbox() toolbox.register("evaluate", benchmarks.sphere) restarts = 10 # Allow a couple of restarts while restarts > 0: parent = (numpy.random.rand(N) * 2) + 1 strategy = cma.StrategyActiveOnePlusLambda(parent, 0.5, [0, 0, 0.1, 0, 0], lambda_=20) toolbox.register("generate", strategy.generate, ind_init=creator.__dict__[INDCLSNAME]) toolbox.register("update", strategy.update) best = None for gen in range(NGEN): # Generate a new population population = toolbox.generate() # Evaluate the individuals for individual in population: constraint_violation = c1(individual), c2(individual) if not any(constraint_violation): individual.fitness.values = toolbox.evaluate(individual) individual.fitness.constraint_violation = constraint_violation if best is None or individual.fitness >= best.fitness: best = individual if best.fitness.values[0] - optimum < 1e-7: restarts = 0 break # Stop when we've reached some kind of optimum toolbox.update(population) if strategy.condition_number > 10e12: # We've become unstable break restarts -= 1 
self.assertLess(best.fitness.values[0] - optimum, 1e-7) class TestMultiObjective(TearDownCreatorTestCase): def setUp(self): creator.create(FITCLSNAME, base.Fitness, weights=(-1.0, -1.0)) creator.create(INDCLSNAME, list, fitness=creator.__dict__[FITCLSNAME]) def test_nsga2(self): NDIM = 5 BOUND_LOW, BOUND_UP = 0.0, 1.0 MU = 16 NGEN = 100 toolbox = base.Toolbox() toolbox.register("attr_float", random.uniform, BOUND_LOW, BOUND_UP) toolbox.register("individual", tools.initRepeat, creator.__dict__[INDCLSNAME], toolbox.attr_float, NDIM) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("evaluate", benchmarks.zdt1) toolbox.register("mate", tools.cxSimulatedBinaryBounded, low=BOUND_LOW, up=BOUND_UP, eta=20.0) toolbox.register("mutate", tools.mutPolynomialBounded, low=BOUND_LOW, up=BOUND_UP, eta=20.0, indpb=1.0/NDIM) toolbox.register("select", tools.selNSGA2) pop = toolbox.population(n=MU) fitnesses = toolbox.map(toolbox.evaluate, pop) for ind, fit in zip(pop, fitnesses): ind.fitness.values = fit pop = toolbox.select(pop, len(pop)) for gen in range(1, NGEN): offspring = tools.selTournamentDCD(pop, len(pop)) offspring = [toolbox.clone(ind) for ind in offspring] for ind1, ind2 in zip(offspring[::2], offspring[1::2]): if random.random() <= 0.9: toolbox.mate(ind1, ind2) toolbox.mutate(ind1) toolbox.mutate(ind2) del ind1.fitness.values, ind2.fitness.values invalid_ind = [ind for ind in offspring if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit pop = toolbox.select(pop + offspring, MU) hv = hypervolume(pop, [11.0, 11.0]) # hv = 120.777 # Optimal value self.assertGreater(hv, HV_THRESHOLD) for ind in pop: self.assertTrue(all(numpy.asarray(ind) >= BOUND_LOW)) self.assertTrue(all(numpy.asarray(ind) <= BOUND_UP)) def test_nsga3(self): NDIM = 5 BOUND_LOW, BOUND_UP = 0.0, 1.0 MU = 16 NGEN = 100 ref_points = tools.uniform_reference_points(2, 
p=12) toolbox = base.Toolbox() toolbox.register("attr_float", random.uniform, BOUND_LOW, BOUND_UP) toolbox.register("individual", tools.initRepeat, creator.__dict__[INDCLSNAME], toolbox.attr_float, NDIM) toolbox.register("population", tools.initRepeat, list, toolbox.individual) toolbox.register("evaluate", benchmarks.zdt1) toolbox.register("mate", tools.cxSimulatedBinaryBounded, low=BOUND_LOW, up=BOUND_UP, eta=20.0) toolbox.register("mutate", tools.mutPolynomialBounded, low=BOUND_LOW, up=BOUND_UP, eta=20.0, indpb=1.0/NDIM) toolbox.register("select", tools.selNSGA3, ref_points=ref_points) pop = toolbox.population(n=MU) fitnesses = toolbox.map(toolbox.evaluate, pop) for ind, fit in zip(pop, fitnesses): ind.fitness.values = fit pop = toolbox.select(pop, len(pop)) # Begin the generational process for gen in range(1, NGEN): # Vary the individuals offspring = list(islice(algorithms.varAnd(pop, toolbox, 1.0, 1.0), len(pop))) # Evaluate the individuals with an invalid fitness invalid_ind = [ind for ind in offspring if not ind.fitness.valid] fitnesses = toolbox.map(toolbox.evaluate, invalid_ind) for ind, fit in zip(invalid_ind, fitnesses): ind.fitness.values = fit # Select the next generation population pop = toolbox.select(pop + offspring, MU) hv = hypervolume(pop, [11.0, 11.0]) # hv = 120.777 # Optimal value self.assertGreater(hv, HV_THRESHOLD) for ind in pop: self.assertTrue(all(numpy.asarray(ind) >= BOUND_LOW)) self.assertTrue(all(numpy.asarray(ind) <= BOUND_UP)) @unittest.skipUnless(numpy, "requires numpy") class TestMultiObjectiveNumpy(TearDownCreatorTestCase): def setUp(self): creator.create(FITCLSNAME, base.Fitness, weights=(-1.0, -1.0)) creator.create(INDCLSNAME, numpy.ndarray, fitness=creator.__dict__[FITCLSNAME]) def test_mo_cma_es(self): def distance(feasible_ind, original_ind): """A distance function to the feasibility region.""" return sum((f - o)**2 for f, o in zip(feasible_ind, original_ind)) def closest_feasible(individual): """A function returning a valid 
individual from an invalid one.""" feasible_ind = numpy.array(individual) feasible_ind = numpy.maximum(BOUND_LOW, feasible_ind) feasible_ind = numpy.minimum(BOUND_UP, feasible_ind) return feasible_ind def valid(individual): """Determines if the individual is valid or not.""" if any(individual < BOUND_LOW) or any(individual > BOUND_UP): return False return True NDIM = 5 BOUND_LOW, BOUND_UP = 0.0, 1.0 MU, LAMBDA = 10, 10 NGEN = 500 numpy.random.seed(128) # The MO-CMA-ES algorithm takes a full population as argument population = [creator.__dict__[INDCLSNAME](x) for x in numpy.random.uniform(BOUND_LOW, BOUND_UP, (MU, NDIM))] toolbox = base.Toolbox() toolbox.register("evaluate", benchmarks.zdt1) toolbox.decorate("evaluate", tools.ClosestValidPenalty(valid, closest_feasible, 1.0e+6, distance)) for ind in population: ind.fitness.values = toolbox.evaluate(ind) strategy = cma.StrategyMultiObjective(population, sigma=1.0, mu=MU, lambda_=LAMBDA) toolbox.register("generate", strategy.generate, creator.__dict__[INDCLSNAME]) toolbox.register("update", strategy.update) for gen in range(NGEN): # Generate a new population population = toolbox.generate() # Evaluate the individuals fitnesses = toolbox.map(toolbox.evaluate, population) for ind, fit in zip(population, fitnesses): ind.fitness.values = fit # Update the strategy with the evaluated individuals toolbox.update(population) # Note that we use a penalty to guide the search to feasible solutions, # but there is no guarantee that individuals are valid. # We expect the best individuals will be within bounds or very close. num_valid = 0 for ind in strategy.parents: dist = distance(closest_feasible(ind), ind) if numpy.isclose(dist, 0.0, rtol=1.e-5, atol=1.e-5): num_valid += 1 self.assertGreaterEqual(num_valid, len(strategy.parents)) # Note that NGEN=500 is enough to get consistent hypervolume > 116, # but not 119. More generations would help but would slow down testing. 
hv = hypervolume(strategy.parents, [11.0, 11.0]) self.assertGreater(hv, HV_THRESHOLD, msg="Hypervolume is lower than expected") ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/tests/test_creator.py0000644000076500000240000000503714456461441016112 0ustar00runnerstaff# This file is part of DEAP. # # DEAP is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as # published by the Free Software Foundation, either version 3 of # the License, or (at your option) any later version. # # DEAP is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with DEAP. If not, see . import unittest import array try: import numpy except ImportError: numpy = False from deap import creator CNAME = "CLASS_NAME" class TestCreator(unittest.TestCase): def tearDown(self): creator.__dict__.pop(CNAME) def test_create(self): creator.create(CNAME, list) l = creator.__dict__[CNAME]([1, 2, 3, 4]) self.assertSequenceEqual(l, [1, 2, 3, 4]) def test_attribute(self): creator.create(CNAME, list, a=1) l = creator.__dict__[CNAME]([1, 2, 3, 4]) self.assertEqual(l.a, 1) def test_array(self): creator.create(CNAME, array.array, typecode="i") a = creator.__dict__[CNAME]([1, 2, 3, 4]) b = creator.__dict__[CNAME]([5, 6, 7, 8]) a[1:3], b[1:3] = b[1:3], a[1:3] ta = array.array("i", [1, 6, 7, 4]) tb = array.array("i", [5, 2, 3, 8]) self.assertSequenceEqual(a, ta) self.assertSequenceEqual(b, tb) @unittest.skipIf(not numpy, "Cannot import Numpy numerical library") def test_numpy_nocopy(self): creator.create(CNAME, numpy.ndarray) a = creator.__dict__[CNAME]([1, 2, 3, 4]) b = creator.__dict__[CNAME]([5, 6, 7, 8]) a[1:3], b[1:3] = b[1:3], 
a[1:3] ta = numpy.array([1, 6, 7, 4]) tb = numpy.array([5, 6, 7, 8]) numpy.testing.assert_array_equal(a, ta) numpy.testing.assert_array_equal(b, tb) @unittest.skipIf(not numpy, "Cannot import Numpy numerical library") def test_numpy_copy(self): creator.create(CNAME, numpy.ndarray) a = creator.__dict__[CNAME]([1, 2, 3, 4]) b = creator.__dict__[CNAME]([5, 6, 7, 8]) a[1:3], b[1:3] = b[1:3].copy(), a[1:3].copy() ta = numpy.array([1, 6, 7, 4]) tb = numpy.array([5, 2, 3, 8]) numpy.testing.assert_array_equal(a, ta) numpy.testing.assert_array_equal(b, tb) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/tests/test_init.py0000644000076500000240000000051614456461441015413 0ustar00runnerstafffrom functools import partial import random import unittest from deap import tools class LogbookTest(unittest.TestCase): def test_statistics_compile(self): l = 10 gen_idx = partial(random.sample, list(range(l)), l) i = tools.initIterate(list, gen_idx) self.assertSetEqual(set(i), set(range(l))) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/tests/test_logbook.py0000644000076500000240000000330414456461441016102 0ustar00runnerstaffimport unittest from deap import tools class LogbookTest(unittest.TestCase): def setUp(self): self.logbook = tools.Logbook() print() def test_multi_chapters(self): self.logbook.record(gen=0, evals=100, fitness={'obj 1': {'avg': 1.0, 'max': 10}, 'obj 2': {'avg': 1.0, 'max': 10}}, length={'avg': 1.0, 'max': 30}, test={'avg': 1.0, 'max': 20}) self.logbook.record(gen=0, evals=100, fitness={'obj 1': {'avg': 1.0, 'max': 10}, 'obj 2': {'avg': 1.0, 'max': 10}}, length={'avg': 1.0, 'max': 30}, test={'avg': 1.0, 'max': 20}) print(self.logbook.stream) def test_one_chapter(self): self.logbook.record(gen=0, evals=100, fitness={'avg': 1.0, 'max': 10}) self.logbook.record(gen=0, evals=100, fitness={'avg': 1.0, 'max': 10}) print(self.logbook.stream) def 
test_one_big_chapter(self): self.logbook.record(gen=0, evals=100, fitness={'obj 1': {'avg': 1.0, 'max': 10}, 'obj 2': {'avg': 1.0, 'max': 10}}) self.logbook.record(gen=0, evals=100, fitness={'obj 1': {'avg': 1.0, 'max': 10}, 'obj 2': {'avg': 1.0, 'max': 10}}) print(self.logbook.stream) def test_no_chapters(self): self.logbook.record(gen=0, evals=100, **{'avg': 1.0, 'max': 10}) self.logbook.record(gen=0, evals=100, **{'avg': 1.0, 'max': 10}) print(self.logbook.stream) if __name__ == "__main__": suite = unittest.TestLoader().loadTestsFromTestCase(LogbookTest) unittest.TextTestRunner(verbosity=2).run(suite) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/tests/test_multiproc.py0000644000076500000240000000122514456461441016464 0ustar00runnerstaffimport multiprocessing import unittest from deap import base from deap import creator def _evalOneMax(individual): return sum(individual), def test_multiproc(): creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("Individual", list, fitness=creator.FitnessMax) toolbox = base.Toolbox() toolbox.register("evaluate", _evalOneMax) # Process Pool of 4 workers pool = multiprocessing.Pool(processes=4) toolbox.register("map", pool.map) pop = [[1]*20 for _ in range(100)] fitnesses = toolbox.map(toolbox.evaluate, pop) for ind, fit in zip(pop, fitnesses): assert fit == (sum(ind),)././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/tests/test_mutation.py0000644000076500000240000000362114456461441016310 0ustar00runnerstaffimport unittest from unittest import mock from deap.tools.mutation import mutInversion class MutationTest(unittest.TestCase): def test_mutInverstion_size_zero_chromosome_returns_unchanged_chromosome_in_tuple(self): chromosome = [] expected = [] self.assertEqual((expected,), mutInversion(chromosome)) def test_mutInversion_size_one_chromosome_returns_unchanged_chromosome_in_tuple(self): 
chromosome = ["a"] expected = ["a"] self.assertEqual((expected,), mutInversion(chromosome)) @mock.patch("random.randrange") def test_mutInversion_same_random_indices_returns_unchanged_chromosome_in_tuple(self, mock_randrange): mock_randrange.side_effect = [2, 2] chromosome = ["a", "b", "c", "d", "e"] expected = ["a", "b", "c", "d", "e"] self.assertEqual((expected,), mutInversion(chromosome)) @mock.patch("random.randrange") def test_mutInversion_difference_of_one_random_indices_returns_unchanged_chromosome_in_tuple(self, mock_randrange): mock_randrange.side_effect = [2, 3] chromosome = ["a", "b", "c", "d", "e"] expected = ["a", "b", "c", "d", "e"] self.assertEqual((expected,), mutInversion(chromosome)) @mock.patch("random.randrange") def test_mutInversion_full_length_random_indices_returns_reversed_chromosome_in_tuple(self, mock_randrange): mock_randrange.side_effect = [0, 5] chromosome = ["a", "b", "c", "d", "e"] expected = ["e", "d", "c", "b", "a"] self.assertEqual((expected,), mutInversion(chromosome)) @mock.patch("random.randrange") def test_mutInversion_general_case_returns_correctly_mutated_chromosome_in_tuple(self, mock_randrange): mock_randrange.side_effect = [1, 4] chromosome = ["a", "b", "c", "d", "e"] expected = ["a", "d", "c", "b", "e"] self.assertEqual((expected,), mutInversion(chromosome)) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/tests/test_operators.py0000644000076500000240000000205514456461441016466 0ustar00runnerstaffimport unittest from unittest import mock import random from deap.tools import crossover class TestCxOrdered(unittest.TestCase): def setUp(self): pass def test_crossover(self): a = [8, 7, 3, 4, 5, 6, 0, 2, 1, 9] b = [7, 6, 0, 1, 2, 9, 8, 4, 3, 5] expected_ap = [4, 5, 6, 1, 2, 9, 0, 8, 7, 3] expected_bp = [1, 2, 9, 4, 5, 6, 8, 3, 7, 0] with mock.patch("random.sample", return_value=[3, 5]): ap, bp = crossover.cxOrdered(a, b) self.assertSequenceEqual(expected_ap, ap) 
self.assertSequenceEqual(expected_bp, bp) def test_crossover_identical(self): i1 = list(range(100)) random.shuffle(i1) i2 = list(range(100)) random.shuffle(i2) a, b = sorted(random.sample(range(len(i1)), 2)) with mock.patch("random.sample", return_value=[a, b]): ap, bp = crossover.cxOrdered(i1, i2) self.assertSequenceEqual(sorted(ap), list(range(len(ap)))) self.assertSequenceEqual(sorted(bp), list(range(len(bp)))) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/tests/test_pickle.py0000644000076500000240000001620414456461441015720 0ustar00runnerstaff import sys import unittest import array import pickle import operator import platform import functools import numpy from deap import creator from deap import base from deap import gp from deap import tools def func(): return "True" class Pickling(unittest.TestCase): def setUp(self): creator.create("FitnessMax", base.Fitness, weights=(1.0,)) creator.create("IndList", list, fitness=creator.FitnessMax) creator.create("IndArray", array.array, typecode='f', fitness=creator.FitnessMax) creator.create("IndNDArray", numpy.ndarray, typecode='f', fitness=creator.FitnessMax) creator.create("IndTree", gp.PrimitiveTree, fitness=creator.FitnessMax) self.toolbox = base.Toolbox() self.toolbox.register("func", func) self.toolbox.register("lambda_func", lambda: "True") def tearDown(self): del creator.FitnessMax del creator.IndList del creator.IndArray del creator.IndNDArray del creator.IndTree def test_pickle_fitness(self): fitness = creator.FitnessMax() fitness.values = (1.0,) fitness_s = pickle.dumps(fitness) fitness_l = pickle.loads(fitness_s) self.assertEqual(fitness, fitness_l, "Unpickled fitness != pickled fitness") def test_pickle_ind_list(self): ind = creator.IndList([1.0, 2.0, 3.0]) ind.fitness.values = (4.0,) ind_s = pickle.dumps(ind) ind_l = pickle.loads(ind_s) self.assertEqual(ind, ind_l, "Unpickled individual list != pickled individual list") 
self.assertEqual(ind.fitness, ind_l.fitness, "Unpickled individual fitness != pickled individual fitness") def test_pickle_ind_array(self): ind = creator.IndArray([1.0, 2.0, 3.0]) ind.fitness.values = (4.0,) ind_s = pickle.dumps(ind) ind_l = pickle.loads(ind_s) self.assertEqual(ind, ind_l, "Unpickled individual array != pickled individual array") self.assertEqual(ind.fitness, ind_l.fitness, "Unpickled individual fitness != pickled individual fitness") # @unittest.skipIf(platform.python_implementation() == "PyPy", "PyPy support for pickling ndarrays is very unstable.") def test_pickle_ind_ndarray(self): ind = creator.IndNDArray([1.0, 2.0, 3.0]) ind.fitness.values = (4.0,) ind_s = pickle.dumps(ind) ind_l = pickle.loads(ind_s) self.assertTrue(all(ind == ind_l), "Unpickled individual numpy.ndarray != pickled individual numpy.ndarray") self.assertEqual(ind.fitness, ind_l.fitness, "Unpickled individual fitness != pickled individual fitness") def test_pickle_delete_ind_list(self): creator.create("TempInd", list, fitness=creator.FitnessMax) ind = creator.TempInd([1.0, 2.0, 3.0]) del creator.TempInd ind.fitness.values = (4.0,) ind_s = pickle.dumps(ind) ind_l = pickle.loads(ind_s) self.assertEqual(ind, ind_l, "Unpickled individual list != pickled individual list") self.assertEqual(ind.fitness, ind_l.fitness, "Unpickled individual fitness != pickled individual fitness") def test_pickle_tree_input(self): pset = gp.PrimitiveSetTyped("MAIN", [int], int, "IN") pset.addPrimitive(operator.add, [int, int], int) expr = gp.genFull(pset, min_=1, max_=1) ind = creator.IndTree(expr) ind.fitness.values = (1.0,) ind_s = pickle.dumps(ind, pickle.HIGHEST_PROTOCOL) ind_l = pickle.loads(ind_s) msg = "Unpickled individual %s != pickled individual %s" % (str(ind), str(ind_l)) self.assertEqual(ind, ind_l, msg) msg = "Unpickled fitness %s != pickled fitness %s" % (str(ind.fitness), str(ind_l.fitness)) self.assertEqual(ind.fitness, ind_l.fitness, msg) def test_pickle_tree_term(self): pset = 
gp.PrimitiveSetTyped("MAIN", [], int, "IN") pset.addPrimitive(operator.add, [int, int], int) pset.addTerminal(1, int) expr = gp.genFull(pset, min_=1, max_=1) ind = creator.IndTree(expr) ind.fitness.values = (1.0,) ind_s = pickle.dumps(ind, pickle.HIGHEST_PROTOCOL) ind_l = pickle.loads(ind_s) msg = "Unpickled individual %s != pickled individual %s" % (str(ind), str(ind_l)) self.assertEqual(ind, ind_l, msg) msg = "Unpickled fitness %s != pickled fitness %s" % (str(ind.fitness), str(ind_l.fitness)) self.assertEqual(ind.fitness, ind_l.fitness, msg) def test_pickle_tree_ephemeral(self): pset = gp.PrimitiveSetTyped("MAIN", [], int, "IN") pset.addPrimitive(operator.add, [int, int], int) pset.addEphemeralConstant("E1", functools.partial(int, 2), int) expr = gp.genFull(pset, min_=1, max_=1) ind = creator.IndTree(expr) ind.fitness.values = (1.0,) ind_s = pickle.dumps(ind, pickle.HIGHEST_PROTOCOL) ind_l = pickle.loads(ind_s) msg = "Unpickled individual %s != pickled individual %s" % (str(ind), str(ind_l)) self.assertEqual(ind, ind_l, msg) msg = "Unpickled fitness %s != pickled fitness %s" % (str(ind.fitness), str(ind_l.fitness)) self.assertEqual(ind.fitness, ind_l.fitness, msg) def test_pickle_population(self): ind1 = creator.IndList([1.0, 2.0, 3.0]) ind1.fitness.values = (1.0,) ind2 = creator.IndList([4.0, 5.0, 6.0]) ind2.fitness.values = (2.0,) ind3 = creator.IndList([7.0, 8.0, 9.0]) ind3.fitness.values = (3.0,) pop = [ind1, ind2, ind3] pop_s = pickle.dumps(pop) pop_l = pickle.loads(pop_s) self.assertEqual(pop[0], pop_l[0], "Unpickled individual list != pickled individual list") self.assertEqual(pop[0].fitness, pop_l[0].fitness, "Unpickled individual fitness != pickled individual fitness") self.assertEqual(pop[1], pop_l[1], "Unpickled individual list != pickled individual list") self.assertEqual(pop[1].fitness, pop_l[1].fitness, "Unpickled individual fitness != pickled individual fitness") self.assertEqual(pop[2], pop_l[2], "Unpickled individual list != pickled individual 
list") self.assertEqual(pop[2].fitness, pop_l[2].fitness, "Unpickled individual fitness != pickled individual fitness") # @unittest.skipIf(platform.python_implementation() == "PyPy", "PyPy support for pickling ndarrays (thus stats) is very unstable.") def test_pickle_logbook(self): stats = tools.Statistics() logbook = tools.Logbook() stats.register("mean", numpy.mean) record = stats.compile([1, 2, 3, 4, 5, 6, 8, 9, 10]) logbook.record(**record) logbook_s = pickle.dumps(logbook) logbook_r = pickle.loads(logbook_s) self.assertEqual(logbook, logbook_r, "Unpickled logbook != pickled logbook") @unittest.skipIf(sys.version_info < (2, 7), "Skipping test because Python version < 2.7 does not pickle partials.") def test_pickle_partial(self): func_s = pickle.dumps(self.toolbox.func) func_l = pickle.loads(func_s) self.assertEqual(self.toolbox.func(), func_l()) if __name__ == "__main__": suite = unittest.TestLoader().loadTestsFromTestCase(Pickling) unittest.TextTestRunner(verbosity=2).run(suite) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1689936673.0 deap-1.4.1/tests/test_statistics.py0000644000076500000240000000167514456461441016651 0ustar00runnerstafffrom operator import itemgetter import unittest import numpy from deap import tools class LogbookTest(unittest.TestCase): def test_statistics_compile(self): s = tools.Statistics() s.register("mean", numpy.mean) s.register("max", max) res = s.compile([1, 2, 3, 4]) self.assertDictEqual(res, {'max': 4, 'mean': 2.5}) res = s.compile([5, 6, 7, 8]) self.assertDictEqual(res, {'mean': 6.5, 'max': 8}) def test_multi_statistics_compile(self): len_stats = tools.Statistics(key=len) itm0_stats = tools.Statistics(key=itemgetter(0)) mstats = tools.MultiStatistics(length=len_stats, item=itm0_stats) mstats.register("mean", numpy.mean, axis=0) mstats.register("max", numpy.max, axis=0) res = mstats.compile([[0.0, 1.0, 1.0, 5.0], [2.0, 5.0]]) self.assertDictEqual(res, {'length': {'mean': 3.0, 'max': 4}, 'item': 
{'mean': 1.0, 'max': 2.0}})