././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1744923773.4954638 moarchiving-1.0.0/0000755000076500000240000000000015000266175013217 5ustar00hansenstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923733.0 moarchiving-1.0.0/LICENSE0000644000076500000240000000247015000266125014222 0ustar00hansenstaffThe BSD 3-Clause License Copyright (c) 2024 Inria Authors: Nikolaus Hansen, Nace Sever, Mila Nedić, Tea Tušar, 2024 Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright and authors notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright and authors notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holder nor the names of its contributors nor the authors names may be used to endorse or promote products derived from this software without specific prior written permission. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1744923773.4949694 moarchiving-1.0.0/PKG-INFO0000644000076500000240000003253715000266175014326 0ustar00hansenstaffMetadata-Version: 2.4 Name: moarchiving Version: 1.0.0 Summary: This package implements a non-dominated archive for 2, 3 or 4 objectives with hypervolume indicator and uncrowded hypervolume improvement computation. 
Author: Nace Sever, Mila Nedic, Tea Tusar Author-email: Nikolaus Hansen Project-URL: Homepage, https://github.com/cma-es/moarchiving Keywords: multi-objective,optimization Classifier: Development Status :: 4 - Beta Classifier: Environment :: Console Classifier: Intended Audience :: Education Classifier: Intended Audience :: Other Audience Classifier: Intended Audience :: Science/Research Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Topic :: Scientific/Engineering Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence Classifier: Topic :: Scientific/Engineering :: Mathematics Description-Content-Type: text/markdown License-File: LICENSE Requires-Dist: sortedcontainers>=2.4.0 Provides-Extra: arbitrary-precision Requires-Dist: fractions; extra == "arbitrary-precision" Dynamic: license-file # Introduction [This package](https://cma-es.github.io/moarchiving/moarchiving-apidocs/index.html) implements a multi-objective non-dominated archive for 2, 3 or 4 objectives, providing easy and fast access to multiple hypervolume indicators: - the hypervolume of the entire archive, - the contributing hypervolume of each element, - the [uncrowded hypervolume improvement](https://doi.org/10.1145/3321707.3321852) (see also [here](https://arxiv.org/abs/1904.08823)) of any given point in the objective space, and - the uncrowded hypervolume of the (unpruned) archive, here called [hypervolume plus](https://cma-es.github.io/moarchiving/moarchiving-apidocs/moarchiving.moarchiving.BiobjectiveNondominatedSortedList.html#hypervolume_plus). Additionally, the package provides a constrained version of the archive, which allows to store points with constraints. The source code is available [on GitHub](https://github.com/CMA-ES/moarchiving). ## Installation On a system shell, either like ``` pip install moarchiving ``` or from GitHub, for example ``` pip install git+https://github.com/CMA-ES/moarchiving.git@development ``` installing from the `development` branch. ## Testing ``` python -m moarchiving.test ``` on a system shell should output something like ``` doctest.testmod() TestResults(failed=0, attempted=90) ... OK unittest.TextTestRunner().run(unittest.TestLoader().loadTestsFromModule()) ....... ---------------------------------------------------------------------- Ran 7 tests in 0.001s ``` ## Links - [API documentation](https://cma-es.github.io/moarchiving/moarchiving-apidocs/index.html) - [This page including performance test examples](https://cma-es.github.io/moarchiving/) - [Code on Github](https://github.com/CMA-ES/moarchiving) ## Details `moarchiving` with 2 objectives uses the [`fractions.Fraction`](https://docs.python.org/3/library/fractions.html) type to avoid rounding errors when computing hypervolume differences, but its usage can also be easily switched off by assigning the respective class attributes `hypervolume_computation_float_type` and `hypervolume_final_float_type`. The Fraction type can become prohibitively computationally expensive with increasing precision. The implementation of the two-objective archive is heavily based on the [`bisect`](https://docs.python.org/3/library/bisect.html) module, while in three and four objectives it is based on the [`sortedcontainers`](https://pypi.org/project/sortedcontainers/) module. 
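For example, the `Fraction`-based exact hypervolume computation of the two-objective archive class can be turned off by assigning these class attributes (a minimal sketch; the objective vectors are arbitrary):

```python
from moarchiving import BiobjectiveNondominatedSortedList

# trade exact hypervolume arithmetic for speed
BiobjectiveNondominatedSortedList.hypervolume_computation_float_type = float
BiobjectiveNondominatedSortedList.hypervolume_final_float_type = float

archive = BiobjectiveNondominatedSortedList([[1, 2], [2, 1]], reference_point=[3, 3])
print(type(archive.hypervolume))  # now `float` instead of `fractions.Fraction`
```

Usage example 10 below shows the same switch via the attributes of the `get_mo_archive` convenience function.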
## Releases - 1.0.0 addition of MOArchive classes for 3 and 4 objectives, as well as a class for handling solutions to constrained problems - 0.7.0 reimplementation of `BiobjectiveNondominatedSortedList.hypervolume_improvement` by extracting a sublist first. - 0.6.0 the `infos` attribute is a `list` with corresponding (arbitrary) information, e.g. for keeping the respective solutions. - 0.5.3 fixed assertion error when not using `fractions.Fraction` - 0.5.2 first published version # Usage examples 1. [Initialization](#1-initialization) 2. [Constrained MOArchive](#2-constrained-moarchive) 3. [Accessing solution information](#3-accessing-solution-information) 4. [Adding solutions](#4-adding-solutions) 5. [Archive size](#5-archive-size) 6. [Performance indicators](#6-performance-indicators) 7. [Contributing hypervolumes](#7-contributing-hypervolumes) 8. [Hypervolume improvement](#8-hypervolume-improvement) 9. [Distance to the Pareto front](#9-distance-to-the-pareto-front) 10. [Enabling or disabling fractions](#10-enabling-or-disabling-fractions) 11. [Additional functions](#11-additional-functions) 12. [Visualization of indicator values](#12-visualization-of-indicator-values) 13. [Performance tests](#13-performance-tests) ### 1. Initialization The MOArchive object can be created using the `get_mo_archive` function by providing a list of objective values, a reference point, or at least the number of objectives. Further solutions can be added using `add` or `add_list` methods, but the reference point cannot be changed once the instance is created. A list of information strings can be provided for each element, which will be stored as long as the corresponding element remains in the archive (e.g., the x values of the element). At any time, the list of non-dominated elements and their corresponding information can be accessed. ```python from moarchiving import get_mo_archive moa2obj = get_mo_archive([[1, 5], [2, 3], [4, 5], [5, 0]], reference_point=[10, 10], infos=["a", "b", "c", "d"]) moa3obj = get_mo_archive([[1, 2, 3], [3, 2, 1], [3, 3, 0], [2, 2, 1]], [10, 10, 10], ["a", "b", "c", "d"]) moa4obj = get_mo_archive([[1, 2, 3, 4], [1, 3, 4, 5], [4, 3, 2, 1], [1, 3, 0, 1]], reference_point=[10, 10, 10, 10], infos=["a", "b", "c", "d"]) print("points in the 2 objective archive:", list(moa2obj)) print("points in the 3 objective archive:", list(moa3obj)) print("points in the 4 objective archive:", list(moa4obj)) ``` points in the 2 objective archive: [[1, 5], [2, 3], [5, 0]] points in the 3 objective archive: [[3, 3, 0], [2, 2, 1], [1, 2, 3]] points in the 4 objective archive: [[1, 3, 0, 1], [1, 2, 3, 4]] MOArchive objects can also be initialized empty. ```python moa = get_mo_archive(reference_point=[4, 4, 4]) print("points in the empty archive:", list(moa)) ``` points in the empty archive: [] ### 2. Constrained MOArchive Constrained MOArchive supports all the functionalities of a non-constrained MOArchive, with the added capability of handling constraints when adding or initializing the archive. In addition to the objective values of a solution, constraint values must be provided in the form of a list or a number. A solution is deemed feasible when all its constraint values are less than or equal to zero. 
```python from moarchiving import get_cmo_archive cmoa = get_cmo_archive([[1, 2, 3], [1, 3, 4], [4, 3, 2], [1, 3, 0]], [[3, 0], [0, 0], [0, 0], [0, 1]], reference_point=[5, 5, 5], infos=["a", "b", "c", "d"]) print("points in the archive:", list(cmoa)) ``` points in the archive: [[4, 3, 2], [1, 3, 4]] ### 3. Accessing solution information `archive.infos` is used to get the information on solutions in the archive. ```python # infos of the previously defined empty archive print("infos of the empty archive", moa.infos) print("infos of the constrained archive", cmoa.infos) ``` infos of the empty archive [] infos of the constrained archive ['c', 'b'] ### 4. Adding solutions Solutions can be added to the MOArchive at any time using the `add` function (for a single solution) or the `add_list` function (for multiple solutions). ```python moa.add([1, 2, 3], "a") print("points:", list(moa)) print("infos:", moa.infos) moa.add_list([[3, 2, 1], [2, 3, 2], [2, 2, 2]], ["b", "c", "d"]) print("points:", list(moa)) print("infos:", moa.infos) ``` points: [[1, 2, 3]] infos: ['a'] points: [[3, 2, 1], [2, 2, 2], [1, 2, 3]] infos: ['b', 'd', 'a'] When adding to the constrained archive, constraint values must be added as well. ```python cmoa.add_list([[3, 3, 3], [1, 1, 1]], [[0, 0], [42, 0]], ["e", "f"]) print("points:", list(cmoa)) print("infos:", cmoa.infos) ``` points: [[4, 3, 2], [3, 3, 3], [1, 3, 4]] infos: ['c', 'e', 'b'] ### 5. Archive size The MOArchive implements some functionality of a list (in the 2 objective case, it actually extends the `list` class, though this is not the case in 3 and 4 objectives). In particular, it includes the `len` method to get the number of solutions in the archive as well as the `in` keyword to check if a point is in the archive. ```python print("Points in the archive:", list(moa)) print("Length of the archive:", len(moa)) print("[2, 2, 2] in moa:", [2, 2, 2] in moa) print("[3, 2, 0] in moa:", [3, 2, 0] in moa) ``` Points in the archive: [[3, 2, 1], [2, 2, 2], [1, 2, 3]] Length of the archive: 3 [2, 2, 2] in moa: True [3, 2, 0] in moa: False ### 6. Performance indicators An archive provides the following performance indicators: - `hypervolume` - `hypervolume_plus`, providing additionally the closest distance to the reference area for an empty archive, see [here](https://doi.org/10.1145/3321707.3321852) and [here](https://doi.org/10.1109/TEVC.2022.3210897) - `hypervolume_plus_constr` (for CMOArchive), based on, but not completely equal to the one defined [here](https://doi.org/10.1016/j.ins.2022.05.106) Indicators are defined for maximization (the original `hypervolume_plus_constr` indicator is multiplied by -1). When the archive is not empty, all the indicators are positive and have the same value. As the archive does not (yet) support an ideal point, the values of indicators are not normalized. ```python print("Hypervolume of the archive:", moa.hypervolume) print("Hypervolume plus of the archive:", moa.hypervolume_plus) ``` Hypervolume of the archive: 12 Hypervolume plus of the archive: 12 In case of a constrained MOArchive, the `hypervolume_plus_constr` attribute can be accessed as well. 
```python
print("Hypervolume of the constrained archive:", cmoa.hypervolume)
print("Hypervolume plus of the constrained archive:", cmoa.hypervolume_plus)
print("Hypervolume plus constr of the constrained archive:", cmoa.hypervolume_plus_constr)
```

    Hypervolume of the constrained archive: 14
    Hypervolume plus of the constrained archive: 14
    Hypervolume plus constr of the constrained archive: 14

### 7. Contributing hypervolumes

The `contributing_hypervolumes` attribute provides a list of hypervolume contributions for each point of the archive. Alternatively, the contribution for a single point can be computed using the `contributing_hypervolume(point)` method.

```python
for i, objectives in enumerate(moa):
    assert moa.contributing_hypervolume(objectives) == moa.contributing_hypervolumes[i]
    print("contributing hv of point", objectives, "is", moa.contributing_hypervolume(objectives))

print("All contributing hypervolumes:", moa.contributing_hypervolumes)
```

    contributing hv of point [3, 2, 1] is 2
    contributing hv of point [2, 2, 2] is 2
    contributing hv of point [1, 2, 3] is 2
    All contributing hypervolumes: [Fraction(2, 1), Fraction(2, 1), Fraction(2, 1)]

### 8. Hypervolume improvement

The `hypervolume_improvement(point)` method returns the improvement in hypervolume if the point were added to the archive.

```python
point = [1, 3, 0]
print(f"hypervolume before adding {point}: {moa.hypervolume}")
print(f"hypervolume improvement of point {point}: {moa.hypervolume_improvement(point)}")
moa.add(point)
print(f"hypervolume after adding {point}: {moa.hypervolume}")
```

    hypervolume before adding [1, 3, 0]: 12
    hypervolume improvement of point [1, 3, 0]: 6
    hypervolume after adding [1, 3, 0]: 18

### 9. Distance to the empirical Pareto front

The `distance_to_pareto_front(point)` method returns the distance between the given point and the Pareto front.

```python
print(f"Current archive: {list(moa)}")
print("Distance of [3, 2, 1] to pareto front:", moa.distance_to_pareto_front([3, 2, 1]))
print("Distance of [3, 3, 3] to pareto front:", moa.distance_to_pareto_front([3, 3, 3]))
```

    Current archive: [[1, 3, 0], [3, 2, 1], [2, 2, 2], [1, 2, 3]]
    Distance of [3, 2, 1] to pareto front: 0.0
    Distance of [3, 3, 3] to pareto front: 1.0

### 10. Enabling or disabling fractions

To avoid loss of precision, fractions are used by default. This can be changed to floats by setting the `hypervolume_final_float_type` and `hypervolume_computation_float_type` function attributes.
```python import fractions get_mo_archive.hypervolume_computation_float_type = fractions.Fraction get_mo_archive.hypervolume_final_float_type = fractions.Fraction moa3_fr = get_mo_archive([[1, 2, 3], [2, 1, 3], [3, 3, 1.32], [1.3, 1.3, 3], [1.7, 1.1, 2]], reference_point=[4, 4, 4]) print(moa3_fr.hypervolume) get_mo_archive.hypervolume_computation_float_type = float get_mo_archive.hypervolume_final_float_type = float moa3_nofr = get_mo_archive([[1, 2, 3], [2, 1, 3], [3, 3, 1.32], [1.3, 1.3, 3], [1.7, 1.1, 2]], reference_point=[4, 4, 4]) print(moa3_nofr.hypervolume) ``` 161245156349030777798724819133399/10141204801825835211973625643008 15.899999999999999 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923759.0 moarchiving-1.0.0/README.md0000644000076500000240000003030315000266157014475 0ustar00hansenstaff# Introduction [This package](https://cma-es.github.io/moarchiving/moarchiving-apidocs/index.html) implements a multi-objective non-dominated archive for 2, 3 or 4 objectives, providing easy and fast access to multiple hypervolume indicators: - the hypervolume of the entire archive, - the contributing hypervolume of each element, - the [uncrowded hypervolume improvement](https://doi.org/10.1145/3321707.3321852) (see also [here](https://arxiv.org/abs/1904.08823)) of any given point in the objective space, and - the uncrowded hypervolume of the (unpruned) archive, here called [hypervolume plus](https://cma-es.github.io/moarchiving/moarchiving-apidocs/moarchiving.moarchiving.BiobjectiveNondominatedSortedList.html#hypervolume_plus). Additionally, the package provides a constrained version of the archive, which allows to store points with constraints. The source code is available [on GitHub](https://github.com/CMA-ES/moarchiving). ## Installation On a system shell, either like ``` pip install moarchiving ``` or from GitHub, for example ``` pip install git+https://github.com/CMA-ES/moarchiving.git@development ``` installing from the `development` branch. ## Testing ``` python -m moarchiving.test ``` on a system shell should output something like ``` doctest.testmod() TestResults(failed=0, attempted=90) ... OK unittest.TextTestRunner().run(unittest.TestLoader().loadTestsFromModule()) ....... ---------------------------------------------------------------------- Ran 7 tests in 0.001s ``` ## Links - [API documentation](https://cma-es.github.io/moarchiving/moarchiving-apidocs/index.html) - [This page including performance test examples](https://cma-es.github.io/moarchiving/) - [Code on Github](https://github.com/CMA-ES/moarchiving) ## Details `moarchiving` with 2 objectives uses the [`fractions.Fraction`](https://docs.python.org/3/library/fractions.html) type to avoid rounding errors when computing hypervolume differences, but its usage can also be easily switched off by assigning the respective class attributes `hypervolume_computation_float_type` and `hypervolume_final_float_type`. The Fraction type can become prohibitively computationally expensive with increasing precision. The implementation of the two-objective archive is heavily based on the [`bisect`](https://docs.python.org/3/library/bisect.html) module, while in three and four objectives it is based on the [`sortedcontainers`](https://pypi.org/project/sortedcontainers/) module. 
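For example, the `Fraction`-based exact hypervolume computation of the two-objective archive class can be turned off by assigning these class attributes (a minimal sketch; the objective vectors are arbitrary):

```python
from moarchiving import BiobjectiveNondominatedSortedList

# trade exact hypervolume arithmetic for speed
BiobjectiveNondominatedSortedList.hypervolume_computation_float_type = float
BiobjectiveNondominatedSortedList.hypervolume_final_float_type = float

archive = BiobjectiveNondominatedSortedList([[1, 2], [2, 1]], reference_point=[3, 3])
print(type(archive.hypervolume))  # now `float` instead of `fractions.Fraction`
```

Usage example 10 below shows the same switch via the attributes of the `get_mo_archive` convenience function.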
## Releases - 1.0.0 addition of MOArchive classes for 3 and 4 objectives, as well as a class for handling solutions to constrained problems - 0.7.0 reimplementation of `BiobjectiveNondominatedSortedList.hypervolume_improvement` by extracting a sublist first. - 0.6.0 the `infos` attribute is a `list` with corresponding (arbitrary) information, e.g. for keeping the respective solutions. - 0.5.3 fixed assertion error when not using `fractions.Fraction` - 0.5.2 first published version # Usage examples 1. [Initialization](#1-initialization) 2. [Constrained MOArchive](#2-constrained-moarchive) 3. [Accessing solution information](#3-accessing-solution-information) 4. [Adding solutions](#4-adding-solutions) 5. [Archive size](#5-archive-size) 6. [Performance indicators](#6-performance-indicators) 7. [Contributing hypervolumes](#7-contributing-hypervolumes) 8. [Hypervolume improvement](#8-hypervolume-improvement) 9. [Distance to the Pareto front](#9-distance-to-the-pareto-front) 10. [Enabling or disabling fractions](#10-enabling-or-disabling-fractions) 11. [Additional functions](#11-additional-functions) 12. [Visualization of indicator values](#12-visualization-of-indicator-values) 13. [Performance tests](#13-performance-tests) ### 1. Initialization The MOArchive object can be created using the `get_mo_archive` function by providing a list of objective values, a reference point, or at least the number of objectives. Further solutions can be added using `add` or `add_list` methods, but the reference point cannot be changed once the instance is created. A list of information strings can be provided for each element, which will be stored as long as the corresponding element remains in the archive (e.g., the x values of the element). At any time, the list of non-dominated elements and their corresponding information can be accessed. ```python from moarchiving import get_mo_archive moa2obj = get_mo_archive([[1, 5], [2, 3], [4, 5], [5, 0]], reference_point=[10, 10], infos=["a", "b", "c", "d"]) moa3obj = get_mo_archive([[1, 2, 3], [3, 2, 1], [3, 3, 0], [2, 2, 1]], [10, 10, 10], ["a", "b", "c", "d"]) moa4obj = get_mo_archive([[1, 2, 3, 4], [1, 3, 4, 5], [4, 3, 2, 1], [1, 3, 0, 1]], reference_point=[10, 10, 10, 10], infos=["a", "b", "c", "d"]) print("points in the 2 objective archive:", list(moa2obj)) print("points in the 3 objective archive:", list(moa3obj)) print("points in the 4 objective archive:", list(moa4obj)) ``` points in the 2 objective archive: [[1, 5], [2, 3], [5, 0]] points in the 3 objective archive: [[3, 3, 0], [2, 2, 1], [1, 2, 3]] points in the 4 objective archive: [[1, 3, 0, 1], [1, 2, 3, 4]] MOArchive objects can also be initialized empty. ```python moa = get_mo_archive(reference_point=[4, 4, 4]) print("points in the empty archive:", list(moa)) ``` points in the empty archive: [] ### 2. Constrained MOArchive Constrained MOArchive supports all the functionalities of a non-constrained MOArchive, with the added capability of handling constraints when adding or initializing the archive. In addition to the objective values of a solution, constraint values must be provided in the form of a list or a number. A solution is deemed feasible when all its constraint values are less than or equal to zero. 
```python from moarchiving import get_cmo_archive cmoa = get_cmo_archive([[1, 2, 3], [1, 3, 4], [4, 3, 2], [1, 3, 0]], [[3, 0], [0, 0], [0, 0], [0, 1]], reference_point=[5, 5, 5], infos=["a", "b", "c", "d"]) print("points in the archive:", list(cmoa)) ``` points in the archive: [[4, 3, 2], [1, 3, 4]] ### 3. Accessing solution information `archive.infos` is used to get the information on solutions in the archive. ```python # infos of the previously defined empty archive print("infos of the empty archive", moa.infos) print("infos of the constrained archive", cmoa.infos) ``` infos of the empty archive [] infos of the constrained archive ['c', 'b'] ### 4. Adding solutions Solutions can be added to the MOArchive at any time using the `add` function (for a single solution) or the `add_list` function (for multiple solutions). ```python moa.add([1, 2, 3], "a") print("points:", list(moa)) print("infos:", moa.infos) moa.add_list([[3, 2, 1], [2, 3, 2], [2, 2, 2]], ["b", "c", "d"]) print("points:", list(moa)) print("infos:", moa.infos) ``` points: [[1, 2, 3]] infos: ['a'] points: [[3, 2, 1], [2, 2, 2], [1, 2, 3]] infos: ['b', 'd', 'a'] When adding to the constrained archive, constraint values must be added as well. ```python cmoa.add_list([[3, 3, 3], [1, 1, 1]], [[0, 0], [42, 0]], ["e", "f"]) print("points:", list(cmoa)) print("infos:", cmoa.infos) ``` points: [[4, 3, 2], [3, 3, 3], [1, 3, 4]] infos: ['c', 'e', 'b'] ### 5. Archive size The MOArchive implements some functionality of a list (in the 2 objective case, it actually extends the `list` class, though this is not the case in 3 and 4 objectives). In particular, it includes the `len` method to get the number of solutions in the archive as well as the `in` keyword to check if a point is in the archive. ```python print("Points in the archive:", list(moa)) print("Length of the archive:", len(moa)) print("[2, 2, 2] in moa:", [2, 2, 2] in moa) print("[3, 2, 0] in moa:", [3, 2, 0] in moa) ``` Points in the archive: [[3, 2, 1], [2, 2, 2], [1, 2, 3]] Length of the archive: 3 [2, 2, 2] in moa: True [3, 2, 0] in moa: False ### 6. Performance indicators An archive provides the following performance indicators: - `hypervolume` - `hypervolume_plus`, providing additionally the closest distance to the reference area for an empty archive, see [here](https://doi.org/10.1145/3321707.3321852) and [here](https://doi.org/10.1109/TEVC.2022.3210897) - `hypervolume_plus_constr` (for CMOArchive), based on, but not completely equal to the one defined [here](https://doi.org/10.1016/j.ins.2022.05.106) Indicators are defined for maximization (the original `hypervolume_plus_constr` indicator is multiplied by -1). When the archive is not empty, all the indicators are positive and have the same value. As the archive does not (yet) support an ideal point, the values of indicators are not normalized. ```python print("Hypervolume of the archive:", moa.hypervolume) print("Hypervolume plus of the archive:", moa.hypervolume_plus) ``` Hypervolume of the archive: 12 Hypervolume plus of the archive: 12 In case of a constrained MOArchive, the `hypervolume_plus_constr` attribute can be accessed as well. 
```python
print("Hypervolume of the constrained archive:", cmoa.hypervolume)
print("Hypervolume plus of the constrained archive:", cmoa.hypervolume_plus)
print("Hypervolume plus constr of the constrained archive:", cmoa.hypervolume_plus_constr)
```

    Hypervolume of the constrained archive: 14
    Hypervolume plus of the constrained archive: 14
    Hypervolume plus constr of the constrained archive: 14

### 7. Contributing hypervolumes

The `contributing_hypervolumes` attribute provides a list of hypervolume contributions for each point of the archive. Alternatively, the contribution for a single point can be computed using the `contributing_hypervolume(point)` method.

```python
for i, objectives in enumerate(moa):
    assert moa.contributing_hypervolume(objectives) == moa.contributing_hypervolumes[i]
    print("contributing hv of point", objectives, "is", moa.contributing_hypervolume(objectives))

print("All contributing hypervolumes:", moa.contributing_hypervolumes)
```

    contributing hv of point [3, 2, 1] is 2
    contributing hv of point [2, 2, 2] is 2
    contributing hv of point [1, 2, 3] is 2
    All contributing hypervolumes: [Fraction(2, 1), Fraction(2, 1), Fraction(2, 1)]

### 8. Hypervolume improvement

The `hypervolume_improvement(point)` method returns the improvement in hypervolume if the point were added to the archive.

```python
point = [1, 3, 0]
print(f"hypervolume before adding {point}: {moa.hypervolume}")
print(f"hypervolume improvement of point {point}: {moa.hypervolume_improvement(point)}")
moa.add(point)
print(f"hypervolume after adding {point}: {moa.hypervolume}")
```

    hypervolume before adding [1, 3, 0]: 12
    hypervolume improvement of point [1, 3, 0]: 6
    hypervolume after adding [1, 3, 0]: 18

### 9. Distance to the empirical Pareto front

The `distance_to_pareto_front(point)` method returns the distance between the given point and the Pareto front.

```python
print(f"Current archive: {list(moa)}")
print("Distance of [3, 2, 1] to pareto front:", moa.distance_to_pareto_front([3, 2, 1]))
print("Distance of [3, 3, 3] to pareto front:", moa.distance_to_pareto_front([3, 3, 3]))
```

    Current archive: [[1, 3, 0], [3, 2, 1], [2, 2, 2], [1, 2, 3]]
    Distance of [3, 2, 1] to pareto front: 0.0
    Distance of [3, 3, 3] to pareto front: 1.0

### 10. Enabling or disabling fractions

To avoid loss of precision, fractions are used by default. This can be changed to floats by setting the `hypervolume_final_float_type` and `hypervolume_computation_float_type` function attributes.
```python import fractions get_mo_archive.hypervolume_computation_float_type = fractions.Fraction get_mo_archive.hypervolume_final_float_type = fractions.Fraction moa3_fr = get_mo_archive([[1, 2, 3], [2, 1, 3], [3, 3, 1.32], [1.3, 1.3, 3], [1.7, 1.1, 2]], reference_point=[4, 4, 4]) print(moa3_fr.hypervolume) get_mo_archive.hypervolume_computation_float_type = float get_mo_archive.hypervolume_final_float_type = float moa3_nofr = get_mo_archive([[1, 2, 3], [2, 1, 3], [3, 3, 1.32], [1.3, 1.3, 3], [1.7, 1.1, 2]], reference_point=[4, 4, 4]) print(moa3_nofr.hypervolume) ``` 161245156349030777798724819133399/10141204801825835211973625643008 15.899999999999999 ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1744923773.4899652 moarchiving-1.0.0/moarchiving/0000755000076500000240000000000015000266175015525 5ustar00hansenstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923733.0 moarchiving-1.0.0/moarchiving/__init__.py0000644000076500000240000000260015000266125017627 0ustar00hansenstaff# -*- coding: utf-8 -*- """This package implements a multi-objective non-dominated archive for 2, 3 or 4 objectives, providing easy and fast access to multiple hypervolume indicators: - the hypervolume of the entire archive, - the contributing hypervolume of each element, - the uncrowded hypervolume improvement (https://doi.org/10.1145/3321707.3321852, https://arxiv.org/abs/1904.08823) of any given point in the objective space, and - the uncrowded hypervolume of the (unpruned) archive, here called hypervolume plus (see `BiobjectiveNondominatedSortedList.hypervolume_plus`). Additionally, the package provides a constrained version of the archive, which allows to store points with constraints. The source code is available at https://github.com/CMA-ES/moarchiving Authors: Nikolaus Hansen, Nace Sever, Mila Nedić, Tea Tušar, 2024 License: BSD 3-Clause, see LICENSE file. """ __author__ = "Nikolaus Hansen, Nace Sever, Mila Nedic, Tea Tusar" __license__ = "BSD 3-clause" __version__ = "1.0.0" from .get_archive import get_mo_archive from .get_archive import get_cmo_archive from .moarchiving import BiobjectiveNondominatedSortedList from .moarchiving import BiobjectiveNondominatedSortedList as MOArchive2obj from .moarchiving3obj import MOArchive3obj from .moarchiving4obj import MOArchive4obj from .constrained_moarchive import CMOArchive # from . import tests # creates a circular import? ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923733.0 moarchiving-1.0.0/moarchiving/__main__.py0000644000076500000240000000011315000266125017605 0ustar00hansenstaffprint('use ``python -m moarchiving.test`` to test the moarchiving module') ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923733.0 moarchiving-1.0.0/moarchiving/constrained_moarchive.py0000644000076500000240000002743315000266125022451 0ustar00hansenstaff# -*- coding: utf-8 -*- """ Module for the CMOArchive class, which provides additional functionality for constrained multi-objective optimization to the MOArchive classes, while keeping the same interface. 
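
A minimal usage sketch (the objective and constraint values are illustrative only):

>>> from moarchiving.get_archive import get_cmo_archive
>>> archive = get_cmo_archive(reference_point=[5, 5], tau=1)
>>> archive.add([3, 4], [0, 0])  # feasible: all constraint values <= 0
>>> archive.add([2, 2], [1, 0])  # infeasible: first constraint value > 0, hence not stored
>>> list(archive)
[[3, 4]]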
""" from moarchiving.moarchiving import BiobjectiveNondominatedSortedList as MOArchive2obj from moarchiving.moarchiving3obj import MOArchive3obj from moarchiving.moarchiving4obj import MOArchive4obj import warnings as _warnings try: import fractions except ImportError: _warnings.warn('`fractions` module not installed, arbitrary precision hypervolume computation not available') inf = float('inf') class CMOArchive: """ Class CMOArchive provides additional functionality for constrained multi-objective optimization to the MOArchive classes, while keeping the same interface. """ try: hypervolume_final_float_type = fractions.Fraction hypervolume_computation_float_type = fractions.Fraction except: hypervolume_final_float_type = float hypervolume_computation_float_type = float def __init__(self, list_of_f_vals=None, list_of_g_vals=None, reference_point=None, infos=None, n_obj=None, tau=1, hypervolume_final_float_type=None, hypervolume_computation_float_type=None): """ Initialize a CMOArchive object. Additionally to the list of objective values `list_of_f_vals`, also list of constraint vectors `list_of_g_vals` should be provided. The reference point is used for the hypervolume computation and pruning of the archive. The list of additional information `infos` can be used to store additional information for each objective vector. Tau is a threshold that is used for computing the indicator. """ hypervolume_final_float_type = CMOArchive.hypervolume_final_float_type \ if hypervolume_final_float_type is None else hypervolume_final_float_type hypervolume_computation_float_type = CMOArchive.hypervolume_computation_float_type \ if hypervolume_computation_float_type is None else hypervolume_computation_float_type if n_obj == 2: self.archive = MOArchive2obj( reference_point=reference_point, hypervolume_final_float_type=hypervolume_final_float_type, hypervolume_computation_float_type=hypervolume_computation_float_type) elif n_obj == 3: self.archive = MOArchive3obj( reference_point=reference_point, hypervolume_final_float_type=hypervolume_final_float_type, hypervolume_computation_float_type=hypervolume_computation_float_type) elif n_obj == 4: self.archive = MOArchive4obj( reference_point=reference_point, hypervolume_final_float_type=hypervolume_final_float_type, hypervolume_computation_float_type=hypervolume_computation_float_type) self.tau = tau self.n_obj = n_obj self._hypervolume_plus_constr = -inf if list_of_f_vals is not None: self.add_list(list_of_f_vals, list_of_g_vals, infos) def __iter__(self): """ Return an iterator over the objective vectors in the archive. """ return iter(self.archive) def __len__(self): """ Return the number of objective vectors in the archive. """ return len(self.archive) def add(self, f_vals, g_vals, info=None): """ Add the objective vector f_vals with corresponding constraints to the archive if it is feasible. If no feasible solution was found yet, also update the indicator. 
>>> from moarchiving.get_archive import get_cmo_archive >>> moa = get_cmo_archive(reference_point=[5, 5], tau=10) >>> moa.add([4, 4], 0) >>> list(moa) [[4, 4]] >>> moa.add([3, 4], 1) >>> list(moa) [[4, 4]] >>> moa.add([2, 2], 0) >>> list(moa) [[2, 2]] """ try: constraint_violation = sum([max(0, g) for g in g_vals]) except TypeError: constraint_violation = max(g_vals, 0) if constraint_violation > 0: if (self.archive.reference_point is not None and constraint_violation + self.tau < -self._hypervolume_plus_constr): self._hypervolume_plus_constr = -(constraint_violation + self.tau) else: self.archive.add(f_vals, info) if self.archive.reference_point is not None: self._hypervolume_plus_constr = max(self.archive._hypervolume_plus, -self.tau) def add_list(self, list_of_f_vals, list_of_g_vals, infos=None): """ Add a list of objective vectors f_vals with corresponding constraints vectors g_vals and infos to the archive. >>> from moarchiving.get_archive import get_cmo_archive >>> moa = get_cmo_archive(reference_point=[5, 5], tau=10) >>> moa.add_list([[4, 4], [3, 3], [2, 2]], [0, 1, 0]) >>> list(moa) [[2, 2]] >>> moa.add_list([[1, 6], [1, 3], [3, 0]], [[0], [0], [10]]) >>> list(moa) [[1, 3], [2, 2]] """ if infos is None: infos = [None] * len(list_of_f_vals) if self._hypervolume_plus_constr < 0: for obj, cons, info in zip(list_of_f_vals, list_of_g_vals, infos): self.add(obj, cons, info) else: try: list_of_g_vals = [sum([max(g, 0) for g in g_vals]) for g_vals in list_of_g_vals] except TypeError: list_of_g_vals = [max(g_vals, 0) for g_vals in list_of_g_vals] list_of_f_vals = [f_vals for f_vals, g_vals in zip(list_of_f_vals, list_of_g_vals) if g_vals == 0] infos = [info for info, g_vals in zip(infos, list_of_g_vals) if g_vals == 0] self.archive.add_list(list(list_of_f_vals), list(infos)) self._hypervolume_plus_constr = self.archive._hypervolume_plus def remove(self, f_vals): """ Remove a feasible point with objective vector f_vals from the archive. >>> from moarchiving.get_archive import get_cmo_archive >>> moa = get_cmo_archive([[2, 3], [1, 4], [4, 1]], [0, 0, 0], reference_point=[5, 5]) >>> list(moa) [[1, 4], [2, 3], [4, 1]] >>> moa.remove([2, 3]) >>> list(moa) [[1, 4], [4, 1]] """ info = self.archive.remove(f_vals) self._hypervolume_plus_constr = self.archive._hypervolume_plus return info @property def hypervolume(self): """ Return the hypervolume indicator. """ return self.archive.hypervolume @property def hypervolume_plus(self): """ Return the hypervolume_plus indicator. """ return self.archive.hypervolume_plus @property def hypervolume_plus_constr(self): """ Return the hypervolume_plus_constr (icmop) indicator. """ if self.archive.reference_point is None: raise ValueError("to compute the hypervolume_plus_constr indicator a reference" " point is needed (must be given initially)") return self._hypervolume_plus_constr @property def contributing_hypervolumes(self): """ Return the hypervolume contributions of each point in the archive. """ return self.archive.contributing_hypervolumes @property def infos(self): """ Return the list of additional information for each point in the archive. """ return self.archive.infos def compute_hypervolume(self, reference_point=None): """ Compute the hypervolume of the archive. """ if self.n_obj == 2: return self.archive.compute_hypervolume(reference_point) return self.archive.compute_hypervolume() def contributing_hypervolume(self, f_vals): """ Compute the hypervolume contribution of the objective vector f_vals to the archive. 
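        A small illustrative example (arbitrary values; both solutions are feasible):

        >>> from moarchiving.get_archive import get_cmo_archive
        >>> moa = get_cmo_archive([[2, 3, 3], [3, 2, 3]], [0, 0], reference_point=[5, 5, 5])
        >>> int(moa.contributing_hypervolume([2, 3, 3]))
        4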
""" return self.archive.contributing_hypervolume(f_vals) def copy(self): """ Return a deep copy of the CMOArchive object. """ new_cmoa = CMOArchive(reference_point=self.archive.reference_point, tau=self.tau) new_cmoa.archive = self.archive.copy() new_cmoa._hypervolume_plus_constr = self._hypervolume_plus_constr return new_cmoa def distance_to_hypervolume_area(self, f_vals): """ Compute the distance of the objective vector f_vals to the hypervolume area. """ return self.archive.distance_to_hypervolume_area(f_vals) def distance_to_pareto_front(self, f_vals, ref_factor=1): """ Compute the distance of the objective vector f_vals to the Pareto front. """ return self.archive.distance_to_pareto_front(f_vals, ref_factor=ref_factor) def dominates(self, f_vals): """ Returns True if the objective vector f_vals is dominated by any of the points in the archive. """ return self.archive.dominates(f_vals) def dominators(self, f_vals, number_only=False): """ Returns a list of points in the archive that dominate the objective vector f_vals. If number_only is True, only the number of dominators is returned. """ return self.archive.dominators(f_vals, number_only=number_only) def hypervolume_improvement(self, f_vals): """ Compute the hypervolume improvement of the archive if the objective vector f_vals is added. """ return self.archive.hypervolume_improvement(f_vals) def hypervolume_plus_constr_improvement(self, f_vals, g_vals): """ Compute the improvement of the indicator if the objective vector f_vals is added. >>> from moarchiving.get_archive import get_cmo_archive >>> get_cmo_archive.hypervolume_final_float_type = float >>> moa = get_cmo_archive(reference_point=[5, 5], tau=4) # hv+c = -inf >>> moa.hypervolume_plus_constr_improvement([1, 1], 10) inf >>> moa.add([1, 1], [10, 0]) # hv+c = -14 >>> int(moa.hypervolume_plus_constr_improvement([2, 2], 4)) 6 >>> moa.add([2, 2], [3, 1]) # hv+c = -8 >>> int(moa.hypervolume_plus_constr_improvement([8, 9], 0)) 4 >>> moa.add([8, 9], [0, 0]) # hv+c = -4 >>> int(moa.hypervolume_plus_constr_improvement([8, 5], 0)) 1 >>> moa.add([8, 5], [0, 0]) # hv+c = -3 >>> int(moa.hypervolume_plus_constr_improvement([0, 0], 1)) 0 >>> moa.add([0, 0], [1, -3]) # hv+c = -3 >>> int(moa.hypervolume_plus_constr_improvement([4, 4], 0)) 4 >>> moa.add([4, 4], [0, 0]) # hv+c = 1 >>> int(moa.hypervolume_plus_constr_improvement([3, 3], 0)) 3 """ try: constraint_violation = sum([max(0, g) for g in g_vals]) except TypeError: constraint_violation = max(g_vals, 0) if constraint_violation > 0: if constraint_violation + self.tau < -self._hypervolume_plus_constr: return - self._hypervolume_plus_constr - (constraint_violation + self.tau) return 0 if not self.in_domain(f_vals): if self._hypervolume_plus_constr > 0: return 0 distance_to_hv_area = min(self.distance_to_hypervolume_area(f_vals), self.tau) if distance_to_hv_area < - self._hypervolume_plus_constr: return -self._hypervolume_plus_constr - distance_to_hv_area return 0 if not self.dominates(f_vals): return max(-self._hypervolume_plus_constr, 0) + self.hypervolume_improvement(f_vals) return 0 def in_domain(self, f_vals, reference_point=None): """ Returns True if the objective vector f_vals dominates the reference point. 
""" return self.archive.in_domain(f_vals, reference_point=reference_point) if __name__ == "__main__": import doctest print('doctest.testmod() in constrained_moarchive.py') print(doctest.testmod()) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923732.0 moarchiving-1.0.0/moarchiving/get_archive.py0000644000076500000240000002015715000266124020356 0ustar00hansenstaff"""Convenience create functions for any of the nondominated archives. The appropriate number of objectives, is derived from the input arguments. """ from moarchiving.moarchiving import BiobjectiveNondominatedSortedList as MOArchive2obj from moarchiving.moarchiving3obj import MOArchive3obj from moarchiving.moarchiving4obj import MOArchive4obj from moarchiving.constrained_moarchive import CMOArchive import warnings as _warnings try: import fractions except ImportError: _warnings.warn('`fractions` module not installed, arbitrary precision hypervolume computation not available') def get_mo_archive(list_of_f_vals=None, reference_point=None, infos=None, n_obj=None): """Return a nondominated archive instance with the proper number of objectives. `list_of_f_vals` is a list of objective vectors with `n_obj` objectives. If `list_of_f_vals` is not provided, `n_obj` can be provided to define the number of objectives which is by default 2. `reference_point` is used for hypervolume computation and pruning of the archive. A list of additional information for each objective vector, for example the solution from which the objective values were computed, can be provided in `infos`. """ if not hasattr(get_mo_archive, "hypervolume_final_float_type"): try: get_mo_archive.hypervolume_final_float_type = fractions.Fraction except: get_mo_archive.hypervolume_final_float_type = float if not hasattr(get_mo_archive, "hypervolume_computation_float_type"): try: get_mo_archive.hypervolume_computation_float_type = fractions.Fraction except: get_mo_archive.hypervolume_computation_float_type = float if (list_of_f_vals is None or len(list_of_f_vals) == 0) and n_obj is None and reference_point is None: n_obj = 2 if n_obj is None: if list_of_f_vals is not None and len(list_of_f_vals) > 0: n_obj = len(list_of_f_vals[0]) else: n_obj = len(reference_point) # check if the number of objectives matches the number of objectives in the list of f_vals # and the reference point if they are provided and not empty if list_of_f_vals is not None and len(list_of_f_vals) > 0 and reference_point is not None: if len(reference_point) != len(list_of_f_vals[0]): raise ValueError(f"n_obj ({len(reference_point)}) does not match the number of " f"objectives in the first element of list_of_f_vals " f"({len(list_of_f_vals[0])})") elif n_obj != len(list_of_f_vals[0]): _warnings.warn(f"n_obj ({n_obj}) does not match the number of objectives in " f"list_of_f_vals ({len(list_of_f_vals[0])})") n_obj = len(list_of_f_vals[0]) elif list_of_f_vals is not None and len(list_of_f_vals) > 0: if n_obj != len(list_of_f_vals[0]): _warnings.warn(f"n_obj ({n_obj}) does not match the number of objectives in " f"list_of_f_vals ({len(list_of_f_vals[0])})") n_obj = len(list_of_f_vals[0]) elif reference_point is not None: if n_obj != len(reference_point): _warnings.warn(f"n_obj ({n_obj}) does not match the number of objectives in " f"reference_point ({len(reference_point)})") n_obj = len(reference_point) if n_obj == 2: return MOArchive2obj(list_of_f_vals, reference_point=reference_point, infos=infos, hypervolume_final_float_type=get_mo_archive.hypervolume_final_float_type, 
hypervolume_computation_float_type=get_mo_archive.hypervolume_computation_float_type) elif n_obj == 3: return MOArchive3obj(list_of_f_vals, reference_point=reference_point, infos=infos, hypervolume_final_float_type=get_mo_archive.hypervolume_final_float_type, hypervolume_computation_float_type=get_mo_archive.hypervolume_computation_float_type) elif n_obj == 4: return MOArchive4obj(list_of_f_vals, reference_point=reference_point, infos=infos, hypervolume_final_float_type=get_mo_archive.hypervolume_final_float_type, hypervolume_computation_float_type=get_mo_archive.hypervolume_computation_float_type) else: raise ValueError(f"Unsupported number of objectives: {n_obj}") def get_cmo_archive(list_of_f_vals=None, list_of_g_vals=None, reference_point=None, infos=None, n_obj=None, tau=1): """Return a constrained nondominated archive instance with the proper number of objectives. `list_of_f_vals` is a list of objective vectors with `n_obj` objectives, `list_of_g_vals` is a list of constraint violation vectors (or values). If `list_of_f_vals` is not provided, `n_obj` can be provided to define the number of objectives. `reference_point` is used for the hypervolume computation and pruning of the archive. A list of additional information for each objective vector can be provided in `infos`. """ if not hasattr(get_cmo_archive, "hypervolume_final_float_type"): try: get_cmo_archive.hypervolume_final_float_type = fractions.Fraction except: get_cmo_archive.hypervolume_final_float_type = float if not hasattr(get_cmo_archive, "hypervolume_computation_float_type"): try: get_cmo_archive.hypervolume_computation_float_type = fractions.Fraction except: get_cmo_archive.hypervolume_computation_float_type = float if (list_of_f_vals is None or len(list_of_f_vals) == 0) and n_obj is None and reference_point is None: n_obj = 2 if n_obj is None: if list_of_f_vals is not None and len(list_of_f_vals) > 0: n_obj = len(list_of_f_vals[0]) else: n_obj = len(reference_point) # check if the number of objectives matches the number of objectives in the list of f_vals # and the reference point if they are provided and not empty if list_of_f_vals is not None and len(list_of_f_vals) > 0 and reference_point is not None: if len(reference_point) != len(list_of_f_vals[0]): raise ValueError(f"n_obj ({len(reference_point)}) does not match the number of " f"objectives in the first element of list_of_f_vals " f"({len(list_of_f_vals[0])})") elif n_obj != len(list_of_f_vals[0]): _warnings.warn(f"n_obj ({n_obj}) does not match the number of objectives in " f"list_of_f_vals ({len(list_of_f_vals[0])})") n_obj = len(list_of_f_vals[0]) elif list_of_f_vals is not None and len(list_of_f_vals) > 0: if n_obj != len(list_of_f_vals[0]): _warnings.warn(f"n_obj ({n_obj}) does not match the number of objectives in " f"list_of_f_vals ({len(list_of_f_vals[0])})") n_obj = len(list_of_f_vals[0]) elif reference_point is not None: if n_obj != len(reference_point): _warnings.warn(f"n_obj ({n_obj}) does not match the number of objectives in " f"reference_point ({len(reference_point)})") n_obj = len(reference_point) if list_of_f_vals is None and list_of_g_vals is not None: raise ValueError("list_of_f_vals must be provided if list_of_g_vals is provided") if list_of_f_vals is not None and list_of_g_vals is None: raise ValueError("list_of_g_vals must be provided if list_of_f_vals is provided") if list_of_f_vals is not None and list_of_g_vals is not None and len(list_of_f_vals) != len(list_of_g_vals): raise ValueError("list_of_f_vals and list_of_g_vals must have the same 
length") return CMOArchive(list_of_f_vals=list_of_f_vals, list_of_g_vals=list_of_g_vals, reference_point=reference_point, infos=infos, n_obj=n_obj, tau=tau, hypervolume_final_float_type=get_cmo_archive.hypervolume_final_float_type, hypervolume_computation_float_type=get_cmo_archive.hypervolume_computation_float_type) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923732.0 moarchiving-1.0.0/moarchiving/moarchiving.py0000644000076500000240000014136415000266124020410 0ustar00hansenstaff# -*- coding: utf-8 -*- """A bi-objective nondominated archive, implemented as sorted list and with incremental update in logarithmic time. """ from __future__ import division, print_function, unicode_literals import warnings as _warnings # from collections import deque # does not support deletion of slices!? import bisect as _bisect # to find the insertion index efficiently try: import fractions except ImportError: _warnings.warn( '`fractions` module not installed, arbitrary precision hypervolume computation not available') del division, print_function, unicode_literals inf = float('inf') def _debug_trace(*args, **kwargs): """return a string like printing the calling trace stack""" try: import traceback except: s = '' else: s = ''.join(traceback.format_stack(*args, **kwargs)) return s def true_fraction(val, copy=False): """return a `fractions.Fraction` object from `val`. Fixes the issue that `Fraction` does not convert an `np.intc` or `np.int32` type to infinite representation `int`. """ try: fractions.Fraction except NameError: return val if isinstance(val, fractions.Fraction): if copy: # Fraction(.) is almost 20 times slower than float(.) return fractions.Fraction(val) return val if not isinstance(val, (int, float)): val = float(val) return fractions.Fraction(val) class BiobjectiveNondominatedSortedList(list): """A sorted list of non-dominated unique objective-pairs. Non-domination here means smaller in at least one objective. The list is sorted (naturally) by the first objective. No equal entries in either objective exist in the list (assuming it is in a consistent state). The operation >>> from moarchiving import BiobjectiveNondominatedSortedList >>> any_list = BiobjectiveNondominatedSortedList(any_list) # doctest:+SKIP sorts and prunes the pair list `any_list` to become a consistent nondominated sorted archive. Afterwards, the methods `add` and `add_list` keep the list always in a consistent state. If a reference point was given on initialization, also the hypervolume of the archive is computed and updated. The `contributing_hypervolume` and `hypervolume_improvement` methods give the uncrowded hypervolume improvement, with or without removing the input from the archive before the computation, respectively, see https://arxiv.org/abs/1904.08823 Removing elements with `pop` or `del` keeps the archive sorted and non-dominated but does not update the hypervolume, which hence becomes inconsistent. >>> a = BiobjectiveNondominatedSortedList([[1,0.9], [0,1], [0,2]]) >>> a [[0, 1], [1, 0.9]] >>> a.add([0, 1]) # doesn't change anything, [0, 1] is not duplicated >>> BiobjectiveNondominatedSortedList( ... [[-0.749, -1.188], [-0.557, 1.1076], ... [0.2454, 0.4724], [-1.146, -0.110]]) [[-1.146, -0.11], [-0.749, -1.188]] >>> a._asserts() # consistency assertions Details: This list doesn't prevent the user to insert a new element anywhere and hence get into an inconsistent state. Inheriting from `sortedcontainers.SortedList` would ensure that the `list` remains at least sorted. 
See also: https://pypi.org/project/sortedcontainers https://code.activestate.com/recipes/577197-sortedcollection/ https://pythontips.com/2016/04/24/python-sorted-collections/ """ # DONE: implement large-precision hypervolume computation. # DONE (method remove): implement a `delete` method that also updates the hypervolume. # TODO (DONE): implement a copy method # TODO: compute a hypervolume also without a reference point. Using the # two extreme points as reference should just work fine also for # hypervolume improvement, as making them more extreme improves # the volume. This is not equivalent with putting the reference # to infty, as the contribution from a new extreme could be small. # TODO (discarded): currently, points beyond the reference point (which do not contribute # to the hypervolume) are discarded. We may want to keep them, for simplicity # in a separate list? # Default Values for meta control attributes make_expensive_asserts = False hypervolume_final_float_type = true_fraction """HV computation takes increasingly longer with increasing precision (number of iterations). Set ``BiobjectiveNondominatedSortedList.hypervolume_final_float_type = float`` when speed is an issue. """ # lambda x: x is marginally faster than float hypervolume_computation_float_type = true_fraction """HV computation takes increasingly longer with increasing precision (number of iterations). Precision may be less relevant here than for `hypervolume_final_float_type`. Set ``BiobjectiveNondominatedSortedList.hypervolume_computation_float_type = float`` here first when speed is an issue. """ maintain_contributing_hypervolumes = False def __init__(self, list_of_f_pairs=None, reference_point=None, sort=sorted, infos=None, hypervolume_final_float_type=None, hypervolume_computation_float_type=None): """`list_of_f_pairs` does not need to be sorted. f-pairs beyond the `reference_point` are pruned away. The `reference_point` is also used to compute the hypervolume. `sort` is a sorting function and ``sort=None`` will prevent a sort, which can be useful if the `list_of_f_pairs` is already sorted. CAVEAT: the interface, in particular the positional interface may change in future versions. 
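        For example, f-pairs beyond the reference point are not stored
        (an illustrative sketch):

        >>> from moarchiving import BiobjectiveNondominatedSortedList
        >>> BiobjectiveNondominatedSortedList([[1, 2], [4, 6]], reference_point=[5, 5])
        [[1, 2]]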
""" if hypervolume_final_float_type is None: self.hypervolume_final_float_type = BiobjectiveNondominatedSortedList.hypervolume_final_float_type else: self.hypervolume_final_float_type = hypervolume_final_float_type if hypervolume_computation_float_type is None: self.hypervolume_computation_float_type = BiobjectiveNondominatedSortedList.hypervolume_computation_float_type else: self.hypervolume_computation_float_type = hypervolume_computation_float_type self.make_expensive_asserts = BiobjectiveNondominatedSortedList.make_expensive_asserts self.maintain_contributing_hypervolumes = BiobjectiveNondominatedSortedList.maintain_contributing_hypervolumes self.n_obj = 2 if list_of_f_pairs is not None and len(list_of_f_pairs): try: list_of_f_pairs = list_of_f_pairs.tolist() except: pass if len(list_of_f_pairs[0]) != 2: raise ValueError("need elements of len 2, got %s" " as first element" % str(list_of_f_pairs[0])) if sort is None: list.__init__(self, list_of_f_pairs) else: if infos is not None: f_pair2info = dict(zip([tuple(f_pair) for f_pair in list_of_f_pairs], infos)) list.__init__(self, sort(list_of_f_pairs)) infos = [f_pair2info[tuple(f_pair)] for f_pair in self] else: list.__init__(self, sort(list_of_f_pairs)) # super(BiobjectiveNondominatedSortedList, self).__init__(sort(list_of_f_pairs)) if reference_point is not None: self.reference_point = list(reference_point) else: self.reference_point = reference_point if infos is not None: if len(infos) != len(list_of_f_pairs): raise ValueError(f"need as many infos as f_pairs, got " f"{len(infos)} infos and {len(list_of_f_pairs)} f_pairs") self._infos = infos else: self._infos = None self.prune() # remove dominated entries, uses in_domain, hence ref-point if self.maintain_contributing_hypervolumes: self._contributing_hypervolumes = self.contributing_hypervolumes raise NotImplementedError('update of _contributing_hypervolumes in _add_HV and _subtract_HV not implemented') else: self._contributing_hypervolumes = [] self._set_HV() if reference_point is not None: if self._hypervolume > 0: self._hypervolume_plus = self._hypervolume else: if list_of_f_pairs is None or len(list_of_f_pairs) == 0: self._hypervolume_plus = -inf else: self._hypervolume_plus = -min([self.distance_to_hypervolume_area(f) for f in list_of_f_pairs]) else: self._hypervolume_plus = None self.make_expensive_asserts and self._asserts() def _debug_info(self): """return debug info as a list of (key, value) tuples""" def cut_list(l, n=100): n2 = int(n/2) - 2 try: if len(l) > n: return l[:n2] + ['...'] + [l[int(len(l) / 2)]] + ['...'] + l[-n2:] except: pass return l return [('len', len(self)), ('attributes', dict((k, cut_list(v)) for k, v in self.__dict__.items())), ('self', cut_list(self)), ] def add(self, f_pair, info=None): """insert `f_pair` in `self` if it is not (weakly) dominated. Return index at which the insertion took place or `None`. The list remains sorted in the process. The list remains non-dominated with unique elements, which means that some or many or even all of its present elements may be removed. `info` is added to the `infos` `list`. It can be an arbitrary object, e.g. a list or dictionary. It can in particular contain (or be) the solution ``x`` such that ``f_pair == fun(info['x'])``. Implementation detail: For performance reasons, `insert` is avoided in favor of `__setitem__`, if possible. 
>>> from moarchiving import BiobjectiveNondominatedSortedList >>> arch = BiobjectiveNondominatedSortedList() >>> len(arch.infos) == len(arch) == 0 True >>> len(arch), arch.add([2, 2]), len(arch), arch.infos (0, 0, 1, [None]) >>> arch.add([3, 1], info={'x': [-1, 2, 3], 'note': 'rocks'}) 1 >>> len(arch.infos) == len(arch) == 2 True >>> arch.infos[0], sorted(arch.infos[1].items()) (None, [('note', 'rocks'), ('x', [-1, 2, 3])]) >>> arch.infos[arch.index([3, 1])]['x'] [-1, 2, 3] """ f_pair = list(f_pair) # convert array to list if len(f_pair) != 2: raise ValueError("argument `f_pair` must be of length 2, was" " ``%s``" % str(f_pair)) if not self.in_domain(f_pair): if self.hypervolume_plus is not None and self.hypervolume_plus < 0: self._hypervolume_plus = max((self._hypervolume_plus, -self.distance_to_hypervolume_area(f_pair))) self._removed = [f_pair] return None idx = self.bisect_left(f_pair) if self.dominates_with(idx - 1, f_pair) or self.dominates_with(idx, f_pair): if f_pair not in self[idx - 1:idx + 1]: self._removed = [f_pair] return None assert idx == len(self) or not f_pair == self[idx] # here f_pair now is non-dominated self._add_at(idx, f_pair, info) # self.make_expensive_asserts and self._asserts() return idx def _add_at(self, idx, f_pair, info=None): """add `f_pair` at position `idx` and remove dominated elements. This method assumes that `f_pair` is not weakly dominated by `self` and that `idx` is the correct insertion place e.g. acquired by `bisect_left`. """ if self._infos is None and info is not None: # prepare for inserting info self._infos = len(self) * [None] # `_infos` and `self` are in a consistent state now if idx == len(self) or f_pair[1] > self[idx][1]: self.insert(idx, f_pair) if self._infos is not None: # if the list exists it needs to be updated self._infos.insert(idx, info) # also insert None, otherwise lists get out of sync self._add_HV(idx) # self.make_expensive_asserts and self._asserts() return # here f_pair now dominates self[idx] idx2 = idx + 1 while idx2 < len(self) and f_pair[1] <= self[idx2][1]: # f_pair also dominates self[idx2] # self.pop(idx) # slow # del self[idx] # slow idx2 += 1 # delete later in a chunk self._subtract_HV(idx, idx2) self._removed = self[idx:idx2] self[idx] = f_pair # on long lists [.] is much cheaper than insert if self._infos is not None: # if the list exists it needs to be updated self._infos[idx] = info del self[idx + 1:idx2] # can make `add` 20x faster if self._infos: del self._infos[idx + 1:idx2] self._add_HV(idx) assert len(self) >= 1 assert self._infos is None or len(self) == len(self.infos) == len(self._infos), ( self._infos, len(self._infos), len(self.infos)) # assert len(self) == len(self.infos), (self._infos, self.infos, len(self.infos), len(self)) # caveat: len(self.infos) creates a list if self._infos is None # self.make_expensive_asserts and self._asserts() def remove(self, f_pair): """remove element `f_pair`. Raises a `ValueError` (like `list`) if ``f_pair is not in self``. To avoid the error, checking ``if f_pair is in self`` first is a possible coding solution, like >>> from moarchiving import BiobjectiveNondominatedSortedList >>> nda = BiobjectiveNondominatedSortedList([[2, 3]]) >>> f_pair = [1, 2] >>> assert [2, 3] in nda and f_pair not in nda >>> if f_pair in nda: ... 
nda.remove(f_pair) >>> nda = BiobjectiveNondominatedSortedList() >>> nda.add_list([[6, 6], [5, 7], [4, 8], [3, 9]]) >>> nda.remove(nda[-1]) >>> _ = nda.add([2, 10]) >>> nda = BiobjectiveNondominatedSortedList._random_archive(p_ref_point=1) >>> for t in [None, float]: ... if t: ... nda.hypervolume_final_float_type = t ... nda.hypervolume_computation_float_type = t ... for pair in list(nda): ... len_ = len(nda) ... state = nda._state() ... nda.remove(pair) ... assert len(nda) == len_ - 1 ... if 100 * pair[0] - int(100 * pair[0]) < 0.7: ... res = nda.add(pair) ... assert all(state[i] == nda._state()[i] for i in ( ... [0, 3] if nda.hypervolume_final_float_type is float else [0, 2, 3])) Return `None` (like `list.remove`). """ idx = self.index(f_pair) self._subtract_HV(idx) if hasattr(self, '_hypervolume'): self._hypervolume_plus = self._hypervolume if self._hypervolume > 0 else -inf self._removed = [self[idx]] del self[idx] # == list.remove(self, f_pair) if self._infos: del self._infos[idx] def add_list(self, list_of_f_pairs, infos=None): """insert a list of f-pairs which doesn't need to be sorted. This is just a shortcut for looping over `add`, but `discarded` now contains the discarded elements from all `add` operations. >>> from moarchiving import BiobjectiveNondominatedSortedList >>> arch = BiobjectiveNondominatedSortedList() >>> list_of_f_pairs = [[1, 2], [0, 3]] >>> for f_pair in list_of_f_pairs: ... arch.add(f_pair) # return insert index or None 0 0 >>> arch == sorted(list_of_f_pairs) # both entries are nondominated True >>> arch.compute_hypervolume([3, 4]) == 5.0 True >>> arch.infos # to have infos use `add` instead [None, None] Return `None`. Details: discarded does not contain elements of `list_of_f_pairs`. When `list_of_pairs` is already sorted, `merge` may have a small performance benefit. """ removed = [] if infos is None: infos = len(list_of_f_pairs) * [None] # should we better create a non-dominated list and do a merge? for f_pair, info in zip(list_of_f_pairs, infos): if self.add(f_pair, info=info) is not None: removed += [self._removed] # slightly faster than .extend self._removed = removed # could contain elements of `list_of_f_pairs` self.make_expensive_asserts and self._asserts() def merge(self, list_of_f_pairs): """obsolete and replaced by `add_list`. merge in a sorted list of f-pairs. The list can contain dominated pairs, which are discarded during the merge. Return `None`. Details: merging 200 into 100_000 takes 3e-4s vs 4e-4s with `add_list`. The `discarded` property is not consistent with the overall merge. 
""" raise NotImplementedError() """ # _warnings.warn("merge was never thoroughly tested, use `add_list`") for f_pair in list_of_f_pairs: if not self.in_domain(f_pair): continue f_pair = list(f_pair) # convert array to list idx = self.bisect_left(f_pair, idx) if self.dominates_with(idx - 1, f_pair) or self.dominates_with(idx, f_pair): continue self._add_at(idx, f_pair) self.make_expensive_asserts and self._asserts() """ def copy(self): """return a "deep" copy of `self`""" nda = BiobjectiveNondominatedSortedList() for d in self.__dict__: setattr(nda, d, getattr(self, d)) # now fix all mutable references as a true copy list.__init__(nda, (p[:] for p in self)) nda.reference_point = [xi for xi in self.reference_point] nda._hypervolume = self.hypervolume_final_float_type(self._hypervolume) # with Fraction not necessary nda._contributing_hypervolumes = [hv for hv in self._contributing_hypervolumes] return nda def bisect_left(self, f_pair, lowest_index=0): """return index where `f_pair` may need to be inserted. Smaller indices have a strictly better f1 value or they have equal f1 and better f2 value. `lowest_index` restricts the search from below. Details: This method does a binary search in `self` using `bisect.bisect_left`. """ try: return _bisect.bisect_left(self, f_pair, lowest_index) except Exception: pass try: f_pair = f_pair.tolist() except Exception: f_pair = list(f_pair) return _bisect.bisect_left(self, f_pair, lowest_index) def dominates(self, f_pair): """return `True` if any element of `self` dominates or is equal to `f_pair`. Otherwise return `False`. >>> from moarchiving import BiobjectiveNondominatedSortedList as NDA >>> a = NDA([[0.39, 0.075], [0.0087, 0.14]]) >>> a.dominates(a[0]) # is always True if `a` is not empty True >>> a.dominates([-1, 33]) or a.dominates([33, -1]) False >>> a._asserts() See also `bisect_left` to find the closest index. """ if len(self) == 0: return False idx = self.bisect_left(f_pair) if self.dominates_with(idx - 1, f_pair) or self.dominates_with(idx, f_pair): return True return False def dominates_with(self, idx, f_pair): """return `True` if ``self[idx]`` dominates or is equal to `f_pair`. Otherwise return `False` or `None` if `idx` is out-of-range. >>> from moarchiving import BiobjectiveNondominatedSortedList as NDA >>> NDA().dominates_with(0, [1, 2]) is None # empty NDA True """ if idx < 0 or idx >= len(self): return None if self[idx][0] <= f_pair[0] and self[idx][1] <= f_pair[1]: return True return False def dominators(self, f_pair, number_only=False): """return the list of all `f_pair`-dominating elements in `self`, including an equal element. ``len(....dominators(...))`` is hence the number of dominating elements which can also be obtained without creating the list with ``number_only=True``. >>> from moarchiving import BiobjectiveNondominatedSortedList as NDA >>> a = NDA([[1.2, 0.1], [0.5, 1]]) >>> len(a) 2 >>> a.dominators([2, 3]) == a True >>> a.dominators([0.5, 1]) [[0.5, 1]] >>> len(a.dominators([0.6, 3])), a.dominators([0.6, 3], number_only=True) (1, 1) >>> a.dominators([0.5, 0.9]) [] """ idx = self.bisect_left(f_pair) if idx < len(self) and self[idx] == f_pair: res = 1 if number_only else [self[idx]] else: res = 0 if number_only else [] idx -= 1 while idx >= 0 and self[idx][1] <= f_pair[1]: if number_only: res += 1 else: res.insert(0, self[idx]) # keep sorted idx -= 1 return res def in_domain(self, f_pair, reference_point=None): """return `True` if `f_pair` is dominating the reference point, `False` otherwise. 
`True` means that `f_pair` contributes to the hypervolume if not dominated by other elements. `f_pair` may also be an index in `self` in which case ``self[f_pair]`` is tested to be in-domain. >>> from moarchiving import BiobjectiveNondominatedSortedList as NDA >>> a = NDA([[2.2, 0.1], [0.5, 1]], reference_point=[2, 2]) >>> assert len(a) == 1 >>> a.in_domain([0, 0]) True >>> a.in_domain([2, 1]) False >>> all(a.in_domain(ai) for ai in a) True >>> a.in_domain(0) True TODO: improve name? """ if reference_point is None: reference_point = self.reference_point if reference_point is None: return True try: f_pair = self[f_pair] except TypeError: pass except IndexError: raise # return None if (f_pair[0] >= reference_point[0] or f_pair[1] >= reference_point[1]): return False return True @property def infos(self): """`list` of complementary information corresponding to each archive entry""" return self._infos or len(self) * [None] # tuple is slower for len >= 1000 @property def hypervolume(self): """hypervolume of the entire list w.r.t. the "initial" reference point. Raise `ValueError` when no reference point was given initially. >>> from moarchiving import BiobjectiveNondominatedSortedList as NDA >>> a = NDA([[0.5, 0.4], [0.3, 0.7]], [2, 2.1]) >>> a._asserts() >>> a.reference_point == [2, 2.1] True >>> abs(a.hypervolume - a.compute_hypervolume(a.reference_point)) < 1e-11 True >>> a.add([0.2, 0.8]) 0 >>> a._asserts() >>> abs(a.hypervolume - a.compute_hypervolume(a.reference_point)) < 1e-11 True >>> a.add([0.3, 0.6]) 1 >>> a._asserts() >>> abs(a.hypervolume - a.compute_hypervolume(a.reference_point)) < 1e-11 True """ if self.reference_point is None: raise ValueError("to compute the hypervolume a reference" " point is needed (must be given initially)") if self.make_expensive_asserts: assert abs(self._hypervolume - self.compute_hypervolume(self.reference_point)) < 1e-12 return self._hypervolume @property def hypervolume_plus(self): """uncrowded hypervolume of the entire list w.r.t. the "initial" reference point. `hypervolume_plus` equals to the hypervolume when the archive is nonempty, otherwise it is the smallest Euclidean distance to the hypervolume area (AKA reference domain) times -1 of any element that was previously added but rejected because it did not dominate the reference point. Raise `ValueError` when no reference point was given initially. Details: conceptually, the distance computation is based on the nondominated archive as if it was not pruned by the reference point. >>> from moarchiving import BiobjectiveNondominatedSortedList as NDA >>> a = NDA(reference_point=[1, 1]) >>> a.hypervolume_plus -inf >>> a.add([1, 2]) >>> a.hypervolume_plus -1.0 >>> a.add([1, 1]) >>> a.hypervolume_plus -0.0 >>> a.add([0.5, 0.5]) 0 >>> float(a.hypervolume_plus) 0.25 """ if self.reference_point is None: raise ValueError("to compute the hypervolume_plus a reference" " point is needed (must be given initially)") return self._hypervolume_plus @property def contributing_hypervolumes(self): """`list` of contributing hypervolumes. Elements in the list are of type `self.hypervolume_computation_float_type`. Conversion to `float` in a list comprehension should always be possible. Changing this list will have unexpected consequences if `self.maintain_contributing_hypervolumes`, Details: The "initial" reference point is used for the outer points. If none is given, `inf` is used as reference. For the time being, the contributing hypervolumes are computed each time from scratch. 
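For example (a small sketch with three mutually non-dominated points, assuming
the default `Fraction`-based computation), each contribution can be converted
to `float`:

>>> from moarchiving import BiobjectiveNondominatedSortedList as NDA
>>> a = NDA([[1, 3], [2, 2], [3, 1]], reference_point=[4, 4])
>>> [float(hv) for hv in a.contributing_hypervolumes]
[1.0, 1.0, 1.0]
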
:See also: `contributing_hypervolume` """ if self.maintain_contributing_hypervolumes: if not hasattr(self, '_contributing_hypervolumes'): self._contributing_hypervolumes = [ self.contributing_hypervolume(i) for i in range(len(self))] if len(self._contributing_hypervolumes) == len(self): return self._contributing_hypervolumes _warnings.warn("contributing hypervolumes seem not consistent") return [self.contributing_hypervolume(i) for i in range(len(self))] def contributing_hypervolume(self, idx): """return contributing hypervolume of element `idx`. If `idx` is an `f_pair`, return contributing hypervolume of element with value `f_pair`. If `f_pair` is not in `self`, return `hypervolume_improvement(f_pair)`, i.e., its uncrowded contributing hypervolume (which can be negative). The return type is ``self.hypervolume_computation_float_type` and by default `fractions.Fraction`, which can be converted to `float` like ``float(....contributing_hypervolume(idx))``. """ try: len(idx) except TypeError: pass else: # idx is a pair if idx in self: idx = self.index(idx) else: return self.hypervolume_improvement(idx) if idx == 0: y = self.reference_point[1] if self.reference_point else inf else: y = self[idx - 1][1] if idx in (len(self) - 1, -1): x = self.reference_point[0] if self.reference_point else inf else: x = self[idx + 1][0] if inf in (x, y): return inf Fc = self.hypervolume_computation_float_type dHV = (Fc(x) - Fc(self[idx][0])) * (Fc(y) - Fc(self[idx][1])) assert dHV >= 0 return dHV def distance_to_pareto_front(self, f_pair, ref_factor=1): """of a dominated `f_pair` also considering the reference domain. Non-dominated points have (by definition) a distance of zero, unless the archive is empty and the point does not dominate the reference point. The implementation assumes that all points of the archive are in the reference domain (and more extreme points have been pruned, as it is the default behavior). Details: the distance for dominated points is computed by iterating over the relevant kink points ``(self[i+1][0], self[i][1])``. In case of minimization, the boundary with two non-dominated points can be depicted like:: ...______. . <- reference point | x__. <- kink point | x___. <- kink point | | : : The three kink points which are possibly used for the computations are denoted by a dot. The outer kink points use one coordinate of the reference point. 
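For example (a sketch with two archive points; for the dominated point
``[4, 4]`` the closest kink point is ``(3, 3)``):

>>> from moarchiving import BiobjectiveNondominatedSortedList as NDA
>>> a = NDA([[1, 3], [3, 1]], reference_point=[5, 5])
>>> a.distance_to_pareto_front([2, 2])  # non-dominated, hence distance 0
0
>>> a.distance_to_pareto_front([4, 4]) == 2**0.5  # distance to kink point (3, 3)
True
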
""" if self.in_domain(f_pair) and not self.dominates(f_pair): return 0 # return minimum distance if self.reference_point: ref_d0 = ref_factor * max((0, f_pair[0] - self.reference_point[0])) ref_d1 = ref_factor * max((0, f_pair[1] - self.reference_point[1])) else: ref_d0 = 0 ref_d1 = 0 if len(self) == 0: # otherwise we get an index error below return (ref_d0**2 + ref_d1**2)**0.5 # distances to the two outer kink points, given by the extreme # points and the respective the reference point coordinate, for # the left (and up) most point: squared_distances = [max((0, f_pair[0] - self[0][0]))**2 + ref_d1**2] # and the right most (and lowest) point squared_distances += [ref_d0**2 + max((0, f_pair[1] - self[-1][1]))**2] if len(self) == 1: return min(squared_distances)**0.5 for idx in range(self.bisect_left(f_pair), 0, -1): if idx == len(self): continue squared_distances.append( max((0, f_pair[1] - self[idx - 1][1]))**2 + max((0, f_pair[0] - self[idx][0]))**2) if self[idx][1] >= f_pair[1] or idx == 1: break if self.make_expensive_asserts and len(squared_distances) > 2: assert min(squared_distances[2:]) == min( [max((0, f_pair[0] - self[i + 1][0]))**2 + max((0, f_pair[1] - self[i][1]))**2 for i in range(len(self) - 1)]) return min(squared_distances)**0.5 def distance_to_hypervolume_area(self, f_pair): return (max((0, f_pair[0] - self.reference_point[0]))**2 + max((0, f_pair[1] - self.reference_point[1]))**2)**0.5 \ if self.reference_point else 0 def _hypervolume_improvement0(self, f_pair): """deprecated and only used for testing: return how much `f_pair` would improve the hypervolume. If dominated, return the distance to the empirical pareto front multiplied by -1. Else if not in domain, return distance to the reference point dominating area times -1. Overall this amounts to the uncrowded hypervolume improvement, see https://arxiv.org/abs/1904.08823 """ save_infos = self._infos.copy() if self._infos is not None else None save_hypervolume_plus = self._hypervolume_plus dist = self.distance_to_pareto_front(f_pair) if dist: return -dist hv0 = self.hypervolume state = self._state() removed = self.discarded # to get back previous state added = self.add(f_pair) is not None if added and self.discarded is not removed: add_back = self.discarded else: add_back = [] assert len(add_back) + len(self) - added == state[0] hv1 = self.hypervolume if added: self.remove(f_pair) if add_back: self.add_list(add_back) self._removed = removed if self.hypervolume_computation_float_type is not float and ( self.hypervolume_final_float_type is not float): assert state == self._state() if hv0 != self.hypervolume: _warnings.warn("HV changed from %f to %f while computing hypervolume_improvement" % (hv0, self.hypervolume)) self._infos = save_infos self._hypervolume_plus = save_hypervolume_plus return self.hypervolume_computation_float_type(hv1) - self.hypervolume def hypervolume_improvement(self, f_pair): """return how much `f_pair` would improve the hypervolume. If dominated, return the distance to the empirical pareto front multiplied by -1. Else if not in domain, return distance to the reference point dominating area times -1. Overall this amounts to the uncrowded hypervolume improvement, see https://arxiv.org/abs/1904.08823 Details: when ``self.reference_point is None`` and `f_pair` is a new extreme point, the returned hypervolume improvement is ``float('inf')``. This method extracts a sublist first and thereby tries to circumentvent to compute small differences between large hypervolumes. 
""" dist = self.distance_to_pareto_front(f_pair) if dist: return -dist if self.reference_point is None: if f_pair[0] < self[0][0] or f_pair[1] < self[-1][1]: return inf # find sublist that suffices to get the contributing volume i0 = self.bisect_left(f_pair) i1 = i0 while i1 < len(self) and f_pair[1] <= self[i1][1]: # f_pair also dominates self[i1] i1 += 1 r0 = self[i1][0] if i1 < len(self) else self.reference_point[0] r1 = self[i0-1][1] if i0 > 0 else self.reference_point[1] assaved = BiobjectiveNondominatedSortedList.make_expensive_asserts BiobjectiveNondominatedSortedList.make_expensive_asserts = False # prevent infinite recursion sub = BiobjectiveNondominatedSortedList(self[i0:i1], reference_point=[r0, r1], sort=None) BiobjectiveNondominatedSortedList.make_expensive_asserts = assaved hv0 = sub.hypervolume sub.add(f_pair) res = self.hypervolume_computation_float_type(sub.hypervolume) - hv0 if BiobjectiveNondominatedSortedList.make_expensive_asserts: assert abs(res - self._hypervolume_improvement0(f_pair)) < 1e-9 * (0.1 + res), ( res, self._hypervolume_improvement0(f_pair)) return res def _set_HV(self): """set current hypervolume value using `self.reference_point`. Raise `ValueError` if `self.reference_point` is `None`. TODO: we may need to store the list of _contributing_ hypervolumes to handle numerical rounding errors later. """ if self.reference_point is None: return None self._hypervolume = self.compute_hypervolume(self.reference_point) if self._hypervolume > 0: self._hypervolume_plus = self._hypervolume return self._hypervolume def compute_hypervolume(self, reference_point): """return hypervolume w.r.t. `reference_point`""" if reference_point is None: raise ValueError("to compute the hypervolume a reference" " point is needed (was `None`)") Fc = self.hypervolume_computation_float_type Ff = self.hypervolume_final_float_type hv = Ff(0.0) idx = 0 while idx < len(self) and not self.in_domain(self[idx], reference_point): idx += 1 if idx < len(self): hv += Ff((Fc(reference_point[0]) - Fc(self[idx][0])) * (Fc(reference_point[1]) - Fc(self[idx][1]))) idx += 1 while idx < len(self) and self.in_domain(self[idx], reference_point): hv += Ff((Fc(reference_point[0]) - Fc(self[idx][0])) * (Fc(self[idx - 1][1]) - Fc(self[idx][1]))) idx += 1 return hv def compute_hypervolumes(self, reference_point): """depricated, subject to removal, see `compute_hypervolume` and `contributing_hypervolumes`. Never implemented: return list of contributing hypervolumes w.r.t. reference_point """ # Old/experimental code (in a string to suppress pylint warnings): """ # construct self._hypervolumes_list # keep sum of different size elements separate, # say, a dict of index lists as indices[1e12] indices[1e6], indices[1], indices[1e-6]... hv = {} for key in indices: hv[key] = sum(_hypervolumes_list[i] for i in indices[key]) # we may use decimal.Decimal to compute the sum of hv decimal.getcontext().prec = 88 hv_sum = sum([decimal.Decimal(hv[key]) for key in hv]) """ raise NotImplementedError() def _subtract_HV(self, idx0, idx1=None): """remove contributing hypervolumes of elements ``self[idx0] to self[idx1 - 1]``. TODO: also update list of contributing hypervolumes in case. 
""" if self.maintain_contributing_hypervolumes: """Old or experimental: del self._contributing_hypervolumes[idx] # we also need to update the contributing HVs of the neighbors """ raise NotImplementedError("update list of hypervolumes") if self.reference_point is None: return None if idx1 is None: idx1 = idx0 + 1 if idx1 - idx0 == len(self): # subtract HV of all points assert idx0 == 0 dHV = -self._hypervolume self._hypervolume *= 0 # keep type return dHV if idx0 == 0: y = self.reference_point[1] else: y = self[idx0 - 1][1] Fc = self.hypervolume_computation_float_type Ff = self.hypervolume_final_float_type dHV = Fc(0.0) for idx in range(idx0, idx1): if idx == len(self) - 1: assert idx < len(self) x = self.reference_point[0] else: x = self[idx + 1][0] dHV -= (Fc(x) - Fc(self[idx][0])) * (Fc(y) - Fc(self[idx][1])) assert dHV <= 0 # and without loss of precision strictly smaller if ((Ff in (float, int) or isinstance(self._hypervolume, (float, int))) and self._hypervolume != 0 and abs(dHV) / self._hypervolume < 1e-9): _warnings.warn("_subtract_HV: subtracting {:.16e} from {:.16e} loses many digits of precision" "\nSelf info: {}\nTraceback: {}".format( -dHV, self._hypervolume, self._debug_info(), _debug_trace())) self._hypervolume += Ff(dHV) if self._hypervolume < 0: _warnings.warn("subtracting {:.16e} from the hypervolume lead to a" " negative hypervolume value of {:.16e}" "\nSelf info: {}\nTraceback: {}".format( -dHV, self._hypervolume, self._debug_info(), _debug_trace())) # assert self._hypervolume >= 0 return dHV def _add_HV(self, idx): """add contributing hypervolume of ``self[idx]`` to hypervolume. TODO: also update list of contributing hypervolumes in case. """ if self.maintain_contributing_hypervolumes: """Exerimental code: self._contributing_hypervolumes.insert(idx, dHV) if idx > 0: self._contributing_hypervolumes[idx - 1] = self.contributing_hypervolume(idx - 1) if idx < len(self) - 1: self._contributing_hypervolumes[idx + 1] = self.contributing_hypervolume(idx + 1) # TODO: proof read """ raise NotImplementedError("update list of hypervolumes") if self.reference_point is None: return None dHV = self.contributing_hypervolume(idx) Ff = self.hypervolume_final_float_type if self._hypervolume and ( Ff in (float, int) or isinstance(self._hypervolume, (float, int))) \ and abs(dHV) / self._hypervolume < 1e-9: _warnings.warn("_add_HV: adding {} to HV={} loses many digits of precision" "\nSelf info: {}\nTraceback: {}".format( dHV, self._hypervolume, self._debug_info(), _debug_trace())) self._hypervolume += Ff(dHV) self._hypervolume_plus = self._hypervolume return dHV def prune(self): """remove dominated or equal entries assuming that the list is sorted. Return number of dropped elements. Implementation details: pruning from right to left may be preferable, because list.insert(0) is O(n) while list.append is O(1), however it is not possible with the given sorting: in principle, the first element may dominate all others, which can only be discovered in the last step when traversing from right to left. This suggests that reverse sort may be better for pruning or we should inherit from `collections.deque` instead from `list`, but `deque` seems not to support deletion of slices. 
""" nb = len(self) removed = [] i = 0 while i < len(self): if self.in_domain(self[i]): break i += 1 removed += self[0:i] del self[0:i] if self._infos: del self._infos[0:i] i = 1 while i < len(self): i0 = i while i < len(self) and (self[i][1] >= self[i0 - 1][1] or not self.in_domain(self[i])): i += 1 # self.pop(i + 1) # about 10x slower in notebook test # prepare indices for the removed list i0r = i0 if i0 > 0: while i0r < i: if self[i0r] == self[i0 - 1]: i0r += 1 # skip self[i0r] as removed because it is still in self else: break ir = i if i + 1 < len(self): while ir > i0r: if self[ir] == self[i + 1]: ir -= 1 # skip self[ir] as removed as it is in self else: break removed += self[i0r:ir] del self[i0:i] if self._infos: del self._infos[i0:i] i = i0 + 1 self._removed = removed # [p for p in removed if p not in self] if self.maintain_contributing_hypervolumes: # Old or experimental code: """ :: self._contributing_hypervolumes = [ # simple solution self.contributing_hypervolume(i) for i in range(len(self))] """ raise NotImplementedError return nb - len(self) @property def discarded(self): """`list` of f-pairs discarded in the last relevant method call. Methods covered are `__init__`, `prune`, `add`, and `add_list`. Removed duplicates are not element of the discarded list except with `__init__`. When not inserted and not already in `self` also the input argument(s) show(s) up in `discarded`. Example to create a list of rank-k-non-dominated fronts: >>> from moarchiving import BiobjectiveNondominatedSortedList as NDA >>> all_ = [[0.1, 1], [-2, 3], [-4, 5], [-4, 5], [-4, 4.9]] >>> nda_list = NDA(all_) # rank-0-non-dominated >>> assert nda_list.discarded == [[-4, 5], [-4, 5]] """ try: return self._removed except AttributeError: return [] def _state(self): return len(self), self.discarded, self.hypervolume, self.reference_point @staticmethod def _random_archive(max_size=500, p_ref_point=0.5): from numpy import random as npr N = npr.randint(max_size) ref_point = list(npr.randn(2) + 1) if npr.rand() < p_ref_point else None return BiobjectiveNondominatedSortedList( [list(0.01 * npr.randn(2) + npr.rand(1) * [i, -i]) for i in range(N)], reference_point=ref_point) def _asserts(self): """make all kind of consistency assertions. >>> import moarchiving >>> a = moarchiving.BiobjectiveNondominatedSortedList( ... [[-0.749, -1.188], [-0.557, 1.1076], ... [0.2454, 0.4724], [-1.146, -0.110]], [10, 10]) >>> a._asserts() >>> for i in range(len(a)): ... assert a.contributing_hypervolume(i) == a.contributing_hypervolumes[i] >>> assert all(map(lambda x, y: x - 1e-9 < y < x + 1e-9, ... a.contributing_hypervolumes, ... [4.01367, 11.587422])) >>> len(a), a.add([-0.8, -1], info={'solution': None}), len(a) (2, 1, 3) >>> len(a) == len(a.infos) == 3 True >>> for i, p in enumerate(list(a)): ... a.remove(p) ... assert len(a) == len(a.infos) == 2 - i >>> assert len(a) == len(a.infos) == 0 >>> try: a.remove([0, 0]) ... except ValueError: pass ... else: raise AssertionError("remove did not raise ValueError") >>> import numpy as np >>> randn = np.random.randn >>> for _ in range(120): ... a = moarchiving.BiobjectiveNondominatedSortedList._random_archive() ... a.make_expensive_asserts = True ... if a.reference_point: ... for i, f_pair in enumerate([randn(2) + [i, -i] for i in range(10)] + ... [randn(2) / randn(2) + [i, -i] for i in range(10)]): ... if i % 4 == 1: ... _ = a.add(f_pair) ... h0 = a.hypervolume ... hi = a.hypervolume_improvement(list(f_pair)) ... hi_org = a._hypervolume_improvement0(list(f_pair)) ... 
assert hi == hi_org # didn't raise with rand instead of randn ... assert a.hypervolume == h0, (a.hypervolume, h0) # works OK with Fraction ... a._asserts() """ assert sorted(self) == self for pair in self: assert self.count(pair) == 1 tmp = BiobjectiveNondominatedSortedList.make_expensive_asserts BiobjectiveNondominatedSortedList.make_expensive_asserts = False assert BiobjectiveNondominatedSortedList(self) == self BiobjectiveNondominatedSortedList.make_expensive_asserts = tmp for pair in self: assert self.dominates(pair) assert not self.dominates([v - 0.001 for v in pair]) if self.reference_point is not None: assert abs(self._hypervolume - self.compute_hypervolume(self.reference_point)) < 1e-11 assert sum(self.contributing_hypervolumes) < self.hypervolume + 1e-11 if self.maintain_contributing_hypervolumes: assert len(self) == len(self._contributing_hypervolumes) assert len(self) == len(self.contributing_hypervolumes) # for i in range(len(self)): # assert self.contributing_hypervolume(i) == self.contributing_hypervolumes[i] if self.reference_point: tmp, self.make_expensive_asserts = self.make_expensive_asserts, False self.hypervolume_improvement([0, 0]) # does state assert self.make_expensive_asserts = tmp assert self._infos is None or len(self._infos) == len(self.infos) == len(self), ( self._infos, len(self._infos), len(self)) # assert len(self.infos) == len(self), (len(self.infos), len(self), self.infos, self._infos) # caveat: len(self.infos) creates a list if self._infos is None # asserts using numpy for convenience try: import numpy as np except ImportError: _warnings.warn("asserts using numpy omitted") else: if len(self) > 1: diffs = np.diff(self, 1, 0) assert all(diffs[:, 0] > 0) assert all(diffs[:, 1] < 0) if __name__ == "__main__": import doctest print('doctest.testmod() in moarchiving.py') print(doctest.testmod()) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923733.0 moarchiving-1.0.0/moarchiving/moarchiving3obj.py0000644000076500000240000005011415000266125021157 0ustar00hansenstaff# -*- coding: utf-8 -*- """ This module contains a MOArchiving3obj class for storing a set of non-dominated points in 3 objective space and efficiently calculating hypervolume with respect to the given reference point. """ from moarchiving.moarchiving import BiobjectiveNondominatedSortedList as MOArchive2obj from moarchiving.moarchiving_utils import (DLNode, ArchiveSortedList, compute_area_simple, remove_from_z, restart_list_y, lexicographic_less, one_contribution_3_obj, weakly_dominates, strictly_dominates) from moarchiving.moarchiving_parent import MOArchiveParent import math import warnings as _warnings try: from sortedcontainers import SortedList except ImportError: pass try: import fractions except ImportError: _warnings.warn('`fractions` module not installed, arbitrary precision hypervolume computation not available') inf = float('inf') class MOArchive3obj(MOArchiveParent): """ Class for storing a set of non-dominated points in 3 objective space and efficiently calculating hypervolume with respect to the given reference point. The archive is implemented as a doubly linked list, and can be modified using functions add and remove. Points of the archive can be accessed as a list of points order by the third coordinate using function points_list. 
>>> from moarchiving.get_archive import get_mo_archive >>> moa = get_mo_archive([[1, 2, 3], [3, 2, 1]]) >>> list(moa) # returns the list of points in the archive sorted by the third coordinate [[3, 2, 1], [1, 2, 3]] >>> moa.add([2, 2, 2]) # add a new point to the archive True >>> moa.add([3, 3, 3]) False >>> moa = get_mo_archive([[1, 2, 3], [2, 3, 4], [3, 2, 1]], ... reference_point=[4, 4, 4], infos=["A", "B", "C"]) >>> moa.infos # returns the list of infos for each point in the archive ['C', 'A'] >>> moa.hypervolume Fraction(10, 1) >>> get_mo_archive.hypervolume_final_float_type = float >>> get_mo_archive.hypervolume_computation_float_type = float >>> moa2 = get_mo_archive([[1, 2, 3], [2, 3, 4], [3, 2, 1]], reference_point=[4, 4, 4]) >>> moa2.hypervolume 10.0 """ try: hypervolume_final_float_type = fractions.Fraction hypervolume_computation_float_type = fractions.Fraction except: hypervolume_final_float_type = float hypervolume_computation_float_type = float def __init__(self, list_of_f_vals=None, reference_point=None, infos=None, hypervolume_final_float_type=None, hypervolume_computation_float_type=None): """ Create a new 3 objective archive object. f-vals beyond the `reference_point` are pruned away. The `reference_point` is also used to compute the hypervolume. infos are an optional list of additional information about the points in the archive. """ hypervolume_final_float_type = MOArchive3obj.hypervolume_final_float_type \ if hypervolume_final_float_type is None else hypervolume_final_float_type hypervolume_computation_float_type = MOArchive3obj.hypervolume_computation_float_type \ if hypervolume_computation_float_type is None else hypervolume_computation_float_type super().__init__(list_of_f_vals=list_of_f_vals, reference_point=reference_point, infos=infos, n_obj=3, hypervolume_final_float_type=hypervolume_final_float_type, hypervolume_computation_float_type=hypervolume_computation_float_type) self._removed = [] self.preprocessing() hv = self._set_HV() self._length = len(list(self)) if hv is not None and hv > 0: self._hypervolume_plus = self._hypervolume else: if list_of_f_vals is None or len(list_of_f_vals) == 0: self._hypervolume_plus = -inf else: self._hypervolume_plus = -min([self.distance_to_hypervolume_area(f) for f in list_of_f_vals]) def add(self, f_vals, info=None, update_hypervolume=True): """ Adds a new point to the archive, and updates the hypervolume if needed. 
Returns True if the point was added, False if it was dominated by another point in the archive >>> from moarchiving.get_archive import get_mo_archive >>> moa = get_mo_archive(reference_point=[4, 4, 4]) >>> moa.add([2, 3, 4]) False >>> moa.add([1, 2, 3]) True >>> list(moa) [[1, 2, 3]] >>> moa.add([3, 2, 1]) True >>> list(moa) [[3, 2, 1], [1, 2, 3]] >>> moa.add([2, 2, 2]) True >>> list(moa) [[3, 2, 1], [2, 2, 2], [1, 2, 3]] """ if len(f_vals) != self.n_obj: raise ValueError(f"argument `f_vals` must be of length {self.n_obj}, was ``{f_vals}``") if self.reference_point is not None and self.hypervolume_plus is not None: dist_to_hv_area = self.distance_to_hypervolume_area(f_vals) if -dist_to_hv_area > self._hypervolume_plus: self._hypervolume_plus = -dist_to_hv_area # q is the current point (so that we are consistent with the paper), # stop is the head of the list, and first_iter is a flag to check if we are at the # first iteration (since the first and last points are the same) q = self.head stop = self.head first_iter = True # Add 0.0 for 3d points so that it matches the original C code and create a new node object if self.n_obj == 3: f_vals = f_vals + [0.0] u = DLNode(x=f_vals, info=info) di = self.n_obj - 1 # loop over all the points in the archive and save the best candidates for cx and cy, # and check if the new point is dominated by any of the points in the archive dominated = False best_cx_candidates = None best_cy_candidates = None inserted = False removed = [] while q != stop or first_iter: first_iter = False # check if the new point is dominated by the current point if all(q.x[i] <= u.x[i] for i in range(self.n_obj)): dominated = True break # check if the new point dominates the current point if all(u.x[i] <= q.x[i] for i in range(self.n_obj)): q_next = q.next[di] remove_from_z(q, archive_dim=self.n_obj) removed.append(q.x[:3]) q = q_next continue """ 1) Set u.cx to the point q ∈ Q with the smallest q_x > u_x such that q_y < u_y and q u.x[0] and q.x[1] < u.x[1]: if best_cx_candidates is None or q.x[0] < best_cx_candidates.x[0]: best_cx_candidates = q elif q.x[0] == best_cx_candidates.x[0] and q.x[1] < best_cx_candidates.x[1]: best_cx_candidates = q if lexicographic_less(q.x, u.x) and q.x[0] < u.x[0] and q.x[1] > u.x[1]: if best_cy_candidates is None or q.x[1] < best_cy_candidates.x[1]: best_cy_candidates = q elif q.x[1] == best_cy_candidates.x[1] and q.x[0] < best_cy_candidates.x[0]: best_cy_candidates = q """ 2) For q ∈ Q, set q.cx to u iff u_y < q_y and u >> from moarchiving.get_archive import get_mo_archive >>> moa = get_mo_archive([[1, 2, 3], [2, 2, 2], [3, 2, 1]], reference_point=[4, 4, 4], ... 
infos=["A", "B", "C"]) >>> moa.remove([2, 2, 2]) 'B' >>> list(moa) [[3, 2, 1], [1, 2, 3]] >>> moa.remove([1, 2, 3]) 'A' >>> list(moa) [[3, 2, 1]] """ di = self.n_obj - 1 # Dimension index for sorting (z-axis in 3D) current = self.head.next[di] stop = self.head.prev[di] # Using SortedList to manage nodes by their y-coordinate, supporting custom sorting needs T = SortedList(key=lambda node: (node.x[1], node.x[0])) # Include sentinel nodes to manage edge conditions T.add(self.head) # self.head is a left sentinel T.add(self.head.prev[di]) # right sentinel remove_node = None while current != stop: if current.x[:3] == f_vals: remove_node = current current = current.next[di] continue T.add(current) # Remove nodes dominated by the current node nodes_to_remove = [node for node in T if node != current and strictly_dominates(current.x, node.x, n_obj=2)] for node in nodes_to_remove: T.remove(node) if current.closest[0].x[:3] == f_vals: # For every p ∈ Q \ {u} such that p.cx = u, set p.cx to the # point q ∈ Q \ {u} with the smallest q_x > p_x such that # q_y < p_y and q current.x[0] and node.x[1] < current.x[1]] if cx_candidates: current.closest[0] = min(cx_candidates, key=lambda node: node.x[0]) else: current.closest[0] = self.head if current.closest[1].x[:3] == f_vals: # For every p ∈ Q \ {u} such that p.cy = u, set p.cy to the # point q ∈ Q \ {u} with the smallest q_y > p_y such that # q_x < p_x and q current.x[1] and node.x[0] < current.x[0]] if cy_candidates: current.closest[1] = min(cy_candidates, key=lambda node: node.x[1]) else: current.closest[1] = self.head.prev[di] current = current.next[di] if remove_node is not None: remove_from_z(remove_node, archive_dim=self.n_obj) self._kink_points = None self._set_HV() self._length -= 1 return remove_node.info else: raise ValueError(f"Point {f_vals} not found in the archive") def add_list(self, list_of_f_vals, infos=None, add_method="compare"): """ Adds a list of points to the archive, and updates the hypervolume. points are added with the `add_method` method: - compare: compares the number of points to add with the number of points in the archive and uses the most efficient method based on that - one_by_one: adds the points one by one to the archive - reinit: reinitializes the archive with the new points >>> from moarchiving.get_archive import get_mo_archive >>> moa = get_mo_archive(reference_point=[4, 4, 4]) >>> moa.add_list([[2, 3, 3], [1, 2, 3]], infos=["A", "B"]) >>> list(moa), moa.infos ([[1, 2, 3]], ['B']) >>> moa.add_list([[3, 2, 1], [2, 2, 2], [3, 3, 3]], infos=["C", "D", "E"]) >>> list(moa), moa.infos ([[3, 2, 1], [2, 2, 2], [1, 2, 3]], ['C', 'D', 'B']) >>> moa.add_list([[1, 1, 1]]) >>> list(moa), moa.infos ([[1, 1, 1]], [None]) """ s = len(list_of_f_vals) if add_method == "compare": n = len(self) add_method = "one_by_one" if s == 1 or (n > 0 and s < math.log2(n) / 2) else "reinit" if infos is None: infos = [None] * s if add_method == "one_by_one": for f_val, info in zip(list_of_f_vals, infos): self.add(f_val, info=info, update_hypervolume=False) self._set_HV() elif add_method == "reinit": self.__init__(list(self) + list_of_f_vals, self.reference_point, self.infos + infos) else: raise ValueError(f"Unknown add method: {add_method}, " f"should be one of: 'compare', 'one_by_one', 'reinit'") def copy(self): """ Returns a copy of the archive >>> from moarchiving.get_archive import get_mo_archive >>> moa = get_mo_archive([[1, 2, 3], [2, 2, 2], [3, 2, 1]], reference_point=[4, 4, 4], ... 
infos=["A", "B", "C"]) >>> moa2 = moa.copy() >>> list(moa2), moa2.infos ([[3, 2, 1], [2, 2, 2], [1, 2, 3]], ['C', 'B', 'A']) >>> moa.remove([2, 2, 2]) 'B' >>> moa2.add([1.5, 1.5, 1.5], "D") True >>> list(moa2), moa2.infos ([[3, 2, 1], [1.5, 1.5, 1.5], [1, 2, 3]], ['C', 'D', 'A']) >>> list(moa), moa.infos ([[3, 2, 1], [1, 2, 3]], ['C', 'A']) """ return MOArchive3obj(list(self), self.reference_point, self.infos) def _get_kink_points(self): """ Function that returns the kink points of the archive. Kink point are calculated by making a sweep of the archive, where the state is one 2 objective archive of all possible kink points found so far, and another 2 objective archive which stores the non-dominated points so far in the sweep >>> from moarchiving.get_archive import get_mo_archive >>> moa = get_mo_archive([[1, 2, 3], [2, 2, 2], [3, 2, 1]], reference_point=[4, 4, 4]) >>> moa._get_kink_points() [[4, 4, 1], [3, 4, 2], [2, 4, 3], [1, 4, 4], [4, 2, 4]] """ if self.reference_point is None: ref_point = [inf] * self.n_obj else: ref_point = self.reference_point # initialize the two states, one for points and another for kink points points_state = MOArchive2obj([[ref_point[0], -inf], [-inf, ref_point[1]]]) kink_candidates = MOArchive2obj([ref_point[:2]]) # initialize the point dictionary, which will store the third coordinate of the points point_dict = { tuple(ref_point[:2]): -inf } kink_points = [] for point in self: # add the point to the kink state to get the dominated kink points, then take it out if kink_candidates.add(point[:2]) is not None: removed = kink_candidates._removed.copy() for removed_point in removed: z = point_dict[tuple(removed_point)] if z < point[2] and point[0] < removed_point[0] and point[1] < removed_point[1]: kink_points.append([removed_point[0], removed_point[1], point[2]]) kink_candidates._removed.clear() kink_candidates.remove(point[:2]) # add the point to the point state, and get two new kink point candidates idx = points_state.add(point[:2]) for i in range(2): p = [points_state[idx + i][0], points_state[idx - 1 + i][1]] point_dict[tuple(p)] = point[2] kink_candidates.add(p) # add all the remaining kink points to the list for point in kink_candidates: kink_points.append([point[0], point[1], ref_point[2]]) return kink_points def hypervolume_improvement(self, f_vals): """ Returns the hypervolume improvement of adding a point to the archive >>> from moarchiving.get_archive import get_mo_archive >>> moa = get_mo_archive([[1, 2, 3], [3, 2, 1]], reference_point=[4, 4, 4]) >>> moa.hypervolume_improvement([2, 2, 2]) 2.0 >>> moa.hypervolume_improvement([3, 3, 4]) -1.0 """ if f_vals in self: return 0 if self.dominates(f_vals): return -1 * self.distance_to_pareto_front(f_vals) return one_contribution_3_obj(self.head, DLNode(x=f_vals), self.hypervolume_computation_float_type) def compute_hypervolume(self, reference_point=None): """ Compute the hypervolume of the current state of archive >>> from moarchiving.get_archive import get_mo_archive >>> moa = get_mo_archive([[1, 2, 3], [3, 2, 1]], reference_point=[4, 4, 4]) >>> moa.compute_hypervolume() 10.0 """ if reference_point is not None: _warnings.warn("Reference point given at the initialization is used") Fc = self.hypervolume_computation_float_type p = self.head area = Fc(0) volume = Fc(0) restart_list_y(self.head) p = p.next[2].next[2] stop = self.head.prev[2] while p != stop: if p.ndomr < 1: p.cnext[0] = p.closest[0] p.cnext[1] = p.closest[1] area += compute_area_simple(p.x, 1, p.cnext[0], p.cnext[0].cnext[1], Fc) p.cnext[0].cnext[1] 
= p p.cnext[1].cnext[0] = p else: remove_from_z(p, archive_dim=self.n_obj) volume += area * (Fc(p.next[2].x[2]) - Fc(p.x[2])) p = p.next[2] return self.hypervolume_final_float_type(volume) def preprocessing(self): """ Preprocessing step to determine the closest points in x and y directions, as described in the paper and implemented in the original C code. """ di = self.n_obj - 1 t = ArchiveSortedList(iterable=[self.head, self.head.next[di]], key=lambda node: (node.x[1], node.x[0])) p = self.head.next[di].next[di] stop = self.head.prev[di] while p != stop: s = t.outer_delimiter_x(p) if weakly_dominates(s.x, p.x, self.n_obj) or weakly_dominates(t.next_y(s).x, p.x, self.n_obj): p.ndomr = 1 p = p.next[di] continue t.remove_dominated_y(p, s) p.closest[0] = s p.closest[1] = t.next_y(s) t.add_y(p, s) p = p.next[di] t.clear() if __name__ == "__main__": import doctest print('doctest.testmod() in moarchiving3obj.py') print(doctest.testmod()) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923733.0 moarchiving-1.0.0/moarchiving/moarchiving4obj.py0000644000076500000240000003166115000266125021166 0ustar00hansenstaff# -*- coding: utf-8 -*- """ This module contains a MOArchiving4obj class for storing a set of non-dominated points in 4 objective space and calculating hypervolume with respect to the given reference point. """ from moarchiving.moarchiving_utils import hv4dplusR, remove_from_z from moarchiving.moarchiving3obj import MOArchive3obj from moarchiving.moarchiving_parent import MOArchiveParent import warnings as _warnings try: import fractions except ImportError: _warnings.warn('`fractions` module not installed, arbitrary precision hypervolume computation not available') inf = float('inf') class MOArchive4obj(MOArchiveParent): """ Class for storing a set of non-dominated points in 4 objective space and calculating hypervolume with respect to the given reference point. The archive is implemented as a doubly linked list, and can be modified using functions add and remove. Points of the archive can be accessed as a list of points order by the fourth coordinate using function points_list. >>> from moarchiving.get_archive import get_mo_archive >>> moa = get_mo_archive([[1, 2, 3, 4], [4, 3, 2, 1]]) >>> list(moa) # returns the list of points in the archive sorted by the third coordinate [[4, 3, 2, 1], [1, 2, 3, 4]] >>> moa.add([2, 2, 2, 2]) # add a new point to the archive True >>> moa.add([3, 3, 3, 3]) False >>> get_mo_archive.hypervolume_final_float_type = fractions.Fraction >>> moa = get_mo_archive([[1, 2, 3, 4], [2, 3, 4, 5], [4, 3, 2, 1]], ... reference_point=[5, 5, 5, 5], infos=["A", "B", "C"]) >>> moa.infos # returns the list of infos for each point in the archive ['C', 'A'] >>> moa.hypervolume Fraction(44, 1) >>> get_mo_archive.hypervolume_final_float_type = float >>> get_mo_archive.hypervolume_computation_float_type = float >>> moa2 = get_mo_archive([[1, 2, 3, 4], [2, 3, 4, 5], [4, 3, 2, 1]], ... reference_point=[5, 5, 5, 5]) >>> moa2.hypervolume 44.0 """ try: hypervolume_final_float_type = fractions.Fraction hypervolume_computation_float_type = fractions.Fraction except: hypervolume_final_float_type = float hypervolume_computation_float_type = float def __init__(self, list_of_f_vals=None, reference_point=None, infos=None, hypervolume_final_float_type=None, hypervolume_computation_float_type=None): """ Create a new 4 objective archive object. f-vals beyond the `reference_point` are pruned away. The `reference_point` is also used to compute the hypervolume. 
infos are an optional list of additional information about the points in the archive. """ hypervolume_final_float_type = MOArchive4obj.hypervolume_final_float_type \ if hypervolume_final_float_type is None else hypervolume_final_float_type hypervolume_computation_float_type = MOArchive4obj.hypervolume_computation_float_type \ if hypervolume_computation_float_type is None else hypervolume_computation_float_type super().__init__(list_of_f_vals=list_of_f_vals, reference_point=reference_point, infos=infos, n_obj=4, hypervolume_final_float_type=hypervolume_final_float_type, hypervolume_computation_float_type=hypervolume_computation_float_type) self._hypervolume_already_computed = False self.remove_dominated() hv = self._set_HV() self._length = len(list(self)) self._hypervolume_already_computed = True if hv is not None and hv > 0: self._hypervolume_plus = self._hypervolume else: if list_of_f_vals is None or len(list_of_f_vals) == 0: self._hypervolume_plus = -inf else: self._hypervolume_plus = -min([self.distance_to_hypervolume_area(f) for f in list_of_f_vals]) def add(self, f_vals, info=None, update_hypervolume=True): """ Add a new point to the archive. update_hypervolume should be set to True, unless adding multiple points at once, in which case it is slightly more efficient to set it to True only for last point >>> from moarchiving.get_archive import get_mo_archive >>> moa = get_mo_archive(reference_point=[5, 5, 5, 5]) >>> moa.add([2, 3, 4, 5]) False >>> moa.add([1, 2, 3, 4]) True >>> list(moa) [[1, 2, 3, 4]] >>> moa.add([4, 3, 2, 1]) True >>> list(moa) [[4, 3, 2, 1], [1, 2, 3, 4]] >>> moa.add([2, 2, 2, 2]) True >>> list(moa) [[4, 3, 2, 1], [2, 2, 2, 2], [1, 2, 3, 4]] """ if len(f_vals) != self.n_obj: raise ValueError(f"argument `f_pair` must be of length {self.n_obj}, was ``{f_vals}``") if self.dominates(f_vals): return False if not self.in_domain(f_vals): dist_to_hv_area = self.distance_to_hypervolume_area(f_vals) if -dist_to_hv_area > self._hypervolume_plus: self._hypervolume_plus = -dist_to_hv_area return False self.__init__(list(self) + [f_vals], self.reference_point, self.infos + [info]) return True def remove(self, f_vals): """ Remove a point from the archive. Returns False if the point is not in the archive and it's Info if the point is removed >>> from moarchiving.get_archive import get_mo_archive >>> moa = get_mo_archive([[1, 2, 3, 4], [2, 2, 2, 2], [4, 3, 2, 1]], ... reference_point=[5, 5, 5, 5], infos=["A", "B", "C"]) >>> moa.remove([2, 2, 2, 2]) 'B' >>> list(moa) [[4, 3, 2, 1], [1, 2, 3, 4]] >>> moa.remove([1, 2, 3, 4]) 'A' >>> list(moa) [[4, 3, 2, 1]] """ points_list = list(self) if f_vals not in points_list: return False point_idx = points_list.index(f_vals) point_info = self.infos[point_idx] self.__init__([p for p in points_list if p != f_vals], self.reference_point, [info for p, info in zip(points_list, self.infos) if p != f_vals]) return point_info def add_list(self, list_of_f_vals, infos=None): """ Add a list of points to the archive. 
>>> from moarchiving.get_archive import get_mo_archive >>> moa = get_mo_archive(reference_point=[5, 5, 5, 5]) >>> moa.add_list([[1, 2, 4, 4], [1, 2, 3, 4]], infos=["A", "B"]) >>> list(moa), moa.infos ([[1, 2, 3, 4]], ['B']) >>> moa.add_list([[4, 3, 2, 1], [2, 2, 2, 2], [3, 3, 3, 3]], infos=["C", "D", "E"]) >>> list(moa), moa.infos ([[4, 3, 2, 1], [2, 2, 2, 2], [1, 2, 3, 4]], ['C', 'D', 'B']) >>> moa.add_list([[1, 1, 1, 1]]) >>> list(moa), moa.infos ([[1, 1, 1, 1]], [None]) """ if infos is None: infos = [None] * len(list_of_f_vals) self.__init__(list(self) + list_of_f_vals, self.reference_point, self.infos + infos) def copy(self): """ Return a copy of the archive. >>> from moarchiving.get_archive import get_mo_archive >>> moa = get_mo_archive([[1, 2, 3, 4], [2, 2, 2, 2], [4, 3, 2, 1]], ... reference_point=[5, 5, 5, 5], infos=["A", "B", "C"]) >>> moa2 = moa.copy() >>> list(moa2), moa2.infos ([[4, 3, 2, 1], [2, 2, 2, 2], [1, 2, 3, 4]], ['C', 'B', 'A']) >>> moa.remove([2, 2, 2, 2]) 'B' >>> moa2.add([0, 1, 3, 1.5], "D") True >>> list(moa2), moa2.infos ([[4, 3, 2, 1], [0, 1, 3, 1.5], [2, 2, 2, 2]], ['C', 'D', 'B']) >>> list(moa), moa.infos ([[4, 3, 2, 1], [1, 2, 3, 4]], ['C', 'A']) """ return MOArchive4obj(list(self), self.reference_point, self.infos) def _get_kink_points(self): """ Function that returns the kink points of the archive. Kink point are calculated by making a sweep of the archive, where the state is one 3 objective archive of all possible kink points found so far, and another 3 objective archive which stores the non-dominated points so far in the sweep >>> from moarchiving.get_archive import get_mo_archive >>> moa = get_mo_archive([[1, 2, 3, 4], [4, 3, 2, 1]], reference_point=[5, 5, 5, 5]) >>> moa._get_kink_points() [[5, 5, 5, 1], [5, 3, 5, 4], [4, 5, 5, 4], [5, 5, 2, 5], [5, 3, 3, 5], [4, 5, 3, 5], [5, 2, 5, 5], [1, 5, 5, 5]] """ if self.reference_point is None: max_point = max([max([point[i] for point in self]) for i in range(3)]) + 1 ref_point = [max_point] * self.n_obj else: ref_point = self.reference_point # initialize the two states, one for points and another for kink points points_state = MOArchive3obj(reference_point=ref_point[:3]) kink_candidates = MOArchive3obj([ref_point[:3]], reference_point=[r + 1 for r in ref_point[:3]]) # initialize the point dictionary, which will store the fourth coordinate of the points point_dict = { tuple(ref_point[:3]): -inf } kink_points = [] for point in self: # add the point to the kink state to get the dominated kink points, then take it out if kink_candidates.add(point[:3]): removed = kink_candidates._removed.copy() for removed_point in removed: w = point_dict[tuple(removed_point)] if w < point[3]: kink_points.append([removed_point[0], removed_point[1], removed_point[2], point[3]]) kink_candidates._removed.clear() kink_candidates.remove(point[:3]) # add the point to the point state, and get two new kink point candidates points_state.add(point[:3]) new_kink_candidates = points_state._get_kink_points() new_kink_candidates = [p for p in new_kink_candidates if (p[0] == point[0] or p[1] == point[1] or p[2] == point[2])] for p in new_kink_candidates: point_dict[tuple(p)] = point[3] kink_candidates.add(p) for point in kink_candidates: kink_points.append([point[0], point[1], point[2], ref_point[3]]) return kink_points def hypervolume_improvement(self, f_vals): """ Returns the hypervolume improvement of adding a point to the archive >>> from moarchiving.get_archive import get_mo_archive >>> moa = get_mo_archive([[1, 2, 3, 4], [4, 3, 2, 1]], 
reference_point=[5, 5, 5, 5]) >>> moa.hypervolume_improvement([2, 2, 2, 2]) 49.0 >>> moa.hypervolume_improvement([3, 3, 4, 5]) -1.0 """ if f_vals in list(self): return 0 if self.dominates(f_vals): return -1 * self.distance_to_pareto_front(f_vals) moa_copy = self.copy() moa_copy.add(f_vals) return self.hypervolume_final_float_type(moa_copy.hypervolume - self.hypervolume) def compute_hypervolume(self, reference_point=None): """ Compute the hypervolume of the archive. >>> from moarchiving.get_archive import get_mo_archive >>> moa = get_mo_archive([[1, 2, 3, 4], [4, 3, 2, 1]], reference_point=[5, 5, 5, 5]) >>> moa.compute_hypervolume() 44.0 """ if reference_point is not None: _warnings.warn("Reference point given at the initialization is used " "in 3 objective hypervolume computation") if self._hypervolume_already_computed: return self._hypervolume return self.hypervolume_final_float_type( hv4dplusR(self.head, self.hypervolume_computation_float_type)) def remove_dominated(self): """ Preprocessing step to remove dominated points. """ di = self.n_obj - 1 current = self.head.next[di] stop = self.head.prev[di] non_dominated_points = [] dominated_points = [] while current != stop: dominated = False for node in non_dominated_points: if node != current and all(node.x[i] <= current.x[i] for i in range(3)) and any( node.x[i] < current.x[i] for i in range(3)): dominated = True break if dominated: dominated_points.append(current) else: non_dominated_points.append(current) current = current.next[di] for point in dominated_points: remove_from_z(point, archive_dim=self.n_obj) if __name__ == "__main__": import doctest print('doctest.testmod() in moarchiving4obj.py') print(doctest.testmod()) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923733.0 moarchiving-1.0.0/moarchiving/moarchiving_parent.py0000644000076500000240000002750715000266125021764 0ustar00hansenstaff# -*- coding: utf-8 -*- """ This module contains a parent class to MOArchiving3obj and MOArchiving4obj, to avoid code duplication. """ from moarchiving.moarchiving_utils import setup_cdllist, weakly_dominates inf = float('inf') class MOArchiveParent: """Parent class for Moarchiving 3 and 4 objective classes, to avoid code duplication """ def __init__(self, list_of_f_vals=None, reference_point=None, infos=None, n_obj=None, hypervolume_final_float_type=None, hypervolume_computation_float_type=None): """Create a new archive object. 
""" self.hypervolume_final_float_type = hypervolume_final_float_type self.hypervolume_computation_float_type = hypervolume_computation_float_type if list_of_f_vals is not None and len(list_of_f_vals): try: list_of_f_vals = list_of_f_vals.tolist() except: pass list_of_f_vals = [list(f_vals) for f_vals in list_of_f_vals] if len(list_of_f_vals[0]) != n_obj: raise ValueError(f"need elements of length {n_obj}, got {list_of_f_vals[0]}" " as first element") else: list_of_f_vals = [] self.n_obj = n_obj self._length = 0 if infos is None: infos = [None] * len(list_of_f_vals) if reference_point is not None: self.reference_point = list(reference_point) self.head = setup_cdllist(self.n_obj, list_of_f_vals, self.reference_point, infos) else: self.reference_point = None self.head = setup_cdllist(self.n_obj, list_of_f_vals, [inf] * self.n_obj, infos) self._kink_points = None def __len__(self): return self._length def __iter__(self): pg = self._points_generator() el = next(pg, None) while el is not None: yield el.x[:self.n_obj] el = next(pg, None) def add(self, new, info=None, update_hypervolume=True): raise NotImplementedError("This method should be implemented in the child class") def remove(self, f_vals): raise NotImplementedError("This method should be implemented in the child class") def add_list(self, list_of_f_vals, infos=None): raise NotImplementedError("This method should be implemented in the child class") def copy(self): raise NotImplementedError("This method should be implemented in the child class") def dominates(self, f_val): """return `True` if any element of `points` dominates or is equal to `f_val`. Otherwise return `False`. >>> from moarchiving.get_archive import get_mo_archive >>> archive = get_mo_archive([[1, 2, 3], [3, 2, 1]]) >>> archive.dominates([2, 2, 2]) False >>> archive.dominates([1, 2, 3]) True >>> archive.dominates([3, 3, 3]) True """ for point in self._points_generator(): if weakly_dominates(point.x, f_val, self.n_obj): return True # points are sorted in lexicographic order, so we can return False # once we find a point that is lexicographically greater than f_val elif f_val[self.n_obj - 1] < point.x[self.n_obj - 1]: return False return False def dominators(self, f_val, number_only=False): """return the list of all `f_val`-dominating elements in `self`, including an equal element. ``len(....dominators(...))`` is hence the number of dominating elements which can also be obtained without creating the list with ``number_only=True``. >>> from moarchiving.get_archive import get_mo_archive >>> archive = get_mo_archive([[1, 2, 3], [3, 2, 1], [2, 2, 2], [3, 0, 3]]) >>> archive.dominators([1, 1, 1]) [] >>> archive.dominators([3, 3, 3]) [[3, 2, 1], [2, 2, 2], [3, 0, 3], [1, 2, 3]] >>> archive.dominators([2, 3, 4]) [[2, 2, 2], [1, 2, 3]] >>> archive.dominators([3, 3, 3], number_only=True) 4 """ dominators = [] if not number_only else 0 for point in self._points_generator(): if all(point.x[i] <= f_val[i] for i in range(self.n_obj)): if number_only: dominators += 1 else: dominators.append(point.x[:self.n_obj]) # points are sorted in lexicographic order, so we can break the loop # once we find a point that is lexicographically greater than f_val elif f_val[self.n_obj - 1] < point.x[self.n_obj - 1]: break return dominators def in_domain(self, f_vals, reference_point=None): """return `True` if `f_vals` is dominating the reference point, `False` otherwise. `True` means that `f_vals` contributes to the hypervolume if not dominated by other elements. 
>>> from moarchiving.get_archive import get_mo_archive >>> archive3obj = get_mo_archive(reference_point=[3, 3, 3]) >>> archive3obj.in_domain([2, 2, 2]) True >>> archive3obj.in_domain([0, 0, 3]) False >>> archive4obj = get_mo_archive(reference_point=[3, 3, 3, 3]) >>> archive4obj.in_domain([2, 2, 2, 2]) True >>> archive4obj.in_domain([0, 0, 0, 3]) False """ try: if len(f_vals) != self.n_obj: raise ValueError(f"argument `f_vals` must be of length {self.n_obj}, " f"was ``{f_vals}``") except TypeError: raise TypeError(f"argument `f_vals` must be a list, was ``{f_vals}``") if reference_point is None: reference_point = self.reference_point if reference_point is None: return True if any(f_vals[i] >= reference_point[i] for i in range(self.n_obj)): return False return True def _points_generator(self, include_head=False): """returns the points in the archive in a form of a python generator instead of a circular doubly linked list """ first_iter = True di = self.n_obj - 1 if include_head: curr = self.head stop = self.head else: curr = self.head.next[di].next[di] stop = self.head.prev[di] if curr == stop: return while curr != stop or first_iter: yield curr first_iter = False curr = curr.next[di] @property def infos(self): """`list` of complementary information corresponding to each archive entry, corresponding to each of the points in the archive >>> from moarchiving.get_archive import get_mo_archive >>> moa = get_mo_archive([[1, 2, 3], [3, 2, 1], [2, 2, 2]], infos=["a", "b", "c"]) >>> moa.infos ['b', 'c', 'a'] """ return [point.info for point in self._points_generator()] @property def hypervolume(self): """Return the hypervolume of the archive """ if self.reference_point is None: raise ValueError("to compute the hypervolume indicator a reference" " point is needed (must be given initially)") return self._hypervolume @property def hypervolume_plus(self): """Return the hypervolume_plus of the archive """ if self.reference_point is None: raise ValueError("to compute the hypervolume_plus indicator a reference" " point is needed (must be given initially)") return self._hypervolume_plus @property def contributing_hypervolumes(self): """`list` of hypervolume contributions of each point in the archive""" return [self.contributing_hypervolume(point[:self.n_obj]) for point in self] def contributing_hypervolume(self, f_vals): """Return the hypervolume contribution of a point in the archive >>> from moarchiving.get_archive import get_mo_archive >>> get_mo_archive.hypervolume_final_float_type = float >>> moa = get_mo_archive([[1, 2, 3], [3, 2, 1], [2, 3, 2]], reference_point=[4, 4, 4]) >>> moa.contributing_hypervolume([1, 2, 3]) 3.0 >>> moa.contributing_hypervolume([3, 2, 1]) 3.0 >>> moa.contributing_hypervolume([2, 3, 2]) 1.0 """ try: if len(f_vals) != self.n_obj: raise ValueError(f"argument `f_vals` must be of length {self.n_obj}, " f"was ``{f_vals}``") except TypeError: raise TypeError(f"argument `f_vals` must be a list, was ``{f_vals}``") if f_vals in self: hv_before = self._hypervolume removed_info = self.remove(f_vals) hv_after = self._hypervolume self.add(f_vals, info=removed_info) return hv_before - hv_after else: return self.hypervolume_improvement(f_vals) def _get_kink_points(self): raise NotImplementedError("This method should be implemented in the child class") def distance_to_pareto_front(self, f_vals, ref_factor=1): """Return the distance to the Pareto front of the archive, by calculating the distances to the kink points >>> from moarchiving.get_archive import get_mo_archive >>> moa = 
get_mo_archive([[1, 2, 3], [3, 2, 1], [2, 2, 2]], reference_point=[5, 5, 5]) >>> moa.distance_to_pareto_front([1, 2, 3]) 0.0 >>> moa.distance_to_pareto_front([3, 2, 3]) 0.0 >>> moa.distance_to_pareto_front([3, 3, 3]) 1.0 """ if self.in_domain(f_vals) and not self.dominates(f_vals): return 0 # return minimum distance if self.reference_point is not None: ref_di = [ref_factor * max((0, f_vals[i] - self.reference_point[i])) for i in range(self.n_obj)] else: ref_di = [0] * self.n_obj if len(self) == 0: return sum([ref_di[i] ** 2 for i in range(self.n_obj)]) ** 0.5 if self._kink_points is None: self._kink_points = self._get_kink_points() distances_squared = [] for point in self._kink_points: distances_squared.append(sum([max((0, f_vals[i] - point[i])) ** 2 for i in range(self.n_obj)])) return min(distances_squared) ** 0.5 def distance_to_hypervolume_area(self, f_vals): """Return the distance to the hypervolume area of the archive >>> from moarchiving.get_archive import get_mo_archive >>> moa = get_mo_archive(reference_point=[1, 1, 1]) >>> moa.distance_to_hypervolume_area([1, 2, 1]) 1.0 >>> moa.distance_to_hypervolume_area([1, 1, 1]) 0.0 >>> moa.distance_to_hypervolume_area([0, 0, 0]) 0.0 >>> moa.distance_to_hypervolume_area([4, 5, 1]) 5.0 """ if self.reference_point is None: return 0 return sum([max((0, f_vals[i] - self.reference_point[i])) ** 2 for i in range(self.n_obj)])**0.5 def hypervolume_improvement(self, f_vals): raise NotImplementedError("This method should be implemented in the child class") def _set_HV(self): """Set the hypervolume of the archive """ if self.reference_point is None: return None self._hypervolume = self.hypervolume_final_float_type(self.compute_hypervolume()) if self._hypervolume > 0: self._hypervolume_plus = self._hypervolume return self._hypervolume def compute_hypervolume(self): raise NotImplementedError("This method should be implemented in the child class") if __name__ == "__main__": import doctest print('doctest.testmod() in moarchiving_parent.py') print(doctest.testmod()) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923733.0 moarchiving-1.0.0/moarchiving/moarchiving_utils.py0000644000076500000240000003731115000266125021625 0ustar00hansenstaff"""This module contains various utility functions and classes for the MOArchiving package.""" import warnings as _warnings try: from sortedcontainers import SortedKeyList except ImportError: _warnings.warn('`sortedcontainers` module not installed, moarchiving for 3 and 4 objectives will not work') SortedKeyList = list class DLNode: """ A class to represent a node in a doubly linked list. """ def __init__(self, x=None, info=None): """ Initialize a node with the given x-coordinate and info. 
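The node stores the objective vector in `x` (padded to length 4), the `closest` and `cnext` pointers in the first two coordinates used by the dimension-sweep hypervolume routines, `next`/`prev` pointers for the circular doubly linked lists, the number of dominators `ndomr`, and an arbitrary `info` object.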
""" self.x = x if x else [None, None, None, None] self.closest = [None, None] # closest in x coordinate, closest in y coordinate self.cnext = [None, None] # current next self.next = [None, None, None, None] self.prev = [None, None, None, None] self.ndomr = 0 # number of dominators self.info = info def copy(self): """ copy the node """ new_node = DLNode() for var in self.__dict__: if isinstance(getattr(self, var), list): setattr(new_node, var, getattr(self, var).copy()) else: setattr(new_node, var, getattr(self, var)) return new_node class ArchiveSortedList(SortedKeyList): """ A class to represent a sorted list of nodes, together with additional methods that follow the definition in the paper.""" def __init__(self, iterable=None, key=lambda node: node.x[1]): """ Initialize the sorted list with the given iterable and key function. """ if SortedKeyList is list: raise ImportError("`MySortedList `requires `sortedcontainers` to be installed") super().__init__(iterable=iterable, key=key) def __str__(self): """ Return a string representation of the sorted list. """ return str([node.x for node in self]) def head_y(self): """ Return the point q from the list, with the smallest q_y """ return self[0] def head_x(self): """ Return the point q from the list, with the smallest q_x """ return self[-1] def next_y(self, s): """ Return the point q from the list, with the smallest q_y > s_y, for a given point s from the list """ return self[self.index(s) + 1] def next_x(self, s): """ Return the point q from the list, with the smallest q_x > s_x, for a given point s from the list """ return self[self.index(s) - 1] def outer_delimiter_y(self, p): """ Return the point q from the list, with the smallest q_y > p_y, such that q_x < p_x """ i = self.bisect_left(p) while i < len(self) and self[i].x[0] >= p.x[0]: i += 1 return self[i] def outer_delimiter_x(self, p): """ Return the point q from the list, with the smallest q_x > p_x, such that q_y < p_y """ i = self.bisect_left(p) - 1 while i > 0 and self[i].x[1] >= p.x[1]: i -= 1 return self[i] def remove_dominated_y(self, p, s): """ For s = outer_delimiter_x(p), remove all points q, such that p* <= q* from the list, and return them sorted by ascending order of q_y """ e = self.next_y(s) points_to_remove = [] while p.x[0] <= e.x[0]: points_to_remove.append(e) e = self.next_y(e) for q in points_to_remove: self.remove(q) return points_to_remove def remove_dominated_x(self, p, s): """ For s = outer_delimiter_y(p), remove all points q, such that p* <= q* from the list, and return them sorted by ascending order of q_x """ e = self.next_x(s) points_to_remove = [] while p.x[1] <= e.x[1]: points_to_remove.append(e) e = self.next_x(e) for q in points_to_remove: self.remove(q) return points_to_remove def add_y(self, p, s): """ Insert point p into the list, if s_y < p_y < next_y(s)_y or p_y < head_y_y """ if len(self) == 0: self.add(p) elif s.x[1] < p.x[1] < self.next_y(s).x[1]: self.add(p) elif p.x[1] < self.head_y().x[1] and s is None: self.add(p) def add_x(self, p, s): """ Insert point p into the list, if s_x < p_x < next_x(s)_x or p_x < head_x_x """ if len(self) == 0: self.add(p) elif s.x[0] < p.x[0] < self.next_x(s).x[0]: self.add(p) elif p.x[0] < self.head_x().x[0] and s is None: self.add(p) def my_lexsort(keys): """ Sort an array of keys in lexicographic order and return the indices. 
Equivalent to np.lexsort """ idk_key_tuple = list(enumerate([list(x)[::-1] for x in zip(*keys)])) idk_key_tuple.sort(key=lambda x: x[1]) return [x[0] for x in idk_key_tuple] # --------------- Auxiliary Functions --------------------- def lexicographic_less(a, b): """ Returns True if a is lexicographically less than b, False otherwise """ return a[2] < b[2] or (a[2] == b[2] and (a[1] < b[1] or (a[1] == b[1] and a[0] <= b[0]))) def init_sentinels_new(list_nodes, ref, dim): """ Initialize the sentinel nodes for the list of nodes given the reference point and the dimensionality """ s1, s2, s3 = list_nodes[0], list_nodes[1], list_nodes[2] # Initialize s1 node s1.x = [float('-inf'), ref[1], float('-inf'), float('-inf')] s1.closest = [s2, s1] s1.next = [None, None, s2, s2] s1.cnext = [None, None] s1.prev = [None, None, s3, s3] s1.ndomr = 0 # Initialize s2 node s2.x = [ref[0], float('-inf'), float('-inf'), float('-inf')] s2.closest = [s2, s1] s2.next = [None, None, s3, s3] s2.cnext = [None, None] s2.prev = [None, None, s1, s1] s2.ndomr = 0 # Initialize s3 node s3.x = [float('-inf'), float('-inf'), ref[2], ref[3] if dim == 4 else float('-inf')] s3.closest = [s2, s1] s3.next = [None, None, s1, None] s3.cnext = [None, None] s3.prev = [None, None, s2, s2] s3.ndomr = 0 return s1 def add_to_z(new): """ Add a new node to the list sorted by z """ new.next[2] = new.prev[2].next[2] new.next[2].prev[2] = new new.prev[2].next[2] = new def remove_from_z(old, archive_dim): """ Remove a node from the list sorted by z """ di = archive_dim - 1 old.prev[di].next[di] = old.next[di] old.next[di].prev[di] = old.prev[di] def setup_z_and_closest(head, new): """ Sets up the closest[0] and closest[1] pointers for the new node """ closest1 = head closest0 = head.next[2] q = head.next[2].next[2] newx = new.x while q and lexicographic_less(q.x, newx): if q.x[0] <= newx[0] and q.x[1] <= newx[1]: new.ndomr += 1 elif q.x[1] < newx[1] and ( q.x[0] < closest0.x[0] or (q.x[0] == closest0.x[0] and q.x[1] < closest0.x[1])): closest0 = q elif q.x[0] < newx[0] and ( q.x[1] < closest1.x[1] or (q.x[1] == closest1.x[1] and q.x[0] < closest1.x[0])): closest1 = q q = q.next[2] new.closest[0] = new.cnext[0] = closest0 new.closest[1] = new.cnext[1] = closest1 new.prev[2] = q.prev[2] if q else None new.next[2] = q def update_links(head, new, p): stop = head.prev[2] ndom = 0 all_delimiters_visited = False while p != stop and not all_delimiters_visited: if p.x[0] <= new.x[0] and p.x[1] <= new.x[1] and (p.x[0] < new.x[0] or p.x[1] < new.x[1]): all_delimiters_visited = True else: if new.x[0] <= p.x[0]: if new.x[1] <= p.x[1]: p.ndomr += 1 ndom += 1 remove_from_z(p, 3) elif new.x[0] < p.x[0] and (new.x[1] < p.closest[1].x[1] or ( new.x[1] == p.closest[1].x[1] and (new.x[0] < p.closest[1].x[0] or ( new.x[0] == p.closest[1].x[0] and new.x[2] < p.closest[1].x[2])))): p.closest[1] = new elif new.x[1] < p.x[1] and (new.x[0] < p.closest[0].x[0] or ( new.x[0] == p.closest[0].x[0] and (new.x[1] < p.closest[0].x[1] or ( new.x[1] == p.closest[0].x[1] and new.x[2] < p.closest[0].x[2])))): p.closest[0] = new p = p.next[2] return ndom def restart_list_y(head): """ Resets the cnext pointers for the y-dimension.""" head.next[2].cnext[1] = head head.cnext[0] = head.next[2] def compute_area_simple(p, di, s, u, Fc): """ Computes the area as described in the paper """ dj = 1 - di area = Fc(0) q = s area += (Fc(q.x[dj]) - Fc(p[dj])) * (Fc(u.x[di]) - Fc(p[di])) while p[dj] < u.x[dj]: q = u u = u.cnext[di] area += (Fc(q.x[dj]) - Fc(p[dj])) * (Fc(u.x[di]) - Fc(q.x[di])) 
return area def restart_base_setup_z_and_closest(head, new): # Sets up closest[0] and closest[1] for the new node p = head.next[2].next[2] closest1 = head closest0 = head.next[2] newx = new.x restart_list_y(head) while p and lexicographic_less(p.x, newx): p.cnext[0] = p.closest[0] p.cnext[1] = p.closest[1] p.cnext[0].cnext[1] = p p.cnext[1].cnext[0] = p if p.x[0] <= newx[0] and p.x[1] <= newx[1]: new.ndomr += 1 elif p.x[1] < newx[1] and ( p.x[0] < closest0.x[0] or (p.x[0] == closest0.x[0] and p.x[1] < closest0.x[1])): closest0 = p elif p.x[0] < newx[0] and ( p.x[1] < closest1.x[1] or (p.x[1] == closest1.x[1] and p.x[0] < closest1.x[0])): closest1 = p p = p.next[2] new.closest[0] = closest0 new.closest[1] = closest1 new.prev[2] = p.prev[2] if p else None new.next[2] = p def one_contribution_3_obj(head, new, Fc): """ Computes the contribution of adding a new point to the archive in three dimensions """ restart_base_setup_z_and_closest(head, new) if new.ndomr > 0: return 0 new.cnext[0] = new.closest[0] new.cnext[1] = new.closest[1] area = compute_area_simple(new.x, 1, new.cnext[0], new.cnext[0].cnext[1], Fc) p = new.next[2] lastz = Fc(new.x[2]) volume = Fc(0) while p and (p.x[0] > new.x[0] or p.x[1] > new.x[1]): volume += area * (Fc(p.x[2]) - lastz) p.cnext[0] = p.closest[0] p.cnext[1] = p.closest[1] if p.x[0] >= new.x[0] and p.x[1] >= new.x[1]: area -= compute_area_simple(p.x, 1, p.cnext[0], p.cnext[0].cnext[1], Fc) p.cnext[1].cnext[0] = p p.cnext[0].cnext[1] = p elif p.x[0] >= new.x[0]: if p.x[0] <= new.cnext[0].x[0]: x = [p.x[0], new.x[1], p.x[2]] area -= compute_area_simple(x, 1, new.cnext[0], new.cnext[0].cnext[1], Fc) p.cnext[0] = new.cnext[0] p.cnext[1].cnext[0] = p new.cnext[0] = p else: if p.x[1] <= new.cnext[1].x[1]: x = [new.x[0], p.x[1], p.x[2]] area -= compute_area_simple(x, 0, new.cnext[1], new.cnext[1].cnext[0], Fc) p.cnext[1] = new.cnext[1] p.cnext[0].cnext[1] = p new.cnext[1] = p lastz = p.x[2] p = p.next[2] if p: volume += area * (Fc(p.x[2]) - Fc(lastz)) return volume def setup_cdllist(n_obj, points, ref, infos): """ Set up a circular doubly linked list from the given data and reference point """ points = [p for p in points if strictly_dominates(p, ref, n_obj)] n = len(points) head = [DLNode(info=info) for info in ["s1", "s2", "s3"] + [None] * n] # init_sentinels_new accepts a list at the beginning, therefore we use head[0:3] init_sentinels_new(head[0:3], ref, n_obj) di = n_obj - 1 # Dimension index for sorting (z-axis in 3D) if n > 0: # Convert data to a structured format suitable for sorting and linking if n_obj == 3: # Using lexsort to sort by z, y, x in ascending order sorted_indices = my_lexsort(([p[0] for p in points], [p[1] for p in points], [p[2] for p in points])) elif n_obj == 4: # Using lexsort to sort by w, z, y, x in ascending order sorted_indices = my_lexsort(([p[0] for p in points], [p[1] for p in points], [p[2] for p in points], [p[3] for p in points])) else: raise ValueError("Only 3D and 4D points are supported") # Create nodes from sorted points for i, index in enumerate(sorted_indices): head[i + 3].x = points[index] head[i + 3].info = infos[index] if n_obj == 3: # Add 0.0 for 3d points so that it matches the original C code head[i + 3].x.append(0.0) # Link nodes s = head[0].next[di] s.next[di] = head[3] head[3].prev[di] = s for i in range(3, n + 2): head[i].next[di] = head[i + 1] if i + 1 < len(head) else head[0] head[i + 1].prev[di] = head[i] s = head[0].prev[di] s.prev[di] = head[n + 2] head[n + 2].next[di] = s return head[0] def weakly_dominates(a, b, 
n_obj): """ Return True if a weakly dominates b, False otherwise >>> weakly_dominates([1, 2, 3], [2, 3, 3], n_obj=3) True >>> weakly_dominates([1, 2, 3], [2, 2, 2], n_obj=3) False >>> weakly_dominates([1, 2, 3], [1, 2, 3], n_obj=3) True """ return all(a[i] <= b[i] for i in range(n_obj)) def strictly_dominates(a, b, n_obj): """ Return True if a strictly dominates b, False otherwise >>> strictly_dominates([1, 2, 3], [2, 3, 3], n_obj=3) True >>> strictly_dominates([1, 2, 3], [2, 2, 2], n_obj=3) False >>> strictly_dominates([1, 2, 3], [1, 2, 3], n_obj=3) False """ return (all(a[i] <= b[i] for i in range(n_obj)) and any(a[i] < b[i] for i in range(n_obj))) def hv3dplus(head, Fc): """ Computes the hypervolume indicator in d=3 in linear time """ p = head area = Fc(0) volume = Fc(0) restart_list_y(head) p = p.next[2].next[2] stop = head.prev[2] while p != stop: if p.ndomr < 1: p.cnext[0] = p.closest[0] p.cnext[1] = p.closest[1] area += compute_area_simple(p.x, 1, p.cnext[0], p.cnext[0].cnext[1], Fc) p.cnext[0].cnext[1] = p p.cnext[1].cnext[0] = p else: remove_from_z(p, 3) volume += area * (Fc(p.next[2].x[2]) - Fc(p.x[2])) p = p.next[2] return volume def hv4dplusR(head, Fc): """ Compute the hypervolume indicator in d=4 by iteratively computing the hypervolume indicator in d=3 (using hv3d+) """ hv = Fc(0) stop = head.prev[3] new = head.next[3].next[3] while new != stop: setup_z_and_closest(head, new) # Compute cx and cy of 'new' and determine next and prev in z add_to_z(new) # Add 'new' to list sorted by z update_links(head, new, new.next[2]) # Update cx and cy of the points above 'new' in z # and remove dominated points volume = hv3dplus(head, Fc) # Compute hv indicator in d=3 in linear time height = Fc(new.next[3].x[3]) - Fc(new.x[3]) hv += volume * height # Update hypervolume in d=4 new = new.next[3] return hv def hv4dplusU(head, Fc): """ Compute the hypervolume indicator in d=4 by iteratively computing the one contribution problem in d=3. 
""" volume = Fc(0) hv = Fc(0) last = head.prev[3] new = head.next[3].next[3] while new != last: volume += one_contribution_3_obj(head, new, Fc) add_to_z(new) update_links(head, new, new.next[2]) height = Fc(new.next[3].x[3]) - Fc(new.x[3]) hv += volume * height new = new.next[3] return hv ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923733.0 moarchiving-1.0.0/moarchiving/test.py0000644000076500000240000000276215000266125017060 0ustar00hansenstaff"""Run all doctests and unit tests of the moarchiving package:: python -m moarchiving.test """ import doctest import unittest import moarchiving as moa import moarchiving.tests def run_doctests(): for doctest_suite in [moa.moarchiving, moa.moarchiving3obj, moa.moarchiving4obj, moa.moarchiving_parent, moa.constrained_moarchive]: print(f'doctest.testmod({doctest_suite})') print(doctest.testmod(doctest_suite)) def run_unittests(): for unit_test_suite in [moa.tests.test_moarchiving2obj, moa.tests.test_moarchiving3obj, moa.tests.test_moarchiving4obj, moa.tests.test_constrained_moarchiving, moa.tests.test_sorted_list]: print(f'unittest.TextTestRunner().run(unittest.TestLoader().loadTestsFromModule({unit_test_suite}))') unittest.TextTestRunner().run(unittest.TestLoader().loadTestsFromModule(unit_test_suite)) if __name__ == "__main__": tmp = moa.BiobjectiveNondominatedSortedList.make_expensive_asserts moa.BiobjectiveNondominatedSortedList.make_expensive_asserts = True # print(moa.moarching.BiobjectiveNondominatedSortedList.make_expensive_asserts) run_doctests() run_unittests() moa.BiobjectiveNondominatedSortedList.make_expensive_asserts = tmp # print(moa.moarching.BiobjectiveNondominatedSortedList.make_expensive_asserts) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1744923773.4939437 moarchiving-1.0.0/moarchiving/tests/0000755000076500000240000000000015000266175016667 5ustar00hansenstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923732.0 moarchiving-1.0.0/moarchiving/tests/__init__.py0000644000076500000240000000041615000266124020773 0ustar00hansenstaff# -*- coding: utf-8 -*- """ Here are all the tests for moarchiving package. """ from . 
import (test_moarchiving2obj, test_moarchiving3obj, test_moarchiving4obj, test_constrained_moarchiving, test_sorted_list) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923732.0 moarchiving-1.0.0/moarchiving/tests/point_sampling.py0000644000076500000240000001522615000266124022264 0ustar00hansenstaff""" This module provides functions to generate non-dominated points for testing purposes """ import math import random def get_non_dominated_points(n_points, n_dim=3, mode='spherical'): """ Returns a list of non-dominated points: - n_points: number of points - n_dim: number of dimensions - mode: 'spherical' or 'linear' """ if n_dim == 2: if mode == 'spherical': return spherical_front_2d(1, n_points, normalized=False) elif mode == 'linear': return linear_front_2d(1, n_points, normalized=False) if n_dim == 3: if mode == 'spherical': return spherical_front_3d(1, n_points, normalized=False) elif mode == 'linear': return linear_front_3d(1, n_points, normalized=False) elif n_dim == 4: if mode == 'spherical': return spherical_front_4d(1, n_points, normalized=False) elif mode == 'linear': return linear_front_4d(1, n_points, normalized=False) else: raise ValueError("Invalid number of dimensions") def get_random_points(n_points, n_dim): """ Returns a list of random points between 0 and 1, with n_points and n_dim dimensions """ return [[random.random() for _ in range(n_dim)] for _ in range(n_points)] def get_stacked_points(n_points, points_definitions): """ Returns a list of points with n_points and n_dim dimensions, where point from i-th dimension is defined by points_definitions[i]: - 'random' for random value between 0 and 1 - int for a fixed value """ points = [] for i in range(n_points): points.append([]) for p_def in points_definitions: if p_def == 'random': points[-1].append(random.random()) elif isinstance(p_def, int): points[-1].append(p_def) else: raise ValueError("Invalid point definition") return points def permute_points(points, permutation): """ takes a list of points (n x dim) and a permutation (dim) and returns the points with the permutation applied """ return [[point[permutation[i]] for i in range(len(permutation))] for point in points] def spherical_front_2d(distance, num_points, normalized): """ Returns a list of non-dominated points on the 2D spherical front """ vectors = [] if normalized: v1 = [0, distance] v2 = [distance, 0] vectors = [v1, v2] while len(vectors) < num_points: phi = random.random() * math.pi / 2 vectors.append([distance * math.cos(phi), distance * math.sin(phi)]) return vectors def linear_front_2d(distance, num_points, normalized=True): """ Returns a list of non-dominated points on the 2D linear front """ vectors = [] if normalized: v1 = [1, 1 - distance] v2 = [1 - distance, 1] vectors = [v1, v2] while len(vectors) < num_points: x = random.random() vectors.append([x, 1 - x]) return vectors def spherical_front_3d(distance, num_points, normalized=True): """ Returns a list of non-dominated points on the 3D spherical front """ vectors = [] if normalized: v1 = [0, 0, distance] v2 = [0, distance, 0] v3 = [distance, 0, 0] vectors = [v1, v2, v3] while len(vectors) < num_points: x, y, z = 1, 1, 1 while (math.sqrt(x * x + y * y + z * z) > 1) or (x < 0.5 and y < 0.5 and z < 0.5): x = next_gaussian_double() y = next_gaussian_double() z = next_gaussian_double() r1 = math.sqrt(x * x + y * y + z * z) alpha = math.acos(z / r1) beta = math.atan2(y, x) vect = [distance * math.sin(alpha) * math.cos(beta), distance * math.sin(alpha) * math.sin(beta), 
distance * math.cos(alpha)] vectors.append(vect) return vectors def linear_front_3d(distance, num_points, normalized): """ Returns a list of non-dominated points on the 3D linear front """ vectors = [] if normalized: v1 = [1, 1, 1 - distance] v2 = [1, 1 - distance, 1] v3 = [1 - distance, 1, 1] vectors = [v1, v2, v3] while len(vectors) < num_points: array = [0.0] for _ in range(2): array.append(distance * random.random()) array.append(distance) array.sort() x = 1 - (array[1] - array[0]) y = 1 - (array[2] - array[1]) z = 1 - (array[3] - array[2]) vectors.append([x, y, z]) return vectors def linear_front_4d(distance, num_points, normalized): """ Returns a list of non-dominated points on the 4D linear front """ vectors = [] if normalized: v1 = [0, 0, 0, distance] v2 = [0, 0, distance, 0] v3 = [0, distance, 0, 0] v4 = [distance, 0, 0, 0] vectors = [v1, v2, v3, v4] while len(vectors) < num_points: array = [0.0] + [distance * random.random() for _ in range(3)] + [distance] array.sort() x = array[1] - array[0] y = array[2] - array[1] z = array[3] - array[2] w = array[4] - array[3] v = [x, y, z, w] vectors.append(v) return vectors def spherical_front_4d(distance, num_points, normalized): """ Returns a list of non-dominated points on the 4D spherical front """ vectors = [] if normalized: v1 = [0, 0, 0, distance] v2 = [0, 0, distance, 0] v3 = [0, distance, 0, 0] v4 = [distance, 0, 0, 0] vectors = [v1, v2, v3, v4] while len(vectors) < num_points: x, y, z, w = 1, 1, 1, 1 while (math.sqrt(x * x + y * y + z * z + w * w) > 1) or (x < 0.5 and y < 0.5 and z < 0.5 and w < 0.5): x = next_gaussian_double() y = next_gaussian_double() z = next_gaussian_double() w = next_gaussian_double() alpha = math.atan(math.sqrt(y * y + z * z + w * w) / x) beta = math.atan(math.sqrt(z * z + w * w) / y) gamma = 2 * math.atan(z / (math.sqrt(z * z + w * w) + w)) v = [ distance * math.cos(alpha), distance * math.sin(alpha) * math.cos(beta), distance * math.sin(alpha) * math.sin(beta) * math.cos(gamma), distance * math.sin(alpha) * math.sin(beta) * math.sin(gamma) ] vectors.append(v) return vectors def next_gaussian_double(): factor = 2.0 while True: result = random.gauss(0, 1) if result < -factor: continue if result > factor: continue if result >= 0: result = result / (2 * factor) else: result = (2 * factor + result) / (2 * factor) return result ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923733.0 moarchiving-1.0.0/moarchiving/tests/test_constrained_moarchiving.py0000644000076500000240000001233215000266125025173 0ustar00hansenstaff""" Test the CMOArchive class """ from moarchiving.get_archive import get_cmo_archive import unittest import random def list_to_set(lst): """ Converts a list of lists to a set of tuples """ return set([tuple(p) for p in lst]) class TestCMOArchiving(unittest.TestCase): """ Tests for the CMOArchive class """ def test_hypervolume_easy(self): """ test the hypervolume calculation for a simple case """ f_vals = [[1, 2, 3], [2, 3, 1], [3, 1, 2]] g_vals = [0, 0, 0] moa = get_cmo_archive(f_vals, g_vals, reference_point=[4, 4, 4], infos=["A", "B", "C"]) self.assertEqual(moa.hypervolume, 13) self.assertEqual(moa.infos, ["B", "C", "A"]) g_vals = [[0], [1], [0]] moa = get_cmo_archive(f_vals, g_vals, reference_point=[4, 4, 4], infos=["A", "B", "C"]) self.assertEqual(moa.hypervolume, 10) self.assertEqual(moa.infos, ["C", "A"]) def test_infos_dominated(self): """ test if the infos about dominated points are removed """ f_vals = [[1, 2, 3], [3, 2, 1], [2, 3, 4], [2, 1, 0], [0, 0, 
0]] g_vals = [[0, 0], [0, 0], [0, 10], [0, 0], [9, 1]] infos = ["A", "B", "C", "D", "E"] moa = get_cmo_archive(f_vals, g_vals, [6, 6, 6], infos) # assert that only points A and D are stored in the archive self.assertSetEqual({"A", "D"}, set(moa.infos)) def test_add(self): """ test if the add_points function works correctly """ ref_point = [6, 6] f_vals = [[1, 2], [3, 4], [5, 1]] g_vals = [42, 0, 0] moa_ref = get_cmo_archive(f_vals, g_vals, ref_point) moa_no_ref = get_cmo_archive(f_vals, g_vals) for moa in [moa_ref, moa_no_ref]: # add point that is not dominated and does not dominate any other point moa.add([1, 5], [0]) self.assertEqual([[1, 5], [3, 4], [5, 1]], list(moa)) # add point that is dominated by another point in the archive moa.add([4, 4], [0]) self.assertEqual([[1, 5], [3, 4], [5, 1]], list(moa)) # add point that dominates another point in the archive moa.add([3, 3], [0]) self.assertEqual([[1, 5], [3, 3], [5, 1]], list(moa)) # don't add point, because it is not feasible moa.add([1, 1], [1]) self.assertEqual([[1, 5], [3, 3], [5, 1]], list(moa)) # do not add point with that have any constraint violation > 0 moa.add([2, 2], [-3, 2]) self.assertEqual([[1, 5], [3, 3], [5, 1]], list(moa)) def test_copy_CMOArchive(self): """ Test the copy function of the CMOArchive class """ f_vals = [[1, 2, 3], [2, 3, 1], [3, 1, 2]] g_vals = [0, 0, 0] moa = get_cmo_archive(f_vals, g_vals, reference_point=[6, 6, 6]) moa_copy = moa.copy() self.assertEqual(moa.hypervolume_plus_constr, moa_copy.hypervolume_plus_constr) moa.add([2, 2, 2], 0) self.assertEqual(len(moa), 4) self.assertEqual(len(moa_copy), 3) self.assertFalse(moa.hypervolume_plus_constr == moa_copy.hypervolume_plus_constr) def test_remove(self, n_points=100, n_points_remove=50): """ Test the remove function, by comparing the archive with 100 points added and then 50 removed, to the with only the other 50 points added """ f_vals = [[1, 2, 3], [2, 3, 1], [3, 1, 2]] moa_remove = get_cmo_archive(f_vals, [0, 0, 0], reference_point=[6, 6, 6]) moa_remove.remove([1, 2, 3]) self.assertEqual(len(moa_remove), 2) self.assertSetEqual(list_to_set(list(moa_remove)), list_to_set(f_vals[1:])) def test_hypervolume_plus_constr(self): """ test the hypervolume_plus_constr indicator """ moa = get_cmo_archive(reference_point=[1, 1, 1], tau=10) self.assertEqual(moa.hypervolume_plus_constr, -float('inf')) moa.add([2, 2, 2], 99) self.assertEqual(moa.hypervolume_plus_constr, - 99 - 10) moa.add_list([[0, 0, 5], [1, 2, 1], [3, 3, 2]], [14, 7, 76]) self.assertEqual(moa.hypervolume_plus_constr, -7 - 10) moa.add([20, 2, 20], 0) self.assertEqual(moa.hypervolume_plus_constr, -10) moa.add_list([[0, 0, 0], [4, 5, 1]], [3, 0]) self.assertEqual(moa.hypervolume_plus_constr, -5) moa.add([1, 1, 1], 0) self.assertEqual(moa.hypervolume_plus_constr, 0) moa.add([0.5, 0.5, 0.5], 0) self.assertEqual(moa.hypervolume_plus_constr, moa.hypervolume) moa = get_cmo_archive(reference_point=[1, 1, 1], tau=1) prev_hv_plus_constr = moa.hypervolume_plus_constr for i in range(1000): f_vals = [10 * random.random(), 5 * random.random(), random.random()] g_vals = max(random.random() - 0.3, 0) hv_plus_constr_improvement = moa.hypervolume_plus_constr_improvement(f_vals, g_vals) moa.add(f_vals, g_vals) self.assertLessEqual(prev_hv_plus_constr, moa.hypervolume_plus_constr) self.assertAlmostEqual(moa.hypervolume_plus_constr - prev_hv_plus_constr, hv_plus_constr_improvement, places=8) prev_hv_plus_constr = moa.hypervolume_plus_constr if __name__ == '__main__': unittest.main() 
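For orientation, a minimal usage sketch (not part of the package sources) of the constrained-archive interface exercised by the tests above; the printed values follow the assertions in test_hypervolume_easy and test_add:

from moarchiving.get_archive import get_cmo_archive

# three feasible points (constraint value 0) with a reference point
moa = get_cmo_archive([[1, 2, 3], [2, 3, 1], [3, 1, 2]], [0, 0, 0],
                      reference_point=[4, 4, 4], infos=["A", "B", "C"])
print(moa.hypervolume)  # 13, as asserted in test_hypervolume_easy
moa.add([2, 2, 2], 0)   # feasible, non-dominated point is added
moa.add([1, 1, 1], 1)   # infeasible point (constraint violation > 0) is not added
print(len(moa))         # 4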
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923732.0 moarchiving-1.0.0/moarchiving/tests/test_moarchiving2obj.py0000644000076500000240000001170415000266124023360 0ustar00hansenstaff""" Test the BiobjectiveNondominatedSortedList class """ from moarchiving.moarchiving import BiobjectiveNondominatedSortedList import unittest import random import math inf = float('inf') def list_to_set(lst): """ Converts a list of lists to a set of tuples """ return set([tuple(p) for p in lst]) class TestMOArchiving2obj(unittest.TestCase): """ Tests for the BiobjectiveNondominatedSortedList class """ def test_hypervolume_easy(self): """ test the hypervolume calculation for a simple case """ points = [[1, 2], [2, 1]] moa = BiobjectiveNondominatedSortedList(points, reference_point=[3, 3], infos=["A", "B"]) self.assertEqual(moa.hypervolume, 3) def test_infos_non_dominated(self): """ test if the infos are stored correctly - if the points are non dominated, the infos should be the same""" points = [ [1, 2], [2, 1], [1.3, 1.7], [1.5, 1.5] ] infos = [str(p) for p in points] moa = BiobjectiveNondominatedSortedList(points, [3, 3], infos=infos) # assert that the infos are stored in the same order as the points self.assertEqual([str(p[:2]) for p in moa], moa.infos) # assert that all the points in the archive are non dominated and thus have the same info self.assertSetEqual(set([str(p) for p in points]), set(moa.infos)) moa_add = BiobjectiveNondominatedSortedList(reference_point=[3, 3]) moa_add.add_list(points, infos=infos) self.assertEqual([str(p[:2]) for p in moa_add], moa_add.infos) self.assertSetEqual(set([str(p) for p in points]), set(moa_add.infos)) def test_infos_dominated(self): """ test if the infos about dominated points are removed """ points = [ [1, 3], [3, 2], [2, 3], [3, 1] ] infos = ["A", "B", "C", "D"] moa = BiobjectiveNondominatedSortedList(points, [6, 6], infos=infos) # assert that only points A and D are stored in the archive self.assertSetEqual({"A", "D"}, set(moa.infos)) moa_add = BiobjectiveNondominatedSortedList(reference_point=[6, 6]) moa_add.add_list(points, infos=infos) self.assertSetEqual({"A", "D"}, set(moa_add.infos)) def test_add(self): """ test if the add_points function works correctly """ ref_point = [6, 6] start_points = [[1, 3], [5, 1]] moa_ref = BiobjectiveNondominatedSortedList(start_points, ref_point, infos=["A", "B"]) moa_no_ref = BiobjectiveNondominatedSortedList(start_points, infos=["A", "B"]) for moa in [moa_ref, moa_no_ref]: # add point that is not dominated and does not dominate any other point u1 = [3, 2] moa.add(u1, info="C") self.assertSetEqual(list_to_set(start_points + [u1]), list_to_set(moa)) self.assertSetEqual({"A", "B", "C"}, set(moa.infos)) # add point that is dominated by another point in the archive u2 = [4, 4] moa.add(u2, info="D") self.assertSetEqual(list_to_set(start_points + [u1]), list_to_set(moa)) self.assertSetEqual({"A", "B", "C"}, set(moa.infos)) # add point that dominates another point in the archive u3 = [2, 2] moa.add(u3, info="E") self.assertSetEqual(list_to_set(start_points + [u3]), list_to_set(moa)) self.assertSetEqual({"A", "B", "E"}, set(moa.infos)) def test_copy_MOArchive(self): """ Test the copy function of the MOArchive3obj class """ points = [[1, 3], [2, 2], [3, 1]] moa = BiobjectiveNondominatedSortedList(points, reference_point=[6, 6]) moa_copy = moa.copy() self.assertEqual(moa.hypervolume, moa_copy.hypervolume) moa.add([1.5, 1.5]) moa_copy.add([0.5, 5]) self.assertNotEqual(moa.hypervolume, 
moa_copy.hypervolume) self.assertEqual(len(moa), 3) self.assertEqual(len(moa_copy), 4) def test_hypervolume_plus(self): """ test the hypervolume_plus indicator """ moa = BiobjectiveNondominatedSortedList(reference_point=[1, 1]) self.assertEqual(moa.hypervolume_plus, -inf) moa.add([2, 2]) self.assertEqual(moa.hypervolume_plus, -math.sqrt(2)) moa.add_list([[0, 5], [1, 2], [3, 2]]) self.assertEqual(moa.hypervolume_plus, -1) moa.add([1, 1]) self.assertEqual(moa.hypervolume_plus, 0) moa.add([0.5, 0.5]) self.assertEqual(moa.hypervolume_plus, moa.hypervolume) moa = BiobjectiveNondominatedSortedList(reference_point=[1, 1]) prev_hv_plus = moa.hypervolume_plus for i in range(1000): point = [10 * random.random(), 10 * random.random()] moa.add(point) self.assertLessEqual(prev_hv_plus, moa.hypervolume_plus) prev_hv_plus = moa.hypervolume_plus if __name__ == '__main__': unittest.main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923732.0 moarchiving-1.0.0/moarchiving/tests/test_moarchiving3obj.py0000644000076500000240000007007615000266124023370 0ustar00hansenstaff""" Test the MOArchive3obj class """ from moarchiving.moarchiving3obj import MOArchive3obj from moarchiving.moarchiving_utils import DLNode, my_lexsort from moarchiving.moarchiving import BiobjectiveNondominatedSortedList as MOArchive2obj from moarchiving.tests.point_sampling import (get_non_dominated_points, get_random_points, get_stacked_points) import unittest import math import random def list_to_set(lst): return set([tuple(p) for p in lst]) class TestMOArchiving3obj(unittest.TestCase): def test_hypervolume_easy(self): """ test the hypervolume calculation for a simple case """ points = [[1, 2, 3], [2, 3, 1], [3, 1, 2]] moa = MOArchive3obj(points, reference_point=[4, 4, 4], infos=["A", "B", "C"]) self.assertEqual(moa.hypervolume, 13) def test_infos_non_dominated(self): """ test if the infos are stored correctly - if the points are non dominated, the infos should be the same """ points = [ [1, 2, 3], [3, 2, 1], [2, 3, 1], [1, 3, 2] ] infos = [str(p) for p in points] moa = MOArchive3obj(points, [6, 6, 6], infos) # assert that the infos are stored in the same order as the points self.assertEqual([str(p[:3]) for p in moa], moa.infos) # assert that all the points in the archive are non dominated and thus have the same info self.assertSetEqual(set([str(p) for p in points]), set(moa.infos)) def test_infos_dominated(self): """ test if the infos about dominated points are removed """ points = [ [1, 2, 3], [3, 2, 1], [2, 3, 4], [2, 1, 0] ] infos = ["A", "B", "C", "D"] moa = MOArchive3obj(points, [6, 6, 6], infos) # assert that only points A and D are stored in the archive self.assertSetEqual({"A", "D"}, set(moa.infos)) def test_in_domain(self): """ test if the in_domain function works correctly """ ref_point = [6, 6, 6] moa = MOArchive3obj([[1, 1, 1]], ref_point) # test if the points are in the domain self.assertTrue(moa.in_domain([1, 2, 3])) self.assertTrue(moa.in_domain([5.9, 5.9, 5.9])) # test if the point is not in the domain self.assertFalse(moa.in_domain([7, 8, 9])) self.assertFalse(moa.in_domain([6, 6, 6])) self.assertFalse(moa.in_domain([0, 0, 6])) def test_add(self): """ test if the add_points function works correctly """ ref_point = [6, 6, 6] start_points = [[1, 2, 5], [3, 5, 1], [5, 1, 4]] moa_ref = MOArchive3obj(start_points, ref_point) moa_no_ref = MOArchive3obj(start_points) for moa in [moa_ref, moa_no_ref]: # add point that is not dominated and does not dominate any other point u1 = [2, 3, 3] 
moa.add(u1) self.assertSetEqual(list_to_set(start_points + [u1]), list_to_set(list(moa))) # add point that is dominated by another point in the archive u2 = [4, 5, 2] moa.add(u2) self.assertSetEqual(list_to_set(start_points + [u1]), list_to_set(list(moa))) # add point that dominates another point in the archive u3 = [3, 1, 2] moa.add(u3) self.assertSetEqual(list_to_set(start_points[:2] + [u1, u3]), list_to_set(list(moa))) def test_hypervolume_after_add(self): """ Calculate the hypervolume of the archive after adding points and compare it to the hypervolume obtained by adding the points to a new archive """ ref_point = [1, 1, 1] pop_size = 20 n_gen = 4 points = get_non_dominated_points(pop_size * n_gen) for gen in range(1, n_gen + 1): moa_true = MOArchive3obj(points[:(gen * pop_size)], ref_point) true_hv = moa_true.hypervolume moa_add = MOArchive3obj([], ref_point) for i in range(gen * pop_size): moa_add.add(points[i]) moa_add_gen = MOArchive3obj([], ref_point) for i in range(gen): moa_add_gen.add_list(points[(i * pop_size):((i + 1) * pop_size)]) self.assertAlmostEqual(moa_add.hypervolume, true_hv, places=6) self.assertAlmostEqual(moa_add_gen.hypervolume, true_hv, places=6) self.assertEqual(len(moa_add), len(moa_true)) self.assertEqual(len(moa_add_gen), len(moa_true)) def test_length(self): """ Test that the length of the archive is correct after adding and removing points """ ref_point = [1, 1, 1] n_points_add = 100 points = get_stacked_points(n_points_add, ['random', 'random', 'random']) moa = MOArchive3obj([], ref_point) # add points one by one for point in points: moa.add(point) self.assertEqual(len(moa), len(list(moa))) # remove points one by one points = list(moa) for point in points: moa.remove(point) self.assertEqual(len(moa), len(list(moa))) def test_dominates(self): """ Test the dominates function """ ref_point = [6, 6, 6] points = [[1, 3, 5], [5, 3, 1], [4, 4, 4]] moa = MOArchive3obj(points, ref_point) # test that the points that are already in the archive are dominated for p in points: self.assertTrue(moa.dominates(p)) # test other dominated points self.assertTrue(moa.dominates([5, 5, 5])) self.assertTrue(moa.dominates([2, 4, 5])) # test non dominated points self.assertFalse(moa.dominates([3, 3, 3])) self.assertFalse(moa.dominates([2, 5, 4])) self.assertFalse(moa.dominates([5, 1, 3])) def test_dominators(self): """ Test the dominators function """ ref_point = [6, 6, 6] points = [[1, 2, 3], [3, 1, 2], [2, 3, 1], [3, 2, 1], [2, 1, 3], [1, 3, 2]] moa = MOArchive3obj(points, ref_point) # test that the points that are already in the archive are dominated by itself for p in points: self.assertEqual([p], moa.dominators(p)) self.assertEqual(1, moa.dominators(p, number_only=True)) # test other dominated points self.assertEqual(list_to_set([[1, 2, 3], [2, 3, 1], [2, 1, 3], [1, 3, 2]]), list_to_set(moa.dominators([2, 3, 4]))) self.assertEqual(4, moa.dominators([2, 3, 4], number_only=True)) self.assertEqual([], moa.dominators([2, 2, 2])) self.assertEqual(0, moa.dominators([2, 2, 2], number_only=True)) self.assertEqual(list_to_set(points), list_to_set(moa.dominators([3, 3, 3]))) self.assertEqual(6, moa.dominators([3, 3, 3], number_only=True)) def test_distance_to_hypervolume_area(self): """ Test the distance_to_hypervolume_area function first for a case where the reference point is not set, then for points in and outside the hypervolume area """ moa = MOArchive3obj() self.assertEqual(0, moa.distance_to_hypervolume_area([1, 1, 1])) moa.reference_point = [2, 2, 2] # for points in the 
hypervolume area, the distance should be 0 self.assertEqual(0, moa.distance_to_hypervolume_area([0, 0, 0])) self.assertEqual(0, moa.distance_to_hypervolume_area([1, 1, 1])) self.assertEqual(0, moa.distance_to_hypervolume_area([2, 2, 2])) self.assertEqual(0, moa.distance_to_hypervolume_area([0, 1, 2])) # for points outside the hypervolume area, the distance should be the Euclidean distance # to the hypervolume area self.assertEqual(1, moa.distance_to_hypervolume_area([2, 2, 3])) self.assertEqual(1, moa.distance_to_hypervolume_area([2, 0, 3])) self.assertEqual(10, moa.distance_to_hypervolume_area([0, 0, 12])) self.assertAlmostEqual(math.sqrt(2), moa.distance_to_hypervolume_area([0, 3, 3]), places=6) self.assertAlmostEqual(math.sqrt(2), moa.distance_to_hypervolume_area([2, 3, 3]), places=6) self.assertAlmostEqual(math.sqrt(3), moa.distance_to_hypervolume_area([3, 3, 3]), places=6) self.assertAlmostEqual(math.sqrt(75), moa.distance_to_hypervolume_area([7, 7, 7]), places=6) def test_distance_to_pareto_front_simple(self): """ Test the distance_to_pareto_front function by comparing it to hand calculated values """ points = [[1, 2, 3], [2, 3, 1], [3, 1, 2]] moa = MOArchive3obj(points, reference_point=[6, 6, 6]) self.assertEqual(0, moa.distance_to_pareto_front([1, 1, 1])) self.assertEqual(3 ** 0.5, moa.distance_to_pareto_front([4, 4, 4])) self.assertEqual((1 + 1 + 6 ** 2) ** 0.5, moa.distance_to_pareto_front([7, 7, 7])) self.assertEqual(0, moa.distance_to_pareto_front([2, 4, 3])) self.assertEqual(0, moa.distance_to_pareto_front([3, 2, 4])) self.assertEqual(1, moa.distance_to_pareto_front([3, 3, 4])) def test_distance_to_pareto_front_compare_2obj(self): """ Test the distance_to_pareto_front function by comparing it to the 2obj version """ n_points = 100 n_test_points = 100 points = get_stacked_points(n_points, ['random', 'random', 0]) moa3obj = MOArchive3obj(points, reference_point=[1, 1, 1]) moa2obj = MOArchive2obj([[p[0], p[1]] for p in points], reference_point=[1, 1]) new_points = get_stacked_points(n_test_points, ['random', 'random', 1]) for point in new_points: d2 = moa2obj.distance_to_pareto_front(point[:2]) d3 = moa3obj.distance_to_pareto_front(point) self.assertAlmostEqual(d2, d3, places=8) def test_copy_DLNode(self): """ Test the copy function of the DLNode class """ n1 = DLNode([1, 2, 3, 4], "node 1") n2 = DLNode([5, 6, 7, 8], "node 2") n1.closest[1] = n2 n2.closest[0] = n1 n1_copy = n1.copy() n2_copy = n2.copy() n2_copy.x = [-1, -2, -3, -4] n1.x[0] = 10 n1.closest[1] = n1 self.assertEqual(n1_copy.x[0], 1) self.assertEqual(n1_copy.closest[1].x[0], 5) self.assertEqual(n2.x[0], 5) self.assertEqual(n2_copy.x[0], -1) def test_copy_MOArchive(self): """ Test the copy function of the MOArchive3obj class """ points = [[1, 2, 3], [2, 3, 1], [3, 1, 2]] moa = MOArchive3obj(points, reference_point=[6, 6, 6]) moa_copy = moa.copy() self.assertEqual(moa.hypervolume, moa_copy.hypervolume) moa.add([2, 2, 2]) self.assertEqual(len(moa), 4) self.assertEqual(len(moa_copy), 3) self.assertFalse(moa.hypervolume == moa_copy.hypervolume) def test_remove(self, n_points=100, n_points_remove=50): """ Test the remove function, by comparing the archive with 100 points added and then 50 removed, to the with only the other 50 points added """ points = [[1, 2, 3], [2, 3, 1], [3, 1, 2]] moa_remove = MOArchive3obj(points, reference_point=[6, 6, 6]) moa_remove.remove([1, 2, 3]) self.assertEqual(len(moa_remove), 2) self.assertSetEqual(list_to_set(list(moa_remove)), list_to_set(points[1:])) 
self.assertEqual(moa_remove.hypervolume, MOArchive3obj(points[1:], reference_point=[6, 6, 6]).hypervolume) points = get_non_dominated_points(n_points) remove_idx = list(range(n_points_remove)) keep_idx = [i for i in range(n_points) if i not in remove_idx] moa_true = MOArchive3obj([points[i] for i in keep_idx], reference_point=[1, 1, 1]) moa_remove = MOArchive3obj(points, reference_point=[1, 1, 1]) for i in remove_idx: moa_remove.remove(points[i]) self.assertEqual(len(moa_remove), len(list(moa_remove))) moa_add = MOArchive3obj([], reference_point=[1, 1, 1]) for i in keep_idx: moa_add.add(points[i]) # assert that the points are the same in all archives and the hypervolume is the same self.assertEqual(len(moa_add), len(moa_true)) self.assertEqual(len(moa_remove), len(moa_true)) self.assertSetEqual(list_to_set(list(moa_remove)), list_to_set(list(moa_true))) self.assertSetEqual(list_to_set(list(moa_add)), list_to_set(list(moa_true))) self.assertEqual(moa_remove.hypervolume, moa_true.hypervolume) self.assertEqual(moa_add.hypervolume, moa_true.hypervolume) moa = MOArchive3obj([[1, 2, 3], [2, 3, 1], [3, 1, 2]], reference_point=[6, 6, 6]) moa.add([1, 1, 1]) moa.remove([1, 1, 1]) self.assertEqual(len(moa), 0) def test_contributing_hypervolume(self): """ Test the contributing_hypervolume function first for a simple case, and then compare it to the 2obj version, with one objective set to 0 """ points = [[1, 2, 3], [2, 3, 1], [3, 1, 2]] moa = MOArchive3obj(points, reference_point=[4, 4, 4]) self.assertEqual(moa.contributing_hypervolume([1, 2, 3]), 3) self.assertEqual(moa.contributing_hypervolume([2, 3, 1]), 3) self.assertEqual(moa.contributing_hypervolume([3, 1, 2]), 3) points = [[1, 2, 3], [1, 3, 2], [2, 1, 3], [2, 3, 1], [3, 1, 2], [3, 2, 1]] moa = MOArchive3obj(points, reference_point=[4, 4, 4]) for p in points: self.assertEqual(moa.contributing_hypervolume(list(p)), 1) points = get_stacked_points(100, ['random', 'random', 0]) moa = MOArchive3obj(points, reference_point=[1, 1, 1]) moa2obj = MOArchive2obj([[p[0], p[1]] for p in points], reference_point=[1, 1]) for p in moa2obj: self.assertAlmostEqual(moa.contributing_hypervolume(p + [0]), moa2obj.contributing_hypervolume(p), places=8) def test_hypervolume_improvement(self): """ Test the hypervolume_improvement function first for a simple case, and then compare it to the 2obj version, with one objective set to 0 """ points = [[1, 2, 3], [2, 3, 1], [3, 1, 2]] moa = MOArchive3obj(points, reference_point=[4, 4, 4]) self.assertEqual(moa.hypervolume_improvement([1, 2, 3]), 0) self.assertEqual(moa.hypervolume_improvement([2, 3, 1]), 0) self.assertEqual(moa.hypervolume_improvement([3, 1, 2]), 0) self.assertEqual(moa.hypervolume_improvement([4, 4, 4]), -moa.distance_to_pareto_front([4, 4, 4])) self.assertEqual(moa.hypervolume_improvement([1, 1, 1]), 14) self.assertEqual(moa.hypervolume_improvement([2, 2, 2]), 1) points = get_stacked_points(100, ['random', 'random', 0]) moa = MOArchive3obj(points, reference_point=[1, 1, 1]) moa2obj = MOArchive2obj([[p[0], p[1]] for p in points], reference_point=[1, 1]) new_points = get_random_points(100, 2) hv_start = moa.hypervolume for p in new_points: hv_imp2obj = float(moa2obj.hypervolume_improvement(p)) if hv_imp2obj > 0: self.assertAlmostEqual(hv_imp2obj, moa.hypervolume_improvement(p + [0]), places=8) else: self.assertAlmostEqual(hv_imp2obj, moa.hypervolume_improvement(p + [1]), places=8) # make sure this doesn't change the hypervolume of the archive hv_end = moa.hypervolume self.assertAlmostEqual(hv_start, hv_end, 
places=8) def test_get_non_dominated_points(self): """ Test the get_non_dominated_points function: - check if the number of points is correct - check if the points are non-dominated and in the [0, 1] range """ n_points = 1000 for mode in ['spherical', 'linear']: points = get_non_dominated_points(n_points, mode=mode) self.assertEqual(len(points), n_points) moa = MOArchive3obj(points, reference_point=[1, 1, 1]) self.assertEqual(len(moa), n_points) self.assertSetEqual(list_to_set(points), list_to_set(moa)) def test_lexsort(self): """ Test the lexsort function, by comparing it to the output of the numpy implementation """ points = [ [0.16, 0.86, 0.47], [0.66, 0.37, 0.29], [0.79, 0.79, 0.04], [0.28, 0.99, 0.29], [0.51, 0.37, 0.38], [0.92, 0.62, 0.07], [0.16, 0.53, 0.70], [0.01, 0.98, 0.94], [0.67, 0.17, 0.54], [0.79, 0.72, 0.05] ] my_lexsort_result = my_lexsort(([p[0] for p in points], [p[1] for p in points], [p[2] for p in points])) np_lexsort_result = [2, 9, 5, 1, 3, 4, 0, 8, 6, 7] self.assertEqual(my_lexsort_result, np_lexsort_result) points = [ [0.6394267984578837, 0.025010755222666936, 0.27502931836911926], [0.22321073814882275, 0.7364712141640124, 0.6766994874229113], [0.8921795677048454, 0.08693883262941615, 0.4219218196852704], [0.029797219438070344, 0.21863797480360336, 0.5053552881033624], [0.026535969683863625, 0.1988376506866485, 0.6498844377795232], [0.5449414806032167, 0.2204406220406967, 0.5892656838759087], [0.8094304566778266, 0.006498759678061017, 0.8058192518328079], [0.6981393949882269, 0.3402505165179919, 0.15547949981178155], [0.9572130722067812, 0.33659454511262676, 0.09274584338014791], [0.09671637683346401, 0.8474943663474598, 0.6037260313668911] ] my_lexsort_result = my_lexsort(([p[0] for p in points], [p[1] for p in points], [p[2] for p in points])) np_lexsort_result = [8, 7, 0, 2, 3, 5, 9, 4, 1, 6] self.assertEqual(my_lexsort_result, np_lexsort_result) def test_hypervolume_plus(self): """ test the hypervolume_plus indicator """ moa = MOArchive3obj(reference_point=[1, 1, 1]) self.assertEqual(moa.hypervolume_plus, -float('inf')) moa.add([2, 2, 2]) self.assertEqual(moa.hypervolume_plus, -math.sqrt(3)) moa.add_list([[0, 0, 5], [1, 2, 1], [3, 3, 2]]) self.assertEqual(moa.hypervolume_plus, -1) moa.add([1, 1, 1]) self.assertEqual(moa.hypervolume_plus, 0) moa.add([0.5, 0.5, 0.5]) self.assertEqual(moa.hypervolume_plus, moa.hypervolume) moa = MOArchive3obj(reference_point=[1, 1, 1]) prev_hv_plus = moa.hypervolume_plus for i in range(1000): point = [10 * random.random(), 10 * random.random(), 10 * random.random()] moa.add(point) self.assertLessEqual(prev_hv_plus, moa.hypervolume_plus) prev_hv_plus = moa.hypervolume_plus def test_hypervolume(self): """ test the hypervolume calculation, by comparing to the result of original implementation in C""" points = [ [0.16, 0.86, 0.47], [0.66, 0.37, 0.29], [0.79, 0.79, 0.04], [0.28, 0.99, 0.29], [0.51, 0.37, 0.38], [0.92, 0.62, 0.07], [0.16, 0.53, 0.70], [0.01, 0.98, 0.94], [0.67, 0.17, 0.54], [0.79, 0.72, 0.05] ] moa = MOArchive3obj(points, reference_point=[1, 1, 1]) self.assertAlmostEqual(moa.hypervolume, 0.318694, places=6) self.assertEqual(moa.hypervolume_plus, moa.hypervolume) points = [ [0.6394267984578837, 0.025010755222666936, 0.27502931836911926], [0.22321073814882275, 0.7364712141640124, 0.6766994874229113], [0.8921795677048454, 0.08693883262941615, 0.4219218196852704], [0.029797219438070344, 0.21863797480360336, 0.5053552881033624], [0.026535969683863625, 0.1988376506866485, 0.6498844377795232], [0.5449414806032167, 
0.2204406220406967, 0.5892656838759087], [0.8094304566778266, 0.006498759678061017, 0.8058192518328079], [0.6981393949882269, 0.3402505165179919, 0.15547949981178155], [0.9572130722067812, 0.33659454511262676, 0.09274584338014791], [0.09671637683346401, 0.8474943663474598, 0.6037260313668911] ] moa = MOArchive3obj(points, reference_point=[1, 1, 1]) self.assertAlmostEqual(moa.hypervolume, 0.52192086148367, places=6) self.assertEqual(moa.hypervolume_plus, moa.hypervolume) points = [ [0.6394267984578837, 0.025010755222666936, 0.27502931836911926], [0.22321073814882275, 0.7364712141640124, 0.6766994874229113], [0.8921795677048454, 0.08693883262941615, 0.4219218196852704], [0.029797219438070344, 0.21863797480360336, 0.5053552881033624], [0.026535969683863625, 0.1988376506866485, 0.6498844377795232], [0.5449414806032167, 0.2204406220406967, 0.5892656838759087], [0.8094304566778266, 0.006498759678061017, 0.8058192518328079], [0.6981393949882269, 0.3402505165179919, 0.15547949981178155], [0.9572130722067812, 0.33659454511262676, 0.09274584338014791], [0.09671637683346401, 0.8474943663474598, 0.6037260313668911], [0.8071282732743802, 0.7297317866938179, 0.5362280914547007], [0.9731157639793706, 0.3785343772083535, 0.552040631273227], [0.8294046642529949, 0.6185197523642461, 0.8617069003107772], [0.577352145256762, 0.7045718362149235, 0.045824383655662215], [0.22789827565154686, 0.28938796360210717, 0.0797919769236275], [0.23279088636103018, 0.10100142940972912, 0.2779736031100921], [0.6356844442644002, 0.36483217897008424, 0.37018096711688264], [0.2095070307714877, 0.26697782204911336, 0.936654587712494], [0.6480353852465935, 0.6091310056669882, 0.171138648198097], [0.7291267979503492, 0.1634024937619284, 0.3794554417576478], [0.9895233506365952, 0.6399997598540929, 0.5569497437746462], [0.6846142509898746, 0.8428519201898096, 0.7759999115462448], [0.22904807196410437, 0.03210024390403776, 0.3154530480590819], [0.26774087597570273, 0.21098284358632646, 0.9429097143350544], [0.8763676264726689, 0.3146778807984779, 0.65543866529488], [0.39563190106066426, 0.9145475897405435, 0.4588518525873988], [0.26488016649805246, 0.24662750769398345, 0.5613681341631508], [0.26274160852293527, 0.5845859902235405, 0.897822883602477], [0.39940050514039727, 0.21932075915728333, 0.9975376064951103], [0.5095262936764645, 0.09090941217379389, 0.04711637542473457], [0.10964913035065915, 0.62744604170309, 0.7920793643629641], [0.42215996679968404, 0.06352770615195713, 0.38161928650653676], [0.9961213802400968, 0.529114345099137, 0.9710783776136181], [0.8607797022344981, 0.011481021942819636, 0.7207218193601946], [0.6817103690265748, 0.5369703304087952, 0.2668251899525428], [0.6409617985798081, 0.11155217359587644, 0.434765250669105], [0.45372370632920644, 0.9538159275210801, 0.8758529403781941], [0.26338905075109076, 0.5005861130502983, 0.17865188053013137], [0.9126278393448205, 0.8705185698367669, 0.2984447914486329], [0.6389494948660052, 0.6089702114381723, 0.1528392685496348], [0.7625108000751513, 0.5393790301196257, 0.7786264786305582], [0.5303536721951775, 0.0005718961279435053, 0.3241560570046731], [0.019476742385832302, 0.9290986162646171, 0.8787218778231842], [0.8316655293611794, 0.30751412540266143, 0.05792516649418755], [0.8780095992040405, 0.9469494452979941, 0.08565345206787878], [0.4859904633166138, 0.06921251846838361, 0.7606021652572316], [0.7658344293069878, 0.1283914644997628, 0.4752823780987313], [0.5498035934949439, 0.2650566289400591, 0.8724330410852574], [0.4231379402008869, 0.21179820544208205, 
0.5392960887794583], [0.7299310690899762, 0.2011510633896959, 0.31171629130089495], [0.9951493566608947, 0.6498780576394535, 0.43810008391450406], [0.5175758410355906, 0.12100419586826572, 0.22469733703155736], [0.33808556214745533, 0.5883087184572333, 0.230114732596577], [0.22021738445155947, 0.07099308600903254, 0.6311029572700989], [0.22894178381115438, 0.905420013006128, 0.8596354002537465], [0.07085734988865344, 0.23800463436899522, 0.6689777782962806], [0.2142368073704386, 0.132311848725025, 0.935514240580671], [0.5710430933252845, 0.47267102631179414, 0.7846194242907534], [0.8074969977666434, 0.1904099143618777, 0.09693081422882333], [0.4310511824063775, 0.4235786230199208, 0.467024668036675], [0.7290758494598506, 0.6733645472933015, 0.9841652113659661], [0.09841787115195888, 0.4026212821022688, 0.33930260539496315], [0.8616725363527911, 0.24865633392028563, 0.1902089084408115], [0.4486135478331319, 0.4218816398344042, 0.27854514466694047], [0.2498064478821005, 0.9232655992760128, 0.44313074505345695], [0.8613491047618306, 0.5503253124498481, 0.05058832952488124], [0.9992824684127266, 0.8360275850799519, 0.9689962572847513], [0.9263669830081276, 0.8486957344143055, 0.16631111060391401], [0.48564112545071847, 0.21374729919918167, 0.4010402925494526], [0.058635399972178925, 0.3789731189769161, 0.9853088437797259], [0.26520305817215195, 0.7840706019485694, 0.4550083673391433], [0.4230074859901629, 0.9573176408596732, 0.9954226894927138], [0.5557683234056182, 0.718408275296326, 0.15479682527406413], [0.2967078254945642, 0.9687093649691588, 0.5791802908162562], [0.5421952013742742, 0.7479755603790641, 0.05716527290748308], [0.5841775944589712, 0.5028503829195136, 0.8527198920482854], [0.15743272793948326, 0.9607789032744504, 0.08011146524058688], [0.1858249609807232, 0.5950351064500277, 0.6752125536040902], [0.2352038950009312, 0.11988661394712419, 0.8902873141294375], [0.24621534778862486, 0.5945191535334412, 0.6193815103321031], [0.4192249153358725, 0.5836722892912247, 0.5227827155319589], [0.9347062577364272, 0.20425919942353643, 0.7161918007894148], [0.23868595261584602, 0.3957858467912545, 0.6716902229599713], [0.2999970797987622, 0.31617719627185403, 0.7518644924144021], [0.07254311449315731, 0.4582855226185861, 0.9984544408544423], [0.9960964478550944, 0.073260721099633, 0.2131543122670404], [0.26520041475040135, 0.9332593779937091, 0.8808641736864395], [0.8792702424845428, 0.36952708873888396, 0.15774683235723197], [0.833744954639807, 0.703539925087371, 0.6116777657259501], [0.9872330636315043, 0.6539763177107326, 0.007823107152157949], [0.8171041351154616, 0.2993787521999779, 0.6633887149660773], [0.9389300039271039, 0.13429111439336772, 0.11542867041910221], [0.10703597770941764, 0.5532236408848159, 0.2723482123148163], [0.6048298270302239, 0.7176121871387979, 0.20359731232745293], [0.6342379588850797, 0.2639839016304094, 0.48853185214937656], [0.9053364910793232, 0.8461037132948555, 0.09229846771273342], [0.42357577256372636, 0.27668022397225167, 0.0035456890877823], [0.7711192230196271, 0.6371133773013796, 0.2619552624343482], [0.7412309083479308, 0.5516804211263913, 0.42768691898067934], [0.009669699608339966, 0.07524386007376704, 0.883106393300143] ] moa = MOArchive3obj(points, reference_point=[1, 1, 1]) self.assertAlmostEqual(moa.hypervolume, 0.812479094965706, places=8) moa = MOArchive3obj([[p[0] - 1, p[1] - 1, p[2] - 1] for p in points], reference_point=[0, 0, 0]) self.assertAlmostEqual(moa.hypervolume, 0.812479094965706, places=8) moa = MOArchive3obj(points, 
reference_point=[1, 2, 3]) self.assertAlmostEqual(moa.hypervolume, 5.61969774713577, places=8) self.assertEqual(moa.hypervolume_plus, moa.hypervolume) if __name__ == '__main__': unittest.main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923732.0 moarchiving-1.0.0/moarchiving/tests/test_moarchiving4obj.py0000644000076500000240000007276115000266124023374 0ustar00hansenstaff""" Test the MOArchive4obj class """ from moarchiving.moarchiving3obj import MOArchive3obj from moarchiving.moarchiving4obj import MOArchive4obj from moarchiving.moarchiving import BiobjectiveNondominatedSortedList as MOArchive2obj from moarchiving.tests.point_sampling import (get_non_dominated_points, get_stacked_points, get_random_points, permute_points) import unittest import random import math def list_to_set(lst): """ Converts a list of lists to a set of tuples """ return set([tuple(p) for p in lst]) def get_small_test_archive(): """ Returns a small test archive """ points = [[1, 2, 3, 4], [4, 1, 2, 3], [3, 4, 1, 2], [2, 3, 4, 1]] infos = [str(p) for p in points] return MOArchive4obj(points, [6, 6, 6, 6], infos) class TestMOArchiving4obj(unittest.TestCase): """ Tests for the MOArchive4obj class """ def test_hypervolume_easy(self): """ test the hypervolume calculation for a 'simple' case """ points = [[0, 1, 2, 3], [1, 2, 3, 0], [2, 3, 0, 1], [3, 0, 1, 2]] moa = MOArchive4obj(points, reference_point=[4, 4, 4, 4], infos=["A", "B", "C", "D"]) self.assertEqual(71, moa.hypervolume) def test_infos_non_dominated(self): """ test if the infos are stored correctly - if the points are non dominated, the infos should be the same""" moa = get_small_test_archive() # assert that the infos are stored in the same order as the points self.assertEqual([str(p) for p in moa], moa.infos) def test_infos_dominated(self): """ test if the infos about dominated points are removed """ points = [[1, 2, 3, 4], [2, 2, 3, 5], [5, 4, 3, 2], [5, 5, 5, 5]] infos = [str(p) for p in points] moa = MOArchive4obj(points, [6, 6, 6, 6], infos) non_dominated_points = [[1, 2, 3, 4], [5, 4, 3, 2]] self.assertSetEqual(set([str(p) for p in non_dominated_points]), set(moa.infos)) self.assertEqual([str(p) for p in moa], moa.infos) def test_in_domain(self): """ test if the in_domain function works correctly for 3obj points""" moa = get_small_test_archive() # test if the points are in the domain self.assertTrue(moa.in_domain([1, 2, 3, 4])) self.assertTrue(moa.in_domain([5.9, 5.9, 5.9, 5.9])) self.assertTrue(moa.in_domain([-1, -1, -1, -1])) self.assertTrue(moa.in_domain([-1, 1, -1, 1])) self.assertTrue(moa.in_domain([0, 0, 0, 0])) # test if the point is not in the domain self.assertFalse(moa.in_domain([7, 8, 9, 10])) self.assertFalse(moa.in_domain([6, 6, 6, 6])) self.assertFalse(moa.in_domain([0, 0, 6, 0])) def test_add(self): """ test if the add_points function works correctly for 4obj points""" ref_point = [6, 6, 6, 6] start_points = [[1, 2, 5, 4], [2, 3, 5, 1], [3, 5, 1, 4]] moa_ref = MOArchive4obj(start_points, ref_point, infos=["A", "B", "C"]) moa_no_ref = MOArchive4obj(start_points, infos=["A", "B", "C"]) for moa in [moa_ref, moa_no_ref]: # add point that is not dominated and does not dominate any other point u1 = [3, 3, 3, 3] moa.add(u1, "D") self.assertSetEqual(list_to_set(start_points + [u1]), list_to_set(list(moa))) # add point that is dominated by another point in the archive u2 = [4, 5, 2, 4] moa.add(u2, "E") self.assertSetEqual(list_to_set(start_points + [u1]), list_to_set(list(moa))) # add point that dominates 
another point in the archive u3 = [2, 3, 1, 4] moa.add(u3, "F") self.assertSetEqual(list_to_set(start_points[:2] + [u1, u3]), list_to_set(list(moa))) def test_hypervolume_after_add(self): """ test if the hypervolume is calculated correctly after adding points """ ref_point = [1, 1, 1, 1] pop_size = 20 n_gen = 4 points = get_non_dominated_points(pop_size * n_gen, n_dim=4) for gen in range(1, n_gen + 1): moa_true = MOArchive4obj(points[:(gen * pop_size)], ref_point) true_hv = moa_true.hypervolume moa_add = MOArchive4obj([], ref_point) for i in range(gen * pop_size): moa_add.add(points[i]) moa_add_list = MOArchive4obj([], ref_point) for i in range(gen): moa_add_list.add_list(points[i * pop_size:(i + 1) * pop_size]) self.assertAlmostEqual(moa_add.hypervolume, true_hv, places=6) self.assertAlmostEqual(moa_add_list.hypervolume, true_hv, places=6) self.assertEqual(len(moa_add), len(moa_true)) self.assertEqual(len(moa_add_list), len(moa_true)) def test_length(self): """ Test that the length of the archive is correct after adding and removing points """ ref_point = [1, 1, 1, 1] n_points_add = 100 points = get_stacked_points(n_points_add, ['random', 'random', 'random', 'random']) moa = MOArchive4obj([], ref_point) # add points one by one for point in points: moa.add(point) self.assertEqual(len(moa), len(list(moa))) # remove points one by one points = list(moa) for point in points: moa.remove(point) self.assertEqual(len(moa), len(list(moa))) def test_dominates(self): """ test the dominates function """ moa = get_small_test_archive() # test that the points that are already in the archive are dominated for p in moa: self.assertTrue(moa.dominates(p)) # test other dominated points self.assertTrue(moa.dominates([5, 5, 5, 5])) self.assertTrue(moa.dominates([2, 3, 4, 5])) self.assertTrue(moa.dominates([4, 5, 2, 3])) # test non dominated points self.assertFalse(moa.dominates([3, 3, 3, 3])) self.assertFalse(moa.dominates([5, 3, 3, 2])) self.assertFalse(moa.dominates([0, 5, 5, 5])) def test_dominators(self): """ test the dominators function """ moa = get_small_test_archive() # test that the points that are already in the archive are dominated by itself for p in moa: self.assertEqual([p], moa.dominators(p)) self.assertEqual(1, moa.dominators(p, number_only=True)) # test other dominated points pass def test_distance_to_hypervolume_area(self): """ test the distance_to_hypervolume_area function """ moa = MOArchive4obj() self.assertEqual(0, moa.distance_to_hypervolume_area([1, 1, 1, 1])) moa.reference_point = [2, 2, 2, 2] # for points in the hypervolume area, the distance should be 0 self.assertEqual(0, moa.distance_to_hypervolume_area([0, 0, 0, 0])) self.assertEqual(0, moa.distance_to_hypervolume_area([1, 1, 1, 1])) self.assertEqual(0, moa.distance_to_hypervolume_area([2, 2, 2, 2])) self.assertEqual(0, moa.distance_to_hypervolume_area([0, 1, 2, 2])) # for points outside the hypervolume area, the distance should be the Euclidean distance # to the hypervolume area self.assertEqual(1, moa.distance_to_hypervolume_area([2, 2, 3, 2])) self.assertEqual(1, moa.distance_to_hypervolume_area([2, 0, 3, 2])) self.assertEqual(10, moa.distance_to_hypervolume_area([0, 0, 0, 12])) self.assertAlmostEqual(math.sqrt(2), moa.distance_to_hypervolume_area([0, 3, 3, 0]), places=6) self.assertAlmostEqual(math.sqrt(2), moa.distance_to_hypervolume_area([2, 3, 3, 2]), places=6) self.assertAlmostEqual(math.sqrt(4), moa.distance_to_hypervolume_area([3, 3, 3, 3]), places=6) self.assertAlmostEqual(math.sqrt(7**2 * 4), 
moa.distance_to_hypervolume_area([9, 9, 9, 9]), places=6) def test_distance_to_pareto_front_compare_2obj(self): """ test the distance_to_pareto_front function, by comparing it to the 2obj pareto front """ # first make a pseudo 4obj pareto front and compare it to 2obj pareto front n_points = 100 n_test_points = 100 # set random seed points = get_stacked_points(n_points, ['random', 'random', 0, 0]) moa4obj = MOArchive4obj(points, reference_point=[1, 1, 1, 1]) moa2obj = MOArchive2obj([[p[0], p[1]] for p in points], reference_point=[1, 1]) permutations = [[0, 1, 2, 3], [1, 2, 0, 3], [2, 0, 1, 3], [3, 2, 1, 0], [2, 3, 0, 1]] for permutation in permutations: perm_points = permute_points(points, permutation) moa4obj_perm = MOArchive4obj(perm_points, reference_point=[1, 1, 1, 1]) new_points = get_stacked_points(n_test_points, ['random', 'random', 1, 1]) for point in new_points: d2 = moa2obj.distance_to_pareto_front(point[:2]) d4 = moa4obj.distance_to_pareto_front(point) d4_perm = moa4obj_perm.distance_to_pareto_front(permute_points([point], permutation)[0]) self.assertAlmostEqual(d2, d4, places=8) self.assertAlmostEqual(d4, d4_perm, places=8) def test_distance_to_pareto_front_compare_3obj(self): """ test the distance_to_pareto_front function, by comparing it to the 3obj pareto front """ # first make a pseudo 4obj pareto front and compare it to 3obj pareto front n_points = 100 n_test_points = 10 # set random seed points = get_stacked_points(n_points, ['random', 'random', 'random', 0]) moa4obj = MOArchive4obj(points, reference_point=[1, 1, 1, 1]) moa3obj = MOArchive3obj([[p[0], p[1], p[2]] for p in points], reference_point=[1, 1, 1]) permutations = [[0, 1, 2, 3], [1, 2, 3, 0], [2, 0, 1, 3], [3, 2, 1, 0], [2, 3, 0, 1]] for permutation in permutations: perm_points = permute_points(points, permutation) moa4obj_perm = MOArchive4obj(perm_points, reference_point=[1, 1, 1, 1]) new_points = get_stacked_points(n_test_points, ['random', 'random', 'random', 1]) for point in new_points: d3 = moa3obj.distance_to_pareto_front(point[:3]) d4 = moa4obj.distance_to_pareto_front(point) d4_perm = moa4obj_perm.distance_to_pareto_front(permute_points([point], permutation)[0]) self.assertAlmostEqual(d3, d4, places=8) self.assertAlmostEqual(d4, d4_perm, places=8) def test_distance_to_pareto_front(self): """ test the distance_to_pareto_front function, by randomly sampling points and computing the distance to the selected point """ n_points_archive = 100 n_test_points = 50 n_points_sampled = 1000 # set random seed points = get_non_dominated_points(n_points_archive, n_dim=4) moa = MOArchive4obj(points, reference_point=[1, 1, 1, 1]) for i in range(n_test_points): point = get_random_points(1, 4)[0] while not moa.dominates(point): point = get_random_points(1, 4)[0] distance = moa.distance_to_pareto_front(point) min_dist = 2 for j in range(n_points_sampled): sample = [p + random.gauss(0, distance) for p in point] while moa.dominates(sample): sample = [p + random.gauss(0, distance) for p in point] dist = math.sqrt(sum([(p - s) ** 2 for p, s in zip(point, sample)])) if dist < min_dist: min_dist = dist self.assertTrue(distance <= dist) def test_remove(self, n_points=100, n_points_remove=50): """ Test the remove function, by comparing the archive with 100 non-dominated points added and then 50 removed, to the one with only the other 50 points added """ points = [[1, 2, 3, 4], [2, 3, 4, 1], [3, 4, 1, 2]] moa_remove = MOArchive4obj(points, reference_point=[6, 6, 6, 6]) moa_remove.remove([1, 2, 3, 4]) 
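# after removing [1, 2, 3, 4], the remaining points and the hypervolume should match an archive built directly from points[1:]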
self.assertEqual(len(list(moa_remove)), 2) self.assertSetEqual(list_to_set(list(moa_remove)), list_to_set(points[1:])) self.assertEqual(moa_remove.hypervolume, MOArchive4obj(points[1:], reference_point=[6, 6, 6, 6]).hypervolume) points = get_non_dominated_points(n_points, n_dim=4) remove_idx = list(range(n_points_remove)) keep_idx = [i for i in range(n_points) if i not in remove_idx] moa_true = MOArchive4obj([points[i] for i in keep_idx], reference_point=[1, 1, 1, 1]) moa_remove = MOArchive4obj(points, reference_point=[1, 1, 1, 1]) for i in remove_idx: moa_remove.remove(points[i]) self.assertEqual(len(list(moa_remove)), len(moa_remove)) moa_add = MOArchive4obj([], reference_point=[1, 1, 1, 1]) for i in keep_idx: moa_add.add(points[i]) # assert that the points are the same in all archives and the hypervolume is the same self.assertEqual(len(list(moa_add)), len(list(moa_true))) self.assertEqual(len(list(moa_remove)), len(list(moa_true))) self.assertSetEqual(list_to_set(list(moa_remove)), list_to_set(list(moa_true))) self.assertSetEqual(list_to_set(list(moa_add)), list_to_set(list(moa_true))) self.assertEqual(moa_remove.hypervolume, moa_true.hypervolume) self.assertEqual(moa_add.hypervolume, moa_true.hypervolume) moa = MOArchive4obj([[1, 2, 3, 4], [2, 3, 4, 1], [3, 4, 1, 2]], reference_point=[6, 6, 6, 6]) moa.add([1, 1, 1, 1]) moa.remove([1, 1, 1, 1]) self.assertEqual(len(list(moa)), 0) def test_contributing_hypervolume(self): """ test the contributing_hypervolume function, by comparing it to the 3obj result """ points = [[1, 2, 3, 4], [1, 2, 4, 3], [1, 3, 2, 4], [1, 3, 4, 2], [1, 4, 2, 3], [1, 4, 3, 2], [2, 1, 3, 4], [2, 1, 4, 3], [2, 3, 1, 4], [2, 3, 4, 1], [2, 4, 1, 3], [2, 4, 3, 1], [3, 1, 2, 4], [3, 1, 4, 2], [3, 2, 1, 4], [3, 2, 4, 1], [3, 4, 1, 2], [3, 4, 2, 1], [4, 1, 2, 3], [4, 1, 3, 2], [4, 2, 1, 3], [4, 2, 3, 1], [4, 3, 1, 2], [4, 3, 2, 1]] moa = MOArchive4obj(points, reference_point=[5, 5, 5, 5]) for p in points: self.assertEqual(moa.contributing_hypervolume(list(p)), 1) points = get_stacked_points(100, [0, 'random', 'random', 'random']) moa = MOArchive4obj(points, reference_point=[1, 1, 1, 1]) moa3obj = MOArchive3obj([[p[1], p[2], p[3]] for p in points], reference_point=[1, 1, 1]) for p in moa3obj: self.assertAlmostEqual(moa.contributing_hypervolume([0] + p), moa3obj.contributing_hypervolume(p), places=8) def test_hypervolume_improvement(self): """ test the hypervolume_improvement function, by comparing it to the 3obj result """ points = [[1, 2, 3, 4], [1, 2, 4, 3], [1, 3, 2, 4], [1, 3, 4, 2], [1, 4, 2, 3], [1, 4, 3, 2], [2, 1, 3, 4], [2, 1, 4, 3], [2, 3, 1, 4], [2, 3, 4, 1], [2, 4, 1, 3], [2, 4, 3, 1], [3, 1, 2, 4], [3, 1, 4, 2], [3, 2, 1, 4], [3, 2, 4, 1], [3, 4, 1, 2], [3, 4, 2, 1], [4, 1, 2, 3], [4, 1, 3, 2], [4, 2, 1, 3], [4, 2, 3, 1], [4, 3, 1, 2], [4, 3, 2, 1]] moa = MOArchive4obj(points, reference_point=[5, 5, 5, 5]) self.assertEqual(moa.hypervolume_improvement([1, 2, 3, 4]), 0) self.assertEqual(moa.hypervolume_improvement([2, 3, 4, 1]), 0) self.assertEqual(moa.hypervolume_improvement([3, 4, 1, 2]), 0) self.assertEqual(moa.hypervolume_improvement([4, 4, 4, 4]), -moa.distance_to_pareto_front([4, 4, 4, 4])) self.assertEqual(moa.hypervolume_improvement([1, 1, 1, 1]), 131) self.assertEqual(moa.hypervolume_improvement([2, 2, 2, 2]), 20) self.assertEqual(moa.hypervolume_improvement([3, 3, 3, 3]), 1) points = get_stacked_points(100, [0, 'random', 'random', 'random']) new_points = get_random_points(100, 3) moa = MOArchive4obj(points, reference_point=[1, 1, 1, 1]) moa3obj = 
MOArchive3obj([[p[1], p[2], p[3]] for p in points], reference_point=[1, 1, 1]) for p in new_points: hv_imp2obj = float(moa3obj.hypervolume_improvement(p)) if hv_imp2obj > 0: self.assertAlmostEqual(hv_imp2obj, moa.hypervolume_improvement([0] + p), places=8) else: self.assertAlmostEqual(hv_imp2obj, moa.hypervolume_improvement([1] + p), places=8) def test_hypervolume_plus(self): """ test the hypervolume_plus indicator """ moa = MOArchive4obj(reference_point=[1, 1, 1, 1]) self.assertEqual(moa.hypervolume_plus, -float('inf')) moa.add([2, 2, 2, 2]) self.assertEqual(moa.hypervolume_plus, -math.sqrt(4)) moa.add_list([[0, 0, 0, 5], [1, 1, 2, 1], [0, 3, 3, 2]]) self.assertEqual(moa.hypervolume_plus, -1) moa.add([1, 1, 1, 1]) self.assertEqual(moa.hypervolume_plus, 0) moa.add([0.5, 0.5, 0.5, 0.5]) self.assertEqual(moa.hypervolume_plus, moa.hypervolume) moa = MOArchive4obj(reference_point=[2, 2, 2, 2]) prev_hv_plus = moa.hypervolume_plus for i in range(1000): point = [10 * random.random(), 10 * random.random(), 10 * random.random(), 10 * random.random()] moa.add(point) self.assertLessEqual(prev_hv_plus, moa.hypervolume_plus) prev_hv_plus = moa.hypervolume_plus def test_hypervolume(self): """ test the hypervolume calculation, by comparing to the result of original implementation in C""" points = [ [1.0, 2.0, 3.0, 1.0], [4.0, 5.0, 6.0, 0.5], [7.0, 8.0, 9.0, 0.7], [2.0, 1.0, 0.5, 0.6], [3.0, 4.0, 5.0, 0.8], [6.0, 7.0, 8.0, 0.3], [9.0, 1.0, 2.0, 0.9], [5.0, 6.0, 7.0, 0.2], [8.0, 9.0, 1.0, 0.4], [0.0, 1.0, 2.0, 0.1] ] moa = MOArchive4obj(points, reference_point=[10, 10, 10, 10]) self.assertEqual(8143.6, float(moa.hypervolume)) self.assertEqual(moa.hypervolume_plus, moa.hypervolume) points = [ [0.6394267984578837, 0.025010755222666936, 0.27502931836911926, 0.22321073814882275], [0.7364712141640124, 0.6766994874229113, 0.8921795677048454, 0.08693883262941615], [0.4219218196852704, 0.029797219438070344, 0.21863797480360336, 0.5053552881033624], [0.026535969683863625, 0.1988376506866485, 0.6498844377795232, 0.5449414806032167], [0.2204406220406967, 0.5892656838759087, 0.8094304566778266, 0.006498759678061017], [0.8058192518328079, 0.6981393949882269, 0.3402505165179919, 0.15547949981178155], [0.9572130722067812, 0.33659454511262676, 0.09274584338014791, 0.09671637683346401], [0.8474943663474598, 0.6037260313668911, 0.8071282732743802, 0.7297317866938179], [0.5362280914547007, 0.9731157639793706, 0.3785343772083535, 0.552040631273227], [0.8294046642529949, 0.6185197523642461, 0.8617069003107772, 0.577352145256762] ] moa = MOArchive4obj(points, reference_point=[1, 1, 1, 1]) self.assertAlmostEqual(0.37037902191204, float(moa.hypervolume), places=8) self.assertEqual(moa.hypervolume_plus, moa.hypervolume) points = [ [0.6394267984578837, 0.025010755222666936, 0.27502931836911926, 0.22321073814882275], [0.7364712141640124, 0.6766994874229113, 0.8921795677048454, 0.08693883262941615], [0.4219218196852704, 0.029797219438070344, 0.21863797480360336, 0.5053552881033624], [0.026535969683863625, 0.1988376506866485, 0.6498844377795232, 0.5449414806032167], [0.2204406220406967, 0.5892656838759087, 0.8094304566778266, 0.006498759678061017], [0.8058192518328079, 0.6981393949882269, 0.3402505165179919, 0.15547949981178155], [0.9572130722067812, 0.33659454511262676, 0.09274584338014791, 0.09671637683346401], [0.8474943663474598, 0.6037260313668911, 0.8071282732743802, 0.7297317866938179], [0.5362280914547007, 0.9731157639793706, 0.3785343772083535, 0.552040631273227], [0.8294046642529949, 0.6185197523642461, 0.8617069003107772, 
0.577352145256762], [0.7045718362149235, 0.045824383655662215, 0.22789827565154686, 0.28938796360210717], [0.0797919769236275, 0.23279088636103018, 0.10100142940972912, 0.2779736031100921], [0.6356844442644002, 0.36483217897008424, 0.37018096711688264, 0.2095070307714877], [0.26697782204911336, 0.936654587712494, 0.6480353852465935, 0.6091310056669882], [0.171138648198097, 0.7291267979503492, 0.1634024937619284, 0.3794554417576478], [0.9895233506365952, 0.6399997598540929, 0.5569497437746462, 0.6846142509898746], [0.8428519201898096, 0.7759999115462448, 0.22904807196410437, 0.03210024390403776], [0.3154530480590819, 0.26774087597570273, 0.21098284358632646, 0.9429097143350544], [0.8763676264726689, 0.3146778807984779, 0.65543866529488, 0.39563190106066426], [0.9145475897405435, 0.4588518525873988, 0.26488016649805246, 0.24662750769398345], [0.5613681341631508, 0.26274160852293527, 0.5845859902235405, 0.897822883602477], [0.39940050514039727, 0.21932075915728333, 0.9975376064951103, 0.5095262936764645], [0.09090941217379389, 0.04711637542473457, 0.10964913035065915, 0.62744604170309], [0.7920793643629641, 0.42215996679968404, 0.06352770615195713, 0.38161928650653676], [0.9961213802400968, 0.529114345099137, 0.9710783776136181, 0.8607797022344981], [0.011481021942819636, 0.7207218193601946, 0.6817103690265748, 0.5369703304087952], [0.2668251899525428, 0.6409617985798081, 0.11155217359587644, 0.434765250669105], [0.45372370632920644, 0.9538159275210801, 0.8758529403781941, 0.26338905075109076], [0.5005861130502983, 0.17865188053013137, 0.9126278393448205, 0.8705185698367669], [0.2984447914486329, 0.6389494948660052, 0.6089702114381723, 0.1528392685496348], [0.7625108000751513, 0.5393790301196257, 0.7786264786305582, 0.5303536721951775], [0.0005718961279435053, 0.3241560570046731, 0.019476742385832302, 0.9290986162646171], [0.8787218778231842, 0.8316655293611794, 0.30751412540266143, 0.05792516649418755], [0.8780095992040405, 0.9469494452979941, 0.08565345206787878, 0.4859904633166138], [0.06921251846838361, 0.7606021652572316, 0.7658344293069878, 0.1283914644997628], [0.4752823780987313, 0.5498035934949439, 0.2650566289400591, 0.8724330410852574], [0.4231379402008869, 0.21179820544208205, 0.5392960887794583, 0.7299310690899762], [0.2011510633896959, 0.31171629130089495, 0.9951493566608947, 0.6498780576394535], [0.43810008391450406, 0.5175758410355906, 0.12100419586826572, 0.22469733703155736], [0.33808556214745533, 0.5883087184572333, 0.230114732596577, 0.22021738445155947], [0.07099308600903254, 0.6311029572700989, 0.22894178381115438, 0.905420013006128], [0.8596354002537465, 0.07085734988865344, 0.23800463436899522, 0.6689777782962806], [0.2142368073704386, 0.132311848725025, 0.935514240580671, 0.5710430933252845], [0.47267102631179414, 0.7846194242907534, 0.8074969977666434, 0.1904099143618777], [0.09693081422882333, 0.4310511824063775, 0.4235786230199208, 0.467024668036675], [0.7290758494598506, 0.6733645472933015, 0.9841652113659661, 0.09841787115195888], [0.4026212821022688, 0.33930260539496315, 0.8616725363527911, 0.24865633392028563], [0.1902089084408115, 0.4486135478331319, 0.4218816398344042, 0.27854514466694047], [0.2498064478821005, 0.9232655992760128, 0.44313074505345695, 0.8613491047618306], [0.5503253124498481, 0.05058832952488124, 0.9992824684127266, 0.8360275850799519], [0.9689962572847513, 0.9263669830081276, 0.8486957344143055, 0.16631111060391401], [0.48564112545071847, 0.21374729919918167, 0.4010402925494526, 0.058635399972178925], [0.3789731189769161, 0.9853088437797259, 
0.26520305817215195, 0.7840706019485694], [0.4550083673391433, 0.4230074859901629, 0.9573176408596732, 0.9954226894927138], [0.5557683234056182, 0.718408275296326, 0.15479682527406413, 0.2967078254945642], [0.9687093649691588, 0.5791802908162562, 0.5421952013742742, 0.7479755603790641], [0.05716527290748308, 0.5841775944589712, 0.5028503829195136, 0.8527198920482854], [0.15743272793948326, 0.9607789032744504, 0.08011146524058688, 0.1858249609807232], [0.5950351064500277, 0.6752125536040902, 0.2352038950009312, 0.11988661394712419], [0.8902873141294375, 0.24621534778862486, 0.5945191535334412, 0.6193815103321031], [0.4192249153358725, 0.5836722892912247, 0.5227827155319589, 0.9347062577364272], [0.20425919942353643, 0.7161918007894148, 0.23868595261584602, 0.3957858467912545], [0.6716902229599713, 0.2999970797987622, 0.31617719627185403, 0.7518644924144021], [0.07254311449315731, 0.4582855226185861, 0.9984544408544423, 0.9960964478550944], [0.073260721099633, 0.2131543122670404, 0.26520041475040135, 0.9332593779937091], [0.8808641736864395, 0.8792702424845428, 0.36952708873888396, 0.15774683235723197], [0.833744954639807, 0.703539925087371, 0.6116777657259501, 0.9872330636315043], [0.6539763177107326, 0.007823107152157949, 0.8171041351154616, 0.2993787521999779], [0.6633887149660773, 0.9389300039271039, 0.13429111439336772, 0.11542867041910221], [0.10703597770941764, 0.5532236408848159, 0.2723482123148163, 0.6048298270302239], [0.7176121871387979, 0.20359731232745293, 0.6342379588850797, 0.2639839016304094], [0.48853185214937656, 0.9053364910793232, 0.8461037132948555, 0.09229846771273342], [0.42357577256372636, 0.27668022397225167, 0.0035456890877823, 0.7711192230196271], [0.6371133773013796, 0.2619552624343482, 0.7412309083479308, 0.5516804211263913], [0.42768691898067934, 0.009669699608339966, 0.07524386007376704, 0.883106393300143], [0.9039285715598931, 0.5455902892055223, 0.8345950198860167, 0.582509566489794], [0.14809378556748265, 0.12744551928213876, 0.3082583499301337, 0.89898148874259], [0.7961223048880417, 0.8607025820009028, 0.8989246365264746, 0.21007653833975404], [0.24952973922292443, 0.10279362167178563, 0.7801162418714427, 0.8841347014510089], [0.4063773898321168, 0.6206615101507128, 0.15455333833220464, 0.9298810156936744], [0.864605696219964, 0.9762060329309629, 0.8107717199403969, 0.8814162046633244], [0.024786361898188725, 0.7365644717550821, 0.33218546794642867, 0.9308158860483255], [0.8022351389371389, 0.8640640283752794, 0.810749316574389, 0.26680570959447203], [0.7873745091354711, 0.10809562640295711, 0.8721667829060897, 0.8585932513377816], [0.22243371754566443, 0.816586605596929, 0.4603032346789421, 0.30519086733860057], [0.7953454991528618, 0.22759548740777036, 0.02366443470145152, 0.19312978832770866], [0.3282619511977065, 0.8643529420302863, 0.9668891040483611, 0.2791249927218714], [0.6414817386076277, 0.39967838436006087, 0.9811496871982601, 0.5362157324787219], [0.9392371403247157, 0.11534175185142759, 0.970400611022228, 0.17856781617246364], [0.9625343157615555, 0.2654663625229686, 0.1084025472147111, 0.43456375856464435], [0.7285450606527043, 0.31367731419499123, 0.6062088533061433, 0.5114230596694781], [0.38519543334472717, 0.5765880434965995, 0.25472250613858194, 0.7087852838341706], [0.0016912782186294661, 0.9255751654990827, 0.5384519970927919, 0.7194299991448455], [0.7419500778394765, 0.6706285044329995, 0.3642214717812642, 0.06997381112631018], [0.6642376849112723, 0.3302000360425964, 0.31391564505835967, 0.8480152795063355], [0.7197542630139502, 
0.3003222682112642, 0.30928466220865325, 0.40839290861921684], [0.40240038705772463, 0.295655202525947, 0.12728779905915322, 0.4204463337729083], [0.940363670730183, 0.6773179452727329, 0.9028055457325826, 0.6155149159513805], [0.3009498745655653, 0.5479372131356982, 0.0004059396972875273, 0.2869137168689272], [0.4298881499898346, 0.579984781195682, 0.6547056237030716, 0.4649881902470142] ] moa = MOArchive4obj(points, reference_point=[1, 1, 1, 1]) self.assertAlmostEqual(0.666453313693048, float(moa.hypervolume), places=8) moa = MOArchive4obj([[p[0] - 1, p[1] - 1, p[2] - 1, p[3] - 1] for p in points], reference_point=[0, 0, 0, 0]) self.assertAlmostEqual(0.666453313693048, float(moa.hypervolume), places=8) moa = MOArchive4obj(points, reference_point=[1, 2, 3, 4]) self.assertAlmostEqual(22.4083467226742, float(moa.hypervolume), places=8) self.assertEqual(moa.hypervolume_plus, moa.hypervolume) if __name__ == '__main__': unittest.main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923732.0 moarchiving-1.0.0/moarchiving/tests/test_sorted_list.py0000644000076500000240000001110215000266124022620 0ustar00hansenstaff""" Test the MySortedList class. """ import unittest from moarchiving.moarchiving_utils import ArchiveSortedList, DLNode class TestSortedList(unittest.TestCase): """ Tests for the MySortedList class """ def test_init(self): """ test the initialization of the MySortedList """ sl = ArchiveSortedList() self.assertEqual(str(sl), "[]") sl = ArchiveSortedList([DLNode([3, 0]), DLNode([1, 2]), DLNode([0, 3]), DLNode([2, 1])]) self.assertEqual(str(sl), "[[3, 0], [2, 1], [1, 2], [0, 3]]") def test_add(self): """ test the add method of the MySortedList """ sl = ArchiveSortedList() self.assertEqual(str(sl), "[]") sl.add(DLNode([3, 0])) self.assertEqual(str(sl), "[[3, 0]]") sl.add(DLNode([1, 2])) self.assertEqual(str(sl), "[[3, 0], [1, 2]]") sl.add(DLNode([0, 3])) self.assertEqual(str(sl), "[[3, 0], [1, 2], [0, 3]]") sl.add(DLNode([2, 1])) self.assertEqual(str(sl), "[[3, 0], [2, 1], [1, 2], [0, 3]]") def test_remove(self): """ test the remove method of the MySortedList """ n1 = DLNode([3, 0]) n2 = DLNode([1, 2]) n3 = DLNode([0, 3]) n4 = DLNode([2, 1]) sl = ArchiveSortedList([n1, n2, n3, n4]) self.assertEqual(str(sl), "[[3, 0], [2, 1], [1, 2], [0, 3]]") sl.remove(n1) self.assertEqual(str(sl), "[[2, 1], [1, 2], [0, 3]]") sl.remove(n2) self.assertEqual(str(sl), "[[2, 1], [0, 3]]") sl.remove(n3) self.assertEqual(str(sl), "[[2, 1]]") sl.remove(n4) self.assertEqual(str(sl), "[]") def test_head(self): """ test the head_x and head_y methods of the MySortedList """ sl = ArchiveSortedList([DLNode([3, 0]), DLNode([1, 2]), DLNode([0, 3]), DLNode([2, 1])]) self.assertEqual(sl.head_y().x, [3, 0]) self.assertEqual(sl.head_x().x, [0, 3]) def test_next(self): """ test the next_x and next_y methods of the MySortedList """ n1 = DLNode([3, 0]) n2 = DLNode([1, 2]) n3 = DLNode([0, 3]) n4 = DLNode([2, 1]) sl = ArchiveSortedList([n1, n2, n3, n4]) self.assertEqual(sl.next_y(n1), n4) self.assertEqual(sl.next_y(n4), n2) self.assertEqual(sl.next_y(n2), n3) self.assertEqual(sl.next_x(n3), n2) self.assertEqual(sl.next_x(n2), n4) self.assertEqual(sl.next_x(n4), n1) def test_outer_delimiter(self): """ test the outer_delimiter_x and outer_delimiter_y methods of the MySortedList """ n1 = DLNode([3, 0]) n2 = DLNode([1, 2]) n3 = DLNode([0, 3]) n4 = DLNode([2, 1]) sl = ArchiveSortedList([n1, n2, n3, n4]) self.assertEqual(sl.outer_delimiter_y(DLNode([1.5, 1.5])), n2) 
self.assertEqual(sl.outer_delimiter_y(DLNode([0.5, 1.5])), n3) self.assertEqual(sl.outer_delimiter_y(DLNode([1.5, 0.5])), n2) self.assertEqual(sl.outer_delimiter_x(DLNode([1.5, 1.5])), n4) self.assertEqual(sl.outer_delimiter_x(DLNode([0.5, 1.5])), n4) self.assertEqual(sl.outer_delimiter_x(DLNode([1.5, 0.5])), n1) self.assertEqual(sl.outer_delimiter_x(DLNode([-1, 4])), n3) def test_remove_dominated(self): """ test the remove_dominated_x and remove_dominated_y methods of the MySortedList """ n1 = DLNode([3, 0]) n2 = DLNode([1, 2]) n3 = DLNode([0, 3]) n4 = DLNode([2, 1]) sl = ArchiveSortedList([n1, n2, n3, n4]) p = DLNode([1.5, 0.5]) s = sl.outer_delimiter_x(p) points_to_remove = sl.remove_dominated_y(p, s) self.assertEqual(points_to_remove, [n4]) self.assertEqual(str(sl), "[[3, 0], [1, 2], [0, 3]]") sl = ArchiveSortedList([n1, n2, n3, n4]) p = DLNode([0.5, 0.5]) s = sl.outer_delimiter_x(p) points_to_remove = sl.remove_dominated_y(p, s) self.assertEqual(points_to_remove, [n4, n2]) self.assertEqual(str(sl), "[[3, 0], [0, 3]]") sl = ArchiveSortedList([n1, n2, n3, n4]) p = DLNode([1.5, 0.5]) s = sl.outer_delimiter_y(p) points_to_remove = sl.remove_dominated_x(p, s) self.assertEqual(points_to_remove, [n4]) self.assertEqual(str(sl), "[[3, 0], [1, 2], [0, 3]]") sl = ArchiveSortedList([n1, n2, n3, n4]) p = DLNode([0.5, 0.5]) s = sl.outer_delimiter_y(p) points_to_remove = sl.remove_dominated_x(p, s) self.assertEqual(points_to_remove, [n2, n4]) self.assertEqual(str(sl), "[[3, 0], [0, 3]]") if __name__ == '__main__': unittest.main() ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1744923773.4944355 moarchiving-1.0.0/moarchiving.egg-info/0000755000076500000240000000000015000266175017217 5ustar00hansenstaff././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923773.0 moarchiving-1.0.0/moarchiving.egg-info/PKG-INFO0000644000076500000240000003253715000266175020326 0ustar00hansenstaffMetadata-Version: 2.4 Name: moarchiving Version: 1.0.0 Summary: This package implements a non-dominated archive for 2, 3 or 4 objectives with hypervolume indicator and uncrowded hypervolume improvement computation. 
Author: Nace Sever, Mila Nedic, Tea Tusar Author-email: Nikolaus Hansen Project-URL: Homepage, https://github.com/cma-es/moarchiving Keywords: multi-objective,optimization Classifier: Development Status :: 4 - Beta Classifier: Environment :: Console Classifier: Intended Audience :: Education Classifier: Intended Audience :: Other Audience Classifier: Intended Audience :: Science/Research Classifier: Operating System :: OS Independent Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Topic :: Scientific/Engineering Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence Classifier: Topic :: Scientific/Engineering :: Mathematics Description-Content-Type: text/markdown License-File: LICENSE Requires-Dist: sortedcontainers>=2.4.0 Provides-Extra: arbitrary-precision Requires-Dist: fractions; extra == "arbitrary-precision" Dynamic: license-file # Introduction [This package](https://cma-es.github.io/moarchiving/moarchiving-apidocs/index.html) implements a multi-objective non-dominated archive for 2, 3 or 4 objectives, providing easy and fast access to multiple hypervolume indicators: - the hypervolume of the entire archive, - the contributing hypervolume of each element, - the [uncrowded hypervolume improvement](https://doi.org/10.1145/3321707.3321852) (see also [here](https://arxiv.org/abs/1904.08823)) of any given point in the objective space, and - the uncrowded hypervolume of the (unpruned) archive, here called [hypervolume plus](https://cma-es.github.io/moarchiving/moarchiving-apidocs/moarchiving.moarchiving.BiobjectiveNondominatedSortedList.html#hypervolume_plus). Additionally, the package provides a constrained version of the archive, which allows to store points with constraints. The source code is available [on GitHub](https://github.com/CMA-ES/moarchiving). ## Installation On a system shell, either like ``` pip install moarchiving ``` or from GitHub, for example ``` pip install git+https://github.com/CMA-ES/moarchiving.git@development ``` installing from the `development` branch. ## Testing ``` python -m moarchiving.test ``` on a system shell should output something like ``` doctest.testmod() TestResults(failed=0, attempted=90) ... OK unittest.TextTestRunner().run(unittest.TestLoader().loadTestsFromModule()) ....... ---------------------------------------------------------------------- Ran 7 tests in 0.001s ``` ## Links - [API documentation](https://cma-es.github.io/moarchiving/moarchiving-apidocs/index.html) - [This page including performance test examples](https://cma-es.github.io/moarchiving/) - [Code on Github](https://github.com/CMA-ES/moarchiving) ## Details `moarchiving` with 2 objectives uses the [`fractions.Fraction`](https://docs.python.org/3/library/fractions.html) type to avoid rounding errors when computing hypervolume differences, but its usage can also be easily switched off by assigning the respective class attributes `hypervolume_computation_float_type` and `hypervolume_final_float_type`. The Fraction type can become prohibitively computationally expensive with increasing precision. The implementation of the two-objective archive is heavily based on the [`bisect`](https://docs.python.org/3/library/bisect.html) module, while in three and four objectives it is based on the [`sortedcontainers`](https://pypi.org/project/sortedcontainers/) module. 
## Releases - 1.0.0 addition of MOArchive classes for 3 and 4 objectives, as well as a class for handling solutions to constrained problems - 0.7.0 reimplementation of `BiobjectiveNondominatedSortedList.hypervolume_improvement` by extracting a sublist first. - 0.6.0 the `infos` attribute is a `list` with corresponding (arbitrary) information, e.g. for keeping the respective solutions. - 0.5.3 fixed assertion error when not using `fractions.Fraction` - 0.5.2 first published version # Usage examples 1. [Initialization](#1-initialization) 2. [Constrained MOArchive](#2-constrained-moarchive) 3. [Accessing solution information](#3-accessing-solution-information) 4. [Adding solutions](#4-adding-solutions) 5. [Archive size](#5-archive-size) 6. [Performance indicators](#6-performance-indicators) 7. [Contributing hypervolumes](#7-contributing-hypervolumes) 8. [Hypervolume improvement](#8-hypervolume-improvement) 9. [Distance to the Pareto front](#9-distance-to-the-pareto-front) 10. [Enabling or disabling fractions](#10-enabling-or-disabling-fractions) 11. [Additional functions](#11-additional-functions) 12. [Visualization of indicator values](#12-visualization-of-indicator-values) 13. [Performance tests](#13-performance-tests) ### 1. Initialization The MOArchive object can be created using the `get_mo_archive` function by providing a list of objective values, a reference point, or at least the number of objectives. Further solutions can be added using `add` or `add_list` methods, but the reference point cannot be changed once the instance is created. A list of information strings can be provided for each element, which will be stored as long as the corresponding element remains in the archive (e.g., the x values of the element). At any time, the list of non-dominated elements and their corresponding information can be accessed. ```python from moarchiving import get_mo_archive moa2obj = get_mo_archive([[1, 5], [2, 3], [4, 5], [5, 0]], reference_point=[10, 10], infos=["a", "b", "c", "d"]) moa3obj = get_mo_archive([[1, 2, 3], [3, 2, 1], [3, 3, 0], [2, 2, 1]], [10, 10, 10], ["a", "b", "c", "d"]) moa4obj = get_mo_archive([[1, 2, 3, 4], [1, 3, 4, 5], [4, 3, 2, 1], [1, 3, 0, 1]], reference_point=[10, 10, 10, 10], infos=["a", "b", "c", "d"]) print("points in the 2 objective archive:", list(moa2obj)) print("points in the 3 objective archive:", list(moa3obj)) print("points in the 4 objective archive:", list(moa4obj)) ``` points in the 2 objective archive: [[1, 5], [2, 3], [5, 0]] points in the 3 objective archive: [[3, 3, 0], [2, 2, 1], [1, 2, 3]] points in the 4 objective archive: [[1, 3, 0, 1], [1, 2, 3, 4]] MOArchive objects can also be initialized empty. ```python moa = get_mo_archive(reference_point=[4, 4, 4]) print("points in the empty archive:", list(moa)) ``` points in the empty archive: [] ### 2. Constrained MOArchive Constrained MOArchive supports all the functionalities of a non-constrained MOArchive, with the added capability of handling constraints when adding or initializing the archive. In addition to the objective values of a solution, constraint values must be provided in the form of a list or a number. A solution is deemed feasible when all its constraint values are less than or equal to zero. 
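The original example below passes the constraint values as lists; as a complementary minimal sketch (not part of the original examples, assuming only the `get_cmo_archive` interface used below and the feasibility rule just stated), a single constraint value per solution can also be given as a plain number:

```python
from moarchiving import get_cmo_archive

# sketch: one constraint value per solution, given as a number instead of a list;
# a solution is feasible only if its constraint value is <= 0, so [3, 3, 3]
# (constraint value 1) is expected to be excluded from the archive
cmoa_sketch = get_cmo_archive([[2, 2, 2], [3, 3, 3]], [0, 1], reference_point=[5, 5, 5])
print(list(cmoa_sketch))  # expected: [[2, 2, 2]]
```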
```python from moarchiving import get_cmo_archive cmoa = get_cmo_archive([[1, 2, 3], [1, 3, 4], [4, 3, 2], [1, 3, 0]], [[3, 0], [0, 0], [0, 0], [0, 1]], reference_point=[5, 5, 5], infos=["a", "b", "c", "d"]) print("points in the archive:", list(cmoa)) ``` points in the archive: [[4, 3, 2], [1, 3, 4]] ### 3. Accessing solution information `archive.infos` is used to get the information on solutions in the archive. ```python # infos of the previously defined empty archive print("infos of the empty archive", moa.infos) print("infos of the constrained archive", cmoa.infos) ``` infos of the empty archive [] infos of the constrained archive ['c', 'b'] ### 4. Adding solutions Solutions can be added to the MOArchive at any time using the `add` function (for a single solution) or the `add_list` function (for multiple solutions). ```python moa.add([1, 2, 3], "a") print("points:", list(moa)) print("infos:", moa.infos) moa.add_list([[3, 2, 1], [2, 3, 2], [2, 2, 2]], ["b", "c", "d"]) print("points:", list(moa)) print("infos:", moa.infos) ``` points: [[1, 2, 3]] infos: ['a'] points: [[3, 2, 1], [2, 2, 2], [1, 2, 3]] infos: ['b', 'd', 'a'] When adding to the constrained archive, constraint values must be added as well. ```python cmoa.add_list([[3, 3, 3], [1, 1, 1]], [[0, 0], [42, 0]], ["e", "f"]) print("points:", list(cmoa)) print("infos:", cmoa.infos) ``` points: [[4, 3, 2], [3, 3, 3], [1, 3, 4]] infos: ['c', 'e', 'b'] ### 5. Archive size The MOArchive implements some functionality of a list (in the 2 objective case, it actually extends the `list` class, though this is not the case in 3 and 4 objectives). In particular, it includes the `len` method to get the number of solutions in the archive as well as the `in` keyword to check if a point is in the archive. ```python print("Points in the archive:", list(moa)) print("Length of the archive:", len(moa)) print("[2, 2, 2] in moa:", [2, 2, 2] in moa) print("[3, 2, 0] in moa:", [3, 2, 0] in moa) ``` Points in the archive: [[3, 2, 1], [2, 2, 2], [1, 2, 3]] Length of the archive: 3 [2, 2, 2] in moa: True [3, 2, 0] in moa: False ### 6. Performance indicators An archive provides the following performance indicators: - `hypervolume` - `hypervolume_plus`, providing additionally the closest distance to the reference area for an empty archive, see [here](https://doi.org/10.1145/3321707.3321852) and [here](https://doi.org/10.1109/TEVC.2022.3210897) - `hypervolume_plus_constr` (for CMOArchive), based on, but not completely equal to the one defined [here](https://doi.org/10.1016/j.ins.2022.05.106) Indicators are defined for maximization (the original `hypervolume_plus_constr` indicator is multiplied by -1). When the archive is not empty, all the indicators are positive and have the same value. As the archive does not (yet) support an ideal point, the values of indicators are not normalized. ```python print("Hypervolume of the archive:", moa.hypervolume) print("Hypervolume plus of the archive:", moa.hypervolume_plus) ``` Hypervolume of the archive: 12 Hypervolume plus of the archive: 12 In case of a constrained MOArchive, the `hypervolume_plus_constr` attribute can be accessed as well. 
```python print("Hyperolume of the constrained archive:", cmoa.hypervolume) print("Hypervolume plus of the constrained archive:", cmoa.hypervolume_plus) print("Hypervolume plus constr of the constrained archive:", cmoa.hypervolume_plus_constr) ``` Hyperolume of the constrained archive: 14 Hypervolume plus of the constrained archive: 14 Hypervolume plus constr of the constrained archive: 14 ### 7. Contributing hypervolumes The `contributing_hypervolumes` attribute provides a list of hypervolume contributions for each point of the archive. Alternatively, the contribution for a single point can be computed using the `contributing_hypervolume(point)` method. ```python for i, objectives in enumerate(moa): assert moa.contributing_hypervolume(objectives) == moa.contributing_hypervolumes[i] print("contributing hv of point", objectives, "is", moa.contributing_hypervolume(objectives)) print("All contributing hypervolumes:", moa.contributing_hypervolumes) ``` contributing hv of point [3, 2, 1] is 2 contributing hv of point [2, 2, 2] is 2 contributing hv of point [1, 2, 3] is 2 All contributing hypervolumes: [Fraction(2, 1), Fraction(2, 1), Fraction(2, 1)] ### 8. Hypervolume improvement The `hypervolume_improvement(point)` method returns the improvement of the hypervolume if we would add the point to the archive. ```python point = [1, 3, 0] print(f"hypervolume before adding {point}: {moa.hypervolume}") print(f"hypervolume improvement of point {point}: {moa.hypervolume_improvement(point)}") moa.add(point) print(f"hypervolume after adding {point}: {moa.hypervolume}") ``` hypervolume before adding [1, 3, 0]: 12 hypervolume improvement of point [1, 3, 0]: 6 hypervolume after adding [1, 3, 0]: 18 ### 9. Distance to the empirical Pareto front The `distance_to_pareto_front(point)` method returns the distance between the given point and the Pareto front. ```python print(f"Current archive: {list(moa)}") print("Distance of [3, 2, 1] to pareto front:", moa.distance_to_pareto_front([3, 2, 1])) print("Distance of [3, 2, 2] to pareto front:", moa.distance_to_pareto_front([3, 3, 3])) ``` Current archive: [[1, 3, 0], [3, 2, 1], [2, 2, 2], [1, 2, 3]] Distance of [3, 2, 1] to pareto front: 0.0 Distance of [3, 2, 2] to pareto front: 1.0 ### 10. Enabling or disabling fractions To avoid loss of precision, fractions are used by default. This can be changed to floats by setting the `hypervolume_final_float_type` and `hypervolume_computation_float_type` function attributes. 
```python import fractions get_mo_archive.hypervolume_computation_float_type = fractions.Fraction get_mo_archive.hypervolume_final_float_type = fractions.Fraction moa3_fr = get_mo_archive([[1, 2, 3], [2, 1, 3], [3, 3, 1.32], [1.3, 1.3, 3], [1.7, 1.1, 2]], reference_point=[4, 4, 4]) print(moa3_fr.hypervolume) get_mo_archive.hypervolume_computation_float_type = float get_mo_archive.hypervolume_final_float_type = float moa3_nofr = get_mo_archive([[1, 2, 3], [2, 1, 3], [3, 3, 1.32], [1.3, 1.3, 3], [1.7, 1.1, 2]], reference_point=[4, 4, 4]) print(moa3_nofr.hypervolume) ``` 161245156349030777798724819133399/10141204801825835211973625643008 15.899999999999999 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923773.0 moarchiving-1.0.0/moarchiving.egg-info/SOURCES.txt0000644000076500000240000000140615000266175021104 0ustar00hansenstaffLICENSE README.md pyproject.toml moarchiving/__init__.py moarchiving/__main__.py moarchiving/constrained_moarchive.py moarchiving/get_archive.py moarchiving/moarchiving.py moarchiving/moarchiving3obj.py moarchiving/moarchiving4obj.py moarchiving/moarchiving_parent.py moarchiving/moarchiving_utils.py moarchiving/test.py moarchiving.egg-info/PKG-INFO moarchiving.egg-info/SOURCES.txt moarchiving.egg-info/dependency_links.txt moarchiving.egg-info/requires.txt moarchiving.egg-info/top_level.txt moarchiving/tests/__init__.py moarchiving/tests/point_sampling.py moarchiving/tests/test_constrained_moarchiving.py moarchiving/tests/test_moarchiving2obj.py moarchiving/tests/test_moarchiving3obj.py moarchiving/tests/test_moarchiving4obj.py moarchiving/tests/test_sorted_list.py././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923773.0 moarchiving-1.0.0/moarchiving.egg-info/dependency_links.txt0000644000076500000240000000000115000266175023265 0ustar00hansenstaff ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923773.0 moarchiving-1.0.0/moarchiving.egg-info/requires.txt0000644000076500000240000000007115000266175021615 0ustar00hansenstaffsortedcontainers>=2.4.0 [arbitrary-precision] fractions ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923773.0 moarchiving-1.0.0/moarchiving.egg-info/top_level.txt0000644000076500000240000000001415000266175021744 0ustar00hansenstaffmoarchiving ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1744923733.0 moarchiving-1.0.0/pyproject.toml0000644000076500000240000000410515000266125016126 0ustar00hansenstaff# see _setup.py for release instructions [build-system] requires = [ "setuptools >=70.1", # "setuptools-scm", # if we want to use git versions for versioning # "wheel", # setuptools doesn't need wheel anymore, see https://pypi.org/project/wheel/ ] build-backend = "setuptools.build_meta" [project] name = "moarchiving" description = "This package implements a non-dominated archive for 2, 3 or 4 objectives with hypervolume indicator and uncrowded hypervolume improvement computation." 
dynamic = ["version", # see tool.setuptools.dynamic below # "readme", ] readme = "README.md" # manually copied and cut authors = [ { name = "Nikolaus Hansen", email = "authors_firstname.lastname@inria.fr" }, { name = "Nace Sever" }, { name = "Mila Nedic" }, { name = "Tea Tusar" }, ] keywords = [ "multi-objective", "optimization", ] classifiers = [ "Development Status :: 4 - Beta", "Environment :: Console", "Intended Audience :: Education", "Intended Audience :: Other Audience", "Intended Audience :: Science/Research", "Operating System :: OS Independent", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Topic :: Scientific/Engineering", "Topic :: Scientific/Engineering :: Artificial Intelligence", "Topic :: Scientific/Engineering :: Mathematics", ] dependencies = [ "sortedcontainers >=2.4.0", ] [project.optional-dependencies] arbitrary-precision = ["fractions"] [project.urls] Homepage = "https://github.com/cma-es/moarchiving" [tool.setuptools.dynamic] version = {attr = "moarchiving.__version__"} [tool.ruff.lint] ignore = ["E722", "E741"] [tool.ruff.lint.per-file-ignores] "__init__.py" = ["F401"] # [tool.setuptools] # packages = ["moarchiving", "moarchiving.tests"] # automatic detection fails in my dirty folder # include-package-data = false # true is default # [tool.setuptools.package-data] # "*" = ["LICENSE"] # [tool.setuptools.exclude-package-data] # "*" = ["*"] # [tool.setuptools.packages.find] # for . works only in a clean folder # where = ["src"] ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1744923773.4955368 moarchiving-1.0.0/setup.cfg0000644000076500000240000000004615000266175015040 0ustar00hansenstaff[egg_info] tag_build = tag_date = 0