pax_global_header00006660000000000000000000000064146312741740014523gustar00rootroot0000000000000052 comment=f9f7d3cdf91bae89f255335e083e9ddd5325f8df Coq-Equations-1.3.1-8.20/000077500000000000000000000000001463127417400146025ustar00rootroot00000000000000Coq-Equations-1.3.1-8.20/.github/000077500000000000000000000000001463127417400161425ustar00rootroot00000000000000Coq-Equations-1.3.1-8.20/.github/workflows/000077500000000000000000000000001463127417400201775ustar00rootroot00000000000000Coq-Equations-1.3.1-8.20/.github/workflows/build.yml000066400000000000000000000026651463127417400220320ustar00rootroot00000000000000name: CI on: push: # On any push on a "stable" branch branches: - main - 8.14 - 8.13 - 8.12 - 8.11 pull_request: # On all pull-request changes release: # At release time types: - created jobs: build: runs-on: ubuntu-latest strategy: matrix: coq_version: - 'dev' ocaml_version: - '4.09-flambda' target: [ local, hott, dune ] fail-fast: false steps: - name: Checkout code uses: actions/checkout@v2 with: fetch-depth: 1 - name: Docker-Coq-Action uses: coq-community/docker-coq-action@v1 with: opam_file: 'coq-equations.opam' coq_version: ${{ matrix.coq_version }} ocaml_version: ${{ matrix.ocaml_version }} before_script: | startGroup "Workaround permission issue" sudo chown -R coq:coq . # <-- ocamlfind list endGroup script: | startGroup "Build project" opam exec -- ./configure.sh --enable-${{matrix.target}} opam exec -- make -j 2 ci-${{matrix.target}} endGroup uninstall: | startGroup "Clean project" make clean endGroup - name: Revert permissions # to avoid a warning at cleanup time if: ${{ always() }} run: sudo chown -R 1001:116 . # <-- Coq-Equations-1.3.1-8.20/.gitignore000066400000000000000000000016141463127417400165740ustar00rootroot00000000000000.*.cache *~ *.d *.vo *.vok *.vos /Makefile.coq /Makefile.coq.conf /Makefile.hott /Makefile.hott.conf src/META.coq-equations *.bak *.cm* *.o *.glob *.aux *.mtc* *.maf *.bbl *.blg *.log *.map *.out *.toc *.a /doc/auto/ /doc/coqdoc.sty /doc/equations.pdf /doc/intro.pdf /doc/intro.tex /theories/.coq-native/ /test-suite/.coq-native/ /html/ /examples/nlia.cache /examples/.nia.cache docs/.sass-cache/ docs/_site /examples/*.tex /examples/coqdoc.sty /test-suite/Makefile /test-suite/bisect.coq /test-suite/Makefile.conf /src/*.annot *.lock /doc/equations.fls /doc/equations.fdb_latexmk /.nia.cache /src/g_equations.ml # Dune-generated files .merlin _build *.install /test-suite/*.agdai /custom-HoTT/ /examples/Makefile /examples/Makefile.conf /Makefile.conf Equations-HoTT _opam custom-HoTT doc/equations_intro.tex doc/equations_intro.html doc/coqdoc.css doc/index.html META .vscode/Coq-Equations.code-workspace Coq-Equations-1.3.1-8.20/.vscode/000077500000000000000000000000001463127417400161435ustar00rootroot00000000000000Coq-Equations-1.3.1-8.20/.vscode/tasks.json000066400000000000000000000005701463127417400201650ustar00rootroot00000000000000{ // See https://go.microsoft.com/fwlink/?LinkId=733558 // for the documentation about the tasks.json format "version": "2.0.0", "tasks": [ { "label": "make", "type": "shell", "command": "make", "group": { "kind": "build", "isDefault": true } } ] }Coq-Equations-1.3.1-8.20/CHANGES.md000066400000000000000000000011531463127417400161740ustar00rootroot00000000000000Changes in Equations 1.3beta2: ------------------------------ - Fix #399: allow simplification in indices when splitting a variable, to expose the head of the index. - Fix #389: error derving EqDec in HoTT variant. 
- Allow universe binder annotations @{} on Equations definitions. - Fix "struct" parsing issue that required a reset of Coq sometimes - Pattern enhancements: no explicit shadowing of pattern variables is allowed anymore. Fix numerous bugs where generated implicit names introduced by the elaboration of patterns could shadow user-given names, leading to incorrect names in right-hand sides. Coq-Equations-1.3.1-8.20/Gemfile000066400000000000000000000001121463127417400160670ustar00rootroot00000000000000source "https://rubygems.org" gem "github-pages", group: :jekyll_plugins Coq-Equations-1.3.1-8.20/LICENSE000066400000000000000000000574751463127417400156310ustar00rootroot00000000000000 GNU LESSER GENERAL PUBLIC LICENSE Version 2.1, February 1999 Copyright (C) 1991, 1999 Free Software Foundation, Inc. 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. [This is the first released version of the Lesser GPL. It also counts as the successor of the GNU Library Public License, version 2, hence the version number 2.1.] Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below. When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things. To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it. For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights. We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library. To protect each distributor, we want to make it very clear that there is no warranty for the free library. 
Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others. Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license. Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs. When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library. We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances. For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License. In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system. Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library. The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run. GNU LESSER GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. 
This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you". A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables. The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".) "Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library. Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does. 1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) The modified work must itself be a software library. b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change. c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License. d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful. (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. 
Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library. In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices. Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy. This option is useful when you wish to copy part of the code of the Library into a program that is not a library. 4. You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange. If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code. 5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License. However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables. 
When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law. If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.) Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself. 6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications. You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things: a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.) b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with. c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution. d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place. e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy. 
For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute. 7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things: a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above. b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. 8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it. 10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License. 11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. 
For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation. 14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS Coq-Equations-1.3.1-8.20/Makefile000066400000000000000000000041301463127417400162400ustar00rootroot00000000000000# One of these two files will have been generated .PHONY: all default makefiles clean-makefiles all: Makefile.coq $(MAKE) -f Makefile.coq test -f Makefile.hott && $(MAKE) -f Makefile.hott || true install: Makefile.coq $(MAKE) -f Makefile.coq install test -f Makefile.hott && $(MAKE) -f Makefile.hott install || true makefiles: test-suite/Makefile examples/Makefile clean-makefiles: rm -f test-suite/Makefile examples/Makefile test-suite/Makefile: test-suite/_CoqProject cd test-suite && coq_makefile -f _CoqProject -o Makefile examples/Makefile: examples/_CoqProject cd examples && coq_makefile -f _CoqProject -o Makefile pre-all:: makefiles # Ensure we make the bytecode version as well post-all:: bytefiles clean-examples: makefiles cd examples && $(MAKE) clean clean-test-suite: makefiles cd test-suite && $(MAKE) clean test-suite: makefiles all cd test-suite && $(MAKE) .PHONY: test-suite examples: examples/Makefile all cd examples && $(MAKE) .PHONY: examples clean: clean-makefiles makefiles $(MAKE) -f Makefile.coq clean test -f Makefile.hott && make -f Makefile.hott clean || true $(MAKE) clean-examples clean-test-suite siteexamples: examples/*.glob sh siteexamples.sh doc: html mkdir -p html/api && ocamldoc -html -d html/api \ `ocamlfind query -r coq-core.lib coq-core.kernel coq-core.tactics coq-core.proofs \ coq-core.toplevel coq-core.ltac coq-core.plugins.extraction -i-format` \ -I src src/*.ml toplevel: src/equations_plugin.cma bytefiles "$(OCAMLFIND)" ocamlc -linkpkg -linkall -g $(CAMLDEBUG) $(CAMLFLAGS) $(CAMLPKGS) \ -package coq-core.toplevel,coq-core.plugins.extraction \ $< $(COQLIB)/toplevel/coqtop_bin.ml -o coqtop_equations dune:- dune build ci-dune: opam install -j 2 -y coq-hott.dev dune build ci-hott: opam install -j 2 -y coq-hott.dev test -f Makefile.hott && $(MAKE) -f Makefile.hott all $(MAKE) -f Makefile.hott install $(MAKE) -f Makefile.hott uninstall ci-local: $(MAKE) -f Makefile.coq all $(MAKE) test-suite examples $(MAKE) -f Makefile.coq install $(MAKE) -f Makefile.coq uninstall ci: ci-local .PHONY: ci-dune ci-hott ci-local Coq-Equations-1.3.1-8.20/Makefile.coq.local000066400000000000000000000001371463127417400201150ustar00rootroot00000000000000CAMLPKGS+=-package coq-core.plugins.extraction,coq-core.plugins.cc ci: all test-suite examples Coq-Equations-1.3.1-8.20/Makefile.hott.local000066400000000000000000000001371463127417400203110ustar00rootroot00000000000000CAMLPKGS+=-package coq-core.plugins.extraction,coq-core.plugins.cc ci: all test-suite examples Coq-Equations-1.3.1-8.20/Makefile.local000066400000000000000000000012601463127417400173320ustar00rootroot00000000000000makefiles: test-suite/Makefile examples/Makefile test-suite/Makefile: test-suite/_CoqProject cd test-suite && coq_makefile -f _CoqProject -o Makefile examples/Makefile: 
examples/_CoqProject cd examples && coq_makefile -f _CoqProject -o Makefile pre-all:: makefiles # Ensure we make the bytecode version as well post-all:: bytefiles clean-examples: cd examples && $(MAKE) clean clean-test-suite: makefiles cd test-suite && $(MAKE) clean test-suite: makefiles cd test-suite && $(MAKE) .PHONY: test-suite examples: examples/Makefile cd examples && $(MAKE) .PHONY: examples clean:: makefiles clean-examples clean-test-suite siteexamples: examples/*.glob sh siteexamples.sh Coq-Equations-1.3.1-8.20/README.dev000066400000000000000000000002741463127417400162420ustar00rootroot00000000000000Developers: =============== To use merlin and make the documentation, you need a Coq with a findlib META file. Then you can use [make apidoc] to build the html version of the ocaml API. Coq-Equations-1.3.1-8.20/README.md000066400000000000000000000147061463127417400160710ustar00rootroot00000000000000### **Equations** - a function definition plugin. [![Build Status](https://github.com/mattam82/Coq-Equations/actions/workflows/build.yml/badge.svg?branch=main&event=push)](https://github.com/mattam82/Coq-Equations/actions/workflows/build.yml) [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.3012649.svg)](https://zenodo.org/record/3012649#.XcEydZNKjOQ) [![Zulip Chat](https://img.shields.io/badge/zulip-join_chat-brightgreen.svg)](https://coq.zulipchat.com/#narrow/stream/237659-Equations-devs.20.26.20users) Copyright 2009-2022 Matthieu Sozeau `matthieu.sozeau@inria.fr` Copyright 2015-2018 Cyprien Mangin `cyprien.mangin@m4x.org` Distributed under the terms of the GNU Lesser General Public License Version 2.1 or later (see [LICENSE](http://github.com/mattam82/Coq-Equations/raw/main/LICENSE) for details). Equations provides a notation for writing programs by dependent pattern-matching and (well-founded) recursion in [Coq](http://coq.inria.fr). It compiles everything down to eliminators for inductive types, equality and accessibility, providing a definitional extension to the Coq kernel. The plugin can be used with Coq's standard logic in `Prop` for a proof-irrelevant, erasable interpretation of pattern-matching, or with a polymorphic logic in `Type` or re-using the prelude of the [HoTT/Coq](http://github.com/HoTT/HoTT) library for a proof-relevant interpretation. In all cases, the resulting definitions are axiom-free. ***Table of Contents*** - [Documentation](#documentation) - [Papers](#papers) - [Gallery](examples) - [Installation](#installation) - [HoTT Variant](#hott-variant) ## Live demo Try it now in your browser with [JSCoq](http://mattam82.github.io/Coq-Equations/assets/jsexamples/equations_intro.html)! ## Documentation - The [reference manual](http://github.com/mattam82/Coq-Equations/raw/main/doc/equations.pdf) provides an introduction and a summary of the commands and options. This introduction can also be followed interactively with Equations installed: [equations_intro.v](http://github.com/mattam82/Coq-Equations/raw/main/doc/equations_intro.v) - A gallery of [examples](http://mattam82.github.io/Coq-Equations/examples) provides more consequent developments using Equations. ## Papers and presentations - [Equations Reloaded: High-Level Dependently-Typed Functional Programming and Proving in Coq](https://sozeau.gitlabpages.inria.fr/www/research/publications/Equations_Reloaded-ICFP19.pdf). Matthieu Sozeau and Cyprien Mangin. In: Proc. ACM Program. Lang. 3, ICFP, Article 86 (August 2019), 29 pages. 
[DOI](https://doi.org/10.1145/3341690), [slides](https://sozeau.gitlabpages.inria.fr/www/research/publications/Equations_Reloaded-ICFP19-190819.pdf). This presents version 1.2 and above of the package. See [Equations Reloaded](http://mattam82.github.io/Coq-Equations/equations-reloaded) for associated material, including a VM to run the examples. - [Equations for HoTT](https://sozeau.gitlabpages.inria.fr/www/research/publications/Equations_for_HoTT-HoTT19-130819.pdf). Matthieu Sozeau, Talk given at the [Homotopy Type Theory 2019](https://hott.github.io/HoTT-2019//programme/#sozeau) Conference in Pittsburgh, PA, August 2019. This explains the no-confusion principle and strong equivalences used by Equations and Jesper Cockx's version of dependent pattern-matching in Agda in terms of HoTT. - [Equations for Hereditary Substitution in Leivant's Predicative System F: A Case Study](https://sozeau.gitlabpages.inria.fr/www/research/publications/Equations_for_Hereditary_Substitution_in_Leivants_Predicative_System_F:_a_case_study.pdf). Cyprien Mangin and Matthieu Sozeau. In: Proceedings Tenth International Workshop on Logical Frameworks and Meta Languages: Theory and Practice. Volume 185 of EPTCS. May 2015 - LFMTP'15. This is a case study on a proof of normalization for an hereditary substitution procedure on a variant of System F. - [Equations: A Dependent Pattern-Matching Compiler](https://link.springer.com/chapter/10.1007/978-3-642-14052-5_29) Matthieu Sozeau (2010) In: Kaufmann M., Paulson L.C. (eds) Interactive Theorem Proving. ITP 2010. Lecture Notes in Computer Science, vol 6172. Springer, Berlin, Heidelberg. This presents an earlier version of the package. ## Installation The latest version works with Coq 8.13 (branch [8.13](https://github.com/mattam82/Coq-Equations/tree/8.13)), Coq 8.14 (branch [8.14](https://github.com/mattam82/Coq-Equations/tree/8.14)), Coq 8.15 (branch [8.15](https://github.com/mattam82/Coq-Equations/tree/8.15)), and the current Coq main branch (branch [main](https://github.com/mattam82/Coq-Equations/tree/main)). See [releases](https://github.com/mattam82/Coq-Equations/releases) for sources and official releases. ### Install with OPAM This package is available on [OPAM](http://opam.ocaml.org/). Activate the [Coq repository](https://coq.inria.fr/opam-using.html) if you didn't do it yet: opam repo add coq-released https://coq.inria.fr/opam/released and run: opam install coq-equations To get the beta versions of Coq, activate the repository: opam repo add coq-core-dev https://coq.inria.fr/opam/core-dev To get the development version of Equations, activate the repository: opam repo add coq-extra-dev https://coq.inria.fr/opam/extra-dev ### Install from source Alternatively, to compile Equations, simply run: ./configure.sh make in the toplevel directory, with `coqc` and `ocamlc` in your path. Optionally, one can build the test-suite or examples: make examples test-suite Then add the paths to your `.coqrc`: Add ML Path "/Users/mat/research/coq/equations/src". Add Rec LoadPath "/Users/mat/research/coq/equations/theories" as Equations. Or install it: make install As usual, you will need to run this command with the appropriate privileges if the version of Coq you are using is installed system-wide, rather than in your own directory. E.g. on Ubuntu, you would prefix the command with `sudo` and then enter your user account password when prompted. ## HoTT Variant The HoTT variant of Equations works with the coq-hott library for Coq 8.13 and up. 
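As a quick illustration of what a definition looks like under this variant, here is a minimal sketch (it is not taken from this repository's test suite, the `rgb`/`rotate` names are made up for illustration, and it assumes the HoTT variant has been installed as described just below):

    (* Minimal sketch, assuming coq-hott and the HoTT variant of Equations
       are installed (see the opam / from-source instructions below). *)
    From Equations Require Import HoTT.All.

    (* A throw-away inductive, just to have something to pattern-match on. *)
    Inductive rgb : Type := red | green | blue.

    (* A definition by pattern-matching; Equations also derives the defining
       equations, the graph and an elimination principle for [rotate]. *)
    Equations rotate (c : rgb) : rgb :=
      rotate red := green;
      rotate green := blue;
      rotate blue := red.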
When using `opam`, simply install first the `coq-hott` library and `coq-equations` will install its HoTT variant. From source, first install `coq-hott` and then use: ./configure.sh --enable-hott This will compile the `HoTT` library variant in addition to the standard one. Then, after `make install`, one can import the plugin in Coq, using: From Equations Require Import HoTT.All.Coq-Equations-1.3.1-8.20/TODO000066400000000000000000000001261463127417400152710ustar00rootroot00000000000000+ In HoTT_light and everywhere, use dependent elimination as -> shorten to depelim as Coq-Equations-1.3.1-8.20/_CoqProject000066400000000000000000000036101463127417400167350ustar00rootroot00000000000000src/META.coq-equations.in -I src -I test-suite -Q src Equations -Q theories Equations -I theories -I examples CAMLDEBUG = "-bin-annot -g -for-pack Equations -w -58" COQDOCFLAGS = "-parse-comments -utf8 -interpolate" src/equations_common.mli src/equations_common.ml src/ederive.ml src/ederive.mli src/sigma_types.mli src/sigma_types.ml src/subterm.mli src/subterm.ml src/eqdec.mli src/eqdec.ml src/depelim.mli src/depelim.ml src/syntax.mli src/syntax.ml src/context_map.mli src/context_map.ml src/simplify.mli src/simplify.ml src/splitting.mli src/splitting.ml src/covering.mli src/covering.ml src/principles_proofs.mli src/principles_proofs.ml src/principles.mli src/principles.ml src/equations.mli src/equations.ml src/noconf_hom.ml src/noconf_hom.mli src/noconf.ml src/noconf.mli src/extra_tactics.ml src/extra_tactics.mli src/g_equations.mlg src/equations_plugin.mllib theories/Init.v theories/Signature.v theories/CoreTactics.v theories/Prop/SigmaNotations.v theories/Prop/Logic.v theories/Prop/Classes.v theories/Prop/EqDec.v theories/Prop/EqDecInstances.v theories/Prop/Subterm.v theories/Prop/DepElim.v theories/Prop/Tactics.v theories/Prop/Constants.v theories/Prop/NoConfusion.v theories/Prop/NoConfusion_UIP.v theories/Prop/FunctionalInduction.v theories/Prop/Loader.v theories/Prop/Telescopes.v theories/Prop/TransparentEquations.v theories/Prop/OpaqueEquations.v theories/Prop/NoCycle.v theories/Prop/Equations.v theories/Type/Logic.v theories/Type/FunctionalExtensionality.v theories/Type/Relation.v theories/Type/Relation_Properties.v theories/Type/WellFounded.v theories/Type/Classes.v theories/Type/EqDec.v theories/Type/DepElim.v theories/Type/Tactics.v theories/Type/Subterm.v theories/Type/Constants.v theories/Type/EqDecInstances.v theories/Type/NoConfusion.v theories/Type/FunctionalInduction.v theories/Type/Loader.v theories/Type/Telescopes.v theories/Type/WellFoundedInstances.v theories/Type/All.v Coq-Equations-1.3.1-8.20/_HoTTProject000066400000000000000000000027441463127417400170400ustar00rootroot00000000000000-I src -I test-suite -Q theories Equations -Q src Equations -I theories -I examples COQFLAGS = "-noinit -indices-matter" CAMLDEBUG = "-bin-annot -g -for-pack Equations -w -58" COQDOCFLAGS = "-parse-comments -utf8 -interpolate" src/equations_common.mli src/equations_common.ml src/ederive.ml src/ederive.mli src/sigma_types.mli src/sigma_types.ml src/subterm.mli src/subterm.ml src/eqdec.mli src/eqdec.ml src/depelim.mli src/depelim.ml src/syntax.mli src/syntax.ml src/context_map.mli src/context_map.ml src/simplify.mli src/simplify.ml src/splitting.mli src/splitting.ml src/covering.mli src/covering.ml src/principles_proofs.mli src/principles_proofs.ml src/principles.mli src/principles.ml src/equations.mli src/equations.ml src/noconf.ml src/noconf.mli src/noconf_hom.ml src/noconf_hom.mli src/extra_tactics.ml 
src/extra_tactics.mli src/g_equations.mlg src/equations_plugin.mllib theories/Init.v theories/Signature.v theories/CoreTactics.v # HoTT-specific files needing the HoTT library. theories/HoTT/Logic.v theories/HoTT/Relation.v theories/HoTT/Relation_Properties.v theories/HoTT/WellFounded.v theories/HoTT/Classes.v theories/HoTT/EqDec.v theories/HoTT/DepElim.v theories/HoTT/FunctionalInduction.v theories/HoTT/Constants.v theories/HoTT/Tactics.v theories/HoTT/Subterm.v theories/HoTT/NoConfusion.v theories/HoTT/EqDecInstances.v theories/HoTT/Loader.v theories/HoTT/Telescopes.v theories/HoTT/WellFoundedInstances.v theories/HoTT/All.v test-suite/BasicsHoTT.v test-suite/issues/issue389.v Coq-Equations-1.3.1-8.20/buildHoTT.sh000066400000000000000000000004231463127417400167730ustar00rootroot00000000000000#!/usr/bin/env bash if [ -d Equations-HoTT ] then echo "Equations-HoTT already built" else git clone -b cumulative-paths http://github.com/mattam82/HoTT Equations-HoTT cd Equations-HoTT rm -f .gitmodules ./autogen.sh ./configure make cd .. fiCoq-Equations-1.3.1-8.20/configure.sh000077500000000000000000000005731463127417400171270ustar00rootroot00000000000000#!/usr/bin/env bash rm -f Makefile.coq Makefile.hott if [ "$1" == "--enable-hott" ] then if command -v coqtop >/dev/null 2>&1 then coq_makefile -f _HoTTProject -o Makefile.hott else echo "Error: coqtop not found in path" fi fi if command -v coqtop >/dev/null 2>&1 then coq_makefile -f _CoqProject -o Makefile.coq else echo "Error: coqtop not found in path" fi Coq-Equations-1.3.1-8.20/coq-equations.opam000066400000000000000000000021641463127417400202530ustar00rootroot00000000000000opam-version: "2.0" version: "dev" authors: [ "Matthieu Sozeau " "Cyprien Mangin " ] dev-repo: "git+https://github.com/mattam82/Coq-Equations.git" maintainer: "matthieu.sozeau@inria.fr" homepage: "https://mattam82.github.io/Coq-Equations" bug-reports: "https://github.com/mattam82/Coq-Equations/issues" license: "LGPL-2.1-only" synopsis: "A function definition package for Coq" description: """ Equations is a function definition plugin for Coq, that allows the definition of functions by dependent pattern-matching and well-founded, mutual or nested structural recursion and compiles them into core terms. It automatically derives the clauses equations, the graph of the function and its associated elimination principle. 
""" tags: [ "keyword:dependent pattern-matching" "keyword:functional elimination" "category:Miscellaneous/Coq Extensions" "logpath:Equations" ] build: [ ["./configure.sh"] [make "-j%{jobs}%"] ] install: [ [make "install"] ] run-test: [ [make "test-suite"] ] depends: [ "coq" {= "dev"} "ocamlfind" {build} ] #depopts: [ # "coq-hott" {= "8.13"} #] Coq-Equations-1.3.1-8.20/dev/000077500000000000000000000000001463127417400153605ustar00rootroot00000000000000Coq-Equations-1.3.1-8.20/dev/include000066400000000000000000000004361463127417400167310ustar00rootroot00000000000000#use "include";; #cd".";; #directory "src";; #use "syntax.ml";; open Syntax open Covering open Splitting #install_printer (* lhs *) pplhs;; #install_printer (* clause *) ppclause;; #install_printer (* splitting *) ppsplit;; #install_printer (* splitting *) ppcontext_map_empty;; Coq-Equations-1.3.1-8.20/doc/000077500000000000000000000000001463127417400153475ustar00rootroot00000000000000Coq-Equations-1.3.1-8.20/doc/Makefile000066400000000000000000000007771463127417400170220ustar00rootroot00000000000000vfiles := equations_intro.v texfiles := equations.tex manual.tex COQC?="$(COQBIN)coqc" COQDOC?="$(COQBIN)coqdoc" all: equations.pdf equations.pdf: equations.tex $(texfiles) ${vfiles:.v=.tex} latexmk -pdf equations clean: rm -f $(vfiles:.v=.vo) rm -f $(vfiles:.v=.glob) rm -f $(vfiles:.v=.tex) latexmk -c equations clean-all: clean latexmk -C equations $(vfiles:.v=.vo): %.vo: %.v $(COQC) $< $(vfiles:.v=.tex): %.tex: %.vo $(COQDOC) --latex --body-only --interpolate --parse-comments ${<:.vo=.v} Coq-Equations-1.3.1-8.20/doc/abbrevs.sty000066400000000000000000000070551463127417400175430ustar00rootroot00000000000000\usepackage{ifmtarg} \usepackage{ifthen} \usepackage{qsymbols} \usepackage{xspace} \newqsymbol{`<}{\langle} \newqsymbol{`>}{\rangle} \newcommand{\unique}{\mbox{ unique }} \def\text#1{\mbox{#1}} \newcommand{\dom}[1]{\mathbb{#1}} \newqsymbol{`N}{\dom{N}} \newqsymbol{`Z}{\dom{Z}} \newqsymbol{`R}{\dom{R}} \newqsymbol{`Q}{\dom{Q}} \newqsymbol{`|}{\mid} \newqsymbol{`O}{\varnothing} % Definitional equality \def\eqdef{\triangleq} %\def\eqdef{\buildrel \rm {\scriptsize def} \over =} %\newqsymbol{"="}{\eqdef} \def\FV#1{\mathcal{F}\mathcal{V}(#1)} % Similar or equal %\newqsymbol{"~="}{\simeq} % Names \def\namefont{\textsc} \newcommand{\name}[1]{\namefont{#1}\xspace} \def\SSReflect{\name{SSReflect}} \def\Isabelle{\name{Isabelle}} \def\IsabelleHOL{\name{Isabelle/HOL}} \def\HOL{\name{HOL}} \def\Coq{\name{Coq}} \def\Gallina{\name{Gallina}} \def\Gallium{\name{Gallium}} \def\CoC{\name{CoC}} \def\CCIfull{Calcul des Constructions (Co-)Inductives} \def\CCI{\name{CCI}} \def\CICfull{Calculus of (Co-)Inductive Constructions} \def\CIC{\name{CIC}} \def\CICq{\namefont{CIC}$_{?}$\xspace} \def\CCq{$\namefont{CC}_{?}$\xspace} \def\CC{\name{CC}} \def\ECC{\name{ECC}} \def\ECCfull{Extended Calculus of Constructions} \def\CCfull{Calcul des Constructions} \def\TCC{\name{TCC}} \def\PVS{\name{PVS}} \def\ML{\name{ML}} \def\DML{\name{DML}} \def\Twelf{\name{Twelf}} \def\Cayenne{\name{Cayenne}} \def\Epigram{\name{Epigram}} \def\Agda{\name{Agda}} \def\Alf{\name{Alf}} \def\AgdaTwo{\name{Agda 2}} \def\ATS{\name{ATS}} \def\Concoqtion{\name{Concoqtion}} \def\Omegapdx{\name{$\Omega\text{mega}$}} \def\omegapdx{\name{$\Omega\text{mega}$}} \def\C{\name{C}} \def\CVS{\name{CVS}} \def\Refine{\name{Refine}} \def\LogiCal{\name{LogiCal}} \def\TypiCal{\name{TypiCal}} \def\PCRI{\name{PCRI}} \def\Demons{\name{D\'emons}} \def\LRI{\name{LRI}} \def\INRIAFuturs{\name{INRIA} Futurs} 
\def\INRIASaclay{\name{INRIA} Saclay} \def\ProVal{\name{ProVal}} \def\Programatica{\name{Programatica}} \def\Program{\name{Program}} \def\CurryHoward{\textsc{C}urry-\textsc{H}oward\xspace} \def\lc{$\lambda$-calcul} \def\lcst{$\lambda^{\rightarrow}$} \def\LMS{\name{LMS}} \def\Haskell{\name{Haskell}} \def\Russell{\name{Russell}} \def\Ocaml{\name{OCaml}} \def\Ynot{\name{Ynot}} \def\Subtac{\name{Subtac}} \def\eterm{\name{eterm}} \def\ps{\emph{Predicate subtyping}} \def\Ltac{$\mathcal{L}_{\text{tac}}$\xspace} \def\Prolog{\name{Prolog}} \def\Function{\name{Function}} \def\CurryHoward{\name{Curry}-\name{Howard}} \def\Ynot{\name{Ynot}} \def\JML{\name{JML}} \def\Pangolin{\name{Pangolin}} \def\Oleg{\name{OLEG}} \def\Lego{\name{LEGO}} \def\CDuce{\name{$\mathbb{C}$Duce}} \def\Hoare{\name{Hoare}} \def\Java{\name{Java}} \def\Why{\name{Why}} \def\Krakatoa{\name{Krakatoa}} \def\Caduceus{\name{Caduceus}} \def\Ergo{\name{Alt-Ergo}} \def\Simplify{\name{Simplify}} \def\Equations{\name{Equations}} \def\NuPRL{\name{NuPRL}} \def\Matita{\name{Matita}} \def\INRIA{\name{INRIA}} \def\Microsoft{\name{Microsoft}} \def\GHC{\name{GHC}} \newcommand{\TODO}[1]{\textbf{TODO:} \emph{#1}} \newcommand{\FIXME}[1]{\textbf{FIXME:} \emph{#1}} \newcommand{\KEEP}[1]{\textbf{KEEP ? #1}} %\newcommand{\Problem}[1]{\textbf{Problem:} \emph{#1}} % Equations environment \newenvironment{equations}{\begin{displaymath}\begin{array}{ll}}{\end{array}\end{displaymath}} \newcommand{\firsteq}[1]{& #1 } \newcommand{\step}[3]{\vspace{0.2em}\\ & \if#1\empty\\ #2 & #3\else\quad\textbf{\{ #1 \}}\vspace{0.2em}\\ #2 & #3\fi} % Usualy not defined \def\coloneqq{:\mathrel{=}} \def\Coloneqq{::\mathrel{=}} \def\opcit{\textit{op. cit.}\xspace} \def\eng#1{``\textit{#1}''} \def\people#1{\textsf{#1}}Coq-Equations-1.3.1-8.20/doc/biblio.bib000066400000000000000000020770241463127417400173010ustar00rootroot00000000000000%% This BibTeX bibliography file was created using BibDesk. %% https://bibdesk.sourceforge.io/ %% Created for sozeau at 2020-01-31 17:28:01 +0100 %% Saved with string encoding Unicode (UTF-8) @string{aaecc = {Applicable Algebra in Engineering Communication and Computing}} @string{acm = {ACM Press}} @string{acmpress = {ACM Press}} @string{addresslri = {Univ. Paris-Sud, France}} @string{ai = {Artificial Intelligence}} @string{alp90 = {Proc. 2nd Int. Conf. on Algebraic and Logic Programming, LNCS 463}} @string{alp92 = {Proc. 3rd Int. Conf. on Algebraic and Logic Programming, LNCS 632}} @string{alp94 = {Proc. 4th Int. Conf. on Algebraic and Logic Programming, LNCS 850}} @string{alp96 = {Proc. 5th Int. Conf. on Algebraic and Logic Programming, LNCS 1139}} @string{alpplilp98 = {Proc. Joint Int. Symp. on Programming Languages,Implementations, Logics and Programs (PLILP) and Algebraic and Logic Programming (ALP) conferences}} @string{amm = {American Mathematical Monthly}} @string{ap = {Academic Press}} @string{aplas = {Asian Symposium on Programming Languages and Systems (APLAS)}} @string{aplas03 = aplas # {, Beijing, China}} @string{april = {April}} @string{atopls = {ACM Transactions on Programming Languages and Systems}} @string{aw = {Addison-Wesley}} @string{beatcs = {EATCS Bulletin}} @string{birk = {Birkh{\"a}user}} @string{cacm = {Communications of the {ACM}}} @string{cade80 = {Proc. 5th Conf. on Automated Deduction, Les Arcs, France, LNCS 87}} @string{cade84 = {Proc. 7th Int. Conf. on Automated Deduction, Napa, LNCS 170}} @string{cade86 = {Proc. 8th Int. Conf. on Automated Deduction, Oxford, England, LNCS 230}} @string{cade90 = {Proc. 10th Int. Conf. 
on Automated Deduction, Kaiserslautern, LNCS 449}} @string{cade92 = {Proc. 11th Int. Conf. on Automated Deduction, Saratoga Springs, NY, LNAI 607}} @string{cav = {International Conference on Computer Aided Verification (CAV)}} @string{cav02 = cav # {, Copenhagen, Denmark}} @string{cav96 = cav # {, New Brunswick, New Jersey}} @string{ccl = {Constraints in Computational Logics}} @string{ccl94 = {Proc. 1st Int. Conf. on Constraint in Computational Logics, LNCS 845}} @string{cecsl = {Conference of the European Association for Computer Science Logic}} @string{crin = {Centre de Recherche en Informatique de Nancy}} @string{csfw = {IEE Computer Security Foundations Workshop (CSFW)}} @string{csfw02 = csfw # {, Cape Breton, Nova Scotia}} @string{csl = {International Workshop on Computer Science Logic (CSL)}} @string{csl93 = {Proc. Conf. Computer Science Logic}} @string{csl94 = csl # {, Kazimierz, Poland}} @string{csl95 = {Proc. Conf. Computer Science Logic}} @string{csl97 = csl # {, Aarhus, Denmark}} @string{ctrs90 = {Proc. 2nd Int. Workshop on Conditional and Typed Rewriting Systems, Montreal, LNCS 516}} @string{ctrs91 = {Proc. Conditional and Typed Rewriting Systems, LNCS 516}} @string{ctrs92 = {Proc. 3rd Int. Workshop on Conditional Term Rewriting Systems, Pont-{\`a}-Mousson, LNCS 656}} @string{ctrs93 = {Proc. Conditional Term Rewriting Systems, LNCS 656}} @string{cup = {Cambridge University Press}} @string{cwi = {Centrum voor Wiskunde en Informatica}} @string{dbpl = {Database Programming Languages (DBPL)}} @string{dbpl05 = dbpl} @string{diku = {University of Copenhagen, Department of Computer Science}} @string{disc = {International Symposium on Distributed Computing (DISC)}} @string{disc06 = disc} @string{disco90 = {Proc. Int. Symposium on Design and Implementation of Symbolic Computation Systems, LNCS 429}} @string{dmtcs = {Discrete Mathematics and Theoretical Computer Science}} @string{ecoop = {European Conference on Object-Oriented Programming (ECOOP)}} @string{ecoop00 = ecoop # {, Sophia Antipolis and Cannes, France}} @string{ecoop02 = ecoop # {, Malaga, Spain}} @string{ecoop03 = ecoop # {, Darmstadt, Germany}} @string{ecoop04 = ecoop # {, Oslo, Norway}} @string{ecoop06 = ecoop # {, Nantes, France}} @string{oopsla = {{ACM} {SIGPLAN} {C}onference on {O}bject {O}riented {P}rogramming: {S}ystems, {L}anguages, and {A}pplications ({OOPSLA})}} @string{ecoop90 = oopsla # {/} # ecoop # {, Ottawa, Ontario}} @string{ecoop92 = ecoop} @string{ecoop95 = ecoop} @string{ecoop98 = ecoop} @string{ecoop99 = ecoop # {, Lisbon, Portugal}} @string{elsevier = {Elsevier}} @string{enslyon = {Ecole Normale Sup{\'e}rieure de Lyon}} @string{ensparis = {Ecole Normale Sup{\'e}rieure de Paris}} @string{entcs = {Electronic Notes in Theoretical Computer Science}} @string{es = {Elsevier Science Publishers}} @string{esop = {European Symposium on Programming (ESOP)}} @string{esop00 = esop # {, Berlin, Germany}} @string{esop01 = esop # {, Genova, Italy}} @string{esop02 = esop # {, Grenoble, France}} @string{esop88 = esop # {, Nancy, France}} @string{esop92 = esop # {, Rennes, France}} @string{esop94 = esop # {, Edinburgh, Scotland}} @string{eup = {Edinburgh Univ. 
Press}} @string{fac = {Formal Aspect of Computing}} @string{flops = {International Symposium on Functional and Logic Programming (FLOPS)}} @string{flops04 = flops # {, Nara, Japan}} @string{informal = {{\rm, informal proceedings}}} @string{fool = {International Workshop on Foundations of Object-Oriented Languages (FOOL)} # informal} @string{fpca = {ACM Symposium on Functional Programming Languages and Computer Architecture (FPCA)}} @string{fpca87 = fpca # {, Portland, Oregon}} @string{fpca89 = fpca # {, London, England}} @string{fpca93 = fpca # {, Copenhagen, Denmark}} @string{fpca95 = fpca # {San Diego, California}} @string{frr = {Rapport de Recherche}} @string{fsttcs = {Foundations of Software Technology and Theoretical Computer Science (FSTTCS)}} @string{fsttcs93 = fsttcs # {, Bombay, India}} @string{greco = {Greco de Programmation}} @string{gtm = {Graduate Texts in Mathematics}} @string{haskellw = {ACM Haskell Workshop} # informal} @string{hoots = {Workshop on Higher Order Operational Techniques in Semantics (HOOTS)}} @string{iandcomp = {Information and Computation}} @string{ic = {Information and Computation}} @string{icalp = {International Colloquium on Automata, Languages and Programming (ICALP)}} @string{icalp77 = {Proc. 4th Int. Coll. on Automata, Languages and Programming, Turku, Finland}} @string{icalp78 = {Proc. 5th Int. Coll. on Automata, Languages and Programming, LNCS 62}} @string{icalp82 = {Proc. 9th Int. Coll. on Automata, Languages and Programming, LNCS 140}} @string{icalp83 = {Proc. 10th Int. Coll. on Automata, Languages and Programming, LNCS 154}} @string{icalp85 = {Proc. 12th Int. Coll. on Automata, Languages and Programming, Nafplion, LNCS 194}} @string{icalp87 = {Proc. 14th Int. Coll. on Automata, Languages and Programming, LNCS 372}} @string{icalp88 = {Proc. 15th Int. Coll. on Automata, Languages and Programming, LNCS 317}} @string{icalp89 = {Proc. 16th Int. Coll. on Automata, Languages and Programming, LNCS 372}} @string{icalp90 = {Proc. 17th Int. Coll. on Automata, Languages and Programming, Warwick, LNCS 443}} @string{icalp91 = {Proc. 18th Int. Coll. on Automata, Languages and Programming, Madrid, LNCS 510}} @string{icalp92 = {Proc. 19th Int. Coll. on Automata, Languages and Programming, LNCS 623}} @string{icalp93 = {Proc. 20th Int. Coll. on Automata, Languages and Programming, LNCS 700}} @string{icalp94 = {Proc. 21th Int. Coll. on Automata, Languages and Programming, LNCS 820}} @string{icalp98 = icalp # {, Aalborg, Denmark}} @string{icalp99 = {Proc. 25th Int. Coll. on Automata, Languages and Programming, To appear}} @string{icfp = {{ACM} {SIGPLAN} {I}nternational {C}onference on {F}unctional {P}rogramming ({ICFP})}} @string{icfp00 = icfp # {, Montreal, Canada}} @string{icfp01 = icfp # {, Firenze, Italy}} @string{icfp02 = icfp # {, Pittsburgh, Pennsylvania}} @string{icfp03 = icfp # {, Uppsala, Sweden}} @string{icfp04 = icfp # {, Snowbird, Utah}} @string{icfp96 = icfp # {, Philadelphia, Pennsylvania}} @string{icfp97 = icfp # {, Amsterdam, The Netherlands}} @string{icfp98 = icfp # {, Baltimore, Maryland}} @string{icfp99 = icfp # {, Paris, France}} @string{icomp = {Information and Computation}} @string{icont = {Information and Control}} @string{ieeecsp = {{IEEE} Comp. Soc. Press}} @string{ieeetcom = {IEEE Trans. 
Communications}} @string{ieice = {IEICE}} @string{ijait = {International Journal on Artificial Intelligence Tools}} @string{ijcis = {International Journal of Computer and Information Sciences}} @string{ijfcs = {International Journal of Foundations of Computer Science}} @string{ijpp = {International Journal on Parallel Programming}} @string{inpg = {Institut National Polytechnique de Grenoble}} @string{inria = {Institut National de Recherche en Informatique et en Automatique, Unit{\'e} Rocquencourt}} @string{ipl = {Information Processing Letters}} @string{issac88 = {Proc. of the 19th Int. Symp. on Symbolic and Algebraic Computation}} @string{issac89 = {Proc. of the 20th Int. Symp. on Symbolic and Algebraic Computation, Portland, Oregon}} @string{ja = {Journal of Algebra}} @string{jacm = {Journal of the {ACM}}} @string{january = {January}} @string{jar = {Journal of Automated Reasoning}} @string{jcss = {Journal of Computer and System Sciences}} @string{jfla = {Journ{\'e}es Francophones des Langages Applicatifs}} @string{jflp = {Journal of Functional and Logic Programming}} @string{jfp = {Journal of Functional Programming}} @string{jlap = {Journal of Logic and Algebraic Programming}} @string{jlc = {Journal of Logic and Computation}} @string{jlp = {Journal of Logic Programming}} @string{jmcm = {Journal of {M}athematical and Computer Modelling}} @string{jpaa = {Journal of Pure and Applied Algebra}} @string{jsc = {Journal of Symbolic Computation}} @string{jsl = {Journal of Symbolic Logic}} @string{labri = {LAboratoire Bordelais de Recherche en Informatique}} @string{lfcompsci = {International Symposium on Logical Foundations of Computer Science (LFCS)}} @string{lfcompsci94 = lfcompsci # {, St. Petersburg, Russia}} @string{lfcs = {Laboratory for Foundations of Computer Science, University of Edinburgh}} @string{lfp = {ACM Symposium on Lisp and Functional Programming (LFP)}} @string{lfp80 = lfp # {, Stanford, California}} @string{lfp84 = lfp # {, Austin, Texas}} @string{lfp86 = lfp # {, Cambridge, Massachusetts}} @string{lfp88 = lfp # {, Snowbird, Utah}} @string{lfp90 = lfp} @string{lfp92 = lfp # {, San Francisco, California}} @string{lfp94 = lfp # {, Orlando, Florida}} @string{lics = {IEEE Symposium on Logic in Computer Science (LICS)}} @string{lics00 = lics} @string{lics01 = lics # {, Boston, Massachusetts}} @string{lics02 = lics} @string{lics03 = lics # {, Ottawa, Canada}} @string{lics1 = {Proc. 1st IEEE Symp. Logic in Computer Science, Cambridge, Mass.}} @string{lics2 = {Proc. 2nd IEEE Symp. Logic in Computer Science, Ithaca, NY}} @string{lics3 = {Proc. 3rd IEEE Symp. Logic in Computer Science, Edinburgh}} @string{lics4 = {Proc. 4th IEEE Symp. Logic in Computer Science}} @string{lics5 = {Proc. 5th IEEE Symp. Logic in Computer Science, Philadelphia}} @string{lics6 = {Proc. 6th IEEE Symp. Logic in Computer Science, Amsterdam}} @string{lics7 = {Proc. 7th IEEE Symp. 
Logic in Computer Science, Santa Cruz}} @string{lics86 = lics # {, Cambridge, Massachusetts}} @string{lics87 = lics # {, Ithaca, New York}} @string{lics88 = lics # {, Edinburgh, Scotland}} @string{lics89 = lics # {, Asilomar, California}} @string{lics90 = lics # {, Philadelphia, Pennsylvania}} @string{lics91 = lics} @string{lics92 = lics # {, Santa Cruz, California}} @string{lics93 = lics} @string{lics94 = lics} @string{lics95 = lics} @string{lics96 = lics # {, New Brunswick, New Jersey}} @string{lics97 = lics} @string{lics98 = lics # {, Indianapolis, Indiana}} @string{lics99 = lics # {, Trento, Italy}} @string{liens = {Laboratoire d'Informatique de l'{\'E}cole Normale Sup{\'e}rieure}} @string{lifo = {Laboratoire d'Informatique Fondamentale d'Orl{\'e}ans}} @string{lip = {Laboratoire de l'Informatique du Parallelisme}} @string{lmcs = {Logical Methods in Computer Science}} @string{lnai = {Lecture Notes in Artificial Intelligence}} @string{lncs = {Lecture Notes in Computer Science}} @string{lncs104 = {Proc. 5th GI Conf., LNCS 104}} @string{lncs107 = {Int. Coll. Formalization of Programming Concepts, LNCS 107}} @string{lncs131 = {Proc. IBM Workshop on Logics of Programs, LNCS 131}} @string{lncs137 = {Proc. 5th Int. Symp. on Programming, Turin, LNCS 137}} @string{lncs144 = {Proc. EUROCAM 82, Marseille, LNCS 144}} @string{lncs145 = {Proc. 6th GI Conf.}} @string{lncs159 = {Proc. 8th. Coll. on Trees and Algebra in Programming, LNCS 159}} @string{lncs164 = {Proc. Logics of Programming Workshop}} @string{lncs173 = {Semantics of Data Types, Sophia-Antipolis, LNCS 173}} @string{lncs174 = {Proc. EUROSAM 84, Cambridge, LNCS 174}} @string{lncs185 = {Proc. CAAP 85, LNCS 185}} @string{lncs197 = {Seminar on Concurrency, CMU-Pittsburgh, LNCS 197}} @string{lncs201 = {Functional Programming Languages and Computer Architecture, Nancy, LNCS 201}} @string{lncs202 = {Proc. Rewriting Techniques and Applications 85, Dijon, LNCS 202}} @string{lncs203 = {Proc. EUROCAL 85, Linz, LNCS 203}} @string{lncs204 = {Proc. EUROCAL 85, Linz, LNCS 204}} @string{lncs210 = {Proc. STACS 86, Orsay, LNCS 210}} @string{lncs213 = {Proc. ESOP 86, Saarbr{\"u}cken, LNCS 213}} @string{lncs214 = {Proc. CAAP 86, Nice, LNCS 214}} @string{lncs217 = {Programs as Data Objects, Copenhagen, LNCS 217}} @string{lncs226 = {Proc. 13th ICALP, Rennes, LNCS 226}} @string{lncs230 = {Proc. 8th Conf. on Automated Deduction, Oxford, LNCS 230}} @string{lncs232 = {Fundamentals of Artificial Intelligence, LNCS 232}} @string{lncs233 = {Proc. Math. Found. Computer Science, Bratislava, LNCS 233}} @string{lncs241 = {Proc. 6th Conf. on Foundations of Software Technology and Theoretical Computer Science, New Delhi, LNCS 241}} @string{lncs249 = {Proc. CAAP 87, Pisa, LNCS 249}} @string{lncs250 = {Proc. CFLP, Pisa, LNCS 250}} @string{lncs255 = {Petri Nets: Applications and Relationships to Other Models of Concurrency, Bad Honnef, LNCS 255}} @string{lncs258 = {Proc. PARLE 87, vol. I: Parallel Architectures, Eindhoven, LNCS 258}} @string{lncs259 = {Proc. PARLE 87, vol. II: Parallel Languages, Eindhoven, LNCS 259}} @string{lncs272 = {Future Parallel Computers, an advanced course, Pisa, LNCS 272}} @string{lncs283 = {Proc. Category Theory and Computer Science, LNCS 283}} @string{lncs287 = {Proc. 7th Conf. Found. of Software Technology and Theoretical Computer Science, Pune, INDIA, LNCS 287}} @string{lncs294 = {Proc. STACS 88, Bordeaux, LNCS 294}} @string{lncs298 = {Proc. 
3rd Workshop on Mathematical Foundations of Programming Language Semantics, LNCS 298}} @string{lncs299 = {Proc. CAAP 88, Nancy, LNCS 299}} @string{lncs300 = {Proc. ESOP 88, Nancy, LNCS 300}} @string{lncs306 = {Proc. Workshop on Found. of Logic and Functional Programming, Trento, LNCS 306}} @string{lncs308 = {Proc. 1st Int. Workshop on Conditional Term Rewriting Systems, Orsay, LNCS 308}} @string{lncs332 = {Recent Trends in Data Type Specifications, S. Sanella and A. Tarleki eds., LNCS 332}} @string{lncs343 = {Proc. 1st Workshop on Algebraic and Logic Programming, Gaussig, LNCS 343}} @string{lncs349 = {Proc. STACS 89, Paderborn, LNCS 349}} @string{lncs351 = {Proc. TAPSOFT 89 (Vol. 1), Barcelona, LNCS 351}} @string{lncs352 = {Proc. TAPSOFT 89 (Vol. 2), Barcelona, LNCS 352}} @string{lncs354 = {Linear Time, Branching Time and Partial Order in Logics and Models for Concurrency, Noordwijkerhout, LNCS 354}} @string{lncs380 = {Proc. FCT'89, LNCS 380}} @string{lncs415 = {Proc. STACS 90, Rouen, LNCS 415}} @string{lncs431 = {Proc. CAAP 90, Copenhague, LNCS 431}} @string{lncs432 = {Proc. ESOP 90, Copenhague, LNCS 432}} @string{lncs452 = {Proc. MFCS 90, Bansk{\`{a}} Bystrica, LNCS 452}} @string{lncs456 = {Proc. PLILP'90, LNCS 456}} @string{lncs463 = {Proc. Conf. on Algebraic and Logic Programming, Nancy, LNCS 463}} @string{lncs480 = {Proc. 8th Symp. on Theoretical Aspects of Computer Science, Hamburg, LNCS 480}} @string{lncs493 = {Proc. TAPSOFT'91---CAAP, LNCS 493}} @string{lncs535 = {Fundamentals of Artificial Intelligence, P. Jorrand Editor, LNCS 535}} @string{lncs582 = {Proc. European Symp. on Programming, LNCS 582}} @string{lncs59 = {Proc. Proc. Eight Colloquium on Trees in Algebra and Programming, LNCS 59}} @string{lncs78 = {Edinburgh LCF, LNCS 78}} @string{lncs86 = {Proc. Winter School on Abstract Software Specifications, Copenhagen, LNCS 86}} @string{lncs99 = {Algebraic Semantics, LNCS 99}} @string{lncs996 = {Proc. of the 1994 Workshop on Types for Proofs and Programs, LNCS 996}} @string{lnm = {Lecture Notes in Mathematics}} @string{lp = {Lisp Pointers}} @string{lri = {Laboratoire de Recherche en Informatique}} @string{mcgh = {McGraw-Hill}} @string{me = {Matthieu Sozeau}} @string{mfcs15 = {Proc. 15th Mathematical Foundations of Computer Science, Bansk{\`a} Bystrica}} @string{mfcs16 = {Proc. 16th Mathematical Foundations of Computer Science, Warsaw, LNCS 520}} @string{mfcs92 = {Proc. 17th Mathematical Foundations of Computer Science, Praha, LNCS}} @string{mfcs93 = {Proc. 18th Mathematical Foundations of Computer Science}} @string{mfcs98 = {Proc. 23rd Mathematical Foundations of Computer Science}} @string{mfps = {Workshop on the Mathematical Foundations of Programming Semantics (MFPS)}} @string{mfps01 = mfps # {, Aarhus, Denmark}} @string{mfps89 = mfps # {, New Orleans, Louisiana}} @string{mfps95 = mfps # {, New Orleans, Louisiana}} @string{mgh = {McGraw-Hill}} @string{mit = {MIT Press}} @string{mp = {MIT Press}} @string{mitpress = mp} @string{mlw = {ACM SIGPLAN Workshop on ML} # informal} @string{mpi = {Max-Planck-Institut f{\"u}r Informatik}} @string{mpri = {Master Parisien de Recherche en Informatique}} @string{mscs = {Mathematical Structures in Computer Science}} @string{mst = {Math. 
Systems Theory}} @string{nh = {North Holland}} @string{njc = {Nordic Journal of Computing}} @string{no = {No}} @string{oopsla03 = oopsla # {, Anaheim, California}} @string{oopslapre96 = {{C}onference on {O}bject {O}riented {P}rogramming: {S}ystems, {L}anguages, and {A}pplications ({OOPSLA})}} @string{oopsla86 = oopslapre96 # {, Portland, Oregon}} @string{oopsla89 = oopslapre96 # {, New Orleans, Louisiana}} @string{oopsla90 = oopslapre96 # {/} # ecoop # {, Ottawa, Ontario}} @string{oopsla98 = oopsla # {, Vancouver, British Columbia}} @string{orsay = {Orsay, France}} @string{osdi = {USENIX Symposium on Operating Systems Design and Implementation (OSDI)}} @string{osdi00 = osdi # {, San Diego, California}} @string{osdi96 = osdi # {, Seattle, Washington}} @string{paste = {ACM SIGPLAN--SIGSOFT Workshop on Program Analysis for Software Tools and Engineering (PASTE)}} @string{paste01 = paste # {, Snowbird, Utah}} @string{ph = {Prentice Hall}} @string{phd = {phd thesis}} @string{pldi = {{ACM SIGPLAN Conference on Programming Language Design and Implementation (PLDI)}}} @string{pldi00 = pldi # {, Vancouver, British Columbia, Canada}} @string{pldi01 = pldi # {, Snowbird, Utah}} @string{pldi02 = pldi # {, Berlin, Germany}} @string{pldi03 = pldi # {, San Diego, California}} @string{pldi88 = pldi # {, {A}tlanta, {G}eorgia}} @string{pldi89 = pldi # {, Portland, Oregon}} @string{pldi90 = pldi # {, White Plains, New York}} @string{pldi91 = pldi # {, Toronto, Ontario}} @string{pldi92 = pldi # {, San Francisco, California}} @string{pldi93 = pldi # {, Albuquerque, New Mexico}} @string{pldi94 = pldi # {, Orlando, Florida}} @string{pldi95 = pldi # {, La Jolla, California}} @string{pldi96 = pldi # {, Philadephia, Pennsylvania}} @string{pldi97 = pldi # {, Las Vegas, Nevada}} @string{pldi99 = pldi # {, {A}tlanta, {G}eorgia}} @string{pods = {Principles of Database Systems (PODS)}} @string{pods06 = pods} @string{popl = {{ACM} {SIGPLAN--SIGACT} {S}ymposium on {P}rinciples of {P}rogramming {L}anguages ({POPL})}} @string{popl00 = popl # {, Boston, Massachusetts}} @string{popl01 = popl # {, London, England}} @string{popl02 = popl # {, Portland, Oregon}} @string{popl03 = popl # {, New Orleans, Louisiana}} @string{popl04 = popl # {, Venice, Italy}} @string{popl05 = popl # {, Long Beach, California}} @string{popl08 = popl # {, San Francisco, California}} @string{popl10 = {Proc. 10th ACM Symp. on Principles of Programming Languages, Austin, Texas}} @string{popl11 = {Proc. 11th ACM Symp. on Principles of Programming Languages, Salt Lake City}} @string{popl12 = {Proc. 12th ACM Symp. on Principles of Programming Languages, New Orleans}} @string{popl13 = {Proc. 13th ACM Symp. on Principles of Programming Languages, St. Petersburg Beach, Florida}} @string{popl14 = {Proc. 14th ACM Symp. on Principles of Programming Languages, Munich}} @string{popl15 = {Proc. 15th ACM Symp. on Principles of Programming Languages, San Diego, California}} @string{popl3 = {Proc. 3rd ACM Symp. on Principles of Programming Languages, Atlanta}} @string{popl5 = {Proc. 5th ACM Symp. on Principles of Programming Languages, Tucson, Arizona}} @string{popl6 = {Proc. 6th ACM Symp. on Principles of Programming Languages, San Antonio, Texas}} @string{popl7 = {Proc. 7th ACM Symp. 
on Principles of Programming Languages, Las Vegas}} @string{poplpre92 = {{ACM} {S}ymposium on {P}rinciples of {P}rogramming {L}anguages ({POPL})}} @string{popl73 = poplpre92 # {, Boston, Massachusetts}} @string{popl75 = poplpre92 # {, Palo Alto, California}} @string{popl76 = poplpre92 # {, {A}tlanta, {G}eorgia}} @string{popl77 = poplpre92 # {, Los Angeles, California}} @string{popl78 = poplpre92 # {, Tucson, Arizona}} @string{popl79 = poplpre92 # {, San Antonio, Texas}} @string{popl80 = poplpre92 # {, Las Vegas, Nevada}} @string{popl81 = poplpre92 # {, Williamsburg, Virginia}} @string{popl82 = poplpre92 # {, Albuquerque, New Mexico}} @string{popl83 = poplpre92 # {, Austin, Texas}} @string{popl84 = poplpre92 # {, Salt Lake City, Utah}} @string{popl85 = poplpre92 # {, New Orleans, Louisiana}} @string{popl86 = poplpre92 # {, St.\ Petersburg Beach, Florida}} @string{popl87 = poplpre92 # {, Munich, Germany}} @string{popl88 = poplpre92 # {, San Diego, California}} @string{popl89 = poplpre92 # {, Austin, Texas}} @string{popl9 = {Proc. 9th ACM Symp. on Principles of Programming Languages, Albuquerque, New Mexico}} @string{popl90 = poplpre92 # {, {S}an {F}rancisco, {C}alifornia}} @string{popl91 = poplpre92 # {, Orlando, Florida}} @string{popl92 = popl # {, Albuquerque, New Mexico}} @string{popl93 = popl # {, Charleston, South Carolina}} @string{popl94 = popl # {, {P}ortland, {O}regon}} @string{popl95 = popl # {, San Francisco, California}} @string{popl96 = popl # {, St.~Petersburg Beach, Florida}} @string{popl97 = popl # {, Paris, France}} @string{popl98 = popl # {, San Diego, California}} @string{popl99 = popl # {, San Antonio, Texas}} @string{pp = {Pergamon Press}} @string{ppdp = {ACM SIGPLAN International Conference on Principles and Practice of Declarative Programming (PPDP)}} @string{ppdp01 = ppdp # {, Firenze, Italy}} @string{ppdp99 = ppdp # {, Paris France}} @string{proc = {Proceedings of the}} @string{r = {Reidel}} @string{rairo = {R.A.I.R.O.}} @string{ria = {Revue Fran{\c{c}}aise d'Intelligence Artificielle}} @string{rr = {Research Report}} @string{rta = {International Conference on Rewriting Techniques and Applications (RTA)}} @string{rta03 = rta # {, Valencia, Spain}} @string{rta85 = {Proc. 1st Rewriting Techniques and Applications, Dijon, LNCS 202}} @string{rta87 = {Proc. 2nd Rewriting Techniques and Applications, Bordeaux, LNCS 256}} @string{rta89 = {Proc. 3rd Rewriting Techniques and Applications, Chapel Hill, LNCS 355}} @string{rta91 = {Proc. 4th Rewriting Techniques and Applications, LNCS 488}} @string{rta93 = {Proc. 5th Rewriting Techniques and Applications, Montr{\'e}al, LNCS 690}} @string{rta95 = {Proc. 6th Rewriting Techniques and Applications, Kaiserslautern, LNCS 914}} @string{rta96 = {Proc. 
7th Rewriting Techniques and Applications, New Jersey}} @string{sa = {Scientific American}} @string{sas = {International Symposium on Static Analysis (SAS)}} @string{sas01 = sas # {, Paris, France}} @string{sas95 = sas # {, Glasgow, Scotland}} @string{sas96 = sas # {, Aachen, Germany}} @string{sas97 = sas # {, Paris, France}} @string{scp = {Science of Computer Programming}} @string{siamjc = {SIAM Journal on Computing}} @string{sicomp = {SIAM Journal on Computing}} @string{signot = {SIGPLAN Notices}} @string{sosp = {ACM Symposium on Operating Systems Principles (SOSP)}} @string{sosp93 = sosp # {, Asheville, North Carolina}} @string{space = {Workshop on Semantics, Program Analysis and Computing Environments for Memory Management (SPACE)} # informal} @string{spe = {Software - Practice and Experience}} @string{springer = {Springer-Verlag}} @string{ssl93 = {Proc. Int. Workshop on Semantics of Specification Languages, Utrecht, October 1993}} @string{stacs93 = {Proc. 10th Symposium on Theoretical Aspects of Computer Science, W{\"u}rzburg, LNCS}} @string{sv = {Springer-Verlag}} @string{symsac = {Proc. ACM Symp. Symbolic and Algebraic Computation}} @string{symsac71 = {Proc. of the 2nd Symp. on Symbolic and Algebraic Computation}} @string{symsac86 = {Proc. of the 17th Symp. on Symbolic and Algebraic Computation}} @string{tacs = {International Symposium on Theoretical Aspects of Computer Software (TACS)}} @string{tacs01 = tacs # {, Sendai, Japan}} @string{tacs94 = tacs # {, Sendai, Japan}} @string{taoop = {C. A. Gunter and J. C. Mitchell, editors, {\em Theoretical Aspects of Object-Oriented Programming: Types, Semantics, and Language Design}, MIT Press, 1994}} @string{tapsoft = {Theory and Practice of Software Development (TAPSOFT)}} @string{tapsoft93 = tapsoft # {, Orsay, France}} @string{tapsoft97 = tapsoft # {, Lille, France}} @string{tcs = {Theoretical Computer Science}} @string{these = {Th{\`e}se de Doctorat}} @string{thesedoctorat = {Th{\`e}se de Doctorat}} @string{theseh = {Th{\`e}se d'habilitation}} @string{theseinpg = {Th{\`e}se de Doctorat, Institut National Polytechnique de Grenoble, France}} @string{theselri = {Th{\`e}se de Doctorat, Universit{\'e} de Paris-Sud, France}} @string{thesenancy = {Th{\`e}se de Doctorat, Universit{\'e} de Nancy I, France}} @string{tic = {ACM SIGPLAN Workshop on Types in Compilation ({TIC})}} @string{tic97 = tic # {, Amsterdam, The Netherlands}} @string{tic98 = tic # {, Kyoto, Japan}} @string{tlca = {International Conference on Typed Lambda Calculi and Applications (TLCA)}} @string{tlca01 = tlca # {, Krak{\'{o}}ow, Poland}} @string{tlca03 = tlca # {, Valencia, Spain}} @string{tlca93 = tlca # {, Utrecht, The Netherlands}} @string{tlca97 = tlca # {, Nancy, France}} @string{tlca99 = tlca # {, L'Aquila, Italy}} @string{tldi = {ACM SIGPLAN Workshop on Types in Language Design and Implementation (TLDI)}} @string{tldi03 = tldi # {, New Orleans, Louisiana}} @string{tocl = {ACM Transactions on Computational Logic}} @string{toplas = {ACM Transactions on Programming Languages and Systems}} @string{tose = {IEEE Transactions on Software Engineering}} @string{tpa = {Workshop on Types for Program Analysis (TPA)} # informal} @string{tr = {Tech. 
Report}} @string{tsi = {Technique et Science Informatiques}} @string{types = {International Workshop on Types for Proofs and Programs (TYPES)}} @string{types93 = types # {, Nijmegen, The Netherlands}} @string{types98 = types # {, Kloster Irsee, Germany}} @string{u-lille = {Universit{\'e} des Sciences et Techniques de Lille Flandres Artois}} @string{univcaen = {Universit{\'e} de Caen}} @string{univdenisdiderot = {Universit{\'e} Denis Diderot}} @string{univnancy1 = {Universit{\'e} Henri Poincar{\'e} Nancy 1}} @string{univparis7 = {Universit{\'e} Paris 7}} @string{univparisnord = {Universit{\'e} Paris-Nord}} @string{ups = {Universit{\'e} Paris-Sud}} @string{ups-orsay = {Universit{\'e} Paris-Sud, Orsay, France}} @string{wad79 = {Proc. 4th Workshop on Automated Deduction, Austin, Texas}} @string{webdb = {International Workshop on the Web and Databases (WebDB)}} @string{xsym = {Database and XML Technologies: International XML Database Symposium (XSym)}} @string{yes = {Yes}} @misc{equationscoqpl20, Abstract = {{We present an internalization of the 2-groupoid interpretation of the calculus of construction that allows to realize the univalence axiom, proof irrelevance and reasoning modulo. As an example, we show that in our setting, the type of Church integers is equal to the inductive type of natural numbers.}}, Affiliation = {Inria}, Author = {Matthieu Sozeau}, Date-Added = {2020-01-31 17:26:43 +0100}, Date-Modified = {2020-01-31 17:27:59 +0100}, Howpublished = {Talk given at the CoqPL 2020 Workshop}, Language = {Anglais}, Month = January, Title = {{An Equations Tutorial}}, Type = {slides}, Url = {http://www.irif.fr/~sozeau/research/publications/An_Equations_Tutorial-250120-CoqPL20.pdf}, Year = {2020}, Bdsk-Url-1 = {http://hal.archives-ouvertes.fr/hal-00786589}, Bdsk-Url-2 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/groupoidint.pdf}, Bdsk-Url-3 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/internalizingITT.pdf}, Bdsk-Url-4 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Equations_for_Hereditary_Substitution_in_Leivants_Predicative_System_F:_a_case_study.pdf}, Bdsk-Url-5 = {http://dx.doi.org/10.4204/EPTCS.185}, Bdsk-Url-6 = {http://www.irif.fr/~sozeau/research/publications/drafts/Equations_Reloaded_ICFP.pdf}, Bdsk-Url-7 = {http://www.irif.fr/~sozeau/research/publications/drafts/Equations_Reloaded.pdf}} @inproceedings{gueneau:hal-02167236, Address = {Portland, United States}, Author = {Gu{\'e}neau, Arma{\"e}l and Jourdan, Jacques-Henri and Chargu{\'e}raud, Arthur and Pottier, Fran{\c c}ois}, Booktitle = {{ITP 2019 - 10th Conference on Interactive Theorem Proving}}, Date-Added = {2019-11-03 23:11:34 +0100}, Date-Modified = {2019-11-03 23:11:34 +0100}, Hal_Id = {hal-02167236}, Hal_Version = {v1}, Keywords = {Complexity analysis ; Interactive deductive program verification}, Month = Sep, Pdf = {https://hal.inria.fr/hal-02167236/file/main.pdf}, Title = {{Formal Proof and Analysis of an Incremental Cycle Detection Algorithm}}, Url = {https://hal.inria.fr/hal-02167236}, Year = {2019}, Bdsk-Url-1 = {https://hal.inria.fr/hal-02167236}} @article{DBLP:journals/jfp/RossbergRD14, Author = {Andreas Rossberg and Claudio V. Russo and Derek Dreyer}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/journals/jfp/RossbergRD14}, Date-Added = {2019-10-30 23:03:35 +0100}, Date-Modified = {2019-10-30 23:03:35 +0100}, Doi = {10.1017/S0956796814000264}, Journal = {J. Funct. 
Program.}, Number = {5}, Pages = {529--607}, Timestamp = {Sat, 27 May 2017 14:24:34 +0200}, Title = {F-ing modules}, Url = {https://doi.org/10.1017/S0956796814000264}, Volume = {24}, Year = {2014}, Bdsk-Url-1 = {https://doi.org/10.1017/S0956796814000264}} @unpublished{tabtansoz19, Author = {Nicolas Tabareau and {\'E}ric Tanter and Matthieu Sozeau}, Date-Added = {2019-09-12 09:47:28 +0200}, Date-Modified = {2019-09-12 09:58:18 +0200}, Keywords = {Coq ; Parametricity ; Type structures ; Program reasoning ; Type theory ; Homotopy Type Theory}, Month = August, Note = {Submitted}, Title = {{The Marriage of Univalence and Parametricity}}, Url = {https://arxiv.org/abs/1909.05027}, Year = {2019}, Bdsk-Url-1 = {https://hal.inria.fr/hal-01559073}, Bdsk-Url-2 = {https://doi.org/10.1145/3234615}} @misc{equationsreloaded-icfp19-talk, Abstract = {{We present an internalization of the 2-groupoid interpretation of the calculus of construction that allows to realize the univalence axiom, proof irrelevance and reasoning modulo. As an example, we show that in our setting, the type of Church integers is equal to the inductive type of natural numbers.}}, Affiliation = {Inria Paris & IRIF, Universit{\'e} Paris 7 Denis Diderot}, Author = {Matthieu Sozeau}, Date-Added = {2019-08-20 08:01:14 +0200}, Date-Modified = {2019-08-20 08:02:00 +0200}, Howpublished = {Talk given at ICFP 2019}, Language = {Anglais}, Month = August, Title = {{Equations Reloaded}}, Type = {slides}, Url = {http://www.irif.fr/~sozeau/research/publications/Equations_Reloaded-ICFP19-190819.pdf}, Year = {2019}, Bdsk-Url-1 = {http://hal.archives-ouvertes.fr/hal-00786589}, Bdsk-Url-2 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/groupoidint.pdf}, Bdsk-Url-3 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/internalizingITT.pdf}, Bdsk-Url-4 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Equations_for_Hereditary_Substitution_in_Leivants_Predicative_System_F:_a_case_study.pdf}, Bdsk-Url-5 = {http://dx.doi.org/10.4204/EPTCS.185}, Bdsk-Url-6 = {http://www.irif.fr/~sozeau/research/publications/drafts/Equations_Reloaded_ICFP.pdf}, Bdsk-Url-7 = {http://www.irif.fr/~sozeau/research/publications/drafts/Equations_Reloaded.pdf}} @misc{equationsforhott, Abstract = {{We present an internalization of the 2-groupoid interpretation of the calculus of construction that allows to realize the univalence axiom, proof irrelevance and reasoning modulo. 
As an example, we show that in our setting, the type of Church integers is equal to the inductive type of natural numbers.}}, Affiliation = {Inria Paris & IRIF, Universit{\'e} Paris 7 Denis Diderot}, Author = {Matthieu Sozeau}, Date-Added = {2019-08-14 09:40:58 -0400}, Date-Modified = {2019-08-14 09:42:56 -0400}, Howpublished = {Talk given at the Homotopy Type Theory 2019 Conference}, Language = {Anglais}, Month = August, Title = {{Equations for HoTT}}, Type = {slides}, Url = {http://www.irif.fr/~sozeau/research/publications/Equations_for_HoTT-HoTT19-130819.pdf}, Year = {2019}, Bdsk-Url-1 = {http://hal.archives-ouvertes.fr/hal-00786589}, Bdsk-Url-2 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/groupoidint.pdf}, Bdsk-Url-3 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/internalizingITT.pdf}, Bdsk-Url-4 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Equations_for_Hereditary_Substitution_in_Leivants_Predicative_System_F:_a_case_study.pdf}, Bdsk-Url-5 = {http://dx.doi.org/10.4204/EPTCS.185}, Bdsk-Url-6 = {http://www.irif.fr/~sozeau/research/publications/drafts/Equations_Reloaded_ICFP.pdf}, Bdsk-Url-7 = {http://www.irif.fr/~sozeau/research/publications/drafts/Equations_Reloaded.pdf}} @inproceedings{DBLP.conf/cpp/BauerGLSSS17, Author = {Andrej Bauer and Jason Gross and Peter LeFanu Lumsdaine and Michael Shulman and Matthieu Sozeau and Bas Spitters}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/cpp/BauerGLSSS17}, Booktitle = {Proceedings of the 6th {ACM} {SIGPLAN} Conference on Certified Programs and Proofs, {CPP} 2017, Paris, France, January 16-17, 2017}, Crossref = {DBLP:conf/cpp/2017}, Date-Added = {2019-07-16 07:42:42 -0400}, Date-Modified = {2019-07-16 07:43:09 -0400}, Doi = {10.1145/3018610.3018615}, Pages = {164--172}, Publisher = {ACM}, Timestamp = {Tue, 06 Nov 2018 16:59:23 +0100}, Title = {The HoTT library: a formalization of homotopy type theory in Coq}, Url = {https://doi.org/10.1145/3018610.3018615}, Year = {2017}, Bdsk-Url-1 = {https://doi.org/10.1145/3018610.3018615}} @article{DBLP.journals/pacmpl/TabareauTS18, Author = {Nicolas Tabareau and {\'{E}}ric Tanter and Matthieu Sozeau}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/journals/pacmpl/TabareauTS18}, Date-Added = {2019-07-16 07:33:14 -0400}, Date-Modified = {2019-07-16 07:33:19 -0400}, Doi = {10.1145/3236787}, Journal = {{PACMPL}}, Number = {{ICFP}}, Pages = {92:1--92:29}, Timestamp = {Wed, 21 Nov 2018 12:44:28 +0100}, Title = {Equivalences for free: univalent parametricity for effective transport}, Url = {https://doi.org/10.1145/3236787}, Volume = {2}, Year = {2018}, Bdsk-Url-1 = {https://doi.org/10.1145/3236787}} @inproceedings{DBLP.conf/cpp/WinterhalterST19, Author = {Th{\'{e}}o Winterhalter and Matthieu Sozeau and Nicolas Tabareau}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/cpp/WinterhalterST19}, Booktitle = {Proceedings of the 8th {ACM} {SIGPLAN} International Conference on Certified Programs and Proofs, {CPP} 2019, Cascais, Portugal, January 14-15, 2019}, Crossref = {DBLP:conf/cpp/2019}, Date-Added = {2019-07-16 07:29:17 -0400}, Date-Modified = {2019-07-16 07:30:04 -0400}, Doi = {10.1145/3293880.3294095}, Pages = {91--103}, Timestamp = {Fri, 04 Jan 2019 10:46:45 +0100}, Title = {Eliminating reflection from type theory}, Url = 
{https://doi.org/10.1145/3293880.3294095}, Year = {2019}, Bdsk-Url-1 = {https://doi.org/10.1145/3293880.3294095}} @proceedings{DBLP:conf/cpp/2019, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/cpp/2019}, Date-Added = {2019-07-16 07:29:17 -0400}, Date-Modified = {2019-07-16 07:29:17 -0400}, Editor = {Assia Mahboubi and Magnus O. Myreen}, Isbn = {978-1-4503-6222-1}, Publisher = {{ACM}}, Timestamp = {Fri, 04 Jan 2019 10:45:31 +0100}, Title = {Proceedings of the 8th {ACM} {SIGPLAN} International Conference on Certified Programs and Proofs, {CPP} 2019, Cascais, Portugal, January 14-15, 2019}, Url = {https://dl.acm.org/citation.cfm?id=3293880}, Year = {2019}, Bdsk-Url-1 = {https://dl.acm.org/citation.cfm?id=3293880}} @misc{coqcoqcodet, Author = {Matthieu Sozeau and Simon Boulier and Yannick Forster and Nicolas Tabareau and Th{\'e}o Winterhalter}, Date-Added = {2019-07-12 13:38:10 -0400}, Date-Modified = {2019-10-15 00:15:00 +0200}, Howpublished = {Abstract for the Coq Workshop 2019}, Month = {July}, Title = {{Coq Coq Codet! Towards a Verified Toolchain for Coq in MetaCoq}}, Url = {http://www.irif.fr/~sozeau/research/publications/Coq_Coq_Codet-CoqWS19.pdf}, Year = {2019}, Bdsk-Url-1 = {http://www.irif.fr/~sozeau/research/publications/drafts/Coq_Coq_Correct.pdf}} @article{coqcoqcorrect, Articleno = {8}, Author = {Matthieu Sozeau and Simon Boulier and Yannick Forster and Nicolas Tabareau and Th{\'e}o Winterhalter}, Date-Added = {2019-07-12 13:32:48 -0400}, Date-Modified = {2019-12-12 11:01:37 +0100}, Doi = {10.1145/3371076}, Journal = {{Proceedings of the ACM on Programming Languages}}, Month = {January}, Number = {POPL}, Numpages = {28}, Title = {{Coq Coq Correct! Verifying Typechecking and Erasure for Coq, in Coq}}, Url = {http://www.irif.fr/~sozeau/research/publications/Coq_Coq_Correct-POPL20.pdf}, Volume = {4}, Year = {2020}, Bdsk-Url-1 = {http://www.irif.fr/~sozeau/research/publications/Coq_Coq_Correct-POPL20.pdf}, Bdsk-Url-2 = {https://doi.org/10.1145/3371076}} @inproceedings{DBLP:conf/popl/LampropoulosGHH17, Author = {Leonidas Lampropoulos and Diane Gallois{-}Wong and Catalin Hritcu and John Hughes and Benjamin C. Pierce and Li{-}yao Xia}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/popl/LampropoulosGHH17}, Booktitle = {Proceedings of the 44th {ACM} {SIGPLAN} Symposium on Principles of Programming Languages, {POPL} 2017, Paris, France, January 18-20, 2017}, Crossref = {DBLP:conf/popl/2017}, Date-Added = {2019-07-10 03:40:25 -0400}, Date-Modified = {2019-07-10 03:40:25 -0400}, Pages = {114--129}, Timestamp = {Tue, 06 Nov 2018 11:07:42 +0100}, Title = {Beginner's luck: a language for property-based generators}, Url = {http://dl.acm.org/citation.cfm?id=3009868}, Year = {2017}, Bdsk-Url-1 = {http://dl.acm.org/citation.cfm?id=3009868}} @proceedings{DBLP:conf/popl/2017, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/popl/2017}, Date-Added = {2019-07-10 03:40:25 -0400}, Date-Modified = {2019-07-10 03:40:25 -0400}, Doi = {10.1145/3009837}, Editor = {Giuseppe Castagna and Andrew D. 
Gordon}, Isbn = {978-1-4503-4660-3}, Publisher = {{ACM}}, Timestamp = {Tue, 06 Nov 2018 11:07:42 +0100}, Title = {Proceedings of the 44th {ACM} {SIGPLAN} Symposium on Principles of Programming Languages, {POPL} 2017, Paris, France, January 18-20, 2017}, Url = {https://doi.org/10.1145/3009837}, Year = {2017}, Bdsk-Url-1 = {https://doi.org/10.1145/3009837}} @inproceedings{DBLP:conf/lics/PedrotT17, Author = {Pierre{-}Marie P{\'{e}}drot and Nicolas Tabareau}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/lics/PedrotT17}, Booktitle = {32nd Annual {ACM/IEEE} Symposium on Logic in Computer Science, {LICS} 2017, Reykjavik, Iceland, June 20-23, 2017}, Crossref = {DBLP:conf/lics/2017}, Date-Added = {2019-07-10 01:38:26 -0400}, Date-Modified = {2019-07-10 01:38:26 -0400}, Doi = {10.1109/LICS.2017.8005113}, Pages = {1--12}, Timestamp = {Thu, 07 Sep 2017 09:27:11 +0200}, Title = {An effectful way to eliminate addiction to dependence}, Url = {https://doi.org/10.1109/LICS.2017.8005113}, Year = {2017}, Bdsk-Url-1 = {https://doi.org/10.1109/LICS.2017.8005113}} @proceedings{DBLP:conf/lics/2017, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/lics/2017}, Date-Added = {2019-07-10 01:38:26 -0400}, Date-Modified = {2019-07-10 01:38:26 -0400}, Isbn = {978-1-5090-3018-7}, Publisher = {{IEEE} Computer Society}, Timestamp = {Tue, 15 Aug 2017 20:32:42 +0200}, Title = {32nd Annual {ACM/IEEE} Symposium on Logic in Computer Science, {LICS} 2017, Reykjavik, Iceland, June 20-23, 2017}, Url = {http://ieeexplore.ieee.org/xpl/mostRecentIssue.jsp?punumber=7999337}, Year = {2017}, Bdsk-Url-1 = {http://ieeexplore.ieee.org/xpl/mostRecentIssue.jsp?punumber=7999337}} @inproceedings{DBLP:conf/popl/AbelPTS13, Author = {Andreas Abel and Brigitte Pientka and David Thibodeau and Anton Setzer}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/popl/AbelPTS13}, Booktitle = {The 40th Annual {ACM} {SIGPLAN-SIGACT} Symposium on Principles of Programming Languages, {POPL} '13, Rome, Italy - January 23 - 25, 2013}, Crossref = {DBLP:conf/popl/2013}, Date-Added = {2019-07-10 01:37:39 -0400}, Date-Modified = {2019-07-10 01:37:39 -0400}, Doi = {10.1145/2429069.2429075}, Pages = {27--38}, Timestamp = {Tue, 06 Nov 2018 11:07:43 +0100}, Title = {Copatterns: programming infinite structures by observations}, Url = {https://doi.org/10.1145/2429069.2429075}, Year = {2013}, Bdsk-Url-1 = {https://doi.org/10.1145/2429069.2429075}} @proceedings{DBLP:conf/popl/2013, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/popl/2013}, Date-Added = {2019-07-10 01:37:39 -0400}, Date-Modified = {2019-07-10 01:37:39 -0400}, Editor = {Roberto Giacobazzi and Radhia Cousot}, Isbn = {978-1-4503-1832-7}, Publisher = {{ACM}}, Timestamp = {Fri, 18 Jan 2013 17:48:28 +0100}, Title = {The 40th Annual {ACM} {SIGPLAN-SIGACT} Symposium on Principles of Programming Languages, {POPL} '13, Rome, Italy - January 23 - 25, 2013}, Url = {http://dl.acm.org/citation.cfm?id=2429069}, Year = {2013}, Bdsk-Url-1 = {http://dl.acm.org/citation.cfm?id=2429069}} @inproceedings{DBLP:conf/itp/SchaferTS15, Author = {Steven Sch{\"{a}}fer and Tobias Tebbi and Gert Smolka}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/itp/SchaferTS15}, Booktitle = {Interactive Theorem Proving - 6th International Conference, 
{ITP} 2015, Nanjing, China, August 24-27, 2015, Proceedings}, Crossref = {DBLP:conf/itp/2015}, Date-Added = {2019-07-10 01:09:54 -0400}, Date-Modified = {2019-07-10 01:09:54 -0400}, Doi = {10.1007/978-3-319-22102-1\_24}, Pages = {359--374}, Timestamp = {Tue, 14 May 2019 10:00:37 +0200}, Title = {Autosubst: Reasoning with de Bruijn Terms and Parallel Substitutions}, Url = {https://doi.org/10.1007/978-3-319-22102-1\_24}, Year = {2015}, Bdsk-Url-1 = {https://doi.org/10.1007/978-3-319-22102-1%5C_24}} @proceedings{DBLP:conf/itp/2015, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/itp/2015}, Date-Added = {2019-07-10 01:09:54 -0400}, Date-Modified = {2019-07-10 01:09:54 -0400}, Doi = {10.1007/978-3-319-22102-1}, Editor = {Christian Urban and Xingyuan Zhang}, Isbn = {978-3-319-22101-4}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Timestamp = {Tue, 14 May 2019 10:00:37 +0200}, Title = {Interactive Theorem Proving - 6th International Conference, {ITP} 2015, Nanjing, China, August 24-27, 2015, Proceedings}, Url = {https://doi.org/10.1007/978-3-319-22102-1}, Volume = {9236}, Year = {2015}, Bdsk-Url-1 = {https://doi.org/10.1007/978-3-319-22102-1}} @misc{scheplerbij, Author = {Daniel Schepler}, Date-Added = {2019-07-08 13:51:50 -0400}, Date-Modified = {2019-07-08 13:55:32 -0400}, Howpublished = {Post on coq-club}, Month = {December}, Title = {{Bijective function implies equal types is provably inconsistent with functional extensionality in Coq}}, Url = {https://sympa.inria.fr/sympa/arc/coq-club/2013-12/msg00114.html}, Year = {2013}, Bdsk-Url-1 = {https://sympa.inria.fr/sympa/arc/coq-club/2013-12/msg00114.html}} @misc{equationsreloadedartifact, Abstract = {{We present an internalization of the 2-groupoid interpretation of the calculus of construction that allows to realize the univalence axiom, proof irrelevance and reasoning modulo. 
As an example, we show that in our setting, the type of Church integers is equal to the inductive type of natural numbers.}}, Affiliation = {Inria Paris & IRIF, Universit{\'e} Paris 7 Denis Diderot}, Author = {Matthieu Sozeau and Cyprien Mangin}, Date-Added = {2019-07-08 11:17:31 -0400}, Date-Modified = {2019-07-08 12:27:40 -0400}, Doi = {10.1145/3342526}, Howpublished = {{Available on the ACM DL}}, Month = August, Title = {{Equations Reloaded Accompanying Material}}, Year = {2019}, Bdsk-Url-1 = {http://hal.archives-ouvertes.fr/hal-00786589}, Bdsk-Url-2 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/groupoidint.pdf}, Bdsk-Url-3 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/internalizingITT.pdf}, Bdsk-Url-4 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Equations_for_Hereditary_Substitution_in_Leivants_Predicative_System_F:_a_case_study.pdf}, Bdsk-Url-5 = {http://dx.doi.org/10.4204/EPTCS.185}, Bdsk-Url-6 = {http://www.irif.fr/~sozeau/research/publications/drafts/Equations_Reloaded_ICFP.pdf}, Bdsk-Url-7 = {http://www.irif.fr/~sozeau/research/publications/drafts/Equations_Reloaded.pdf}} @misc{matthieu_sozeau_2019_3012649, Author = {Matthieu Sozeau and Cyprien Mangin}, Date-Added = {2019-06-25 17:19:24 -0400}, Date-Modified = {2019-06-25 17:19:24 -0400}, Doi = {10.5281/zenodo.3012649}, Month = may, Title = {Equations v1.2}, Url = {https://doi.org/10.5281/zenodo.3012649}, Year = 2019, Bdsk-Url-1 = {https://doi.org/10.5281/zenodo.3012649}} @misc{forstersozeau19, Author = {{Yannick Forster and Matthieu Sozeau}}, Date-Added = {2019-06-11 11:19:55 -0400}, Date-Modified = {2019-06-18 15:57:35 -0400}, Journal = {Frontiers of Realizability}, Month = {August}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/}, Read = {Oui}, Title = {{A mechanically verified proof erasure for Coq}}, Year = {2009}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @inproceedings{DBLP:conf/popl/StrubSFC12, Author = {Pierre{-}Yves Strub and Nikhil Swamy and C{\'{e}}dric Fournet and Juan Chen}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/popl/StrubSFC12}, Booktitle = {Proceedings of the 39th {ACM} {SIGPLAN-SIGACT} Symposium on Principles of Programming Languages, {POPL} 2012, Philadelphia, Pennsylvania, USA, January 22-28, 2012}, Crossref = {DBLP:conf/popl/2012}, Date-Added = {2019-06-10 06:08:40 -0400}, Date-Modified = {2019-06-10 06:08:40 -0400}, Doi = {10.1145/2103656.2103723}, Pages = {571--584}, Timestamp = {Tue, 06 Nov 2018 11:07:43 +0100}, Title = {Self-certification: bootstrapping certified typecheckers in F* with Coq}, Url = {https://doi.org/10.1145/2103656.2103723}, Year = {2012}, Bdsk-Url-1 = {https://doi.org/10.1145/2103656.2103723}} @proceedings{DBLP:conf/popl/2012, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/popl/2012}, Date-Added = {2019-06-10 06:08:40 -0400}, Date-Modified = {2019-06-10 06:08:40 -0400}, Editor = {John Field and Michael Hicks}, Isbn = {978-1-4503-1083-3}, Publisher = {{ACM}}, Timestamp = {Sun, 03 Dec 2017 00:17:33 +0100}, Title = {Proceedings of the 39th {ACM} {SIGPLAN-SIGACT} Symposium on Principles of Programming Languages, {POPL} 2012, Philadelphia, Pennsylvania, USA, January 22-28, 2012}, Url = {http://dl.acm.org/citation.cfm?id=2103656}, Year = {2012}, Bdsk-Url-1 = 
{http://dl.acm.org/citation.cfm?id=2103656}} @inproceedings{DBLP:conf/cade/Krauss06, Author = {Alexander Krauss}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/cade/Krauss06}, Booktitle = {Automated Reasoning, Third International Joint Conference, {IJCAR} 2006, Seattle, WA, USA, August 17-20, 2006, Proceedings}, Crossref = {DBLP:conf/cade/2006}, Date-Added = {2019-05-31 23:09:29 +0200}, Date-Modified = {2019-05-31 23:09:29 +0200}, Doi = {10.1007/11814771\_48}, Pages = {589--603}, Timestamp = {Tue, 14 May 2019 10:00:39 +0200}, Title = {Partial Recursive Functions in Higher-Order Logic}, Url = {https://doi.org/10.1007/11814771\_48}, Year = {2006}, Bdsk-Url-1 = {https://doi.org/10.1007/11814771%5C_48}} @proceedings{DBLP:conf/cade/2006, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/cade/2006}, Date-Added = {2019-05-31 23:09:29 +0200}, Date-Modified = {2019-05-31 23:09:29 +0200}, Doi = {10.1007/11814771}, Editor = {Ulrich Furbach and Natarajan Shankar}, Isbn = {3-540-37187-7}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Timestamp = {Tue, 14 May 2019 10:00:39 +0200}, Title = {Automated Reasoning, Third International Joint Conference, {IJCAR} 2006, Seattle, WA, USA, August 17-20, 2006, Proceedings}, Url = {https://doi.org/10.1007/11814771}, Volume = {4130}, Year = {2006}, Bdsk-Url-1 = {https://doi.org/10.1007/11814771}} @article{paulson1986constructing, Author = {Paulson, Lawrence C}, Date-Added = {2019-05-31 20:39:18 +0200}, Date-Modified = {2019-05-31 20:42:12 +0200}, Journal = {Journal of Symbolic Computation}, Number = {4}, Pages = {325--355}, Publisher = {Elsevier}, Title = {{Constructing Recursion operators in Intuitionistic Type Theory}}, Url = {https://www.sciencedirect.com/science/article/pii/S0747717186800025/pdf?md5=4df038c66455b64726734b09ad0ea894&isDTMRedir=Y&pid=1-s2.0-S0747717186800025-main.pdf&_valck=1}, Volume = {2}, Year = {1986}, Bdsk-Url-1 = {https://www.sciencedirect.com/science/article/pii/S0747717186800025/pdf?md5=4df038c66455b64726734b09ad0ea894&isDTMRedir=Y&pid=1-s2.0-S0747717186800025-main.pdf&_valck=1}} @misc{sozeau.MetaCoqNomadic, Address = {Paris, France}, Author = {Matthieu Sozeau}, Date-Added = {2019-05-23 18:07:22 +0200}, Date-Modified = {2019-05-23 18:07:56 +0200}, Howpublished = {Talk given at Nomadic Labs}, Month = {May}, Pdf = {http://www.irif.fr/~sozeau/research/publications/The_MetaCoq_Project-NomadicLabs-230519.pdf}, Read = {Oui}, Title = {{The MetaCoq Project}}, Type = {slides}, Year = {2019}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @article{DBLP:journals/pacmpl/KaiserZKRD18, Author = {Jan{-}Oliver Kaiser and Beta Ziliani and Robbert Krebbers and Yann R{\'{e}}gis{-}Gianas and Derek Dreyer}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/journals/pacmpl/KaiserZKRD18}, Date-Added = {2019-05-22 20:17:59 +0200}, Date-Modified = {2019-05-22 20:17:59 +0200}, Doi = {10.1145/3236773}, Journal = {{PACMPL}}, Number = {{ICFP}}, Pages = {78:1--78:31}, Timestamp = {Wed, 21 Nov 2018 12:44:28 +0100}, Title = {Mtac2: typed tactics for backward reasoning in Coq}, Url = {https://doi.org/10.1145/3236773}, Volume = {2}, Year = {2018}, Bdsk-Url-1 = {https://doi.org/10.1145/3236773}} @inproceedings{helix.CoqPL, Abstract = {{We present an internalization of the 2-groupoid interpretation of the calculus of 
construction that allows to realize the univalence axiom, proof irrelevance and reasoning modulo. As an example, we show that in our setting, the type of Church integers is equal to the inductive type of natural numbers.}}, Address = {Cascais, Portugal}, Affiliation = {PI.R2 - INRIA Paris - Rocquencourt , Preuves, Programmes et Syst{\`e}mes - PPS , ASCOLA - INRIA - EMN , Laboratoire d'Informatique de Nantes Atlantique - LINA}, Author = {Vadim Zaliva and Matthieu Sozeau}, Booktitle = {CoqPL}, Date-Added = {2019-05-22 20:14:45 +0200}, Date-Modified = {2019-05-22 20:16:03 +0200}, Language = {Anglais}, Month = January, Title = {{Reification of shallow-embedded DSLs in Coq with automated verification}}, Url = {http://www.crocodile.org/lord/vzaliva-CoqPL19.pdf}, Year = {2019}, Bdsk-Url-1 = {http://hal.archives-ouvertes.fr/hal-00786589}, Bdsk-Url-2 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/groupoidint.pdf}, Bdsk-Url-3 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/internalizingITT.pdf}, Bdsk-Url-4 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Equations_for_Hereditary_Substitution_in_Leivants_Predicative_System_F:_a_case_study.pdf}, Bdsk-Url-5 = {http://dx.doi.org/10.4204/EPTCS.185}, Bdsk-Url-6 = {http://conf.researchr.org/event/CoqPL-2017/main-certicoq-a-verified-compiler-for-coq}, Bdsk-Url-7 = {http://www.crocodile.org/lord/vzaliva-CoqPL19.pdf}} @unpublished{metacoqproject, Abstract = {{We present an internalization of the 2-groupoid interpretation of the calculus of construction that allows to realize the univalence axiom, proof irrelevance and reasoning modulo. As an example, we show that in our setting, the type of Church integers is equal to the inductive type of natural numbers.}}, Affiliation = {PI.R2 - INRIA Paris - Rocquencourt , Preuves, Programmes et Syst{\`e}mes - PPS , ASCOLA - INRIA - EMN , Laboratoire d'Informatique de Nantes Atlantique - LINA}, Author = {Matthieu Sozeau and Abhishek Anand and Simon Boulier and Cyril Cohen and Yannick Forster and Fabian Kunze and Gregory Malecha and Nicolas Tabareau and Th{\'e}o Winterhalter}, Date-Added = {2019-05-22 20:02:37 +0200}, Date-Modified = {2019-10-15 00:13:09 +0200}, Language = {Anglais}, Month = April, Note = {{In revision}}, Title = {{The MetaCoq Project}}, Url = {http://www.irif.fr/~sozeau/research/publications/drafts/The_MetaCoq_Project.pdf}, Year = {2019}, Bdsk-Url-1 = {http://hal.archives-ouvertes.fr/hal-00786589}, Bdsk-Url-2 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/groupoidint.pdf}, Bdsk-Url-3 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/internalizingITT.pdf}, Bdsk-Url-4 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Equations_for_Hereditary_Substitution_in_Leivants_Predicative_System_F:_a_case_study.pdf}, Bdsk-Url-5 = {http://dx.doi.org/10.4204/EPTCS.185}, Bdsk-Url-6 = {http://www.irif.fr/~sozeau/research/publications/drafts/Equations_Reloaded_ICFP.pdf}, Bdsk-Url-7 = {http://www.irif.fr/~sozeau/research/publications/drafts/The_MetaCoq_Project.pdf}} @inproceedings{DBLP:conf/cpp/MullenPWTG18, Author = {Eric Mullen and Stuart Pernsteiner and James R. 
Wilcox and Zachary Tatlock and Dan Grossman}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/cpp/MullenPWTG18}, Booktitle = {Proceedings of the 7th {ACM} {SIGPLAN} International Conference on Certified Programs and Proofs, {CPP} 2018, Los Angeles, CA, USA, January 8-9, 2018}, Crossref = {DBLP:conf/cpp/2018}, Date-Added = {2019-05-22 19:39:18 +0200}, Date-Modified = {2019-05-22 19:39:18 +0200}, Doi = {10.1145/3167089}, Pages = {172--185}, Timestamp = {Wed, 21 Nov 2018 12:44:26 +0100}, Title = {{\OE}uf: minimizing the Coq extraction {TCB}}, Url = {https://doi.org/10.1145/3167089}, Year = {2018}, Bdsk-Url-1 = {https://doi.org/10.1145/3167089}} @unpublished{forster:itp19, Author = {Yannick Forster and Fabian Kunze}, Date-Added = {2019-05-22 19:32:50 +0200}, Date-Modified = {2019-05-22 19:34:21 +0200}, Month = {April}, Title = {{A certifying extraction with time bounds from Coq to call-by-value λ-calculus}}, Url = {http://www.ps.uni-saarland.de/Publications/documents/ForsterKunze_2019_Certifying-extraction.pdf}, Year = {2019}, Bdsk-Url-1 = {http://www.ps.uni-saarland.de/Publications/documents/ForsterKunze_2019_Certifying-extraction.pdf}} @phdthesis{streicherhabil, Author = {Thomas Streicher}, Date-Added = {2019-03-01 19:10:53 +0100}, Date-Modified = {2019-03-01 19:24:26 +0100}, School = {LMU M{\"u}nchen}, Title = {Semantical Investigations into Intensional Type Theory}, Type = {Habilitationsschrift}, Year = {1993}} @article{DBLP:journals/jfp/CockxD18, Author = {Jesper Cockx and Dominique Devriese}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/journals/jfp/CockxD18}, Date-Added = {2019-03-01 13:59:51 +0100}, Date-Modified = {2019-03-01 13:59:51 +0100}, Doi = {10.1017/S095679681800014X}, Journal = {J. Funct. 
Program.}, Pages = {e12}, Timestamp = {Fri, 02 Nov 2018 09:31:48 +0100}, Title = {Proof-relevant unification: Dependent pattern matching with only the axioms of your type theory}, Url = {https://doi.org/10.1017/S095679681800014X}, Volume = {28}, Year = {2018}, Bdsk-Url-1 = {https://doi.org/10.1017/S095679681800014X}} @book{mathcomp, Author = {Assia Mahboubi and Enrico Tassi and Yves Bertot and Georges Gonthier}, Date-Added = {2019-02-26 18:19:22 +0100}, Date-Modified = {2019-02-26 18:24:48 +0100}, Title = {{Mathematical Components}}, Year = {2018}} @inproceedings{pedrot:hal-01840643, Address = {Thessaloniki, Greece}, Author = {P{\'e}drot, Pierre-Marie and Tabareau, Nicolas}, Booktitle = {{ESOP 2018 - 27th European Symposium on Programming}}, Date-Added = {2019-02-25 15:28:43 +0100}, Date-Modified = {2019-02-25 15:28:43 +0100}, Doi = {10.1007/978-3-319-89884-1\_9}, Hal_Id = {hal-01840643}, Hal_Version = {v1}, Month = Apr, Pages = {245-271}, Pdf = {https://hal.inria.fr/hal-01840643/file/main.pdf}, Publisher = {{Springer}}, Series = {LNCS}, Title = {{Failure is Not an Option: An Exceptional Type Theory}}, Url = {https://hal.inria.fr/hal-01840643}, Volume = {10801}, Year = {2018}, Bdsk-Url-1 = {https://hal.inria.fr/hal-01840643}, Bdsk-Url-2 = {https://doi.org/10.1007/978-3-319-89884-1%5C_9}} @inproceedings{DBLP:conf/haskell/VazouLP17, Author = {Niki Vazou and Leonidas Lampropoulos and Jeff Polakow}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/haskell/VazouLP17}, Booktitle = {Proceedings of the 10th {ACM} {SIGPLAN} International Symposium on Haskell, Oxford, United Kingdom, September 7-8, 2017}, Crossref = {DBLP:conf/haskell/2017}, Date-Added = {2019-02-23 13:02:05 +0100}, Date-Modified = {2019-02-23 13:02:05 +0100}, Doi = {10.1145/3122955.3122963}, Pages = {63--74}, Timestamp = {Tue, 06 Nov 2018 16:58:22 +0100}, Title = {A tale of two provers: verifying monoidal string matching in liquid Haskell and Coq}, Url = {https://doi.org/10.1145/3122955.3122963}, Year = {2017}, Bdsk-Url-1 = {https://doi.org/10.1145/3122955.3122963}} @proceedings{DBLP:conf/haskell/2017, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/haskell/2017}, Date-Added = {2019-02-23 13:02:05 +0100}, Date-Modified = {2019-02-23 13:02:05 +0100}, Doi = {10.1145/3122955}, Editor = {Iavor S.
Diatchki}, Isbn = {978-1-4503-5182-9}, Publisher = {{ACM}}, Timestamp = {Tue, 06 Nov 2018 16:58:22 +0100}, Title = {Proceedings of the 10th {ACM} {SIGPLAN} International Symposium on Haskell, Oxford, United Kingdom, September 7-8, 2017}, Url = {https://doi.org/10.1145/3122955}, Year = {2017}, Bdsk-Url-1 = {https://doi.org/10.1145/3122955}} @article{DBLP:journals/pacmpl/CockxA18, Author = {Jesper Cockx and Andreas Abel}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/journals/pacmpl/CockxA18}, Date-Added = {2019-02-22 17:01:35 +0100}, Date-Modified = {2019-02-22 17:01:35 +0100}, Doi = {10.1145/3236770}, Journal = {{PACMPL}}, Number = {{ICFP}}, Pages = {75:1--75:30}, Timestamp = {Wed, 21 Nov 2018 12:44:28 +0100}, Title = {Elaborating dependent (co)pattern matching}, Url = {https://doi.org/10.1145/3236770}, Volume = {2}, Year = {2018}, Bdsk-Url-1 = {https://doi.org/10.1145/3236770}} @article{gilbert:hal-01859964, Author = {Gilbert, Ga{\"e}tan and Cockx, Jesper and Sozeau, Matthieu and Tabareau, Nicolas}, Date-Added = {2019-02-22 16:45:34 +0100}, Date-Modified = {2019-02-22 16:45:34 +0100}, Doi = {10.1145/3290316}, Hal_Id = {hal-01859964}, Hal_Version = {v2}, Journal = {{Proceedings of the ACM on Programming Languages}}, Month = Jan, Pages = {1-28}, Pdf = {https://hal.inria.fr/hal-01859964/file/main_popl.pdf}, Publisher = {{ACM}}, Series = {POPL'19}, Title = {{Definitional Proof-Irrelevance without K}}, Url = {https://hal.inria.fr/hal-01859964}, Year = {2019}, Bdsk-Url-1 = {https://hal.inria.fr/hal-01859964}, Bdsk-Url-2 = {https://doi.org/10.1145/3290316}} @article{tabareau.hal-01559073, Author = {Tabareau, Nicolas and Tanter, {\'E}ric and Sozeau, Matthieu}, Date-Added = {2019-02-22 16:44:46 +0100}, Date-Modified = {2019-07-16 07:32:36 -0400}, Doi = {10.1145/3234615}, Hal_Id = {hal-01559073}, Hal_Version = {v5}, Journal = {{Proceedings of the ACM on Programming Languages}}, Keywords = {Coq ; Parametricity ; Type structures ; Program reasoning ; Type theory ; Homotopy Type Theory}, Month = Sep, Pages = {1-29}, Pdf = {https://hal.inria.fr/hal-01559073/file/main_icfp.pdf}, Publisher = {{ACM}}, Series = {ICFP'18}, Title = {{Equivalences for Free}}, Url = {https://hal.inria.fr/hal-01559073}, Year = {2018}, Bdsk-Url-1 = {https://hal.inria.fr/hal-01559073}, Bdsk-Url-2 = {https://doi.org/10.1145/3234615}} @article{DBLP:journals/pacmpl/PoulsenRTKV18, Author = {Casper Bach Poulsen and Arjen Rouvoet and Andrew Tolmach and Robbert Krebbers and Eelco Visser}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/journals/pacmpl/PoulsenRTKV18}, Date-Added = {2019-02-22 16:33:20 +0100}, Date-Modified = {2019-02-22 16:33:20 +0100}, Doi = {10.1145/3158104}, Journal = {{PACMPL}}, Number = {{POPL}}, Pages = {16:1--16:34}, Timestamp = {Tue, 06 Nov 2018 12:51:05 +0100}, Title = {Intrinsically-typed definitional interpreters for imperative languages}, Url = {https://doi.org/10.1145/3158104}, Volume = {2}, Year = {2018}, Bdsk-Url-1 = {https://doi.org/10.1145/3158104}} @inproceedings{DBLP:conf/itp/VytiniotisCW12, Author = {Dimitrios Vytiniotis and Thierry Coquand and David Wahlstedt}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/itp/VytiniotisCW12}, Booktitle = {Interactive Theorem Proving - Third International Conference, {ITP} 2012, Princeton, NJ, USA, August 13-15, 2012.
Proceedings}, Crossref = {DBLP:conf/itp/2012}, Date-Added = {2019-02-22 14:36:21 +0100}, Date-Modified = {2019-02-22 14:36:21 +0100}, Doi = {10.1007/978-3-642-32347-8\_17}, Pages = {250--265}, Timestamp = {Sun, 21 May 2017 00:18:59 +0200}, Title = {Stop When You Are Almost-Full - Adventures in Constructive Termination}, Url = {https://doi.org/10.1007/978-3-642-32347-8\_17}, Year = {2012}, Bdsk-Url-1 = {https://doi.org/10.1007/978-3-642-32347-8%5C_17}} @phdthesis{hancockphd, Author = {Peter Hancock}, Date-Added = {2019-02-22 14:29:34 +0100}, Date-Modified = {2019-02-22 14:30:51 +0100}, School = {{LFCS}}, Title = {Ordinals and Interactive Programs}, Url = {http://www.lfcs.inf.ed.ac.uk/reports/00/ECS-LFCS-00-421/index.html}, Year = {2000}, Bdsk-Url-1 = {http://www.lfcs.inf.ed.ac.uk/reports/00/ECS-LFCS-00-421/index.html}} @inproceedings{hughes1996proving, Author = {Hughes, John and Pareto, Lars and Sabry, Amr}, Booktitle = {POPL}, Date-Added = {2019-02-21 11:30:58 +0100}, Date-Modified = {2019-02-21 11:30:58 +0100}, Pages = {410--423}, Title = {Proving the correctness of reactive systems using sized types}, Volume = {96}, Year = {1996}} @inproceedings{abel2017sized, Author = {Andreas Abel and Andrea Vezzosi and Theo Winterhalter}, Booktitle = {ICFP}, Date-Added = {2019-02-21 11:24:21 +0100}, Date-Modified = {2019-02-21 11:31:42 +0100}, Title = {Normalization by evaluation for sized dependent types}, Year = {2017}} @phdthesis{pareto1998sized, Author = {Pareto, Lars}, Date-Added = {2019-02-21 11:22:19 +0100}, Date-Modified = {2019-02-21 11:22:25 +0100}, Publisher = {Citeseer}, Title = {Sized types}} @misc{sozeau.M12019, Author = {{Matthieu Sozeau}}, Date-Added = {2019-02-12 16:15:46 +0100}, Date-Modified = {2019-02-12 16:16:52 +0100}, Howpublished = {S{\'e}minaire Introduction {\`a} la Recherche, Universit{\'e} Paris Diderot}, Keywords = {Coq}, Month = {February 12th}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/research/publications/Programmation_certifi{\'e}e_et_assistants_de_preuve-120219.pdf}, Title = {{Programmation certifi{\'e}e et assistants de preuve}}, Type = {slides}, Year = {2019}} @misc{sozeau.SPropDeducteam, Address = {Cachan, France}, Author = {Matthieu Sozeau}, Date-Added = {2019-01-31 18:45:23 +0100}, Date-Modified = {2019-01-31 18:46:01 +0100}, Howpublished = {Talk given at the Deducteam seminar}, Month = {January}, Pdf = {http://www.irif.fr/~sozeau/research/publications/Definitional_Proof-Irrelevance_without_K-Deducteam-310119.pdf}, Read = {Oui}, Title = {{Definitional Proof-Irrelevance without K}}, Type = {slides}, Year = {2019}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @misc{the_coq_development_team_2018, Author = {The Coq Development Team}, Date-Added = {2018-12-20 21:09:02 +0100}, Date-Modified = {2018-12-20 21:09:12 +0100}, Doi = {10.5281/zenodo.1219885}, Month = {Apr}, Publisher = {Zenodo}, Title = {The Coq Proof Assistant, version 8.8.0}, Year = {2018}, Bdsk-Url-1 = {https://doi.org/10.5281/zenodo.1219885}} @misc{sozeau.CdF18, Address = {Paris, France}, Author = {Matthieu Sozeau}, Date-Added = {2018-12-12 16:18:00 +0100}, Date-Modified = {2018-12-12 16:19:03 +0100}, Howpublished = {S{\'e}minaire au Coll{\`e}ge de France}, Month = {12 D{\'e}cembre}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Programmer_avec_Coq-CdF-121218.pdf}, Read = {Oui}, Title = {{Programmer avec Coq: filtrage d{\'e}pendant et
r{\'e}cursion}}, Type = {slides}, Year = {2018}} @phdthesis{DBLP:phd/hal/PaulinMohring89, Author = {Christine Paulin{-}Mohring}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/phd/hal/PaulinMohring89}, Date-Added = {2018-12-07 00:13:54 +0100}, Date-Modified = {2018-12-07 00:13:54 +0100}, School = {Paris Diderot University, France}, Timestamp = {Tue, 12 Jul 2016 16:30:40 +0200}, Title = {Extraction de programmes dans le Calcul des Constructions. (Program Extraction in the Calculus of Constructions)}, Url = {https://tel.archives-ouvertes.fr/tel-00431825}, Year = {1989}, Bdsk-Url-1 = {https://tel.archives-ouvertes.fr/tel-00431825}} @misc{sozeau.SPropPPS, Address = {Paris, France}, Author = {Matthieu Sozeau}, Date-Added = {2018-11-09 20:06:54 +0100}, Date-Modified = {2018-11-09 20:07:40 +0100}, Howpublished = {Talk given at the PPS Days, IRIF}, Month = {November}, Pdf = {http://www.irif.fr/~sozeau/research/publications/A_Universe_of_Strict_Propositions_in_Type_Theory-PPS-091118.pdf}, Read = {Oui}, Title = {{A Universe of Strict Propositions in Type Theory}}, Type = {slides}, Year = {2018}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @misc{sozeau.MetaCoqLRI, Address = {Gif-sur-Yvette, France}, Author = {Matthieu Sozeau}, Date-Added = {2018-10-26 19:00:42 +0200}, Date-Modified = {2018-10-26 19:01:48 +0200}, Howpublished = {Talk given at the VALS Seminar, LRI}, Month = {October}, Pdf = {http://www.irif.fr/~sozeau/research/publications/The_MetaCoq_Project-LRI-261018.pdf}, Read = {Oui}, Title = {{The MetaCoq Project}}, Type = {slides}, Year = {2018}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @inproceedings{DBLP:conf/itp/ForsterS17, Author = {Yannick Forster and Gert Smolka}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/itp/ForsterS17}, Booktitle = {Interactive Theorem Proving - 8th International Conference, {ITP} 2017, Bras{\'{\i}}lia, Brazil, September 26-29, 2017, Proceedings}, Crossref = {DBLP:conf/itp/2017}, Date-Added = {2018-10-25 14:23:41 +0200}, Date-Modified = {2018-10-25 14:23:41 +0200}, Doi = {10.1007/978-3-319-66107-0\_13}, Pages = {189--206}, Timestamp = {Tue, 12 Sep 2017 12:29:08 +0200}, Title = {Weak Call-by-Value Lambda Calculus as a Model of Computation in Coq}, Url = {https://doi.org/10.1007/978-3-319-66107-0\_13}, Year = {2017}, Bdsk-Url-1 = {https://doi.org/10.1007/978-3-319-66107-0%5C_13}} @proceedings{DBLP:conf/itp/2017, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/itp/2017}, Date-Added = {2018-10-25 14:23:41 +0200}, Date-Modified = {2018-10-25 14:23:41 +0200}, Doi = {10.1007/978-3-319-66107-0}, Editor = {Mauricio Ayala{-}Rinc{\'{o}}n and C{\'{e}}sar A. 
Mu{\~{n}}oz}, Isbn = {978-3-319-66106-3}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Timestamp = {Wed, 06 Sep 2017 14:53:52 +0200}, Title = {Interactive Theorem Proving - 8th International Conference, {ITP} 2017, Bras{\'{\i}}lia, Brazil, September 26-29, 2017, Proceedings}, Url = {https://doi.org/10.1007/978-3-319-66107-0}, Volume = {10499}, Year = {2017}, Bdsk-Url-1 = {https://doi.org/10.1007/978-3-319-66107-0}} @article{DBLP:journals/corr/abs-1806-03205, Archiveprefix = {arXiv}, Author = {Fabian Kunze and Gert Smolka and Yannick Forster}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/journals/corr/abs-1806-03205}, Date-Added = {2018-10-24 22:44:03 +0200}, Date-Modified = {2018-10-24 22:44:03 +0200}, Eprint = {1806.03205}, Journal = {CoRR}, Timestamp = {Mon, 13 Aug 2018 16:46:47 +0200}, Title = {Formal Small-step Verification of a Call-by-value Lambda Calculus Machine}, Url = {http://arxiv.org/abs/1806.03205}, Volume = {abs/1806.03205}, Year = {2018}, Bdsk-Url-1 = {http://arxiv.org/abs/1806.03205}} @article{DBLP:journals/jfp/ZilianiDKNV15, Author = {Beta Ziliani and Derek Dreyer and Neelakantan R. Krishnaswami and Aleksandar Nanevski and Viktor Vafeiadis}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/journals/jfp/ZilianiDKNV15}, Date-Added = {2018-10-24 21:50:00 +0200}, Date-Modified = {2018-10-24 21:50:00 +0200}, Doi = {10.1017/S0956796815000118}, Journal = {J. Funct. Program.}, Timestamp = {Thu, 15 Jun 2017 21:30:55 +0200}, Title = {Mtac: {A} monad for typed tactic programming in Coq}, Url = {https://doi.org/10.1017/S0956796815000118}, Volume = {25}, Year = {2015}, Bdsk-Url-1 = {https://doi.org/10.1017/S0956796815000118}} @inproceedings{DBLP.conf/itp/AnandBCST18, Author = {Abhishek Anand and Simon Boulier and Cyril Cohen and Matthieu Sozeau and Nicolas Tabareau}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/itp/AnandBCST18}, Booktitle = {{ITP} 2018}, Crossref = {DBLP:conf/itp/2018}, Date-Added = {2018-10-16 17:05:18 +0000}, Date-Modified = {2019-07-16 07:40:01 -0400}, Doi = {10.1007/978-3-319-94821-8\_2}, Month = {July}, Pages = {20--39}, Publisher = {{Springer}}, Series = {LNCS}, Timestamp = {Wed, 03 Oct 2018 12:55:05 +0200}, Title = {Towards Certified Meta-Programming with Typed Template-Coq}, Url = {https://hal.inria.fr/hal-01809681}, Year = {2018}, Bdsk-Url-1 = {https://doi.org/10.1007/978-3-319-94821-8%5C_2}, Bdsk-Url-2 = {https://hal.inria.fr/hal-01809681}} @proceedings{DBLP:conf/itp/2018, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/itp/2018}, Date-Added = {2018-10-16 17:05:18 +0000}, Date-Modified = {2018-10-16 17:05:18 +0000}, Doi = {10.1007/978-3-319-94821-8}, Editor = {Jeremy Avigad and Assia Mahboubi}, Isbn = {978-3-319-94820-1}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Timestamp = {Thu, 05 Jul 2018 14:28:25 +0200}, Title = {Interactive Theorem Proving - 9th International Conference, {ITP} 2018, Held as Part of the Federated Logic Conference, FloC 2018, Oxford, UK, July 9-12, 2018, Proceedings}, Url = {https://doi.org/10.1007/978-3-319-94821-8}, Volume = {10895}, Year = {2018}, Bdsk-Url-1 = {https://doi.org/10.1007/978-3-319-94821-8}} @inproceedings{DBLP.conf/rta/TimanyS18, Author = {Amin Timany and Matthieu Sozeau}, Bibsource = {dblp computer science bibliography, https://dblp.org}, 
Biburl = {https://dblp.org/rec/bib/conf/rta/TimanyS18}, Booktitle = {3rd International Conference on Formal Structures for Computation and Deduction, {FSCD} 2018, July 9-12, 2018, Oxford, {UK}}, Crossref = {DBLP:conf/rta/2018}, Date-Added = {2018-10-16 16:59:08 +0000}, Date-Modified = {2019-07-16 07:40:52 -0400}, Doi = {10.4230/LIPIcs.FSCD.2018.29}, Pages = {29:1--29:16}, Series = {LIPIcs}, Timestamp = {Thu, 23 Aug 2018 15:56:40 +0200}, Title = {Cumulative Inductive Types In Coq}, Url = {https://doi.org/10.4230/LIPIcs.FSCD.2018.29}, Year = {2018}, Bdsk-Url-1 = {https://doi.org/10.4230/LIPIcs.FSCD.2018.29}} @inproceedings{gilbert.hal-01859964, Address = {Lisbon, Portugal}, Author = {Gilbert, Ga{\"e}tan and Cockx, Jesper and Sozeau, Matthieu and Tabareau, Nicolas}, Booktitle = {{46th ACM SIGPLAN Symposium on Principles of Programming Languages (POPL 2019)}}, Date-Added = {2018-10-16 16:14:16 +0000}, Date-Modified = {2018-10-16 16:26:58 +0000}, Hal_Id = {hal-01859964}, Hal_Version = {v1}, Month = Jan, Pdf = {https://hal.inria.fr/hal-01859964/file/main_popl.pdf}, Series = {POPL}, Title = {{Definitional Proof-Irrelevance without K}}, Url = {https://hal.inria.fr/hal-01859964}, Year = {2019}, Bdsk-Url-1 = {https://hal.inria.fr/hal-01859964}} @misc{sozeau.usaarland18, Address = {Saarbr{\"u}cken, Germany}, Author = {{Matthieu Sozeau}}, Date-Added = {2018-04-26 13:17:48 +0000}, Date-Modified = {2018-04-26 13:25:13 +0000}, Howpublished = {Lecture notes for a guest lecture given at Saarland University}, Month = {April}, Title = {{A Gentle Introduction to Equations Or How to Match Regexps with Dependently-Typed Continuations}}, Type = {slides}, Url = {http://www.irif.fr/~sozeau/research/publications/A_Gentle_Introduction_to_Equations-USaarland-260418.pdf}, Year = {2018}, Bdsk-Url-1 = {http://www.irif.fr/~sozeau/research/publications/A_Gentle_Introduction_to_Equations-USaarland-260418.pdf}} @article{winterhaltertabsoz18, Abstract = {{We present an internalization of the 2-groupoid interpretation of the calculus of construction that allows to realize the univalence axiom, proof irrelevance and reasoning modulo. 
As an example, we show that in our setting, the type of Church integers is equal to the inductive type of natural numbers.}}, Author = {Winterhalter, Th{\'e}o and Sozeau, Matthieu and Tabareau, Nicolas}, Date-Added = {2018-04-12 08:41:48 +0000}, Date-Modified = {2018-12-13 21:58:50 +0100}, Language = {Anglais}, Month = October, Note = {{Accepted at CPP'19}}, Title = {{Eliminating Reflection from Type Theory}}, Url = {http://www.irif.fr/~sozeau/research/publications/drafts/Eliminating_Reflection_from_Type_Theory.pdf}, Year = {2018}, Bdsk-Url-1 = {http://hal.archives-ouvertes.fr/hal-00786589}, Bdsk-Url-2 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/groupoidint.pdf}, Bdsk-Url-3 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/internalizingITT.pdf}, Bdsk-Url-4 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Equations_for_Hereditary_Substitution_in_Leivants_Predicative_System_F:_a_case_study.pdf}, Bdsk-Url-5 = {http://dx.doi.org/10.4204/EPTCS.185}, Bdsk-Url-6 = {http://www.irif.fr/~sozeau/research/publications/drafts/Eliminating_Reflection_from_Type_Theory_ICFP.pdf}, Bdsk-Url-7 = {http://www.irif.fr/~sozeau/research/publications/drafts/Eliminating_Reflection_from_Type_Theory.pdf}} @article{equationsreloaded, Abstract = {{We present an internalization of the 2-groupoid interpretation of the calculus of construction that allows to realize the univalence axiom, proof irrelevance and reasoning modulo. As an example, we show that in our setting, the type of Church integers is equal to the inductive type of natural numbers.}}, Affiliation = {Inria Paris & IRIF, Universit{\'e} Paris 7 Denis Diderot}, Author = {Matthieu Sozeau and Cyprien Mangin}, Date-Added = {2018-04-12 08:37:47 +0000}, Date-Modified = {2019-07-18 11:31:35 -0400}, Doi = {10.1145/3341690}, Journal = {{PACMPL}}, Language = {Anglais}, Month = August, Number = {ICFP}, Pages = {86-115}, Title = {{Equations Reloaded: High-Level Dependently-Typed Programming and Proving in Coq}}, Url = {http://www.irif.fr/~sozeau/research/publications/Equations_Reloaded-ICFP19.pdf}, Volume = {3}, Year = {2019}, Bdsk-Url-1 = {http://hal.archives-ouvertes.fr/hal-00786589}, Bdsk-Url-2 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/groupoidint.pdf}, Bdsk-Url-3 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/internalizingITT.pdf}, Bdsk-Url-4 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Equations_for_Hereditary_Substitution_in_Leivants_Predicative_System_F:_a_case_study.pdf}, Bdsk-Url-5 = {http://dx.doi.org/10.4204/EPTCS.185}, Bdsk-Url-6 = {http://www.irif.fr/~sozeau/research/publications/drafts/Equations_Reloaded_ICFP.pdf}, Bdsk-Url-7 = {http://www.irif.fr/~sozeau/research/publications/drafts/Equations_Reloaded.pdf}} @inproceedings{DBLP:conf/csl/Abel06, Author = {Andreas Abel}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/csl/Abel06}, Booktitle = {Computer Science Logic, 20th International Workshop, {CSL} 2006, 15th Annual Conference of the EACSL, Szeged, Hungary, September 25-29, 2006, Proceedings}, Crossref = {DBLP:conf/csl/2006}, Date-Added = {2018-03-16 03:24:05 +0000}, Date-Modified = {2018-03-16 03:24:05 +0000}, Doi = {10.1007/11874683_5}, Pages = {72--88}, Timestamp = {Fri, 02 Jun 2017 13:01:07 +0200}, Title = {Semi-continuous Sized Types and Termination}, Url = {https://doi.org/10.1007/11874683_5}, Year = {2006}, Bdsk-Url-1 
= {https://doi.org/10.1007/11874683_5}} @proceedings{DBLP:conf/csl/2006, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/csl/2006}, Date-Added = {2018-03-16 03:24:05 +0000}, Date-Modified = {2018-03-16 03:24:05 +0000}, Doi = {10.1007/11874683}, Editor = {Zolt{\'{a}}n {\'{E}}sik}, Isbn = {3-540-45458-6}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Timestamp = {Fri, 02 Jun 2017 13:01:07 +0200}, Title = {Computer Science Logic, 20th International Workshop, {CSL} 2006, 15th Annual Conference of the EACSL, Szeged, Hungary, September 25-29, 2006, Proceedings}, Url = {https://doi.org/10.1007/11874683}, Volume = {4207}, Year = {2006}, Bdsk-Url-1 = {https://doi.org/10.1007/11874683}} @article{DBLP:journals/lmcs/Abel08, Author = {Andreas Abel}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/journals/lmcs/Abel08}, Date-Added = {2018-03-16 03:23:34 +0000}, Date-Modified = {2018-03-16 03:23:34 +0000}, Doi = {10.2168/LMCS-4(2:3)2008}, Journal = {Logical Methods in Computer Science}, Number = {2}, Timestamp = {Sat, 20 May 2017 00:22:52 +0200}, Title = {Semi-Continuous Sized Types and Termination}, Url = {https://doi.org/10.2168/LMCS-4(2:3)2008}, Volume = {4}, Year = {2008}, Bdsk-Url-1 = {https://doi.org/10.2168/LMCS-4(2:3)2008}} @article{DBLP:journals/pacmpl/0001VW17, Author = {Andreas Abel and Andrea Vezzosi and Th{\'{e}}o Winterhalter}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/journals/pacmpl/0001VW17}, Date-Added = {2018-03-16 03:20:38 +0000}, Date-Modified = {2018-03-16 03:20:38 +0000}, Doi = {10.1145/3110277}, Journal = {{PACMPL}}, Number = {{ICFP}}, Pages = {33:1--33:30}, Timestamp = {Tue, 12 Sep 2017 16:16:51 +0200}, Title = {Normalization by evaluation for sized dependent types}, Url = {http://doi.acm.org/10.1145/3110277}, Volume = {1}, Year = {2017}, Bdsk-Url-1 = {http://doi.acm.org/10.1145/3110277}, Bdsk-Url-2 = {https://dx.doi.org/10.1145/3110277}} @inproceedings{DBLP:conf/cpp/Spector-Zabusky18, Author = {Antal Spector{-}Zabusky and Joachim Breitner and Christine Rizkallah and Stephanie Weirich}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/cpp/Spector-Zabusky18}, Booktitle = {Proceedings of the 7th {ACM} {SIGPLAN} International Conference on Certified Programs and Proofs, {CPP} 2018, Los Angeles, CA, USA, January 8-9, 2018}, Crossref = {DBLP:conf/cpp/2018}, Date-Added = {2018-03-15 12:40:54 +0000}, Date-Modified = {2018-03-15 12:40:54 +0000}, Doi = {10.1145/3167092}, Pages = {14--27}, Timestamp = {Sat, 30 Dec 2017 17:34:07 +0100}, Title = {Total Haskell is reasonable Coq}, Url = {http://doi.acm.org/10.1145/3167092}, Year = {2018}, Bdsk-Url-1 = {http://doi.acm.org/10.1145/3167092}, Bdsk-Url-2 = {https://dx.doi.org/10.1145/3167092}} @article{DBLP:journals/corr/CohenCHM16, Archiveprefix = {arXiv}, Author = {Cyril Cohen and Thierry Coquand and Simon Huber and Anders M{\"{o}}rtberg}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/journals/corr/CohenCHM16}, Date-Added = {2018-03-15 12:35:02 +0000}, Date-Modified = {2018-03-15 12:35:02 +0000}, Eprint = {1611.02108}, Journal = {CoRR}, Timestamp = {Wed, 07 Jun 2017 14:41:32 +0200}, Title = {Cubical Type Theory: a constructive interpretation of the univalence axiom}, Url = {http://arxiv.org/abs/1611.02108}, Volume = {abs/1611.02108}, 
Year = {2016}, Bdsk-Url-1 = {http://arxiv.org/abs/1611.02108}} @inproceedings{martinlof1971hauptsatz, Author = {Martin-L{\"o}f, Per}, Booktitle = {Proceedings of the 2nd Scandinavian logic symposium}, Date-Added = {2018-03-15 11:20:22 +0000}, Date-Modified = {2018-03-15 11:20:22 +0000}, Editor = {Fenstad, Jan Erik}, Pages = {179 -- 216}, Publisher = {North-Holland}, Series = {Studies in Logic and the Foundations of Mathematics}, Title = {Hauptsatz for the intuitionistic theory of iterated inductive definitions}, Volume = {63}, Year = 1971} @incollection{Martin-Lof-1972, Author = {Martin-L{\"o}f, Per}, Booktitle = {Twenty-five years of constructive type theory ({V}enice, 1995)}, Date-Added = {2018-03-15 11:20:22 +0000}, Date-Modified = {2018-03-15 11:20:22 +0000}, Editor = {Giovanni Sambin and Jan M. Smith}, Mrclass = {03B15 (03F55)}, Mrnumber = 1686864, Pages = {127--172}, Publisher = {Oxford University Press}, Series = {Oxford Logic Guides}, Title = {An intuitionistic theory of types}, Volume = 36, Year = 1998} @incollection{Martin-Lof-1973, Author = {Martin-L{\"o}f, Per}, Booktitle = {Logic Colloquium '73, Proceedings of the Logic Colloquium}, Date-Added = {2018-03-15 11:20:22 +0000}, Date-Modified = {2018-03-15 11:20:22 +0000}, Editor = {H.E. Rose and J.C. Shepherdson}, Mrclass = {02C15 (02D99)}, Mrnumber = {0387009 (52 \#7856)}, Mrreviewer = {Horst Luckhardt}, Pages = {73--118}, Publisher = {North-Holland}, Series = {Studies in Logic and the Foundations of Mathematics}, Title = {An intuitionistic theory of types: predicative part}, Volume = 80, Year = 1975} @techreport{hml75, Address = {Stockholm, Sweden}, Author = {Hancock, Peter and Martin-L\"{o}f, Per}, Date-Added = {2018-03-15 11:20:22 +0000}, Date-Modified = {2018-03-15 11:20:22 +0000}, Institution = {University of Stockholm}, Number = 3, Title = {Syntax and semantics of the language of primitive recursive functions}, Year = 1975} @incollection{Martin-Lof-1979, Author = {Martin-L{\"o}f, Per}, Booktitle = {Logic, Methodology and Philosophy of Science VI, Proceedings of the Sixth International Congress of Logic, Methodology and Philosophy of Science, Hannover 1979}, Date-Added = {2018-03-15 11:20:22 +0000}, Date-Modified = {2018-03-15 11:20:22 +0000}, Doi = {10.1016/S0049-237X(09)70189-2}, Editor = {L. Jonathan Cohen and Jerzy {{\L}o{\'s}} and Helmut Pfeiffer and Klaus-Peter Podewski}, Mrclass = {03F50 (03B70 03F55 68Q45)}, Mrnumber = {682410 (85d:03112)}, Mrreviewer = {B. H. Mayoh}, Pages = {153--175}, Publisher = {North-Holland}, Series = {Studies in Logic and the Foundations of Mathematics}, Title = {Constructive mathematics and computer programming}, Url = {http://dx.doi.org/10.1016/S0049-237X(09)70189-2}, Volume = 104, Year = 1982, Bdsk-Url-1 = {http://dx.doi.org/10.1016/S0049-237X(09)70189-2}} @book{martin-lof:bibliopolis, Author = {Martin-L{\"o}f, Per}, Date-Added = {2018-03-15 11:20:22 +0000}, Date-Modified = {2018-03-15 11:20:22 +0000}, Isbn = {88-7088-105-9}, Mrclass = {03B15 (03F50 03F55)}, Mrnumber = {769301 (86j:03005)}, Mrreviewer = {M. M. 
Richter}, Pages = {iv+91}, Publisher = {Bibliopolis}, Series = {Studies in Proof Theory}, Subtitle = {Notes by Giovanni Sambin}, Title = {Intuitionistic type theory}, Volume = {1}, Year = {1984}} @article{martin2006100, Author = {Martin-L{\"o}f, Per}, Date-Added = {2018-03-15 11:20:22 +0000}, Date-Modified = {2018-03-15 11:20:22 +0000}, Journal = {The Computer Journal}, Number = {3}, Pages = {345--350}, Publisher = {BCS}, Title = {100 years of {Z}ermelo's axiom of choice: what was the problem with it?}, Volume = {49}, Year = {2006}} @inproceedings{DBLP:conf/lpar/JouannaudS17, Author = {Jean{-}Pierre Jouannaud and Pierre{-}Yves Strub}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/lpar/JouannaudS17}, Booktitle = {LPAR-21, 21st International Conference on Logic for Programming, Artificial Intelligence and Reasoning, Maun, Botswana, May 7-12, 2017}, Crossref = {DBLP:conf/lpar/2017}, Date-Added = {2018-03-08 17:44:46 +0000}, Date-Modified = {2018-03-08 17:44:46 +0000}, Pages = {474--489}, Timestamp = {Thu, 23 Nov 2017 16:56:11 +0100}, Title = {Coq without Type Casts: {A} Complete Proof of Coq Modulo Theory}, Url = {http://www.easychair.org/publications/paper/340342}, Year = {2017}, Bdsk-Url-1 = {http://www.easychair.org/publications/paper/340342}} @proceedings{DBLP:conf/lpar/2017, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/lpar/2017}, Date-Added = {2018-03-08 17:44:46 +0000}, Date-Modified = {2018-03-08 17:44:46 +0000}, Editor = {Thomas Eiter and David Sands}, Publisher = {EasyChair}, Series = {EPiC Series in Computing}, Timestamp = {Thu, 23 Nov 2017 16:56:11 +0100}, Title = {LPAR-21, 21st International Conference on Logic for Programming, Artificial Intelligence and Reasoning, Maun, Botswana, May 7-12, 2017}, Url = {http://www.easychair.org/publications/volume/LPAR-21}, Volume = {46}, Year = {2017}, Bdsk-Url-1 = {http://www.easychair.org/publications/volume/LPAR-21}} @article{DBLP:journals/jfp/0001P16, Author = {Andreas Abel and Brigitte Pientka}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/journals/jfp/0001P16}, Date-Added = {2018-03-04 15:44:11 +0000}, Date-Modified = {2018-03-04 15:44:11 +0000}, Doi = {10.1017/S0956796816000022}, Journal = {J. Funct. 
Program.}, Pages = {e2}, Timestamp = {Sat, 27 May 2017 14:24:34 +0200}, Title = {Well-founded recursion with copatterns and sized types}, Url = {https://doi.org/10.1017/S0956796816000022}, Volume = {26}, Year = {2016}, Bdsk-Url-1 = {https://doi.org/10.1017/S0956796816000022}} @phdthesis{DBLP:phd/hal/Lescuyer11, Author = {St{\'{e}}phane Lescuyer}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/phd/hal/Lescuyer11}, Date-Added = {2018-03-04 15:28:41 +0000}, Date-Modified = {2018-03-04 15:28:58 +0000}, School = {University of Paris-Sud, Orsay, France}, Timestamp = {Mon, 11 Jul 2016 18:16:19 +0200}, Title = {Formalizing and Implementing a Reflexive Tactic for Automated Deduction in Coq}, Url = {https://tel.archives-ouvertes.fr/tel-00713668}, Year = {2011}, Bdsk-Url-1 = {https://tel.archives-ouvertes.fr/tel-00713668}} @inproceedings{DBLP:conf/csl/BirkedalBCGSV16, Author = {Lars Birkedal and Ales Bizjak and Ranald Clouston and Hans Bugge Grathwohl and Bas Spitters and Andrea Vezzosi}, Bibsource = {dblp computer science bibliography, https://dblp.org}, Biburl = {https://dblp.org/rec/bib/conf/csl/BirkedalBCGSV16}, Booktitle = {25th {EACSL} Annual Conference on Computer Science Logic, {CSL} 2016, August 29 - September 1, 2016, Marseille, France}, Crossref = {DBLP:conf/csl/2016}, Date-Added = {2018-03-04 12:35:00 +0000}, Date-Modified = {2018-03-04 12:35:00 +0000}, Doi = {10.4230/LIPIcs.CSL.2016.23}, Pages = {23:1--23:17}, Timestamp = {Wed, 27 Sep 2017 13:54:28 +0200}, Title = {Guarded Cubical Type Theory: Path Equality for Guarded Recursion}, Url = {https://doi.org/10.4230/LIPIcs.CSL.2016.23}, Year = {2016}, Bdsk-Url-1 = {https://doi.org/10.4230/LIPIcs.CSL.2016.23}} @inproceedings{DBLP:conf/popl/SwamyHKRDFBFSKZ16, Author = {Nikhil Swamy and Catalin Hritcu and Chantal Keller and Aseem Rastogi and Antoine Delignat{-}Lavaud and Simon Forest and Karthikeyan Bhargavan and C{\'{e}}dric Fournet and Pierre{-}Yves Strub and Markulf Kohlweiss and Jean Karim Zinzindohoue and Santiago Zanella B{\'{e}}guelin}, Bibsource = {dblp computer science bibliography, http://dblp.org}, Biburl = {http://dblp.org/rec/bib/conf/popl/SwamyHKRDFBFSKZ16}, Booktitle = {Proceedings of the 43rd Annual {ACM} {SIGPLAN-SIGACT} Symposium on Principles of Programming Languages, {POPL} 2016, St. 
Petersburg, FL, USA, January 20 - 22, 2016}, Crossref = {DBLP:conf/popl/2016}, Date-Added = {2018-02-22 13:03:47 +0000}, Date-Modified = {2018-02-22 13:03:47 +0000}, Doi = {10.1145/2837614.2837655}, Pages = {256--270}, Timestamp = {Wed, 09 Mar 2016 08:11:59 +0100}, Title = {Dependent types and multi-monadic effects in {F}}, Url = {http://doi.acm.org/10.1145/2837614.2837655}, Year = {2016}, Bdsk-Url-1 = {http://doi.acm.org/10.1145/2837614.2837655}, Bdsk-Url-2 = {https://dx.doi.org/10.1145/2837614.2837655}} @inproceedings{DBLP:conf/lics/DagandM13, Author = {Pierre{-}{\'{E}}variste Dagand and Conor McBride}, Bibsource = {dblp computer science bibliography, http://dblp.org}, Biburl = {http://dblp.org/rec/bib/conf/lics/DagandM13}, Booktitle = {28th Annual {ACM/IEEE} Symposium on Logic in Computer Science, {LICS} 2013, New Orleans, LA, USA, June 25-28, 2013}, Crossref = {DBLP:conf/lics/2013}, Date-Added = {2018-02-22 12:55:56 +0000}, Date-Modified = {2018-02-22 12:55:56 +0000}, Doi = {10.1109/LICS.2013.60}, Pages = {530--539}, Timestamp = {Thu, 25 May 2017 00:42:41 +0200}, Title = {A Categorical Treatment of Ornaments}, Url = {https://doi.org/10.1109/LICS.2013.60}, Year = {2013}, Bdsk-Url-1 = {https://doi.org/10.1109/LICS.2013.60}} @proceedings{DBLP:conf/lics/2013, Bibsource = {dblp computer science bibliography, http://dblp.org}, Biburl = {http://dblp.org/rec/bib/conf/lics/2013}, Date-Added = {2018-02-22 12:55:56 +0000}, Date-Modified = {2018-02-22 12:55:56 +0000}, Isbn = {978-1-4799-0413-6}, Publisher = {{IEEE} Computer Society}, Timestamp = {Wed, 27 May 2015 19:02:35 +0200}, Title = {28th Annual {ACM/IEEE} Symposium on Logic in Computer Science, {LICS} 2013, New Orleans, LA, USA, June 25-28, 2013}, Url = {http://ieeexplore.ieee.org/xpl/mostRecentIssue.jsp?punumber=6570844}, Year = {2013}, Bdsk-Url-1 = {http://ieeexplore.ieee.org/xpl/mostRecentIssue.jsp?punumber=6570844}} @article{DBLP:journals/jfp/Dagand17, Author = {Pierre{-}{\'{E}}variste Dagand}, Bibsource = {dblp computer science bibliography, http://dblp.org}, Biburl = {http://dblp.org/rec/bib/journals/jfp/Dagand17}, Date-Added = {2018-02-22 12:55:12 +0000}, Date-Modified = {2018-02-22 12:55:12 +0000}, Doi = {10.1017/S0956796816000356}, Journal = {J. Funct. Program.}, Pages = {e9}, Timestamp = {Sat, 27 May 2017 14:24:34 +0200}, Title = {The essence of ornaments}, Url = {https://doi.org/10.1017/S0956796816000356}, Volume = {27}, Year = {2017}, Bdsk-Url-1 = {https://doi.org/10.1017/S0956796816000356}} @inproceedings{templatecoq18, Abstract = {{We present an internalization of the 2-groupoid interpretation of the calculus of construction that allows to realize the univalence axiom, proof irrelevance and reasoning modulo. 
As an example, we show that in our setting, the type of Church integers is equal to the inductive type of natural numbers.}}, Author = {Abhishek Anand and Simon Boulier and Cyril Cohen and Matthieu Sozeau and Nicolas Tabareau}, Booktitle = {ITP}, Date-Added = {2018-02-07 11:11:14 +0000}, Date-Modified = {2018-10-16 16:48:53 +0000}, Language = {Anglais}, Month = January, Note = {{Accepted}}, Title = {{Towards Certified Meta Programming with Typed Template-Coq}}, Url = {https://hal.inria.fr/hal-01809681}, Year = {2018}, Bdsk-Url-1 = {http://hal.archives-ouvertes.fr/hal-00786589}, Bdsk-Url-2 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/groupoidint.pdf}, Bdsk-Url-3 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/internalizingITT.pdf}, Bdsk-Url-4 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Equations_for_Hereditary_Substitution_in_Leivants_Predicative_System_F:_a_case_study.pdf}, Bdsk-Url-5 = {http://dx.doi.org/10.4204/EPTCS.185}, Bdsk-Url-6 = {http://www.irif.fr/~sozeau/research/publications/drafts/Towards_Certified_Meta-Programming_with_Typed_Template-Coq.pdf}, Bdsk-Url-7 = {https://hal.inria.fr/hal-01809681}} @inproceedings{timanysoz18, Abstract = {{We present an internalization of the 2-groupoid interpretation of the calculus of construction that allows to realize the univalence axiom, proof irrelevance and reasoning modulo. As an example, we show that in our setting, the type of Church integers is equal to the inductive type of natural numbers.}}, Address = {Oxford, England}, Author = {Amin Timany and Matthieu Sozeau}, Booktitle = {FSCD}, Date-Added = {2018-02-07 11:07:33 +0000}, Date-Modified = {2018-04-12 08:47:46 +0000}, Language = {Anglais}, Month = August, Note = {{Accepted}}, Title = {{Cumulative Inductive Types in Coq}}, Url = {http://www.irif.fr/~sozeau/research/publications/drafts/Cumulative_Inductive_Types_in_Coq_v2.pdf}, Year = {2018}, Bdsk-Url-1 = {http://hal.archives-ouvertes.fr/hal-00786589}, Bdsk-Url-2 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/groupoidint.pdf}, Bdsk-Url-3 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/internalizingITT.pdf}, Bdsk-Url-4 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Equations_for_Hereditary_Substitution_in_Leivants_Predicative_System_F:_a_case_study.pdf}, Bdsk-Url-5 = {http://dx.doi.org/10.4204/EPTCS.185}, Bdsk-Url-6 = {http://www.irif.fr/~sozeau/research/publications/drafts/Cumulative_Inductive_Types_in_Coq_v2.pdf}} @misc{sozeauEUTypes18, Address = {Nijmegen, Netherlands}, Author = {Matthieu Sozeau}, Date-Added = {2018-02-07 10:39:03 +0000}, Date-Modified = {2018-02-07 10:57:36 +0000}, Howpublished = {Talk given at the EUTypes 2018 Working Meeting}, Month = {January}, Read = {Oui}, Title = {Nested, Well-Founded and Mutual recursion in Equations}, Type = {slides}, Url = {https://eutypes.cs.ru.nl/eutypes_pmwiki/uploads/Meetings/Sozeau.pdf}, Year = {2018}, Bdsk-Url-1 = {https://popl18.sigplan.org/event/pepm-2018-equations-from-clauses-to-splittings-to-functions-poster-demo-talk-}, Bdsk-Url-2 = {https://eutypes.cs.ru.nl/eutypes_pmwiki/uploads/Meetings/Sozeau.pdf}} @inproceedings{sozeauCoqCoqPL18, Address = {Los Angeles, CA, USA}, Author = {Matthieu Sozeau and Maxime D{\'e}n{\`e}s and Yves Bertot}, Booktitle = {CoqPL'18}, Date-Added = {2018-02-07 10:33:32 +0000}, Date-Modified = {2018-02-07 10:34:40 +0000}, Month = {January}, Read = {Oui}, Title = {{Coq dev talk}}, Url = 
{https://popl18.sigplan.org/event/coqpl-2018-session-with-the-coq-development-team}, Year = {2018}, Bdsk-Url-1 = {https://popl18.sigplan.org/event/pepm-2018-equations-from-clauses-to-splittings-to-functions-poster-demo-talk-}, Bdsk-Url-2 = {https://popl18.sigplan.org/event/coqpl-2018-session-with-the-coq-development-team}} @inproceedings{sozeauCoqPL18, Address = {Los Angeles, CA, USA}, Author = {Simon Boulier and Matthieu Sozeau and Nicolas Tabareau and Abhishek Anand}, Booktitle = {CoqPL'18}, Date-Added = {2018-02-07 10:31:30 +0000}, Date-Modified = {2018-02-07 10:33:30 +0000}, Month = {January}, Read = {Oui}, Title = {{Typed Template Coq}}, Url = {https://popl18.sigplan.org/event/coqpl-2018-typed-template-coq}, Year = {2018}, Bdsk-Url-1 = {https://popl18.sigplan.org/event/pepm-2018-equations-from-clauses-to-splittings-to-functions-poster-demo-talk-}, Bdsk-Url-2 = {https://popl18.sigplan.org/event/coqpl-2018-typed-template-coq}} @misc{matthieu_sozeau_2017_1117298, Author = {{Matthieu Sozeau and Cyprien Mangin}}, Date-Added = {2018-02-07 10:22:21 +0000}, Date-Modified = {2018-02-07 10:23:53 +0000}, Doi = {10.5281/zenodo.1117298}, Month = dec, Title = {{Equations 1.0 for Coq 8.7}}, Url = {https://doi.org/10.5281/zenodo.1117298}, Year = 2017, Bdsk-Url-1 = {https://doi.org/10.5281/zenodo.1117298}} @inproceedings{malecha2016easy-and-efficient, Address = {Berlin, Heidelberg}, Author = {Malecha, Gregory and Bengtson, Jesper}, Booktitle = {ESOP}, Date-Added = {2018-02-02 13:29:13 +0000}, Date-Modified = {2018-10-25 14:29:30 +0200}, Doi = {10.1007/978-3-662-49498-1_21}, Editor = {Thiemann, Peter}, Isbn = {978-3-662-49498-1}, Pages = {532--559}, Publisher = {Springer Berlin Heidelberg}, Title = {Extensible and Efficient Automation Through Reflective Tactics}, Url = {http://dx.doi.org/10.1007/978-3-662-49498-1_21}, Year = {2016}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/978-3-662-49498-1_21}} @inproceedings{10.1007/978-3-642-14052-5_8, Abstract = {Coq has within its logic a programming language that can be used to replace many deduction steps into a single computation, this is the so-called reflection. 
In this paper, we present two extensions of the evaluation mechanism that preserve its correctness and make it possible to deal with cpu-intensive tasks such as proof checking of SAT traces.}, Address = {Berlin, Heidelberg}, Author = {Armand, Micha{\"e}l and Gr{\'e}goire, Benjamin and Spiwack, Arnaud and Th{\'e}ry, Laurent}, Booktitle = {Interactive Theorem Proving}, Date-Added = {2018-02-02 12:04:02 +0000}, Date-Modified = {2018-02-02 12:04:02 +0000}, Editor = {Kaufmann, Matt and Paulson, Lawrence C.}, Isbn = {978-3-642-14052-5}, Pages = {83--98}, Publisher = {Springer Berlin Heidelberg}, Title = {Extending Coq with Imperative Features and Its Application to SAT Verification}, Year = {2010}} @article{DBLP:journals/pacmpl/0001OV18, Author = {Andreas Abel and Joakim {\"{O}}hman and Andrea Vezzosi}, Bibsource = {dblp computer science bibliography, http://dblp.org}, Biburl = {http://dblp.org/rec/bib/journals/pacmpl/0001OV18}, Date-Added = {2018-02-02 10:20:10 +0000}, Date-Modified = {2018-02-02 10:20:10 +0000}, Doi = {10.1145/3158111}, Journal = {{PACMPL}}, Number = {{POPL}}, Pages = {23:1--23:29}, Timestamp = {Fri, 05 Jan 2018 12:57:30 +0100}, Title = {Decidability of conversion for type theory in type theory}, Url = {http://doi.acm.org/10.1145/3158111}, Volume = {2}, Year = {2018}, Bdsk-Url-1 = {http://doi.acm.org/10.1145/3158111}, Bdsk-Url-2 = {https://dx.doi.org/10.1145/3158111}} @article{ziliani_sozeau_2017, Author = {ZILIANI, BETA and SOZEAU, MATTHIEU}, Date-Added = {2018-02-02 09:54:57 +0000}, Date-Modified = {2018-02-02 09:54:57 +0000}, Doi = {10.1017/S0956796817000028}, Journal = {Journal of Functional Programming}, Pages = {e10}, Publisher = {Cambridge University Press}, Title = {A comprehensible guide to a new unifier for CIC including universe polymorphism and overloading}, Volume = {27}, Year = {2017}, Bdsk-Url-1 = {https://dx.doi.org/10.1017/S0956796817000028}} @phdthesis{boutillier:tel-01054723, Author = {Boutillier, Pierre}, Date-Added = {2018-01-26 09:50:37 +0000}, Date-Modified = {2018-01-26 09:50:37 +0000}, Hal_Id = {tel-01054723}, Hal_Version = {v1}, Keywords = {Coq (software) ; Proof assistant ; Induction (logic) ; Pattern-matching ; Coq (logiciel) ; Assistants de preuve ; Langages de programmation -- S{\'e}mantique ; Induction (logique) ; filtrage}, Month = Feb, Pdf = {https://tel.archives-ouvertes.fr/tel-01054723/file/these_boutillier.pdf}, School = {{Universit{\'e} Paris-Diderot - Paris VII}}, Title = {{New tool to compute with inductive in Coq}}, Type = {Theses}, Url = {https://tel.archives-ouvertes.fr/tel-01054723}, Year = {2014}, Bdsk-Url-1 = {https://tel.archives-ouvertes.fr/tel-01054723}} @phdthesis{DBLP:phd/hal/Boutillier14, Author = {Pierre Boutillier}, Bibsource = {dblp computer science bibliography, http://dblp.org}, Biburl = {http://dblp.org/rec/bib/phd/hal/Boutillier14}, Date-Added = {2018-01-26 09:49:59 +0000}, Date-Modified = {2018-01-26 09:49:59 +0000}, School = {Paris Diderot University, France}, Timestamp = {Thu, 30 Jun 2016 14:34:56 +0200}, Title = {De nouveaux outils pour calculer avec des inductifs en Coq. 
(New tool to compute with inductive in Coq)}, Url = {https://tel.archives-ouvertes.fr/tel-01054723}, Year = {2014}, Bdsk-Url-1 = {https://tel.archives-ouvertes.fr/tel-01054723}} @misc{the_coq_development_team_2017_1133970, Author = {{The Coq Development Team}}, Date-Added = {2018-01-25 16:12:01 +0000}, Date-Modified = {2018-01-26 09:53:27 +0000}, Doi = {10.5281/zenodo.1133970}, Month = dec, Title = {{The Coq Proof Assistant, version 8.7.1}}, Url = {https://doi.org/10.5281/zenodo.1133970}, Year = 2017, Bdsk-Url-1 = {https://dx.doi.org/10.5281/zenodo.1133970}, Bdsk-Url-2 = {https://doi.org/10.5281/zenodo.1133970}} @article{Wu2014, Abstract = {There are numerous textbooks on regular languages. Many of them focus on finite automata for proving properties. Unfortunately, automata are not so straightforward to formalise in theorem provers. The reason is that natural representations for automata are graphs, matrices or functions, none of which are inductive datatypes. Regular expressions can be defined straightforwardly as a datatype and a corresponding reasoning infrastructure comes for free in theorem provers. We show in this paper that a central result from formal language theory---the Myhill-Nerode Theorem---can be recreated using only regular expressions. From this theorem many closure properties of regular languages follow.}, Author = {Wu, Chunhan and Zhang, Xingyuan and Urban, Christian}, Date-Added = {2018-01-20 16:45:48 +0000}, Date-Modified = {2018-01-20 16:45:48 +0000}, Day = {01}, Doi = {10.1007/s10817-013-9297-2}, Issn = {1573-0670}, Journal = {Journal of Automated Reasoning}, Month = {Apr}, Number = {4}, Pages = {451--480}, Title = {A Formalisation of the Myhill-Nerode Theorem Based on Regular Expressions}, Url = {https://doi.org/10.1007/s10817-013-9297-2}, Volume = {52}, Year = {2014}, Bdsk-Url-1 = {https://doi.org/10.1007/s10817-013-9297-2}} @article{Owens2008, Abstract = {Higher-order logic proof systems combine functional programming with logic, providing functional programmers with a comfortable setting for the formalization of programs, specifications, and proofs. However, a possibly unfamiliar aspect of working in such an environment is that formally establishing program termination is necessary. In many cases, termination can be automatically proved, but there are useful programs that diverge and others that always terminate but have difficult termination proofs. 
We discuss techniques that support the expression of such programs as logical functions.}, Author = {Owens, Scott and Slind, Konrad}, Date-Added = {2018-01-20 16:43:45 +0000}, Date-Modified = {2018-01-20 16:43:45 +0000}, Day = {01}, Doi = {10.1007/s10990-008-9038-0}, Issn = {1573-0557}, Journal = {Higher-Order and Symbolic Computation}, Month = {Dec}, Number = {4}, Pages = {377--409}, Title = {Adapting functional programs to higher order logic}, Url = {https://doi.org/10.1007/s10990-008-9038-0}, Volume = {21}, Year = {2008}, Bdsk-Url-1 = {https://doi.org/10.1007/s10990-008-9038-0}} @article{yi_2006, Author = {YI, KWANGKEUN}, Date-Added = {2018-01-20 16:42:10 +0000}, Date-Modified = {2018-01-20 16:42:10 +0000}, Doi = {10.1017/S0956796806006149}, Journal = {Journal of Functional Programming}, Number = {6}, Pages = {663--670}, Publisher = {Cambridge University Press}, Title = {EDUCATIONAL PEARL: `Proof-directed debugging' revisited for a first-order version}, Volume = {16}, Year = {2006}, Bdsk-Url-1 = {https://dx.doi.org/10.1017/S0956796806006149}} @inproceedings{DBLP:conf/cpp/CoquandS11, Author = {Thierry Coquand and Vincent Siles}, Bibsource = {dblp computer science bibliography, http://dblp.org}, Biburl = {http://dblp.org/rec/bib/conf/cpp/CoquandS11}, Booktitle = {Certified Programs and Proofs - First International Conference, {CPP} 2011, Kenting, Taiwan, December 7-9, 2011. Proceedings}, Crossref = {DBLP:conf/cpp/2011}, Date-Added = {2018-01-20 16:31:49 +0000}, Date-Modified = {2018-01-20 16:31:49 +0000}, Doi = {10.1007/978-3-642-25379-9_11}, Pages = {119--134}, Timestamp = {Thu, 25 May 2017 00:42:11 +0200}, Title = {A Decision Procedure for Regular Expression Equivalence in Type Theory}, Url = {https://doi.org/10.1007/978-3-642-25379-9_11}, Year = {2011}, Bdsk-Url-1 = {https://doi.org/10.1007/978-3-642-25379-9_11}} @inproceedings{DBLP:conf/popl/Weirich17, Author = {Stephanie Weirich}, Bibsource = {dblp computer science bibliography, http://dblp.org}, Biburl = {http://dblp.org/rec/bib/conf/popl/Weirich17}, Booktitle = {Proceedings of the 44th {ACM} {SIGPLAN} Symposium on Principles of Programming Languages, {POPL} 2017, Paris, France, January 18-20, 2017}, Crossref = {DBLP:conf/popl/2017}, Date-Added = {2018-01-20 16:29:24 +0000}, Date-Modified = {2018-01-20 16:29:24 +0000}, Pages = {1}, Timestamp = {Wed, 28 Dec 2016 13:22:29 +0100}, Title = {The influence of dependent types (keynote)}, Url = {http://dl.acm.org/citation.cfm?id=3009923}, Year = {2017}, Bdsk-Url-1 = {http://dl.acm.org/citation.cfm?id=3009923}} @article{Brzozowski:1964:DRE:321239.321249, Acmid = {321249}, Address = {New York, NY, USA}, Author = {Brzozowski, Janusz A.}, Date-Added = {2018-01-20 13:41:40 +0000}, Date-Modified = {2018-01-20 16:04:37 +0000}, Doi = {10.1145/321239.321249}, Issn = {0004-5411}, Issue_Date = {Oct. 1964}, Journal = {J. ACM}, Month = oct, Number = {4}, Pages = {481--494}, Publisher = {ACM}, Title = {Derivatives of Regular Expressions}, Url = {http://doi.acm.org/10.1145/321239.321249}, Volume = {11}, Year = {1964}, Bdsk-Url-1 = {http://doi.acm.org/10.1145/321239.321249}, Bdsk-Url-2 = {https://dx.doi.org/10.1145/321239.321249}} @article{Harper:1999:PD:968578.968582, Acmid = {968582}, Address = {New York, NY, USA}, Author = {Harper, Robert}, Date-Added = {2018-01-20 13:28:57 +0000}, Date-Modified = {2018-01-20 16:04:56 +0000}, Doi = {10.1017/S0956796899003378}, Issn = {0956-7968}, Issue_Date = {July 1999}, Journal = {J. Funct. 
Program.}, Month = jul, Number = {4}, Pages = {463--469}, Publisher = {Cambridge University Press}, Title = {{Proof-Directed Debugging}}, Url = {http://dx.doi.org/10.1017/S0956796899003378}, Volume = {9}, Year = {1999}, Bdsk-Url-1 = {http://dx.doi.org/10.1017/S0956796899003378}} @inproceedings{DBLP:conf/cpp/DoczkalKS13, Author = {Christian Doczkal and Jan{-}Oliver Kaiser and Gert Smolka}, Bibsource = {dblp computer science bibliography, http://dblp.org}, Biburl = {http://dblp.org/rec/bib/conf/cpp/DoczkalKS13}, Booktitle = {Certified Programs and Proofs - Third International Conference, {CPP} 2013, Melbourne, VIC, Australia, December 11-13, 2013, Proceedings}, Crossref = {DBLP:conf/cpp/2013}, Date-Added = {2018-01-20 12:47:25 +0000}, Date-Modified = {2018-01-20 12:47:25 +0000}, Doi = {10.1007/978-3-319-03545-1_6}, Pages = {82--97}, Timestamp = {Thu, 25 May 2017 00:42:11 +0200}, Title = {A Constructive Theory of Regular Languages in Coq}, Url = {https://doi.org/10.1007/978-3-319-03545-1_6}, Year = {2013}, Bdsk-Url-1 = {https://doi.org/10.1007/978-3-319-03545-1_6}} @proceedings{DBLP:conf/cpp/2013, Bibsource = {dblp computer science bibliography, http://dblp.org}, Biburl = {http://dblp.org/rec/bib/conf/cpp/2013}, Date-Added = {2018-01-20 12:47:18 +0000}, Date-Modified = {2018-01-20 12:47:18 +0000}, Doi = {10.1007/978-3-319-03545-1}, Editor = {Georges Gonthier and Michael Norrish}, Isbn = {978-3-319-03544-4}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Timestamp = {Thu, 25 May 2017 00:42:11 +0200}, Title = {Certified Programs and Proofs - Third International Conference, {CPP} 2013, Melbourne, VIC, Australia, December 11-13, 2013, Proceedings}, Url = {https://doi.org/10.1007/978-3-319-03545-1}, Volume = {8307}, Year = {2013}, Bdsk-Url-1 = {https://doi.org/10.1007/978-3-319-03545-1}} @techreport{timany.hal-01615123, Author = {Timany, Amin and Sozeau, Matthieu}, Date-Added = {2017-12-15 14:07:01 +0000}, Date-Modified = {2017-12-15 14:10:24 +0000}, Hal_Id = {hal-01615123}, Hal_Version = {v1}, Institution = {{KU Leuven, Belgium ; Inria Paris}}, Keywords = {logic ; metatheory ; calculus of constructions ; set theory ; th{\'e}orie des ensembles ; logique ; m{\'e}tath{\'e}orie ; Coq ; calcul des constructions}, Month = Oct, Number = {RR-9105}, Pages = {30}, Pdf = {https://hal.inria.fr/hal-01615123/file/RR-9105.pdf}, Title = {{Consistency of the Predicative Calculus of Cumulative Inductive Constructions (pCuIC)}}, Type = {Research Report}, Url = {https://hal.inria.fr/hal-01615123}, Year = {2017}, Bdsk-Url-1 = {https://hal.inria.fr/hal-01615123}} @misc{manginsozeauPEPM2018, Address = {Los Angeles, CA, USA}, Author = {Cyprien Mangin and Matthieu Sozeau}, Date-Added = {2017-12-14 07:32:08 +0000}, Date-Modified = {2018-02-07 10:56:11 +0000}, Howpublished = {Talk and Poster at PEPM 2018 - ACM SIGPLAN Workshop on Partial Evaluation and Program Manipulation}, Month = {January}, Read = {Oui}, Title = {Equations: From Clauses to Splittings to Functions}, Type = {slides}, Url = {https://popl18.sigplan.org/event/pepm-2018-equations-from-clauses-to-splittings-to-functions-poster-demo-talk-}, Year = {2018}, Bdsk-Url-1 = {https://popl18.sigplan.org/event/pepm-2018-equations-from-clauses-to-splittings-to-functions-poster-demo-talk-}} @mastersthesis{winterhalter:hal-01626651, Author = {Winterhalter, Th{\'e}o}, Date-Added = {2017-10-31 13:25:40 +0000}, Date-Modified = {2017-10-31 13:25:40 +0000}, Hal_Id = {hal-01626651}, Hal_Version = {v1}, Month = Sep, Pdf = 
{https://hal.inria.fr/hal-01626651/file/main.pdf}, School = {{ENS Cachan}}, Title = {{A Restricted Version of Reflection Compatible with Univalent Homotopy Type Theory}}, Url = {https://hal.inria.fr/hal-01626651}, Year = {2017}, Bdsk-Url-1 = {https://hal.inria.fr/hal-01626651}} @misc{sozeau.Coq/univs/GeocalTalk, Author = {Matthieu Sozeau}, Date-Added = {2017-11-14 19:17:29 +0000}, Date-Modified = {2017-11-14 19:18:44 +0000}, Howpublished = {Talk at G{\'e}ocal-LAC days, Nantes, France}, Keywords = {Coq Universes}, Month = {November 14th}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Cumulative_Inductive_Types_in_Coq-GeocalLAC-141117.pdf}, Title = {{Cumulative Inductive Types in Coq}}, Type = {slides}, Year = {2017}} @phdthesis{2016arXiv160605916B, Adsnote = {Provided by the SAO/NASA Astrophysics Data System}, Adsurl = {http://adsabs.harvard.edu/abs/2016arXiv160605916B}, Archiveprefix = {arXiv}, Author = {{Brunerie}, G.}, Date-Added = {2017-11-08 11:29:28 +0000}, Date-Modified = {2017-11-08 11:31:48 +0000}, Eprint = {1606.05916}, Journal = {ArXiv e-prints}, Keywords = {Mathematics - Algebraic Topology, Computer Science - Logic in Computer Science, Mathematics - Logic}, Month = jun, Primaryclass = {math.AT}, School = {{Laboratoire J.A. Dieudonn{\'e}}}, Title = {{On the homotopy groups of spheres in homotopy type theory}}, Url = {https://arxiv.org/abs/1606.05916}, Year = 2016, Bdsk-Url-1 = {https://arxiv.org/abs/1606.05916}} @inproceedings{DBLP:conf/popl/AltenkirchK16, Author = {Thorsten Altenkirch and Ambrus Kaposi}, Bibsource = {dblp computer science bibliography, http://dblp.org}, Biburl = {http://dblp.org/rec/bib/conf/popl/AltenkirchK16}, Booktitle = {{POPL} 2016, St. Petersburg, FL, USA, January 20 - 22, 2016}, Date-Added = {2017-11-07 13:49:23 +0000}, Date-Modified = {2017-11-07 13:49:23 +0000}, Doi = {10.1145/2837614.2837638}, Pages = {18--29}, Timestamp = {Thu, 15 Jun 2017 21:39:12 +0200}, Title = {Type theory in type theory using quotient inductive types}, Url = {http://doi.acm.org/10.1145/2837614.2837638}, Year = {2016}, Bdsk-Url-1 = {http://doi.acm.org/10.1145/2837614.2837638}, Bdsk-Url-2 = {http://dx.doi.org/10.1145/2837614.2837638}} @proceedings{DBLP:conf/popl/2016, Bibsource = {dblp computer science bibliography, http://dblp.org}, Biburl = {http://dblp.org/rec/bib/conf/popl/2016}, Date-Added = {2017-11-07 13:49:23 +0000}, Date-Modified = {2017-11-07 13:49:23 +0000}, Editor = {Rastislav Bod{\'{\i}}k and Rupak Majumdar}, Isbn = {978-1-4503-3549-2}, Publisher = {{ACM}}, Timestamp = {Wed, 09 Mar 2016 08:11:59 +0100}, Title = {Proceedings of the 43rd Annual {ACM} {SIGPLAN-SIGACT} Symposium on Principles of Programming Languages, {POPL} 2016, St. 
Petersburg, FL, USA, January 20 - 22, 2016}, Url = {http://dl.acm.org/citation.cfm?id=2837614}, Year = {2016}, Bdsk-Url-1 = {http://dl.acm.org/citation.cfm?id=2837614}} @inproceedings{boulier:hal-01445835, Address = {Paris, France}, Author = {Boulier, Simon and P{\'e}drot, Pierre-Marie and Tabareau, Nicolas}, Booktitle = {{Certified Programs and Proofs (CPP 2017)}}, Date-Added = {2017-11-07 13:42:15 +0000}, Date-Modified = {2017-11-07 13:42:15 +0000}, Doi = {10.1145/3018610.3018620}, Hal_Id = {hal-01445835}, Hal_Version = {v1}, Keywords = {Program translation ; Dependent type theory}, Month = Jan, Pages = {182 - 194}, Pdf = {https://hal.inria.fr/hal-01445835/file/main.pdf}, Title = {{The next 700 syntactical models of type theory}}, Url = {https://hal.inria.fr/hal-01445835}, Year = {2017}, Bdsk-Url-1 = {https://hal.inria.fr/hal-01445835}, Bdsk-Url-2 = {http://dx.doi.org/10.1145/3018610.3018620}} @article{2017arXiv170501163A, Adsnote = {Provided by the SAO/NASA Astrophysics Data System}, Adsurl = {http://adsabs.harvard.edu/abs/2017arXiv170501163A}, Archiveprefix = {arXiv}, Author = {{Anand}, A. and {Morrisett}, G.}, Date-Added = {2017-10-19 10:35:52 +0000}, Date-Modified = {2017-10-19 10:35:52 +0000}, Eprint = {1705.01163}, Journal = {ArXiv e-prints}, Keywords = {Computer Science - Logic in Computer Science}, Month = may, Primaryclass = {cs.LO}, Title = {{Revisiting Parametricity: Inductives and Uniformity of Propositions}}, Year = 2017} @inproceedings{AnandMCoqPL2018, Address = {Los Angeles, CA, USA}, Author = {{Anand}, A. and {Morrisett}, G.}, Booktitle = {CoqPL'18}, Date-Added = {2017-12-05 16:50:21 +0000}, Date-Modified = {2017-12-05 16:51:11 +0000}, Month = January, Primaryclass = {cs.LO}, Title = {{Revisiting Parametricity: Inductives and Uniformity of Propositions}}, Year = 2018} @inproceedings{forsterkunze16, Author = {Yannick Forster and Fabian Kunze}, Booktitle = {Coq Workshop 2016}, Date-Added = {2017-10-17 10:13:53 +0000}, Date-Modified = {2017-10-17 10:18:25 +0000}, Title = {{Verified Extraction from Coq to a Lambda-Calculus}}, Url = {https://www.ps.uni-saarland.de/~forster/coq-workshop-16/abstract-coq-ws-16.pdf}, Year = {2016}, Bdsk-Url-1 = {https://www.ps.uni-saarland.de/~forster/coq-workshop-16/abstract-coq-ws-16.pdf}} @unpublished{timanysoz17, Abstract = {{We present an internalization of the 2-groupoid interpretation of the calculus of construction that allows to realize the univalence axiom, proof irrelevance and reasoning modulo. 
As an example, we show that in our setting, the type of Church integers is equal to the inductive type of natural numbers.}}, Affiliation = {PI.R2 - INRIA Paris - Rocquencourt , Preuves, Programmes et Syst{\`e}mes - PPS , ASCOLA - INRIA - EMN , Laboratoire d'Informatique de Nantes Atlantique - LINA}, Author = {Timany, Amin and Sozeau, Matthieu}, Date-Added = {2017-10-11 22:53:08 +0000}, Date-Modified = {2017-10-11 22:53:51 +0000}, Language = {Anglais}, Month = October, Note = {{Submitted}}, Title = {{Cumulative Inductive Types in Coq}}, Url = {http://www.irif.fr/~sozeau/research/publications/drafts/Cumulative_Inductive_Types_in_Coq.pdf}, Year = {2017}, Bdsk-Url-1 = {http://hal.archives-ouvertes.fr/hal-00786589}, Bdsk-Url-2 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/groupoidint.pdf}, Bdsk-Url-3 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/internalizingITT.pdf}, Bdsk-Url-4 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Equations_for_Hereditary_Substitution_in_Leivants_Predicative_System_F:_a_case_study.pdf}, Bdsk-Url-5 = {http://dx.doi.org/10.4204/EPTCS.185}, Bdsk-Url-6 = {http://www.irif.fr/~sozeau/research/publications/drafts/Cumulative_Inductive_Types_in_Coq.pdf}} @inbook{Monin2013, Abstract = {When reasoning on formulas involving large-size inductively defined relations, such as the semantics of a real programming language, many steps require the inversion of a hypothesis. The built-in ``inversion'' tactic of Coq can then be used, but it suffers from severe controllability, maintenance and efficiency issues, which makes it unusable in practice in large applications.}, Address = {Berlin, Heidelberg}, Author = {Monin, Jean-Fran{\c{c}}ois and Shi, Xiaomu}, Booktitle = {Interactive Theorem Proving: 4th International Conference, ITP 2013, Rennes, France, July 22-26, 2013. 
Warren}, Date-Added = {2013-01-31 14:52:42 +0000}, Date-Modified = {2013-01-31 14:52:42 +0000}, Eprint = {1210.5658}, Month = {10}, Title = {Homotopy type theory and Voevodsky's univalent foundations}, Url = {http://arxiv.org/abs/1210.5658}, Year = {2012}, Bdsk-Url-1 = {http://arxiv.org/abs/1210.5658}} @article{DBLP:journals/corr/abs-0812-0409, Author = {Peter LeFanu Lumsdaine}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Date-Added = {2013-01-31 14:49:49 +0000}, Date-Modified = {2013-01-31 14:49:49 +0000}, Ee = {http://dx.doi.org/10.2168/LMCS-6(3:24)2010}, Journal = {Logical Methods in Computer Science}, Number = {3}, Title = {Weak omega-categories from intensional type theory}, Volume = {6}, Year = {2010}} @inproceedings{DBLP:conf/csl/AltenkirchR12, Author = {Thorsten Altenkirch and Ondrej Rypacek}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {CSL}, Crossref = {DBLP:conf/csl/2012}, Date-Added = {2013-01-31 14:47:40 +0000}, Date-Modified = {2013-01-31 14:47:40 +0000}, Ee = {http://dx.doi.org/10.4230/LIPIcs.CSL.2012.16}, Pages = {16-30}, Title = {A Syntactical Approach to Weak omega-Groupoids}, Year = {2012}} @misc{sozeau12forcingplugin, Author = {Sozeau, Matthieu and Tabareau, Nicolas and Jaber, Guilhem}, Date-Added = {2012-12-19 22:16:07 +0000}, Date-Modified = {2012-12-22 00:44:46 +0000}, Title = {{Forcing in Coq}}, Url = {http://github.com/mattam82/Forcing}, Year = {2012}, Bdsk-Url-1 = {http://github.com/mattam82/Forcing}} @article{chlipalappdt10, Author = {Adam Chlipala}, Date-Added = {2012-12-19 21:55:58 +0000}, Date-Modified = {2012-12-19 21:57:42 +0000}, Journal = {Journal of Formalized Reasoning}, Number = {2}, Pages = {1--93}, Title = {{An Introduction to Programming and Proving with Dependent Types in Coq}}, Url = {http://jfr.unibo.it/article/view/1978}, Volume = {3}, Year = {2010}, Bdsk-Url-1 = {http://jfr.unibo.it/article/view/1978}} @misc{sozeau.Coq/classes/ias12b, Author = {Matthieu Sozeau}, Date-Added = {2012-12-10 18:58:00 +0000}, Date-Modified = {2012-12-10 19:12:23 +0000}, Howpublished = {Tutorial given at the IAS, Princeton, NJ}, Keywords = {type classes coq}, Month = {December 10th}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Coq_with_Classes-IAS-101212.pdf}, Read = {Oui}, Title = {{Coq with Classes}}, Type = {slides}, Year = {2012}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @inproceedings{DBLP:conf/icfp/DevrieseP11, Author = {Dominique Devriese and Frank Piessens}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {ICFP}, Crossref = {DBLP:conf/icfp/2011}, Date-Added = {2012-12-10 18:17:12 +0000}, Date-Modified = {2012-12-10 18:17:12 +0000}, Ee = {http://doi.acm.org/10.1145/2034773.2034796}, Pages = {143-155}, Title = {On the bright side of type classes: instance arguments in Agda}, Year = {2011}} @inproceedings{DBLP:conf/icfp/GonthierZND11, Author = {Georges Gonthier and Beta Ziliani and Aleksandar Nanevski and Derek Dreyer}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {ICFP}, Crossref = {DBLP:conf/icfp/2011}, Date-Added = {2012-12-08 23:19:55 +0000}, Date-Modified = {2012-12-21 07:02:19 +0000}, Ee = {http://doi.acm.org/10.1145/2034773.2034798}, Pages = {163-175}, Title = {{How to make ad hoc proof automation less ad hoc}}, Year = {2011}} @misc{sozeau.Coq/univs/ias12, Author = {Matthieu Sozeau}, Date-Added = {2012-12-07 19:50:16 +0000}, Date-Modified = {2012-12-07 19:52:12 +0000}, Howpublished = {Talk given at the Institute for Advanced 
Study, Princeton, NJ}, Keywords = {Coq Universes}, Month = {December 5th}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Universe_Polymorphism_and_Inference_in_Coq-IAS-051212.pdf}, Read = {Oui}, Title = {{Universe Polymorphism and Inference in Coq}}, Type = {slides}, Year = {2012}} @article{DBLP:journals/mscs/SpittersW11, Author = {Bas Spitters and Eelis van der Weegen}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Date-Added = {2012-11-22 02:59:58 +0000}, Date-Modified = {2012-11-22 02:59:58 +0000}, Ee = {http://dx.doi.org/10.1017/S0960129511000119}, Journal = {Mathematical Structures in Computer Science}, Number = {4}, Pages = {795-825}, Title = {Type classes for mathematics in type theory}, Volume = {21}, Year = {2011}} @article{DBLP:journals/jar/AspertiCTZ07, Author = {Andrea Asperti and Claudio Sacerdoti Coen and Enrico Tassi and Stefano Zacchiroli}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Date-Added = {2012-11-22 02:58:41 +0000}, Date-Modified = {2012-11-22 02:58:41 +0000}, Ee = {http://dx.doi.org/10.1007/s10817-007-9070-5}, Journal = {J. Autom. Reasoning}, Number = {2}, Pages = {109-139}, Title = {User Interaction with the Matita Proof Assistant}, Volume = {39}, Year = {2007}} @article{DBLP:journals/tcs/HarperP91, Author = {Robert Harper and Robert Pollack}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Date-Added = {2012-11-22 02:56:35 +0000}, Date-Modified = {2012-11-22 02:56:35 +0000}, Ee = {http://dx.doi.org/10.1016/0304-3975(90)90108-T}, Journal = {Theor. Comput. Sci.}, Number = {1}, Pages = {107-136}, Title = {Type Checking with Universes}, Volume = {89}, Year = {1991}} @misc{sozeau.Coq/classes/Penn12, Author = {Matthieu Sozeau}, Date-Added = {2012-10-08 15:05:48 +0000}, Date-Modified = {2012-10-08 15:06:45 +0000}, Howpublished = {Lecture at the "Advanced Martial Arts in Coq" course at University of Pennsylvania}, Keywords = {type classes coq}, Month = {October}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Coq_with_Classes-Penn-081012.pdf}, Read = {Oui}, Title = {{Coq with Classes}}, Type = {slides}, Year = {2012}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @misc{sozeau.Coq/classes/IAS12, Author = {Matthieu Sozeau}, Date-Added = {2012-10-08 15:04:57 +0000}, Date-Modified = {2012-10-08 15:05:38 +0000}, Howpublished = {Talk given at the IAS seminar in Princeton, NJ}, Keywords = {type classes coq}, Month = {October}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Coq_with_Classes-IAS-031012.pdf}, Read = {Oui}, Title = {{Coq with Classes}}, Type = {slides}, Year = {2012}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @misc{sozeau.Coq/Equations/Tallinn12, Author = {Matthieu Sozeau}, Date-Added = {2012-10-08 14:52:15 +0000}, Date-Modified = {2012-10-08 14:53:30 +0000}, Howpublished = {Talk given at the Institute of Cybernetics, Tallinn, Estonia}, Keywords = {dependent types pattern matching unification}, Month = {May}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Equations:_A_Dependent_Pattern-Matching_Compiler-Tallinn-310512.pdf}, Read = {Oui}, Title = {Equations: A Dependent Pattern-Matching Suite}, Type = {slides}, Year = {2012}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @misc{CasteranSozeau-GentleIntro, Author = {Pierre Cast{\'e}ran and Matthieu Sozeau}, Date-Added = 
{2012-10-08 03:26:25 +0000}, Date-Modified = {2014-01-16 10:16:04 +0000}, Keywords = {Type classes}, Month = {May}, Title = {{A Gentle Introduction to Type Classes and Rewriting in Coq}}, Url = {http://www.labri.fr/perso/casteran/CoqArt/TypeClassesTut/typeclassestut.pdf}, Year = {2012}, Bdsk-Url-1 = {http://www.labri.fr/perso/casteran/CoqArt/TypeClassesTut/typeclassestut.pdf}} @inproceedings{geneves-tphols04, Address = {Salt Lake City, Utah, United States}, Author = {Pierre Genev\`es and Jean-Yves Vion-Dury}, Booktitle = {TPHOLs '04: Emerging Trends Proceedings of the 17th International Conference on Theorem Proving in Higher Order Logics}, Date-Added = {2012-09-26 15:58:36 +0000}, Date-Modified = {2012-09-26 15:58:36 +0000}, Location = {Park City, Utah, United States}, Month = {August}, Pages = {181-198}, Publisher = {University Of Utah}, Title = {{XPath} Formal Semantics and Beyond: A {Coq}-Based Approach}, Year = {2004}} @inproceedings{hal-00685150, Abstract = {{This paper presents an intuitionistic forcing translation for the Calculus of Constructions (CoC), a translation that corresponds to an internalization of the presheaf construction in CoC. Depending on the chosen set of forcing conditions, the resulting type system can be extended with extra logical principles. The translation is proven correct-in the sense that it preserves type checking-and has been implemented in Coq. As a case study, we show how the forcing translation on integers (which corresponds to the internalization of the topos of trees) allows us to define general inductive types in Coq, without the strict positivity condition. Using such general inductive types, we can construct a shallow embedding of the pure \lambda-calculus in Coq, without defining an axiom on the existence of an universal domain. 
We also build another forcing layer where we prove the negation of the continuum hypothesis.}}, Address = {Dubrovnik, Croatie}, Affiliation = {ASCOLA - INRIA - EMN , Laboratoire d'Informatique de Nantes Atlantique - LINA , PI.R2 - INRIA Paris - Rocquencourt}, Audience = {internationale}, Author = {Jaber, Guilhem and Tabareau, Nicolas and Sozeau, Matthieu}, Booktitle = {{Proceedings of LICS'12}}, Date-Added = {2012-05-19 17:11:27 +0200}, Date-Modified = {2012-10-07 19:26:33 +0000}, Hal_Id = {hal-00685150}, Language = {Anglais}, Month = Jun, Pdf = {http://hal.archives-ouvertes.fr/hal-00685150/PDF/forcing\_lics.pdf}, Title = {{Extending Type Theory with Forcing}}, Url = {http://hal.archives-ouvertes.fr/hal-00685150}, Year = {2012}, Bdsk-Url-1 = {http://hal.archives-ouvertes.fr/hal-00685150}} @misc{sozeau.Coq/classes/JFLA12, Author = {Matthieu Sozeau}, Date-Added = {2012-02-08 01:41:37 +0100}, Date-Modified = {2012-02-08 01:43:34 +0100}, Howpublished = {Lecture notes for a course given at JFLA'12 in Carnac, France}, Keywords = {type classes coq}, Month = {February}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Coq_with_Classes-JFLA-040212.pdf}, Read = {Oui}, Title = {{Coq with Classes}}, Type = {slides}, Year = {2012}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @misc{sozeau.Coq/classes/Kent11, Author = {Matthieu Sozeau}, Date-Added = {2012-01-31 18:32:35 +0100}, Date-Modified = {2012-01-31 18:33:16 +0100}, Howpublished = {Talk given at the PLAS seminar in Canterbury - UK}, Keywords = {type classes coq}, Month = {November}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Coq_with_Classes-Kent-071111.pdf}, Read = {Oui}, Title = {{Coq with Classes}}, Type = {slides}, Year = {2011}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @misc{sozeau.Coq/classes/IMDEA11, Author = {Matthieu Sozeau}, Date-Added = {2011-10-24 11:46:41 +0200}, Date-Modified = {2011-10-24 11:48:45 +0200}, Howpublished = {Talk given at the IMDEA seminar in Madrid - Spain}, Keywords = {type classes coq}, Month = {October}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Coq_with_Classes-IMDEA-201011.pdf}, Read = {Oui}, Title = {{Coq with Classes}}, Type = {slides}, Year = {2011}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @misc{sozeau.Coq/classes/PPS11, Author = {Matthieu Sozeau}, Date-Added = {2011-10-24 11:41:07 +0200}, Date-Modified = {2012-02-08 01:46:18 +0100}, Howpublished = {Talk given at the PPS days in Trouville - France}, Keywords = {type classes coq}, Month = {September}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Coq_with_Classes-PPS-050911.pdf}, Read = {Oui}, Title = {{Coq with Classes}}, Type = {slides}, Year = {2011}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @misc{sozeau.Coq/Equations/Shonan11, Author = {Matthieu Sozeau}, Date-Added = {2011-10-24 11:37:29 +0200}, Date-Modified = {2011-10-24 11:39:39 +0200}, Howpublished = {Talk given at the DTP meeting in Shonan - Japan}, Keywords = {dependent types pattern matching unification}, Month = {September}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Equations:_A_Dependent_Pattern-Matching_Compiler-Shonan-150911.pdf}, Read = {Oui}, Title = {Equations: A Dependent Pattern-Matching 
Suite}, Type = {slides}, Year = {2011}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @article{3108, Author = {Jael Kriener and Andy King}, Date-Added = {2011-09-26 13:11:11 +0200}, Date-Modified = {2011-09-26 13:11:11 +0200}, Journal = {Theory and Practice of Logic Programming}, Keywords = {abstract interpretation, backwards analysis, boolean formulae, constraints, cut, determinacy inference, Prolog}, Month = {July}, Number = {4-5}, Pages = {537-553}, Publication_Type = {article}, Publisher = {Cambridge University Press}, Submission_Id = {18724_1304501874}, Title = {Red{A}lert: {D}eterminacy {I}nference for {P}rolog}, Url = {http://www.cs.kent.ac.uk/pubs/2011/3108}, Volume = {11}, Year = {2011}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAeLi4vLi4vLi4vRG93bmxvYWRzL2NvbnRlbnQucGRmTxEBbgAAAAABbgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAyAgu/0grAAAACKJGC2NvbnRlbnQucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAWQ+N7Kse05AAAAAAAAAAAAAwACAAAJIAAAAAAAAAAAAAAAAAAAAAlEb3dubG9hZHMAABAACAAAyAgS3wAAABEACAAAyrHRGQAAAAEADAAIokYACKI2AACTlgACAC9NYWNpbnRvc2ggSEQ6VXNlcnM6AG1hdDoARG93bmxvYWRzOgBjb250ZW50LnBkZgAADgAYAAsAYwBvAG4AdABlAG4AdAAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAH1VzZXJzL21hdC9Eb3dubG9hZHMvY29udGVudC5wZGYAABMAAS8AABUAAgAK//8AAAAIAA0AGgAkAEUAAAAAAAACAQAAAAAAAAAFAAAAAAAAAAAAAAAAAAABtw==}, Bdsk-Url-1 = {http://www.cs.kent.ac.uk/pubs/2011/3108}} @inproceedings{SPIWACK:2010:INRIA-00502500:1, Abstract = {{The Coq proof assistant is a large development, a lot of which happens to be more or less dependent on the type of tactics. To be able to perform tweaks in this type more easily in the future, we propose an API for building tactics which doesn't need to expose the type of tactics and yet has a fairly small amount of primitives. This API accompanies an entirely new implementation of the core tactic engine of Coq which aims at handling more gracefully existential variables (aka. metavariables) in proofs - like in more recent proof assistants like Matita and Agda2. We shall, then, leverage this newly acquired independence of the concrete type of tactics from the API to add backtracking abilities.}}, Address = {Edinburgh, Royaume-Uni}, Affiliation = {Laboratoire d'informatique de l'{\'e}cole polytechnique - LIX - CNRS : UMR7161 - Polytechnique - X - TypiCal - INRIA Saclay - Ile de France - INRIA - CNRS : UMR - Polytechnique - X}, Audience = {internationale}, Author = {Spiwack, Arnaud}, Booktitle = {{Proof Search in Type Theory}}, Date-Added = {2011-09-26 12:57:27 +0200}, Date-Modified = {2011-09-26 12:57:27 +0200}, Hal_Id = {inria-00502500}, Language = {Anglais}, Pdf = {http://hal.inria.fr/inria-00502500/PDF/tactics.pdf}, Title = {{An abstract type for constructing tactics in Coq}}, Url = {http://hal.inria.fr/inria-00502500/en/}, Year = {2010}, Bdsk-Url-1 = {http://hal.inria.fr/inria-00502500/en/}} @article{Coen:2007:TSS:1243520.1243826, Acmid = {1243826}, Address = {Amsterdam, The Netherlands, The Netherlands}, Author = {Coen, Claudio Sacerdoti and Tassi, Enrico and Zacchiroli, Stefano}, Date-Added = {2011-09-26 12:52:51 +0200}, Date-Modified = {2011-09-26 12:52:51 +0200}, Doi = {10.1016/j.entcs.2006.09.026}, Issn = {1571-0661}, Issue = {2}, Journal = {Electron. Notes Theor. Comput. Sci.}, Keywords = {Interactive Theorem Proving, Small Step Semantics, Tacticals}, Month = {May}, Numpages = {18}, Pages = {125--142}, Publisher = {Elsevier Science Publishers B. 
V.}, Title = {Tinycals: Step by Step Tacticals}, Url = {http://dl.acm.org/citation.cfm?id=1243520.1243826}, Volume = {174}, Year = {2007}, Bdsk-Url-1 = {http://dl.acm.org/citation.cfm?id=1243520.1243826}, Bdsk-Url-2 = {http://dx.doi.org/10.1016/j.entcs.2006.09.026}} @misc{sozeau.Coq/Equations/Chalmers11, Author = {Matthieu Sozeau}, Date-Added = {2011-08-18 10:30:13 +0200}, Date-Modified = {2011-08-18 10:31:24 +0200}, Howpublished = {Talk given at University of Chalmers - Gothenburg}, Keywords = {dependent types pattern matching unification}, Month = {January}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Equations:_A_Dependent_Pattern-Matching_Compiler-Chalmers-190111.pdf}, Read = {Oui}, Title = {Equations: A Dependent Pattern-Matching Compiler}, Type = {slides}, Year = {2011}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @inproceedings{DBLP:conf/tphol/Courant02, Author = {Judica{\"e}l Courant}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TPHOLs}, Crossref = {DBLP:conf/tphol/2002}, Date-Added = {2011-02-24 16:59:19 +0100}, Date-Modified = {2011-02-24 16:59:43 +0100}, Ee = {http://link.springer.de/link/service/series/0558/bibs/2410/24100115.htm}, Pages = {115-130}, Title = {{Explicit Universes for the Calculus of Constructions}}, Year = {2002}} @proceedings{DBLP:conf/tphol/2002, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TPHOLs}, Date-Added = {2011-02-24 16:59:19 +0100}, Date-Modified = {2014-01-30 06:28:47 +0000}, Isbn = {3-540-44039-9}, Publisher = {Springer}, Series = {LNCS}, Title = {Theorem Proving in Higher Order Logics, 15th International Conference, TPHOLs 2002, Hampton, VA, USA, August 20-23, 2002, Proceedings}, Volume = {2410}, Year = {2002}} @article{AspertiCompact, Author = {Andrea Asperti and Wilmer Ricciotti and Claudio Sacerdoti Coen and Enrico Tassi}, Date-Added = {2011-02-24 16:56:48 +0100}, Date-Modified = {2011-02-24 16:58:02 +0100}, Journal = {Journal Sadhana}, Pages = {71--144}, Title = {A compact kernel for the calculus of inductive constructions}, Volume = {34}, Year = {2009}} @article{HerbelinUniverses, Author = {Hugo Herbelin}, Date-Added = {2011-02-24 16:51:56 +0100}, Date-Modified = {2011-02-24 16:54:03 +0100}, Note = {Manuscript}, Title = {{Type Inference with Algebraic Universes in the Calculus of Inductive Constructions}}, Url = {http://pauillac.inria.fr/~herbelin/publis/univalgcci.pdf}, Year = {2005}, Bdsk-Url-1 = {http://pauillac.inria.fr/~herbelin/publis/univalgcci.pdf}} @inproceedings{tphols2000-Balaa, Author = {Antonia Balaa and Yves Bertot}, Crossref = {tphols2000}, Date-Added = {2011-01-18 10:48:47 +0100}, Date-Modified = {2011-01-18 10:48:47 +0100}, Pages = {1--16}, Title = {Fix-point Equations for Well-Founded Recursion in Type Theory}} @inproceedings{BerVKom08, Address = {New York, NY, USA}, Author = {Yves Bertot and Vladimir Komendantsky}, Booktitle = {PPDP '08: Proceedings of the 10th international ACM SIGPLAN conference on Principles and practice of declarative programming}, Date-Added = {2011-01-18 10:48:05 +0100}, Date-Modified = {2011-01-18 10:48:05 +0100}, Doi = {http://doi.acm.org/10.1145/1389449.1389461}, Isbn = {978-1-60558-117-0}, Location = {Valencia, Spain}, Pages = {89--96}, Publisher = {ACM}, Title = {Fixed point semantics and partial recursion in Coq}, Url = {http://hal.inria.fr/inria-00190975/}, X-Editorial-Board = {yes}, X-International-Audience = {yes}, X-Proceedings = {yes}, Year = {2008}, Bdsk-Url-1 = 
{http://hal.inria.fr/inria-00190975/}, Bdsk-Url-2 = {http://doi.acm.org/10.1145/1389449.1389461}} @misc{sozeau.Coq/Equations/TYPES10, Author = {Matthieu Sozeau}, Date-Added = {2010-10-18 16:02:31 +0200}, Date-Modified = {2010-10-18 16:11:59 +0200}, Howpublished = {Talk given at TYPES'10, Warsaw, Poland}, Keywords = {dependent types pattern matching unification}, Month = {October}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Equations:_A_Dependent_Pattern-Matching_Compiler-TYPES10-151010.pdf}, Read = {Oui}, Title = {Equations: A Dependent Pattern-Matching Compiler}, Type = {slides}, Year = {2010}} @misc{sozeau.Coq/Equations/UPenn10, Author = {Matthieu Sozeau}, Date-Added = {2010-10-14 12:26:42 +0200}, Date-Modified = {2010-10-14 12:30:00 +0200}, Howpublished = {Talk given at the University of Pennsylvania PLClub}, Keywords = {dependent types pattern matching unification}, Month = {September}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Equations:_A_Dependent_Pattern-Matching_Compiler-UPenn-240910.pdf}, Read = {Oui}, Title = {Equations: A Dependent Pattern-Matching Compiler}, Type = {slides}, Year = {2010}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @misc{sozeau.Coq/Equations/ITP10slides, Author = {Matthieu Sozeau}, Date-Added = {2010-10-14 12:25:13 +0200}, Date-Modified = {2010-10-18 16:12:29 +0200}, Howpublished = {Talk given at ITP'10, Edinburgh, UK}, Keywords = {dependent types pattern matching unification}, Month = {July}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Equations:_A_Dependent_Pattern-Matching_Compiler-ITP10-140710.pdf}, Read = {Oui}, Title = {Equations: A Dependent Pattern-Matching Compiler}, Type = {slides}, Year = {2010}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @misc{sozeau.Coq/Elaborations/DTP10, Author = {Matthieu Sozeau}, Date-Added = {2010-07-13 01:47:24 +0100}, Date-Modified = {2010-07-13 01:50:36 +0100}, Howpublished = {Invited Talk at DTP'10 - Edinburgh, UK}, Keywords = {dependent types pattern matching unification}, Month = {July}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Elaborations_in_Type_Theory-DTP10-100710.pdf}, Read = {Oui}, Title = {Elaborations in Type Theory}, Type = {slides}, Year = {2010}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @inproceedings{BartheGZ09, Author = {Gilles Barthe and Benjamin Gr\'egoire and Santiago Zanella}, Booktitle = {36th symposium Principles of Programming Languages}, Date-Added = {2010-02-18 14:02:30 -0500}, Date-Modified = {2010-02-18 14:02:30 -0500}, Month = jan, Publisher = {ACM Press}, Title = {Formal Certification of Code-Based Cryptographic Proofs}, Topic = {CRYPTO}, Url = {http://www-sop.inria.fr/everest/personnel/Benjamin.Gregoire/Publi/popl09.pdf}, Year = 2009, Bdsk-Url-1 = {http://www-sop.inria.fr/everest/personnel/Benjamin.Gregoire/Publi/popl09.pdf}} @webpage{CertiCrypt, Date-Added = {2010-02-18 14:01:39 -0500}, Date-Modified = {2010-02-18 14:01:47 -0500}, Url = {http://www.msr-inria.inria.fr/projects/sec/certicrypt/index.html}, Bdsk-Url-1 = {http://www.msr-inria.inria.fr/projects/sec/certicrypt/index.html}} @inproceedings{1432087, Address = {Berlin, Heidelberg}, Author = {Barthe, Gilles and Gr\'{e}goire, Benjamin and Riba, Colin}, Booktitle = {CSL '08: Proceedings of the 22nd international workshop on 
Computer Science Logic}, Date-Added = {2010-02-11 13:00:46 -0500}, Date-Modified = {2010-02-11 13:00:46 -0500}, Doi = {http://dx.doi.org/10.1007/978-3-540-87531-4_35}, Isbn = {978-3-540-87530-7}, Location = {Bertinoro, Italy}, Pages = {493--507}, Publisher = {Springer-Verlag}, Title = {Type-Based Termination with Sized Products}, Year = {2008}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/978-3-540-87531-4_35}} @inproceedings{BartheGP06, Author = {Gilles Barthe and Benjamin Gr{\'e}goire and Fernando Pastawski}, Booktitle = {Logic for Programming, Artificial Intelligence, and Reasoning, 13th International Conference, LPAR 2006, Phnom Penh, Cambodia, November 13-17, 2006, Proceedings}, Date-Added = {2010-02-04 01:02:40 -0500}, Date-Modified = {2010-02-04 01:02:40 -0500}, Pages = {257-271}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {CIC\^: Type-Based Termination of Recursive Definitions in the Calculus of Inductive Constructions}, Topic = {TYPETHEORY}, Volume = {4246}, Year = {2006}} @inproceedings{BartheGR08, Author = {Gilles Barthe and Benjamin Gr\'egoire and Colin Riba}, Booktitle = {17th EACSL Annual Conference on Computer Science Logic, 15th-19th September 2008, Bertinoro, Italy}, Date-Added = {2010-02-04 00:24:51 -0500}, Date-Modified = {2010-02-04 00:24:51 -0500}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Type-Based Termination with Sized Products}, Topic = {TYPETHEORY}, Year = {2008}} @proceedings{DBLP:conf/ppdp/2008, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {PPDP}, Date-Added = {2010-02-04 00:00:07 -0500}, Date-Modified = {2010-02-04 00:00:07 -0500}, Editor = {Sergio Antoy and Elvira Albert}, Isbn = {978-1-60558-117-0}, Publisher = {ACM}, Title = {Proceedings of the 10th International ACM SIGPLAN Conference on Principles and Practice of Declarative Programming, July 15-17, 2008, Valencia, Spain}, Year = {2008}} @inproceedings{DBLP:conf/types/BertotK08, Author = {Yves Bertot and Ekaterina Komendantskaya}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Crossref = {DBLP:conf/types/2008}, Date-Added = {2010-02-03 23:59:08 -0500}, Date-Modified = {2010-02-03 23:59:08 -0500}, Ee = {http://dx.doi.org/10.1007/978-3-642-02444-3_14}, Pages = {220-236}, Title = {Using Structural Recursion for Corecursion}, Year = {2008}} @proceedings{DBLP:conf/types/2008, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Date-Added = {2010-02-03 23:59:08 -0500}, Date-Modified = {2010-02-03 23:59:08 -0500}, Editor = {Stefano Berardi and Ferruccio Damiani and Ugo de'Liguoro}, Ee = {http://dx.doi.org/10.1007/978-3-642-02444-3}, Isbn = {978-3-642-02443-6}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Types for Proofs and Programs, International Conference, TYPES 2008, Torino, Italy, March 26-29, 2008, Revised Selected Papers}, Volume = {5497}, Year = {2009}} @webpage{ForMath, Author = {ForMath}, Date-Added = {2010-02-03 11:57:20 -0500}, Date-Modified = {2010-02-03 12:38:48 -0500}, Title = {Formalization of Mathematics}, Url = {http://www.formath.cs.ru.nl/}, Bdsk-Url-1 = {http://www.formath.cs.ru.nl/}} @inproceedings{DBLP:conf/tphol/GarillotGMR09, Author = {Fran\c{c}ois Garillot and Georges Gonthier and Assia Mahboubi and Laurence Rideau}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TPHOLs}, Crossref = {DBLP:conf/tphol/2009}, Date-Added = {2010-02-03 11:52:24 -0500}, Date-Modified = {2010-02-03 11:52:24 -0500}, Ee = 
{http://dx.doi.org/10.1007/978-3-642-03359-9_23}, Pages = {327-342}, Title = {Packaging Mathematical Structures}, Year = {2009}} @phdthesis{tassi:phd, Author = {Enrico Tassi}, Date-Added = {2010-02-02 11:54:16 -0500}, Date-Modified = {2010-02-02 11:56:17 -0500}, Month = {March}, Pdf = {http://www.msr-inria.inria.fr/~gares/thesis.pdf}, Read = {Oui}, School = {University of Bologna}, Title = {Interactive Theorem Provers: issues faced as a user and tackled as a developer}, Url = {http://www.cs.unibo.it/pub/TR/UBLCS/ABSTRACTS/2008.bib?ncstrl.cabernet//BOLOGNA-UBLCS-2008-03}, Year = {2008}, Bdsk-Url-1 = {http://www.cs.unibo.it/pub/TR/UBLCS/ABSTRACTS/2008.bib?ncstrl.cabernet//BOLOGNA-UBLCS-2008-03}} @article{Wilson:2009auto, Author = {Sean Wilson and Jacques Fleuriot and Alan Smaill}, Date-Added = {2010-02-02 11:24:21 -0500}, Date-Modified = {2010-02-02 11:26:51 -0500}, Journal = {Special Issue of Fundamenta Informaticae on Dependently Typed Programming}, Note = {To appear}, Rating = {4}, Read = {Oui}, Title = {Automation for Dependently Typed Functional Programming}, Url = {http://homepages.inf.ed.ac.uk/s0091720/Wilson2009AutomationForDependentlyTypedFunctionProgramming.pdf}, Year = {2009}, Bdsk-Url-1 = {http://homepages.inf.ed.ac.uk/s0091720/Wilson2009AutomationForDependentlyTypedFunctionProgramming.pdf}} @webpage{GoNative, Date-Added = {2010-02-02 11:11:41 -0500}, Date-Modified = {2010-02-02 11:13:25 -0500}, Key = {GoNative}, Title = {GoNative}, Url = {http://sos.cse.lehigh.edu/gonative/}, Bdsk-Url-1 = {http://sos.cse.lehigh.edu/gonative/}} @inproceedings{chlipala:icfp09, Address = {New York, NY, USA}, Author = {Chlipala, Adam and Malecha, Gregory and Morrisett, Greg and Shinnar, Avraham and Wisnesky, Ryan}, Booktitle = {ICFP '09: Proceedings of the 14th ACM SIGPLAN international conference on Functional programming}, Date-Added = {2010-02-01 16:22:26 -0500}, Date-Modified = {2010-02-01 16:22:45 -0500}, Doi = {http://doi.acm.org/10.1145/1596550.1596565}, Isbn = {978-1-60558-332-7}, Location = {Edinburgh, Scotland}, Pages = {79--90}, Publisher = {ACM}, Title = {Effective interactive proofs for higher-order imperative programs}, Url = {http://portal.acm.org/ft_gateway.cfm?id=1596565&type=mp4&coll=Portal&dl=ACM&CFID=74245116&CFTOKEN=67325694}, Year = {2009}, Bdsk-Url-1 = {http://portal.acm.org/ft_gateway.cfm?id=1596565&type=mp4&coll=Portal&dl=ACM&CFID=74245116&CFTOKEN=67325694}, Bdsk-Url-2 = {http://doi.acm.org/10.1145/1596550.1596565}} @article{DBLP:journals/mscs/AspertiGN09, Author = {Andrea Asperti and Herman Geuvers and Raja Natarajan}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Date-Added = {2010-01-30 18:05:30 -0500}, Date-Modified = {2010-01-30 18:05:30 -0500}, Ee = {http://dx.doi.org/10.1017/S0960129509990041}, Journal = {Mathematical Structures in Computer Science}, Number = {5}, Pages = {877-896}, Title = {Social processes, program verification and all that}, Volume = {19}, Year = {2009}} @inproceedings{DBLP:conf/sosp/KleinEHACDEEKNSTW09, Author = {Gerwin Klein and Kevin Elphinstone and Gernot Heiser and June Andronick and David Cock and Philip Derrin and Dhammika Elkaduwe and Kai Engelhardt and Rafal Kolanski and Michael Norrish and Thomas Sewell and Harvey Tuch and Simon Winwood}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {SOSP}, Crossref = {DBLP:conf/sosp/2009}, Date-Added = {2010-01-30 15:51:01 -0500}, Date-Modified = {2010-01-30 15:51:01 -0500}, Ee = {http://doi.acm.org/10.1145/1629575.1629596}, Pages = {207-220}, Title = {seL4: formal verification of an OS 
kernel}, Year = {2009}} @proceedings{DBLP:conf/sosp/2009, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {SOSP}, Date-Added = {2010-01-30 15:51:01 -0500}, Date-Modified = {2010-01-30 15:51:01 -0500}, Editor = {Jeanna Neefe Matthews and Thomas E. Anderson}, Isbn = {978-1-60558-752-3}, Publisher = {ACM}, Title = {Proceedings of the 22nd ACM Symposium on Operating Systems Principles 2009, SOSP 2009, Big Sky, Montana, USA, October 11-14, 2009}, Year = {2009}} @misc{spitters:algclass, Author = {Bas Spitters and Eelis van der Weegen}, Date-Added = {2010-01-29 13:59:35 -0500}, Date-Modified = {2010-01-29 14:01:16 -0500}, Howpublished = {Submitted}, Keywords = {Type classes}, Month = {January}, Title = {Developing the algebraic hierarchy using type classes}, Url = {http://www.xs4all.nl/~weegen/eelis/research/math-classes/}, Year = {2010}, Bdsk-Url-1 = {http://www.xs4all.nl/~weegen/eelis/research/math-classes/}} @inproceedings{DBLP:conf/plpv/Megacz07, Author = {Adam Megacz}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {PLPV}, Crossref = {DBLP:conf/plpv/2007}, Date-Added = {2010-01-28 16:27:55 -0500}, Date-Modified = {2010-01-28 16:27:55 -0500}, Ee = {http://doi.acm.org/10.1145/1292597.1292601}, Pages = {11-20}, Title = {A coinductive monad for prop-bounded recursion}, Year = {2007}} @inproceedings{swierstra:hoare-state-monad, Author = {Wouter Swierstra}, Booktitle = {Theorem Proving in Higher Order Logics, 22nd International Conference, TPHOLs 2009}, Date-Added = {2010-01-28 16:21:27 -0500}, Date-Modified = {2010-01-28 16:21:27 -0500}, Editor = {Tobias Nipkow and Christian Urban}, Publisher = {Springer Verlag}, Series = {Lecture Notes in Computer Science}, Title = {A Hoare Logic for the State Monad}, Volume = {5674}, Year = {2009}} @inproceedings{DBLP:conf/plpv/StumpDPSS09, Author = {Aaron Stump and Morgan Deters and Adam Petcher and Todd Schiller and Timothy W. Simpson}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {PLPV}, Crossref = {DBLP:conf/plpv/2009}, Date-Added = {2010-01-28 15:23:09 -0500}, Date-Modified = {2010-01-28 15:23:09 -0500}, Ee = {http://doi.acm.org/10.1145/1481848.1481856}, Pages = {49-58}, Title = {Verified programming in Guru}, Year = {2009}} @proceedings{DBLP:conf/plpv/2009, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {PLPV}, Date-Added = {2010-01-28 15:23:09 -0500}, Date-Modified = {2010-01-28 15:23:09 -0500}, Editor = {Thorsten Altenkirch and Todd D. 
Millstein}, Isbn = {978-1-60558-330-3}, Publisher = {ACM}, Title = {Proceedings of the 3rd ACM Workshop Programming Languages meets Program Verification, PLPV 2009, Savannah, GA, USA, January 20, 2009}, Year = {2009}} @unpublished{alti:pisigma-new, Author = {Thorsten Altenkirch and Nils Anders Danielsson and Andres L\"oh and Nicolas Oury}, Date-Added = {2010-01-28 12:02:15 -0500}, Date-Modified = {2010-01-29 14:03:07 -0500}, Month = {November}, Note = {Accepted at FLOPS 2010}, Title = {PiSigma: Dependent Types Without the Sugar}, Year = {2009}} @misc{sozeau.Coq/Equations/PPS10, Author = {Matthieu Sozeau}, Date-Added = {2010-01-22 16:34:15 +0100}, Date-Modified = {2010-02-01 17:38:56 -0500}, Howpublished = {Talk given at PPS - Universit{\'e} Paris 7}, Keywords = {dependent types pattern matching unification}, Month = {January}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Equations:_A_Dependent_Pattern-Matching_Compiler-PPS-130110.pdf}, Read = {Oui}, Title = {Equations: A Dependent Pattern-Matching Compiler}, Type = {slides}, Year = {2010}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @article{DBLP:journals/entcs/KirchnerM07, Author = {Florent Kirchner and C{\'e}sar Mu{\~n}oz}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Date-Added = {2010-01-04 16:46:24 +0100}, Date-Modified = {2010-01-04 17:47:40 +0100}, Ee = {http://dx.doi.org/10.1016/j.entcs.2006.10.057}, Journal = {Electr. Notes Theor. Comput. Sci.}, Number = {11}, Pages = {47-58}, Title = {{PVS\#: Streamlined Tacticals for PVS}}, Volume = {174}, Year = {2007}} @inproceedings{DBLP:conf/frocos/LescuyerC09, Author = {St{\'e}phane Lescuyer and Sylvain Conchon}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {FroCos}, Crossref = {DBLP:conf/frocos/2009}, Date-Added = {2009-12-30 20:38:35 +0100}, Date-Modified = {2009-12-30 20:38:35 +0100}, Ee = {http://dx.doi.org/10.1007/978-3-642-04222-5_18}, Pages = {287-303}, Title = {Improving Coq Propositional Reasoning Using a Lazy CNF Conversion Scheme}, Year = {2009}} @proceedings{DBLP:conf/frocos/2009, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {FroCoS}, Date-Added = {2009-12-30 20:38:35 +0100}, Date-Modified = {2009-12-30 20:38:35 +0100}, Editor = {Silvio Ghilardi and Roberto Sebastiani}, Ee = {http://dx.doi.org/10.1007/978-3-642-04222-5}, Isbn = {978-3-642-04221-8}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Frontiers of Combining Systems, 7th International Symposium, FroCoS 2009, Trento, Italy, September 16-18, 2009. Proceedings}, Volume = {5749}, Year = {2009}} @article{lescuyer10jfla, Author = {St{\'e}phane Lescuyer}, Date-Added = {2009-12-30 20:22:04 +0100}, Date-Modified = {2009-12-30 20:24:19 +0100}, Journal = {21{\`e}mes Journ{\'e}es Francophones des Langages Applicatifs}, Title = {Conteneurs de premi{\`e}re classe en Coq}, Year = {2010}} @article{braibantKleen, Abstract = {Rewriting is an essential tool for computer-based reasoning, both automated and assisted. It is so because rewriting is a general notion that permits to model a wide range of problems and provides means to effectively solve them. In a proof assistant, rewriting can be used to replace terms in arbitrary contexts, generalizing the usual equational reasoning to reasoning modulo arbitrary relations. This can be done provided the necessary proofs that functions appearing in goals are congruent with respect to specific relations. 
We present a new implementation of generalized rewriting in the Coq proof assistant, making essential use of the expressive power of dependent types and a recently implemented type class mechanism. The tactic improves on and generalizes previous versions by supporting natively higher-order functions, polymorphism and subrelations. The type class system inspired from Haskell provides a perfect interface between the user and such tactics, making them easily extensible.}, Author = {Thomas Braibant and Damien Pous}, Date-Added = {2009-12-30 20:16:51 +0100}, Date-Modified = {2009-12-30 20:18:59 +0100}, Journal = {1st Coq Workshop proceedings}, Month = {August}, Read = {Oui}, Title = {{A Tactic for Deciding Kleene Algebras}}, Url = {http://hal.archives-ouvertes.fr/docs/00/38/30/70/PDF/CoqTacticForKleeneAlgebras.pdf}, Year = {2009}, Bdsk-Url-1 = {http://hal.archives-ouvertes.fr/docs/00/38/30/70/PDF/CoqTacticForKleeneAlgebras.pdf}} @inproceedings{dockins09:sa, Author = {Robert Dockins and Aquinas Hobor and Andrew W. Appel}, Booktitle = {The 7th Asian Symposium on Programming Languages and Systems}, Date-Added = {2009-12-30 19:15:11 +0100}, Date-Modified = {2009-12-30 19:15:11 +0100}, Pages = {{to appear}}, Publisher = {Springer ENTCS}, Title = {A Fresh Look at Separation Algebras and Share Accounting}, Url = {http://msl.cs.princeton.edu/fresh-sa.pdf}, Year = 2009, Bdsk-Url-1 = {http://msl.cs.princeton.edu/fresh-sa.pdf}} @article{sozeau.Coq/rewrite/JFR, Abstract = {Rewriting is an essential tool for computer-based reasoning, both automated and assisted. It is so because rewriting is a general notion that permits to model a wide range of problems and provides means to effectively solve them. In a proof assistant, rewriting can be used to replace terms in arbitrary contexts, generalizing the usual equational reasoning to reasoning modulo arbitrary relations. This can be done provided the necessary proofs that functions appearing in goals are congruent with respect to specific relations. We present a new implementation of generalized rewriting in the Coq proof assistant, making essential use of the expressive power of dependent types and a recently implemented type class mechanism. The tactic improves on and generalizes previous versions by supporting natively higher-order functions, polymorphism and subrelations. 
The type class system inspired from Haskell provides a perfect interface between the user and such tactics, making them easily extensible.}, Author = {Matthieu Sozeau}, Date-Added = {2009-12-27 19:24:23 +0100}, Date-Modified = {2010-01-04 11:57:16 +0100}, Journal = {Journal of Formalized Reasoning}, Keywords = {rewriting setoid typeclasses}, Month = {December}, Number = {1}, Pages = {41-62}, Pdf = {http://jfr.cib.unibo.it/article/view/1574/1077}, Read = {Oui}, Title = {{A New Look at Generalized Rewriting in Type Theory}}, Volume = {2}, Year = {2009}} @article{journals/mscs/BoveC05, Author = {Ana Bove and Venanzio Capretta}, Bibdate = {2005-09-16}, Bibsource = {DBLP, http://dblp.uni-trier.de/db/journals/mscs/mscs15.html#BoveC05}, Date-Added = {2009-12-25 23:04:30 +0100}, Date-Modified = {2009-12-25 23:04:30 +0100}, Journal = {Mathematical Structures in Computer Science}, Number = {4}, Pages = {671--708}, Title = {Modelling general recursion in type theory}, Url = {http://dx.doi.org/10.1017/S0960129505004822}, Volume = {15}, Year = {2005}, Bdsk-Url-1 = {http://dx.doi.org/10.1017/S0960129505004822}} @inproceedings{conf/types/BarrasCGHS08, Author = {Bruno Barras and Pierre Corbineau and Benjamin Gr{\'e}goire and Hugo Herbelin and Jorge Luis Sacchini}, Bibdate = {2009-06-13}, Bibsource = {DBLP, http://dblp.uni-trier.de/db/conf/types/types2008.html#BarrasCGHS08}, Booktitle = {TYPES}, Date-Added = {2009-12-25 23:00:06 +0100}, Date-Modified = {2009-12-25 23:03:06 +0100}, Editor = {Stefano Berardi and Ferruccio Damiani and Ugo de'Liguoro}, Isbn = {978-3-642-02443-6}, Pages = {32--48}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {A New Elimination Rule for the Calculus of Inductive Constructions}, Url = {http://dx.doi.org/10.1007/978-3-642-02444-3}, Volume = {5497}, Year = {2008}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/978-3-642-02444-3}} @inproceedings{GADTcomplete, Abstract = {GADTs have proven to be an invaluable language extension, for ensuring data invariants and program correctness among others. Unfortunately, they pose a tough problem for type inference: we lose the principal-type property, which is necessary for modular type inference.}, Address = {New York, NY, USA}, Author = {Schrijvers, Tom and Jones, Simon P. 
and Sulzmann, Martin and Vytiniotis, Dimitrios}, Booktitle = {ICFP '09: Proceedings of the 14th ACM SIGPLAN international conference on Functional programming}, Citeulike-Article-Id = {6347287}, Citeulike-Linkout-0 = {http://portal.acm.org/citation.cfm?id=1596550.1596599}, Citeulike-Linkout-1 = {http://dx.doi.org/10.1145/1596550.1596599}, Date-Added = {2009-12-25 22:53:21 +0100}, Date-Modified = {2009-12-25 22:53:44 +0100}, Doi = {10.1145/1596550.1596599}, Isbn = {978-1-60558-332-7}, Keywords = {haskell, type checking}, Location = {Edinburgh, Scotland}, Pages = {341--352}, Posted-At = {2009-12-10 09:21:58}, Priority = {2}, Publisher = {ACM}, Title = {Complete and decidable type inference for GADTs}, Url = {http://dx.doi.org/10.1145/1596550.1596599}, Year = {2009}, Bdsk-Url-1 = {http://dx.doi.org/10.1145/1596550.1596599}} @inproceedings{DBLP:conf/tphol/AspertiRCT09, Author = {Andrea Asperti and Wilmer Ricciotti and Claudio Sacerdoti Coen and Enrico Tassi}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TPHOLs}, Crossref = {DBLP:conf/tphol/2009}, Date-Added = {2009-12-23 15:52:59 +0100}, Date-Modified = {2009-12-23 15:52:59 +0100}, Ee = {http://dx.doi.org/10.1007/978-3-642-03359-9_8}, Pages = {84-98}, Title = {Hints in Unification}, Year = {2009}} @inproceedings{DBLP:conf/tphol/McCreight09, Author = {Andrew McCreight}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TPHOLs}, Crossref = {DBLP:conf/tphol/2009}, Date-Added = {2009-12-23 04:22:15 +0100}, Date-Modified = {2009-12-23 04:22:15 +0100}, Ee = {http://dx.doi.org/10.1007/978-3-642-03359-9_24}, Pages = {343-358}, Title = {Practical Tactics for Separation Logic}, Year = {2009}} @inproceedings{DBLP:conf/fpca/Augustsson85, Author = {Lennart Augustsson}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {FPCA}, Date-Added = {2009-11-08 23:36:58 -0500}, Date-Modified = {2009-11-08 23:38:21 -0500}, Doi = {10.1007/3-540-15975-4}, Pages = {368-381}, Title = {Compiling Pattern Matching}, Year = {1985}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/3-540-15975-4}} @book{IntrotoHAT, Date-Added = {2009-09-08 14:35:45 -0400}, Date-Modified = {2009-09-08 14:35:45 -0400}, Editor = {M. J. C. Gordon and T. F. Melham}, Publisher = {Cambridge University Press}, Title = {Introduction to {HOL}: A theorem proving environment for higher order logic}, Url = {http://www.dcs.glasgow.ac.uk/~tfm/HOLbook.html}, Year = {1993}, Bdsk-Url-1 = {http://www.dcs.glasgow.ac.uk/~tfm/HOLbook.html}} @misc{Constable86implementingmathematics, Author = {Robert L. Constable and Stuart F. Allen and S. F. Allen and H. M. Bromley and W. R. Cleaveland and J. F. Cremer and R. W. Harper and Douglas J. Howe and T. B. Knoblock and N. P. Mendler and P. Panangaden and Scott F. Smith and James T. Sasaki and S. F. Smith}, Date-Added = {2009-09-08 14:29:23 -0400}, Date-Modified = {2009-09-08 14:29:23 -0400}, Title = {Implementing Mathematics with The Nuprl Proof Development System}, Year = {1986}} @misc{sozeau.Coq/rewrite/types09, Abstract = {Rewriting is an essential tool for computer-based reasoning, both automated and assisted. It is so because rewriting is a general notion that permits to model a wide range of problems and provides means to effectively solve them. In a proof assistant, rewriting can be used to replace terms in arbitrary contexts, generalizing the usual equational reasoning to reasoning modulo arbitrary relations. This can be done provided the necessary proofs that functions appearing in goals are congruent with respect to specific relations. 
We present a new implementation of generalized rewriting in the Coq proof assistant, making essential use of the expressive power of dependent types and a recently implemented type class mechanism. The tactic improves on and generalizes previous versions by supporting natively higher-order functions, polymorphism and subrelations. The type class system inspired from Haskell provides a perfect interface between the user and such tactics, making them easily extensible.}, Address = {Aussois, France}, Author = {Matthieu Sozeau}, Date-Added = {2009-08-22 11:11:35 +0200}, Date-Modified = {2009-08-22 11:19:56 +0200}, Howpublished = {TYPES'09}, Keywords = {rewriting setoid typeclasses}, Month = {May}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/A_New_Look_at_Generalized_Rewriting_in_Type_Theory-TYPES09-130509.pdf}, Read = {Oui}, Title = {{A New Look at Generalized Rewriting in Type Theory}}, Type = {slides}, Year = {2009}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @misc{sozeau.Coq/rewrite/coq09, Abstract = {Rewriting is an essential tool for computer-based reasoning, both automated and assisted. It is so because rewriting is a general notion that permits to model a wide range of problems and provides means to effectively solve them. In a proof assistant, rewriting can be used to replace terms in arbitrary contexts, generalizing the usual equational reasoning to reasoning modulo arbitrary relations. This can be done provided the necessary proofs that functions appearing in goals are congruent with respect to specific relations. We present a new implementation of generalized rewriting in the Coq proof assistant, making essential use of the expressive power of dependent types and a recently implemented type class mechanism. The tactic improves on and generalizes previous versions by supporting natively higher-order functions, polymorphism and subrelations. 
The type class system inspired from Haskell provides a perfect interface between the user and such tactics, making them easily extensible.}, Address = {Munich, Germany}, Author = {Matthieu Sozeau}, Date-Added = {2009-08-22 11:09:30 +0200}, Date-Modified = {2009-08-22 11:19:37 +0200}, Howpublished = {Talk given at the First Coq Workshop}, Keywords = {rewriting setoid typeclasses}, Month = {August}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/A_New_Look_at_Generalized_Rewriting_in_Type_Theory-Coq09-210809.pdf}, Read = {Oui}, Title = {{A New Look at Generalized Rewriting in Type Theory}}, Type = {slides}, Year = {2009}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @unpublished{wilson:supporting, Author = {Sean Wilson and Jacques Fleuriot and Alan Smaill}, Date-Added = {2009-04-20 11:56:56 -0400}, Date-Modified = {2010-02-02 11:28:27 -0500}, Note = {Draft}, Title = {Supporting Dependently Typed Functional Programming with Testing and User-Assisted Proof Automation}, Url = {http://homepages.inf.ed.ac.uk/s0091720/Wilson2009TAPDraft.pdf}, Year = {2010}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAvLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL1dpbHNvbjIwMDlUQVBEcmFmdC5wZGZPEQGiAAAAAAGiAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADGq5gWSCsAAAA2d8gWV2lsc29uMjAwOVRBUERyYWZ0LnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/Wuck45LkAAAAAAAAAAAADAAMAAAkgAAAAAAAAAAAAAAAAAAAABnBhcGVycwAQAAgAAMare/YAAAARAAgAAMk41qkAAAABABAANnfIAA7btAAH+TgAAJDnAAIAPU1hY2ludG9zaCBIRDpVc2VyczptYXQ6cmVzZWFyY2g6cGFwZXJzOldpbHNvbjIwMDlUQVBEcmFmdC5wZGYAAA4ALgAWAFcAaQBsAHMAbwBuADIAMAAwADkAVABBAFAARAByAGEAZgB0AC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgAwVXNlcnMvbWF0L3Jlc2VhcmNoL3BhcGVycy9XaWxzb24yMDA5VEFQRHJhZnQucGRmABMAAS8AABUAAgAK//8AAAAIAA0AGgAkAFYAAAAAAAACAQAAAAAAAAAFAAAAAAAAAAAAAAAAAAAB/A==}, Bdsk-Url-1 = {http://homepages.inf.ed.ac.uk/s0091720/Wilson2009TAPDraft.pdf}} @inproceedings{Luttik97specificationof, Author = {Sebastiaan P. Luttik and Eelco Visser}, Booktitle = {2nd International Workshop on the Theory and Practice of Algebraic Specifications (ASF+SDF'97), Electronic Workshops in Computing}, Date-Added = {2009-04-20 11:54:16 -0400}, Date-Modified = {2009-04-20 11:55:54 -0400}, Publisher = {Springer-Verlag}, Read = {Oui}, Title = {Specification of Rewriting Strategies}, Url = {http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.21.4049}, Year = {1997}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAhLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL0xWOTcucGRmTxEBagAAAAABagACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAxquYFkgrAAAANnfICExWOTcucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/1ezJOOSdAAAAAAAAAAAAAwADAAAJIAAAAAAAAAAAAAAAAAAAAAZwYXBlcnMAEAAIAADGq3v2AAAAEQAIAADJONaNAAAAAQAQADZ3yAAO27QAB/k4AACQ5wACAC9NYWNpbnRvc2ggSEQ6VXNlcnM6bWF0OnJlc2VhcmNoOnBhcGVyczpMVjk3LnBkZgAADgASAAgATABWADkANwAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAIlVzZXJzL21hdC9yZXNlYXJjaC9wYXBlcnMvTFY5Ny5wZGYAEwABLwAAFQACAAr//wAAAAgADQAaACQASAAAAAAAAAIBAAAAAAAAAAUAAAAAAAAAAAAAAAAAAAG2}, Bdsk-Url-1 = {http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.21.4049}} @article{Lengrand:2006td, Abstract = {Based on natural deduction, Pure Type Systems ( ) can express a wide range of type theories. 
In order to express proof-search in such theories, we introduce the Pure Type Sequent Calculi ( ) by enriching a sequent calculus due to Herbelin, adapted to proof-search and strongly related to natural deduction. are equipped with a normalisation procedure, adapted from Herbelin's and defined by local rewrite rules as in Cut-elimination, using explicit substitutions. It satisfies Subject Reduction and it is confluent. A is logically equivalent to its corresponding , and the former is strongly normalising if and only if the latter is. We show how the conversion rules can be incorporated inside logical rules (as in syntax-directed rules for type checking), so that basic proof-search tactics in type theory are merely the root-first application of our inference rules. Keywords: Type theory, , sequent calculus, proof-search, strong normalisation. }, Author = {Lengrand, St{\'e}phane and Dyckhoff, Roy and McKinna, James}, Date-Added = {2009-04-03 17:45:00 -0400}, Date-Modified = {2009-04-03 17:45:10 -0400}, Journal = {Computer Science Logic}, M3 = {10.1007/11874683{\_}29}, Pages = {441--455}, Read = {Oui}, Title = {A Sequent Calculus for Type Theory}, Ty = {CHAPTER}, Url = {http://dx.doi.org/10.1007/11874683_29}, Year = {2006}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAmLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL0NTTDA2TG9uZy5wZGZPEQF+AAAAAAF+AAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADGq5gWSCsAAAA2d8gNQ1NMMDZMb25nLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/VMsk45HgAAAAAAAAAAAADAAMAAAkgAAAAAAAAAAAAAAAAAAAABnBhcGVycwAQAAgAAMare/YAAAARAAgAAMk41mgAAAABABAANnfIAA7btAAH+TgAAJDnAAIANE1hY2ludG9zaCBIRDpVc2VyczptYXQ6cmVzZWFyY2g6cGFwZXJzOkNTTDA2TG9uZy5wZGYADgAcAA0AQwBTAEwAMAA2AEwAbwBuAGcALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASACdVc2Vycy9tYXQvcmVzZWFyY2gvcGFwZXJzL0NTTDA2TG9uZy5wZGYAABMAAS8AABUAAgAK//8AAAAIAA0AGgAkAE0AAAAAAAACAQAAAAAAAAAFAAAAAAAAAAAAAAAAAAABzw==}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/11874683_29}} @inproceedings{351258, Address = {New York, NY, USA}, Author = {Hinze, Ralf}, Booktitle = {ICFP '00: Proceedings of the fifth ACM SIGPLAN international conference on Functional programming}, Date-Added = {2009-04-03 17:41:34 -0400}, Date-Modified = {2014-07-29 16:07:05 +0000}, Doi = {http://doi.acm.org/10.1145/351240.351258}, Isbn = {1-58113-202-6}, Pages = {186--197}, Publisher = {ACM}, Title = {Deriving backtracking monad transformers}, Url = {http://portal.acm.org/ft_gateway.cfm?id=351258&type=pdf&coll=Portal&dl=GUIDE&CFID=28832090&CFTOKEN=51771456}, Year = {2000}, Bdsk-Url-1 = {http://portal.acm.org/ft_gateway.cfm?id=351258&type=pdf&coll=Portal&dl=GUIDE&CFID=28832090&CFTOKEN=51771456}, Bdsk-Url-2 = {http://doi.acm.org/10.1145/351240.351258}} @inproceedings{1086390, Address = {New York, NY, USA}, Author = {Kiselyov,, Oleg and Shan,, Chung-chieh and Friedman,, Daniel P. 
and Sabry,, Amr}, Booktitle = {ICFP '05: Proceedings of the tenth ACM SIGPLAN international conference on Functional programming}, Date-Added = {2009-04-03 17:39:46 -0400}, Date-Modified = {2009-04-03 17:39:46 -0400}, Doi = {http://doi.acm.org/10.1145/1086365.1086390}, Isbn = {1-59593-064-7}, Location = {Tallinn, Estonia}, Pages = {192--203}, Publisher = {ACM}, Title = {Backtracking, interleaving, and terminating monad transformers: (functional pearl)}, Url = {http://portal.acm.org/ft_gateway.cfm?id=1086390&type=pdf&coll=Portal&dl=GUIDE&CFID=28832090&CFTOKEN=51771456}, Year = {2005}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAsLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL0xvZ2ljVC1pY2ZwMjAwNS5wZGZPEQGWAAAAAAGWAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADGq5gWSCsAAAA2d8gTTG9naWNULWljZnAyMDA1LnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/V+Mk45J8AAAAAAAAAAAADAAMAAAkgAAAAAAAAAAAAAAAAAAAABnBhcGVycwAQAAgAAMare/YAAAARAAgAAMk41o8AAAABABAANnfIAA7btAAH+TgAAJDnAAIAOk1hY2ludG9zaCBIRDpVc2VyczptYXQ6cmVzZWFyY2g6cGFwZXJzOkxvZ2ljVC1pY2ZwMjAwNS5wZGYADgAoABMATABvAGcAaQBjAFQALQBpAGMAZgBwADIAMAAwADUALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASAC1Vc2Vycy9tYXQvcmVzZWFyY2gvcGFwZXJzL0xvZ2ljVC1pY2ZwMjAwNS5wZGYAABMAAS8AABUAAgAK//8AAAAIAA0AGgAkAFMAAAAAAAACAQAAAAAAAAAFAAAAAAAAAAAAAAAAAAAB7Q==}, Bdsk-Url-1 = {http://portal.acm.org/ft_gateway.cfm?id=1086390&type=pdf&coll=Portal&dl=GUIDE&CFID=28832090&CFTOKEN=51771456}, Bdsk-Url-2 = {http://doi.acm.org/10.1145/1086365.1086390}} @misc{sozeau.Coq/Programing/PPS09, Address = {Paris, France}, Author = {Matthieu Sozeau}, Date-Added = {2009-02-25 23:28:21 +0100}, Date-Modified = {2009-02-25 23:30:25 +0100}, Howpublished = {PPS Seminar}, Keywords = {dependent types, Coq, Program}, Month = {26th February}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Programming_with_Dependent_Types_in_Coq-PPS-260209.pdf}, Read = {Oui}, Title = {{Programming with Dependent Types in Coq}}, Type = {slides}, Year = {2009}} @inproceedings{DBLP:conf/mpc/AbelCD08, Author = {Andreas Abel and Thierry Coquand and Peter Dybjer}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {MPC}, Crossref = {DBLP:conf/mpc/2008}, Date-Added = {2009-02-06 10:13:33 +0100}, Date-Modified = {2009-02-06 10:13:59 +0100}, Ee = {http://dx.doi.org/10.1007/978-3-540-70594-9_4}, Pages = {29-56}, Pdf = {http://www.tcs.informatik.uni-muenchen.de/~abel/mpc08.pdf}, Title = {Verifying a Semantic beta-eta-Conversion Test for Martin-L{\"o}f Type Theory}, Year = {2008}} @proceedings{DBLP:conf/mpc/2008, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {MPC}, Date-Added = {2009-02-06 10:13:33 +0100}, Date-Modified = {2009-02-06 10:13:33 +0100}, Editor = {Philippe Audebaud and Christine Paulin-Mohring}, Isbn = {978-3-540-70593-2}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Mathematics of Program Construction, 9th International Conference, MPC 2008, Marseille, France, July 15-18, 2008. Proceedings}, Volume = {5133}, Year = {2008}} @article{Ganzinger:2004iq, Abstract = {Indexing data structures have a crucial impact on the performance of automated theorem provers. Examples are discrimination trees, which are like tries where terms are seen as strings and common prefixes are shared, and substitution trees, where terms keep their tree structure and all common contexts can be shared. 
Here we describe a new indexing data structure, called context trees, where, by means of a limited kind of context variables, common subterms also can be shared, even if they occur below different function symbols. Apart from introducing the concept, we also provide evidence for its practical value. We show how context trees can be implemented by means of abstract machine instructions. Experiments with benchmarks for forward matching show that our implementation is competitive with tightly coded current state-of-the-art implementations of the other main techniques. In particular, space consumption of context trees is significantly less than for other index structures.}, Author = {Ganzinger, Harald and Nieuwenhuis, Robert and Nivela, Pilar}, Date-Added = {2009-02-05 17:25:06 +0100}, Date-Modified = {2009-02-05 17:25:06 +0100}, Day = {01}, Journal = {Journal of Automated Reasoning}, M3 = {10.1023/B:JARS.0000029963.64213.ac}, Month = {02}, Number = {2}, Pages = {103--120}, Title = {Fast Term Indexing with Coded Context Trees}, Ty = {JOUR}, Url = {http://dx.doi.org/10.1023/B:JARS.0000029963.64213.ac}, Volume = {32}, Year = {2004}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAiLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL2Z0aWN0LnBkZk8RAW4AAAAAAW4AAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarmBZIKwAAADZ3yAlmdGljdC5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP9okyTjlWQAAAAAAAAAAAAMAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAyTjXSQAAAAEAEAA2d8gADtu0AAf5OAAAkOcAAgAwTWFjaW50b3NoIEhEOlVzZXJzOm1hdDpyZXNlYXJjaDpwYXBlcnM6ZnRpY3QucGRmAA4AFAAJAGYAdABpAGMAdAAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAI1VzZXJzL21hdC9yZXNlYXJjaC9wYXBlcnMvZnRpY3QucGRmAAATAAEvAAAVAAIACv//AAAACAANABoAJABJAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAbs=}, Bdsk-Url-1 = {http://dx.doi.org/10.1023/B:JARS.0000029963.64213.ac}} @article{Riazanov:2008cv, Abstract = {Code trees {$[$}8{$]$} is an indexing technique used for implementing several indexed operations on terms in the theorem prover Vampire {$[$}5{$]$}. Code trees offer greater flexibility than discrimination trees. In this paper we review a new, considerably faster, version of code trees based on a different representation of the query term. We also introduce a partially adaptive version of code trees. 
}, Author = {Riazanov, Alexandre and Voronkov, Andrei}, Date-Added = {2009-02-05 17:22:08 +0100}, Date-Modified = {2009-02-05 17:22:08 +0100}, Journal = {Logics in Artificial Intelligence}, M3 = {10.1007/3-540-40006-0{\_}15}, Pages = {209--223}, Title = {Partially Adaptive Code Trees}, Ty = {CHAPTER}, Url = {http://dx.doi.org/10.1007/3-540-40006-0_15}, Year = {2008}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAuLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL3BhcmFkYXB0Y29kZXRyZWVzLnBkZk8RAZ4AAAAAAZ4AAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarmBZIKwAAADZ3yBVwYXJhZGFwdGNvZGV0cmVlcy5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP90iyTjmAgAAAAAAAAAAAAMAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAyTjX8gAAAAEAEAA2d8gADtu0AAf5OAAAkOcAAgA8TWFjaW50b3NoIEhEOlVzZXJzOm1hdDpyZXNlYXJjaDpwYXBlcnM6cGFyYWRhcHRjb2RldHJlZXMucGRmAA4ALAAVAHAAYQByAGEAZABhAHAAdABjAG8AZABlAHQAcgBlAGUAcwAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAL1VzZXJzL21hdC9yZXNlYXJjaC9wYXBlcnMvcGFyYWRhcHRjb2RldHJlZXMucGRmAAATAAEvAAAVAAIACv//AAAACAANABoAJABVAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAfc=}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/3-540-40006-0_15}} @article{Graf:1995tg, Abstract = {Sophisticated maintenance and retrieval of first-order predicate calculus terms is a major key to efficient automated reasoning. We present a new indexing technique, which accelerates the speed of the basic retrieval operations such as finding complementary literals in resolution theorem proving or finding critical pairs during completion. Subsumption and reduction are also supported. Moreover, the new technique not only provides maintenance and efficient retrieval of terms but also of idem-potent substitutions. Substitution trees achieve maximal search speed paired with minimal memory requirements in various experiments and outperform traditional techniques such as path indexing, discrimination tree indexing, and abstraction trees by combining their advantages and adding some new features.}, Author = {Graf, Peter}, Date-Added = {2009-02-05 17:19:52 +0100}, Date-Modified = {2009-02-09 20:03:51 +0100}, Journal = {Rewriting Techniques and Applications}, M3 = {10.1007/3-540-59200-8{\_}52}, Pages = {117--131}, Rating = {3}, Read = {Oui}, Title = {Substitution tree indexing}, Ty = {CHAPTER}, Url = {http://dx.doi.org/10.1007/3-540-59200-8_52}, Year = {1995}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAnLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL3N1YnRyZWVpbmQucGRmTxEBggAAAAABggACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAxquYFkgrAAAANnfIDnN1YnRyZWVpbmQucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/3ZnJOOYNAAAAAAAAAAAAAwADAAAJIAAAAAAAAAAAAAAAAAAAAAZwYXBlcnMAEAAIAADGq3v2AAAAEQAIAADJONf9AAAAAQAQADZ3yAAO27QAB/k4AACQ5wACADVNYWNpbnRvc2ggSEQ6VXNlcnM6bWF0OnJlc2VhcmNoOnBhcGVyczpzdWJ0cmVlaW5kLnBkZgAADgAeAA4AcwB1AGIAdAByAGUAZQBpAG4AZAAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAKFVzZXJzL21hdC9yZXNlYXJjaC9wYXBlcnMvc3VidHJlZWluZC5wZGYAEwABLwAAFQACAAr//wAAAAgADQAaACQATgAAAAAAAAIBAAAAAAAAAAUAAAAAAAAAAAAAAAAAAAHU}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/3-540-59200-8_52}} @article{McCune:1992lg, Abstract = {This article addresses the problem of indexing and retrieving first-order predicate calculus terms in the context of automated deduction programs. The four retrieval operations of concern are to find variants, generalizations, instances, and terms that unify with a given term. Discrimination-tree indexing is reviewed, and several variations are presented. 
The path-indexing method is also reviewed. Experiments were conducted on large sets of terms to determine how the properties of the terms affect the performance of the two indexing methods. Results of the experiments are presented.}, Author = {McCune, William}, Date-Added = {2009-02-05 17:15:44 +0100}, Date-Modified = {2009-02-09 21:01:54 +0100}, Day = {01}, Journal = {Journal of Automated Reasoning}, M3 = {10.1007/BF00245458}, Month = {10}, Number = {2}, Pages = {147--167}, Rating = {3}, Read = {Oui}, Title = {Experiments with discrimination-tree indexing and path indexing for term retrieval}, Ty = {JOUR}, Url = {http://dx.doi.org/10.1007/BF00245458}, Volume = {9}, Year = {1992}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxArLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL21jY3VuZWluZGV4aW5nLnBkZk8RAZIAAAAAAZIAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarmBZIKwAAADZ3yBJtY2N1bmVpbmRleGluZy5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP9xgyTjl3QAAAAAAAAAAAAMAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAyTjXzQAAAAEAEAA2d8gADtu0AAf5OAAAkOcAAgA5TWFjaW50b3NoIEhEOlVzZXJzOm1hdDpyZXNlYXJjaDpwYXBlcnM6bWNjdW5laW5kZXhpbmcucGRmAAAOACYAEgBtAGMAYwB1AG4AZQBpAG4AZABlAHgAaQBuAGcALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASACxVc2Vycy9tYXQvcmVzZWFyY2gvcGFwZXJzL21jY3VuZWluZGV4aW5nLnBkZgATAAEvAAAVAAIACv//AAAACAANABoAJABSAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAeg=}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/BF00245458}} @inproceedings{sozeau.Coq/classes/ctpc, Abstract = {Les classes de types (Type Classes) ont remport{\'e} un grand succ{\`e}s dans le langage de programmation fonctionnelle Haskell et l'assistant de preuve Isabelle, comme solution permettant de surcharger des notations et sp{\'e}cifier avec des structures abstraites en quantifiant sur les contextes. Nous pr{\'e}sentons un plongement superficiel des classes de types dans une th{\'e}orie des types d{\'e}pendants qui fait des classes des objets de premi{\`e}re classe et supporte directement les extensions les plus populaires de Haskell. L'impl{\'e}mentation du syst{\`e}me est l{\'e}g{\`e}re et s'appuie sur des constructions existantes du langage qui sont simplement raffin{\'e}es pour obtenir un ensemble bien int{\'e}gr{\'e} {\`a} l'environnement Coq. On pr{\'e}sente sur des exemples comment ce syst{\`e}me peut {\^e}tre utilis{\'e} pour la programmation et la preuve. }, Address = {Toulouse, France}, Author = {Matthieu Sozeau and Nicolas Oury}, Booktitle = {Journ{\'e}es Nationales du GDR GPL}, Date-Added = {2009-01-30 17:08:36 +0100}, Date-Modified = {2009-07-15 22:19:46 +0200}, Keywords = {Type classes}, Month = {January}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Classes_de_types_de_premiere_classe.pdf}, Read = {Oui}, Slides = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/First-Class_Type_Classes-GDR_GPL-300109.pdf}, Title = {{Classes de types de premi{\`e}re classe}}, Year = {2009}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/978-3-540-71067-7_23}} @misc{sozeau.Coq/classes/GDRGPL09, Abstract = {Les classes de types ont d{\'e}montr{\'e} leur utilit{\'e} pour la programmation polymorphique de haut-niveau en Haskell et la construction de hi{\'e}rarchies de structures math{\'e}matiques dans l'assistant de preuve Isabelle. Ce m{\'e}canisme de surcharge tr{\`e}s versatile peut se g{\'e}n{\'e}raliser aux types d{\'e}pendants et offre alors de nouvelles formes de g{\'e}n{\'e}ricit{\'e}. 
Il permet aussi d'int{\'e}grer une forme de programmation logique au moment du typage. On pr{\'e}sentera une impl{\'e}mentation l{\'e}g{\`e}re d'un syst{\`e}me de classes de types de premi{\`e}re classe dans Coq, bas{\'e}e sur un enrichissement de constructions existantes du langage Gallina. On illustrera ce nouvel outil par des exemples mettant en oeuvre les classes pour programmer, prouver et organiser les d{\'e}veloppements en Coq. }, Address = {ENSEEIHT - Toulouse}, Author = {Matthieu Sozeau}, Date-Added = {2009-01-30 17:02:43 +0100}, Date-Modified = {2009-01-30 17:05:51 +0100}, Howpublished = {Talk given at the {GDR GPL}}, Keywords = {Type classes}, Month = {30th January}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/First-Class_Type_Classes-GDR_GPL-300109.pdf}, Read = {Oui}, Title = {{F}irst-{C}lass {T}ype {C}lasses}, Type = {slides}, Year = {2009}} @misc{sozeau.Coq/Programing/Orleans09, Address = {Orl{\'e}ans, France}, Author = {Matthieu Sozeau}, Date-Added = {2009-01-26 18:19:58 +0100}, Date-Modified = {2009-01-26 18:21:45 +0100}, Howpublished = {LIFO Seminar}, Month = {26th January}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/An_Environment_for_Proving_and_Programming-LIFO-260109.pdf}, Read = {Oui}, Title = {{An Environment for Proving and Programming}}, Type = {slides}, Year = {2009}} @article{pottier-njc-00, Author = {Fran{\c c}ois Pottier}, Date-Added = {2009-01-26 11:45:26 +0100}, Date-Modified = {2009-01-26 11:45:43 +0100}, Journal = njc, Month = nov, Number = {4}, Pages = {312--347}, Title = {A Versatile Constraint-Based Type Inference System}, Url = {http://cristal.inria.fr/~fpottier/publis/fpottier-njc-2000.ps.gz}, Volume = {7}, Year = {2000}, Bdsk-Url-1 = {http://cristal.inria.fr/~fpottier/publis/fpottier-njc-2000.ps.gz}} @misc{sozeau.Coq/Programing/Nijmegen09, Address = {Nijmegen, Netherlands}, Author = {Matthieu Sozeau}, Date-Added = {2009-01-21 11:59:32 +0100}, Date-Modified = {2009-01-26 18:20:32 +0100}, Howpublished = {ICIS Seminar}, Month = {20th January}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/An_Environment_for_Programming_with_Dependent_Types-Nijmegen-200109.pdf}, Read = {Oui}, Title = {{An Environment for Programming with Dependent Types}}, Type = {slides}, Year = {2009}} @article{sozeau.Coq/rewrite/newlook, Abstract = {Rewriting is an essential tool for computer-based reasoning, both automated and assisted. It is so because rewriting is a general notion that permits to model a wide range of problems and provides means to effectively solve them. In a proof assistant, rewriting can be used to replace terms in arbitrary contexts, generalizing the usual equational reasoning to reasoning modulo arbitrary relations. This can be done provided the necessary proofs that functions appearing in goals are congruent with respect to specific relations. We present a new implementation of generalized rewriting in the Coq proof assistant, making essential use of the expressive power of dependent types and a recently implemented type class mechanism. The tactic improves on and generalizes previous versions by supporting natively higher-order functions, polymorphism and subrelations. 
The type class system inspired from Haskell provides a perfect interface between the user and such tactics, making them easily extensible.}, Author = {Matthieu Sozeau}, Date-Added = {2009-01-19 11:51:48 +0100}, Date-Modified = {2009-08-22 11:13:06 +0200}, Journal = {1st Coq Workshop proceedings}, Keywords = {rewriting setoid typeclasses}, Month = {August}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/A_New_Look_at_Generalized_Rewriting_in_Type_Theory.pdf}, Read = {Oui}, Slides = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/A_New_Look_at_Generalized_Rewriting_in_Type_Theory-Coq09-210809.pdf}, Title = {{A New Look at Generalized Rewriting in Type Theory}}, Year = {2009}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @article{werner:ProofIrrelevanceJAR, Abstract = {We present a type theory with some proof-irrelevance built into the conversion rule. We argue that this feature is particularly useful when type theory is used as the logical formalism underlying a theorem prover. We also show a close relation with the subset types of the theory of PVS. Finally we show that in these theories, because of the additional extensionality, the axiom of choice implies the decidability of equality, that is, almost classical logic. }, Author = {Werner, Benjamin}, Date-Added = {2009-01-13 23:35:01 +0100}, Date-Modified = {2009-01-13 23:36:30 +0100}, Journal = {Journal of Automated Reasoning}, M3 = {10.1007/11814771{\_}49}, Pages = {604--618}, Title = {{On the Strength of Proof-Irrelevant Type Theories}}, Ty = {CHAPTER}, Url = {http://dx.doi.org/10.1007/11814771_49}, Year = {2006}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/11814771_49}} @misc{sozeau.Coq/Programing/nottingham08, Address = {Nottingham, UK}, Author = me, Date-Added = {2009-01-12 22:40:32 +0100}, Date-Modified = {2009-01-12 22:43:01 +0100}, Howpublished = {Talk given at the {F}oundations of {P}rogramming seminar}, Month = {15th February}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Program-ing_in_Coq-nottingham-150208.pdf}, Read = {Oui}, Title = {{P}rogram-ing in {C}oq}, Type = {slides}, Year = {2008}} @article{hoare:challenge, Address = {New York, NY, USA}, Author = {Tony Hoare}, Date-Added = {2009-01-06 12:51:14 +0100}, Date-Modified = {2009-01-06 12:51:23 +0100}, Doi = {http://doi.acm.org/10.1145/602382.602403}, Issn = {0004-5411}, Journal = {J.
ACM}, Number = {1}, Pages = {63--69}, Publisher = {ACM}, Title = {The verifying compiler: A grand challenge for computing research}, Volume = {50}, Year = {2003}, Bdsk-Url-1 = {http://doi.acm.org/10.1145/602382.602403}} @inproceedings{nanevsky08icfp, Address = {Victoria, BC, Canada}, Author = {Aleksandar Nanevski and Greg Morrisett and Avraham Shinnar and Paul Govereau and Lars Birkedal}, Booktitle = {ICFP '08: Proceeding of the 13th ACM SIGPLAN International Conference on Functional Programming}, Date-Added = {2008-12-31 12:33:11 +0100}, Date-Modified = {2008-12-31 12:35:11 +0100}, Doi = {http://doi.acm.org/10.1145/1411204.1411237}, Isbn = {978-1-59593-919-7}, Pages = {229--240}, Publisher = {ACM}, Read = {Oui}, Title = {Ynot: dependent types for imperative programs}, Year = {2008}, Bdsk-Url-1 = {http://doi.acm.org/10.1145/1411204.1411237}} @techreport{Salvesen91cr, Author = {Anne Salvesen}, Date-Added = {2008-12-26 00:02:42 +0100}, Date-Modified = {2008-12-26 00:05:36 +0100}, Note = {Submitted for journal publication}, Read = {Oui}, Title = {{The Church-Rosser Property for Pure Type Systems with βη-reduction}}, Url = {http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.39.860}, Year = {1991}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxArLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzLzEwLjEuMS4zOS44NjAwLnBkZk8RAZIAAAAAAZIAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarmBZIKwAAADZ3yBIxMC4xLjEuMzkuODYwMC5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP9SWyTjkZQAAAAAAAAAAAAMAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAyTjWVQAAAAEAEAA2d8gADtu0AAf5OAAAkOcAAgA5TWFjaW50b3NoIEhEOlVzZXJzOm1hdDpyZXNlYXJjaDpwYXBlcnM6MTAuMS4xLjM5Ljg2MDAucGRmAAAOACYAEgAxADAALgAxAC4AMQAuADMAOQAuADgANgAwADAALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASACxVc2Vycy9tYXQvcmVzZWFyY2gvcGFwZXJzLzEwLjEuMS4zOS44NjAwLnBkZgATAAEvAAAVAAIACv//AAAACAANABoAJABSAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAeg=}, Bdsk-Url-1 = {http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.39.860}} @phdthesis{goguen.PhD, Author = {Healfdene Goguen}, Date-Added = {2008-12-25 23:31:59 +0100}, Date-Modified = {2008-12-26 00:00:58 +0100}, Read = {Oui}, School = {University of Edinburgh}, Title = {{A Typed Operational Semantics for Type Theory}}, Url = {http://www.lfcs.inf.ed.ac.uk/reports/94/ECS-LFCS-94-304/index.html}, Year = {1994}, Bdsk-Url-1 = {http://www.lfcs.inf.ed.ac.uk/reports/94/ECS-LFCS-94-304/index.html}} @inproceedings{DBLP:conf/tlca/Goguen95, Author = {Healfdene Goguen}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TLCA}, Crossref = {DBLP:conf/tlca/1995}, Date-Added = {2008-12-25 23:28:28 +0100}, Date-Modified = {2008-12-25 23:28:28 +0100}, Pages = {186-200}, Title = {Typed Operational Semantics}, Year = {1995}} @proceedings{DBLP:conf/tlca/1995, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TLCA}, Date-Added = {2008-12-25 23:28:28 +0100}, Date-Modified = {2008-12-25 23:28:28 +0100}, Editor = {Mariangiola Dezani-Ciancaglini and Gordon D. 
Plotkin}, Isbn = {3-540-59048-X}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Typed Lambda Calculi and Applications, Second International Conference on Typed Lambda Calculi and Applications, TLCA '95, Edinburgh, UK, April 10-12, 1995, Proceedings}, Volume = {902}, Year = {1995}} @phdthesis{alti:phd93, Author = {Thorsten Altenkirch}, Date-Added = {2008-12-25 22:15:01 +0100}, Date-Modified = {2008-12-25 22:15:01 +0100}, Month = {November}, School = {University of Edinburgh}, Title = {Constructions, Inductive Types and Strong Normalization}, Year = {1993}} @inproceedings{DBLP:conf/csl/BartheM96, Author = {Gilles Barthe and Paul-Andr{\'e} Melli{\`e}s}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {CSL}, Crossref = {DBLP:conf/csl/1996}, Date-Added = {2008-12-25 22:06:08 +0100}, Date-Modified = {2008-12-25 22:06:33 +0100}, Pages = {34-57}, Read = {Oui}, Title = {On the Subject Reduction Property for Algebraic Type Systems}, Url = {http://www.pps.jussieu.fr/~mellies/papers/csl96.ps.gz}, Year = {1996}, Bdsk-Url-1 = {http://www.pps.jussieu.fr/~mellies/papers/csl96.ps.gz}} @inproceedings{DBLP:conf/types/MelliesW96, Author = {Paul-Andr{\'e} Melli{\`e}s and Benjamin Werner}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Crossref = {DBLP:conf/types/1996}, Date-Added = {2008-12-25 22:04:51 +0100}, Date-Modified = {2008-12-25 22:05:35 +0100}, Pages = {254-276}, Read = {Oui}, Title = {A Generic Normalisation Proof for Pure Type Systems}, Url = {http://www.pps.jussieu.fr/~mellies/papers/pts.ps}, Year = {1996}, Bdsk-Url-1 = {http://www.pps.jussieu.fr/~mellies/papers/pts.ps}} @inproceedings{weegen08types, Author = {Eelis van der Weegen and James McKinna}, Booktitle = {TYPES'08}, Date-Added = {2008-12-16 20:37:13 +0100}, Date-Modified = {2008-12-16 20:39:55 +0100}, Read = {Oui}, Title = {{A Machine-checked Proof of the Average-case Complexity of Quicksort in Coq}}, Url = {http://www.xs4all.nl/~weegen/eelis/research/quicksort/}, Year = {2008}, Bdsk-Url-1 = {http://www.xs4all.nl/~weegen/eelis/research/quicksort/}} @inproceedings{oury-swierstra:power-of-pi, Address = {Victoria, BC, Canada}, Author = {Nicolas Oury and Wouter Swierstra}, Booktitle = {ICFP '08: Proceeding of the 13th ACM SIGPLAN International Conference on Functional Programming}, Date-Added = {2008-12-14 18:31:10 +0100}, Date-Modified = {2008-12-14 18:32:40 +0100}, Doi = {http://doi.acm.org/10.1145/1411204.1411213}, Isbn = {978-1-59593-919-7}, Pages = {39--50}, Pdf = {http://www.cs.nott.ac.uk/~wss/Publications/ThePowerOfPi.pdf}, Publisher = {ACM}, Title = {The {Power} of {Pi}}, Url = {http://portal.acm.org/ft_gateway.cfm?id=1411213&type=pdf&coll=Portal&dl=GUIDE&CFID=15142937&CFTOKEN=76020526}, Year = {2008}, Bdsk-Url-1 = {http://portal.acm.org/ft_gateway.cfm?id=1411213&type=pdf&coll=Portal&dl=GUIDE&CFID=15142937&CFTOKEN=76020526}, Bdsk-Url-2 = {http://doi.acm.org/10.1145/1411204.1411213}} @misc{sozeau.thesis-slides, Address = {Orsay, France}, Author = {Matthieu Sozeau}, Date-Added = {2008-12-09 22:27:26 +0100}, Date-Modified = {2008-12-26 15:36:30 +0100}, Howpublished = {Thesis Defense}, Month = {8th December}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/An_Environment_for_Programming_with_Dependent_Types-LRI-081208.pdf}, Read = {Oui}, School = {Paris 11 University}, Title = {{An Environment for Programming with Dependent Types}}, Type = {slides}, Year = {2008}} @inproceedings{bentonTLDI09, Author = {Nick Benton and Nicolas Tabareau}, Booktitle = 
{TLDI}, Date-Added = {2008-12-05 20:10:32 +0100}, Date-Modified = {2009-01-07 22:32:52 +0100}, Title = {{Compiling Functional Types to Relational Specifications for Low Level Imperative Code}}, Url = {http://research.microsoft.com/~nick/tldi02d-benton.pdf}, Year = {2009}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxArLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL3RsZGkwMmQtYmVudG9uLnBkZk8RAZIAAAAAAZIAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarmBZIKwAAADZ3yBJ0bGRpMDJkLWJlbnRvbi5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP+l2yTjo6AAAAAAAAAAAAAMAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAyTja2AAAAAEAEAA2d8gADtu0AAf5OAAAkOcAAgA5TWFjaW50b3NoIEhEOlVzZXJzOm1hdDpyZXNlYXJjaDpwYXBlcnM6dGxkaTAyZC1iZW50b24ucGRmAAAOACYAEgB0AGwAZABpADAAMgBkAC0AYgBlAG4AdABvAG4ALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASACxVc2Vycy9tYXQvcmVzZWFyY2gvcGFwZXJzL3RsZGkwMmQtYmVudG9uLnBkZgATAAEvAAAVAAIACv//AAAACAANABoAJABSAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAeg=}, Bdsk-Url-1 = {http://research.microsoft.com/~nick/tldi02d-benton.pdf}} @inproceedings{sozeau.Coq/Equations/ITP10, Author = {Matthieu Sozeau}, Booktitle = {First International Conference on Interactive Theorem Proving}, Date-Added = {2008-11-06 12:39:36 +0100}, Date-Modified = {2010-07-19 11:40:30 -0400}, Month = {July}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Equations:_A_Dependent_Pattern-Matching_Compiler.pdf}, Publisher = {Springer}, Read = {Oui}, Slides = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Equations:_A_Dependent_Pattern-Matching_Compiler-ITP10-140710.pdf}, Title = {Equations: A Dependent Pattern-Matching Compiler}, Year = {2010}, Bdsk-Url-1 = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/Equations.pdf}} @phdthesis{sozeau.thesis, Address = {Orsay, France}, Author = {Matthieu Sozeau}, Date-Added = {2008-11-06 11:12:10 +0100}, Date-Modified = {2008-12-26 15:38:14 +0100}, Month = {December}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/thesis-sozeau.pdf}, Read = {Oui}, School = {Universit{\'e} Paris 11}, Slides = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/An_Environment_for_Programming_with_Dependent_Types-LRI-081208.pdf}, Title = {Un environnement pour la programmation avec types d{\'e}pendants}, Year = {2008}} @misc{sozeau.Coq/classes/Gallium08, Abstract = {Les classes de types ont d{\'e}montr{\'e} leur utilit{\'e} pour la programmation polymorphique de haut-niveau en Haskell et la construction de hi{\'e}rarchies de structures math{\'e}matiques dans l'assistant de preuve Isabelle. Ce m{\'e}canisme de surcharge tr{\`e}s versatile peut se g{\'e}n{\'e}raliser aux types d{\'e}pendants et offre alors de nouvelles formes de g{\'e}n{\'e}ricit{\'e}. Il permet aussi d'int{\'e}grer une forme de programmation logique au moment du typage. On pr{\'e}sentera une impl{\'e}mentation l{\'e}g{\`e}re d'un syst{\`e}me de classes de types de premi{\`e}re classe dans Coq, bas{\'e}e sur un enrichissement de constructions existantes du langage Gallina. On illustrera ce nouvel outil par des exemples mettant en oeuvre les classes pour programmer, prouver et organiser les d{\'e}veloppements en Coq. 
}, Address = {Rocquencourt}, Author = {Matthieu Sozeau}, Date-Added = {2008-11-03 15:40:49 +0100}, Date-Modified = {2008-11-06 14:11:29 +0100}, Howpublished = {Talk given at the {G}allium seminar}, Keywords = {Type classes}, Month = {3rd November}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/First-Class_Type_Classes-Gallium-031108.pdf}, Read = {Oui}, Title = {{F}irst-{C}lass {T}ype {C}lasses}, Type = {slides}, Year = {2008}} @inproceedings{CS07, Author = {Thierry Coquand and Arnaud Spiwack}, Booktitle = {Proceedings of 14th Symposium, Calculemus 2007, 6th International Conference, MKM 2007}, Date-Added = {2008-10-20 10:38:32 +0200}, Date-Modified = {2008-10-20 10:38:58 +0200}, Publisher = {Springer}, Read = {Oui}, Title = {Towards Constructive Homological Algebra in Type Theory}, Url = {http://arnaud.spiwack.free.fr/papers/tchaitt.pdf}, Year = {2007}, Bdsk-Url-1 = {http://arnaud.spiwack.free.fr/papers/tchaitt.pdf}} @phdthesis{Brady:2005nu, Author = {Edwin C. Brady}, Date-Added = {2008-10-17 01:05:51 +0200}, Date-Modified = {2008-10-17 01:14:52 +0200}, School = {University of Durham}, Title = {Practical Implementation of a Dependently Typed Functional Programming Language}, Url = {http://www.cs.st-andrews.ac.uk/~eb/writings/thesis.ps.gz}, Year = {2005}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxApLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL3RoZXNpcy1icmFkeS5wZGZPEQGKAAAAAAGKAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADGq5gWSCsAAAA2d8gQdGhlc2lzLWJyYWR5LnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/kY8k459kAAAAAAAAAAAADAAMAAAkgAAAAAAAAAAAAAAAAAAAABnBhcGVycwAQAAgAAMare/YAAAARAAgAAMk42ckAAAABABAANnfIAA7btAAH+TgAAJDnAAIAN01hY2ludG9zaCBIRDpVc2VyczptYXQ6cmVzZWFyY2g6cGFwZXJzOnRoZXNpcy1icmFkeS5wZGYAAA4AIgAQAHQAaABlAHMAaQBzAC0AYgByAGEAZAB5AC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgAqVXNlcnMvbWF0L3Jlc2VhcmNoL3BhcGVycy90aGVzaXMtYnJhZHkucGRmABMAAS8AABUAAgAK//8AAAAIAA0AGgAkAFAAAAAAAAACAQAAAAAAAAAFAAAAAAAAAAAAAAAAAAAB3g==}, Bdsk-Url-1 = {http://www.cs.st-andrews.ac.uk/~eb/writings/thesis.ps.gz}} @phdthesis{werner94these, Author = {Benjamin Werner}, Date-Added = {2008-10-17 00:54:06 +0200}, Date-Modified = {2008-10-17 01:03:57 +0200}, Read = {Oui}, School = {Universit{\'e} Paris 7}, Title = {Une Th{\'e}orie des Constructions Inductives}, Topics = {logic}, Type = thesedoctorat, Url = {http://cat.inist.fr/?aModele=afficheN&cpsidt=1815936}, Year = 1994, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAqLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL3RoZXNpcy13ZXJuZXIucGRmTxEBjgAAAAABjgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAxquYFkgrAAAANnfIEXRoZXNpcy13ZXJuZXIucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/6SLJOOjkAAAAAAAAAAAAAwADAAAJIAAAAAAAAAAAAAAAAAAAAAZwYXBlcnMAEAAIAADGq3v2AAAAEQAIAADJONrUAAAAAQAQADZ3yAAO27QAB/k4AACQ5wACADhNYWNpbnRvc2ggSEQ6VXNlcnM6bWF0OnJlc2VhcmNoOnBhcGVyczp0aGVzaXMtd2VybmVyLnBkZgAOACQAEQB0AGgAZQBzAGkAcwAtAHcAZQByAG4AZQByAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgArVXNlcnMvbWF0L3Jlc2VhcmNoL3BhcGVycy90aGVzaXMtd2VybmVyLnBkZgAAEwABLwAAFQACAAr//wAAAAgADQAaACQAUQAAAAAAAAIBAAAAAAAAAAUAAAAAAAAAAAAAAAAAAAHj}, Bdsk-Url-1 = {http://cat.inist.fr/?aModele=afficheN&cpsidt=1815936}} @phdthesis{ZumkellerPhD, Author = {Roland Zumkeller}, Date-Added = {2008-10-17 00:39:50 +0200}, Date-Modified = {2008-10-17 00:42:43 +0200}, Month = {October}, Read = {Oui}, School = {{\'E}cole Polytechnique}, Title = {Global Optimization in Type Theory}, Url = 
{http://roland.zumkeller.googlepages.com/thesis-zumkeller.pdf}, Year = {2008}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAtLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL3RoZXNpcy16dW1rZWxsZXIucGRmTxEBmgAAAAABmgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAxquYFkgrAAAANnfIFHRoZXNpcy16dW1rZWxsZXIucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/6S3JOOjlAAAAAAAAAAAAAwADAAAJIAAAAAAAAAAAAAAAAAAAAAZwYXBlcnMAEAAIAADGq3v2AAAAEQAIAADJONrVAAAAAQAQADZ3yAAO27QAB/k4AACQ5wACADtNYWNpbnRvc2ggSEQ6VXNlcnM6bWF0OnJlc2VhcmNoOnBhcGVyczp0aGVzaXMtenVta2VsbGVyLnBkZgAADgAqABQAdABoAGUAcwBpAHMALQB6AHUAbQBrAGUAbABsAGUAcgAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIALlVzZXJzL21hdC9yZXNlYXJjaC9wYXBlcnMvdGhlc2lzLXp1bWtlbGxlci5wZGYAEwABLwAAFQACAAr//wAAAAgADQAaACQAVAAAAAAAAAIBAAAAAAAAAAUAAAAAAAAAAAAAAAAAAAHy}, Bdsk-Url-1 = {http://roland.zumkeller.googlepages.com/thesis-zumkeller.pdf}} @inproceedings{Aydemir08, Author = {Brian Aydemir and Arthur Chargu\'{e}raud and Benjamin C. Pierce and Randy Pollack and Stephanie Weirich}, Bcp = yes, Booktitle = popl08, Date-Added = {2008-10-14 19:37:09 +0200}, Date-Modified = {2008-10-14 19:37:43 +0200}, Keys = {poplmark}, Month = jan, Pages = {3--15}, Plclub = yes, Publisher = {ACM}, Read = {Oui}, Short = {http://www.cis.upenn.edu/~bcpierce/papers/binders.pdf}, Title = {Engineering formal metatheory}, Url = {http://www.cis.upenn.edu/~bcpierce/papers/binders.pdf}, Year = {2008}, Bdsk-Url-1 = {http://www.cis.upenn.edu/~bcpierce/papers/binders.pdf}} @mastersthesis{puech:master, Author = {Matthias Puech}, Date-Added = {2008-10-14 16:01:25 +0200}, Date-Modified = {2008-10-14 16:03:11 +0200}, Month = {Septembre}, Read = {Oui}, School = {Universit{\'e} Paris 7}, Title = {Reconnaissance automatique de structures math{\'e}matiques dans l'assistant de preuve Coq}, Url = {http://mqtthiqs.hd.free.fr/~mqtt/rapport/rapport.pdf}, Year = {2008}, Bdsk-Url-1 = {http://mqtthiqs.hd.free.fr/~mqtt/rapport/rapport.pdf}} @inproceedings{LTAC, Author = {Delahaye, David}, Booktitle = {Proceedings of Logic for Programming and Automated Reasoning (LPAR), Reunion Island (France)}, Date-Added = {2008-10-13 20:11:53 +0200}, Date-Modified = {2008-10-13 23:04:48 +0200}, Month = {November}, Pages = {85--95}, Publisher = {Springer-Verlag}, Series = {LNCS/LNAI}, Title = {A {T}actic {L}anguage for the {S}ystem {{\textsf Coq}}}, Url = {http://cedric.cnam.fr/~delahaye/publications/LPAR2000-ltac.ps.gz}, Volume = 1955, Year = 2000, Bdsk-Url-1 = {http://cedric.cnam.fr/~delahaye/publications/LPAR2000-ltac.ps.gz}} @article{barthe:computational, Address = {New York, NY, USA}, Author = {Gilles Barthe}, Date-Added = {2008-10-13 13:41:15 +0200}, Date-Modified = {2008-10-13 13:46:05 +0200}, Doi = {http://dx.doi.org/10.1017/S0960129505004901}, Issn = {0960-1295}, Journal = {Mathematical. Structures in Comp. Sci.}, Number = {5}, Pages = {839--874}, Publisher = {Cambridge University Press}, Title = {A computational view of implicit coercions in type theory}, Volume = {15}, Year = {2005}, Bdsk-Url-1 = {http://dx.doi.org/10.1017/S0960129505004901}} @unpublished{lh07extensible, Author = {Daniel R. 
Licata and Robert Harper}, Date-Added = {2008-10-13 13:32:18 +0200}, Date-Modified = {2008-10-14 16:04:35 +0200}, Note = {Submitted to POPL'08}, Read = {Oui}, Title = {An Extensible Theory of Indexed Types}, Url = {http://www.cs.cmu.edu/~drl/pubs/lh07extensible/lh07extensible.pdf}, Year = {2008}, Bdsk-Url-1 = {http://www.cs.cmu.edu/~drl/pubs/lh07extensible/lh07extensible.pdf}} @article{Coquand:2008zx, Abstract = {Around thirty years ago, P. Martin-L{\"o}f {$[$}12{$]$} suggested that the intuitionistic theory of types, originally designed as a formal system for constructive mathematics, could be viewed as a programming language. The conclusion of this paper stresses the mutual benefit of relating constructive mathematics and computer programming. In one direction one gets a precise system of notations for both statements and proofs, and one obtains the computerization of abstract intuitionistic mathematics that was asked by Bishop [2]. In the other direction, computer programming ``gets access to the whole conceptual apparatus of pure mathematics''. }, Author = {Thierry Coquand}, Date-Added = {2008-10-13 13:27:38 +0200}, Date-Modified = {2008-10-13 14:37:30 +0200}, Journal = {Programming Languages and Systems}, M3 = {10.1007/978-3-540-78739-6{\_}12}, Pages = {146--147}, Title = {Constructive Mathematics and Functional Programming (Abstract)}, Ty = {CHAPTER}, Url = {http://dx.doi.org/10.1007/978-3-540-78739-6_12}, Year = {2008}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/978-3-540-78739-6_12}} @misc{proute:liens, Author = {Alain Prout{\'e}}, Date-Added = {2008-10-13 13:20:40 +0200}, Date-Modified = {2008-10-13 13:22:09 +0200}, Howpublished = {Conf{\'e}rence faite {\`a} l'Universit{\'e} de la M{\'e}diterrann{\'e}e}, Month = {mai}, Read = {Oui}, Title = {Sur quelques liens entre th{\'e}orie des topos et th{\'e}orie de la d{\'e}monstration}, Url = {http://iml.univ-mrs.fr/ldp/Seminaire/documents0607/proute.pdf}, Year = {2007}, Bdsk-Url-1 = {http://iml.univ-mrs.fr/ldp/Seminaire/documents0607/proute.pdf}} @misc{wilson:dtp08, Author = {Sean Wilson}, Date-Added = {2008-10-13 13:16:26 +0200}, Date-Modified = {2008-10-13 13:18:03 +0200}, Howpublished = {Talk given at DTP'08}, Month = {February}, Read = {Oui}, Title = {Supporting the Development of Dependently Typed Functional Programs}, Url = {http://sneezy.cs.nott.ac.uk/darcs/DTP08/slides/Sean.pdf}, Year = {2008}, Bdsk-Url-1 = {http://sneezy.cs.nott.ac.uk/darcs/DTP08/slides/Sean.pdf}} @inproceedings{razetfem, Author = {Beno{\^\i}t Razet}, Booktitle = {Mathematically Structured Functional Programming}, Date-Added = {2008-10-13 13:10:07 +0200}, Date-Modified = {2009-02-05 12:48:36 +0100}, Note = {To appear}, Pdf = {http://pauillac.inria.fr/~razet/PDF/razet_msfp08.pdf}, Read = {Oui}, Series = {Electronic Notes in Theoretical Computer Science}, Title = {{Simulating Finite Eilenberg Machines with a Reactive Engine}}, Year = {2008}} @inproceedings{conf/fossacs/BartheP01, Abstract = { We propose a theoretical foundation for proof reuse, based on the novel idea of a computational interpretation of type isomorphisms. 
}, Author = {Gilles Barthe and Olivier Pons}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {FoSSaCS}, Crossref = {fossacs2001}, Date-Added = {2008-10-13 12:38:42 +0200}, Date-Modified = {2008-10-13 12:54:55 +0200}, Ee = {http://link.springer.de/link/service/series/0558/bibs/2030/20300057.htm}, Pages = {57-71}, Ps = {http://www3.iie.cnam.fr/~pons/PAPERS/fossacs01.ps.gz}, Title = {Type Isomorphisms and Proof Reuse in Dependent Type Theory.}, Topics = {team}, Type_Publi = {icolcomlec}, Url = {http://www3.iie.cnam.fr/~pons/PAPERS/fossacs01.pdf}, Year = {2001}, Bdsk-Url-1 = {http://www3.iie.cnam.fr/~pons/PAPERS/fossacs01.pdf}} @inproceedings{REYNOLDS80, Abstract = {A generalization of many-sorted algebras, called category-sorted algebras, is defined and applied to the language-design problem of avoiding anomalies in the interaction of implicit conversions and generic operators. The definition of a simple imperative language (without any binding mechanisms) is used as an example.}, Address = {Berlin}, Author = {Reynolds, John C.}, Booksubtitle = {Proceedings of a Workshop}, Booktitle = {Semantics-Directed Compiler Generation}, Checked = {22 August 1990}, Date-Added = {2008-10-13 11:28:20 +0200}, Date-Modified = {2008-10-13 11:28:20 +0200}, Dates = {January 14--18}, Editor = {Neil D. Jones}, Filename = {cattheodesign}, Pages = {211--258}, Place = {Aarhus, Denmark}, Publisher = {Springer-Verlag}, Reprint = {Reprinted in \authorcite{Gunter and Mitchell} \crosscite[pages~25--64]{GUNTER94}}, Series = {Lecture Notes in Computer Science}, Title = {Using Category Theory to Design Implicit Conversions and Generic Operators}, Updated = {9 May 2001}, Volume = {94}, Year = {1980}} @article{breazu91a, Address = {biblioth{\`e}que D{\'e}mons}, Author = {Val Breazu-Tannen and Thierry Coquand and Carl A. Gunter}, Date-Added = {2008-10-13 11:21:36 +0200}, Date-Modified = {2008-10-13 11:21:36 +0200}, Journal = {Information and Computation}, Number = 1, Pages = {172-221}, Title = {Inheritance as implicit coercion}, Volume = 93, Year = 1991} @inproceedings{Gregoire-Leroy-02, Abstract = {Motivated by applications to proof assistants based on dependent types, we develop and prove correct a strong reducer and $\beta$-equivalence checker for the $\lambda$-calculus with products, sums, and guarded fixpoints. Our approach is based on compilation to the bytecode of an abstract machine performing weak reductions on non-closed terms, derived with minimal modifications from the ZAM machine used in the Objective Caml bytecode interpreter, and complemented by a recursive ``read back'' procedure. An implementation in the Coq proof assistant demonstrates important speed-ups compared with the original interpreter-based implementation of strong reduction in Coq.}, Author = {Benjamin Gr{\'e}goire and Xavier Leroy}, Crossref = {icfp02}, Date-Added = {2008-10-12 16:18:19 +0200}, Date-Modified = {2008-10-13 14:41:19 +0200}, Pages = {235--246}, Title = {A compiled implementation of strong reduction}, Url = {http://pauillac.inria.fr/~xleroy/publi/strong-reduction.pdf}, Bdsk-Url-1 = {http://pauillac.inria.fr/~xleroy/publi/strong-reduction.pdf}} @article{debruijn72, Author = {Nicolaas G. de Bruijn}, Date-Added = {2008-10-12 10:26:13 +0200}, Date-Modified = {2008-10-12 10:26:13 +0200}, Journal = {Proc.
of the Koninklijke Nederlands Akademie}, Number = 5, Pages = {380--392}, Title = {Lambda Calculus with nameless dummies, a tool for automatic formula manipulation, with application to the {Church-Rosser} theorem}, Volume = 75, Year = 1972} @misc{PiSigma, Author = {Nicolas Oury and Thorsten Altenkirch}, Date-Added = {2008-10-11 23:25:42 +0200}, Date-Modified = {2008-10-13 14:44:33 +0200}, Howpublished = {Prototype available on the web}, Title = {PiSigma}, Url = {http://sneezy.cs.nott.ac.uk/cgi-bin/PiSigma}, Year = {2008}, Bdsk-Url-1 = {http://sneezy.cs.nott.ac.uk/cgi-bin/PiSigma}} @article{kaplan-tarjan-99, Author = {Haim Kaplan and Robert E. Tarjan}, Date-Added = {2008-10-11 20:42:51 +0200}, Date-Modified = {2008-10-11 20:42:51 +0200}, Journal = jacm, Number = {5}, Pages = {577--603}, Title = {Purely functional, real-time deques with catenation}, Url = {http://www.math.tau.ac.il/~haimk/bob.ps}, Volume = {46}, Year = {1999}, Bdsk-Url-1 = {http://www.math.tau.ac.il/~haimk/bob.ps}} @phdthesis{ChlipalaPhD, Author = {Adam Chlipala}, Date-Added = {2008-10-11 15:40:51 +0200}, Date-Modified = {2008-10-11 15:46:21 +0200}, Number = {UCB/EECS-2007-113}, School = {University of California at Berkeley}, Title = {Implementing Certified Programming Language Tools in Dependent Type Theory}, Type = {{Technical Report}}, Url = {http://adam.chlipala.net/papers/ChlipalaPhD/}, Year = {2007}, Bdsk-Url-1 = {http://adam.chlipala.net/papers/ChlipalaPhD/}} @incollection{barendregt92, Author = {Henk Barendregt}, Booktitle = {Handbook of Logic in Computer Science}, Date-Added = {2008-10-11 13:33:30 +0200}, Date-Modified = {2008-10-11 23:44:54 +0200}, Doi = {10.1.1.26.4391}, Editor = {{Abramsky et al.}}, Publisher = {Oxford Univ. Press}, Title = {Typed lambda calculi}, Topics = {lambda-calculus}, Url = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.26.4391&rep=rep1&type=pdf}, Year = 1993, Bdsk-Url-1 = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.26.4391&rep=rep1&type=pdf}, Bdsk-Url-2 = {http://dx.doi.org/10.1.1.26.4391}} @misc{mpriextract, Author = {Christine Paulin-Mohring}, Date-Added = {2008-10-11 11:30:04 +0200}, Date-Modified = {2008-10-11 14:27:19 +0200}, Howpublished = {Notes de cours du MPRI}, Title = {Extraction de programmes et r{\'e}alisabilit{\'e}}, Url = {http://logical.inria.fr/mpri/notes/cours006.html}, Year = {2008}, Bdsk-Url-1 = {http://logical.inria.fr/mpri/notes/cours006.html}} @book{girard:pointaveugle, Author = {Jean-Yves Girard}, Date-Added = {2008-10-10 17:39:54 +0200}, Date-Modified = {2008-10-13 14:41:00 +0200}, Month = {Juillet}, Publisher = {Hermann}, Series = {Visions des sciences}, Title = {Le point aveugle: tome 1. Cours de Logique, Vers la perfection.}, Url = {http://iml.univ-mrs.fr/~girard/}, Volume = {1}, Year = {2006}, Bdsk-Url-1 = {http://iml.univ-mrs.fr/~girard/}} @book{fromfregetogodel, Abstract = {{

The fundamental texts of the great classical period in modern logic, some of them never before available in English translation, are here gathered together for the first time. Modern logic, heralded by Leibniz, may be said to have been initiated by Boole, De Morgan, and Jevons, but it was the publication in 1879 of Gottlob Frege's Begriffsschrift that opened a great epoch in the history of logic by presenting, in full-fledged form, the propositional calculus and quantification theory.

Frege's book, translated in its entirety, begins the present volume. The emergence of two new fields, set theory and foundations of mathematics, on the borders of logic, mathematics, and philosophy, is depicted by the texts that follow. Peano and Dedekind illustrate the trend that led to Principia Mathematica. Burali-Forti, Cantor, Russell, Richard, and K\"{o}nig mark the appearance of the modern paradoxes. Hilbert, Russell, and Zermelo show various ways of overcoming these paradoxes and initiate, respectively, proof theory, the theory of types, and axiomatic set theory. Skolem generalizes L\"{o}wenheim's theorem, and he and Fraenkel amend Zermelo's axiomatization of set theory, while von Neumann offers a somewhat different system. The controversy between Hilbert and Brouwer during the twenties is presented in papers of theirs and in others by Weyl, Bernays, Ackermann, and Kolmogorov. The volume concludes with papers by Herbrand and by G\"{o}del, including the latter's famous incompleteness paper.

Of the forty-five contributions here collected all but five are presented in extenso. Those not originally written in English have been translated with exemplary care and exactness; the translators are themselves mathematical logicians as well as skilled interpreters of sometimes obscure texts. Each paper is introduced by a note that sets it in perspective, explains its importance, and points out difficulties in interpretation. Editorial comments and footnotes are interpolated where needed, and an extensive bibliography is included.

}}, Author = {van Heijenoort, Jean}, Citeulike-Article-Id = {616905}, Date-Added = {2008-10-10 16:29:59 +0200}, Date-Modified = {2008-10-10 16:30:11 +0200}, Howpublished = {Paperback}, Isbn = {0674324498}, Keywords = {frege, godel, logic}, Month = {January}, Posted-At = {2006-05-07 22:17:38}, Priority = {2}, Publisher = {{Harvard University Press}}, Title = {From Frege to G\"{o}del : A Source Book in Mathematical Logic, 1879-1931 (Source Books in the History of the Sciences)}, Url = {http://www.amazon.ca/exec/obidos/redirect?tag=citeulike09-20\&path=ASIN/0674324498}, Year = {2002}, Bdsk-Url-1 = {http://www.amazon.ca/exec/obidos/redirect?tag=citeulike09-20%5C&path=ASIN/0674324498}} @inproceedings{DBLP:conf/types/Geuvers94, Author = {Herman Geuvers}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Crossref = {DBLP:conf/types/1994}, Date-Added = {2008-10-09 16:29:47 +0200}, Date-Modified = {2008-10-09 16:33:04 +0200}, Pages = {14-38}, Title = {A short and flexible proof of Strong Normalization for the Calculus of Constructions}, Url = {http://www.cs.ru.nl/~herman/PUBS/BRABasSNCC.ps.gz}, Year = {1994}, Bdsk-Url-1 = {http://www.cs.ru.nl/~herman/PUBS/BRABasSNCC.ps.gz}} @proceedings{DBLP:conf/types/1994, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Date-Added = {2008-10-09 16:29:47 +0200}, Date-Modified = {2008-10-09 16:29:47 +0200}, Editor = {Peter Dybjer and Bengt Nordstr{\"o}m and Jan M. Smith}, Isbn = {3-540-60579-7}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Types for Proofs and Programs, International Workshop TYPES'94, B{\aa}stad, Sweden, June 6-10, 1994, Selected Papers}, Volume = {996}, Year = {1995}} @inproceedings{DBLP:conf/lics/GeuversW94, Author = {Herman Geuvers and Benjamin Werner}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {LICS}, Crossref = {DBLP:conf/lics/LICS9}, Date-Added = {2008-10-09 16:10:11 +0200}, Date-Modified = {2008-10-09 16:10:42 +0200}, Pages = {320-329}, Title = {On the Church-Rosser Property for Expressive Type Systems and its Consequences for their Metatheoretic Study}, Url = {http://www.cs.ru.nl/~herman/PUBS/LICS94_FixCR.ps.gz}, Year = {1994}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxApLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL0xJQ1M5NF9GaXhDUi5wZGZPEQGKAAAAAAGKAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADGq5gWSCsAAAA2d8gQTElDUzk0X0ZpeENSLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/Vwck45JMAAAAAAAAAAAADAAMAAAkgAAAAAAAAAAAAAAAAAAAABnBhcGVycwAQAAgAAMare/YAAAARAAgAAMk41oMAAAABABAANnfIAA7btAAH+TgAAJDnAAIAN01hY2ludG9zaCBIRDpVc2VyczptYXQ6cmVzZWFyY2g6cGFwZXJzOkxJQ1M5NF9GaXhDUi5wZGYAAA4AIgAQAEwASQBDAFMAOQA0AF8ARgBpAHgAQwBSAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgAqVXNlcnMvbWF0L3Jlc2VhcmNoL3BhcGVycy9MSUNTOTRfRml4Q1IucGRmABMAAS8AABUAAgAK//8AAAAIAA0AGgAkAFAAAAAAAAACAQAAAAAAAAAFAAAAAAAAAAAAAAAAAAAB3g==}, Bdsk-Url-1 = {http://www.cs.ru.nl/~herman/PUBS/LICS94_FixCR.ps.gz}} @inproceedings{chargueraud-pottier-08, Author = {Arthur Chargu{\'e}raud and Fran{\c c}ois Pottier}, Booktitle = icfp, Date-Added = {2008-10-09 15:49:05 +0200}, Date-Modified = {2008-12-04 15:56:07 +0100}, Month = sep, Title = {Functional Translation of a Calculus of Capabilities}, Url = {http://cristal.inria.fr/~fpottier/publis/chargueraud-pottier-capabilities.pdf}, Year = {2008}, Bdsk-Url-1 = {http://cristal.inria.fr/~fpottier/publis/chargueraud-pottier-capabilities.pdf}} @article{filliatre-00, Author = {Jean-Christophe Filli{\^a}tre}, Date-Added = {2008-10-09 
15:48:40 +0200}, Date-Modified = {2008-10-09 15:51:48 +0200}, Journal = jfp, Month = jul, Number = {4}, Pages = {709--745}, Title = {Verification of Non-Functional Programs using Interpretations in Type Theory}, Url = {http://www.lri.fr/~filliatr/ftp/publis/jphd.ps.gz}, Volume = {13}, Year = {2003}, Bdsk-Url-1 = {http://www.lri.fr/~filliatr/ftp/publis/jphd.ps.gz}} @inproceedings{sage-06, Author = {Jessica Gronski and Kenneth Knowles and Aaron Tomb and Stephen N. Freund and Cormac Flanagan}, Booktitle = {Scheme and Functional Programming}, Date-Added = {2008-10-09 15:42:28 +0200}, Date-Modified = {2008-10-09 15:42:28 +0200}, Month = sep, Title = {{Sage}: Hybrid Checking for Flexible Specifications}, Url = {http://www.cs.williams.edu/~freund/papers/06-sfp.pdf}, Year = {2006}, Bdsk-Url-1 = {http://www.cs.williams.edu/~freund/papers/06-sfp.pdf}} @article{DBLP:journals/entcs/LovasP08, Author = {William Lovas and Frank Pfenning}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Date-Added = {2008-10-09 15:20:41 +0200}, Date-Modified = {2008-10-09 15:20:41 +0200}, Ee = {http://dx.doi.org/10.1016/j.entcs.2007.09.021}, Journal = {Electr. Notes Theor. Comput. Sci.}, Pages = {113-128}, Title = {A Bidirectional Refinement Type System for LF}, Volume = {196}, Year = {2008}} @phdthesis{McKinnaPhD, Author = {James Hugh McKinna}, Date-Added = {2008-10-09 14:37:07 +0200}, Date-Modified = {2008-10-09 14:40:05 +0200}, Read = {Oui}, School = {Edinburgh University}, Title = {Deliverables: a categorical approach to program development in type theory}, Url = {http://www.lfcs.inf.ed.ac.uk/reports/92/ECS-LFCS-92-247/}, Year = {1992}, Bdsk-Url-1 = {http://www.lfcs.inf.ed.ac.uk/reports/92/ECS-LFCS-92-247/}} @inproceedings{DBLP:conf/mkm/Cruz-FilipeGW04, Author = {Lu\'{\i}s Cruz-Filipe and Herman Geuvers and Freek Wiedijk}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {MKM}, Crossref = {DBLP:conf/mkm/2004}, Date-Added = {2008-10-09 13:56:04 +0200}, Date-Modified = {2008-10-09 13:57:13 +0200}, Ee = {http://springerlink.metapress.com/openurl.asp?genre=article{\&}issn=0302-9743{\&}volume=3119{\&}spage=88}, Pages = {88-103}, Title = {C-CoRN, the Constructive Coq Repository at Nijmegen}, Url = {http://www.cs.ru.nl/~herman/PUBS/ccorn.pdf}, Year = {2004}, Bdsk-Url-1 = {http://www.cs.ru.nl/~herman/PUBS/ccorn.pdf}} @proceedings{DBLP:conf/mkm/2004, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {MKM}, Date-Added = {2008-10-09 13:56:04 +0200}, Date-Modified = {2008-10-09 13:56:04 +0200}, Editor = {Andrea Asperti and Grzegorz Bancerek and Andrzej Trybulec}, Isbn = {3-540-23029-7}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Mathematical Knowledge Management, Third International Conference, MKM 2004, Bialowieza, Poland, September 19-21, 2004, Proceedings}, Volume = {3119}, Year = {2004}} @article{DBLP:journals/corr/abs-0809-1552, Author = {Russell O'Connor and Bas Spitters}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Date-Added = {2008-10-09 13:51:21 +0200}, Date-Modified = {2008-10-09 13:51:21 +0200}, Ee = {http://arxiv.org/abs/0809.1552}, Journal = {CoRR}, Title = {A computer verified, monadic, functional implementation of the integral}, Volume = {abs/0809.1552}, Year = {2008}} @phdthesis{CaprettaPhD, Address = {The Netherlands}, Author = {Venanzio Capretta}, Date-Added = {2008-10-09 13:45:50 +0200}, Date-Modified = {2008-10-09 13:46:47 +0200}, Keywords = {setoids type theory}, Month = {April}, School = {University of Nijmegen}, Title = {Abstraction and Computation}, Url 
= {http://www.cs.ru.nl/~venanzio/publications/Abstraction_Computation.pdf}, Year = {2002}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxA0Li4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL0Fic3RyYWN0aW9uX0NvbXB1dGF0aW9uLnBkZk8RAbYAAAAAAbYAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarmBZIKwAAADZ3yBtBYnN0cmFjdGlvbl9Db21wdXRhdGlvbi5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP9T8yTjkcQAAAAAAAAAAAAMAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAyTjWYQAAAAEAEAA2d8gADtu0AAf5OAAAkOcAAgBCTWFjaW50b3NoIEhEOlVzZXJzOm1hdDpyZXNlYXJjaDpwYXBlcnM6QWJzdHJhY3Rpb25fQ29tcHV0YXRpb24ucGRmAA4AOAAbAEEAYgBzAHQAcgBhAGMAdABpAG8AbgBfAEMAbwBtAHAAdQB0AGEAdABpAG8AbgAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIANVVzZXJzL21hdC9yZXNlYXJjaC9wYXBlcnMvQWJzdHJhY3Rpb25fQ29tcHV0YXRpb24ucGRmAAATAAEvAAAVAAIACv//AAAACAANABoAJABbAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAhU=}, Bdsk-Url-1 = {http://www.cs.ru.nl/~venanzio/publications/Abstraction_Computation.pdf}} @article{barthe03jfp, Author = {Gilles Barthe and Venanzio Capretta and Olivier Pons}, Date-Added = {2008-10-09 13:34:21 +0200}, Date-Modified = {2008-10-09 13:34:21 +0200}, Journal = jfp, Number = {2}, Pages = {261--293}, Title = {Setoids in type theory}, Url = {http://www3.iie.cnam.fr/~pons/PAPERS/setoid.ps.gz}, Volume = {13}, Year = {2003}, Bdsk-Url-1 = {http://www3.iie.cnam.fr/~pons/PAPERS/setoid.ps.gz}} @book{JacobsB:cltt, Author = {Jacobs, Bart}, Date-Added = {2008-10-09 13:17:06 +0200}, Date-Modified = {2008-10-13 14:41:35 +0200}, Publisher = {Elsevier Science}, Read = {Oui}, Title = {Categorical Logic and Type Theory}, Url = {http://www.cs.ru.nl/B.Jacobs/CLT/bookinfo.html}, Year = 1999, Bdsk-Url-1 = {http://www.cs.ru.nl/B.Jacobs/CLT/bookinfo.html}} @inproceedings{DBLP:conf/types/Hofmann93, Author = {Martin Hofmann}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Crossref = {DBLP:conf/types/1993}, Date-Added = {2008-10-09 13:12:56 +0200}, Date-Modified = {2008-10-09 13:14:12 +0200}, Pages = {166-190}, Title = {Elimination of Extensionality in Martin-L{\"o}f Type Theory}, Url = {http://www.springerlink.com/content/p29l737l49117u34/}, Year = {1993}, Bdsk-Url-1 = {http://www.springerlink.com/content/p29l737l49117u34/}} @book{MARTINLOF84, Address = {Naples, Italy}, Author = {Martin-L{\"o}f, Per}, Checked = {19 January 1988}, Date-Added = {2008-10-09 13:03:43 +0200}, Date-Modified = {2008-10-09 13:03:43 +0200}, Isbn = {88-7088-105-9}, Pages = {ix+91}, Publisher = {Bibliopolis}, Title = {Intuitionistic Type Theory}, Year = {1984}} @inproceedings{DBLP:conf/lics/SalvesenS88, Author = {Anne Salvesen and Jan M. Smith}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {LICS}, Crossref = {DBLP:conf/lics/LICS3}, Date-Added = {2008-10-09 12:55:45 +0200}, Date-Modified = {2008-10-09 12:56:14 +0200}, Pages = {384-391}, Pdf = {https://eprints.kfupm.edu.sa/71405/1/71405.pdf}, Title = {The Strength of the Subset Type in Martin-L{\"o}f's Type Theory}, Year = {1988}} @proceedings{DBLP:conf/lics/LICS3, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {LICS}, Date-Added = {2008-10-09 12:55:45 +0200}, Date-Modified = {2008-10-09 12:55:45 +0200}, Publisher = {IEEE Computer Society}, Title = {Proceedings, Third Annual Symposium on Logic in Computer Science, 5-8 July 1988, Edinburgh, Scotland, UK}, Year = {1988}} @article{pierce-turner-00, Author = {Benjamin C. Pierce and David N. 
Turner}, Date-Added = {2008-10-08 22:17:53 +0200}, Date-Modified = {2008-10-08 22:17:53 +0200}, Journal = toplas, Month = jan, Number = {1}, Pages = {1--44}, Title = {Local Type Inference}, Url = {http://doi.acm.org/10.1145/345099.345100}, Volume = {22}, Year = {2000}, Bdsk-Url-1 = {http://doi.acm.org/10.1145/345099.345100}} @inproceedings{freeman91, Address = {HC 602}, Author = {Tim Freeman and Frank Pfenning}, Booktitle = {Proc. {ACM} {SIGPLAN} Conf. on Programming Language Design and Implementation}, Date-Added = {2008-10-08 22:14:29 +0200}, Date-Modified = {2008-10-09 14:58:51 +0200}, Title = {Refinement Types for {ML}}, Url = {http://www.cs.cmu.edu/~fp/papers/pldi91.pdf}, Year = 1991, Bdsk-Url-1 = {http://www.cs.cmu.edu/~fp/papers/pldi91.pdf}} @article{simplify05, Author = {David Detlefs and Greg Nelson and James B. Saxe}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Date-Added = {2008-10-08 21:28:06 +0200}, Date-Modified = {2008-10-08 21:28:06 +0200}, Ee = {http://doi.acm.org/10.1145/1066100.1066102}, Journal = {J. ACM}, Number = {3}, Pages = {365-473}, Title = {Simplify: a theorem prover for program checking.}, Volume = {52}, Year = {2005}} @inproceedings{DBLP:conf/types/MagnussonN93, Author = {Lena Magnusson and Bengt Nordstr{\"o}m}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Crossref = {DBLP:conf/types/1993}, Date-Added = {2008-10-08 10:45:41 +0200}, Date-Modified = {2008-10-08 10:47:30 +0200}, Pages = {213-237}, Title = {The ALF Proof Editor and Its Proof Engine}, Url = {http://www.springerlink.com/content/f437316686106358/}, Year = {1993}, Bdsk-Url-1 = {http://www.springerlink.com/content/f437316686106358/}} @phdthesis{CornesPhD, Author = {Cristina Cornes}, Date-Added = {2008-10-08 09:11:04 +0200}, Date-Modified = {2019-10-18 15:02:57 +0200}, Keywords = {dependent types pattern matching unification}, Month = {Novembre}, School = {Universit\'e Paris 7}, Title = {Conception d'un langage de haut niveau de repr{\'e}sentation de preuves: R{\'e}currence par filtrage de motifs, Unification en pr{\'e}sesence de types inductifs primitifs, Synth{\`e}se de lemmes d'inversion}, Year = {1997}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxApLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL3RoZXNlLWNvcm5lcy5wZGZPEQGKAAAAAAGKAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADGq5gWSCsAAAA2d8gQdGhlc2UtY29ybmVzLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/f68k45osAAAAAAAAAAAADAAMAAAkgAAAAAAAAAAAAAAAAAAAABnBhcGVycwAQAAgAAMare/YAAAARAAgAAMk42HsAAAABABAANnfIAA7btAAH+TgAAJDnAAIAN01hY2ludG9zaCBIRDpVc2VyczptYXQ6cmVzZWFyY2g6cGFwZXJzOnRoZXNlLWNvcm5lcy5wZGYAAA4AIgAQAHQAaABlAHMAZQAtAGMAbwByAG4AZQBzAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgAqVXNlcnMvbWF0L3Jlc2VhcmNoL3BhcGVycy90aGVzZS1jb3JuZXMucGRmABMAAS8AABUAAgAK//8AAAAIAA0AGgAkAFAAAAAAAAACAQAAAAAAAAAFAAAAAAAAAAAAAAAAAAAB3g==}, Bdsk-Url-1 = {http://www.fing.edu.uy/~cornes/Papers/These.ps}} @inproceedings{Leroy-compcert-06, Abstract = {This paper reports on the development and formal certification (proof of semantic preservation) of a compiler from Cminor (a C-like imperative language) to PowerPC assembly code, using the Coq proof assistant both for programming the compiler and for proving its correctness. 
Such a certified compiler is useful in the context of formal methods applied to the certification of critical software: the certification of the compiler guarantees that the safety properties proved on the source code hold for the executable compiled code as well.}, Author = {Xavier Leroy}, Booktitle = {33rd symposium Principles of Programming Languages}, Date-Added = {2008-10-07 15:11:08 +0200}, Date-Modified = {2008-10-07 15:11:08 +0200}, Pages = {42--54}, Publisher = {ACM Press}, Title = {Formal certification of a compiler back-end, or: programming a compiler with a proof assistant}, Url = {http://gallium.inria.fr/~xleroy/publi/compiler-certif.pdf}, Urlpublisher = {http://doi.acm.org/10.1145/1111037.1111042}, Xtopic = {compcert}, Year = 2006, Bdsk-Url-1 = {http://gallium.inria.fr/~xleroy/publi/compiler-certif.pdf}} @unpublished{Leroy-backend-08, Abstract = { This article describes the development and formal verification (proof of semantic preservation) of a compiler back-end from Cminor (a simple imperative intermediate language) to PowerPC assembly code, using the Coq proof assistant both for programming the compiler and for proving its correctness. Such a verified compiler is useful in the context of formal methods applied to the certification of critical software: the verification of the compiler guarantees that the safety properties proved on the source code hold for the executable compiled code as well. (Much extended and revised version of \cite{Leroy-compcert-06}.)}, Author = {Xavier Leroy}, Date-Added = {2008-10-07 15:08:56 +0200}, Date-Modified = {2008-10-07 15:08:56 +0200}, Month = JUL, Note = {Submitted}, Title = {A formally verified compiler back-end}, Url = {http://gallium.inria.fr/~xleroy/publi/compcert-backend.pdf}, Xtopic = {compcert}, Year = 2008, Bdsk-Url-1 = {http://gallium.inria.fr/~xleroy/publi/compcert-backend.pdf}} @inproceedings{DBLP:conf/fm/BlazyDL06, Author = {Sandrine Blazy and Zaynah Dargaye and Xavier Leroy}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {FM}, Crossref = {DBLP:conf/fm/2006}, Date-Added = {2008-10-07 14:57:27 +0200}, Date-Modified = {2008-10-07 14:57:44 +0200}, Ee = {http://dx.doi.org/10.1007/11813040_31}, Pages = {460-475}, Pdf = {http://gallium.inria.fr/~dargaye/publications/cfront.pdf}, Title = {Formal Verification of a C Compiler Front-End}, Year = {2006}} @proceedings{DBLP:conf/fm/2006, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {FM}, Date-Added = {2008-10-07 14:57:27 +0200}, Date-Modified = {2008-10-07 14:57:27 +0200}, Editor = {Jayadev Misra and Tobias Nipkow and Emil Sekerinski}, Isbn = {3-540-37215-6}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {FM 2006: Formal Methods, 14th International Symposium on Formal Methods, Hamilton, Canada, August 21-27, 2006, Proceedings}, Volume = {4085}, Year = {2006}} @mastersthesis{glondu:master, Author = {St{\'e}phane Glondu}, Date-Added = {2008-10-07 14:53:55 +0200}, Date-Modified = {2008-10-07 14:54:48 +0200}, School = {Universit{\'e} Paris 7}, Title = {Garantie formelle de correction pour l'extraction Coq}, Url = {http://stephane.glondu.net/rapport2007.pdf}, Year = {2007}, Bdsk-Url-1 = {http://stephane.glondu.net/rapport2007.pdf}} @misc{compcert, Author = {Gallium and Marelle and CEDRIC and PPS}, Date-Added = {2008-10-07 14:47:44 +0200}, Date-Modified = {2008-10-07 15:23:24 +0200}, Howpublished = {Compilers You Can \emph{Formally} Trust}, Title = {The {C}omp{C}ert project}, Url = {http://compcert.inria.fr/index.html}, Year = {2008}, 
Bdsk-Url-1 = {http://compcert.inria.fr/index.html}} @inproceedings{CPwTP-icfp05, Abstract = {{ Applied Type System (ATS) is recently proposed as a framework for designing and formalizing (advanced) type systems in support of practical programming. In ATS, the definition of type equality involves a constraint relation, which may or may not be algorithmically decidable. To support practical programming, we adopted a design in the past that imposes certain restrictions on the syntactic form of constraints so that some effective means can be found for solving constraints automatically. Evidently, this is a rather {\em ad hoc} design in its nature. In this paper, we rectify the situation by presenting a fundamentally different design, which we claim to be both novel and practical. Instead of imposing syntactical restrictions on constraints, we provide a means for the programmer to construct proofs that attest to the validity of constraints. In particular, we are to accommodate a programming paradigm that enables the programmer to combine programming with theorem proving. Also we present some concrete examples in support of the practicality of this design. }}, Address = {Tallinn, Estonia}, Author = {Chiyan Chen and Hongwei Xi}, Booktitle = {Proceedings of the Tenth ACM SIGPLAN International Conference on Functional Programming}, Date-Added = {2008-10-07 14:20:56 +0200}, Date-Modified = {2008-10-07 14:21:20 +0200}, Month = {September}, Pages = {66--77}, Pdf = {http://www.cs.bu.edu/~hwxi/academic/papers/icfp05.pdf}, Read = {Oui}, Title = {{Combining Programming with Theorem Proving}}, Year = 2005} @article{Barthe:2006gp, Abstract = {We present a practical tool for defining and proving properties of recursive functions in the Coq proof assistant. The tool generates from pseudo-code the graph of the intended function as an inductive relation. Then it proves that the relation actually represents a function, which is by construction the function that we are trying to define. Then, we generate induction and inversion principles, and a fixpoint equation for proving other properties of the function. Our tool builds upon state-of-the-art techniques for defining recursive functions, and can also be used to generate executable functions from inductive descriptions of their graph. We illustrate the benefits of our tool on two case studies.
}, Author = {Barthe, Gilles and Forest, Julien and Pichardie, David and Rusu, Vlad}, Date-Added = {2008-10-07 00:22:10 +0200}, Date-Modified = {2008-10-07 11:57:06 +0200}, Doi = {10.1007/11737414_9}, Journal = {Functional and Logic Programming}, Pages = {114--129}, Pdf = {http://www.irisa.fr/vertecs/Publis/Ps/2006-FLOPS.pdf}, Title = {{D}efining and {R}easoning {A}bout {R}ecursive {F}unctions: {A} {P}ractical {T}ool for the {C}oq {P}roof {A}ssistant}, Year = {2006}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/11737414_9}} @inproceedings{DBLP:conf/tphol/Barras00, Author = {Bruno Barras}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TPHOLs}, Crossref = {DBLP:conf/tphol/2000}, Date-Added = {2008-10-07 00:13:39 +0200}, Date-Modified = {2008-10-07 11:58:31 +0200}, Pages = {17-37}, Title = {Programming and {C}omputing in {HOL}}, Url = {http://www.springerlink.com/content/m115j378141x6234/}, Year = {2000}, Bdsk-Url-1 = {http://www.springerlink.com/content/m115j378141x6234/}} @inproceedings{DBLP:conf/popl/McKinna06, Author = {James McKinna}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {POPL}, Crossref = {DBLP:conf/popl/2006}, Date-Added = {2008-10-06 23:34:36 +0200}, Date-Modified = {2008-10-06 23:34:36 +0200}, Ee = {http://doi.acm.org/10.1145/1111037.1111038}, Pages = {1}, Title = {Why dependent types matter}, Year = {2006}} @proceedings{DBLP:conf/popl/2006, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {POPL}, Date-Added = {2008-10-06 23:34:36 +0200}, Date-Modified = {2008-10-06 23:34:36 +0200}, Editor = {J. Gregory Morrisett and Simon L. Peyton Jones}, Isbn = {1-59593-027-2}, Publisher = {ACM}, Title = {Proceedings of the 33rd ACM SIGPLAN-SIGACT Symposium on Principles of Programming Languages, POPL 2006, Charleston, South Carolina, USA, January 11-13, 2006}, Year = {2006}} @inproceedings{DBLP:conf/mfcs/McKinnaB93, Author = {James McKinna and Rod M. Burstall}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {MFCS}, Crossref = {DBLP:conf/mfcs/1993}, Date-Added = {2008-10-06 23:31:58 +0200}, Date-Modified = {2008-10-06 23:32:23 +0200}, Pages = {32-67}, Title = {Deliverables: A Categorial Approach to Program Development in Type Theory}, Url = {http://www.lfcs.inf.ed.ac.uk/reports/92/ECS-LFCS-92-242/ECS-LFCS-92-242.ps}, Year = {1993}, Bdsk-Url-1 = {http://www.lfcs.inf.ed.ac.uk/reports/92/ECS-LFCS-92-242/ECS-LFCS-92-242.ps}} @proceedings{DBLP:conf/mfcs/1993, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {MFCS}, Date-Added = {2008-10-06 23:31:58 +0200}, Date-Modified = {2008-10-06 23:31:58 +0200}, Editor = {Andrzej M. 
Borzyszkowski and Stefan Sokolowski}, Isbn = {3-540-57182-5}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Mathematical Foundations of Computer Science 1993, 18th International Symposium, MFCS'93, Gdansk, Poland, August 30 - September 3, 1993, Proceedings}, Volume = {711}, Year = {1993}} @inproceedings{DBLP:conf/types/McBride96, Author = {Conor McBride}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Crossref = {DBLP:conf/types/1996}, Date-Added = {2008-10-06 23:26:41 +0200}, Date-Modified = {2008-10-07 14:14:07 +0200}, Pages = {236-253}, Title = {Inverting {I}nductively {D}efined {R}elations in {LEGO}}, Url = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.41.7471&rep=rep1&type=pdf}, Year = {1996}, Bdsk-Url-1 = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.41.7471&rep=rep1&type=pdf}} @inproceedings{MartinLofP:ittpp, Author = {Per Martin-L{\"o}f}, Booktitle = {Logic Colloquium 1973}, Date-Added = {2008-10-06 22:54:14 +0200}, Date-Modified = {2008-10-06 23:26:31 +0200}, Editor = {Rose, H.E. and Shepherdson, J.C.}, Pages = {73--118}, Publisher = {North-Holland}, Title = {An intuitionistic theory of types: Predicative part}, Year = {1975}} @article{DowekHardinKirchner, Author = {Gilles Dowek and Th{\'e}r{\`e}se Hardin and Claude Kirchner}, Date-Added = {2008-10-06 19:47:52 +0200}, Date-Modified = {2008-10-13 14:38:23 +0200}, Journal = jar, Pages = {33--72}, Title = {Theorem Proving Modulo}, Topics = {logical}, Url = {http://www.inria.fr/rrrt/rr-3400.html}, Volume = 31, Year = 2003, Bdsk-Url-1 = {http://www.inria.fr/rrrt/rr-3400.html}} @phdthesis{dowek91phd, Author = {Gilles Dowek}, Date-Added = {2008-10-06 19:45:27 +0200}, Date-Modified = {2008-10-07 14:09:44 +0200}, School = {Universit\'e Paris 7}, Title = {D{\'e}monstration automatique dans le calcul des constructions}, Type = thesedoctorat, Url = {http://cat.inist.fr/?aModele=afficheN&cpsidt=151166}, Year = 1991, Bdsk-Url-1 = {http://cat.inist.fr/?aModele=afficheN&cpsidt=151166}} @inproceedings{DBLP:conf/icalp/Gimenez98, Author = {Eduardo Gim{\'e}nez}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {ICALP}, Crossref = {DBLP:conf/icalp/1998}, Date-Added = {2008-10-06 14:57:05 +0200}, Date-Modified = {2008-10-06 14:58:04 +0200}, Ee = {http://link.springer.de/link/service/series/0558/bibs/1443/14430397.htm}, Pages = {397-408}, Pdf = {https://eprints.kfupm.edu.sa/67190/1/67190.pdf}, Title = {Structural Recursive Definitions in Type Theory}, Year = {1998}} @proceedings{DBLP:conf/icalp/1998, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {ICALP}, Date-Added = {2008-10-06 14:57:05 +0200}, Date-Modified = {2008-10-06 14:57:05 +0200}, Editor = {Kim Guldstrand Larsen and Sven Skyum and Glynn Winskel}, Isbn = {3-540-64781-3}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Automata, Languages and Programming, 25th International Colloquium, ICALP'98, Aalborg, Denmark, July 13-17, 1998, Proceedings}, Volume = {1443}, Year = {1998}} @phdthesis{boutin97These, Author = {Samuel Boutin}, Date-Added = {2008-10-05 17:56:36 +0200}, Date-Modified = {2008-10-17 00:51:41 +0200}, Month = {April}, Read = {Oui}, School = {Paris 7}, Title = {R{\'e}flexions sur les quotients}, Type = {th{\`e}se d'Universit{\'e}}, Url = {http://pauillac.inria.fr/~boutin/publi_w/these.ps.gz}, Year = {1997}, Bdsk-File-1 =
{YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxApLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL3RoZXNlLWJvdXRpbi5wZGZPEQGKAAAAAAGKAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADGq5gWSCsAAAA2d8gQdGhlc2UtYm91dGluLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/dxMk45hIAAAAAAAAAAAADAAMAAAkgAAAAAAAAAAAAAAAAAAAABnBhcGVycwAQAAgAAMare/YAAAARAAgAAMk42AIAAAABABAANnfIAA7btAAH+TgAAJDnAAIAN01hY2ludG9zaCBIRDpVc2VyczptYXQ6cmVzZWFyY2g6cGFwZXJzOnRoZXNlLWJvdXRpbi5wZGYAAA4AIgAQAHQAaABlAHMAZQAtAGIAbwB1AHQAaQBuAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgAqVXNlcnMvbWF0L3Jlc2VhcmNoL3BhcGVycy90aGVzZS1ib3V0aW4ucGRmABMAAS8AABUAAgAK//8AAAAIAA0AGgAkAFAAAAAAAAACAQAAAAAAAAAFAAAAAAAAAAAAAAAAAAAB3g==}, Bdsk-Url-1 = {http://pauillac.inria.fr/~boutin/publi_w/these.ps.gz}} @inproceedings{DBLP:conf/types/CornesT95, Author = {Cristina Cornes and Delphine Terrasse}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Crossref = {DBLP:conf/types/1995}, Date-Added = {2008-10-05 17:05:24 +0200}, Date-Modified = {2008-10-07 11:58:04 +0200}, Pages = {85-104}, Title = {Automating {I}nversion of {I}nductive {P}redicates in {C}oq}, Url = {http://www.fing.edu.uy/~cornes/Papers/inversion.ps.gz}, Year = {1995}, Bdsk-Url-1 = {http://www.fing.edu.uy/~cornes/Papers/inversion.ps.gz}} @manual{coq:refman:8.2, Author = {{The {Coq} development team}}, Date-Added = {2008-10-05 12:08:54 +0200}, Date-Modified = {2008-10-05 12:09:56 +0200}, Organization = {INRIA}, Publisher = {INRIA TypiCal}, Read = {Oui}, Title = {Coq 8.2 Reference Manual}, Url = {http://coq.inria.fr/V8.2beta4/doc/refman/html/}, Year = {2008}, Bdsk-Url-1 = {http://coq.inria.fr/V8.2beta4/doc/refman/html/Reference-Manual027.html}, Bdsk-Url-2 = {http://coq.inria.fr/V8.2beta4/doc/refman/html/}} @misc{coquand92baastad, Author = {Thierry Coquand}, Date-Added = {2008-10-04 23:17:56 +0200}, Date-Modified = {2008-10-13 14:37:16 +0200}, Note = {Proceedings of the Workshop on Logical Frameworks}, Title = {Pattern Matching with Dependent Types}, Url = {http://www.cs.chalmers.se/~coquand/pattern.ps}, Year = 1992, Bdsk-Url-1 = {http://www.cs.chalmers.se/~coquand/pattern.ps}} @inproceedings{coquand86, Address = {HC 145}, Author = {Thierry Coquand}, Crossref = {lics86}, Date-Added = {2008-10-04 23:17:33 +0200}, Date-Modified = {2008-10-04 23:17:33 +0200}, Title = {An analysis of {G}irard's Paradox}, Topics = {logic}, Year = 1986} @inproceedings{DBLP:conf/csl/BlanquiJS07, Author = {Fr{\'e}d{\'e}ric Blanqui and Jean-Pierre Jouannaud and Pierre-Yves Strub}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {CSL}, Crossref = {DBLP:conf/csl/2007}, Date-Added = {2008-10-04 15:45:48 +0200}, Date-Modified = {2008-10-04 15:45:48 +0200}, Ee = {http://dx.doi.org/10.1007/978-3-540-74915-8_26}, Pages = {328-342}, Title = {Building Decision Procedures in the Calculus of Inductive Constructions}, Year = {2007}} @proceedings{DBLP:conf/csl/2007, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {CSL}, Date-Added = {2008-10-04 15:45:48 +0200}, Date-Modified = {2008-10-04 15:45:48 +0200}, Editor = {Jacques Duparc and Thomas A. 
Henzinger}, Isbn = {978-3-540-74914-1}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Computer Science Logic, 21st International Workshop, CSL 2007, 16th Annual Conference of the EACSL, Lausanne, Switzerland, September 11-15, 2007, Proceedings}, Volume = {4646}, Year = {2007}} @phdthesis{GimenezPhD, Author = {Carlos Eduardo Gim{\'e}nez}, Date-Added = {2008-10-04 15:25:18 +0200}, Date-Modified = {2019-10-18 14:55:43 +0200}, Month = {D{\'e}cembre}, Read = {Oui}, School = {Ecole Normale Sup\'{e}rieure de Lyon}, Title = {Un calcul de constructions infinies et son application {\`a} la v{\'e}rification de syst{\`e}mes communicants}, Url = {ftp://ftp.ens-lyon.fr/pub/LIP/Rapports/PhD/PhD96-11.ps.Z}, Year = {1996}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxArLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL3RoZXNpcy1naW1lbmV6LnBkZk8RAZIAAAAAAZIAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarmBZIKwAAADZ3yBJ0aGVzaXMtZ2ltZW5lei5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP+RoyTjn2gAAAAAAAAAAAAMAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAyTjZygAAAAEAEAA2d8gADtu0AAf5OAAAkOcAAgA5TWFjaW50b3NoIEhEOlVzZXJzOm1hdDpyZXNlYXJjaDpwYXBlcnM6dGhlc2lzLWdpbWVuZXoucGRmAAAOACYAEgB0AGgAZQBzAGkAcwAtAGcAaQBtAGUAbgBlAHoALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASACxVc2Vycy9tYXQvcmVzZWFyY2gvcGFwZXJzL3RoZXNpcy1naW1lbmV6LnBkZgATAAEvAAAVAAIACv//AAAACAANABoAJABSAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAeg=}, Bdsk-Url-1 = {ftp://ftp.inria.fr/INRIA/LogiCal/Eduardo.Gimenez/thesis.ps.gz}} @inproceedings{Let2008, Author = {Pierre Letouzey}, Booktitle = {{Logic and Theory of Algorithms, Fourth Conference on Computability in Europe, CiE 2008}}, Date-Added = {2008-10-02 13:00:50 +0200}, Date-Modified = {2008-10-13 14:42:56 +0200}, Editor = {A. Beckmann and C. Dimitracopoulos and B. L{\"o}we}, Pdf = {http://www.pps.jussieu.fr/~letouzey/download/letouzey_extr_cie08.pdf}, Publisher = {Springer-Verlag}, Read = {Oui}, Series = {Lecture Notes in Computer Science}, Title = {{Coq Extraction, an Overview}}, Volume = 5028, Year = 2008} @phdthesis{hofmann95These, Author = {Martin Hofmann}, Date-Added = {2008-10-02 12:43:18 +0200}, Date-Modified = {2008-10-13 14:42:34 +0200}, School = {Edinburgh university}, Title = {Extensional concepts in intensional type theory}, Type = {PhD thesis}, Url = {http://www.lfcs.inf.ed.ac.uk/reports/95/ECS-LFCS-95-327/}, Year = {1995}, Bdsk-Url-1 = {http://www.lfcs.inf.ed.ac.uk/reports/95/ECS-LFCS-95-327/}} @article{hoffmann82toplas, Author = {C. M. Hoffmann and Michael J. 
O'Donnell}, Date-Added = {2008-10-02 11:38:43 +0200}, Date-Modified = {2008-10-02 11:38:43 +0200}, Journal = {Transactions on Programming Languages and Systems}, Month = jan, Number = 1, Pages = {83--112}, Title = {Programming with equations}, Volume = 4, Year = 1982} @inproceedings{sheard-04, Author = {Tim Sheard}, Booktitle = oopsla, Date-Added = {2008-10-01 16:42:17 +0200}, Date-Modified = {2008-10-01 16:42:17 +0200}, Month = oct, Pages = {116--119}, Title = {Languages of the Future}, Url = {http://doi.acm.org/10.1145/1028664.1028711}, Year = {2004}, Bdsk-Url-1 = {http://doi.acm.org/10.1145/1028664.1028711}} @misc{ICFPC07, Author = {ICFPC}, Date-Added = {2008-10-01 16:34:01 +0200}, Date-Modified = {2008-10-04 22:42:52 +0200}, Month = {juillet}, Note = {Organized by the Software Technology group at Utrecht University}, Title = {The 10th ICFP Programming Contest}, Url = {http://save-endo.cs.uu.nl/}, Year = {2007}, Bdsk-Url-1 = {http://save-endo.cs.uu.nl/}} @article{DBLP:journals/jsyml/Dybjer00, Author = {Peter Dybjer}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Date-Added = {2008-10-01 16:32:51 +0200}, Date-Modified = {2008-10-01 16:32:51 +0200}, Journal = {J. Symb. Log.}, Number = {2}, Pages = {525-549}, Title = {A General Formulation of Simultaneous Inductive-Recursive Definitions in Type Theory}, Volume = {65}, Year = {2000}} @inproceedings{okasaki-96, Author = {Chris Okasaki}, Booktitle = icfp, Date-Added = {2008-10-01 16:29:00 +0200}, Date-Modified = {2008-10-01 16:29:00 +0200}, Month = may, Pages = {62--72}, Title = {The role of lazy evaluation in amortized data structures}, Url = {http://www.eecs.usma.edu/webs/people/okasaki/icfp96.ps}, Year = {1996}, Bdsk-Url-1 = {http://www.eecs.usma.edu/webs/people/okasaki/icfp96.ps}} @techreport{okasaki-phd-96, Author = {Chris Okasaki}, Date-Added = {2008-10-01 16:28:06 +0200}, Date-Modified = {2008-10-01 16:28:06 +0200}, Institution = {School of Computer Science, Carnegie Mellon University}, Month = sep, Number = {CMU-CS-96-177}, Title = {Purely Functional Data Structures}, Url = {http://www.cs.cmu.edu/~rwh/theses/okasaki.pdf}, Year = {1996}, Bdsk-Url-1 = {http://www.cs.cmu.edu/~rwh/theses/okasaki.pdf}} @inproceedings{FilLet2004, Author = {Jean-Christophe Filli{\^a}tre and Pierre Letouzey}, Booktitle = {{European Symposium on Programing, ESOP'2004}}, Date-Added = {2008-10-01 16:23:17 +0200}, Date-Modified = {2008-10-13 14:37:55 +0200}, Editor = {D. Schmidt}, Publisher = {Springer-Verlag}, Series = {Lecture Notes in Computer Science}, Title = {{Functors for Proofs and Programs}}, Url = {http://www.lri.fr/~filliatr/ftp/publis/fpp.ps.gz}, Volume = 2986, Year = 2004, Bdsk-Url-1 = {http://www.lri.fr/~filliatr/ftp/publis/fpp.ps.gz}} @inproceedings{DBLP:conf/fossacs/BarrasB08, Author = {Bruno Barras and Bruno Bernardo}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {FoSSaCS}, Crossref = {DBLP:conf/fossacs/2008}, Date-Added = {2008-10-01 16:07:07 +0200}, Date-Modified = {2008-10-01 16:07:07 +0200}, Ee = {http://dx.doi.org/10.1007/978-3-540-78499-9_26}, Pages = {365-379}, Title = {The Implicit Calculus of Constructions as a Programming Language with Dependent Types}, Year = {2008}} @proceedings{DBLP:conf/fossacs/2008, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {FoSSaCS}, Date-Added = {2008-10-01 16:07:07 +0200}, Date-Modified = {2008-10-01 16:07:07 +0200}, Editor = {Roberto M. 
Amadio}, Isbn = {978-3-540-78497-5}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Foundations of Software Science and Computational Structures, 11th International Conference, FOSSACS 2008, Held as Part of the Joint European Conferences on Theory and Practice of Software, ETAPS 2008, Budapest, Hungary, March 29 - April 6, 2008. Proceedings}, Volume = {4962}, Year = {2008}} @article{DBLP:journals/spe/BoehmAP95, Author = {Hans-Juergen Boehm and Russell R. Atkinson and Michael F. Plass}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Date-Added = {2008-10-01 15:58:12 +0200}, Date-Modified = {2008-10-01 15:58:27 +0200}, Journal = {Softw., Pract. Exper.}, Number = {12}, Pages = {1315-1330}, Pdf = {http://www.cs.ubc.ca/local/reading/proceedings/spe91-95/spe/vol25/issue12/spe986.pdf}, Title = {Ropes: An Alternative to Strings}, Volume = {25}, Year = {1995}} @inproceedings{CruLet2005, Author = {Lu\'{\i}s Cruz-Filipe and Pierre Letouzey}, Booktitle = {{12th Symposium on the Integration of Symbolic Computation and Mechanized Reasoning, Calculemus'2005}}, Date-Added = {2008-10-01 15:38:11 +0200}, Date-Modified = {2008-10-13 14:36:51 +0200}, Note = {{To appear}}, Pdf = {http://www.pps.jussieu.fr/~letouzey/download/lcf_pl_extr05.pdf}, Title = {{A Large-Scale Experiment in Executing Extracted Programs}}, Year = 2005} @inproceedings{DBLP:conf/lics/HofmannS94, Author = {Martin Hofmann and Thomas Streicher}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {LICS}, Crossref = {DBLP:conf/lics/LICS9}, Date-Added = {2008-09-28 22:17:46 +0200}, Date-Modified = {2008-10-02 12:49:27 +0200}, Pages = {208-212}, Read = {Oui}, Title = {A Groupoid Model Refutes Uniqueness of Identity Proofs}, Url = {http://www.tcs.informatik.uni-muenchen.de/~mhofmann/SH.dvi.gz}, Year = {1994}, Bdsk-Url-1 = {http://www.tcs.informatik.uni-muenchen.de/~mhofmann/SH.dvi.gz}} @book{Streicher91, Author = {Thomas Streicher}, Date-Added = {2008-09-28 22:15:52 +0200}, Date-Modified = {2008-09-28 22:15:52 +0200}, Publisher = springer, Title = {Semantics of Type Theory}, Year = 1991} @unpublished{ghani-hosc07, Author = {Neil Ghani and Patricia Johann}, Date-Added = {2008-09-28 13:16:26 +0200}, Date-Modified = {2008-10-01 16:47:05 +0200}, Note = {Submitted to the Journal of Higher Order and Symbolic Computation}, Pdf = {http://www.cs.nott.ac.uk/~nxg/papers/ghani-hosc08.pdf}, Title = {Programming with Nested Types: A Principled Approach.}, Year = {2007}} @inproceedings{ghani-popl07, Author = {Neil Ghani and Patricia Johann}, Booktitle = {Proceedings of Principles and Programming Languages (POPL), 2008}, Date-Added = {2008-09-28 13:16:03 +0200}, Date-Modified = {2008-09-28 13:17:25 +0200}, Pages = {297-308}, Pdf = {http://www.cs.nott.ac.uk/~nxg/papers/ghani-popl08.pdf}, Read = {Oui}, Title = {Foundations for Structured Programming with GADTs}, Year = {2008}} @inbook{sozeau.Coq/manual/setoid, Author = {Matthieu Sozeau}, Chapter = {User defined equalities and relations}, Date-Added = {2008-09-26 10:31:45 +0200}, Date-Modified = {2018-12-20 21:14:38 +0100}, Publisher = {Inria}, Read = {Oui}, Title = {Coq Reference Manual}, Url = {http://coq.inria.fr/V8.2rc2/doc/refman/html/Reference-Manual029.html}, Year = {2008--2018}, Bdsk-Url-1 = {http://coq.inria.fr/V8.2beta4/doc/refman/html/Reference-Manual029.html}, Bdsk-Url-2 = {http://coq.inria.fr/V8.2rc2/doc/refman/html/Reference-Manual029.html}} @inbook{sozeau.Coq/manual/Program, Author = {Matthieu Sozeau}, Chapter = {Program}, Date-Added = {2008-09-26 10:27:18 +0200}, 
Date-Modified = {2018-12-20 21:14:15 +0100}, Publisher = {Inria}, Read = {Oui}, Title = {Coq Reference Manual}, Url = {http://coq.inria.fr/V8.2rc2/doc/refman/html/Reference-Manual027.html}, Year = {2008--2018}, Bdsk-Url-1 = {http://coq.inria.fr/V8.2beta4/doc/refman/html/Reference-Manual027.html}, Bdsk-Url-2 = {http://coq.inria.fr/V8.2rc2/doc/refman/html/Reference-Manual027.html}} @inbook{sozeau.Coq/manual/typeclasses, Author = {Matthieu Sozeau}, Chapter = {Type Classes}, Date-Added = {2008-09-26 10:18:40 +0200}, Date-Modified = {2018-12-20 21:14:30 +0100}, Publisher = {Inria}, Read = {Oui}, Title = {Coq Reference Manual}, Url = {http://coq.inria.fr/doc/refman/html/Reference-Manual024.html}, Year = {2008--2018}, Bdsk-Url-1 = {http://coq.inria.fr/V8.2beta4/doc/refman/html/Reference-Manual024.html}, Bdsk-Url-2 = {http://coq.inria.fr/V8.2rc2/doc/refman/html/Reference-Manual024.html}, Bdsk-Url-3 = {http://coq.inria.fr/doc/refman/html/Reference-Manual024.html}} @article{swierstra:la-carte, Author = {Wouter Swierstra}, Date-Added = {2008-09-11 15:54:19 +0200}, Date-Modified = {2008-09-11 15:54:41 +0200}, Journal = {Journal of Functional Programming}, Month = {July}, Number = {4}, Pages = {423--436}, Pdf = {http://www.cs.nott.ac.uk/~wss/Publications/DataTypesALaCarte.pdf}, Read = {Oui}, Title = {Data types \`a la carte}, Volume = {18}, Year = {2008}} @inproceedings{altenkirch-mcbride-wierstra:ott-now, Address = {Freiburg, Germany}, Author = {Thorsten Altenkirch and Conor McBride and Wouter Swierstra}, Booktitle = {PLPV'07}, Date-Added = {2008-09-11 15:53:26 +0200}, Date-Modified = {2014-02-10 14:17:15 +0000}, Pdf = {http://www.cs.nott.ac.uk/~txa/publ/obseqnow.pdf}, Read = {Oui}, Title = {Observational Equality, Now!}, Year = {2007}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAlLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL09ic0VxTm93LnBkZk8RAXoAAAAAAXoAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarmBZIKwAAADZ3yAxPYnNFcU5vdy5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP9ZuyTjkrgAAAAAAAAAAAAMAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAyTjWngAAAAEAEAA2d8gADtu0AAf5OAAAkOcAAgAzTWFjaW50b3NoIEhEOlVzZXJzOm1hdDpyZXNlYXJjaDpwYXBlcnM6T2JzRXFOb3cucGRmAAAOABoADABPAGIAcwBFAHEATgBvAHcALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASACZVc2Vycy9tYXQvcmVzZWFyY2gvcGFwZXJzL09ic0VxTm93LnBkZgATAAEvAAAVAAIACv//AAAACAANABoAJABMAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAco=}} @phdthesis{ParentPhd, Author = {Catherine Parent}, Date-Added = {2008-09-10 22:42:06 +0200}, Date-Modified = {2008-09-10 22:44:10 +0200}, Read = {Oui}, School = {ENS Lyon}, Title = {Synth{\`e}se de preuves de programmes dans le Calcul des Constructions Inductives}, Url = {http://www-verimag.imag.fr/~parent/These/main.ps}, Year = {1995}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAgLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL21haW4ucHNPEQFmAAAAAAFmAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADGq5gWSCsAAAA2d8gHbWFpbi5wcwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/cMMk45dMAAAAAAAAAAAADAAMAAAkgAAAAAAAAAAAAAAAAAAAABnBhcGVycwAQAAgAAMare/YAAAARAAgAAMk418MAAAABABAANnfIAA7btAAH+TgAAJDnAAIALk1hY2ludG9zaCBIRDpVc2VyczptYXQ6cmVzZWFyY2g6cGFwZXJzOm1haW4ucHMADgAQAAcAbQBhAGkAbgAuAHAAcwAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASACFVc2Vycy9tYXQvcmVzZWFyY2gvcGFwZXJzL21haW4ucHMAABMAAS8AABUAAgAK//8AAAAIAA0AGgAkAEcAAAAAAAACAQAAAAAAAAAFAAAAAAAAAAAAAAAAAAABsQ==}, Bdsk-Url-1 = {http://www-verimag.imag.fr/~parent/These/main.ps}} @phdthesis{LetouzeyPhd, Author = {Pierre Letouzey}, 
Date-Added = {2008-09-10 22:10:33 +0200}, Date-Modified = {2008-10-01 15:39:49 +0200}, Month = jul, School = ups, Title = {Programmation fonctionnelle certifi{\'e}e: l'extraction de programmes dans l'assistant {Coq}}, Topics = {team}, Type = these, Type_Publi = {these}, Url = {http://www.pps.jussieu.fr/~letouzey/download/these_letouzey.pdf}, Year = 2004, Bdsk-Url-1 = {http://www.pps.jussieu.fr/~letouzey/download/these_letouzey.pdf}} @article{journals/toplas/Castagna95, Address = {New York, NY, USA}, Author = {Giuseppe Castagna}, Date-Added = {2008-09-10 21:54:03 +0200}, Date-Modified = {2008-09-10 22:14:05 +0200}, Doi = {http://doi.acm.org/10.1145/203095.203096}, Issn = {0164-0925}, Journal = {ACM Trans. Program. Lang. Syst.}, Number = {3}, Pages = {431--447}, Publisher = {ACM}, Read = {Oui}, Title = {Covariance and contravariance: conflict without a cause}, Url = {http://portal.acm.org/ft_gateway.cfm?id=203096&type=pdf&coll=GUIDE&dl=GUIDE&CFID=2321643&CFTOKEN=26586263}, Volume = {17}, Year = {1995}, Bdsk-Url-1 = {http://portal.acm.org/ft_gateway.cfm?id=203096&type=pdf&coll=GUIDE&dl=GUIDE&CFID=2321643&CFTOKEN=26586263}, Bdsk-Url-2 = {http://doi.acm.org/10.1145/203095.203096}} @inproceedings{augustsson98cayenne, Address = {Baltimore, Maryland, United States}, Author = {Lennart Augustsson}, Booktitle = {ICFP '98: Proceedings of the third ACM SIGPLAN international conference on Functional programming}, Date-Added = {2008-09-10 20:59:52 +0200}, Date-Modified = {2008-09-10 21:01:15 +0200}, Doi = {http://doi.acm.org/10.1145/289423.289451}, Isbn = {1-58113-024-4}, Pages = {239--250}, Publisher = {ACM}, Title = {Cayenne --- a language with dependent types}, Url = {http://portal.acm.org/ft_gateway.cfm?id=289451&type=pdf&coll=GUIDE&dl=GUIDE&CFID=2351448&CFTOKEN=28940269}, Year = {1998}, Bdsk-Url-1 = {http://portal.acm.org/ft_gateway.cfm?id=289451&type=pdf&coll=GUIDE&dl=GUIDE&CFID=2351448&CFTOKEN=28940269}, Bdsk-Url-2 = {http://doi.acm.org/10.1145/289423.289451}} @inproceedings{DBLP:conf/types/GianantonioM02, Author = {Pietro Di Gianantonio and Marino Miculan}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Crossref = {DBLP:conf/types/2002}, Date-Added = {2008-09-09 23:12:48 +0200}, Date-Modified = {2008-09-09 23:12:48 +0200}, Ee = {http://link.springer.de/link/service/series/0558/bibs/2646/26460148.htm}, Pages = {148-161}, Title = {A Unifying Approach to Recursive and Co-recursive Definitions}, Year = {2002}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAqLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzLzEwLjEuMS44LjY0NjEucGRmTxEBjgAAAAABjgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAxquYFkgrAAAANnfIETEwLjEuMS44LjY0NjEucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/1KzJOORnAAAAAAAAAAAAAwADAAAJIAAAAAAAAAAAAAAAAAAAAAZwYXBlcnMAEAAIAADGq3v2AAAAEQAIAADJONZXAAAAAQAQADZ3yAAO27QAB/k4AACQ5wACADhNYWNpbnRvc2ggSEQ6VXNlcnM6bWF0OnJlc2VhcmNoOnBhcGVyczoxMC4xLjEuOC42NDYxLnBkZgAOACQAEQAxADAALgAxAC4AMQAuADgALgA2ADQANgAxAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgArVXNlcnMvbWF0L3Jlc2VhcmNoL3BhcGVycy8xMC4xLjEuOC42NDYxLnBkZgAAEwABLwAAFQACAAr//wAAAAgADQAaACQAUQAAAAAAAAIBAAAAAAAAAAUAAAAAAAAAAAAAAAAAAAHj}} @misc{sozeau.Coq/order, Author = {Matthieu Sozeau}, Date-Added = {2008-09-09 16:47:41 +0200}, Date-Modified = {2008-09-09 16:49:36 +0200}, Howpublished = {Coq development}, Read = {Oui}, Title = {{O}rder theory using type classes in {C}oq}, Web = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/coq}, Year = {2008}} @misc{fulger:labels, Address = {Tallinn, 
Estonia}, Author = {Diana Fulger}, Date-Added = {2008-09-09 16:01:40 +0200}, Date-Modified = {2008-09-09 16:03:30 +0200}, Howpublished = {Talk at the EffTT Small TYPES Workshop}, Month = {December}, Pdf = {http://cs.ioc.ee/efftt/fulger-slides.pdf}, Title = {Reasoning about effects: tree labelling}, Year = {2007}} @inproceedings{REYNOLDS83, Author = {Reynolds, John C.}, Booktitle = {Information Processing 83, Paris, France}, Checked = {checked January 1988}, Date-Added = {2008-09-09 11:10:27 +0200}, Date-Modified = {2008-09-10 20:32:56 +0200}, Editor = {R. E. A. Mason}, Fulladdress = {Amsterdam}, Fullpublisher = {Elsevier Science Publishers B. V. (North-Holland)}, Pages = {513--523}, Publisher = elsevier, Source = {reynolds bib, with corrections from physical copy}, Title = {Types, Abstraction, and Parametric Polymorphism}, Url = {http://mathgate.info/cebrown/notes/reynolds83.php}, Year = {1983}, Bdsk-Url-1 = {http://mathgate.info/cebrown/notes/reynolds83.php}} @article{mcbride:concon, Abstract = {We present four constructions for standard equipment which can be generated for every inductive datatype: case analysis, structural recursion, no confusion, acyclicity. Our constructions follow a two-level approach---they require less work than the standard techniques which inspired them [11,8]. Moreover, given a suitably heterogeneous notion of equality, they extend without difficulty to inductive families of datatypes. These constructions are vital components of the translation from dependently typed programs in pattern matching style [7] to the equivalent programs expressed in terms of induction principles [21] and as such play a crucial behind-the-scenes r{\^o}le in Epigram [25].
}, Author = {McBride, Conor and Goguen, Healfdene and McKinna, James}, Date-Added = {2008-09-07 01:31:03 +0200}, Date-Modified = {2008-10-07 01:22:26 +0200}, Doi = {10.1007/11617990_12}, Journal = {Types for Proofs and Programs}, Pages = {186--200}, Pdf = {http://www.e-pig.org/downloads/concon.pdf}, Read = {Oui}, Title = {{A} {F}ew {C}onstructions on {C}onstructors}, Year = {2004}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAjLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL2NvbmNvbi5wZGZPEQFyAAAAAAFyAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADGq5gWSCsAAAA2d8gKY29uY29uLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/XU8k45MsAAAAAAAAAAAADAAMAAAkgAAAAAAAAAAAAAAAAAAAABnBhcGVycwAQAAgAAMare/YAAAARAAgAAMk41rsAAAABABAANnfIAA7btAAH+TgAAJDnAAIAMU1hY2ludG9zaCBIRDpVc2VyczptYXQ6cmVzZWFyY2g6cGFwZXJzOmNvbmNvbi5wZGYAAA4AFgAKAGMAbwBuAGMAbwBuAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgAkVXNlcnMvbWF0L3Jlc2VhcmNoL3BhcGVycy9jb25jb24ucGRmABMAAS8AABUAAgAK//8AAAAIAA0AGgAkAEoAAAAAAAACAQAAAAAAAAAFAAAAAAAAAAAAAAAAAAABwA==}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/11617990_12}} @phdthesis{norell:thesis, Address = {SE-412 96 G\"{o}teborg, Sweden}, Author = {Ulf Norell}, Date-Added = {2008-09-05 01:52:25 +0200}, Date-Modified = {2010-02-18 14:07:32 -0500}, Keywords = {agda}, Month = {September}, Pdf = {http://www.cs.chalmers.se/~ulfn/papers/thesis.pdf}, Read = {Oui}, School = {Department of Computer Science and Engineering, Chalmers University of Technology}, Title = {Towards a practical programming language based on dependent type theory}, Url = {http://www.cs.chalmers.se/~ulfn/papers/thesis.html}, Year = 2007, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAjLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL3RoZXNpcy5wZGZPEQFyAAAAAAFyAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADGq5gWSCsAAAA2d8gKdGhlc2lzLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/pO8k46OYAAAAAAAAAAAADAAMAAAkgAAAAAAAAAAAAAAAAAAAABnBhcGVycwAQAAgAAMare/YAAAARAAgAAMk42tYAAAABABAANnfIAA7btAAH+TgAAJDnAAIAMU1hY2ludG9zaCBIRDpVc2VyczptYXQ6cmVzZWFyY2g6cGFwZXJzOnRoZXNpcy5wZGYAAA4AFgAKAHQAaABlAHMAaQBzAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgAkVXNlcnMvbWF0L3Jlc2VhcmNoL3BhcGVycy90aGVzaXMucGRmABMAAS8AABUAAgAK//8AAAAIAA0AGgAkAEoAAAAAAAACAQAAAAAAAAAFAAAAAAAAAAAAAAAAAAABwA==}, Bdsk-Url-1 = {http://www.cs.chalmers.se/~ulfn/papers/thesis.html}} @inproceedings{DBLP:conf/csl/Miquel03, Author = {Alexandre Miquel}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {CSL}, Crossref = {DBLP:conf/csl/2003}, Date-Added = {2008-09-03 16:52:46 +0200}, Date-Modified = {2008-09-03 19:14:25 +0200}, Ee = {http://springerlink.metapress.com/openurl.asp?genre=article{\&}issn=0302-9743{\&}volume=2803{\&}spage=441}, Pages = {441-454}, Pdf = {http://www.pps.jussieu.fr/~miquel/publis/csl03.pdf}, Title = {A {S}trongly {N}ormalising {C}urry-{H}oward {C}orrespondence for {I{Z}{F}} {S}et {T}heory}, Year = {2003}, Bdsk-File-1 = 
{YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAiLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL2NzbDAzLnBkZk8RAW4AAAAAAW4AAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarmBZIKwAAADZ3yAljc2wwMy5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP9dnyTjkzQAAAAAAAAAAAAMAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAyTjWvQAAAAEAEAA2d8gADtu0AAf5OAAAkOcAAgAwTWFjaW50b3NoIEhEOlVzZXJzOm1hdDpyZXNlYXJjaDpwYXBlcnM6Y3NsMDMucGRmAA4AFAAJAGMAcwBsADAAMwAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAI1VzZXJzL21hdC9yZXNlYXJjaC9wYXBlcnMvY3NsMDMucGRmAAATAAEvAAAVAAIACv//AAAACAANABoAJABJAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAbs=}} @proceedings{DBLP:conf/csl/2003, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {CSL}, Date-Added = {2008-09-03 16:52:46 +0200}, Date-Modified = {2008-09-03 16:52:46 +0200}, Editor = {Matthias Baaz and Johann A. Makowsky}, Isbn = {3-540-40801-0}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Computer Science Logic, 17th International Workshop, {C}{S}{L} 2003, 12th {A}nnual {C}onference of the {E}{A}{C}{S}{L}, and 8th {K}urt {G}{\"o}del {C}olloquium, {K}{G}{C} 2003, {V}ienna, {A}ustria, {A}ugust 25-30, 2003, {P}roceedings}, Volume = {2803}, Year = {2003}} @article{Corbineau:2004qp, Abstract = {In this paper we present a contraction-free sequent calculus including inductive definitions for the first-order intuitionistic logic. We show that it is a natural extension to Dyckhoff's LJT calculus and we prove the contraction- and cut-elimination properties, thus extending Dyckhoff's result, in order to validate its use as a basis for proof-search procedures. Finally we describe the proof-search strategy used in our implementation as a tactic in the Coq proof assistant {$[$}2{$]$}.}, Author = {Corbineau, Pierre}, Date-Added = {2008-08-28 15:24:44 +0200}, Date-Modified = {2008-08-28 15:24:44 +0200}, Journal = {Types for Proofs and Programs}, Pages = {162--177}, Title = {First-{O}rder {R}easoning in the {C}alculus of {I}nductive {C}onstructions}, Ty = {CHAPTER}, Url = {http://www.springerlink.com/content/h5gecycqu69jcwq3}, Year = {2004}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAlLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL2Z1bGx0ZXh0LnBkZk8RAXoAAAAAAXoAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarmBZIKwAAADZ3yAxmdWxsdGV4dC5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP9owyTjlWwAAAAAAAAAAAAMAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAyTjXSwAAAAEAEAA2d8gADtu0AAf5OAAAkOcAAgAzTWFjaW50b3NoIEhEOlVzZXJzOm1hdDpyZXNlYXJjaDpwYXBlcnM6ZnVsbHRleHQucGRmAAAOABoADABmAHUAbABsAHQAZQB4AHQALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASACZVc2Vycy9tYXQvcmVzZWFyY2gvcGFwZXJzL2Z1bGx0ZXh0LnBkZgATAAEvAAAVAAIACv//AAAACAANABoAJABMAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAco=}, Bdsk-Url-1 = {http://www.springerlink.com/content/h5gecycqu69jcwq3}} @inproceedings{paulin93tlca, Author = {Christine Paulin-Mohring}, Booktitle = {Typed Lambda Calculi and Applications}, Date-Added = {2008-08-28 15:00:17 +0200}, Date-Modified = {2008-09-10 22:52:02 +0200}, Pages = {328--345}, Publisher = SV, Series = LNCS, Title = {{I}nductive {D}efinitions in the {S}ystem {Coq} - {R}ules and {P}roperties}, Topics = {team}, Url = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.32.5387&rep=rep1&type=pdf}, Volume = 664, Year = 1993, Bdsk-Url-1 = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.32.5387&rep=rep1&type=pdf}} @article{COQUAND88, Author = {Thierry Coquand and G\'{e}rard Huet}, 
Date-Added = {2008-08-28 14:10:57 +0200}, Date-Modified = {2008-09-22 14:05:39 +0200}, Journal = {Information and Computation}, Key = {Coquand88}, Month = {February/March}, Number = {2--3}, Pages = {95--120}, Pdf = {ftp://ftp.inria.fr/INRIA/publication/publi-pdf/RR/RR-0530.pdf}, Source = {ergobib}, Title = {The {C}alculus of {C}onstructions}, Volume = {76}, Year = {1988}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAkLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL1JSLTA1MzAucGRmTxEBdgAAAAABdgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAxquYFkgrAAAANnfIC1JSLTA1MzAucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/1pvJOOS1AAAAAAAAAAAAAwADAAAJIAAAAAAAAAAAAAAAAAAAAAZwYXBlcnMAEAAIAADGq3v2AAAAEQAIAADJONalAAAAAQAQADZ3yAAO27QAB/k4AACQ5wACADJNYWNpbnRvc2ggSEQ6VXNlcnM6bWF0OnJlc2VhcmNoOnBhcGVyczpSUi0wNTMwLnBkZgAOABgACwBSAFIALQAwADUAMwAwAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgAlVXNlcnMvbWF0L3Jlc2VhcmNoL3BhcGVycy9SUi0wNTMwLnBkZgAAEwABLwAAFQACAAr//wAAAAgADQAaACQASwAAAAAAAAIBAAAAAAAAAAUAAAAAAAAAAAAAAAAAAAHF}} @misc{sozeau.Coq/classes/proval08, Address = {Orsay}, Author = {Matthieu Sozeau}, Date-Added = {2008-08-27 19:20:35 +0200}, Date-Modified = {2008-08-27 19:20:46 +0200}, Howpublished = {Talk given at the {P}ro{V}al workgroup}, Keywords = {Type classes}, Month = {3rd March}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/First-Class_Type_Classes-LRI-030308.pdf}, Read = {Oui}, Title = {{F}irst-{C}lass {T}ype {C}lasses, in {C}oq}, Type = {slides}, Year = {2008}} @techreport{Luo92, Author = {Zhaohui Luo and Robert Pollack}, Date-Added = {2008-08-07 16:39:41 +0200}, Date-Modified = {2008-09-10 22:28:42 +0200}, Institution = {University of Edinburgh}, Keywords = {misc}, Month = may, Number = {ECS-LFCS-92-211}, Title = {The {L{E}{G}{O}} {P}roof {D}evelopment {S}ystem: {A} {U}ser's {M}anual}, Url = {http://www.cs.rhul.ac.uk/~zhaohui/LEGO92.ps.gz}, Year = {1992}, Bdsk-Url-1 = {http://www.cs.rhul.ac.uk/~zhaohui/LEGO92.ps.gz}} @phdthesis{Luo90, Author = {Zhaohui Luo}, Date-Added = {2008-08-07 16:39:33 +0200}, Date-Modified = {2008-09-10 22:26:55 +0200}, Month = jun, School = {Department of Computer Science, University of Edinburgh}, Title = {An {E}xtended {C}alculus of {C}onstructions}, Url = {http://www.cs.rhul.ac.uk/~zhaohui/THESIS90.ps.gz}, Year = 1990, Bdsk-Url-1 = {http://www.cs.rhul.ac.uk/~zhaohui/THESIS90.ps.gz}} @inproceedings{DBLP:conf/csl/Luo96, Author = {Zhaohui Luo}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {CSL}, Crossref = {DBLP:conf/csl/1996}, Date-Added = {2008-08-07 15:44:23 +0200}, Date-Modified = {2008-09-10 22:27:38 +0200}, Pages = {276-296}, Title = {Coercive {S}ubtyping in {T}ype {T}heory.}, Url = {http://www.cs.rhul.ac.uk/~zhaohui/SUBTYPING96.ps.gz}, Year = {1996}, Bdsk-Url-1 = {http://www.cs.rhul.ac.uk/~zhaohui/SUBTYPING96.ps.gz}} @inproceedings{DBLP:conf/types/LuoL03, Author = {Yong Luo and Zhaohui Luo}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Crossref = {DBLP:conf/types/2003}, Date-Added = {2008-08-07 15:44:23 +0200}, Date-Modified = {2008-08-07 15:44:23 +0200}, Ee = {http://springerlink.metapress.com/openurl.asp?genre=article{\&}issn=0302-9743{\&}volume=3085{\&}spage=276}, Pages = {276-292}, Title = {Combining {I}ncoherent {C}oercions for {S}igma-{T}ypes.}, Year = {2003}} @article{DBLP:journals/tcs/LuoS99, Author = {Zhaohui Luo and Sergei Soloviev}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Date-Added = {2008-08-07 15:44:23 +0200}, Date-Modified = {2008-08-07 15:44:23 
+0200}, Ee = {http://www.elsevier.nl/locate/entcs/volume29.html}, Journal = {Electr. Notes Theor. Comput. Sci.}, Title = {Dependent {C}oercions.}, Volume = {29}, Year = {1999}} @phdthesis{Barras99, Author = {Bruno Barras}, Date-Added = {2008-08-07 15:41:33 +0200}, Date-Modified = {2008-10-04 08:54:14 +0200}, Month = nov, School = {Universit{\'e} Paris~7}, Title = {Auto-validation d'un syst{\`e}me de preuves avec familles inductives}, Type = {Th{\`e}se de Doctorat}, Url = {http://pauillac.inria.fr/~barras/publi/these_barras.ps.gz}, Year = 1999, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxApLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL3RoZXNlX2JhcnJhcy5wZGZPEQGKAAAAAAGKAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADGq5gWSCsAAAA2d8gQdGhlc2VfYmFycmFzLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/idMk45zsAAAAAAAAAAAADAAMAAAkgAAAAAAAAAAAAAAAAAAAABnBhcGVycwAQAAgAAMare/YAAAARAAgAAMk42SsAAAABABAANnfIAA7btAAH+TgAAJDnAAIAN01hY2ludG9zaCBIRDpVc2VyczptYXQ6cmVzZWFyY2g6cGFwZXJzOnRoZXNlX2JhcnJhcy5wZGYAAA4AIgAQAHQAaABlAHMAZQBfAGIAYQByAHIAYQBzAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgAqVXNlcnMvbWF0L3Jlc2VhcmNoL3BhcGVycy90aGVzZV9iYXJyYXMucGRmABMAAS8AABUAAgAK//8AAAAIAA0AGgAkAFAAAAAAAAACAQAAAAAAAAAFAAAAAAAAAAAAAAAAAAAB3g==}, Bdsk-Url-1 = {http://pauillac.inria.fr/~barras/publi/these_barras.ps.gz}} @techreport{Barras96a, Author = {Bruno Barras}, Date-Added = {2008-08-07 15:41:33 +0200}, Date-Modified = {2008-10-04 08:54:21 +0200}, Institution = {INRIA}, Month = oct, Number = {3026}, Pdf = {http://hal.inria.fr/docs/00/07/36/67/PDF/RR-3026.pdf}, Title = {Coq en {C}oq}, Type = {Rapport de Recherche}, Year = {1996}} @inproceedings{Barras96b, Address = {Aussois, France}, Author = {Bruno Barras}, Booktitle = {Proceedings of the 1996 Workshop on Types for Proofs and Programs}, Date-Added = {2008-08-07 15:41:33 +0200}, Date-Modified = {2008-10-04 08:54:30 +0200}, Editor = {E.~Gimenez and C.~Paulin-Mohring}, Month = dec, Pages = {28--45}, Publisher = {Springer-Verlag LNCS 1512}, Title = {Verification of the {I}nterface of a {S}mall {P}roof {S}ystem in {C}oq}, Year = {1996}} @article{Rushby98:TSE, Author = {John Rushby and Sam Owre and Natarajan Shankar}, Date-Added = {2008-08-07 15:39:55 +0200}, Date-Modified = {2008-10-13 14:43:27 +0200}, Journal = {IEEE Transactions on Software Engineering}, Month = sep, Number = 9, Pages = {709--720}, Title = {Subtypes for {S}pecifications: {P}redicate {S}ubtyping in {P{V}{S}}}, Url = {http://pvs.csl.sri.com/papers/subtypes98/}, Volume = 24, Year = 1998, Bdsk-Url-1 = {http://pvs.csl.sri.com/papers/subtypes98/}} @inproceedings{Shankar&Owre:WADT99, Address = {Toulouse, France}, Author = {Natarajan Shankar and Sam Owre}, Booktitle = {Recent Trends in Algebraic Development Techniques, {WADT '99}}, Date-Added = {2008-08-07 15:39:43 +0200}, Date-Modified = {2008-09-10 22:32:56 +0200}, Editor = {Didier Bert and Christine Choppy and Peter Mosses}, Month = sep, Pages = {37--52}, Publisher = {Springer-Verlag}, Series = {Lecture Notes in Computer Science}, Title = {Principles and {P}ragmatics of {S}ubtyping in {P{V}{S}}}, Url = {http://pvs.csl.sri.com/papers/wadt99/}, Volume = 1827, Year = 1999, Bdsk-Url-1 = {http://pvs.csl.sri.com/papers/wadt99/}} @techreport{pvs97semantics, Address = {Menlo Park, CA}, Author = {Sam Owre and Natarajan Shankar}, Date-Added = {2008-08-07 15:35:37 +0200}, Date-Modified = {2008-09-10 22:36:36 +0200}, Institution = {Computer Science Laboratory, SRI International}, Month = aug, Number = {SRI-CSL-97-2}, Title = {The {F}ormal 
{S}emantics of {P{V}{S}}}, Url = {http://www.csl.sri.com/papers/csl-97-2/}, Year = 1997, Bdsk-Url-1 = {http://www.csl.sri.com/papers/csl-97-2/}} @manual{pvs99reference, Address = {Menlo Park, CA}, Author = {Sam Owre and Natarajan Shankar and John M. Rushby and D. W. J. Stringer-Calvert}, Date-Added = {2008-08-07 15:35:36 +0200}, Date-Modified = {2008-10-13 14:43:50 +0200}, Month = sep, Organization = {Computer Science Laboratory, SRI International}, Pdf = {http://pvs.csl.sri.com/doc/pvs-language-reference.pdf}, Title = {P{V}{S} {L}anguage {R}eference}, Year = 1999} @misc{cayenne, Author = {Lennart Augustsson}, Date-Added = {2008-08-07 15:34:46 +0200}, Date-Modified = {2008-08-07 15:34:46 +0200}, Key = {Cayenne Web Page}, Note = {\url{http://www.cs.chalmers.se/~augustss/cayenne/}}, Title = {{Cayenne} --- {Hotter} than {Haskell}}, Year = 1999} @misc{coq:8.2, Author = {The Coq development team}, Date-Added = {2008-07-30 19:04:15 +0200}, Date-Modified = {2008-07-30 19:05:58 +0200}, Title = {The {C}oq proof assistant, version 8.2}, Url = {http://coq.inria.fr}, Year = {2008}, Bdsk-Url-1 = {http://coq.inria.fr}} @article{Gregoire:2005rb, Abstract = {We present a new implementation of a reflexive tactic which solves equalities in a ring structure inside the Coq system. The efficiency is improved to a point that we can now prove equalities that were previously beyond reach. A special care has been taken to implement efficient algorithms while keeping the complexity of the correctness proofs low. This leads to a single tool, with a single implementation, which can be addressed for a ring or for a semi-ring, abstract or not, using the Leibniz equality or a setoid equality. This example shows that such reflective methods can be effectively used in symbolic computation.}, Author = {Gr{\'e}goire, Benjamin and Mahboubi, Assia}, Date-Added = {2008-07-30 18:59:47 +0200}, Date-Modified = {2008-07-30 18:59:47 +0200}, Journal = {Theorem Proving in Higher Order Logics}, M3 = {10.1007/11541868{\_}7}, Pages = {98--113}, Title = {Proving {E}qualities in a {C}ommutative {R}ing {D}one {R}ight in {C}oq}, Ty = {CHAPTER}, Url = {http://dx.doi.org/10.1007/11541868_7}, Year = {2005}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/11541868_7}} @article{Ballarin:2006gy, Abstract = {The generic proof assistant Isabelle provides a landscape of specification contexts that is considerably richer than that of most other provers. Theories are the level of specification where object-logics are axiomatised. Isabelle's proof language Isar enables local exploration in contexts generated in the course of natural deduction proofs. Finally, locales, which may be seen as detached proof contexts, offer an intermediate level of specification geared towards reuse. All three kinds of contexts are structured, to different extents. We analyse the ``topology'' of Isabelle's landscape of specification contexts, by means of development graphs, in order to establish what kinds of reuse are possible. 
}, Author = {Ballarin, Clemens}, Date-Added = {2008-07-30 18:58:28 +0200}, Date-Modified = {2008-07-30 18:58:28 +0200}, Journal = {Mathematical Knowledge Management}, M3 = {10.1007/11812289{\_}4}, Pages = {31--43}, Title = {Interpretation of {L}ocales in {I}sabelle: {T}heories and {P}roof {C}ontexts}, Ty = {CHAPTER}, Url = {http://dx.doi.org/10.1007/11812289_4}, Year = {2006}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAmLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL21rbTIwMDYtMS5wZGZPEQF+AAAAAAF+AAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADGq5gWSCsAAAA2d8gNbWttMjAwNi0xLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/ckck45egAAAAAAAAAAAADAAMAAAkgAAAAAAAAAAAAAAAAAAAABnBhcGVycwAQAAgAAMare/YAAAARAAgAAMk419gAAAABABAANnfIAA7btAAH+TgAAJDnAAIANE1hY2ludG9zaCBIRDpVc2VyczptYXQ6cmVzZWFyY2g6cGFwZXJzOm1rbTIwMDYtMS5wZGYADgAcAA0AbQBrAG0AMgAwADAANgAtADEALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASACdVc2Vycy9tYXQvcmVzZWFyY2gvcGFwZXJzL21rbTIwMDYtMS5wZGYAABMAAS8AABUAAgAK//8AAAAIAA0AGgAkAE0AAAAAAAACAQAAAAAAAAAFAAAAAAAAAAAAAAAAAAABzw==}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/11812289_4}} @inproceedings{Pfenning91lf, Address = {Cambridge, England}, Author = {Frank Pfenning}, Booktitle = {Logical Frameworks}, Date-Added = {2008-07-30 18:54:18 +0200}, Date-Modified = {2008-09-03 19:35:15 +0200}, Editor = {G\'{e}rard Huet and Gordon Plotkin}, Fullurldvi = {http://www.cs.cmu.edu/~fp/papers/lfproc91.dvi.gz}, Fullurlps = {http://www.cs.cmu.edu/~fp/papers/lfproc91.ps.gz}, Keywords = {LF, Elf}, Pages = {149--181}, Publisher = {Cambridge University Press}, Title = {{L}ogic {P}rogramming in the {L{F}} {L}ogical {F}ramework}, Url = {http://www.cs.cmu.edu/~fp/papers/lfproc91.ps.gz}, Year = {1991}, Bdsk-Url-1 = {http://www.cs.cmu.edu/~fp/papers/lfproc91.ps.gz}} @article{chr-fundeps, Address = {New York, NY, USA}, Author = {Martin Sulzmann and Gregory J. Duck and Simon Peyton-Jones and Peter J. Stuckey}, Date-Added = {2008-07-30 18:51:36 +0200}, Date-Modified = {2008-07-30 18:51:58 +0200}, Doi = {http://dx.doi.org/10.1017/S0956796806006137}, Issn = {0956-7968}, Journal = {J. Funct. 
Program.}, Number = {1}, Pages = {83--129}, Publisher = {Cambridge University Press}, Title = {Understanding functional dependencies via constraint handling rules}, Volume = {17}, Year = {2007}, Bdsk-Url-1 = {http://dx.doi.org/10.1017/S0956796806006137}} @webpage{hugs, Date-Added = {2008-07-30 18:42:25 +0200}, Date-Modified = {2008-07-30 19:07:04 +0200}, Key = {Hugs}, Title = {Hugs 98}, Url = {http://www.haskell.org/hugs/}, Bdsk-Url-1 = {http://www.haskell.org/hugs/}} @webpage{ghc, Date-Added = {2008-07-30 18:38:51 +0200}, Date-Modified = {2008-07-30 19:06:51 +0200}, Key = {GHC}, Title = {The {G}lasgow {H}askell compiler}, Url = {http://www.haskell.org/ghc/}, Bdsk-Url-1 = {http://www.haskell.org/ghc/}} @inproceedings{replib, Address = {Portland, Oregon, USA}, Author = {Stephanie Weirich}, Booktitle = {Haskell '06: Proceedings of the 2006 ACM SIGPLAN workshop on Haskell}, Date-Added = {2008-07-30 18:21:12 +0200}, Date-Modified = {2008-07-30 18:24:04 +0200}, Doi = {http://doi.acm.org/10.1145/1159842.1159844}, Isbn = {1-59593-489-8}, Pages = {1--12}, Publisher = {ACM}, Title = {Rep{L}ib: a library for derivable type classes}, Url = {http://www.seas.upenn.edu/~sweirich/RepLib/haskell08-weirich.pdf}, Year = {2006}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAuLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL2hhc2tlbGwwOC13ZWlyaWNoLnBkZk8RAZ4AAAAAAZ4AAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarmBZIKwAAADZ3yBVoYXNrZWxsMDgtd2VpcmljaC5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP9vcyTjlygAAAAAAAAAAAAMAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAyTjXugAAAAEAEAA2d8gADtu0AAf5OAAAkOcAAgA8TWFjaW50b3NoIEhEOlVzZXJzOm1hdDpyZXNlYXJjaDpwYXBlcnM6aGFza2VsbDA4LXdlaXJpY2gucGRmAA4ALAAVAGgAYQBzAGsAZQBsAGwAMAA4AC0AdwBlAGkAcgBpAGMAaAAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAL1VzZXJzL21hdC9yZXNlYXJjaC9wYXBlcnMvaGFza2VsbDA4LXdlaXJpY2gucGRmAAATAAEvAAAVAAIACv//AAAACAANABoAJABVAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAfc=}, Bdsk-Url-1 = {http://portal.acm.org/ft_gateway.cfm?id=1159844&type=pdf&coll=GUIDE&dl=GUIDE&CFID=38908461&CFTOKEN=56666391}, Bdsk-Url-2 = {http://doi.acm.org/10.1145/1159842.1159844}, Bdsk-Url-3 = {http://www.seas.upenn.edu/~sweirich/RepLib/haskell08-weirich.pdf}} @inproceedings{uniplate, Address = {Freiburg, Germany}, Author = {Neil Mitchell and Colin Runciman}, Booktitle = {Haskell '07: Proceedings of the ACM SIGPLAN workshop on Haskell workshop}, Date-Added = {2008-07-30 18:13:10 +0200}, Date-Modified = {2008-07-30 18:13:24 +0200}, Doi = {http://doi.acm.org/10.1145/1291201.1291208}, Isbn = {978-1-59593-674-5}, Pages = {49--60}, Publisher = {ACM}, Title = {Uniform boilerplate and list processing}, Url = {http://portal.acm.org/ft_gateway.cfm?id=1291208&type=pdf&coll=GUIDE&dl=GUIDE&CFID=38907636&CFTOKEN=93170946}, Year = {2007}, Bdsk-Url-1 = {http://portal.acm.org/ft_gateway.cfm?id=1291208&type=pdf&coll=GUIDE&dl=GUIDE&CFID=38907636&CFTOKEN=93170946}, Bdsk-Url-2 = {http://doi.acm.org/10.1145/1291201.1291208}} @inproceedings{Davies97, Author = {Rowan Davies}, Booktitle = {International Conference on Algebraic Methodology and Software Technology}, Date-Added = {2008-07-30 18:07:40 +0200}, Date-Modified = {2008-07-30 18:07:40 +0200}, Fullissn = {0302-9743}, Publisher = springer, Series = {Lecture Notes in Computer Science}, Title = {A refinement-type checker for {S}tandard {M{L}}}, Volume = {1349}, Year = {1997}} @book{Milner&90, Author = {Robin Milner and Mads Tofte and Robert Harper}, Checked = yes, Date-Added = {2008-07-30 18:06:27 +0200}, Date-Modified 
= {2008-07-30 18:06:27 +0200}, Publisher = mp, Title = {The {D}efinition of {S}tandard {M{L}}}, Year = 1990} @inproceedings{regis-gianas-pottier-08, Abstract = {We present a Hoare logic for a call-by-value programming language equipped with recursive, higher-order functions, algebraic data types, and a polymorphic type system in the style of Hindley and Milner. It is the theoretical basis for a tool that extracts proof obligations out of programs annotated with logical assertions. These proof obligations, expressed in a typed, higher-order logic, are discharged using off-the-shelf automated or interactive theorem provers. Although the technical apparatus that we exploit is by now standard, its application to functional programming languages appears to be new, and (we claim) deserves attention. As a sample application, we check the partial correctness of a balanced binary search tree implementation.}, Author = {Yann R{\'e}gis-Gianas and Fran{\c c}ois Pottier}, Booktitle = {Proceedings of the Ninth International Conference on Mathematics of Program Construction (MPC'08)}, Date-Added = {2008-07-30 15:29:27 +0200}, Date-Modified = {2008-07-30 15:31:05 +0200}, Month = JUL, Pdf = {http://cristal.inria.fr/~fpottier/publis/regis-gianas-pottier-hoarefp.pdf}, Publisher = {Springer-Verlag}, Series = {Lecture Notes in Computer Science}, Title = {A {Hoare} {L}ogic for {C}all-by-{V}alue {F}unctional {P}rograms}, Url = {http://cristal.inria.fr/~fpottier/publis/regis-gianas-pottier-hoarefp.ps.gz}, Volume = {5133}, Year = {2008}, Bdsk-Url-1 = {http://cristal.inria.fr/~fpottier/publis/regis-gianas-pottier-hoarefp.ps.gz}} @inproceedings{mu08aop, Address = {Berlin Heidelberg}, Author = {Shin-Cheng Mu and Hsiang-Shang Ko and Patrik Jansson}, Booktitle = {Mathematics of Program Construction}, Date-Added = {2008-07-19 15:28:03 +0200}, Date-Modified = {2008-07-19 15:41:40 +0200}, Editor = {Philippe Audebaud and Christine Paulin-Mohring}, Month = {July}, Pages = {268-283}, Pdf = {http://www.iis.sinica.edu.tw/~scm/pub/mpc08.pdf}, Publisher = {Springer-Verlag}, Series = {Lecture Notes in Computer Science}, Title = {Algebra of {P}rogramming {U}sing {D}ependent {T}ypes}, Volume = {5133}, Year = {2008}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAiLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL21wYzA4LnBkZk8RAW4AAAAAAW4AAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarmBZIKwAAADZ3yAltcGMwOC5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP9ycyTjl6QAAAAAAAAAAAAMAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAyTjX2QAAAAEAEAA2d8gADtu0AAf5OAAAkOcAAgAwTWFjaW50b3NoIEhEOlVzZXJzOm1hdDpyZXNlYXJjaDpwYXBlcnM6bXBjMDgucGRmAA4AFAAJAG0AcABjADAAOAAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAI1VzZXJzL21hdC9yZXNlYXJjaC9wYXBlcnMvbXBjMDgucGRmAAATAAEvAAAVAAIACv//AAAACAANABoAJABJAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAbs=}, Bdsk-Url-1 = {http://www.iis.sinica.edu.tw/~scm/pub/mpc08.pdf}} @article{GunterGM93, Address = {Duluth, MN, USA}, Author = {Carl A. Gunter and Elsa L. Gunter and David B. MacQueen}, Date-Added = {2008-07-11 16:19:20 +0200}, Date-Modified = {2008-07-11 16:20:39 +0200}, Doi = {http://dx.doi.org/10.1006/inco.1993.1070}, Issn = {0890-5401}, Journal = {Inf. 
Comput.}, Number = {2}, Pages = {303--323}, Publisher = {Academic Press, Inc.}, Read = {Oui}, Title = {Computing {M}{L} equality kinds using abstract interpretation}, Volume = {107}, Year = {1993}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAnLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL0d1bnRlckdNOTMucGRmTxEBggAAAAABggACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAxquYFkgrAAAANnfIDkd1bnRlckdNOTMucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/1aHJOOSPAAAAAAAAAAAAAwADAAAJIAAAAAAAAAAAAAAAAAAAAAZwYXBlcnMAEAAIAADGq3v2AAAAEQAIAADJONZ/AAAAAQAQADZ3yAAO27QAB/k4AACQ5wACADVNYWNpbnRvc2ggSEQ6VXNlcnM6bWF0OnJlc2VhcmNoOnBhcGVyczpHdW50ZXJHTTkzLnBkZgAADgAeAA4ARwB1AG4AdABlAHIARwBNADkAMwAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAKFVzZXJzL21hdC9yZXNlYXJjaC9wYXBlcnMvR3VudGVyR005My5wZGYAEwABLwAAFQACAAr//wAAAAgADQAaACQATgAAAAAAAAIBAAAAAAAAAAUAAAAAAAAAAAAAAAAAAAHU}, Bdsk-Url-1 = {http://dx.doi.org/10.1006/inco.1993.1070}} @article{ROBINSON:1993sf, Abstract = {Traditionally, mathematical logic has been content with in principle' formalizations of deductive inference, which place little emphasis on the description of practical reasoning. Requirements for formal descriptions of practical inference methods are now emerging however. For example, interactive reasoning systems are needed for verification of computer systems, and should support practically convenient reasoning techniques as strongly as possible. This paper describes a proof paradigm which formalises a hierarchical, problem-reduction style of reasoning which is widely useful in practical reasoning. It is a goal directed paradigm, which gives a central role to equivalence transformations. A hierarchy of subgoals can co-exist at a single point in the proof, and these subgoals may be of arbitrary type. The approach allows good access to contextual information when transforming subgoals, and can be applied to a variety of logics. }, Annote = {10.1093/logcom/3.1.47}, Author = {Robinson, Peter J. 
and Staples, John}, Date-Added = {2008-05-30 16:13:06 +0200}, Date-Modified = {2008-09-03 19:36:33 +0200}, Journal = {Journal of Logic and Computation}, Journal1 = {J Logic Computation}, Number = {1}, Pages = {47--61}, Title = {{F}ormalizing a {H}ierarchical {S}tructure of {P}ractical {M}athematical {R}easoning}, Ty = {JOUR}, Url = {http://logcom.oxfordjournals.org/cgi/content/abstract/3/1/47}, Volume = {3}, Year = {1993}, Bdsk-Url-1 = {http://logcom.oxfordjournals.org/cgi/content/abstract/3/1/47}} @inproceedings{DBLP:conf/types/Coen04, Author = {Sacerdoti Coen, Claudio}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Crossref = {DBLP:conf/types/2004}, Date-Added = {2008-05-29 17:08:35 +0200}, Date-Modified = {2009-01-02 13:06:29 +0100}, Ee = {http://dx.doi.org/10.1007/11617990_7}, Pages = {98-114}, Read = {Oui}, Title = {A {S}emi-reflexive {T}actic for ({S}ub-){E}quational {R}easoning}, Year = {2004}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxArLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL3NldG9pZF9yZXdyaXRlLnBkZk8RAZIAAAAAAZIAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarmBZIKwAAADZ3yBJzZXRvaWRfcmV3cml0ZS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP917yTjmCwAAAAAAAAAAAAMAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAyTjX+wAAAAEAEAA2d8gADtu0AAf5OAAAkOcAAgA5TWFjaW50b3NoIEhEOlVzZXJzOm1hdDpyZXNlYXJjaDpwYXBlcnM6c2V0b2lkX3Jld3JpdGUucGRmAAAOACYAEgBzAGUAdABvAGkAZABfAHIAZQB3AHIAaQB0AGUALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASACxVc2Vycy9tYXQvcmVzZWFyY2gvcGFwZXJzL3NldG9pZF9yZXdyaXRlLnBkZgATAAEvAAAVAAIACv//AAAACAANABoAJABSAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAeg=}} @proceedings{DBLP:conf/types/2004, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Date-Added = {2008-05-29 17:08:35 +0200}, Date-Modified = {2008-05-29 17:08:35 +0200}, Editor = {Jean-Christophe Filli{\^a}tre and Christine Paulin-Mohring and Benjamin Werner}, Isbn = {3-540-31428-8}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Types for {P}roofs and {P}rograms, {I}nternational {W}orkshop, {T}{Y}{P}{E}{S} 2004, {J}ouy-en-{J}osas, {F}rance, {D}ecember 15-18, 2004, {R}evised {S}elected {P}apers}, Volume = {3839}, Year = {2006}} @article{paulson83higherorder, Author = {Lawrence C. 
Paulson}, Date-Added = {2008-05-29 15:50:53 +0200}, Date-Modified = {2008-09-03 19:35:43 +0200}, Journal = {Science of Computer Programming}, Number = {2}, Pages = {119--149 (or 119--150??)}, Read = {Oui}, Title = {{A} {H}igher-{O}rder {I}mplementation of {R}ewriting}, Url = {http://citeseer.ist.psu.edu/paulson83higherorder.html}, Volume = {3}, Year = {1983}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAxLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL3BhdWxzb244M2hpZ2hlcm9yZGVyLnBkZk8RAaoAAAAAAaoAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarmBZIKwAAADZ3yBhwYXVsc29uODNoaWdoZXJvcmRlci5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP90pyTjmAwAAAAAAAAAAAAMAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAyTjX8wAAAAEAEAA2d8gADtu0AAf5OAAAkOcAAgA/TWFjaW50b3NoIEhEOlVzZXJzOm1hdDpyZXNlYXJjaDpwYXBlcnM6cGF1bHNvbjgzaGlnaGVyb3JkZXIucGRmAAAOADIAGABwAGEAdQBsAHMAbwBuADgAMwBoAGkAZwBoAGUAcgBvAHIAZABlAHIALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADJVc2Vycy9tYXQvcmVzZWFyY2gvcGFwZXJzL3BhdWxzb244M2hpZ2hlcm9yZGVyLnBkZgATAAEvAAAVAAIACv//AAAACAANABoAJABYAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAgY=}, Bdsk-Url-1 = {http://citeseer.ist.psu.edu/paulson83higherorder.html}} @article{DBLP:journals/iandc/LampsonB88, Author = {Butler W. Lampson and Rod M. Burstall}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Date-Added = {2008-05-26 13:16:14 +0200}, Date-Modified = {2008-09-03 19:27:17 +0200}, Journal = {Inf. Comput.}, Number = {2/3}, Pages = {278-346}, Pdf = {http://research.microsoft.com/lampson/39-Pebble/39-Pebble.pdf}, Read = {Oui}, Title = {Pebble, a {K}ernel {L}anguage for {M}odules and {A}bstract {D}ata {T}ypes}, Volume = {76}, Year = {1988}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAmLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzLzM5LVBlYmJsZS5wZGZPEQF+AAAAAAF+AAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADGq5gWSCsAAAA2d8gNMzktUGViYmxlLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/U3sk45GwAAAAAAAAAAAADAAMAAAkgAAAAAAAAAAAAAAAAAAAABnBhcGVycwAQAAgAAMare/YAAAARAAgAAMk41lwAAAABABAANnfIAA7btAAH+TgAAJDnAAIANE1hY2ludG9zaCBIRDpVc2VyczptYXQ6cmVzZWFyY2g6cGFwZXJzOjM5LVBlYmJsZS5wZGYADgAcAA0AMwA5AC0AUABlAGIAYgBsAGUALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASACdVc2Vycy9tYXQvcmVzZWFyY2gvcGFwZXJzLzM5LVBlYmJsZS5wZGYAABMAAS8AABUAAgAK//8AAAAIAA0AGgAkAE0AAAAAAAACAQAAAAAAAAAFAAAAAAAAAAAAAAAAAAABzw==}} @misc{jhc, Author = {John Meacham}, Date-Added = {2008-05-19 15:14:19 +0200}, Date-Modified = {2008-09-03 19:29:18 +0200}, Title = {{J{H}{C}}: {J}ohn's {H}askell {C}ompiler}, Url = {http://repetae.net/john/computer/jhc}, Year = {2007}, Bdsk-Url-1 = {http://repetae.net/john/computer/jhc}} @inproceedings{barthe95implicit, Author = {Gilles Barthe}, Booktitle = {{TYPES}}, Date-Added = {2008-05-13 21:41:54 +0200}, Date-Modified = {2008-05-13 21:42:10 +0200}, Pages = {1-15}, Read = {Oui}, Title = {Implicit {C}oercions in {T}ype {S}ystems}, Url = {http://citeseer.ist.psu.edu/barthe95implicit.html}, Year = {1995}, Bdsk-File-1 = 
{YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAtLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL2JhcnRoZTk1aW1wbGljaXQucGRmTxEBmgAAAAABmgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAxquYFkgrAAAANnfIFGJhcnRoZTk1aW1wbGljaXQucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/1s7JOOS7AAAAAAAAAAAAAwADAAAJIAAAAAAAAAAAAAAAAAAAAAZwYXBlcnMAEAAIAADGq3v2AAAAEQAIAADJONarAAAAAQAQADZ3yAAO27QAB/k4AACQ5wACADtNYWNpbnRvc2ggSEQ6VXNlcnM6bWF0OnJlc2VhcmNoOnBhcGVyczpiYXJ0aGU5NWltcGxpY2l0LnBkZgAADgAqABQAYgBhAHIAdABoAGUAOQA1AGkAbQBwAGwAaQBjAGkAdAAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIALlVzZXJzL21hdC9yZXNlYXJjaC9wYXBlcnMvYmFydGhlOTVpbXBsaWNpdC5wZGYAEwABLwAAFQACAAr//wAAAAgADQAaACQAVAAAAAAAAAIBAAAAAAAAAAUAAAAAAAAAAAAAAAAAAAHy}, Bdsk-Url-1 = {http://citeseer.ist.psu.edu/barthe95implicit.html}} @inproceedings{missura94using, Author = {Stephan A. Missura and Andreas Weber}, Booktitle = {{AISMC}}, Date-Added = {2008-05-13 21:22:57 +0200}, Date-Modified = {2008-05-13 21:23:26 +0200}, Keywords = {coercion}, Pages = {131-143}, Read = {Oui}, Title = {Using {C}ommutativity {P}roperties for {C}ontrolling {C}oercions}, Url = {http://citeseer.ist.psu.edu/missura94using.html}, Year = {1994}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxArLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL21pc3N1cmE5NHVzaW5nLnBkZk8RAZIAAAAAAZIAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarmBZIKwAAADZ3yBJtaXNzdXJhOTR1c2luZy5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP9yMyTjl6AAAAAAAAAAAAAMAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAyTjX2AAAAAEAEAA2d8gADtu0AAf5OAAAkOcAAgA5TWFjaW50b3NoIEhEOlVzZXJzOm1hdDpyZXNlYXJjaDpwYXBlcnM6bWlzc3VyYTk0dXNpbmcucGRmAAAOACYAEgBtAGkAcwBzAHUAcgBhADkANAB1AHMAaQBuAGcALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASACxVc2Vycy9tYXQvcmVzZWFyY2gvcGFwZXJzL21pc3N1cmE5NHVzaW5nLnBkZgATAAEvAAAVAAIACv//AAAACAANABoAJABSAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAeg=}, Bdsk-Url-1 = {http://citeseer.ist.psu.edu/missura94using.html}} @misc{wenzel95using, Author = {Markus Wenzel}, Date-Added = {2008-05-13 17:07:39 +0200}, Date-Modified = {2008-10-13 14:46:50 +0200}, Read = {Oui}, Text = {Markus Wenzel. Using axiomatic type classes in Isabelle, a tutorial, 1995. http://www4.informatik.tu-muenchen.de/~wenzelm/papers.html.}, Title = {Using axiomatic type classes in {I}sabelle}, Url = {http://citeseer.ist.psu.edu/wenzel00using.html}, Year = {1995}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAqLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL3dlbnplbDAwdXNpbmcucGRmTxEBjgAAAAABjgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAxquYFkgrAAAANnfIEXdlbnplbDAwdXNpbmcucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/6pvJOOkcAAAAAAAAAAAAAwADAAAJIAAAAAAAAAAAAAAAAAAAAAZwYXBlcnMAEAAIAADGq3v2AAAAEQAIAADJONsMAAAAAQAQADZ3yAAO27QAB/k4AACQ5wACADhNYWNpbnRvc2ggSEQ6VXNlcnM6bWF0OnJlc2VhcmNoOnBhcGVyczp3ZW56ZWwwMHVzaW5nLnBkZgAOACQAEQB3AGUAbgB6AGUAbAAwADAAdQBzAGkAbgBnAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgArVXNlcnMvbWF0L3Jlc2VhcmNoL3BhcGVycy93ZW56ZWwwMHVzaW5nLnBkZgAAEwABLwAAFQACAAr//wAAAAgADQAaACQAUQAAAAAAAAIBAAAAAAAAAAUAAAAAAAAAAAAAAAAAAAHj}, Bdsk-Url-1 = {http://citeseer.ist.psu.edu/wenzel00using.html}} @inproceedings{DBLP:conf/popl/ChakravartyKJM05, Author = {Manuel M. T. Chakravarty and Gabriele Keller and Simon L. 
Peyton Jones and Simon Marlow}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {POPL}, Crossref = {DBLP:conf/popl/2005}, Date-Added = {2008-05-13 17:02:39 +0200}, Date-Modified = {2008-05-13 17:02:39 +0200}, Ee = {http://doi.acm.org/10.1145/1040305.1040306}, Pages = {1-13}, Title = {Associated types with class}, Year = {2005}} @proceedings{DBLP:conf/popl/2005, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {POPL}, Date-Added = {2008-05-13 17:02:39 +0200}, Date-Modified = {2008-05-13 17:02:39 +0200}, Editor = {Jens Palsberg and Mart\'{\i}n Abadi}, Isbn = {1-58113-830-X}, Publisher = {ACM}, Title = {Proceedings of the 32nd ACM SIGPLAN-S{I}{G}{A}{C}{T} {S}ymposium on {P}rinciples of {P}rogramming {L}anguages, {P}{O}{P}{L} 2005, {L}ong {B}each, {C}alifornia, {U}{S}{A}, {J}anuary 12-14, 2005}, Year = {2005}} @inproceedings{DBLP:conf/tphol/Wenzel97, Author = {Markus Wenzel}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TPHOLs}, Crossref = {DBLP:conf/tphol/1997}, Date-Added = {2008-05-13 17:02:39 +0200}, Date-Modified = {2008-09-10 20:28:27 +0200}, Pages = {307-322}, Pdf = {http://www4.in.tum.de/~wenzelm/papers/axclass-TPHOLs97.pdf}, Title = {Type {C}lasses and {O}verloading in {H}igher-{O}rder {L}ogic}, Year = {1997}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxApLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL3dlbnplbDk3dHlwZS5wZGZPEQGKAAAAAAGKAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADGq5gWSCsAAAA2d8gQd2VuemVsOTd0eXBlLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/qn8k46R0AAAAAAAAAAAADAAMAAAkgAAAAAAAAAAAAAAAAAAAABnBhcGVycwAQAAgAAMare/YAAAARAAgAAMk42w0AAAABABAANnfIAA7btAAH+TgAAJDnAAIAN01hY2ludG9zaCBIRDpVc2VyczptYXQ6cmVzZWFyY2g6cGFwZXJzOndlbnplbDk3dHlwZS5wZGYAAA4AIgAQAHcAZQBuAHoAZQBsADkANwB0AHkAcABlAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgAqVXNlcnMvbWF0L3Jlc2VhcmNoL3BhcGVycy93ZW56ZWw5N3R5cGUucGRmABMAAS8AABUAAgAK//8AAAAIAA0AGgAkAFAAAAAAAAACAQAAAAAAAAAFAAAAAAAAAAAAAAAAAAAB3g==}} @proceedings{DBLP:conf/tphol/1997, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TPHOLs}, Date-Added = {2008-05-13 17:02:39 +0200}, Date-Modified = {2008-05-13 17:02:39 +0200}, Editor = {Elsa L. Gunter and Amy P. Felty}, Isbn = {3-540-63379-0}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Theorem {P}roving in {H}igher {O}rder {L}ogics, 10th {I}nternational {C}onference, {T}{P}{H}{O}{L}s'97, {M}urray {H}ill, {N}{J}, {U}{S}{A}, {A}ugust 19-22, 1997, {P}roceedings}, Volume = {1275}, Year = {1997}} @inproceedings{DBLP:conf/tphol/KammullerWP99, Author = {Florian Kamm{\"u}ller and Markus Wenzel and Lawrence C. Paulson}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TPHOLs}, Crossref = {DBLP:conf/tphol/1999}, Date-Added = {2008-05-13 17:02:39 +0200}, Date-Modified = {2008-05-13 17:02:39 +0200}, Ee = {http://link.springer.de/link/service/series/0558/bibs/1690/16900149.htm}, Pages = {149-166}, Title = {{L}ocales - {A} {S}ectioning {C}oncept for {I}sabelle}, Year = {1999}} @proceedings{DBLP:conf/tphol/1999, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TPHOLs}, Date-Added = {2008-05-13 17:02:39 +0200}, Date-Modified = {2008-05-13 17:02:39 +0200}, Editor = {Yves Bertot and Gilles Dowek and Andr{\'e} Hirschowitz and C. 
Paulin and Laurent Th{\'e}ry}, Isbn = {3-540-66463-7}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Theorem {P}roving in {H}igher {O}rder {L}ogics, 12th {I}nternational {C}onference, {T}{P}{H}{O}{L}s'99, {N}ice, {F}rance, {S}eptember, 1999, {P}roceedings}, Volume = {1690}, Year = {1999}} @inproceedings{DBLP:conf/types/HaftmannW06, Author = {Florian Haftmann and Makarius Wenzel}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Crossref = {types06}, Date-Added = {2008-05-13 17:02:39 +0200}, Date-Modified = {2008-09-10 18:26:40 +0200}, Ee = {http://dx.doi.org/10.1007/978-3-540-74464-1_11}, Pages = {160-174}, Title = {{C}onstructive {T}ype {C}lasses in {I}sabelle}, Year = {2006}} @inproceedings{DBLP:conf/tphol/WenzelP06, Author = {Markus Wenzel and Larry Paulson}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {The Seventeen Provers of the World}, Crossref = {DBLP:conf/tphol/2006provers}, Date-Added = {2008-05-13 17:02:39 +0200}, Date-Modified = {2008-05-13 17:02:39 +0200}, Ee = {http://dx.doi.org/10.1007/11542384_8}, Pages = {41-49}, Title = {Isabelle/{I}sar}, Year = {2006}} @inproceedings{DBLP:conf/tphol/HuffmanMW05, Author = {Brian Huffman and John Matthews and Peter White}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TPHOLs}, Crossref = {DBLP:conf/tphol/2005}, Date-Added = {2008-05-13 17:02:39 +0200}, Date-Modified = {2008-10-12 09:43:16 +0200}, Ee = {http://dx.doi.org/10.1007/11541868_10}, Pages = {147-162}, Title = {{A}xiomatic {C}onstructor {C}lasses in {I}sabelle/{H{O}{L}{C}{F}}}, Url = {http://web.cecs.pdx.edu/~jmatthew/papers/HuffmanMatthewsWhite05.pdf}, Year = {2005}, Bdsk-Url-1 = {http://web.cecs.pdx.edu/~jmatthew/papers/HuffmanMatthewsWhite05.pdf}} @article{DBLP:journals/jfp/MullerNOS99, Author = {Olaf M{\"u}ller and Tobias Nipkow and David von Oheimb and Oscar Slotosch}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Date-Added = {2008-05-13 17:02:39 +0200}, Date-Modified = {2008-05-13 17:02:39 +0200}, Journal = {J. {F}unct. 
{P}rogram.}, Number = {2}, Pages = {191-223}, Title = {{H{O}{L}{C}{F}}={H{O}{L}}+{L{C}{F}}}, Volume = {9}, Year = {1999}} @unpublished{higherkind, Author = {Adriaan Moors and Frank Piessens and Martin Odersky}, Date-Added = {2008-05-13 17:02:39 +0200}, Date-Modified = {2008-05-13 17:02:39 +0200}, Note = {submitted to ECOOP'08}, Title = {Generics of a {H}igher {K}ind}} @inproceedings{DBLP:conf/tphol/Pollack00, Author = {Robert Pollack}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TPHOLs}, Crossref = {DBLP:conf/tphol/2000}, Date-Added = {2008-05-13 17:02:39 +0200}, Date-Modified = {2008-09-03 19:38:51 +0200}, Pages = {462-479}, Pdf = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.58.9947&rep=rep1&type=pdf}, Title = {{D}ependently {T}yped {R}ecords for {R}epresenting {M}athematical {S}tructure}, Year = {2000}} @proceedings{DBLP:conf/tphol/2000, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TPHOLs}, Date-Added = {2008-05-13 17:02:39 +0200}, Date-Modified = {2008-05-13 17:02:39 +0200}, Editor = {Mark Aagaard and John Harrison}, Isbn = {3-540-67863-8}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Theorem {P}roving in {H}igher {O}rder {L}ogics, 13th {I}nternational {C}onference, {T}{P}{H}{O}{L}s 2000, {P}ortland, {O}regon, {U}{S}{A}, {A}ugust 14-18, 2000, {P}roceedings}, Volume = {1869}, Year = {2000}} @inproceedings{185919, Address = {Edinburgh, Scotland}, Author = {Tobias Nipkow}, Booktitle = {Papers presented at the second annual Workshop on Logical environments}, Date-Added = {2008-05-13 16:58:48 +0200}, Date-Modified = {2008-05-13 16:58:48 +0200}, Isbn = {0-521-43312-6}, Pages = {164--188}, Publisher = {Cambridge University Press}, Title = {Order-sorted polymorphism in {I}sabelle}, Year = {1993}} @incollection{jones92theory, Address = {New York, N.Y.}, Author = {Mark P. 
Jones}, Booktitle = {{ESOP} '92, 4th European Symposium on Programming, Rennes, France, February 1992, Proceedings}, Date-Added = {2008-05-13 16:07:27 +0200}, Date-Modified = {2008-05-13 16:07:27 +0200}, Editor = {Bernd Krieg-Bruckner}, Pages = {287-306}, Publisher = {Springer-Verlag}, Title = {A {T}heory of {Q}ualified {T}ypes}, Url = {http://citeseer.ist.psu.edu/jones92theory.html}, Volume = {582}, Year = {1992}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAqLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL2pvbmVzOTJ0aGVvcnkucGRmTxEBjgAAAAABjgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAxquYFkgrAAAANnfIEWpvbmVzOTJ0aGVvcnkucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/3BLJOOXQAAAAAAAAAAAAAwADAAAJIAAAAAAAAAAAAAAAAAAAAAZwYXBlcnMAEAAIAADGq3v2AAAAEQAIAADJONfAAAAAAQAQADZ3yAAO27QAB/k4AACQ5wACADhNYWNpbnRvc2ggSEQ6VXNlcnM6bWF0OnJlc2VhcmNoOnBhcGVyczpqb25lczkydGhlb3J5LnBkZgAOACQAEQBqAG8AbgBlAHMAOQAyAHQAaABlAG8AcgB5AC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgArVXNlcnMvbWF0L3Jlc2VhcmNoL3BhcGVycy9qb25lczkydGhlb3J5LnBkZgAAEwABLwAAFQACAAr//wAAAAgADQAaACQAUQAAAAAAAAIBAAAAAAAAAAUAAAAAAAAAAAAAAAAAAAHj}, Bdsk-Url-1 = {http://citeseer.ist.psu.edu/jones92theory.html}} @inproceedings{DBLP:conf/popl/NipkowP93, Author = {Tobias Nipkow and Christian Prehofer}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {POPL}, Date-Added = {2008-05-13 15:54:36 +0200}, Date-Modified = {2008-09-03 19:20:37 +0200}, Pages = {409-418}, Pdf = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.42.6449&rep=rep1&type=pdf}, Read = {Oui}, Title = {{T}ype {C}hecking {T}ype {C}lasses}, Year = {1993}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxApLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL25pcGtvdzkzdHlwZS5wZGZPEQGKAAAAAAGKAAIAAAxNYWNpbnRvc2ggSEQAAAAAAAAAAAAAAAAAAADGq5gWSCsAAAA2d8gQbmlwa293OTN0eXBlLnBkZgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/c0sk45fUAAAAAAAAAAAADAAMAAAkgAAAAAAAAAAAAAAAAAAAABnBhcGVycwAQAAgAAMare/YAAAARAAgAAMk41+UAAAABABAANnfIAA7btAAH+TgAAJDnAAIAN01hY2ludG9zaCBIRDpVc2VyczptYXQ6cmVzZWFyY2g6cGFwZXJzOm5pcGtvdzkzdHlwZS5wZGYAAA4AIgAQAG4AaQBwAGsAbwB3ADkAMwB0AHkAcABlAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgAqVXNlcnMvbWF0L3Jlc2VhcmNoL3BhcGVycy9uaXBrb3c5M3R5cGUucGRmABMAAS8AABUAAgAK//8AAAAIAA0AGgAkAFAAAAAAAAACAQAAAAAAAAAFAAAAAAAAAAAAAAAAAAAB3g==}} @inproceedings{DBLP:conf/fpca/NipkowS91, Author = {Tobias Nipkow and Gregor Snelting}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {FPCA}, Crossref = {DBLP:conf/fpca/1991}, Date-Added = {2008-05-13 15:53:50 +0200}, Date-Modified = {2008-09-03 19:34:18 +0200}, Pages = {1-14}, Title = {{T}ype {C}lasses and {O}verloading {R}esolution via {O}rder-{S}orted {U}nification}, Url = {http://portal.acm.org/citation.cfm?coll=GUIDE&dl=GUIDE&id=128029}, Year = {1991}, Bdsk-Url-1 = {http://portal.acm.org/citation.cfm?coll=GUIDE&dl=GUIDE&id=128029}} @proceedings{DBLP:conf/fpca/1991, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {FPCA}, Date-Added = {2008-05-13 15:53:50 +0200}, Date-Modified = {2008-05-13 15:53:50 +0200}, Editor = {John Hughes}, Isbn = {3-540-54396-1}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Functional {P}rogramming {L}anguages and {C}omputer {A}rchitecture, 5th {A}{C}{M} {C}onference, {C}ambridge, {M}{A}, {U}{S}{A}, {A}ugust 26-30, 1991, {P}roceedings}, Volume = {523}, Year = {1991}} @book{Nipkow-Paulson-Wenzel:2002, Author = {Tobias Nipkow and Lawrence C. 
Paulson and Markus Wenzel}, Date-Added = {2008-05-13 15:49:13 +0200}, Date-Modified = {2008-05-13 15:49:13 +0200}, Publisher = {Springer}, Series = {LNCS}, Title = {Isabelle/{H}{O}{L} --- {A} {P}roof {A}ssistant for {H}igher-{O}rder {L}ogic}, Volume = 2283, Year = 2002} @inproceedings{Kahl-Scheffczyk-2001, Abstract = {Although the functional programming language Haskell has a powerful type class system, users frequently run into situations where they would like to be able to define or adapt instances of type classes only \emph{after} the remainder of a component has been produced. However, Haskell's type class system essentially only allows late binding of type class constraints on free type variables, and not on uses of type class members at variable-free types. In the current paper we propose a language extension that enhances the late binding capabilities of Haskell type classes, and provides more flexible means for type class instantiation. The latter is achieved via \emph{named instances} that do not participate in automatic context reduction, but can only be used for late binding. By combining this capability with the automatic aspects of the Haskell type class system, we arrive at an essentially conservative extension that greatly improves flexibility of programming using type classes and opens up new structuring principles for Haskell library design. We exemplify our extension through the sketch of some applications and show how our approach could be used to explain or subsume other language features as for example implicit parameters. We present a typed lambda-calculus for our extension and provide a working prototype type checker on the basis of Mark Jones' ``Typing Haskell in Haskell''.}, Author = {Wolfram Kahl and Jan Scheffczyk}, Crossref = {Haskell2001}, Date-Added = {2008-05-13 15:15:59 +0200}, Date-Modified = {2008-09-03 19:32:04 +0200}, Read = {Oui}, Title = {{N}amed {I}nstances for {H}askell {T}ype {C}lasses}, Url = {http://ist.unibw-muenchen.de/Haskell/NamedInstances/}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAeLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzLzQucGRmTxEBXgAAAAABXgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAxquYFkgrAAAANnfIBTQucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/1OzJOORvAAAAAAAAAAAAAwADAAAJIAAAAAAAAAAAAAAAAAAAAAZwYXBlcnMAEAAIAADGq3v2AAAAEQAIAADJONZfAAAAAQAQADZ3yAAO27QAB/k4AACQ5wACACxNYWNpbnRvc2ggSEQ6VXNlcnM6bWF0OnJlc2VhcmNoOnBhcGVyczo0LnBkZgAOAAwABQA0AC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgAfVXNlcnMvbWF0L3Jlc2VhcmNoL3BhcGVycy80LnBkZgAAEwABLwAAFQACAAr//wAAAAgADQAaACQARQAAAAAAAAIBAAAAAAAAAAUAAAAAAAAAAAAAAAAAAAGn}, Bdsk-Url-1 = {http://ist.unibw-muenchen.de/Haskell/NamedInstances/}} @proceedings{Haskell2001, Booktitle = {Proc.\null{} Haskell Workshop 2001}, Date-Added = {2008-05-13 17:02:39 +0200}, Date-Modified = {2008-07-30 18:09:20 +0200}, Editor = {Ralf Hinze}, Series = ENTCS, Title = {Proc.\null{} {H}askell {W}orkshop 2001, {F}irenze}, Volume = {59}, Year = {2001}} @inproceedings{DBLP:conf/esop/Jones00, Author = {Mark P. 
Jones}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {ESOP}, Crossref = {DBLP:conf/esop/2000}, Date-Added = {2008-05-13 14:48:50 +0200}, Date-Modified = {2008-05-13 14:48:50 +0200}, Ee = {http://link.springer.de/link/service/series/0558/bibs/1782/17820230.htm}, Pages = {230-244}, Title = {Type {C}lasses with {F}unctional {D}ependencies}, Year = {2000}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAoLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL2pvbmVzMDB0eXBlLnBkZk8RAYYAAAAAAYYAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarmBZIKwAAADZ3yA9qb25lczAwdHlwZS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP9wQyTjl0AAAAAAAAAAAAAMAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAyTjXwAAAAAEAEAA2d8gADtu0AAf5OAAAkOcAAgA2TWFjaW50b3NoIEhEOlVzZXJzOm1hdDpyZXNlYXJjaDpwYXBlcnM6am9uZXMwMHR5cGUucGRmAA4AIAAPAGoAbwBuAGUAcwAwADAAdAB5AHAAZQAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAKVVzZXJzL21hdC9yZXNlYXJjaC9wYXBlcnMvam9uZXMwMHR5cGUucGRmAAATAAEvAAAVAAIACv//AAAACAANABoAJABPAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAdk=}} @article{betarte00type, Author = {Gustavo Betarte}, Date-Added = {2008-05-13 14:45:08 +0200}, Date-Modified = {2008-05-13 14:45:08 +0200}, Journal = {Journal of Functional Programming}, Number = {2}, Pages = {137-166}, Title = {Type checking dependent (record) types and subtyping}, Url = {http://citeseer.ist.psu.edu/article/betarte00type.html}, Volume = {10}, Year = {2000}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAqLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL2JldGFydGUwMHR5cGUucGRmTxEBjgAAAAABjgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAxquYFkgrAAAANnfIEWJldGFydGUwMHR5cGUucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/1uDJOOS9AAAAAAAAAAAAAwADAAAJIAAAAAAAAAAAAAAAAAAAAAZwYXBlcnMAEAAIAADGq3v2AAAAEQAIAADJONatAAAAAQAQADZ3yAAO27QAB/k4AACQ5wACADhNYWNpbnRvc2ggSEQ6VXNlcnM6bWF0OnJlc2VhcmNoOnBhcGVyczpiZXRhcnRlMDB0eXBlLnBkZgAOACQAEQBiAGUAdABhAHIAdABlADAAMAB0AHkAcABlAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgArVXNlcnMvbWF0L3Jlc2VhcmNoL3BhcGVycy9iZXRhcnRlMDB0eXBlLnBkZgAAEwABLwAAFQACAAr//wAAAAgADQAaACQAUQAAAAAAAAIBAAAAAAAAAAUAAAAAAAAAAAAAAAAAAAHj}, Bdsk-Url-1 = {http://citeseer.ist.psu.edu/article/betarte00type.html}} @incollection{Cardelli:TypecheckingDependentTypes, Annote = {1988}, Author = {Luca Cardelli}, Booktitle = {Foundations of Logic and Functional Programming, Trento, Italy, ({\rm December, 1986})}, Opteditor = {M. Boscarol and L. Carlucci Aiello and G. 
Levi}, Pages = {45--57}, Publisher = springer, Series = lncs, Source = {Leavens}, Title = {Typechecking {D}ependent {T}ypes and {S}ubtypes}, Volume = {306}, Year = {1988}} @inproceedings{WadlerBlott89, Author = {Philip Wadler and Stephen Blott}, Booktitle = popl89, Date-Modified = {2008-09-03 19:23:24 +0200}, Pages = {60--76}, Title = {How {T}o {M}ake {\em ad-hoc} {P}olymorphism {L}ess {\em ad hoc}}, Url = {http://portal.acm.org/citation.cfm?id=75277.75283}, Year = {1989}, Bdsk-Url-1 = {http://portal.acm.org/citation.cfm?id=75277.75283}} @inproceedings{Wadler89, Author = {Philip Wadler}, Booktitle = fpca89, Checked = yes, Date-Modified = {2008-09-10 20:43:49 +0200}, Month = sep, Optnote = {Imperial College, London}, Pages = {347--359}, Read = {Oui}, Title = {Theorems for {F}ree!}, Url = {http://homepages.inf.ed.ac.uk/wadler/papers/free/free.ps}, Year = 1989, Bdsk-Url-1 = {http://homepages.inf.ed.ac.uk/wadler/papers/free/free.ps}} @incollection{Wadler:MFFP, Author = {Philip Wadler}, Booktitle = {Marktoberdorf Summer School on Program Design Calculi}, Editor = {M. Broy}, Month = Aug, Note = {Also in J. Jeuring and E. Meijer, editors, Advanced Functional Programming, Springer Verlag, LNCS 925, 1995.}, Publisher = sv, Series = {NATO ASI Series F: Computer and systems sciences}, Title = {Monads for functional programming}, Volume = 118, Year = 1992} @article{wadler99marriage, Author = {Philip Wadler}, Date-Modified = {2008-09-11 15:50:50 +0200}, Journal = {{ACM} {T}ransactions on Computational Logic}, Number = 1, Pages = {1--32}, Read = {Oui}, Title = {The marriage of effects and monads}, Volume = 4, Year = 2003} @article{Hall:1996:TCH, Author = {Cordelia V. Hall and Kevin Hammond and Simon L. {Peyton Jones} and Philip L. Wadler}, Bibdate = {Tue Aug 13 11:46:35 MDT 1996}, Coden = {ATPSDT}, Fullissn = {0164-0925}, Fullurl = {http://www.acm.org/pubs/toc/Abstracts/0164-0925/227700.html}, Journal = {ACM Transactions on Programming Languages and Systems}, Month = mar, Number = {2}, Pages = {109--138}, Title = {Type classes in {Haskell}}, Volume = {18}, Year = {1996}} @misc{wadler90, Author = {Philip Wadler}, Howpublished = {University of Glasgow}, Month = mar, Title = {Comprehending {M}onads}, Year = 1990} @misc{wadler85, Author = {Philip Wadler}, Date-Modified = {2008-10-01 16:21:02 +0200}, Howpublished = {Oxford University}, Month = jan, Title = {Views . 
{A} {W}ay for elegant definitions and efficient representations to coexist}, Url = {http://homepages.inf.ed.ac.uk/wadler/papers/view/view.ps}, Year = 1985, Bdsk-Url-1 = {http://homepages.inf.ed.ac.uk/wadler/papers/view/view.ps}} @inproceedings{DBLP:conf/tphol/GonthierMRTT07, Author = {Georges Gonthier and Assia Mahboubi and Laurence Rideau and Enrico Tassi and Laurent Th{\'e}ry}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TPHOLs}, Crossref = {DBLP:conf/tphol/2007}, Date-Added = {2008-04-05 01:09:42 +0200}, Date-Modified = {2008-04-05 01:10:13 +0200}, Ee = {http://dx.doi.org/10.1007/978-3-540-74591-4_8}, Pages = {86-101}, Read = {Oui}, Title = {A {M}odular {F}ormalisation of {F}inite {G}roup {T}heory}, Year = {2007}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAkLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL1JSLTYxNTYucGRmTxEBdgAAAAABdgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAxquYFkgrAAAANnfIC1JSLTYxNTYucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/1qjJOOS4AAAAAAAAAAAAAwADAAAJIAAAAAAAAAAAAAAAAAAAAAZwYXBlcnMAEAAIAADGq3v2AAAAEQAIAADJONaoAAAAAQAQADZ3yAAO27QAB/k4AACQ5wACADJNYWNpbnRvc2ggSEQ6VXNlcnM6bWF0OnJlc2VhcmNoOnBhcGVyczpSUi02MTU2LnBkZgAOABgACwBSAFIALQA2ADEANQA2AC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgAlVXNlcnMvbWF0L3Jlc2VhcmNoL3BhcGVycy9SUi02MTU2LnBkZgAAEwABLwAAFQACAAr//wAAAAgADQAaACQASwAAAAAAAAIBAAAAAAAAAAUAAAAAAAAAAAAAAAAAAAHF}} @article{vytinioitis:freetheorems, Address = {Amsterdam, The Netherlands, The Netherlands}, Author = {Dimitrios Vytiniotis and Stephanie Weirich}, Date-Added = {2008-04-01 18:18:07 +0200}, Date-Modified = {2008-09-09 11:22:42 +0200}, Doi = {http://dx.doi.org/10.1016/j.entcs.2007.02.043}, Issn = {1571-0661}, Journal = {Electron. Notes Theor. Comput. Sci.}, Pages = {357--373}, Pdf = {http://www.seas.upenn.edu/~sweirich/papers/rtheorems07.pdf}, Publisher = {Elsevier Science Publishers B. 
V.}, Read = {Oui}, Title = {Free {T}heorems and {R}untime {T}ype {R}epresentations}, Volume = {173}, Year = {2007}, Bdsk-Url-1 = {http://dx.doi.org/10.1016/j.entcs.2007.02.043}} @inproceedings{gadts:icfp06, Address = {Portland, Oregon, USA}, Author = {Simon Peyton Jones and Dimitrios Vytiniotis and Stephanie Weirich and Geoffrey Washburn}, Booktitle = {ICFP '06: Proceedings of the eleventh ACM SIGPLAN international conference on Functional programming}, Date-Added = {2008-04-01 18:16:50 +0200}, Date-Modified = {2008-04-01 18:17:26 +0200}, Doi = {http://doi.acm.org/10.1145/1159803.1159811}, Isbn = {1-59593-309-3}, Pages = {50--61}, Publisher = {ACM}, Title = {Simple unification-based type inference for {G}{A}{D}{T}s}, Url = {http://portal.acm.org/ft_gateway.cfm?id=1159811&type=pdf&coll=GUIDE&dl=GUIDE&CFID=61890656&CFTOKEN=38649807}, Year = {2006}, Bdsk-Url-1 = {http://portal.acm.org/ft_gateway.cfm?id=1159811&type=pdf&coll=GUIDE&dl=GUIDE&CFID=61890656&CFTOKEN=38649807}, Bdsk-Url-2 = {http://doi.acm.org/10.1145/1159803.1159811}} @inproceedings{dunfield:plpv07, Address = {Freiburg, Germany}, Author = {Joshua Dunfield}, Booktitle = {PLPV '07: Proceedings of the 2007 workshop on Programming languages meets program verification}, Date-Added = {2008-04-01 18:10:59 +0200}, Date-Modified = {2008-04-05 01:10:19 +0200}, Doi = {http://doi.acm.org/10.1145/1292597.1292602}, Isbn = {978-1-59593-677-6}, Pages = {21--32}, Publisher = {ACM}, Read = {Oui}, Title = {Refined typechecking with {S}tardust}, Url = {http://portal.acm.org/ft_gateway.cfm?id=1292602&type=pdf&coll=GUIDE&dl=GUIDE&CFID=61890656&CFTOKEN=38649807}, Year = {2007}, Bdsk-Url-1 = {http://portal.acm.org/ft_gateway.cfm?id=1292602&type=pdf&coll=GUIDE&dl=GUIDE&CFID=61890656&CFTOKEN=38649807}, Bdsk-Url-2 = {http://doi.acm.org/10.1145/1292597.1292602}} @inproceedings{pottier-regis-gianas-06, Abstract = {We offer a solution to the type inference problem for an extension of Hindley and Milner's type system with generalized algebraic data types. Our approach is in two strata. The bottom stratum is a core language that marries type inference in the style of Hindley and Milner with type checking for generalized algebraic data types. This results in an extremely simple specification, where case constructs must carry an explicit type annotation and type conversions must be made explicit. The top stratum consists of (two variants of) an independent shape inference algorithm. This algorithm accepts a source term that contains some explicit type information, propagates this information in a local, predictable way, and produces a new source term that carries more explicit type information. It can be viewed as a preprocessor that helps produce some of the type annotations required by the bottom stratum. 
It is proven sound in the sense that it never inserts annotations that could contradict the type derivation that the programmer has in mind.}, Address = {Charleston, South Carolina}, Author = {Fran{\c c}ois Pottier and Yann R{\'e}gis-Gianas}, Booktitle = {Proceedings of the 33rd {ACM} Symposium on Principles of Programming Languages (POPL'06)}, Date-Added = {2008-04-01 17:55:03 +0200}, Date-Modified = {2008-04-01 17:55:25 +0200}, Month = JAN, Off = {http://doi.acm.org/10.1145/1111037.1111058}, Pages = {232--244}, Pdf = {http://cristal.inria.fr/~fpottier/publis/pottier-regis-gianas-popl06.pdf}, Title = {Stratified type inference for generalized algebraic data types}, Url = {http://cristal.inria.fr/~fpottier/publis/pottier-regis-gianas-popl06.ps.gz}, Year = {2006}, Bdsk-Url-1 = {http://cristal.inria.fr/~fpottier/publis/pottier-regis-gianas-popl06.ps.gz}} @phdthesis{gianas:phd, Author = {Yann R{\'e}gis-Gianas}, Date-Added = {2008-04-01 17:48:24 +0200}, Date-Modified = {2008-10-01 16:55:11 +0200}, Keywords = {gadts, programming languages}, Month = {November}, Pdf = {http://cristal.inria.fr/~regisgia/these-yann.regis-gianas.pdf}, School = {Universit{\'e} Paris~7}, Title = {Des types aux assertions logiques : preuve automatique ou assist{\'e}e de propri{\'e}t{\'e}s sur les programmes fonctionnels}, Type = {Doctorat}, Year = {2007}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxA0Li4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL3RoZXNlLXlhbm4ucmVnaXMtZ2lhbmFzLnBkZk8RAbYAAAAAAbYAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarmBZIKwAAADZ3yBt0aGVzZS15YW5uLnJlZ2lzLWdpYW5hcy5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP+JuyTjnOgAAAAAAAAAAAAMAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAyTjZKgAAAAEAEAA2d8gADtu0AAf5OAAAkOcAAgBCTWFjaW50b3NoIEhEOlVzZXJzOm1hdDpyZXNlYXJjaDpwYXBlcnM6dGhlc2UteWFubi5yZWdpcy1naWFuYXMucGRmAA4AOAAbAHQAaABlAHMAZQAtAHkAYQBuAG4ALgByAGUAZwBpAHMALQBnAGkAYQBuAGEAcwAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIANVVzZXJzL21hdC9yZXNlYXJjaC9wYXBlcnMvdGhlc2UteWFubi5yZWdpcy1naWFuYXMucGRmAAATAAEvAAAVAAIACv//AAAACAANABoAJABbAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAhU=}} @inproceedings{1016883, Address = {Snow Bird, UT, USA}, Author = {Ralf L\"{a}mmel and Simon Peyton Jones}, Booktitle = {ICFP '04: Proceedings of the ninth ACM SIGPLAN international conference on Functional programming}, Date-Added = {2008-04-01 16:59:15 +0200}, Date-Modified = {2008-04-01 16:59:15 +0200}, Doi = {http://doi.acm.org/10.1145/1016850.1016883}, Isbn = {1-58113-905-5}, Pages = {244--255}, Publisher = {ACM}, Title = {Scrap more boilerplate: reflection, zips, and generalised casts}, Url = {http://portal.acm.org/ft_gateway.cfm?id=1016883&type=pdf&coll=GUIDE&dl=GUIDE&CFID=61890656&CFTOKEN=38649807}, Year = {2004}, Bdsk-Url-1 = {http://portal.acm.org/ft_gateway.cfm?id=1016883&type=pdf&coll=GUIDE&dl=GUIDE&CFID=61890656&CFTOKEN=38649807}, Bdsk-Url-2 = {http://doi.acm.org/10.1145/1016850.1016883}} @inproceedings{717294, Address = {Deventer, The Netherlands, The Netherlands}, Author = {Thorsten Altenkirch and Conor McBride}, Booktitle = {Proceedings of the IFIP TC2/WG2.1 Working Conference on Generic Programming}, Date-Added = {2008-04-01 16:58:24 +0200}, Date-Modified = {2008-04-05 01:10:26 +0200}, Isbn = {1-4020-7374-7}, Pages = {1--20}, Publisher = {Kluwer, B.V.}, Read = {Oui}, Title = {Generic {P}rogramming within {D}ependently {T}yped {P}rogramming}, Year = {2003}, Bdsk-File-1 = 
{YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAkLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL2dlbnByb2cucGRmTxEBdgAAAAABdgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAxquYFkgrAAAANnfIC2dlbnByb2cucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/2jTJOOVcAAAAAAAAAAAAAwADAAAJIAAAAAAAAAAAAAAAAAAAAAZwYXBlcnMAEAAIAADGq3v2AAAAEQAIAADJONdMAAAAAQAQADZ3yAAO27QAB/k4AACQ5wACADJNYWNpbnRvc2ggSEQ6VXNlcnM6bWF0OnJlc2VhcmNoOnBhcGVyczpnZW5wcm9nLnBkZgAOABgACwBnAGUAbgBwAHIAbwBnAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgAlVXNlcnMvbWF0L3Jlc2VhcmNoL3BhcGVycy9nZW5wcm9nLnBkZgAAEwABLwAAFQACAAr//wAAAAgADQAaACQASwAAAAAAAAIBAAAAAAAAAAUAAAAAAAAAAAAAAAAAAAHF}} @article{epigram:pratprog, Abstract = {Find the type error in the following Haskell expression: You can't, of course: this program is obviously nonsense unless you're a typechecker. The trouble is that only certain computations make sense if the null xs test is True, whilst others make sense if it is False. However, as far as the type system is concerned, the type of the then branch is the type of the else branch is the type of the entire conditional. Statically, the test is irrelevant. Which is odd, because if the test really were irrelevant, we wouldn't do it. Of course, doesn't go wrong---well-typed programs don't go wrong---so we'd better pick a different word for the way they do go. Abstraction and application, tupling and projection: these provide the `software engineering' superstructure for programs, and our familiar type systems ensure that these operations are used compatibly. However, sooner or later, most programs inspect data and make a choice---at that point our familiar type systems fall silent. They simply can't talk about specific data. All this time, we thought our programming was strongly typed, when it was just our software engineering. In order to do better, we need a static language capable of expressing the significance of particular values in legitimizing some computations rather than others. 
We should not give up on programming.}, Author = {McBride, Conor}, Citeulike-Article-Id = {2247480}, Date-Added = {2008-04-01 16:52:21 +0200}, Date-Modified = {2008-12-04 14:29:31 +0100}, Doi = {10.1007/11546382_3}, Journal = {Advanced Functional Programming}, Keywords = {dependent-types, type-theory}, Pages = {130--170}, Priority = {3}, Read = {Oui}, Title = {Epigram: {P}ractical {P}rogramming with {D}ependent {T}ypes}, Url = {http://www.e-pig.org/downloads/epigram-notes.pdf}, Year = {2005}, Bdsk-Url-1 = {http://www.springerlink.com/content/96j7v5vn8tyebeky/}, Bdsk-Url-2 = {http://dx.doi.org/10.1007/11546382_3}, Bdsk-Url-3 = {http://www.e-pig.org/downloads/epigram-notes.pdf}} @inproceedings{weirich:PIE, Author = {Dimitrios Vytiniotis and Stephanie Weirich}, Booktitle = {Trends in Functional Programming}, Date-Added = {2008-04-01 16:39:58 +0200}, Date-Modified = {2008-04-01 16:56:20 +0200}, Keywords = {dependent types, Coq}, Read = {Oui}, Title = {Dependent {T}ypes: easy as {P{I}{E}}}, Year = {2007}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAiLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL3RmcDA3LnBkZk8RAW4AAAAAAW4AAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarmBZIKwAAADZ3yAl0ZnAwNy5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP93AyTjmEQAAAAAAAAAAAAMAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAyTjYAQAAAAEAEAA2d8gADtu0AAf5OAAAkOcAAgAwTWFjaW50b3NoIEhEOlVzZXJzOm1hdDpyZXNlYXJjaDpwYXBlcnM6dGZwMDcucGRmAA4AFAAJAHQAZgBwADAANwAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAI1VzZXJzL21hdC9yZXNlYXJjaC9wYXBlcnMvdGZwMDcucGRmAAATAAEvAAAVAAIACv//AAAACAANABoAJABJAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAbs=}} @inproceedings{xu:ESCHaskell, Address = {Portland, Oregon, USA}, Author = {Dana N. Xu}, Booktitle = {Haskell '06: Proceedings of the 2006 ACM SIGPLAN workshop on Haskell}, Date-Added = {2008-04-01 16:29:07 +0200}, Date-Modified = {2008-10-09 15:33:26 +0200}, Doi = {http://doi.acm.org/10.1145/1159842.1159849}, Isbn = {1-59593-489-8}, Pages = {48--59}, Publisher = {ACM}, Title = {Extended static checking for haskell}, Url = {http://www.cl.cam.ac.uk/~nx200/research/escH-hw.ps}, Year = {2006}, Bdsk-Url-1 = {http://portal.acm.org/ft_gateway.cfm?id=1159849&type=pdf&coll=GUIDE&dl=GUIDE&CFID=61886649&CFTOKEN=29875007}, Bdsk-Url-2 = {http://doi.acm.org/10.1145/1159842.1159849}, Bdsk-Url-3 = {http://www.cl.cam.ac.uk/~nx200/research/escH-hw.ps}} @misc{kieburtz-automated, Author = {Richard Kieburtz}, Date-Added = {2008-04-01 16:26:20 +0200}, Date-Modified = {2008-04-01 16:26:20 +0200}, Title = {Automated soundness checking of a programming logic for {H}askell}, Url = {http://citeseer.ist.psu.edu/575133.html}, Year = {Unknown}, Bdsk-Url-1 = {http://citeseer.ist.psu.edu/575133.html}} @misc{Coq, Date-Added = {2008-04-01 14:00:57 +0200}, Date-Modified = {2008-04-01 14:00:57 +0200}, Key = {Coq}, Title = {The {C}oq proof assistant}, Web = {coq.inria.fr}} @webpage{Haskell, Date-Added = {2008-04-01 14:00:57 +0200}, Date-Modified = {2008-10-01 16:44:37 +0200}, Key = {Haskell}, Title = {The {H}askell programming language}, Url = {http://haskell.org}, Bdsk-Url-1 = {http://haskell.org}} @misc{OCaml, Date-Added = {2008-04-01 14:00:57 +0200}, Date-Modified = {2008-04-01 14:00:57 +0200}, Key = {OCaml}, Title = {The {O}{C}aml programming language}, Web = {caml.inria.fr}} @inproceedings{DBLP:conf/tphol/Coquand06, Author = {Thierry Coquand}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {The Seventeen Provers of the World}, Crossref = 
{DBLP:conf/tphol/2006provers}, Date-Added = {2008-04-01 14:00:57 +0200}, Date-Modified = {2008-04-01 16:56:23 +0200}, Ee = {http://dx.doi.org/10.1007/11542384_9}, Pages = {50--54}, Read = {Oui}, Title = {Alfa/{A}gda}, Year = {2006}}
@proceedings{DBLP:conf/tphol/2006provers, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {The Seventeen Provers of the World}, Date-Added = {2008-04-01 14:00:57 +0200}, Date-Modified = {2008-04-01 14:00:57 +0200}, Editor = {Freek Wiedijk}, Isbn = {3-540-30704-4}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {The {S}eventeen {P}rovers of the {W}orld, {F}oreword by {D}ana {S}. {S}cott}, Volume = {3600}, Year = {2006}}
@inproceedings{DBLP:conf/pepm/FogartyPST07, Author = {Seth Fogarty and Emir Pasalic and Jeremy Siek and Walid Taha}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {PEPM}, Crossref = {DBLP:conf/pepm/2007}, Date-Added = {2008-04-01 14:00:57 +0200}, Date-Modified = {2008-04-01 16:56:24 +0200}, Ee = {http://doi.acm.org/10.1145/1244381.1244400}, Pages = {112--121}, Read = {Oui}, Title = {Concoqtion: indexed types now!}, Year = {2007}}
@proceedings{DBLP:conf/pepm/2007, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {PEPM}, Date-Added = {2008-04-01 14:00:57 +0200}, Date-Modified = {2008-04-01 14:00:57 +0200}, Editor = {G. Ramalingam and Eelco Visser}, Isbn = {978-1-59593-620-2}, Publisher = {ACM}, Title = {Proceedings of the 2007 {A}{C}{M} {S}{I}{G}{P}{L}{A}{N} {W}orkshop on {P}artial {E}valuation and {S}emantics-based {P}rogram {M}anipulation, 2007, {N}ice, {F}rance, {J}anuary 15-16, 2007}, Year = {2007}}
@book{AKM:EquationsSampler89-1, Booktitle = {Resolution of Equations in Algebraic Structures}, Editor = {Hassan A{\"\i}t-Kaci and Maurice Nivat}, Publisher = AP, Title = {Resolution of {E}quations in {A}lgebraic {S}tructures}, Volume = {1 (Algebraic Techniques)}, Year = 1989}
@book{AKM:EquationsSampler89-2, Booktitle = {Resolution of Equations in Algebraic Structures}, Editor = {Hassan A{\"\i}t-Kaci and Maurice Nivat}, Publisher = AP, Title = {Resolution of {E}quations in {A}lgebraic {S}tructures}, Volume = {2 (Rewriting Techniques)}, Year = 1989}
@book{ConstraintsLanguageComputation94, Booktitle = {Constraints, Language and Computation}, Editor = {C. J. Rupp and M. A. Rosner and R. L. Johnson}, Publisher = AP, Title = {Constraints, {L}anguage and {C}omputation}, Year = 1994}
@book{ModelTheoreticLogics85, Booktitle = {Model-Theoretic Logics}, Editor = {J. Barwise and S. Feferman}, Publisher = SV, Series = {Perspectives in Mathematical Logic}, Title = {Model-{T}heoretic {L}ogics}, Year = 1985}
@book{Moschovakis:LogicCS92, Booktitle = {Logic from Computer Science}, Editor = {Y. N. Moschovakis}, Publisher = SV, Title = {Logic from {C}omputer {S}cience}, Year = 1992}
@book{TMR:Undecidable53, Author = {Alfred Tarski and Andrzej Mostowski and Raphael M. Robinson}, Booktitle = {Undecidable Theories}, Editor = {Alfred Tarski and Andrzej Mostowski and Raphael M. Robinson}, Publisher = NH, Series = {Studies in Logic and the Foundations of Mathematics}, Title = {Undecidable {T}heories}, Year = 1953}
@proceedings{aaai93, Address = {Washington, DC}, Number = 11, Title = {Proceedings of the {E}leventh {N}ational {C}onference on {A}rtificial {I}ntelligence}, Year = 1993}
@proceedings{acl86, Address = {New York, NY}, Booktitle = {24th Annual Meeting of the Association for Computational Linguistics}, Title = {24th {A}nnual {M}eeting of the {A}ssociation for {C}omputational {L}inguistics}, Year = 1986}
@proceedings{acl87, Booktitle = {25th Annual Meeting of the Association for Computational Linguistics}, Title = {25th {A}nnual {M}eeting of the {A}ssociation for {C}omputational {L}inguistics}, Year = 1987}
@proceedings{alp92, Address = {Volterra, Italy}, Booktitle = {3rd International Conference on Algebraic and Logic Programming}, Editor = {H{\'e}l{\`e}ne Kirchner and Giorgio Levi}, Month = sep, Publisher = SV, Series = LNCS, Title = {3rd {I}nternational {C}onference on {A}lgebraic and {L}ogic {P}rogramming}, Volume = {632}, Year = 1992}
@proceedings{alp94, Address = {Madrid, Spain}, Booktitle = {4th International Conference on Algebraic and Logic Programming}, Editor = {Giorgio Levi and Mario Rodr{\'\i}guez-Artalejo}, Month = sep, Publisher = SV, Series = LNCS, Title = {4th {I}nternational {C}onference on {A}lgebraic and {L}ogic {P}rogramming}, Volume = {850}, Year = 1994}
@proceedings{alp96, Address = {Aachen, Germany}, Booktitle = {5th International Conference on Algebraic and Logic Programming}, Editor = {Michael Hanus and Mario Rodr{\'\i}guez-Artalejo}, Month = sep, Publisher = SV, Series = LNCS, Title = {5th {I}nternational {C}onference on {A}lgebraic and {L}ogic {P}rogramming}, Volume = {1139}, Year = 1996}
@proceedings{alpplilp98, Address = {Pisa, Italy}, Booktitle = {Principles of Declarative Programming}, Editor = {Catuscia Palamidessi and Hugh Glaser and Karl Meinke}, Month = sep, Publisher = SV, Series = LNCS, Title = {Joint {I}nternational {S}ymposia on {P}rogramming {L}anguages, {I}mplementations, {L}ogics and {P}rograms ({P}{L}{I}{L}{P}) and {A}lgebraic and {L}ogic {P}rogramming ({A}{L}{P})}, Volume = {1490}, Year = 1998}
@proceedings{plilp95, Address = {Utrecht, The Netherlands}, Booktitle = {7th International Symposium on Programming Languages: Implementations, Logics and Programs}, Editor = {S. Doaitse Swierstra and M. Hermenegildo}, Month = sep, Publisher = SV, Series = LNCS, Title = {7th {I}nternational {S}ymposium on {P}rogramming {L}anguages: {I}mplementations, {L}ogics and {P}rograms}, Volume = {982}, Year = 1995}
@proceedings{caap91, Booktitle = {Colloquium on Trees in Algebra and Programming}, Editor = {S. Abramsky and T. S. E.
Maibaum}, Month = apr, Publisher = SV, Series = LNCS, Title = {Colloquium on {T}rees in {A}lgebra and {P}rogramming}, Volume = 493, Year = 1991} @proceedings{caap94, Address = {Edinburgh, Scotland}, Booktitle = {Colloquium on Trees in Algebra and Programming}, Editor = {Sophie Tison}, Publisher = SV, Series = LNCS, Title = {Colloquium on {T}rees in {A}lgebra and {P}rogramming}, Volume = 787, Year = 1994} @proceedings{caap96, Address = {Link\"oping (Sweden)}, Booktitle = {Proceedings 21st International Colloquium on Trees in Algebra and Programming}, Editor = {Kirchner, H.}, Month = apr, Pages = {211--225}, Publisher = SV, Series = LNCS, Volume = 1059, Year = 1996} @proceedings{cade80, Address = {Les Arcs, France}, Booktitle = {5th~International Conference on Automated Deduction}, Editor = {W.~Bibel and R.~Kowalski}, Publisher = SV, Series = LNCS, Title = {5th~{I}nternational {C}onference on {A}utomated {D}eduction}, Volume = {87}, Year = 1980} @proceedings{cade82, Address = {New York, USA}, Booktitle = {6th International Conference on Automated Deduction}, Editor = {D. W. Loveland}, Publisher = SV, Series = LNCS, Title = {6th {I}nternational {C}onference on {A}utomated {D}eduction}, Volume = {138}, Year = 1982} @proceedings{cade84, Address = {Napa, California, USA}, Booktitle = {7th International Conference on Automated Deduction}, Editor = {R. E. Shostak}, Publisher = SV, Series = LNCS, Title = {7th {I}nternational {C}onference on {A}utomated {D}eduction}, Volume = {170}, Year = 1984} @proceedings{cade86, Address = {Oxford, England}, Booktitle = {8th International Conference on Automated Deduction}, Editor = {J{\"o}rg H. Siekmann}, Publisher = SV, Series = LNCS, Title = {8th {I}nternational {C}onference on {A}utomated {D}eduction}, Volume = {230}, Year = 1986} @proceedings{cade88, Address = {Argonne, Illinois}, Booktitle = {9th Int. Conf. on Automated Deduction}, Editor = {Lusk and Overbeek}, Month = may, Publisher = SV, Series = lncs, Title = {9th {I}nt. {C}onf. on {A}utomated {D}eduction}, Volume = 310, Year = 1988} @proceedings{cade90, Address = {Kaiserslautern, Germany}, Booktitle = {10th International Conference on Automated Deduction}, Editor = {M. E. Stickel}, Month = jul, Publisher = SV, Series = LNAI, Title = {10th {I}nternational {C}onference on {A}utomated {D}eduction}, Volume = 449, Year = 1990} @proceedings{cade92, Address = {Saratoga Springs, NY}, Booktitle = {11th International Conference on Automated Deduction}, Editor = {Deepak Kapur}, Month = jun, Publisher = SV, Series = LNCS, Title = {11th {I}nternational {C}onference on {A}utomated {D}eduction}, Volume = {607}, Year = 1992} @proceedings{cade94, Address = {Nancy, France}, Booktitle = {12th International Conference on Automated Deduction}, Editor = {A. Bundy}, Month = jun, Publisher = SV, Series = LNCS, Title = {12th {I}nternational {C}onference on {A}utomated {D}eduction}, Volume = {814}, Year = 1994} @proceedings{cade96, Address = {New Brunswick, NJ}, Booktitle = {13th International Conference on Automated Deduction}, Editor = {M. A. McRobbie and J. K. 
Slaney}, Month = {July/August}, Publisher = SV, Series = LNCS, Title = {13th {I}nternational {C}onference on {A}utomated {D}eduction}, Year = 1996}
@proceedings{cade97, Address = {Townsville, North Queensland, Australia}, Booktitle = {14th International Conference on Automated Deduction}, Editor = {William McCune}, Month = jul, Publisher = SV, Series = LNCS, Title = {14th {I}nternational {C}onference on {A}utomated {D}eduction}, Year = 1997}
@proceedings{cade99, Address = {Trento, Italy}, Booktitle = {16th International Conference on Automated Deduction}, Editor = {Harald Ganzinger}, Month = jul, Publisher = SV, Series = LNAI, Title = {16th {I}nternational {C}onference on {A}utomated {D}eduction}, Volume = 1632, Year = 1999}
@proceedings{cade2000, Address = {Pittsburgh, PA, USA}, Booktitle = {17th International Conference on Automated Deduction}, Editor = {McAllester, D.}, Month = jun, Publisher = SV, Series = LNCS, Title = {17th {I}nternational {C}onference on {A}utomated {D}eduction}, Volume = 1831, Year = 2000}
@proceedings{cade03, Address = {Miami Beach, FL, USA}, Booktitle = {19th International Conference on Automated Deduction (CADE-19)}, Editor = {Franz Baader}, Month = jul, Publisher = SV, Series = LNCS, Title = {19th {I}nternational {C}onference on {A}utomated {D}eduction}, Volume = 2741, Year = 2003}
@proceedings{cade05, Address = {Tallinn, Estonia}, Booktitle = {20th International Conference on Automated Deduction (CADE-20)}, Editor = {Robert Nieuwenhuis}, Month = jul, Number = 3632, Publisher = SV, Series = LNAI, Title = {20th {I}nternational {C}onference on {A}utomated {D}eduction}, Year = 2005}
@proceedings{cade07, Address = {Bremen, Germany}, Booktitle = {21st International Conference on Automated Deduction (CADE-21)}, Month = jul, Publisher = SV, Series = LNAI, Title = {21st {I}nternational {C}onference on {A}utomated {D}eduction}, Volume = 4603, Year = 2007}
@proceedings{cav94, Address = {Stanford, CA, USA}, Booktitle = {Computer Aided Verification}, Editor = {David L. Dill}, Month = jun, Publisher = SV, Series = LNCS, Title = {Computer {A}ided {V}erification, 6th {I}nternational {C}onference}, Volume = 818, Year = 1994}
@proceedings{cav95, Address = {Liege, Belgium}, Booktitle = {Proceedings of the 7th International Conference On Computer Aided Verification}, Editor = {P. Wolper}, Publisher = sv, Title = {Proceedings of the 7th {I}nternational {C}onference {O}n {C}omputer {A}ided {V}erification}, Volume = 939, Year = 1995}
@proceedings{ccl94, Address = {M{\"u}nchen, Germany}, Booktitle = {First International Conference on Constraints in Computational Logics}, Editor = {Jean-Pierre Jouannaud}, Month = sep, Publisher = SV, Series = LNCS, Title = {First {I}nternational {C}onference on {C}onstraints in {C}omputational {L}ogics}, Topics = {teamp, cclserver,lri}, Type_Publi = {editeur}, Volume = 845, Year = 1994}
@proceedings{chambery94, Address = {Chamb{\'e}ry}, Booktitle = {2nd International Conference in Logic For Computer Science: Automated Deduction. Lecture notes.}, Editor = {Ren{\'e} David}, Month = jul, Publisher = {Univ. de Savoie}, Title = {2nd {I}nternational {C}onference in {L}ogic {F}or {C}omputer {S}cience: {A}utomated {D}eduction.
{L}ecture notes.}, Year = 1994} @proceedings{cl00, Address = {London, UK}, Booktitle = {Proceedings of the First International Conference on Computational Logic}, Month = jul, Title = {Proceedings of the {F}irst {I}nternational {C}onference on {C}omputational {L}ogic}, Year = 2000} @proceedings{coling88, Address = {Budapest, Hungary}, Booktitle = {International Conference on Computational Linguistics}, Editor = {D{\'e}nes Vargha}, Publisher = {John von Neumann Society for Computing Sciences}, Title = {International {C}onference on {C}omputational {L}inguistics}, Year = 1988} @proceedings{comon95lncs, Booktitle = {Term Rewriting}, Clef_Labo = {CJ95}, Editor = {Hubert Comon and Jean-Pierre Jouannaud}, Organization = {French Spring School of Theoretical Computer Science}, Publisher = SV, Series = LNCS, Title = {Term {R}ewriting}, Topics = {team, cclserver}, Type_Publi = {editeur}, Volume = {909}, Year = 1995} @proceedings{concur97, Address = {Warsaw, Poland}, Booktitle = {Concurrency Theory}, Editor = {Antoni W. Mazurkiewicz and J{\'o}zef Winkowski}, Month = jul, Publisher = SV, Series = LNCS, Title = {Concurrency {T}heory, 8th {I}nternational {C}onference}, Volume = 1243, Year = 1997} @proceedings{concur99, Address = {Eindhoven, The Netherlands}, Booktitle = {Concurrency Theory}, Editor = {Jos C. M. Baeten and Sjouke Mauw}, Month = aug, Publisher = SV, Series = lncs, Title = {Concurrency {T}heory}, Volume = {1664}, Year = 1999} @proceedings{cp95, Address = {Cassis, France}, Booktitle = {Principles and Practice of Constraint Programming}, Editor = {Ugo Montanari and Francesca Rossi}, Month = sep, Publisher = SV, Series = LNCS, Title = {Principles and {P}ractice of {C}onstraint {P}rogramming}, Volume = 976, Year = 1995} @proceedings{cp96, Booktitle = {Principles and Practice of Constraint Programming}, Editor = {Eugene Freuder}, Publisher = SV, Series = LNCS, Volume = 1118, Year = 1996} @proceedings{cp97, Address = {Linz, Austria}, Booktitle = {Principles and Practice of Constraint Programming}, Editor = {Gert Smolka}, Month = oct, Publisher = SV, Series = LNCS, Title = {Principles and {P}ractice of {C}onstraint {P}rogramming}, Volume = 1330, Year = 1997} @proceedings{csl91, Address = {Berne, Switzerland}, Booktitle = {Computer Science Logic}, Editor = {E. B{\"o}rger and G. J{\"a}ger and H. Kleine B{\"u}ning and M. M. 
Richter}, Month = oct, Publisher = SV, Series = LNCS, Title = {Computer {S}cience {L}ogic}, Volume = 626, Year = 1991} @proceedings{csl94, Address = {Kazimierz, Poland}, Booktitle = {Proceedings of the 8th Workshop on Computer Science Logic}, Editor = {Leszek Pacholski and Jerzy Tiuryn}, Month = sep, Publisher = SV, Series = LNCS, Title = {Proceedings of the 8th {W}orkshop on {C}omputer {S}cience {L}ogic}, Volume = 933, Year = 1994} @proceedings{csl97, Booktitle = {CSL}, Editor = {Mogens Nielsen and Wolfgang Thomas}, Isbn = {3-540-64570-5}, Publisher = sv, Series = lncs, Title = {Computer {S}cience {L}ogic, 11th {I}nternational {W}orkshop, {C}{S}{L} '97, {A}nnual {C}onference of the {E}{A}{C}{S}{L}, {A}arhus, {D}enmark, {A}ugust 23-29, 1997, {S}elected {P}apers}, Volume = 1414, Year = 1998} @proceedings{csl00, Booktitle = {CSL}, Editor = {Peter Clote and Helmut Schwichtenberg}, Isbn = {3-540-67895-6}, Publisher = sv, Series = lncs, Title = {Computer {S}cience {L}ogic, 14th {A}nnual {C}onference of the {E}{A}{C}{S}{L}, {F}ischbachau, {G}ermany, {A}ugust 21-26, 2000, {P}roceedings}, Volume = 1862, Year = 2000} @proceedings{csl01, Address = {Paris, France}, Booktitle = {Proceedings of the Annual Conference of the European Association for Computer Science Logic}, Editor = {Laurent Fribourg}, Month = sep, Publisher = SV, Series = LNCS, Title = {Proceedings of the {A}nnual {C}onference of the {E}uropean {A}ssociation for {C}omputer {S}cience {L}ogic}, Volume = 2142, Year = 2001} @proceedings{csl04, Address = {Karpacz, Poland}, Booktitle = {Proceedings of the Annual Conference of the European Association for Computer Science Logic}, Editor = {Jerzy Marcinkowski and Andrzej Tarlecki}, Month = sep, Publisher = SV, Series = LNCS, Title = {Proceedings of the {A}nnual {C}onference of the {E}uropean {A}ssociation for {C}omputer {S}cience {L}ogic}, Year = 2004} @proceedings{focs78, Address = {Ann Arbor, Michigan}, Booktitle = {19th Annual Symposion on Foundations of Computer Science}, Month = oct, Publisher = IEEECSP, Title = {19th {A}nnual {S}ymposion on {F}oundations of {C}omputer {S}cience}, Year = 1978} @proceedings{focs88, Booktitle = {29th Annual Symposion on Foundations of Computer Science}, Publisher = IEEECSP, Title = {29th {A}nnual {S}ymposion on {F}oundations of {C}omputer {S}cience}, Year = 1988} @proceedings{focs99, Address = {New York, NY, USA}, Booktitle = {40th Annual Symposium on Foundations of Computer Science}, Month = oct, Publisher = IEEECSP, Title = {40th {A}nnual {S}ymposium on {F}oundations of {C}omputer {S}cience}, Year = 1999} @proceedings{fplca85, Booktitle = {IFIP International Conference on Functional Programming Languages and Computer Architecture}, Editor = {Jean-Pierre Jouannaud}, Publisher = SV, Series = LNCS, Title = {I{F}{I}{P} {I}nternational {C}onference on {F}unctional {P}rogramming {L}anguages and {C}omputer {A}rchitecture}, Volume = 201, Year = 1985} @proceedings{fsttcs90, Address = {Bangalore, India}, Booktitle = {Foundations of Software Technology and Theoretical Computer Science}, Editor = {K. V. Nori and C. E. Veni Madhavan}, Publisher = SV, Series = LNCS, Title = {Foundations of {S}oftware {T}echnology and {T}heoretical {C}omputer {S}cience}, Volume = {472}, Year = 1990} @proceedings{fsttcs92, Address = {New Delhi, India}, Booktitle = {Foundations of Software Technology and Theoretical Computer Science}, Editor = {R. 
Shyamasundar}, Month = dec, Publisher = SV, Series = LNCS, Title = {Foundations of {S}oftware {T}echnology and {T}heoretical {C}omputer {S}cience}, Volume = {652}, Year = 1992} @book{gi-tcs83, Booktitle = {Proceedings of the 6th {GI} Conference on Theoretical Computer Science}, Editor = {A. B. Cremers and H. P. Kriegek}, Publisher = SV, Series = LNCS, Title = {Proceedings of the 6th {G{I}} {C}onference on {T}heoretical {C}omputer {S}cience}, Volume = 145, Year = 1982} @proceedings{gwai85, Address = {Dassel/Solling}, Booktitle = {9th German Wokshop on Artificial Intelligence}, Editor = {Herbert Stoyan}, Publisher = SV, Series = {Informatik Fachberichte vol. 118}, Title = {9th {G}erman {W}okshop on {A}rtificial {I}ntelligence}, Year = 1985} @proceedings{gwai86, Address = {Ottenstein/Nieder{\"o}sterreich}, Booktitle = {10th German Wokshop on Artificial Intelligence}, Editor = {W. Bibel and R. Kowalski}, Publisher = SV, Series = {Informatik Fachberichte vol. 124}, Title = {10th {G}erman {W}okshop on {A}rtificial {I}ntelligence}, Year = 1986} @proceedings{icalp87, Address = {Karlsruhe, Germany}, Booktitle = {14th International Colloquium on Automata, Languages and Programming}, Editor = {Thomas Ottmann}, Month = jul, Publisher = SV, Series = LNCS, Title = {14th {I}nternational {C}olloquium on {A}utomata, {L}anguages and {P}rogramming}, Volume = {267}, Year = 1987} @proceedings{icalp90, Address = {Warwick, England}, Booktitle = {17th International Colloquium on Automata, Languages and Programming}, Editor = {M. S. Paterson}, Publisher = SV, Series = LNCS, Title = {17th {I}nternational {C}olloquium on {A}utomata, {L}anguages and {P}rogramming}, Volume = {443}, Year = 1990} @proceedings{icalp91, Address = {Madrid, Spain}, Booktitle = {18th International Colloquium on Automata, Languages and Programming}, Editor = {Javier Leach Albert and Burkhard Monien and M. Rodriguez Artalejo}, Publisher = SV, Series = LNCS, Title = {18th {I}nternational {C}olloquium on {A}utomata, {L}anguages and {P}rogramming}, Volume = {510}, Year = 1991} @proceedings{icalp92, Address = {Wien, Austria}, Booktitle = {19th International Colloquium on Automata, Languages and Programming}, Editor = {W. 
Kuich}, Month = jul, Publisher = SV, Series = LNCS, Title = {19th {I}nternational {C}olloquium on {A}utomata, {L}anguages and {P}rogramming}, Volume = {623}, Year = 1992} @proceedings{icalp93, Address = {Lund, Sweden}, Booktitle = {20th International Colloquium on Automata, Languages and Programming}, Editor = {Andrzej Lingas and Rolf Karlsson and Svante Carlsson}, Month = jul, Publisher = SV, Series = LNCS, Title = {20th {I}nternational {C}olloquium on {A}utomata, {L}anguages and {P}rogramming}, Volume = {700}, Year = 1993} @proceedings{icalp94, Address = {Jerusalem, Israel}, Booktitle = {21th International Colloquium on Automata, Languages and Programming}, Editor = {Serge Abiteboul and Eli Shamir}, Month = jul, Publisher = SV, Series = LNCS, Title = {21th {I}nternational {C}olloquium on {A}utomata, {L}anguages and {P}rogramming}, Volume = 820, Year = 1994} @proceedings{icalp97, Booktitle = {ICALP}, Editor = {Pierpaolo Degano and Roberto Gorrieri and Alberto Marchetti-Spaccamela}, Isbn = {3-540-63165-8}, Publisher = sv, Series = lncs, Title = {Automata, {L}anguages and {P}rogramming, 24th {I}nternational {C}olloquium, {I}{C}{A}{L}{P}'97, {B}ologna, {I}taly, 7-11 {J}uly 1997, {P}roceedings}, Volume = 1256, Year = 1997} @proceedings{icalp98, Address = {Aalborg, Denmark}, Booktitle = {24th International Colloquium on Automata, Languages and Programming}, Editor = {Kim G. Larsen}, Month = jul, Organization = {EATCS}, Publisher = SV, Series = LNCS, Title = {24th {I}nternational {C}olloquium on {A}utomata, {L}anguages and {P}rogramming}, Volume = {1443}, Year = 1998} @proceedings{icalp03, Address = {Eindhoven, The Netherlands}, Booktitle = {30th International Colloquium on Automata, Languages and Programming}, Editor = {Jos C. M. Baeten and Jan Karel Lenstra and Joachim Parrow and Gerhard J. Woeginger}, Isbn = {3-540-40493-7}, Month = jul, Organization = {EATCS}, Publisher = SV, Series = LNCS, Title = {30th {I}nternational {C}olloquium on {A}utomata, {L}anguages and {P}rogramming}, Volume = 2719, Year = 2003} @proceedings{iclp87, Booktitle = {Proceedings of the Fourth International Conference on Logic Programming}, Editor = {Jean-Louis Lassez}, Month = may, Publisher = MIT, Title = {Proceedings of the {F}ourth {I}nternational {C}onference on {L}ogic {P}rogramming}, Year = 1987} @proceedings{iclp90, Booktitle = {Proceedings of the 7th International Conference on Logic Programming}, Editor = {D.H.D. Warren and P. 
Szeredi}, Month = jun, Publisher = MIT, Title = {Proceedings of the 7th {I}nternational {C}onference on {L}ogic {P}rogramming}, Year = 1990} @proceedings{iclp91, Address = {Paris, France}, Booktitle = {Proceedings of the 8th International Conference on Logic Programming}, Editor = {Koichi Furukawa}, Month = jun, Publisher = MIT, Title = {Proceedings of the 8th {I}nternational {C}onference on {L}ogic {P}rogramming}, Year = 1991} @proceedings{iclp94, Address = {Santa Margherita Ligure, Italy}, Booktitle = {Proceedings of the Eleventh International Conference on Logic Programming}, Editor = {Pascal van Hentenryck}, Month = jun, Publisher = MIT, Title = {Proceedings of the {E}leventh {I}nternational {C}onference on {L}ogic {P}rogramming}, Year = 1994} @proceedings{ijcai91, Address = {Sydney}, Booktitle = {International Joint Conference on Artificial Intelligence}, Title = {International {J}oint {C}onference on {A}rtificial {I}ntelligence}, Year = 1991} @proceedings{ijcai93, Address = {Chamb{\'e}ry, France}, Booktitle = {13th International Joint Conference on Artificial Intelligence}, Month = aug, Publisher = {Morgan Kaufmann}, Title = {13th {I}nternational {J}oint {C}onference on {A}rtificial {I}ntelligence}, Year = 1993} @proceedings{islp91, Address = {San Diego, USA}, Booktitle = {Proceedings of the 1991 International Symposium on Logic Programming}, Editor = {Saraswat, Vijay and Ueda, Kazunori}, Publisher = MIT, Title = {Proceedings of the 1991 {I}nternational {S}ymposium on {L}ogic {P}rogramming}, Year = 1991} @proceedings{islp93, Address = {Vancouver, Canada}, Booktitle = {Proceedings of the 1993 International Symposium on Logic Programming}, Editor = {Dale Miller}, Month = oct, Publisher = MIT, Title = {Proceedings of the 1993 {I}nternational {S}ymposium on {L}ogic {P}rogramming}, Year = 1993} @proceedings{iwnmsa93, Address = {Japan}, Month = {November}, Title = {Proceedings of the international workshop on new models of software architecture}, Year = 1993} @proceedings{jicslp92, Address = {Washington, USA}, Booktitle = {Proceedings of the Joint International Conference and Symposium on Logic Programming}, Editor = {Apt, Krzysztof}, Month = nov, Publisher = MIT, Title = {Proceedings of the {J}oint {I}nternational {C}onference and {S}ymposium on {L}ogic {P}rogramming}, Year = 1992} @proceedings{lacl98, Address = {Grenoble, France}, Booktitle = {Logical Aspects of Computational Linguistics 1998}, Editor = {Michael Moortgat}, Publisher = SV, Series = LNAI, Title = {Logical {A}spects of {C}omputational {L}inguistics 1998}, Volume = 2014, Year = 2001} @proceedings{lics86, Address = {Cambridge, MA}, Booktitle = {Proceedings of the First Symposium on Logic in Computer Science}, Month = jun, Publisher = IEEECSP, Title = {Proceedings of the {F}irst {S}ymposium on {L}ogic in {C}omputer {S}cience}, Year = 1986} @proceedings{lics87, Booktitle = {Proceedings of the Second Symposium on Logic in Computer Science}, Organization = IEEECSP, Title = {Proceedings of the {S}econd {S}ymposium on {L}ogic in {C}omputer {S}cience}, Year = 1987} @proceedings{lics88, Booktitle = {Proceedings of the Third Annual Symposium on Logic in Computer Science}, Organization = IEEECSP, Title = {Proceedings of the {T}hird {A}nnual {S}ymposium on {L}ogic in {C}omputer {S}cience}, Year = 1988} @proceedings{lics89, Address = {Pacific Grove, California}, Booktitle = {Proceedings of the Fourth Annual Symposium on Logic in Computer Science}, Organization = IEEECSP, Title = {Proceedings of the {F}ourth {A}nnual {S}ymposium on {L}ogic
in {C}omputer {S}cience}, Year = 1989} @proceedings{lics90, Booktitle = {Proceedings of the Fifth Annual IEEE Symposium on Logic in Computer Science}, Organization = IEEECSP, Title = {Proceedings of the {F}ifth {A}nnual {I}{E}{E}{E} {S}ymposium on {L}ogic in {C}omputer {S}cience}, Year = 1990} @proceedings{lics91, Booktitle = {Proceedings of the Sixth Annual IEEE Symposium on Logic in Computer Science}, Organization = IEEECSP, Title = {Proceedings of the {S}ixth {A}nnual {I}{E}{E}{E} {S}ymposium on {L}ogic in {C}omputer {S}cience}, Year = 1991} @proceedings{lics92, Booktitle = {Proceedings of the Seventh Annual IEEE Symposium on Logic in Computer Science}, Organization = IEEECSP, Title = {Proceedings of the {S}eventh {A}nnual {I}{E}{E}{E} {S}ymposium on {L}ogic in {C}omputer {S}cience}, Year = 1992} @proceedings{lics93, Booktitle = {Proceedings of the Eighth Annual IEEE Symposium on Logic in Computer Science}, Organization = IEEECSP, Title = {Proceedings of the {E}ighth {A}nnual {I}{E}{E}{E} {S}ymposium on {L}ogic in {C}omputer {S}cience}, Year = 1993} @proceedings{lics94, Address = {Paris, France}, Booktitle = {Proceedings of the Ninth Annual IEEE Symposium on Logic in Computer Science}, Month = jul, Organization = IEEECSP, Title = {Proceedings of the {N}inth {A}nnual {I}{E}{E}{E} {S}ymposium on {L}ogic in {C}omputer {S}cience}, Year = 1994} @proceedings{lics95, Address = {San Diego, CA}, Booktitle = {Tenth Annual {IEEE} Symposium on Logic in Computer Science}, Editor = {Dexter Kozen}, Month = jun, Organization = IEEECSP, Title = {Tenth {A}nnual {I{E}{E}{E}} {S}ymposium on {L}ogic in {C}omputer {S}cience}, Year = 1995} @proceedings{lics97, Address = {Warsaw, Poland}, Booktitle = {Twelfth Annual {IEEE} Symposium on Logic in Computer Science}, Month = jun, Organization = IEEECSP, Title = {Twelfth {A}nnual {I{E}{E}{E}} {S}ymposium on {L}ogic in {C}omputer {S}cience}, Year = 1997} @proceedings{lics98, Address = {Indianapolis, IN, USA}, Booktitle = {Thirteenth Annual {IEEE} Symposium on Logic in Computer Science}, Editor = {Vaughan Pratt}, Month = jun, Organization = IEEECSP, Title = {Thirteenth {A}nnual {I{E}{E}{E}} {S}ymposium on {L}ogic in {C}omputer {S}cience}, Year = 1998} @proceedings{lics99, Address = {Trento, Italy}, Booktitle = {Fourteenth Annual {IEEE} Symposium on Logic in Computer Science}, Editor = {Giuseppe Longo}, Month = jul, Organization = IEEECSP, Title = {Fourteenth {A}nnual {I{E}{E}{E}} {S}ymposium on {L}ogic in {C}omputer {S}cience}, Year = 1999} @proceedings{lics00, Address = {Santa Barbara, California}, Booktitle = {Fifteenth Annual {IEEE} Symposium on Logic in Computer Science}, Editor = {Martin Abadi}, Month = jun, Organization = IEEECSP, Title = {Fifteenth {A}nnual {I{E}{E}{E}} {S}ymposium on {L}ogic in {C}omputer {S}cience}, Year = 2000} @proceedings{lics01, Booktitle = {Sixteenth Annual {IEEE} Symposium on Logic in Computer Science}, Organization = IEEECSP, Title = {Sixteenth {A}nnual {I{E}{E}{E}} {S}ymposium on {L}ogic in {C}omputer {S}cience}, Year = 2001} @proceedings{lics02, Booktitle = {17th Annual {IEEE} Symposium on Logic in Computer Science}, Organization = IEEECSP, Title = {17th {A}nnual {I{E}{E}{E}} {S}ymposium on {L}ogic in {C}omputer {S}cience}, Year = 2002} @proceedings{lpar92, Address = {St. Petersburg, Russia}, Booktitle = {International Conference on Logic Programming and Automated Reasoning}, Editor = {A.
Voronkov}, Month = jul, Publisher = SV, Series = LNAI, Title = {International {C}onference on {L}ogic {P}rogramming and {A}utomated {R}easoning}, Volume = 624, Year = 1992} @proceedings{lpar93, Address = {St. Petersburg, Russia}, Booktitle = {4th International Conference on Logic Programming and Automated Reasoning}, Editor = {Andrei Voronkov}, Month = jul, Publisher = SV, Series = LNAI, Title = {4th {I}nternational {C}onference on {L}ogic {P}rogramming and {A}utomated {R}easoning}, Volume = 698, Year = 1993} @proceedings{mfcs80, Address = {Rydzyna, Poland}, Booktitle = {Mathematical Foundations of Computer Science}, Month = sep, Title = {Mathematical {F}oundations of {C}omputer {S}cience}, Year = 1980} @proceedings{mfcs84, Address = {Praha, Czechoslovakia}, Booktitle = {Mathematical Foundations of Computer Science}, Editor = {M. P. Chytil and V. Koubek}, Month = sep, Publisher = SV, Series = LNCS, Title = {Mathematical {F}oundations of {C}omputer {S}cience}, Volume = 175, Year = 1984} @proceedings{mfcs91, Address = {Kazimierz Dolny, Poland}, Booktitle = {Mathematical Foundations of Computer Science}, Editor = {A. Tarlecki}, Month = sep, Publisher = SV, Series = LNAI, Title = {Mathematical {F}oundations of {C}omputer {S}cience}, Volume = 520, Year = 1991} @proceedings{mfcs92, Booktitle = {Mathematical Foundations of Computer Science}, Publisher = SV, Title = {Mathematical {F}oundations of {C}omputer {S}cience}, Year = 1992} @proceedings{mfcs93, Address = {Gda{\'n}sk, Poland}, Booktitle = {Mathematical Foundations of Computer Science}, Editor = {Andrzej M. Borzyszkowski and Stefan Soko{\l}owski}, Isbn = {3-540-57182-5}, Month = {30 August--3 September}, Publisher = SV, Series = LNCS, Title = {Mathematical {F}oundations of {C}omputer {S}cience}, Volume = 711, Year = 1993} @proceedings{mfcs98, Address = {Brno, Czech Republic}, Booktitle = {Mathematical Foundations of Computer Science}, Editor = {Lubos Brim and Jozef Gruska and Jiri Zlatuska}, Isbn = {3-540-64827-5}, Month = aug, Publisher = SV, Series = LNCS, Title = {Mathematical {F}oundations of {C}omputer {S}cience}, Volume = 1450, Year = 1998} @proceedings{mfcs99, Address = {Szklarska Poreba, Poland}, Booktitle = {Mathematical Foundations of Computer Science}, Editor = {M. Kutylowski and L. Pacholski and T. Wierzbicki}, Month = sep, Publisher = SV, Series = LNCS, Title = {Mathematical {F}oundations of {C}omputer {S}cience}, Volume = 1672, Year = 1999} @proceedings{pods91, Address = {Denver, CO}, Booktitle = {Tenth {ACM} Symposium on the Principles of Database Systems}, Month = may, Title = {Tenth {A{C}{M}} {S}ymposium on the {P}rinciples of {D}atabase {S}ystems}, Year = 1991} @proceedings{popl75, Address = {Palo Alto, CA}, Booktitle = {Proceedings of 2nd {ACM} Conference on Principles of Programming Languages}, Publisher = ACM, Title = {Proceedings of 2nd {A{C}{M}} {C}onference on {P}rinciples of {P}rogramming {L}anguages}, Year = 1975} @proceedings{popl80, Address = {Las Vegas, Nevada}, Booktitle = {Proccedings of the Seventh Annual {ACM} Symposion on Principles of Programming Languages}, Month = jan, Publisher = ACM, Title = {Proccedings of the {S}eventh {A}nnual {A{C}{M}} {S}ymposion on {P}rinciples of {P}rogramming {L}anguages}, Year = 1980} @proceedings{popl85, Booktitle = {Proceedings of 12th {ACM} Conference on Principles of Programming Languages}, Editor = {B. 
Reid}, Publisher = ACM, Title = {Proceedings of 12th {A{C}{M}} {C}onference on {P}rinciples of {P}rogramming {L}anguages}, Year = 1985} @proceedings{popl87, Address = {Munich, Germany}, Booktitle = {Proceedings of the 14th {ACM} Conference on Principles of Programming Languages}, Month = jan, Publisher = ACM, Title = {Proceedings of the 14th {A{C}{M}} {C}onference on {P}rinciples of {P}rogramming {L}anguages}, Year = 1987} @proceedings{popl90, Address = {San Francisco, CA}, Booktitle = {Proceedings of the 17th {ACM} Conference on Principles of Programming Languages}, Month = jan, Title = {Proceedings of the 17th {A{C}{M}} {C}onference on {P}rinciples of {P}rogramming {L}anguages}, Year = 1990} @proceedings{popl91, Address = {Orlando, FL}, Booktitle = {Proceedings of the 18th Symposium on Principles of Programming Languages}, Month = jan, Publisher = ACM, Title = {Proceedings of the 18th {S}ymposium on {P}rinciples of {P}rogramming {L}anguages}, Year = 1991} @proceedings{popl92, Address = {Albuquerque, NM}, Booktitle = {Conference Record of the 19th Symposium on Principles of Programming Languages}, Publisher = ACM, Title = {Conference {R}ecord of the 19th {S}ymposium on {P}rinciples of {P}rogramming {L}anguages}, Year = 1992} @proceedings{popl94, Address = {Portland, Oregon}, Booktitle = {Conference Record of the 21st Symposium on Principles of Programming Languages}, Key = {POPL'94}, Publisher = ACM, Title = {Conference {R}ecord of the 21st {S}ymposium on {P}rinciples of {P}rogramming {L}anguages}, Year = 1994} @proceedings{popl95, Address = {San Francisco, California}, Booktitle = {Conference Record of the 22nd Symposium on Principles of Programming Languages}, Key = {POPL'95}, Month = jan, Publisher = ACM, Title = {Conference {R}ecord of the 22nd {S}ymposium on {P}rinciples of {P}rogramming {L}anguages}, Year = 1995} @proceedings{popl97, Address = {Paris, France}, Booktitle = {Conference Record of the 24th Symposium on Principles of Programming Languages}, Month = jan, Publisher = ACM, Title = {Conference {R}ecord of the 24th {S}ymposium on {P}rinciples of {P}rogramming {L}anguages}, Year = 1997} @proceedings{popl99, Address = {San Antonio, Texas, United States}, Booktitle = {Proceedings of the 26th ACM SIGPLAN-SIGACT symposium on Principles of programming languages}, Isbn = {1-58113-095-3}, Publisher = ACM, Title = {Proceedings of the 26th {A}{C}{M} {S}{I}{G}{P}{L}{A}{N}-{S}{I}{G}{A}{C}{T} symposium on {P}rinciples of programming languages}, Year = {1999}} @proceedings{popl00, Address = {Boston, Masschusetts}, Booktitle = {Conference Record of the 27th Symposium on Principles of Programming Languages}, Editor = {Thomas Reps}, Month = jan, Publisher = ACM, Title = {Conference {R}ecord of the 27th {S}ymposium on {P}rinciples of {P}rogramming {L}anguages}, Year = 2000} @proceedings{popl02, Address = {Portland, OR, USA}, Booktitle = {Conference Record of the 29th Symposium on Principles of Programming Languages}, Editor = {John Mitchell}, Month = jan, Publisher = ACM, Title = {Conference {R}ecord of the 29th {S}ymposium on {P}rinciples of {P}rogramming {L}anguages}, Year = 2002} @proceedings{popl06, Address = {Charleston, South Carolina}, Booktitle = {Conference Record of the 33rd Symposium on Principles of Programming Languages}, Month = jan, Publisher = ACM, Title = {Conference {R}ecord of the 33rd {S}ymposium on {P}rinciples of {P}rogramming {L}anguages}, Year = 2006} @proceedings{ppcp94, Address = {Orcas Island, Washington, USA}, Booktitle = {Principles and Practice of Constraint 
Programming}, Title = {Principles and {P}ractice of {C}onstraint {P}rogramming}, Year = 1994} @proceedings{rta87, Address = {Bordeaux, France}, Booktitle = {Rewriting Techniques and Applications}, Editor = {Pierre Lescanne}, Month = may, Publisher = SV, Series = LNCS, Title = {Rewriting {T}echniques and {A}pplications}, Volume = 256, Year = 1987} @proceedings{rta89, Address = {Chapel Hill, U.S.A.}, Booktitle = {Rewriting Techniques and Applications}, Editor = {Nachum Dershowitz}, Month = apr, Publisher = SV, Series = LNCS, Title = {Rewriting {T}echniques and {A}pplications}, Volume = 355, Year = 1989} @proceedings{rta91, Address = {Como, Italy}, Booktitle = {4th International Conference on Rewriting Techniques and Applications}, Editor = {Ronald. V. Book}, Month = apr, Publisher = SV, Series = LNCS, Title = {4th {I}nternational {C}onference on {R}ewriting {T}echniques and {A}pplications}, Volume = 488, Year = 1991} @proceedings{rta93, Address = {Montreal, Canada}, Booktitle = {5th International Conference on Rewriting Techniques and Applications}, Editor = {Claude Kirchner}, Month = jun, Publisher = SV, Series = LNCS, Title = {5th {I}nternational {C}onference on {R}ewriting {T}echniques and {A}pplications}, Volume = 690, Year = 1993} @proceedings{rta95, Address = {Kaiserslautern, Germany}, Booktitle = {6th International Conference on Rewriting Techniques and Applications}, Editor = {Jieh Hsiang}, Month = apr, Publisher = SV, Series = LNCS, Title = {6th {I}nternational {C}onference on {R}ewriting {T}echniques and {A}pplications}, Volume = 914, Year = 1995} @proceedings{rta96, Address = {New Brunswick, NJ, USA}, Booktitle = {7th International Conference on Rewriting Techniques and Applications}, Editor = {Harald Ganzinger}, Month = jul, Publisher = SV, Series = LNCS, Title = {7th {I}nternational {C}onference on {R}ewriting {T}echniques and {A}pplications}, Volume = 1103, Year = 1996} @proceedings{rta97, Address = {Barcelona, Spain}, Booktitle = {8th International Conference on Rewriting Techniques and Applications}, Editor = {Hubert Comon}, Month = jun, Publisher = SV, Series = LNCS, Title = {8th {I}nternational {C}onference on {R}ewriting {T}echniques and {A}pplications}, Volume = {1232}, Year = 1997} @proceedings{rta98, Address = {Tsukuba, Japan}, Booktitle = {9th International Conference on Rewriting Techniques and Applications}, Editor = {Tobias Nipkow}, Month = apr, Publisher = SV, Series = LNCS, Title = {9th {I}nternational {C}onference on {R}ewriting {T}echniques and {A}pplications}, Volume = {1379}, Year = 1998} @proceedings{rta99, Address = {Trento, Italy}, Booktitle = {10th International Conference on Rewriting Techniques and Applications}, Editor = {Paliath Narendran and Michael Rusinowitch}, Month = jul, Publisher = SV, Series = LNCS, Title = {10th {I}nternational {C}onference on {R}ewriting {T}echniques and {A}pplications}, Volume = {1631}, Year = 1999} @proceedings{rta00, Address = {Norwich, UK}, Booktitle = {11th International Conference on Rewriting Techniques and Applications}, Editor = {Leo Bachmair}, Month = jul, Publisher = sv, Series = lncs, Title = {11th {I}nternational {C}onference on {R}ewriting {T}echniques and {A}pplications}, Volume = 1833, Year = 2000} @proceedings{rta01, Address = {Utrecht, The Netherlands}, Booktitle = {12th International Conference on Rewriting Techniques and Applications}, Editor = {Aart Middeldorp}, Month = may, Publisher = sv, Series = lncs, Title = {12th {I}nternational {C}onference on {R}ewriting {T}echniques and {A}pplications}, Volume = 
2051, Year = 2001} @proceedings{rta02, Address = {Copenhagen, Denmark}, Booktitle = {13th International Conference on Rewriting Techniques and Applications}, Editor = {Sophie Tison}, Month = Jul, Publisher = sv, Series = lncs, Title = {13th {I}nternational {C}onference on {R}ewriting {T}echniques and {A}pplications}, Volume = 2378, Year = 2002} @proceedings{rta03, Address = {Valencia, Spain}, Booktitle = {14th International Conference on Rewriting Techniques and Applications}, Editor = {Roberto Nieuwenhuis}, Month = jun, Publisher = sv, Series = lncs, Title = {14th {I}nternational {C}onference on {R}ewriting {T}echniques and {A}pplications}, Volume = 2706, Year = 2003} @proceedings{rta04, Address = {Aachen, Germany}, Booktitle = {15th International Conference on Rewriting Techniques and Applications}, Editor = {Vincent van Oostrom}, Month = jun, Publisher = sv, Series = lncs, Title = {15th {I}nternational {C}onference on {R}ewriting {T}echniques and {A}pplications}, Volume = 3091, Year = 2004} @proceedings{ijcar01, Address = {Siena, Italy}, Booktitle = {First International Joint Conference on Automated Reasoning}, Editor = {Rajeev Gor{\'e} and Alexander Leitsch and Tobias Nipkow}, Month = jun, Publisher = sv, Series = lnai, Title = {International {J}oint {C}onference on {A}utomated {R}easoning}, Volume = 2083, Year = 2001} @proceedings{ijcar04, Address = {Cork, Ireland}, Booktitle = {Second International Joint Conference on Automated Reasoning}, Publisher = SV, Series = LNAI, Title = {International {J}oint {C}onference on {A}utomated {R}easoning}, Volume = 3097, Year = 2004} @proceedings{ijcar06, Address = {Seattle, USA}, Booktitle = {Third International Joint Conference on Automated Reasoning}, Editor = {Ulrich Furbach and Natarajan Shankar}, Month = aug, Publisher = SV, Series = LNCS, Title = {International {J}oint {C}onference on {A}utomated {R}easoning}, Volume = 4130, Year = 2006} @proceedings{srt95, Address = {Monte Verita, Switzerland}, Booktitle = {Proceedings of the Conference on Symbolic Rewriting Techniques}, Editor = {Manuel Bronstein and Volker Weispfenning}, Title = {Proceedings of the {C}onference on {S}ymbolic {R}ewriting {T}echniques}, Year = 1995} @proceedings{stacs92, Address = {Paris, France}, Booktitle = {9th Annual Symposium on Theoretical Aspects of Computer Science}, Editor = {P. Enjalbert and A. Finkel and K. W. Wagner}, Publisher = sv, Series = lncs, Volume = 577, Year = 1992} @proceedings{tacs91, Booktitle = {Theoretical Aspects of Computer Software}, Editor = {T. Ito and A. R. 
Meyer}, Month = sep, Publisher = SV, Series = LNCS, Title = {Theoretical {A}spects of {C}omputer {S}oftware}, Volume = {526}, Year = 1991} @proceedings{tacs97, Booktitle = {Theoretical Aspects of Computer Software}, Editor = {Takahashi Ito and Martin Abadi}, Publisher = sv, Series = LNCS, Title = {Theoretical {A}spects of {C}omputer {S}oftware}, Volume = 1281, Year = 1997} @proceedings{tapsoft85, Booktitle = {Proceedings of the International Joint Conference on Theory and Practice of Software Development}, Publisher = SV, Series = LNCS, Title = {Proceedings of the {I}nternational {J}oint {C}onference on {T}heory and {P}ractice of {S}oftware {D}evelopment}, Volume = {185}, Year = 1985} @proceedings{tapsoft87-2, Address = {Pisa, Italy}, Booktitle = {Proceedings of the International Joint Conference on Theory and Practice of Software Development}, Publisher = SV, Series = LNCS, Title = {Proceedings of the {I}nternational {J}oint {C}onference on {T}heory and {P}ractice of {S}oftware {D}evelopment}, Volume = {250}, Year = 1987} @proceedings{tapsoft89-1, Address = {Barcelona, Spain}, Booktitle = {Proceedings of the International Joint Conference on Theory and Practice of Software Development}, Editor = {J. D\'{\i}az and F. Orejas}, Month = mar, Note = {1 - Advanced Seminar on Foundations of Innovative Software Development {I} and Colloquium on Trees in Algebra and Programming}, Publisher = SV, Series = LNCS, Title = {Proceedings of the {I}nternational {J}oint {C}onference on {T}heory and {P}ractice of {S}oftware {D}evelopment}, Volume = {351}, Year = 1989} @proceedings{tapsoft93, Address = {Orsay, France}, Booktitle = {4th International Joint Conference on Theory and Practice of Software Development}, Editor = {M. C. Gaudel and J.-P. Jouannaud}, Month = apr, Publisher = SV, Series = LNCS, Title = {4th {I}nternational {J}oint {C}onference on {T}heory and {P}ractice of {S}oftware {D}evelopment}, Type_Publi = {editeur}, Volume = 668, Year = 1993} @proceedings{tapsoft97, Address = {Lille, France}, Booktitle = {Theory and Practice of Software Development}, Editor = {Michel Bidoit and Max Dauchet}, Month = apr, Publisher = SV, Series = LNCS, Title = {Theory and {P}ractice of {S}oftware {D}evelopment}, Volume = {1214}, Year = 1997} @proceedings{tlca93, Title = {International {C}onference of {T}yped {L}ambda {C}alculi and {A}pplications}, Year = {1993}} @proceedings{wadt92, Address = {Caldes de Malevella, Spain}, Month = oct, Publisher = {Springer 1994}, Title = {Proceedings 10th workshop on abstract data types.}, Year = 1992} @proceedings{fossacs2004, Booktitle = {Foundations of Software Science and Computation Structures}, Number = 2987, Publisher = sv, Series = lncs, Title = {Foundations of {S}oftware {S}cience and {C}omputation {S}tructures}, Year = 2004} @proceedings{fossacs2001, Address = {Genova, Italy}, Booktitle = {Foundations of Software Science and Computation Structures}, Editor = {Honsell, Furio}, Month = apr, Publisher = SV, Series = LNCS, Title = {Foundations of {S}oftware {S}cience and {C}omputation {S}tructures}, Volume = 2030, Year = 2001} @proceedings{fossacs2000, Address = {Berlin, Germany}, Booktitle = {Foundations of Software Science and Computation Structures}, Editor = {Jerzy Tiuryn}, Month = mar, Publisher = SV, Series = LNCS, Title = {Foundations of {S}oftware {S}cience and {C}omputation {S}tructures}, Volume = 1784, Year = 2000} @proceedings{fossacs99, Booktitle = {Foundations of Software Science and Computation Structures}, Publisher = SV, Series = LNCS, Title = {Foundations 
of {S}oftware {S}cience and {C}omputation {S}tructures}, Volume = 1578, Year = 1999} @proceedings{tacas2000, Address = {Berlin, Germany}, Booktitle = {Tools and Algorithms for the Construction and Analysis of Systems}, Editor = {Susanne Graf and Michael Schwartzbach}, Month = mar, Publisher = SV, Series = LNCS, Title = {Tools and {A}lgorithms for the {C}onstruction and {A}nalysis of {S}ystems}, Volume = 1785, Year = 2000} @proceedings{sas00, Address = {Santa Barbara, CA, USA}, Booktitle = {Static Analysis Symposium}, Editor = {Jens Palsberg}, Month = jun # {/} # jul, Title = {Static {A}nalysis {S}ymposium}, Year = 2000} @proceedings{tphols99, Address = {Nice}, Clef_Labo = {BDP+99}, Date-Modified = {2008-10-13 14:38:46 +0200}, Editor = {Yves Bertot and Gilles Dowek and Christine Paulin-Mohring and Laurent Th{\'e}ry}, Month = sep, Publisher = sv, Series = lncs, Title = {International {C}onference on {T}heorem {P}roving in {H}igher {O}rder {L}ogics ({T}{P}{H}{O}{L}s'99)}, Topics = {team}, Type_Publi = {editeur}, Year = 1999} @proceedings{tphols2000, Booktitle = {Theorem Proving in Higher Order Logics: 13th International Conference, TPHOLs 2000}, Editor = {J. Harrison and M. Aagaard}, Publisher = sv, Series = lncs, Title = {Theorem {P}roving in {H}igher {O}rder {L}ogics: 13th {I}nternational {C}onference, {T}{P}{H}{O}{L}s 2000}, Volume = 1869, Year = 2000} @proceedings{tphols2005, Address = {Oxford, UK}, Booktitle = {18th International Conference on Theorem Proving in Higher Order Logics}, Editor = {J. Hurd and T. Melham}, Month = aug, Publisher = sv, Series = lncs, Title = {Theorem {P}roving in {H}igher {O}rder {L}ogics: 18th {I}nternational {C}onference, {T}{P}{H}{O}{L}s 2005}, Volume = 3603, Year = 2005} @proceedings{tphols02, Address = {Hampton, VA, USA}, Booktitle = {Theorem Proving in Higher Order Logics: 15th International Conference, TPHOLs 2002}, Editor = {Victor A. {Carre\~{n}o} and C\'{e}sar A. {Mu\~{n}oz} and Sofi\`{e}ne Tahar}, Month = aug, Publisher = sv, Series = lncs, Title = {Theorem {P}roving in {H}igher {O}rder {L}ogics: 15th {I}nternational {C}onference, {T}{P}{H}{O}{L}s 2002}, Volume = 2410, Year = 2002} @proceedings{tphols03, Booktitle = {Proceedings of the 16th International Conference on Theorem Proving in Higher Order Logics (TPHOLs 2003)}, Editor = {David Basin and Burkhart Wolff}, Publisher = {Springer-Verlag}, Series = {LNCS}, Title = {Proceedings of the 16th {I}nternational {C}onference on {T}heorem {P}roving in {H}igher {O}rder {L}ogics ({T}{P}{H}{O}{L}s 2003)}, Volume = 2758, Year = 2003} @proceedings{ppdp99, Address = {Paris}, Booktitle = {Principles and Practice of Declarative Programming, International Conference PPDP'99}, Editor = {Gopalan Nadathur}, Publisher = sv, Series = lncs, Title = {Principles and {P}ractice of {D}eclarative {P}rogramming, {I}nternational {C}onference {P}{P}{D}{P}'99}, Volume = {1702}, Year = 1999} @proceedings{ppdp2002, Address = {Pittsburgh, USA}, Booktitle = {Proc. of 4th ACM Sigplan Conference on Principles and Practice of Declarative Programming, PPDP'02}, Editor = {Claude Kirchner}, Month = oct, Publisher = {ACM Press, New York}, Title = {Proc.
of 4th {A}{C}{M} {S}igplan {C}onference on {P}rinciples and {P}ractice of {D}eclarative {P}rogramming, {P}{P}{D}{P}'02}, Year = 2002} @proceedings{mcu01, Address = {Chisinau, Moldova}, Month = may, Publisher = sv, Series = lncs, Title = {Machines, {C}omputations and {U}niversality ({M}{C}{U}2001)}, Volume = 2055, Year = 2001} @book{comon01cclbook, Booktitle = {Constraints in Computational Logics}, Editor = {Hubert Comon and Claude March{\'e} and Ralf Treinen}, Publisher = SV, Series = LNCS, Title = {Constraints in {C}omputational {L}ogics}, Topics = {team}, Type_Publi = {editeur}, Volume = 2002, Year = 2001} @proceedings{lfcs97, Address = {Yaroslavl, Russia}, Booktitle = {Logical Foundations of Computer Science}, Editor = {Sergei I. Adian and Anil Nerode}, Month = jul, Publisher = sv, Series = lncs, Title = {Logical {F}oundations of {C}omputer {S}cience}, Volume = 1234, Year = 1997} @proceedings{lfcs94, Booktitle = {Proceedings of the 3rd International Symposium on Logical Foundations of Computer Science}, Editor = {Anil Nerode and Yuri Matiyasevich}, Month = jul, Place = {St Petersburg, Russia}, Publisher = sv, Series = lncs, Volume = 813, Year = 1994} @techreport{unif2001, Address = {Siena, Italy}, Author = {Franz Baader and Volker Diekert and Cesare Tinelli and Ralf Treinen}, Institution = {Universit{\`a} degli Studi di Siena, Dipartimento di Ingegneria dell'Informazione}, Month = jun, Number = {DII 09/01}, Title = {U{N}{I}{F} 2001 - 15th {I}nternational {W}orkshop on {U}nification}, Topics = {team}, Type = {Technical Report}, Type_Publi = {diffusion}, Year = 2001} @book{alves-foss99, Booktitle = {Formal Syntax and Semantics of Java}, Editor = {J. Alves-Foss}, Publisher = SV, Series = LNCS, Title = {Formal {S}yntax and {S}emantics of {J}ava}, Volume = 1523, Year = 1999} @proceedings{itrs02, Address = {Copenhaguen, Danemark}, Booktitle = {Second Workshop on Intersection Types and Related Systems}, Editor = {Stefan Van Bakel}, Month = jul, Publisher = ES, Series = entcs, Title = {Second {W}orkshop on {I}ntersection {T}ypes and {R}elated {S}ystems}, Volume = 70, Year = 2002} @proceedings{ifiptcs2002, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {IFIP TCS}, Editor = {Ricardo A. 
Baeza-Yates and Ugo Montanari and Nicola Santoro}, Isbn = {1-4020-7181-7}, Publisher = {Kluwer}, Series = {IFIP Conference Proceedings}, Title = {Foundations of Information Technology in the Era of Networking and Mobile Computing, IFIP 17$^{\mbox{th}}$ World Computer {C}ongress - {T}{C}1 {S}tream / 2$^{\mbox{nd}}$ {I}{F}{I}{P} {I}nternational {C}onference on {T}heoretical {C}omputer {S}cience ({T}{C}{S} 2002), {A}ugust 25-30, 2002, {M}ontr{\'e}al, {Q}u{\'e}bec, {C}anada}, Year = 2002} @proceedings{tlca2001, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TLCA}, Editor = {Samson Abramsky}, Isbn = {3-540-41960-8}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Typed {L}ambda {C}alculi and {A}pplications, 5th {I}nternational {C}onference, {T}{L}{C}{A} 2001, {K}rakow, {P}oland, {M}ay 2-5, 2001, {P}roceedings}, Volume = {2044}, Year = {2001}} @proceedings{tlca2003, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TLCA}, Editor = {Martin Hofmann}, Isbn = {3-540-40332-9}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Typed {L}ambda {C}alculi and {A}pplications, 6th {I}nternational {C}onference, {T}{L}{C}{A} 2003, {V}alencia, {S}pain, {J}une 10-12, 2003, {P}roceedings}, Volume = {2701}, Year = {2003}} @proceedings{ccl98, Booktitle = {5th International Workshop on } # ccl, Place = {Jerusalem, Israel}, Year = 1998} @proceedings{tlca95, Booktitle = {International Conference of Typed Lambda Calculi and Applications}, Year = {1995}} @proceedings{hoa93, Address = {Amsterdam, The Netherlands}, Booktitle = {First International Workshop HOA93}, Editor = {Jan Heering and Karl Meinke and Bernhard M{\"o}ller and Tobias Nipkow}, Month = Sep, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {{F}irst {I}nternational {W}orkshop {H}{O}{A}93}, Volume = {816}, Year = {1993}} @proceedings{types00, Booktitle = {TYPES 2000}, Editor = {Paul Callaghan and Zhaohui Luo and James McKinna and Robert Pollack}, Isbn = {3-540-43287-6}, Publisher = SV, Series = LNCS, Title = {Types for {P}roofs and {P}rograms, {I}nternational {W}orkshop, {T}{Y}{P}{E}{S} 2000, {D}urham, {U}{K}, {D}ecember 8-12, 2000, {S}elected {P}apers}, Volume = {2277}, Year = {2002}} @proceedings{types02, Booktitle = {TYPES 2002}, Editor = {Herman Geuvers and Freek Wiedijk}, Publisher = SV, Series = LNCS, Title = {Types for {P}roofs and {P}rograms {S}econd {I}nternational {W}orkshop, {T}{Y}{P}{E}{S} 2002, {B}erg en {D}al, {T}he {N}etherlands, {A}pril 24-28, 2002, {S}elected {P}apers}, Volume = 2646, Year = 2003} @proceedings{types03, Address = {Torino, Italy}, Booktitle = {3rd International Workshop on Types for Proofs and Programs}, Editor = {Stefano Berardi and Mario Coppo and Ferruccio Damiani}, Isbn = {3-540-22164-6}, Month = apr, Publisher = SV, Series = LNCS, Title = {3rd {I}nternational {W}orkshop on {T}ypes for {P}roofs and {P}rograms}, Volume = 3085, Year = 2004} @proceedings{types04, Booktitle = {TYPES 2004}, Isbn = {3-540-31428-8}, Publisher = SV, Series = LNCS, Title = {Types for {P}roofs and {P}rograms, {I}nternational {W}orkshop, {T}{Y}{P}{E}{S} 2004, {J}ouy-en-{J}osas, {F}rance, {D}ecember 15-18,2004, {S}elected {P}apers}, Volume = {3839}, Year = {2005}} @proceedings{adcncl2000, Booktitle = {Automated Deduction in Classical and Non-Classical Logics}, Editor = {Ricardo Caferra and G. 
Salzer}, Month = jan, Number = 1761, Publisher = SV, Series = LNAI, Title = {Automated {D}eduction in {C}lassical and {N}on-{C}lassical {L}ogics}, Year = 2000} @proceedings{wst03, Booktitle = {{Extended Abstracts of the 6th International Workshop on Termination, WST'03}}, Editor = {Albert Rubio}, Month = Jun, Title = {{Extended {A}bstracts of the 6th {I}nternational {W}orkshop on {T}ermination, {W}{S}{T}'03}}, Year = {2003}} @proceedings{wst06, Booktitle = {{Extended Abstracts of the 8th International Workshop on Termination, WST'06}}, Editor = {Alfons Geser and Harald Sondergaard}, Month = aug, Title = {{Extended {A}bstracts of the 8th {I}nternational {W}orkshop on {T}ermination, {W}{S}{T}'06}}, Year = {2006}} @proceedings{wst07, Booktitle = {{Extended Abstracts of the 9th International Workshop on Termination, WST'07}}, Editor = {Dieter Hofbauer and Alexander Serebrenik}, Month = jun, Title = {{Extended {A}bstracts of the 9th {I}nternational {W}orkshop on {T}ermination, {W}{S}{T}'07}}, Year = {2007}} @proceedings{amast04, Address = {Stirling, UK}, Booktitle = {Algebraic Methodology and Software Technology}, Month = jul, Publisher = SV, Series = LNCS, Title = {Algebraic {M}ethodology and {S}oftware {T}echnology}, Volume = 3116, Year = 2004} @proceedings{pepm04, Address = {Verona, Italy}, Booktitle = {ACM SIGPLAN 2004 Symposium on Partial Evaluation and Program Manipulation}, Month = aug, Publisher = ACM, Title = {Partial {E}valuation and {P}rogram {M}anipulation}, Year = 2004} @proceedings{icfem04, Address = {Seattle, WA, USA}, Booktitle = {6th International Conference on Formal Engineering Methods}, Editor = {Jim Davies and Wolfram Schulte and Mike Barnett}, Month = nov, Publisher = SV, Series = LNCS, Title = {Formal {E}ngineering {M}ethods}, Volume = 3308, Year = 2004} @proceedings{icfem05, Booktitle = {ICFEM}, Editor = {Kung-Kiu Lau and Richard Banach}, Isbn = {3-540-29797-9}, Publisher = SV, Series = LNCS, Title = {Formal {M}ethods and {S}oftware {E}ngineering, 7th {I}nternational {C}onference on {F}ormal {E}ngineering {M}ethods, {I}{C}{F}{E}{M} 2005, {M}anchester, {U}{K}, {N}ovember 1-4, 2005, {P}roceedings}, Volume = {3785}, Year = {2005}} @proceedings{cav04, Address = {Boston, MA, USA}, Booktitle = {16th International Conference on Computer Aided Verification}, Editor = {Rajeev Alur and Doron A. 
Peled}, Month = jul, Publisher = SV, Series = lncs, Title = {Computer {A}ided {V}erification}, Volume = 3114, Year = {2004}} @proceedings{cav2004, Booktitle = {CAV}, Editor = {Rajeev Alur and Doron Peled}, Publisher = SV, Series = LNCS, Title = {Computer {A}ided {V}erification, 16th {I}nternational {C}onference, {C}{A}{V} 2004, {B}oston, {M}{A}, {U}{S}{A}, {J}uly 13-17, 2004, {P}roceedings}, Volume = {3114}, Year = {2004}} @proceedings{jfla05, Booktitle = {Seizi\`emes Journ\'ees Francophones des Langages Applicatifs}, Month = mar, Organization = {INRIA}, Publisher = {INRIA}, Title = {Journ\'ees {F}rancophones des {L}angages {A}pplicatifs}, Year = 2005} @proceedings{jfla06, Booktitle = {Dix-septi\`emes Journ\'ees Francophones des Langages Applicatifs}, Month = jan, Organization = {INRIA}, Publisher = {INRIA}, Title = {Journ\'ees {F}rancophones des {L}angages {A}pplicatifs}, Year = 2006} @proceedings{jfla07, Booktitle = {Dix-huiti\`emes Journ\'ees Francophones des Langages Applicatifs}, Month = jan, Organization = {INRIA}, Publisher = {INRIA}, Title = {Journ\'ees {F}rancophones des {L}angages {A}pplicatifs}, Year = 2007} @proceedings{jfla08, Booktitle = {Dix-neuvi\`emes Journ\'ees Francophones des Langages Applicatifs}, Month = jan, Organization = {INRIA}, Publisher = {INRIA}, Title = {Journ\'ees {F}rancophones des {L}angages {A}pplicatifs}, Year = 2008} @proceedings{esop03, Booktitle = {12th European Symposium on Programming, ESOP 2003}, Editor = {Pierpaolo Degano}, Publisher = SV, Series = LNCS, Title = {Programming Languages and Systems, 12th European Symposium on Programming, E{S}{O}{P} 2003, {H}eld as {P}art of the {J}oint {E}uropean {C}onferences on {T}heory and {P}ractice of {S}oftware, {E}{T}{A}{P}{S} 2003, {W}arsaw, {P}oland, {A}pril 7-11, 2003, {P}roceedings}, Volume = {2618}, Year = {2003}} @proceedings{sefm05, Address = {Koblenz, Germany}, Booktitle = {3rd IEEE International Conference on Software Engineering and Formal Methods (SEFM'05)}, Editor = {Bernhard K. Aichernig and Bernhard Beckert}, Month = sep, Publisher = IEEECSP, Title = {Software {E}ngineering and {F}ormal {M}ethods}, Year = 2005} @proceedings{sefm06, Address = {Pune, India}, Booktitle = {4th IEEE International Conference on Software Engineering and Formal Methods (SEFM'06)}, Editor = {Dang Van Hung and Paritosh Pandya}, Month = sep, Publisher = IEEECSP, Title = {Software {E}ngineering and {F}ormal {M}ethods}, Year = 2006} @proceedings{rta05, Address = {Nara, Japan}, Booktitle = {16th International Conference on Rewriting Techniques and Applications (RTA'05)}, Editor = {J\"urgen Giesl}, Month = apr, Publisher = SV, Series = LNCS, Title = {Term {R}ewriting and {A}pplications}, Volume = {3467}, Year = 2005} @proceedings{rta06, Address = {Seattle, USA}, Booktitle = {17th International Conference on Rewriting Techniques and Applications (RTA'06)}, Editor = {Frank Pfenning}, Month = aug, Publisher = SV, Series = LNCS, Title = {Term {R}ewriting and {A}pplications}, Volume = {4098}, Year = 2006} @proceedings{rta07, Address = {Paris, France}, Booktitle = {18th International Conference on Rewriting Techniques and Applications (RTA'07)}, Editor = {Franz Baader}, Month = jun, Publisher = SV, Series = LNCS, Title = {Term {R}ewriting and {A}pplications}, Year = 2007} @proceedings{fm05, Address = {Newcastle,UK}, Booktitle = {International Symposium of Formal Methods Europe (FM'05)}, Editor = {John Fitzgerald and Ian J. 
Hayes and Andrzej Tarlecki}, Month = jul, Publisher = SV, Series = LNCS, Title = {Formal {M}ethods}, Volume = {3582}, Year = 2005} @proceedings{avis04, Address = {Barcelona, Spain}, Booktitle = {{P}roceedings of the 3rd {I}nternational {W}orkshop on {A}utomated {V}erification of {I}nfinite-{S}tate {S}ystems ({AVIS}'04)}, Editor = {Bharadwaj, Ramesh}, Month = apr, Year = {2004}} @inproceedings{avis06, Address = {Vienna, Austria}, Booktitle = {{P}roceedings of the 5th {I}nternational {W}orkshop on {A}utomated {V}erification of {I}nfinite-{S}tate {S}ystems ({AVIS}'06)}, Editor = {Bharadwaj, Ramesh}, Month = apr, Year = {2006}} @proceedings{mpc2006, Address = {Kuressaare, Estonia}, Booktitle = {Mathematics of Program Construction, MPC 2006}, Editor = {Tarmo Uustalu}, Month = jul, Publisher = sv, Series = lncs, Title = {Mathematics of {P}rogram {C}onstruction, 8th {I}nternational {C}onference, {M}{P}{C} 2006}, Volume = {4014}, Year = {2006}} @proceedings{icfp06, Address = {Portland, Oregon, USA}, Booktitle = {11th ACM SIGPLAN International Conference on Functional Programming, ICFP 2006}, Editor = {John H. Reppy and Julia L. Lawall}, Isbn = {1-59593-309-3}, Publisher = {ACM}, Title = {Proceedings of the 11th {A}{C}{M} {S}{I}{G}{P}{L}{A}{N} {I}nternational {C}onference on {F}unctional {P}rogramming, {I}{C}{F}{P} 2006}, Year = {2006}} @proceedings{icfp07, Address = {Freiburg, Germany}, Booktitle = {12th ACM SIGPLAN International Conference on Functional Programming, ICFP 2007}, Editor = {Ralf Hinze and Norman Ramsey}, Publisher = {ACM}, Title = {Proceedings of the 12th {A}{C}{M} {S}{I}{G}{P}{L}{A}{N} {I}nternational {C}onference on {F}unctional {P}rogramming, {I}{C}{F}{P} 2007}, Year = {2007}} @proceedings{frocos05, Booktitle = {FroCoS'05}, Editor = {Bernhard Gramlich}, Publisher = SV, Series = LNCS, Title = {Frontiers of {C}ombining {S}ystems, 5th {I}nternational {W}orkshop, {F}ro{C}o{S} 2005, {V}ienna, {A}ustria, {S}eptember 19-21, 2005, {P}roceedings}, Volume = 3717, Year = {2005}} @proceedings{vmcai08, Address = {San Francisco, California, USA}, Booktitle = {9th International Conference on Verification, Model Checking, and Abstract Interpretation}, Editor = {Francesco Logozzo and Doron Peled and Lenore Zuck}, Month = jan, Publisher = SV, Series = LNCS, Title = {Verification, {M}odel {C}hecking, and {A}bstract {I}nterpretation}, Volume = 4905, Year = {2008}} @proceedings{esop08, Address = {Budapest, Hungary}, Booktitle = {17th European Symposium on Programming (ESOP'08)}, Month = apr, Title = {17th {E}uropean {S}ymposium on {P}rogramming ({E}{S}{O}{P}'08)}, Year = 2008} @webpage{ergo, Author = {Sylvain Conchon and Evelyne Contejean}, Date-Modified = {2008-04-01 13:52:24 +0200}, Note = {\url{http://ergo.lri.fr/}}, Read = {Oui}, Title = {The {Ergo} automatic {T}heorem {P}rover}, Topics = {team,lri}, Url = {http://ergo.lri.fr/}, Bdsk-Url-1 = {http://ergo.lri.fr/}} @inproceedings{filliatre07cav, Author = {Jean-Christophe Filli\^atre and Claude March\'e}, Crossref = {cav07}, Date-Modified = {2008-04-01 13:52:33 +0200}, Read = {Oui}, Title = {The {Why/{K}rakatoa/{C}aduceus} {P}latform for {D}eductive {P}rogram {V}erification}, Topics = {team, lri}, Url = {http://www.lri.fr/~filliatr/ftp/publis/cav07.pdf}, Bdsk-Url-1 = {http://www.lri.fr/~filliatr/ftp/publis/cav07.pdf}} @inproceedings{leavens00jml, Author = {Gary T. Leavens and K. Rustan M. 
Leino and Erik Poll and Clyde Ruby and Bart Jacobs}, Booktitle = {{OOPSLA} 2000 Companion, Minneapolis, Minnesota}, Pages = {105--106}, Title = {{J{M}{L}}: notations and tools supporting detailed design in {Java}}, Year = 2000} @article{marche04jlap, Author = {Claude March{\'e} and Christine Paulin-Mohring and Xavier Urbain}, Journal = jlap, Note = {\url{http://krakatoa.lri.fr}}, Number = {1--2}, Pages = {89--106}, Ps = {http://www.lri.fr/~marche/marche04jlap.ps}, Title = {The \textsc{Krakatoa} {T}ool for {C}ertification of \textsc{Java/{J}ava{C}ard} {P}rograms annotated in \textsc{J{M}{L}}}, Topics = {team}, Type_Publi = {irevcomlec}, Url = {http://krakatoa.lri.fr}, Volume = 58, Year = 2004, Bdsk-Url-1 = {http://krakatoa.lri.fr}} @article{hoare:axiomatic, Abstract = {In this paper an attempt is made to explore the logical foundations of computer programming by use of techniques which were first applied in the study of geometry and have later been extended to other branches of mathematics. This involves the elucidation of sets of axioms and rules of inference which can be used in proofs of the properties of computer programs. Examples are given of such axioms and rules, and a formal proof of a simple theorem is displayed. Finally, it is argued that important advantage, both theoretical and practical, may follow from a pursuance of these topics.}, Address = {New York, NY, USA}, Author = {Hoare, C. A. R.}, Citeulike-Article-Id = {163708}, Date-Added = {2008-03-31 17:43:59 +0200}, Date-Modified = {2008-04-01 13:49:18 +0200}, Doi = {10.1145/363235.363259}, Issn = {0001-0782}, Journal = {Commun. ACM}, Keywords = {computer, foundations, geometry, logical, mathematics, programming, techniques}, Month = {October}, Number = {10}, Pages = {576--580}, Priority = {2}, Publisher = {ACM Press}, Read = {Non}, Title = {An axiomatic basis for computer programming}, Url = {http://portal.acm.org/citation.cfm?id=363259}, Volume = {12}, Year = {1969}, Bdsk-Url-1 = {http://portal.acm.org/citation.cfm?id=363259}, Bdsk-Url-2 = {http://dx.doi.org/10.1145/363235.363259}} @article{ttsemtc, Author = {Aleksandar Nanevski and Paul Govereau and Greg Morrisett}, Date-Added = {2008-03-31 14:03:17 +0200}, Date-Modified = {2008-04-01 13:52:04 +0200}, Keywords = {HTT}, Note = {Submitted}, Read = {Oui}, Title = {Type-theoretic semantics for transactional concurrency}, Year = {2007}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAoLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL3lub3Rjb25jLXRyLnBkZk8RAYYAAAAAAYYAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarmBZIKwAAADZ3yA95bm90Y29uYy10ci5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP+wfyTjpuAAAAAAAAAAAAAMAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAyTjbqAAAAAEAEAA2d8gADtu0AAf5OAAAkOcAAgA2TWFjaW50b3NoIEhEOlVzZXJzOm1hdDpyZXNlYXJjaDpwYXBlcnM6eW5vdGNvbmMtdHIucGRmAA4AIAAPAHkAbgBvAHQAYwBvAG4AYwAtAHQAcgAuAHAAZABmAA8AGgAMAE0AYQBjAGkAbgB0AG8AcwBoACAASABEABIAKVVzZXJzL21hdC9yZXNlYXJjaC9wYXBlcnMveW5vdGNvbmMtdHIucGRmAAATAAEvAAAVAAIACv//AAAACAANABoAJABPAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAdk=}} @article{httpolysep, Author = {Aleksandar Nanevski and Greg Morrisett and Lars Birkedal}, Date-Added = {2008-03-31 13:57:29 +0200}, Date-Modified = {2008-04-01 13:51:57 +0200}, Journal = {Journal of Functional Programming}, Keywords = {HTT}, Note = {To appear}, Read = {Oui}, Title = {Hoare {T}ype {T}heory, {P}olymorphism and {S}eparation}, Year = {2007}, Bdsk-File-1 = 
{YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAlLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL2pmcHNlcDA3LnBkZk8RAXoAAAAAAXoAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarmBZIKwAAADZ3yAxqZnBzZXAwNy5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP9wLyTjl0AAAAAAAAAAAAAMAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAyTjXwAAAAAEAEAA2d8gADtu0AAf5OAAAkOcAAgAzTWFjaW50b3NoIEhEOlVzZXJzOm1hdDpyZXNlYXJjaDpwYXBlcnM6amZwc2VwMDcucGRmAAAOABoADABqAGYAcABzAGUAcAAwADcALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASACZVc2Vycy9tYXQvcmVzZWFyY2gvcGFwZXJzL2pmcHNlcDA3LnBkZgATAAEvAAAVAAIACv//AAAACAANABoAJABMAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAco=}} @inproceedings{DBLP:conf/birthday/GoguenMM06, Author = {Healfdene Goguen and Conor McBride and James McKinna}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {Essays Dedicated to Joseph A. Goguen}, Crossref = {DBLP:conf/birthday/2006goguen}, Date-Added = {2008-03-28 12:48:18 +0100}, Date-Modified = {2008-09-03 19:12:42 +0200}, Ee = {http://dx.doi.org/10.1007/11780274_27}, Pages = {521-540}, Read = {Oui}, Title = {Eliminating {D}ependent {P}attern {M}atching}, Url = {http://www.cs.st-andrews.ac.uk/~james/RESEARCH/pattern-elimination-final.pdf}, Year = {2006}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxA2Li4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL3BhdHRlcm4tZWxpbWluYXRpb24tZmluYWwucGRmTxEBvgAAAAABvgACAAAMTWFjaW50b3NoIEhEAAAAAAAAAAAAAAAAAAAAxquYFkgrAAAANnfIHXBhdHRlcm4tZWxpbWluYXRpb24tZmluYWwucGRmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA/3SPJOOYDAAAAAAAAAAAAAwADAAAJIAAAAAAAAAAAAAAAAAAAAAZwYXBlcnMAEAAIAADGq3v2AAAAEQAIAADJONfzAAAAAQAQADZ3yAAO27QAB/k4AACQ5wACAERNYWNpbnRvc2ggSEQ6VXNlcnM6bWF0OnJlc2VhcmNoOnBhcGVyczpwYXR0ZXJuLWVsaW1pbmF0aW9uLWZpbmFsLnBkZgAOADwAHQBwAGEAdAB0AGUAcgBuAC0AZQBsAGkAbQBpAG4AYQB0AGkAbwBuAC0AZgBpAG4AYQBsAC4AcABkAGYADwAaAAwATQBhAGMAaQBuAHQAbwBzAGgAIABIAEQAEgA3VXNlcnMvbWF0L3Jlc2VhcmNoL3BhcGVycy9wYXR0ZXJuLWVsaW1pbmF0aW9uLWZpbmFsLnBkZgAAEwABLwAAFQACAAr//wAAAAgADQAaACQAXQAAAAAAAAIBAAAAAAAAAAUAAAAAAAAAAAAAAAAAAAIf}, Bdsk-Url-1 = {http://www.cs.st-andrews.ac.uk/~james/RESEARCH/pattern-elimination-final.pdf}} @proceedings{DBLP:conf/birthday/2006goguen, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {Essays Dedicated to Joseph A. Goguen}, Date-Added = {2008-03-28 12:47:48 +0100}, Date-Modified = {2008-03-28 12:47:48 +0100}, Editor = {Kokichi Futatsugi and Jean-Pierre Jouannaud and Jos{\'e} Meseguer}, Isbn = {3-540-35462-X}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Algebra, {M}eaning, and {C}omputation, {E}ssays {D}edicated to {J}oseph {A}. 
{G}oguen on the {O}ccasion of {H}is 65th {B}irthday}, Volume = {4060}, Year = {2006}} @inproceedings{DBLP:conf/tphol/Oury05, Author = {Nicolas Oury}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TPHOLs}, Crossref = {DBLP:conf/tphol/2005}, Date-Added = {2008-03-28 12:46:57 +0100}, Date-Modified = {2008-04-01 13:50:47 +0200}, Ee = {http://dx.doi.org/10.1007/11541868_18}, Pages = {278-293}, Read = {Oui}, Title = {Extensionality in the {C}alculus of {C}onstructions}, Year = {2005}} @inproceedings{DBLP:conf/plpv/Oury07, Author = {Nicolas Oury}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {PLPV}, Crossref = {DBLP:conf/plpv/2007}, Date-Added = {2008-03-28 12:40:41 +0100}, Date-Modified = {2008-04-01 13:52:00 +0200}, Ee = {http://doi.acm.org/10.1145/1292597.1292606}, Pages = {47-56}, Read = {Oui}, Title = {Pattern matching coverage checking with dependent types using set approximations}, Year = {2007}} @proceedings{DBLP:conf/plpv/2007, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {PLPV}, Date-Added = {2008-03-28 12:40:41 +0100}, Date-Modified = {2008-03-28 12:40:41 +0100}, Editor = {Aaron Stump and Hongwei Xi}, Isbn = {978-1-59593-677-6}, Publisher = {ACM}, Title = {Proceedings of the {A}{C}{M} {W}orkshop {P}rogramming {L}anguages meets {P}rogram {V}erification, {P}{L}{P}{V} 2007, {F}reiburg, {G}ermany, {O}ctober 5, 2007}, Year = {2007}} @inproceedings{DBLP:conf/types/Letouzey02, Author = {Pierre Letouzey}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES'02}, Crossref = {DBLP:conf/types/2002}, Date-Added = {2008-03-24 19:51:18 +0100}, Date-Modified = {2008-04-01 13:49:56 +0200}, Ee = {http://link.springer.de/link/service/series/0558/bibs/2646/26460200.htm}, Keywords = {Extraction}, Pages = {200-219}, Read = {Oui}, Title = {A {N}ew {E}xtraction for {C}oq.}, Year = {2002}} @phdthesis{mcbride00dependently, Author = {Conor McBride}, Date-Added = {2008-03-24 19:50:18 +0100}, Date-Modified = {2019-10-18 15:04:05 +0200}, Keywords = {dependent types, oleg}, Read = {Oui}, School = {University of Edinburgh}, Title = {Dependently {T}yped {F}unctional {P}rograms and {T}heir {P}roofs}, Url = {http://strictlypositive.org/thesis.pdf}, Year = {1999}, Bdsk-File-1 = {YnBsaXN0MDDSAQIDBFxyZWxhdGl2ZVBhdGhZYWxpYXNEYXRhXxAxLi4vLi4vLi4vcmVzZWFyY2gvcGFwZXJzL21jYnJpZGU5OWRlcGVuZGVudGx5LnBkZk8RAaoAAAAAAaoAAgAADE1hY2ludG9zaCBIRAAAAAAAAAAAAAAAAAAAAMarmBZIKwAAADZ3yBhtY2JyaWRlOTlkZXBlbmRlbnRseS5wZGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP9xXyTjl3AAAAAAAAAAAAAMAAwAACSAAAAAAAAAAAAAAAAAAAAAGcGFwZXJzABAACAAAxqt79gAAABEACAAAyTjXzAAAAAEAEAA2d8gADtu0AAf5OAAAkOcAAgA/TWFjaW50b3NoIEhEOlVzZXJzOm1hdDpyZXNlYXJjaDpwYXBlcnM6bWNicmlkZTk5ZGVwZW5kZW50bHkucGRmAAAOADIAGABtAGMAYgByAGkAZABlADkAOQBkAGUAcABlAG4AZABlAG4AdABsAHkALgBwAGQAZgAPABoADABNAGEAYwBpAG4AdABvAHMAaAAgAEgARAASADJVc2Vycy9tYXQvcmVzZWFyY2gvcGFwZXJzL21jYnJpZGU5OWRlcGVuZGVudGx5LnBkZgATAAEvAAAVAAIACv//AAAACAANABoAJABYAAAAAAAAAgEAAAAAAAAABQAAAAAAAAAAAAAAAAAAAgY=}, Bdsk-Url-1 = {http://citeseer.ist.psu.edu/mcbride99dependently.html}} @inproceedings{DBLP:conf/types/McBride00, Author = {Conor McBride}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Crossref = {DBLP:conf/types/2000}, Date-Added = {2008-03-24 19:50:18 +0100}, Date-Modified = {2008-04-01 13:49:48 +0200}, Ee = {http://link.springer.de/link/service/series/0558/bibs/2277/22770197.htm}, Keywords = {dependent types, pattern matching}, Pages = {197-216}, Read = {Oui}, Title = {Elimination with a {M}otive}, Year = {2000}} 
@inproceedings{DBLP:conf/types/BradyMM03, Author = {Edwin Brady and Conor McBride and James McKinna}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Crossref = {DBLP:conf/types/2003}, Date-Added = {2008-03-24 19:50:18 +0100}, Date-Modified = {2008-04-01 13:50:07 +0200}, Ee = {http://springerlink.metapress.com/openurl.asp?genre=article{\&}issn=0302-9743{\&}volume=3085{\&}spage=115}, Keywords = {dependent types, staging}, Pages = {115-129}, Read = {Oui}, Title = {Inductive {F}amilies {N}eed {N}ot {S}tore {T}heir {I}ndices.}, Year = {2003}} @inproceedings{CtpcPLDI07, Address = {San Diego, California, USA}, Author = {Adam Chlipala}, Booktitle = {PLDI'07: Proceedings of the ACM SIGPLAN 2007 Conference on Programming Language Design and Implementation}, Date-Added = {2008-03-24 19:49:19 +0100}, Date-Modified = {2008-04-01 13:52:11 +0200}, Month = jun, Read = {Oui}, Title = {A {C}ertified {T}ype-{P}reserving {C}ompiler from {L}ambda {C}alculus to {A}ssembly {L}anguage}, Url = {http://www.cs.berkeley.edu/~adamc/papers/CtpcPLDI07/}, Year = {2007}, Bdsk-Url-1 = {http://www.cs.berkeley.edu/~adamc/papers/CtpcPLDI07/}} @inproceedings{CertVerICFP06, Address = {Portland, Oregon, USA}, Author = {Adam Chlipala}, Booktitle = {ICFP'06: Proceedings of the 11th ACM SIGPLAN International Conference on Functional Programming}, Date-Added = {2008-03-24 19:49:19 +0100}, Date-Modified = {2008-04-01 13:51:50 +0200}, Month = sep, Read = {Oui}, Title = {Modular {D}evelopment of {C}ertified {P}rogram {V}erifiers with a {P}roof {A}ssistant}, Url = {http://www.cs.berkeley.edu/~adamc/papers/CertVerICFP06/}, Year = {2006}, Bdsk-Url-1 = {http://www.cs.berkeley.edu/~adamc/papers/CertVerICFP06/}} @inproceedings{PcvVMCAI06, Address = {Charleston, South Carolina, USA}, Author = {Bor-Yuh Evan Chang and Adam Chlipala and George C. Necula}, Booktitle = {VMCAI'06: Proceedings of the 7th International Conference on Verification, Model Checking, and Abstract Interpretation}, Date-Added = {2008-03-24 19:49:19 +0100}, Date-Modified = {2008-03-24 19:49:19 +0100}, Month = jan, Title = {A {F}ramework for {C}ertified {P}rogram {A}nalysis and {I}ts {A}pplications to {M}obile-{C}ode {S}afety}, Url = {http://www.cs.berkeley.edu/~adamc/papers/PcvVMCAI06/}, Year = {2006}, Bdsk-Url-1 = {http://www.cs.berkeley.edu/~adamc/papers/PcvVMCAI06/}} @inproceedings{BlastICSE04, Address = {Edinburgh, Scotland}, Author = {Dirk Beyer and Adam J. 
Chlipala and Thomas Henzinger and Ranjit Jhala and Rupak Majumdar}, Booktitle = {ICSE'04: Proceedings of the 26th International Conference on Software Engineering}, Date-Added = {2008-03-24 19:49:19 +0100}, Date-Modified = {2008-04-01 13:50:28 +0200}, Month = may, Read = {Oui}, Title = {Generating {T}ests from {C}ounterexamples}, Url = {http://www.cs.berkeley.edu/~adamc/papers/BlastICSE04/}, Year = {2004}, Bdsk-Url-1 = {http://www.cs.berkeley.edu/~adamc/papers/BlastICSE04/}} @inproceedings{PositionPLPV06, Address = {Seattle, Washington, USA}, Author = {Adam Chlipala}, Booktitle = {PLPV'06: Proceedings of the Programming Languages meets Program Verification Workshop}, Date-Added = {2008-03-24 19:49:19 +0100}, Date-Modified = {2008-04-01 13:51:47 +0200}, Month = aug, Read = {Oui}, Title = {Position {P}aper: {T}houghts on {P}rogramming with {P}roof {A}ssistants}, Url = {http://www.cs.berkeley.edu/~adamc/papers/PositionPLPV06/}, Year = {2006}, Bdsk-Url-1 = {http://www.cs.berkeley.edu/~adamc/papers/PositionPLPV06/}} @inproceedings{KettleStrategies06, Address = {Seattle, Washington, USA}, Author = {Adam Chlipala and George C. Necula}, Booktitle = {STRATEGIES'06: Proceedings of the 6th International Workshop on Strategies in Automated Deduction}, Date-Added = {2008-03-24 19:49:19 +0100}, Date-Modified = {2008-04-01 13:51:35 +0200}, Month = aug, Read = {Oui}, Title = {Cooperative {I}ntegration of an {I}nteractive {P}roof {A}ssistant and an {A}utomated {P}rover}, Url = {http://www.cs.berkeley.edu/~adamc/papers/KettleStrategies06/}, Year = {2006}, Bdsk-Url-1 = {http://www.cs.berkeley.edu/~adamc/papers/KettleStrategies06/}} @inproceedings{OpenverTLDI05, Address = {Long Beach, California, USA}, Author = {Bor-Yuh Evan Chang and Adam Chlipala and George C. Necula and Robert R. Schneck}, Booktitle = {TLDI'05: Proceedings of the 2nd ACM SIGPLAN Workshop on Types in Language Design and Implementation}, Date-Added = {2008-03-24 19:49:19 +0100}, Date-Modified = {2008-03-24 19:49:19 +0100}, Month = jan, Title = {The {Open {V}erifier} {F}ramework for {F}oundational {V}erifiers}, Url = {http://www.cs.berkeley.edu/~adamc/papers/OpenverTLDI05/}, Year = {2005}, Bdsk-Url-1 = {http://www.cs.berkeley.edu/~adamc/papers/OpenverTLDI05/}} @inproceedings{CoolaidTLDI05, Address = {Long Beach, California, USA}, Author = {Bor-Yuh Evan Chang and Adam Chlipala and George C. Necula and Robert R. 
Schneck}, Booktitle = {TLDI'05: Proceedings of the 2nd ACM SIGPLAN Workshop on Types in Language Design and Implementation}, Date-Added = {2008-03-24 19:49:19 +0100}, Date-Modified = {2008-03-24 19:49:19 +0100}, Month = jan, Title = {Type-{B}ased {V}erification of {A}ssembly {L}anguage for {C}ompiler {D}ebugging}, Url = {http://www.cs.berkeley.edu/~adamc/papers/CoolaidTLDI05/}, Year = {2005}, Bdsk-Url-1 = {http://www.cs.berkeley.edu/~adamc/papers/CoolaidTLDI05/}} @inproceedings{StrictTLDI05, Address = {Long Beach, California, USA}, Author = {Adam Chlipala and Leaf Petersen and Robert Harper}, Booktitle = {TLDI'05: Proceedings of the 2nd ACM SIGPLAN Workshop on Types in Language Design and Implementation}, Date-Added = {2008-03-24 19:49:19 +0100}, Date-Modified = {2008-04-01 13:50:51 +0200}, Month = jan, Read = {Oui}, Title = {Strict {B}idirectional {T}ype {C}hecking}, Url = {http://www.cs.berkeley.edu/~adamc/papers/StrictTLDI05/}, Year = {2005}, Bdsk-Url-1 = {http://www.cs.berkeley.edu/~adamc/papers/StrictTLDI05/}} @inproceedings{PosterPCC06, Address = {Seattle, Washington, USA}, Author = {Adam Chlipala}, Booktitle = {PCC'06: Proceedings of the International Workshop on Proof-Carrying Code}, Date-Added = {2008-03-24 19:49:19 +0100}, Date-Modified = {2008-03-24 19:49:19 +0100}, Month = aug, Title = {Developing {C}ertified {P}rogram {V}erifiers with a {P}roof {A}ssistant}, Url = {http://www.cs.berkeley.edu/~adamc/papers/PosterPCC06/}, Year = {2006}, Bdsk-Url-1 = {http://www.cs.berkeley.edu/~adamc/papers/PosterPCC06/}} @inproceedings{BlastSAS04, Address = {Verona, Italy}, Author = {Dirk Beyer and Adam J. Chlipala and Thomas Henzinger and Ranjit Jhala and Rupak Majumdar}, Booktitle = {SAS'04: Proceedings of the 11th Static Analysis Symposium}, Date-Added = {2008-03-24 19:49:19 +0100}, Date-Modified = {2008-03-24 19:49:19 +0100}, Month = aug, Title = {The {Blast} {Q}uery {L}anguage for {S}oftware {V}erification}, Url = {http://www.cs.berkeley.edu/~adamc/papers/BlastSAS04/}, Year = {2004}, Bdsk-Url-1 = {http://www.cs.berkeley.edu/~adamc/papers/BlastSAS04/}} @techreport{LaconicTR, Author = {Adam Chlipala}, Date-Added = {2008-03-24 19:49:19 +0100}, Date-Modified = {2008-04-01 13:51:32 +0200}, Number = {UCB/EECS-2006-120}, Read = {Oui}, Title = {Scrap {Y}our {W}eb {A}pplication {B}oilerplate, or {M}etaprogramming with {R}ow {T}ypes}, Type = {{Technical Report}}, Url = {http://www.cs.berkeley.edu/~adamc/papers/LaconicTR/}, Year = {2006}, Bdsk-Url-1 = {http://www.cs.berkeley.edu/~adamc/papers/LaconicTR/}} @techreport{PcvTR, Author = {Bor-Yuh Evan Chang and Adam Chlipala and George C. 
Necula}, Date-Added = {2008-03-24 19:49:19 +0100}, Date-Modified = {2008-03-24 19:49:19 +0100}, Institution = {UC Berkeley EECS Department}, Number = {UCB/ERL M05/32}, Title = {A {F}ramework for {C}ertified {P}rogram {A}nalysis and {I}ts {A}pplications to {M}obile-{C}ode {S}afety}, Type = {{Technical Report}}, Url = {http://www.cs.berkeley.edu/~adamc/papers/PcvTR/}, Year = {2005}, Bdsk-Url-1 = {http://www.cs.berkeley.edu/~adamc/papers/PcvTR/}}
@techreport{ChlipalaMS, Author = {Adam Chlipala}, Date-Added = {2008-03-24 19:49:19 +0100}, Date-Modified = {2008-04-01 13:50:16 +0200}, Institution = {UC Berkeley EECS Department}, Number = {UCB/ERL M04/41}, Read = {Oui}, Title = {An {U}ntrusted {V}erifier for {T}yped {A}ssembly {L}anguage}, Type = {{Technical Report}}, Url = {http://www.cs.berkeley.edu/~adamc/papers/ChlipalaMS/}, Year = {2004}, Bdsk-Url-1 = {http://www.cs.berkeley.edu/~adamc/papers/ChlipalaMS/}}
@article{Benton:2006ce, Abstract = {We introduce a Floyd-Hoare-style framework for specification and verification of machine code programs, based on relational parametricity (rather than unary predicates) and using both step-indexing and a novel form of separation structure. This yields compositional, descriptive and extensional reasoning principles for many features of low-level sequential computation: independence, ownership transfer, unstructured control flow, first-class code pointers and address arithmetic. We demonstrate how to specify and verify the implementation of a simple memory manager and, independently, its clients in this style. The work has been fully machine-checked within the Coq proof assistant. }, Author = {Benton, Nick}, Date-Added = {2008-03-24 19:44:44 +0100}, Date-Modified = {2008-04-01 13:51:03 +0200}, Journal = {Computer Science Logic}, Keywords = {separation logic}, M3 = {10.1007/11874683{\_}12}, Pages = {182--196}, Read = {Oui}, Title = {Abstracting {A}llocation}, Ty = {CHAPTER}, Url = {http://dx.doi.org/10.1007/11874683_12}, Year = {2006}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/11874683_12}}
@article{Nanevski:2007ik, Abstract = {Hoare Type Theory (HTT) combines a dependently typed, higher-order language with monadically-encapsulated, stateful computations. The type system incorporates pre- and post-conditions, in a fashion similar to Hoare and Separation Logic, so that programmers can modularly specify the requirements and effects of computations within types. This paper extends HTT with quantification over abstract predicates (i.e., higher-order logic), thus embedding into HTT the Extended Calculus of Constructions. When combined with the Hoare-like specifications, abstract predicates provide a powerful way to define and encapsulate the invariants of private state that may be shared by several functions, but is not accessible to their clients. We demonstrate this power by sketching a number of abstract data types that demand ownership of mutable memory, including an idealized custom memory manager. }, Author = {Nanevski, Aleksandar and Ahmed, Amal and Morrisett, Greg and Birkedal, Lars}, Date-Added = {2008-03-24 19:36:57 +0100}, Date-Modified = {2008-04-01 13:51:54 +0200}, Journal = {Programming Languages and Systems}, Keywords = {HTT}, M3 = {10.1007/978-3-540-71316-6{\_}14}, Pages = {189--204}, Read = {Oui}, Title = {Abstract {P}redicates and {M}utable {A}{D}{T}s in {H}oare {T}ype {T}heory}, Ty = {CHAPTER}, Url = {http://dx.doi.org/10.1007/978-3-540-71316-6_14}, Year = {2007}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/978-3-540-71316-6_14}}
@inproceedings{mandelbaum:icfp03, Address = {Uppsala, Sweden}, Author = {Yitzhak Mandelbaum and David Walker and Robert Harper}, Booktitle = {ICFP '03: Proceedings of the eighth ACM SIGPLAN International Conference on Functional Programming}, Date-Added = {2008-03-24 19:14:56 +0100}, Date-Modified = {2008-10-09 19:17:37 +0200}, Doi = {http://doi.acm.org/10.1145/944705.944725}, Isbn = {1-58113-756-7}, Keywords = {refinement}, Pages = {213--225}, Publisher = {ACM}, Read = {Oui}, Title = {{A}n {E}ffective {T}heory of {T}ype {R}efinements}, Url = {http://www.cs.princeton.edu/sip/pub/effective-type-refinements03.pdf}, Year = {2003}, Bdsk-Url-1 = {http://portal.acm.org/ft_gateway.cfm?id=944725&type=pdf&coll=GUIDE&dl=GUIDE&CFID=21423515&CFTOKEN=99839047}, Bdsk-Url-2 = {http://doi.acm.org/10.1145/944705.944725}, Bdsk-Url-3 = {http://www.cs.princeton.edu/sip/pub/effective-type-refinements03.pdf}}
@mastersthesis{sozeau.Coq/Russell/report, Address = {LRI, Orsay}, Author = {Matthieu Sozeau}, Date-Added = {2008-03-24 19:08:52 +0100}, Date-Modified = {2009-01-08 14:51:36 +0100}, Keywords = {coercion, subsets, Calculus of Constructions}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/master-sozeau.pdf}, Read = {Oui}, School = {Universit\'e Paris VII}, Title = {Coercion par pr\'edicats en {C}oq}, Year = {2005}}
@misc{sozeau.Coq/Russell/web, Author = {Matthieu Sozeau}, Date-Added = {2008-03-24 19:08:52 +0100}, Date-Modified = {2008-04-01 13:51:29 +0200}, Read =
{Oui}, Title = {{R}ussell}, Web = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/russell.en.html}, Year = {2006}} @unpublished{sozeau.Coq/Russell/meta, Author = {Matthieu Sozeau}, Date-Added = {2008-03-24 19:08:52 +0100}, Date-Modified = {2010-01-05 20:54:14 +0100}, Note = {Notes}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/metaproof.pdf}, Read = {Oui}, Title = {Russell {M}etatheoretic {S}tudy in {C}oq}, Year = {2006}} @misc{sozeau.Coq/Russell/meta-web, Author = {Matthieu Sozeau}, Date-Added = {2008-03-24 19:08:52 +0100}, Date-Modified = {2008-09-03 17:55:28 +0200}, Read = {Oui}, Title = {{R}ussell {M}etatheoretic {S}tudy in {Coq}, experimental development}, Web = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/russell/meta.en.html}, Year = {2006}} @misc{sozeau.Coq/Russell/types06, Address = {University of Nottingham, UK}, Author = {Matthieu Sozeau}, Date-Added = {2008-03-24 19:08:52 +0100}, Date-Modified = {2008-04-01 13:51:33 +0200}, Howpublished = {Talk given at TYPES'06}, Month = {19-21 april}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Subset_Coercions_in_Coq-types06-210406.pdf}, Read = {Oui}, Title = {Subset {C}oercions in {Coq}}, Type = {slides}, Year = {2006}} @inproceedings{sozeau.Coq/Russell/article, Abstract = {We propose a new language for writing programs with dependent types on top of the {C}oq proof assistant. This language permits to establish a phase distinction between writing and proving algorithms in the {C}oq environment. Concretely, this means allowing to write algorithms as easily as in a practical functional programming language whilst giving them as rich a specification as desired and proving that the code meets the specification using the whole {C}oq proof apparatus. This is achieved by extending conversion to an equivalence which relates types and subsets based on them, a technique originating from the ``Predicate subtyping'' feature of PVS and following mathematical convention. The typing judgements can be translated to the Calculus of Inductive Constructions by means of an interpretation which inserts coercions at the appropriate places. These coercions can contain existential variables representing the propositional parts of the final term, corresponding to proof obligations (or PVS type-checking conditions). 
A prototype implementation of this process is integrated with the {C}oq environment.}, Author = {Matthieu Sozeau}, Booktitle = {TYPES'06}, Copyright = {Springer}, Date-Added = {2008-03-24 19:08:52 +0100}, Date-Modified = {2008-12-29 19:17:42 +0100}, Doi = {http://dx.doi.org/10.1007/978-3-540-74464-1_16}, Editor = {Thorsten Altenkirch and Conor McBride}, Pages = {237-252}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Subset_Coercions_in_Coq.pdf}, Publisher = {Springer}, Read = {Oui}, Series = {Lecture Notes in Computer Science}, Slides = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Subset_Coercions_in_Coq-types06-210406.pdf}, Title = {{Subset Coercions in Coq}}, Volume = {4502}, Year = {2007}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/978-3-540-74464-1_16}} @misc{sozeau.Coq/FingerTrees/web, Author = {Matthieu Sozeau}, Date-Added = {2008-03-24 19:08:52 +0100}, Date-Modified = {2008-04-01 13:51:56 +0200}, Read = {Oui}, Title = {{D}ependent {F}inger {T}rees in {C}oq}, Web = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/russell/fingertrees.en.html}, Year = {2007}} @inproceedings{sozeau.Coq/FingerTrees/article, Abstract = {Finger Trees (Hinze & Paterson, JFP 2006) are a general purpose persistent data structure with good performance. Their genericity permits developing a wealth of structures like ordered sequences or interval trees on top of a single implementation. However, the type systems used by current functional languages do not guarantee the coherent parameterization and specialization of Finger Trees, let alone the correctness of their implementation. We present a certified implementation of Finger Trees solving these problems using the {P}rogram extension of {C}oq. We not only implement the structure but also prove its invariants along the way, which permit building certified structures on top of Finger Trees in an elegant way.}, Address = {Freiburg, Germany}, Author = {Matthieu Sozeau}, Booktitle = {ICFP'07}, Copyright = {ACM, 2007. This is the author's version of the work. It is posted here by permission of ACM for your personal use. Not for redistribution. 
The definitive version was published in ICFP'07, http://doi.acm.org/10.1145/1291151.1291156}, Date-Added = {2008-03-24 19:08:52 +0100}, Date-Modified = {2014-02-10 14:13:02 +0000}, Doi = {http://doi.acm.org/10.1145/1291151.1291156}, Isbn = {978-1-59593-815-2}, Pages = {13--24}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Program-ing_Finger_Trees_in_Coq.pdf}, Publisher = {ACM Press}, Read = {Oui}, Slides = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Program-ing_Finger_Trees_in_Coq-icfp07-011007.pdf}, Title = {{P}rogram-ing {F}inger {T}rees in {C}oq}, Year = {2007}, Bdsk-Url-1 = {http://doi.acm.org/10.1145/1291151.1291156}} @unpublished{sozeau.Coq/lambda/notes, Author = {Matthieu Sozeau}, Date-Added = {2008-03-24 19:08:52 +0100}, Date-Modified = {2008-04-01 13:51:53 +0200}, Note = {Literate {C}oq script}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/drafts/lambda-notes.pdf}, Read = {Oui}, Title = {A dependently-typed formalization of simply-typed lambda-calculus: substitution, denotation, normalization}, Year = {2007}} @misc{sozeau.Coq/Programing/proval07, Address = {Orsay}, Author = me, Date-Added = {2008-03-24 19:08:52 +0100}, Date-Modified = {2008-04-01 13:52:05 +0200}, Howpublished = {Talk given at the {P}ro{V}al seminar}, Month = {9th march}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Program-ing_in_Coq-proval-090307.pdf}, Read = {Oui}, Title = {{P}rogram-ing in {C}oq}, Type = {slides}, Year = {2007}} @misc{sozeau.Coq/Programing/gallium07, Address = {Rocquencourt}, Author = me, Date-Added = {2008-03-24 19:08:52 +0100}, Date-Modified = {2008-04-01 13:52:05 +0200}, Howpublished = {Talk given at the {G}allium seminar}, Month = {16th march}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Program-ing_in_Coq-gallium-160307.pdf}, Read = {Oui}, Title = {{P}rogram-ing in {C}oq}, Type = {slides}, Year = {2007}} @misc{sozeau.Coq/FingerTrees/types07, Address = {Cividale Del Friuli, Italy}, Author = {Matthieu Sozeau}, Date-Added = {2008-03-24 19:08:52 +0100}, Date-Modified = {2008-04-01 13:52:06 +0200}, Howpublished = {Talk given at TYPES'07}, Month = {2-5 May}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Program-ing_Finger_Trees_in_Coq-types07-040507.pdf}, Read = {Oui}, Title = {A journey with {R}ussell: {P}rogramming {D}ependent {F}inger {T}rees in {C}oq}, Type = {slides}, Year = {2007}} @misc{sozeau.Coq/FingerTrees/icfp07, Address = {Freiburg, Germany}, Author = {Matthieu Sozeau}, Date-Added = {2008-03-24 19:08:52 +0100}, Date-Modified = {2008-04-01 13:52:12 +0200}, Howpublished = {Talk given at ICFP'07}, Month = {1--3 October}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Program-ing_Finger_Trees_in_Coq-icfp07-011007.pdf}, Read = {Oui}, Title = {{P}rogram-ing {F}inger {T}rees in {C}oq}, Type = {slides}, Year = {2007}} @misc{sozeau.Coq/classes/web, Author = {Matthieu Sozeau}, Date-Added = {2008-03-24 19:08:52 +0100}, Date-Modified = {2008-04-01 13:52:16 +0200}, Read = {Oui}, Title = {{T}ype classes in {C}oq}, Web = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/coq/classes.en.html}, Year = {2008}} @inproceedings{sozeau.Coq/classes/fctc, Abstract = { Type Classes have met a large success in Haskell and Isabelle, as a solution for sharing notations by overloading and for specifying with abstract structures by quantification on contexts. 
However, both systems are limited by second-class implementations of these constructs, and these limitations are only overcome by ad-hoc extensions to the respective systems. We propose an embedding of type classes into a dependent type theory that is first-class and supports some of the most popular extensions right away. The implementation is correspondingly cheap, general and very well integrated inside the system, as we have experimented in Coq. We show how it can be used to help structured programming and proving by way of examples.}, Author = {Matthieu Sozeau and Nicolas Oury}, Booktitle = {TPHOLs}, Copyright = {Springer}, Date-Added = {2008-03-24 19:08:52 +0100}, Date-Modified = {2014-02-10 14:11:53 +0000}, Doi = {10.1007/978-3-540-71067-7_23}, Editor = {Otmane Ait Mohamed and C{\'e}sar Mu{\~n}oz and Sofi{\`e}ne Tahar}, Keywords = {Type classes}, Month = {August}, Pages = {278-293}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/First-Class_Type_Classes.pdf}, Publisher = {Springer}, Read = {Oui}, Series = {LNCS}, Slides = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/First-Class_Type_Classes-TPHOLs-200808.pdf}, Title = {{F}irst-{C}lass {T}ype {C}lasses}, Volume = {5170}, Year = {2008}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/978-3-540-71067-7_23}}
@misc{sozeau.Coq/Programing/psu08, Address = {Portland, OR}, Author = me, Date-Added = {2008-03-24 19:08:52 +0100}, Date-Modified = {2008-04-01 13:52:18 +0200}, Howpublished = {Talk given at {P}ortland {S}tate {U}niversity}, Month = {16th january}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Program-ing_in_Coq-psu-160108.pdf}, Read = {Oui}, Title = {{P}rogram-ing in {C}oq}, Type = {slides}, Year = {2008}}
@misc{sozeau.Coq/Programing/harvard08, Address = {Cambridge, MA}, Author = me, Date-Added = {2008-03-24 19:08:52 +0100}, Date-Modified = {2008-04-01 13:52:18 +0200}, Howpublished = {Talk given at {H}arvard}, Month = {23rd january}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Program-ing_in_Coq-harvard-230108.pdf}, Read = {Oui}, Title = {{P}rogram-ing in {C}oq}, Type = {slides}, Year = {2008}}
@misc{sozeau.Coq/FingerTrees/dtp08, Address = {Nottingham, UK}, Author = {Matthieu Sozeau}, Date-Added = {2008-03-24 19:08:52 +0100}, Date-Modified = {2008-04-01 13:52:19 +0200}, Howpublished = {Talk given at DTP'08}, Month = {18--20 February}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/Dependent_Finger_Trees-dtp08-180208.pdf}, Read = {Oui}, Title = {{D}ependent {F}inger {T}rees}, Type = {slides}, Year = {2008}}
@misc{sozeau.Coq/classes/tphols08, Address = {Montr{\'e}al, Canada}, Author = {Matthieu Sozeau}, Date-Added = {2008-03-24 19:08:52 +0100}, Date-Modified = {2008-11-04 11:58:15 +0100}, Howpublished = {Talk given at {TPHOLs}'08}, Keywords = {Type classes}, Month = {20th August}, Pdf = {http://www.irif.univ-paris-diderot.fr/~sozeau/research/publications/First-Class_Type_Classes-TPHOLs-200808.pdf}, Read = {Oui}, Title = {{F}irst-{C}lass {T}ype {C}lasses}, Type = {slides}, Year = {2008}}
@article{DBLP:journals/eik/Basin94, Author = {David A. Basin}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Date-Added = {2008-02-27 12:32:04 +0100}, Date-Modified = {2008-09-10 20:38:53 +0200}, Journal = {Elektronische Informationsverarbeitung und Kybernetik}, Keywords = {rewriting, setoid}, Number = {5/6}, Pages = {249-259}, Read = {Oui}, Title = {Generalized {R}ewriting in {T}ype {T}heory}, Url = {http://www.inf.ethz.ch/personal/basin/pubs/tt-rewrite.ps.gz}, Volume = {30}, Year = {1994}, Bdsk-Url-1 = {http://www.inf.ethz.ch/personal/basin/pubs/tt-rewrite.ps.gz}}
@misc{Gonthier4col, Author = {Georges Gonthier and Benjamin Werner}, Date-Added = {2008-02-27 02:07:18 +0100}, Date-Modified = {2008-10-01 16:45:17 +0200}, Keywords = {verification, Coq}, Month = {April}, Read = {Oui}, Title = {A computer-checked proof of the four-colour theorem}, Url = {http://research.microsoft.com/~gonthier/4colproof.pdf}, Year = {2005}, Bdsk-Url-1 = {http://research.microsoft.com/~gonthier/4colproof.pdf}}
@inproceedings{longo95logic, Author = {Longo and Milsted and Soloviev}, Booktitle = {{LICS}: {IEEE} Symposium on Logic in Computer Science}, Date-Added = {2008-02-27 02:07:18 +0100}, Date-Modified = {2008-04-01 13:49:15 +0200}, Keywords = {Subtyping}, Read = {Oui}, Title = {A {L}ogic of {S}ubtyping}, Url = {http://citeseer.ist.psu.edu/longo96logic.html}, Year = {1995}, Bdsk-Url-1 = {http://citeseer.ist.psu.edu/longo96logic.html}}
@inproceedings{Xi:ATStypes03, Abstract = {{ The framework Pure Type System (PTS) offers a simple and general approach to designing and formalizing type systems. However, in the presence of dependent types, there often exist some acute problems that make it difficult for PTS to accommodate many common realistic programming features such as general recursion, recursive types, effects (e.g., exceptions, references, input/output), etc. In this paper, we propose a new framework Applied Type System (ATS) to allow for designing and formalizing type systems that can readily support common realistic programming features.
The key salient feature of ATS lies in a complete separation between statics, in which types are formed and reasoned about, and dynamics, in which programs are constructed and evaluated. With this separation, it is no longer possible for a program to occur in a type as is otherwise allowed in PTS. We present not only a formal development of ATS but also mention some examples in support of using ATS as a framework to form type systems for practical programming. }}, Author = {Hongwei Xi}, Booktitle = {post-workshop Proceedings of TYPES 2003}, Date-Added = {2008-02-27 02:07:18 +0100}, Date-Modified = {2008-10-07 14:22:21 +0200}, Keywords = {ATS, dependent types}, Pages = {394--408}, Pdf = {http://www.cs.bu.edu/~hwxi/academic/papers/types03.pdf}, Publisher = {Springer-Verlag LNCS 3085}, Read = {Oui}, Title = {{Applied {T}ype {S}ystem (extended abstract)}}, Year = {2004}} @misc{Xi:ATS, Author = {Hongwei Xi}, Date-Added = {2008-02-27 02:07:18 +0100}, Date-Modified = {2008-04-01 13:50:43 +0200}, Keywords = {ATS}, Read = {Oui}, Title = {Applied {T}ype {S}ystems}, Url = {http://www.cs.bu.edu/~hwxi/ATS/ATS.html}, Year = {2005}, Bdsk-Url-1 = {http://www.cs.bu.edu/~hwxi/ATS/ATS.html}} @inproceedings{Chen:POPL-2003, Abstract = {We present a coercive subtyping system for the calculus of constructions. The proposed system $\lambda C_{\leq}^{co}$ is obtained essentially by adding coercions and $\eta$-conversion to $\lambda C_\leq$\cite{Chen97a}, which is a subtyping extension to the calculus of constructions without coercions. Following \cite{LMS95,LMS98}, the coercive subtyping $c : A \leq B$ is understood as a special case of typing in arrow type $c : A \to B$ such that the term $c$ behaves like an identity function. We prove that, with respect to this semantic interpretation, the proposed coercive subtyping system is sound and complete, and that this completeness leads to transitivity elimination. In addition, we establish the equivalence between $\lambda C_{\leq}^{co}$ and $\lambda C_{\beta \eta}$, this fact implies that $\lambda C_{\leq}^{co}$ has confluence, subject reduction and strong normalization. 
We propose a formalization of coercion inference problem and present a sound and complete coercion inference algorithm.}, Author = {Gang Chen}, Booktitle = popl03, Churchreport = {yes}, Date-Added = {2008-02-27 02:07:18 +0100}, Date-Modified = {2009-09-22 15:05:34 -0400}, Keywords = {Subtyping, coercion, Calculus of Constructions}, Pages = {150-159}, Pdf = {http://www.church-project.org/reports/electronic/Chen:POPL-2003.pdf}, Read = {Oui}, Title = {Coercive {S}ubtyping for the {C}alculus of {C}onstructions (extended abstract)}, Year = {2003}, Bdsk-Url-1 = {http://www.church-project.org/reports/electronic/Chen:POPL-2003.pdf.gz}}
@inproceedings{cal00coherence, Address = {Ponte de Lima, Portugal}, Author = {Paul Callaghan}, Booktitle = {APPSEM Workshop on Subtyping and Dependent Types in Programming}, Date-Added = {2008-02-27 02:07:18 +0100}, Date-Modified = {2008-04-01 13:49:51 +0200}, Keywords = {coercion, subtyping, dependent types}, Month = {July}, Read = {Oui}, Title = {Coherence checking and its implementation in plastic}, Url = {http://www-sop.inria.fr/oasis/DTP00/Proceedings/callaghan.ps}, Year = {2000}, Bdsk-Url-1 = {http://www-sop.inria.fr/oasis/DTP00/Proceedings/callaghan.ps}}
@inproceedings{XiPfenning1999DTP, Acknowledgement = ack-nhfb, Author = {Hongwei Xi and Frank Pfenning}, Bibdate = {Mon May 3 12:58:58 MDT 1999}, Booktitle = popl99, Date-Added = {2008-02-27 02:07:18 +0100}, Date-Modified = {2008-09-10 22:38:47 +0200}, Fullpublisher = {ACM Press}, Fullseries = {ACM SIG{-}PLAN Notices}, Keywords = {dependent types, dml}, Month = jan, Pages = {214--227}, Read = {Oui}, Title = {Dependent types in practical programming}, Url = {http://www.acm.org:80/pubs/citations/proceedings/plan/292540/p214-xi/}, Year = {1999}, Bdsk-Url-1 = {http://www.acm.org:80/pubs/citations/proceedings/plan/292540/p214-xi/}}
@proceedings{conf/mfcs/1997, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {MFCS}, Date-Added = {2008-02-27 02:07:18 +0100}, Date-Modified = {2008-04-01 13:49:34 +0200}, Editor = {Igor Pr\'{\i}vara and Peter Ruzicka}, Isbn = {3-540-63437-1}, Publisher = {Springer}, Read = {Non}, Series = {Lecture Notes in Computer Science}, Title = {Mathematical {F}oundations of {C}omputer {S}cience 1997, 22nd {I}nternational {S}ymposium, {M}{F}{C}{S}'97, {B}ratislava, {S}lovakia, {A}ugust 25-29, 1997, {P}roceedings}, Volume = {1295}, Year = {1997}}
@book{whitehead.russell:principia, Address = {Cambridge}, Author = {Whitehead, Alfred North and Russell, Bertrand}, Date-Added = {2008-02-27 02:07:18 +0100}, Date-Modified = {2008-04-01 13:49:20 +0200}, Publisher = {Cambridge University Press}, Read = {Non}, Title = {Principia {M}athematica}, Year = 1910}
@book{progmartinlof, Author = {Nordstr\"{o}m, Bengt and Petersson, Kent and Smith, Jan M.}, Citeulike-Article-Id = {760320}, Date-Added = {2008-02-27 02:07:18 +0100}, Date-Modified = {2008-04-01 13:49:14 +0200}, Howpublished = {out of print; available electronically}, Keywords = {functional_programming type_theory}, Priority = {3}, Publisher = {Oxford University Press}, Read = {Oui}, Title = {Programming in {M}artin-{L}\"of's {T}ype {T}heory}, Url = {http://www.cs.chalmers.se/Cs/Research/Logic/book/}, Year = {1990}, Bdsk-Url-1 = {http://www.cs.chalmers.se/Cs/Research/Logic/book/}}
@phdthesis{ChenPhD, Address = {Laboratoire d'Informatique de l'\'Ecole Normale Sup\'erieure, Paris}, Author = {Gang Chen}, Date-Added = {2008-02-27 02:07:18 +0100}, Date-Modified = {2008-09-10 22:24:03 +0200}, Keywords = {subtyping, coercion, Calculus of Constructions}, Month = {D\'ecembre}, Read = {Oui}, School = {Universit\'e Paris {\uppercase\expandafter{\romannumeral 7}}}, Title = {Sous-typage, {C}onversion de {T}ypes et \'{E}limination de la {T}ransitivit\'e}, Url = {http://cat.inist.fr/?aModele=afficheN&cpsidt=197333}, Year = {1998}, Bdsk-Url-1 = {http://cat.inist.fr/?aModele=afficheN&cpsidt=197333}}
@misc{stumpsubset, Author = {Aaron Stump}, Date-Added = {2008-02-27 02:07:18 +0100}, Date-Modified = {2008-04-01 13:50:08 +0200}, Keywords = {subsets, partiality}, Note = {Draft paper}, Read = {Oui}, Title = {Subset {T}ypes and {P}artial {F}unctions}, Url = {http://citeseer.ist.psu.edu/699807.html}, Year = {2003}, Bdsk-Url-1 = {http://citeseer.ist.psu.edu/699807.html}}
@inproceedings{conf/mfcs/Chen97, Author = {Gang Chen}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {MFCS}, Crossref = {DBLP:conf/mfcs/1997}, Date-Added = {2008-02-27 02:07:18 +0100}, Date-Modified = {2008-04-01 13:49:34 +0200}, Keywords = {Subtyping, Calculus of Constructions}, Pages = {189--198}, Read = {Oui}, Title = {Subtyping {C}alculus of {C}onstruction ({E}xtended {A}bstract).}, Year = {1997}}
@inproceedings{conf/mpc/Parent95, Author = {Catherine Parent}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {MPC}, Crossref = {DBLP:conf/mpc/1995}, Date-Added = {2008-02-27 02:07:18 +0100}, Date-Modified = {2008-09-10 22:41:21 +0200}, Keywords = {Calculus of Constructions}, Pages = {351-379}, Read = {Oui}, Title = {Synthesizing {P}roofs from {P}rograms in the {C}alculus of {I}nductive {C}onstructions.}, Url = {http://www-verimag.imag.fr/~parent/MPC/mpc.ps}, Year = {1995}, Bdsk-Url-1 = {http://www-verimag.imag.fr/~parent/MPC/mpc.ps}}
@inproceedings{DBLP:conf/types/MiquelW02, Author = {Alexandre Miquel and Benjamin Werner}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Crossref = {DBLP:conf/types/2002}, Date-Added = {2008-02-27 02:07:18 +0100}, Date-Modified = {2008-09-03 19:15:33 +0200}, Ee = {http://link.springer.de/link/service/series/0558/bibs/2646/26460240.htm}, Keywords = {proof-irrelevance, Calculus of Constructions, model}, Pages = {240-258}, Pdf = {http://www.pps.jussieu.fr/~miquel/publis/types02.pdf}, Read = {Oui}, Title = {The {N}ot {S}o {S}imple {P}roof-{I}rrelevant {M}odel of {CC}.}, Year = {2002}, Bdsk-Url-1 = {http://www.pps.jussieu.fr/~miquel/publis/types02.pdf}}
@article{DBLP:journals/jfp/McBrideM04, Author = {Conor McBride and James McKinna}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Date-Added = {2008-02-27 02:07:18 +0100}, Date-Modified = {2008-04-01 13:50:21 +0200}, Ee = {http://dx.doi.org/10.1017/S0956796803004829}, Journal = {J. Funct. Program.}, Keywords = {pattern matching, dependent types}, Number = {1}, Pages = {69-111}, Read = {Oui}, Title = {The view from the left.}, Volume = {14}, Year = {2004}}
@inproceedings{saibi97inheritance, Address = {La Sorbonne, Paris, France}, Author = {Amokrane Sa\"ibi}, Booktitle = {24th Annual Symposium on Principles of Programming Languages}, Date-Added = {2008-02-27 02:07:18 +0100}, Date-Modified = {2008-09-03 19:36:56 +0200}, Keywords = {Subtyping, Calculus of Constructions, coercion}, Month = {January 15-17}, Organization = {ACM}, Pages = {292--301}, Read = {Oui}, Title = {{T}yping {A}lgorithm in {T}ype {T}heory with {I}nheritance}, Url = {http://pauillac.inria.fr/~saibi/ClassCoq4.ps}, Year = {1997}, Bdsk-Url-1 = {http://pauillac.inria.fr/~saibi/ClassCoq4.ps}}
@article{adams:PTSEQ, Author = {Robin Adams}, Date-Modified = {2008-09-22 13:56:27 +0200}, Journal = {Journal of Functional Programming}, Keywords = {PTS}, Pages = {219-246}, Read = {Oui}, Title = {{P}ure {T}ype {S}ystems with {J}udgemental {E}quality}, Url = {http://www.cs.rhul.ac.uk/~robin/ptseq8.ps.gz}, Volume = {16:2}, Year = {2006}, Bdsk-Url-1 = {http://www.cs.rhul.ac.uk/~robin/ptseq8.ps.gz}}
@article{werner:ProofIrrelevance, Author = {Benjamin Werner}, Date-Modified = {2008-10-01 16:18:00 +0200}, Journal = {3rd International Joint Conference on Automated Reasoning}, Keywords = {proof-irrelevance, Calculus of Constructions}, Pdf = {http://www.lix.polytechnique.fr/Labo/Benjamin.Werner/publis/ijcar06.pdf}, Read = {Oui}, Title = {On the strength of proof-irrelevant type theories}, Year = {2006}}
@inproceedings{DBLP:conf/tlca/Miquel01, Author = {Alexandre Miquel}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TLCA}, Date-Modified = {2008-09-03 19:16:36 +0200}, Ee = {http://link.springer.de/link/service/series/0558/bibs/2044/20440344.htm}, Keywords = {staging, Calculus of Constructions}, Pages = {344-359}, Pdf = {http://www.pps.jussieu.fr/~miquel/publis/tlca01.pdf}, Publisher = {Springer}, Read = {Oui}, Series = {Lecture Notes in Computer Science}, Title = {The {I}mplicit {C}alculus of {C}onstructions.}, Volume = {2044}, Year = {2001}}
@phdthesis{miquel:PhD, Author = {Alexandre Miquel}, Date-Modified = {2008-09-03 19:17:09 +0200}, Keywords = {staging, Calculus of Constructions}, Pdf = {http://www.pps.jussieu.fr/~miquel/publis/these.pdf}, Read = {Oui}, School = {Universit\'e Paris 7}, Title = {Le {C}alcul des {C}onstructions implicite: syntaxe et s\'emantique}, Year = {2001}}
@article{hinze:FingerTrees, Author = {Ralf Hinze and Ross Paterson}, Date-Modified = {2008-04-01 13:51:11 +0200}, Journal = {J. Funct. Program.}, Keywords = {Finger Trees}, Number = {2}, Pages = {197--217}, Read = {Oui}, Title = {{F}inger {T}rees: {A} {S}imple {G}eneral-purpose {D}ata {S}tructure}, Url = {http://www.soi.city.ac.uk/~ross/papers/FingerTree.html}, Volume = {16}, Year = {2006}, Bdsk-Url-1 = {http://www.soi.city.ac.uk/~ross/papers/FingerTree.html}}
@article{DBLP:journals/entcs/BiernackaDS06, Author = {Malgorzata Biernacka and Olivier Danvy and Kristian St{\o}vring}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Date-Modified = {2008-04-01 13:51:24 +0200}, Ee = {http://dx.doi.org/10.1016/j.entcs.2005.11.056}, Journal = {Electr. Notes Theor. Comput. Sci.}, Keywords = {Extraction, Normalization}, Pages = {169-189}, Read = {Oui}, Title = {Program {E}xtraction {F}rom {P}roofs of {W}eak {H}ead {N}ormalization.}, Volume = {155}, Year = {2006}}
@article{DBLP:journals/jfp/BirdC06, Author = {Richard S. Bird and Sharon A. Curtis}, Bibsource = {DBLP, http://dblp.uni-trier.de}, Date-Modified = {2008-04-01 13:51:13 +0200}, Ee = {http://dx.doi.org/10.1017/S0956796805005678}, Journal = {J. Funct.
Program.}, Number = {1}, Pages = {13-20}, Read = {Oui}, Title = {Functional {P}earls: {F}inding celebrities: {A} lesson in functional programming.}, Volume = {16}, Year = {2006}} @proceedings{cav07, Address = {Berlin, Germany}, Booktitle = {19th International Conference on Computer Aided Verification}, Editor = {Werner Damm and Holger Hermanns}, Month = jul, Publisher = SV, Series = lncs, Title = {Computer {A}ided {V}erification}, Year = {2007}} @proceedings{DBLP:conf/mpc/1995, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {MPC}, Date-Added = {2008-02-27 02:07:18 +0100}, Date-Modified = {2008-04-01 13:49:28 +0200}, Editor = {Bernhard M{\"o}ller}, Isbn = {3-540-60117-1}, Publisher = {Springer}, Read = {Non}, Series = {Lecture Notes in Computer Science}, Title = {Mathematics of {P}rogram {C}onstruction, {M}{P}{C}'95, {K}loster {I}rsee, {G}ermany, {J}uly 17-21, 1995, {P}roceedings}, Volume = {947}, Year = {1995}} @proceedings{DBLP:conf/esop/2000, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {ESOP}, Date-Added = {2008-05-13 17:02:39 +0200}, Date-Modified = {2008-05-13 17:02:39 +0200}, Editor = {Gert Smolka}, Isbn = {3-540-67262-1}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Programming Languages and Systems, 9th European Symposium on Programming, ES{O}{P} 2000, {H}eld as {P}art of the {E}uropean {J}oint {C}onferences on the {T}heory and {P}ractice of {S}oftware, {E}{T}{A}{P}{S} 2000, {B}erlin, {G}ermany, {M}arch 25 - {A}pril 2, 2000, {P}roceedings}, Volume = {1782}, Year = {2000}} @proceedings{DBLP:conf/tphol/2005, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TPHOLs}, Date-Added = {2008-05-13 17:02:39 +0200}, Date-Modified = {2008-05-13 17:02:39 +0200}, Editor = {Joe Hurd and Thomas F. 
Melham}, Isbn = {3-540-28372-2}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Theorem {P}roving in {H}igher {O}rder {L}ogics, 18th {I}nternational {C}onference, {T}{P}{H}{O}{L}s 2005, {O}xford, {U}{K}, {A}ugust 22-25, 2005, {P}roceedings}, Volume = {3603}, Year = {2005}} @proceedings{DBLP:conf/types/2003, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Date-Added = {2008-08-07 15:44:23 +0200}, Date-Modified = {2008-08-07 15:44:23 +0200}, Editor = {Stefano Berardi and Mario Coppo and Ferruccio Damiani}, Isbn = {3-540-22164-6}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Types for {P}roofs and {P}rograms, {I}nternational {W}orkshop, {T}{Y}{P}{E}{S} 2003, {T}orino, {I}taly, {A}pril 30 - {M}ay 4, 2003, {R}evised {S}elected {P}apers}, Volume = {3085}, Year = {2004}} @proceedings{tphols2008a, Booktitle = {TPHOLs}, Date-Added = {2008-08-25 19:42:35 +0200}, Date-Modified = {2008-08-25 19:42:35 +0200}, Editor = {Otmane Ait Mohamed, C{\'e}sar Mu{\~n}oz and Sofi{\`e}ne Tahar}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Theorem {P}roving in {H}igher {O}rder {L}ogics, 21th {I}nternational {C}onference, {T}{P}{H}{O}{L}s 2008, {M}ontreal, {C}anada, {A}ugust 18-21, 2008, {P}roceedings}, Volume = {5170}, Year = {2008}} @proceedings{DBLP:conf/types/2002, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Date-Added = {2008-09-09 23:12:48 +0200}, Date-Modified = {2008-09-09 23:12:48 +0200}, Editor = {Herman Geuvers and Freek Wiedijk}, Isbn = {3-540-14031-X}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Types for Proofs and Programs, Second International Workshop, TYPES 2002, Berg en Dal, The Netherlands, April 24-28, 2002, Selected Papers}, Volume = {2646}, Year = {2003}} @proceedings{DBLP:conf/types/2000, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Date-Added = {2008-09-28 13:47:36 +0200}, Date-Modified = {2008-09-28 13:47:36 +0200}, Editor = {Paul Callaghan and Zhaohui Luo and James McKinna and Robert Pollack}, Isbn = {3-540-43287-6}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Types for Proofs and Programs, International Workshop, TYPES 2000, Durham, UK, December 8-12, 2000, Selected Papers}, Volume = {2277}, Year = {2002}} @proceedings{DBLP:conf/types/1993, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Date-Added = {2008-10-09 13:12:56 +0200}, Date-Modified = {2008-10-09 13:12:56 +0200}, Editor = {Henk Barendregt and Tobias Nipkow}, Isbn = {3-540-58085-9}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Types for Proofs and Programs, International Workshop TYPES'93, Nijmegen, The Netherlands, May 24-28, 1993, Selected Papers}, Volume = {806}, Year = {1994}} @proceedings{DBLP:conf/lics/LICS9, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {LICS}, Date-Added = {2008-10-09 16:10:11 +0200}, Date-Modified = {2008-10-09 16:11:02 +0200}, Publisher = {IEEE Computer Society}, Title = {Proceedings, Ninth Annual IEEE Symposium on Logic in Computer Science, 4-7 July 1994, Paris, France}, Year = {1994}} @proceedings{DBLP:conf/tphol/2007, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TPHOLs}, Date-Added = {2008-04-05 01:09:42 +0200}, Date-Modified = {2008-04-05 01:09:42 +0200}, Editor = {Klaus Schneider and Jens Brandt}, Isbn = {978-3-540-74590-7}, Publisher = {Springer}, Series = {Lecture Notes in Computer 
Science}, Title = {Theorem {P}roving in {H}igher {O}rder {L}ogics, 20th {I}nternational {C}onference, {T}{P}{H}{O}{L}s 2007, {K}aiserslautern, {G}ermany, {S}eptember 10-13, 2007, {P}roceedings}, Volume = {4732}, Year = {2007}} @proceedings{icfp02, Booktitle = {International Conference on Functional Programming 2002}, Date-Added = {2008-10-12 16:18:44 +0200}, Date-Modified = {2008-10-12 16:18:44 +0200}, Publisher = {ACM Press}, Title = {International Conference on Functional Programming 2002}, Year = 2002} @proceedings{DBLP:conf/types/1995, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Date-Added = {2008-10-13 12:40:31 +0200}, Date-Modified = {2008-10-13 12:40:31 +0200}, Editor = {Stefano Berardi and Mario Coppo}, Isbn = {3-540-61780-9}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Types for Proofs and Programs, International Workshop TYPES'95, Torino, Italy, June 5-8, 1995, Selected Papers}, Volume = {1158}, Year = {1996}} @proceedings{DBLP:conf/types/1996, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TYPES}, Date-Added = {2008-12-25 22:04:51 +0100}, Date-Modified = {2008-12-25 22:04:51 +0100}, Editor = {Eduardo Gim{\'e}nez and Christine Paulin-Mohring}, Isbn = {3-540-65137-3}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Types for Proofs and Programs, International Workshop TYPES'96, Aussois, France, December 15-19, 1996, Selected Papers}, Volume = {1512}, Year = {1998}} @proceedings{DBLP:conf/csl/1996, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {CSL}, Date-Added = {2008-12-25 22:06:08 +0100}, Date-Modified = {2008-12-25 22:06:08 +0100}, Editor = {Dirk van Dalen and Marc Bezem}, Isbn = {3-540-63172-0}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Computer Science Logic, 10th International Workshop, CSL '96, Annual Conference of the EACSL, Utrecht, The Netherlands, September 21-27, 1996, Selected Papers}, Volume = {1258}, Year = {1997}} @proceedings{types06, Booktitle = {TYPES 2006}, Date-Modified = {2008-12-29 19:12:17 +0100}, Editor = {Thorsten Altenkirch and Conor McBride}, Isbn = {978-3-540-74463-4}, Publisher = SV, Series = LNCS, Title = {Types for {P}roofs and {P}rograms, {I}nternational {W}orkshop, {T}{Y}{P}{E}{S} 2006, {N}ottingham, {U}{K}, {A}pril 18-21, 2006, {R}evised {S}elected {P}apers}, Volume = {4502}, Year = {2007}} @proceedings{DBLP:conf/tphol/2009, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TPHOLs}, Date-Added = {2009-12-23 15:52:59 +0100}, Date-Modified = {2009-12-23 15:52:59 +0100}, Editor = {Stefan Berghofer and Tobias Nipkow and Christian Urban and Makarius Wenzel}, Ee = {http://dx.doi.org/10.1007/978-3-642-03359-9}, Isbn = {978-3-642-03358-2}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Title = {Theorem Proving in Higher Order Logics, 22nd International Conference, TPHOLs 2009, Munich, Germany, August 17-20, 2009. Proceedings}, Volume = {5674}, Year = {2009}} @proceedings{DBLP:conf/tlca/2011, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {TLCA}, Date-Added = {2013-03-19 12:40:14 +0000}, Date-Modified = {2014-01-30 06:25:29 +0000}, Editor = {C.-H. Luke Ong}, Ee = {http://dx.doi.org/10.1007/978-3-642-21691-6}, Isbn = {978-3-642-21690-9}, Publisher = {Springer}, Series = {LNCS}, Title = {Typed Lambda Calculi and Applications - 10th International Conference, TLCA 2011, Novi Sad, Serbia, June 1-3, 2011. 
Proceedings}, Volume = {6690}, Year = {2011}} @proceedings{DBLP:conf/csl/2012, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {CSL}, Date-Added = {2013-01-31 14:47:53 +0000}, Date-Modified = {2013-01-31 14:47:53 +0000}, Editor = {Patrick C{\'e}gielski and Arnaud Durand}, Isbn = {978-3-939897-42-2}, Publisher = {Schloss Dagstuhl - Leibniz-Zentrum fuer Informatik}, Series = {LIPIcs}, Title = {Computer Science Logic (CSL'12) - 26th International Workshop/21st Annual Conference of the EACSL, CSL 2012, September 3-6, 2012, Fontainebleau, France}, Volume = {16}, Year = {2012}} @proceedings{DBLP:conf/icfp/2011, Bibsource = {DBLP, http://dblp.uni-trier.de}, Booktitle = {ICFP}, Date-Added = {2012-12-10 18:17:12 +0000}, Date-Modified = {2012-12-10 18:17:12 +0000}, Editor = {Manuel M. T. Chakravarty and Zhenjiang Hu and Olivier Danvy}, Isbn = {978-1-4503-0865-6}, Publisher = {ACM}, Title = {Proceeding of the 16th ACM SIGPLAN international conference on Functional Programming, ICFP 2011, Tokyo, Japan, September 19-21, 2011}, Year = {2011}} @article{MR2964639, Author = {Guiraud, Yves and Malbos, Philippe}, Fjournal = {Advances in Mathematics}, Issn = {0001-8708}, Journal = {Adv. Math.}, Mrclass = {18C10 (18D05 18G10 18G20 68Q42)}, Mrnumber = {2964639}, Mrreviewer = {Enrico Vitale}, Number = {3-4}, Pages = {2294--2351}, Title = {Higher-dimensional normalisation strategies for acyclicity}, Url = {https://doi.org/10.1016/j.aim.2012.05.010}, Volume = {231}, Year = {2012}, Bdsk-Url-1 = {https://doi.org/10.1016/j.aim.2012.05.010}} @article{MR1988395, Author = {M\'etayer, Fran\c{c}ois}, Fjournal = {Theory and Applications of Categories}, Issn = {1201-561X}, Journal = {Theory Appl. Categ.}, Mrclass = {18D05 (05C20)}, Mrnumber = {1988395}, Mrreviewer = {Ross H. Street}, Pages = {No. 7, 148--184}, Title = {Resolutions by polygraphs}, Volume = {11}, Year = {2003}} @proceedings{DBLP:conf/itp/2014, Bibsource = {dblp computer science bibliography, http://dblp.org}, Biburl = {http://dblp.uni-trier.de/rec/bib/conf/itp/2014}, Date-Added = {2015-02-19 00:36:23 +0000}, Date-Modified = {2015-02-19 00:36:23 +0000}, Doi = {10.1007/978-3-319-08970-6}, Editor = {Gerwin Klein and Ruben Gamboa}, Isbn = {978-3-319-08969-0}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Timestamp = {Fri, 29 Jan 4442332 15:24:00 +}, Title = {Interactive Theorem Proving - 5th International Conference, {ITP} 2014, Held as Part of the Vienna Summer of Logic, {VSL} 2014, Vienna, Austria, July 14-17, 2014. Proceedings}, Url = {http://dx.doi.org/10.1007/978-3-319-08970-6}, Volume = {8558}, Year = {2014}, Bdsk-Url-1 = {http://dx.doi.org/10.1007/978-3-319-08970-6}} @proceedings{DBLP:conf/cpp/2011, Bibsource = {dblp computer science bibliography, http://dblp.org}, Biburl = {http://dblp.org/rec/bib/conf/cpp/2011}, Date-Added = {2018-01-20 16:31:54 +0000}, Date-Modified = {2018-01-20 16:31:54 +0000}, Doi = {10.1007/978-3-642-25379-9}, Editor = {Jean{-}Pierre Jouannaud and Zhong Shao}, Isbn = {978-3-642-25378-2}, Publisher = {Springer}, Series = {Lecture Notes in Computer Science}, Timestamp = {Thu, 25 May 2017 00:42:11 +0200}, Title = {Certified Programs and Proofs - First International Conference, {CPP} 2011, Kenting, Taiwan, December 7-9, 2011. 
    Proceedings},
  Url = {https://doi.org/10.1007/978-3-642-25379-9},
  Volume = {7086},
  Year = {2011},
  Bdsk-Url-1 = {https://doi.org/10.1007/978-3-642-25379-9}}

@proceedings{DBLP:conf/csl/2016,
  Bibsource = {dblp computer science bibliography, https://dblp.org},
  Biburl = {https://dblp.org/rec/bib/conf/csl/2016},
  Date-Added = {2018-03-04 15:49:04 +0000},
  Date-Modified = {2018-03-04 15:49:04 +0000},
  Editor = {Jean{-}Marc Talbot and Laurent Regnier},
  Isbn = {978-3-95977-022-4},
  Publisher = {Schloss Dagstuhl - Leibniz-Zentrum fuer Informatik},
  Series = {LIPIcs},
  Timestamp = {Wed, 27 Sep 2017 13:54:28 +0200},
  Title = {25th {EACSL} Annual Conference on Computer Science Logic, {CSL} 2016, August 29 - September 1, 2016, Marseille, France},
  Url = {http://www.dagstuhl.de/dagpub/978-3-95977-022-4},
  Volume = {62},
  Year = {2016},
  Bdsk-Url-1 = {http://www.dagstuhl.de/dagpub/978-3-95977-022-4}}

@proceedings{DBLP:conf/cpp/2018,
  Bibsource = {dblp computer science bibliography, https://dblp.org},
  Biburl = {https://dblp.org/rec/bib/conf/cpp/2018},
  Date-Added = {2018-03-15 12:41:04 +0000},
  Date-Modified = {2018-03-15 12:41:04 +0000},
  Editor = {June Andronick and Amy P. Felty},
  Isbn = {978-1-4503-5586-5},
  Publisher = {{ACM}},
  Timestamp = {Sat, 30 Dec 2017 17:33:25 +0100},
  Title = {Proceedings of the 7th {ACM} {SIGPLAN} International Conference on Certified Programs and Proofs, {CPP} 2018, Los Angeles, CA, USA, January 8-9, 2018},
  Url = {http://dl.acm.org/citation.cfm?id=3176245},
  Year = {2018},
  Bdsk-Url-1 = {http://dl.acm.org/citation.cfm?id=3176245}}

@proceedings{DBLP:conf/rta/2018,
  Bibsource = {dblp computer science bibliography, https://dblp.org},
  Biburl = {https://dblp.org/rec/bib/conf/rta/2018},
  Date-Added = {2018-10-16 17:04:30 +0000},
  Date-Modified = {2018-10-16 17:04:30 +0000},
  Editor = {H{\'{e}}l{\`{e}}ne Kirchner},
  Isbn = {978-3-95977-077-4},
  Publisher = {Schloss Dagstuhl - Leibniz-Zentrum fuer Informatik},
  Series = {LIPIcs},
  Timestamp = {Wed, 04 Jul 2018 11:37:53 +0200},
  Title = {3rd International Conference on Formal Structures for Computation and Deduction, {FSCD} 2018, July 9-12, 2018, Oxford, {UK}},
  Url = {http://www.dagstuhl.de/dagpub/978-3-95977-077-4},
  Volume = {108},
  Year = {2018},
  Bdsk-Url-1 = {http://www.dagstuhl.de/dagpub/978-3-95977-077-4}}

@comment{BibDesk Static Groups{
  group name Eval keys altenkirch-mcbride-wierstra:ott-now,altenkirch:ctt,licata:cubicaltt
}}

@comment{BibDesk Smart Groups{
  conditions comparison 3 key value version 1 conjunction 0 group name Eval
  conditions comparison 2 key Keywords value HTT version 1 comparison 2 key Keywords value Ynot version 1 conjunction 1 group name HTT & Ynot
  conditions comparison 2 key Author value Matthieu Sozeau version 1 comparison 2 key value version 1 conjunction 0 group name Matthieu Sozeau
  conditions comparison 4 key BibTeX Type value proceedings version 1 conjunction 0 group name Proceedings
}}

@comment{BibDesk URL Groups{
  URL http://www.cs.nott.ac.uk/~txa/publ/alti.bib group name Altenkirch
  URL http://www.cis.upenn.edu/~bcpierce/papers/bcp.bib group name B. Pierce
  URL file://localhost/Users/mat/research/demons-biblio/demons.bib group name Démons
  URL file://localhost/Users/mat/research/demons-biblio/demons2.bib group name Démons2
  URL file://localhost/Users/mat/research/demons-biblio/demons3.bib group name Démons3
  URL http://pauillac.inria.fr/~fpottier/biblio/english.bib group name F. Pottier
  URL http://www.cs.nott.ac.uk/~nxg/papers/bibtex group name N.
Ghani
  URL file://localhost/Users/mat/research/publication/bib/pvs.bib group name PVS
  URL http://www.itu.dk/~birkedal/realizability/realizability.bib group name Realizability
  URL file://localhost/Users/mat/research/publication/bib/reynolds.bib group name Reynolds
}}
Coq-Equations-1.3.1-8.20/doc/code.sty000066400000000000000000000012141463127417400170200ustar00rootroot00000000000000
\def\kw#1{\texttt{#1}}
\def\id#1{\textit{#1}}
\def\cst#1{\text{\textsf{#1}}}
\def\constr{\cst}
\def\bfcst#1{\textbf{\cst{#1}}}
\def\ind{\bfcst}
\def\module#1{\textsc{\textsf{#1}}}
\def\ax#1{\textsl{\textsf{#1}}}
\def\tac{\kw}
\def\bang{!}
\def\kwabbrev#1{\kw{#1}\xspace}
\def\class{\kwabbrev{class}}
\def\instance{\kwabbrev{instance}}
\def\Class{\kwabbrev{Class}}
\def\Instance{\kwabbrev{Instance}}
\def\record{\kwabbrev{Record}}
\def\where{\kwabbrev{where}}
%\def\indent{\coqdocindent}
\def\eol{\coqdoceol}
\def\classid#1{\ind{#1}}
\newenvironment{code}{%
  \medskip}{\medskip}
\def\app{\mathbin{+\!\!\!+}}
\def\cons{\mathbin{::}}
\def\nil{[~\!]}
Coq-Equations-1.3.1-8.20/doc/coq.sty000066400000000000000000000022701463127417400166730ustar00rootroot00000000000000
\usepackage{lambda}
\usepackage{ml}
\usepackage{code}
\def\coqor{\ensuremath{|~}}
\def\coqbar{\ensuremath{|}}
\def\coqdoublebar{\ensuremath{\|}}
\def\coqcoerce{$:>~$}
\def\coqlambda{\textrm{λ}}
\def\coqforall{$\forall$}
\def\destml{\mlkw{dest}}
\newcommand{\eqrec}[6]{\cst{eq\_rec}~#1~#2~#3~#4~#5~#6}
\newcommand{\eqrect}[6]{\cst{eq\_rect}~#1~#2~#3~#4~#5~#6}
% Coq
% \newcommand{\elt}[4]{\constr{elt}~#1~#2~#3~#4}
% \newcommand{\elts}[2]{\constr{elt}~#1~#2}
% \newcommand{\curryelt}[4]{\constr{elt}~#3~#4}
\newcommand{\mysubset}[3]{\{~#1 : #2 `| #3~\}}
\def\Definition{\kw{Definition}}
\def\Fixpoint{\kw{Fixpoint}}
\def\Inductive{\kw{Inductive}}
\def\Variable{\kw{Variable}}
\newcommand{\Prop}{\kw{Prop}\xspace}
\newcommand{\Set}{\kw{Set}\xspace}
%\newcommand{\Type}{\kw{Type}\xspace}
\def\setproptype{\ensuremath{\{ \Set, \Prop, \Type \}}}
\def\setprop{\ensuremath{\{ \Set, \Prop \}}}
% Framed goals
\def\coqframeindent#1{\noindent\kern#1\hskip2em}
\def\coqframenoindent#1{\noindent}
\def\frameindent{\def\coqdocindent{\coqframeindent}}
\def\noframeindent{\def\coqdocindent{\coqframenoindent}}
\usepackage{framed}
%\newenvironment{goal}{\begin{framed}\frameindent}
% {\end{framed}\noframeindent}
Coq-Equations-1.3.1-8.20/doc/equations.pdf [binary PDF data]
P|@k1g٭]mJRY(5Rt6Bthr_(=0#C'󒍣p%k81ĸ9傓o<ڶ\YUC -ÀeJNGm7Fc+zEbz d&#&6`t{)N?&}i}YHgiG(Q#gRF:;ӓE#Z}WBjݙOb>t,󞟓96-/1ky$g-UOނ ͎Ir%#&-ߦa}ºGWޜH=;`A[A}6n>kaiQ Z> ғPP+ v[A$>Qz'PwrIdo[Eɳu)rدAk2IXҺE`xNTzf̖#m3~q6^.oLlQ fKb3wxW\`ۥ55525bwLP;0tozoQH%@T 2X@c&.y,KL 06=aQ*0h! &Ҹ<~sRY%%B^3g7jbno>B^v 5+(Z"\ڢ6#dy a3&=`W:| ᰙ N$=pkHiAHF%Q[\?ti&NKwSi(s#9eAGG&`cLڻ+4--̐/3df:'71w2C)Xצa2ѓP~߶*C`|F27 U:-lO*.sw`lɬgi ) aR)rG%ʛsBG/{_ۑs4a6 Ap{}:ʞ+OYBL%q6T#^:!ȪZK(]P*# ȃ,S?+⎃sN;.3FRrÅKxZG ۛ iue1@Ql~pe D7lt4<260)&ZugϊQf SsZ zcl\)\( ٦qAx)ty&+$/م2ˣ-jϒ[&ֽEiuIe]N.kĔ%/ġx쇐ZZe"x%U s,os+9Epx!TFu!җq9ԥ t JbOY{LHnv埐 L0 )%?>?MXAr* E&},2!wGmؾ_cF=K5ς3꿾!Bnbʴ.rxIpn_4MyXF@10ES/)?[)J Htk$)-grCO+қ1-ǫ̧&Bh}Oq}r]V_u#q6*^YӿH1vp$a%&Lql,$IxfN mARk4REdN⸤xfQ]O6֔Q GSig[ؔ DWp[׶td M[.6sr'Ӭ*=ǪZcJ?2,).+͂6־N;=jE|^ݶE*D|F=2̮z!ϟ#y.aE.P7mF"P0a: 7v0p!u;ph_Z,d)H6wԉt=b/_7^hG^Y3knS!:sMžzyv!h5'ߡrts#  [k9-\~^\$QKLxRUT &:' OiBgq;RݛZl ٹ3t|DxfOhǫ kCWd5Z,CNl}oNO. S/jbe@?OOMh5L# xD9&7.s/8fGƯI2 BP;T_!iR =:[=F_Sz˓_L z2{Ð'1vǦc/S^-K4052m:C/ij tO]ЍJ Gh51= .Xq7_ n~GAD [Iuum=pT'Mk, T\ ?VBKD?rH;puO_lM43= Nx#׳(L\_=lNdH+CP3:A mv i_X2fmiQGUYk${Q8Q0pgeUiYP˚(R쪳]NMCJyyx ܶC=Jb%Ƿ$IBw2֚l.nPMK܅+m'xrgRo45⎘Ǿ:UG=[pNxx Յ*.CDt3( Rc1VF~p"4Fz{Zу @޺-!e XfN#_=֞߮KuN)h>(󵲘!e^.#bp|a+>l#WuȢ9}]"@$crgs2fupa@0p ] {x9Uml3JQ4#zL+F!~ SG7*^xI5@J35#_j|fXĠ( O\ڣLQ%TYSNAejBgYs]kFAYJ+N@F˨TbM_vǧow9C |M}UM\A=޶ŅPB>WWMGʢn~M'ngRZ7jz$l5>d@RkvRc)Z{|2`Oy!6G$ i d#ݩbQ&Y^(\UM#}v fyGmgx/fݼA3Y#evz׶?v |>o_oFf^U{G] ?E\!+Ԏ_'x*6[f9(^u x~!o7^B i/qQJ~dF CZh,&&'l=rMjmoZ~Ѐ#/8nB 8 Cd/]&7)7*s'mguQ8 ߡM-e1*i<'QpuA6&up!D G{YA96Gܝ 2F^;"9M /|~GzH ]$n>SEֆϘ%0S/{r uf"']T82EzR{񓝌򪻢tTk~Ŭ喖.c t8WL %RZ<>P ?rXKFrvhsN'O(Y$2Yvz0IWFGOzBmۤ9bp2G:fa5n%6rq@;-ļW{g4En,K;YZkK)bgdrlc%~ d1"/ %5zMmxf=φYnu|( ):M:>v쇞EK48wR/ΕqP֟i\GD=+ 3| C_,̘Jy dίw/"]톏VdԐ"c%ϤUZ\q[jo`_kР@*@kTgcoܑt@ HX!!vǒa,+4l@+]3'AI2氮 r(bR=a_{Ԕ}y g=1 r <y6k <1SEe cKK C=}E.&mm{/D (k*ĸs7㲤gII&vOa& rͧ0 Q/JÀ(/6WDzE.YAIeBlOn\ter}G4μz= IUF?ndWW(IU.odC4}'D!℺x8`)Wgn(e٤u!@^M ȳ{jZa0,1;}@e>vTdҍV''`gzm_D Ehy$f&ݚ<_&g0IH2)y?l5\+ #H"b\F{J蒃śz꺢\NJXA֌tZZJj-UMA{gS^_Vjty5|Wi'f0]62rvPNM"ɽٙ'gqnaBk%“Yv/F|V;Wd._؋ Jn-&αkV('TDrFN2 qeXi_6 +\r Υ`UeF_ [30"61yH\u>jy7R cQk޴bXm}gg2G |3s*}aAk)Ԣ4Ф'WIkC~^s$e̤.nmo Z-F9lFMVz"HDr480@gJĢNJwc&nRgA6^ox1<ꮚl:$c7j:-ƪ()(#Q ksclCL6⭉PSqp$[}#9b~@OQ$6Z(\4HY1a*ѪSH=XlyY!Mr((b8Az^{o m4bq9Ȁ d(N%nJDNr߷շ 8n} IwӠJkXB3ޭ\*ѯ0 ɃltܥuT9z <  O؄|mF*NK:f2Br/HqpgUf{huC }HK}wyݍaڴu.a&8uV%fGYNF9T+A*Pg:QQWxU:7T|B"q86.}=6Lnt^yA {b)"cVQ=&)A{w BZݑ~MrfDnꜸZ\'E'zok2pKZ vnmxi8m-@Z8W5)']R[< /7HšZukW2JA(^4A9 舿kP선U#2G\&2Y Qlwۏ&0r| B]U.y@|d)`[@Aڞtl$ŗsIvzOn[EJ8'Vuxnq-Q$Ԓvz/@TlQ?h[dz Cl6TF$úyLZbE^>QlSlt!/irͮ,Q!M, 5sD(}>} u2*{൳>!`KmZC /dY; ;˙zg؝r[X o /dy61?gP>%6h(ٸ4/ /3О'^P8vX$:L|tbD̛6rb]ܟI~X ҧڠ'tGFu00P3@q, numMa.Jh8ޤhnj`U?}jK*G6%SguKnr#_]-ǚkNE;tmY|#SG!4; c_mIIn,*ג&VUnR IѺP7Z*ܷlS삪`Chm۵",LCt1 ʐ5?ٙ/,KM(+I,9[.-TRQ{Ro1q|ՍBܽKR;d#v\qB$O'9u<|AiQW-<]g[OZi̓=ZM,7{oP!HҜ5Kͩ]Q$ w'_5:+Aw7W!srk>f$lӷׅ6NʌHFEKSݓJN )}02_ Rcoq흯0CI3kw!O1Sw=҇$:^Lj> stream xmct-c;m۶sǶm۶ѱmFv'qOYfUG]dDv.@&:&zFn&++#L hbio'jhM*@3 bdin4ncdjikPw4𺹹 9;\,3K @DAQKJ^@)!l6&YK3 `f`bogj/N&`tr-@NLUH\A^ & *03Jnhbh.keF) P:Q 'UIPzw7sXkJKE>?PǿKb!=3R$Yr7`U,vK_I8'm9"- qMNPŜYpw5D)dNcm4JŅK?Fc4UlAW=?]aO8;>84.Y[ cBIQéUCqgbwVhg2|nC~2LuQXOgKLݢq8w{ 3Ċ}9}lن$0S=F+A] Ԕ>s6Mh{Bʑh["8Я܅>HBH߻y "l3f2 :ͭFۚCm*Y.W^&\"*(7=QWV~Ym|UB F4^4c:tTyAKKZKcevrx3>#0oS1-J@;Ur6]}Qr} TFɝ)TJ },1M~ﲘAre?Wi Ly}nwH^_Cbo|e̻@%w_ڍ%9tn5s3&00$l˪qxꚱ1]}?!4KdžEJ;Rjg?~'u.Wn(jF(ܠ͛$:khe#|Ry7:303+ڔܛ>N^*^"T y]vGǧ ? 
1@_a_+)vI&YQ̴raT3]ΟΑw{cpLxMil x>^)q[uÏvXS.%  p︻bvc1TbbXt sXZㆋ#rBØw}\Uf:d}vP5OѴ աW]!d-%^CjnB>uVTOu U9CyF 9u=쁖\n uW #^IxDo""PZ h6{Cڪ_OPpP~~k`ZxriA0u [ѿfQЉ0Q!, DN(Dy߄AW+ri4v[ vZ1lᨯqѾ`ܱ_~EœD(p,znz ͡U"zw_Uߏ=6g8g5G^IdM ϟF9L&z}l M|#L۹2>.[rgJrKN={H*m|]ZjJ02D5HrUB69ԝ|lYUmv·t)TUZ#άζl,bXbgʼh1X~zTQUuǠɅo!Fe9 9~c\Xk<87 2 -ũ/!Fds <yDeKheۚm@؉l[@ Q֪ňA J͵ ds_ƀ(_ ۂUH&杲#mw4DcP8|M6&(7CaM,'K۷9tԜ@*ysTO`oZ:WL'GަV1_ӭ_%TSLe#E0̌XB3 |Zg'ݿa5aD~vvqޖLBnM$/S*OɾP+z#`ln-י8M-tR5_ڀcvz }\8wQ+_0Yg}O2u,]2iNNe-bl#7z 9)߭M J&',{i>̽Zz%ăBDM`4gJğ!oDP3 LN17JM{o ۸VT یQ[s#l> KX>2돾ƣ8ޫ 1C5N80=()XI{k+Pl_|I@}-I,j7Pe`?(H1PrápkN `O.3ԌpE+BJ_~[ *W(ٺB蓠3?/7V73wZs06[鸒z8Л''u*0'6SUQǹNh>|}q(HZӬQ# rKޠpq,y4FF^w![|D$;Mڂ77}l9V8(Yz[B][zCYc:ƀZN2׮AsJB1>Zȳ)xbYH,Đ|-œ߳z:IÃb.@r{>dP;fw]ɪm. 6nNCJ.L*&"Tj;s+"!9r G@*UJ03Z`K9;%I)O; Ro ՊJgǡ'|ЁNJ@ߍ12V1iZ`+"N^o S팷<7 xRKX.9v:qi_I+6L8UC\%e6P+G (UUXzgP ֿ郻tkw!45K1 d&o`Be`/Pxovv/D8֌k7ݪw8Pb(z6;YJh'(ScJ{ecc+}[syOxvcb<3z|,C ೞJɿuq8dHohxAi^PrO'ե{+|!>yA4 ˙CY ~_PI\F} ?ItN͝oPwFh͠c)h1b0oتӤi` L/M{"堲kNLt+k $X@١vpHG"Sax H>C,dZ?ˣMYEPlrZŤUS䘂癸~F Mk}6*i塛Fjf-Gk\QQOs ^^Rohy\ٱ_GmUHH?fTj=riF&qKt;e~&[+mcȷ~Nk' O@?o_Hkq D \k~<)bdԸ+i,(\/+U@HIzqQ9~ j ygtC69C^f*$ΠY{!v /VGIZ8P2{*;ͼY`mEʊ1 DRT0\4ksA8iZm#CoA޷*3X1e̝ѲTe@n(nq9 m,,KA`HN38XtRY)u|T*taַq_-daC$xMB 0ڽw4-1Mf\8C[3Q%cw3?H4bM =$$&h5p&R$X 0vͯ ۩3`E>?FPS_L(|yEM/3ƽ!M&t' &0| 9>!μm$2> Q6J=Wwe 0AV[f=d6jwoG.G t"ϰn-K,YϮDh\탳uֺ ]cvU 9sӌ?B] Mj__]ڹj`V[l&bN5Xz˖M)ϤS Jh[O;iM mZ"lf34ݢu+еIaזx$tl/*ZS;=oڪAqӌևN9?(ZQRSg0~``9m%мF<અR zMIsQ aǛ\lrJIe"$Ȩm%VT~I9_oOEJ s 1/Vm(~La֊С?ئqФ3 UY*<:<~c\;R\`u */Aq]_r*<]ʳ NHqTJV۹8U], \"K1džmox` Q,@t7Bl4W]+2[Im 1oFbE4P gޏIY ư]Du`SS3ָV3Wg4O_k [?|9hSNr!:XwoI(CIh#xG 'sǸؕ~t_70@+SkkAp]۱)&ie29XYZķҏxy~P0ͮG}z%䟲Uuro4纂Ie"I|D ~f* ;>]B1duQʼn~+d*Mu r!'&BY,cH}ϠƣkjM.mj0W@*t`ǭz ,Jq湯ЎrVH-2J~ 7t(G PAwEED@p^A>N'a ߾5)#T ǜj:PJ\I)R v<(pRN c%#VfjƝIJ8Ԟ?Mcsނxte8%Kْ-H(&OA.&bO\5t@7 ,}NO..Jih)Z!Wc6a m3/12Nh[h|gy??i)>ƝeX E*9a 't<&ײK[np`h=~sǡ+Dwu3p(!rv#Ma7b˯4,0b )c)eW*s=0.: J%DKݒJ>)j-jP;dRɾTl՛P+k'<\)*g}_β'<c㦹{!;/ ߖ*R#{\ H8:m\)⬵R]8COϕ٨[q3 j:9}vT:Rr&*;y(_NMMcb.NÌN~1enX3H7̟L98gm4?fB C}J&i)<g6ŸWR .D]%nu,O͘:&&JWs٨=*QU/=b +mPZyDZdos#}*[C?u&,ϑTXvF)(!ì܁ a r@X-=U?< ajЗ:ߒGl.tr xā䟎E! qy_ }kYWi F!*Ҷfɠ̉){Ț4UJ΅Aǵ9ZxN@/@w\a^fd5EYkd@W;(ؐ[nfQƜXؖT{KPmR:2+gA8uQ F4?n cs[V6^q͊zb~CVݑ,^cettbuɎi3~b| <6-~ - OGj1q?[I!N--cgV@uL# ^H?3OYi9 ~ d(Xvx ?g? Nj5^|Iq,^#Uk>Ɖ[ڕJ@RU+ħ]aof2S?xע!suJ͒?U4 8Yڭm䒥Ot׵4 2"PbH" "#" PU1R NbxjԧQغ¢ۣn"qMiW$i~eu8zSQI"PjTZD*_#qӪ㍂e+Fpkrcq%8 L4*t^8أebaV#䭄[%4땄RfbEah(z`cIdq^ 8kPӮ`B\x#ˌq`&ymk)z][R>OTRP;'bb}sZ:5Z.p A{r} lLRjt RJH&붰7Yj+#\{%p5~ ~agJʟQqVnoܜQoyy%iJҴ)1Vk\nK7P-Z[ca7,d?]i|/'=5RvCµ9f\>)c𶟮P_V˼}%؉+~[?$敽V$N-WgR`WE,CJhnUӛzTff6#qgP J@1ƟptE[v[ reJI?eդ=RfѰNq8Q>س^T5Vٓzʨ3ʁnm/${/0~_<0l|oAH.u$1re?ml9Mf"z4\IRX;%\YE6zY.T.NK'O=0’ bI8(bcyr(a*E0cݳ-,g?sӥ.0_73* |E83QM tat`5vC:1xzz}fމW܏ ]fW nT 8k+bNWcx6CsĸT]}k״BbC2ra;LeoεHOPg90IF Dxgvݧ߇j&hYa KkJ&rM%b0!0 V{킁 , 3i1~gZN\JFF37S Tx&!-8mVgKE}**BR B rK\6`~U@z}O,KohU Hq":)V6pa D3쐀 LzAҾp_OK#i{7Hz*.krɍ<΀*ojwڸ Ppz~g43(օ'syL2 VeWlaZ䧅8)G ?КH ]m;Ǧy:q$JΝ?i"S!y\HgF%CUB 7]+yNUe 2G9(O^(!jHYDݾ!k,HO<4|R6_l\ǑV`R 4LPJ = `A[^鯌HaAJKJfǮV%txVNH8~,/tܲEVق#I$F<ɰĄTఴ~Ka]6*|X<'=EmBU|#ekZ>-'ޱ?֮^itlDce=B\1Qӷ'o+l2ΆR۸yi\wb4U<9~*aDtE 㸂F1& E<9'Ln ^ZD7qߙtt\'M FGKFzl(ߨ^Bx0/Vg7kڼh,PxB?;e9jH _`i5;^DV*0)%k$w%m20]t 7=#ZdE !S2%U5ϝ׶?) 
{/:`Lv@N͝[ɩ8mSomhw ޚ^T9Q#]HkYK^wF'+4`HaZhcO{-BatqQũ^a/Cp]4Y;tWtlV,M8uhձn.|ʏO/D~708r9RchĚ8u#9]J,c7[_Rh~[~H=4otI%(Ǹys-ṧ >i"(̜UYE/+Ly@/zݹhF=J7e([eU"]V%XLxxyXqc:C.6w-\5V$.Lï24 싈CS/Q:gC,"C%Ti)81б$"|"5JjE)H%fKK_rGD^w̭w+^JB0Qِ{~;"vQq̠з8"QXPӱ$N ,{ endstream endobj 1292 0 obj << /Length1 727 /Length2 9888 /Length3 0 /Length 10474 /Filter /FlateDecode >> stream xmucteݶmlqRSIŶm;;V۶m۶U|s{gp>mN 1)3=37@YLH GA!hjl 1t6M,&&f8 0@mLOfchbikP]--?\]]\\]6)-Lf6ayMI9q*@bdci 46s26v@;arb_`f@[ @TQE`hgn_k2߬-8ff3_Iڙ'_|LLVښ.ΦY@hcuw6315O*$ !hgA,,MM,Rnfh> wQjg*eOZhbigPv˨ -@:;Z%}tJH @`f5vqt4sGNpK@c@R2Hڙ)U%fɮ+mT ?x*x9n~F;ے1G$Kxυde>U/zjrWa7Ωk &ҳΪyQř# Jz!QbWitҴ>Lh+Qf,.wWBrv(p-JBr ۰YimA m8pj4Tԫ޸hB 5VMyӐg+UB۶GUVOR`i!pgҢښ]qL|XԒWY(_uCDX.X$msu,oށg>kXbi<S q}{aDpu! [:C{J[x0iZXnTGصvp~{>gHPjFA;{U Af1ӫ$W.Rib>av 3_I]UE@;D o.B~^񛘣Z}SXq70N.gapgAZ'.(@NRlX(C?])]bZ_LFo!,h#Qs) >ӌk/*`>OdRHN7\c6@Έ>KB8yO+.39*c>-)KMYEx}1Rh ;E"6+JXqqa㫤4eWlw6" /q+Bm:7zЕJKsH M*!7rx{GX}{nrT$5]f'>hss){6V~ի" ʎYٛPX2wYR3\^i$6S3 V`]]zv2#m)-|sZER}HPbޫNVI{~њ̤mqLIQb3Rp.$D^PDZs5}H[Cc\"OSֳ;<W1$ڔ KqK d/K&3RGc)YU;deToE1/Rh̞N -cRJYYJ:I6bYb7EsFYU_vPYzT]ٜ?RBq& nYi2cgEE <~_S-bIsH{Y˳$߸v`P. pb rp4W߳#;9]朷skQaviX.ZD([@%rŜ} S-s=]:'j^M,p?7{&gY7[9`Pj֖ƍ)s1zg(q2~70͟H] X[+ \̚c0WI~^#WR?dnb~+!ح W+t':rYE~F}iN+ɺ{xա +!O'ORa. >BIsbj ݻe Bn啹A^mɦ F13#2Ysc SGVK5@'w N؆_5&Wc+ըkeg4VGש_ʱ m>,.S.~f%;c)cz Lv{W.w LH,O VE?j``Ƣ}aћ3O"(W!S,>N$~{ >շKq(SÑ3X)6Ƨ=}lvxTxqÊ\6?|ebFO|HgcT22[4Ո$cBR#-ɠ{|DM3. F.[@CC7G$1|Ӓ*z8LYP'Rӑ؆ ]t)l6FBTR'!(Z{S[t߲<1yz\w Gqaz2]whg /XCMEnxtqClB@&'H 㜻~WTW8E)'5'l3Zdc}u,w1B@oS?8R Jp OR`M0Pd3?̤zNg]U%M e-A:KWX (a'ۡu"\mJKvw^-i["*dI5xHw\'Y.8t~HvȹR;ga̯U똠Ib]`'mU]͑m벎OVXI_q]_7pԉDVC;7 u1Hzh?mf- ]h )`j'hv^yӓ8Ü&Pg)!߃ֻ #Gj P>}H {->e6))Y+ƈD/%R0,kC! @qhO#rȋ&/ЎFE\&?c#I9 ; !mQ3ZYҨ [`t鎺͎{!ib#';wucTtg5L8kό`̪~+JOY)m$0OJllFxok 5yt~'BAfYqs ޡHt㙄,7 *ԙfpؤcs_ k3I"_ _?eKĺ%%.?^AgROUB oLd/!HL]@]2+R,g 5A7.ZHýsu)vJx; 3Uڂ${5ea$Sܯ~@6Zfbicã]%03Q<Ԫ]Ex'Tz05,ⲟVxL^:eLRHQ!m8Q#YalNglT͗K.I0PashϓJsU2}fǁsv`.itl(*I4GG0K]L: YCFL5\ѓyѲEӭI$jnal2vv TG s30{#q7iw:wCn5 ՗Dd^w5ƛF Wu xBej$&[a<?)jSƣÐqmD>A >ޙ 4=r:ō=7Ä|~z,r7gЫ2Q)}UaL^NQNl#F t7XKn-puӐXCQvP[!)f?GSQU>9",U6֕z . ;LOH*H[6U>_MMEy3Ox579暶mPzң(fx&oǙաѵYt08,!*fÃtM%OÇ^=/XP:p>~e^yz=MkRi_`3-ꖌon+xչGίU$Zl:&JXsw@W_A06qnZ `Jʽ圷ʻ3:ƛʤb/"s1n}!.~w3 XPt̼ =SQIqxW%?&2msE}gs*(< ^t$K2qV|{háS}*kJ+n;c@cu.0ODճtհQF#fc(iޑytE~D+bzJ!_W5zPN$|INeL侟YiP=Oԫ`s?`A$'iKPMɂ[,4\m(]g6ꇺBN[dX*DM0q;}ȵr/=.J"$Fۦ`E jX! ';R+ C 1-f KSDkm+7D5~U#J[? { իQNRϚ_?M1.?NI͈ؕ63$yzp 2[n4hH{k2s./]UG!~UQGQƥFF3Guf{yk훮V6?PN|y )x#RGMT~& ȒĜJzbtb*^T_q$nCʵ&< d3 {lZl}@dos!B.YeHB|X^.}L"*[kcqtkJg Qt R%s">vN6nP!&֨2!YFQ>tlSBZlo< ;g04Գ!볌9)1aդ78og>o_@ 0>#Q(>|A*߷yW%3r_ݦ?/1x"[mf& cS.MLYwLA2c_ne:/Bwd'~kK.#ZdR 2 _Q6w)qP'yX_R7@<}^)w6{2S;i0tCL7Do@CLp޳Mɘ?YU$݄o=kB0k<=43 )2,i+V|ٲd[2NHMh@[_KSfͱ;^MF c0IK=<,"84)XМ@+@"pAҹÇ㬴C7Twz8/[xo~z6vC Wآ`(@5(+ }rص'X7H=pj< B`gT5w  84a&6ٹ P{aHq'(fg6VVx}> N3-m\??O&WzQG.OBTB\)@99<Gq(#p.,X{.q !!~rE+aKl5A^-1Qh<.Z'lk 9O]ħKv6Vײ>~N"$*+]85]?jj?ഢ{^)z'Uruu`}qh\9<;Uc@8Mf1R3sԹ2YqPX}t*DCOڧg5pIݙ,,dv {‡>cǁpFĴs<ՠ1-6F^4@'$_.6 {FSkm NALP:IԼj; @,W)~09b#x? 
,q`rKTLTVBGua~iʪt[)E)c@H?=#Zq%c1a }ʹrמ|>\$9dt܏Ї^`ACz0+mVȌ>:*ɻ'TKiEޏ:-B@ Y'Qr/J=}<eJLz ^ϪЫ1ϯ`ej*J̞ՠ'`3|60I|@򭍀VA'Mʯ/?Rg*Qu q)Oi55N?̮K'ef@HB 2S YJ;F80gY!s w؆?U3™TaHDR:C88}S2-Uw|/<)uE^qx9:G]c1P5j5S6T:_}5sx]Vs5ؐ)K PiF| F|S5A- KҠDaUaR8ڳADhs}-^ szʷefv{?ag' H_ i_^\䳐rS˝/ h~so3?By31/5C̽G} $'SQ-vn8ϴdMAS\§?: ,Ub5q+.DVXF0&Roӗ-/A?I˩{+ ,G=Wև hl؟#cE6ƞE2e XI=Jxe%H%O%Dz# &1׭( )ozcR̶H㣹'ޤt~bۛQۉ`vC/FnJ`;]69&UJ&F?\Ҵw|34ĵ0tWt,-}TڟKU&83[P3DGGAaJDTЯq*0ݙǖRMؤ`&Vٚ5 H~_ pU{.)Q]}} uFXWSf{lz/Q-dH 0W,; ow >r}_F߼1]S+Sz= xe4+"Q ϲ7k%Mty+@ {PkEq@O8QHKixTjhSDiBuq3u~*v_?S'4&#)-Kߖ qb0P ~oR(IuIPymq^ 8 ?-ǃC"}cKYD4(O}TXqa{7.N'tvL7`V9) lJzR) Uu~)|.\v^7b. = hҧ*B}{H*tFoyމt#:3}>TЦÑPX⫺W83dUhkl\XS0=늪x֋f5$e z|,d4B#]P>V}12s[0T#Z ?:NИyʯ58Vk=Zf_RoT_mtr\G]؉O"[ RZ겞NTD(χ[8t\Y/KW$Jw ʹarsnm[`Ұ`H.4 DmY9&Ӊ &mL`%ycI7z|O+HlȜd V[xq|! 5 &[~#gf6RCw' @c79~F~I*v6Q56 (VuT˧Tmy9=)"~E~`iGi@2K We$~Kb|KiÜIм6rh"~6ϗ=,۩Yׂ=[v)@%a4-eܸo ^zk"beP{ iN=DC@!I|8d9e,ݷ;[059~D~]% i5@Ţg˸/wӺOh%LS6O$K*\1&n [[|ѪFZՃYZcϹ/!pS* "~OB C`5zPB[<6(GBi]-` l\AQckܮl JۘQyBg2*9fO h3ש1q,H+Idcu\г6.Ĥw6FpMqz<봄?#rlR.(4oE9^$g<9'f *$VLfvQaY]ixdavYPp8G +Lê3ߌ;끭)Oӕ4'0H.AKjw8^GrfI5l'0#'`_%UDžUʬr=vF>`y]ҕL@~WeٟHLa!,բ)X$7#]i& endstream endobj 1294 0 obj << /Length1 727 /Length2 13640 /Length3 0 /Length 14233 /Filter /FlateDecode >> stream xmxspevlgbضmٱmۚ$cb۶9ĶgI{έ[Yv$rtpv22ԥt8xx,L. S7kG S7@dP9Xl,,qG'okK+7 #@ha xxxx3 1MRnV  +$ VH@.vw3;k @rp,]vV@Gsare/0 G{@RYC``P74Ww Jv+o `n t,ś#fswr@CZ9o=@#hr(:\bvI/79?Ѧvu3ˇ_YmvX/UصA>wRj`?6O9fe)%u]?nI%@/.5V1 f @_C?F9z2pY\^66rpGg@ /a}fQ Y<_ M??kI :{n=len|x_D4l&{#{BfB:#2\.! zU-|i2³*AD߹}XQ;s.Df3)FNZ[zt5@נ[] ?CD"P[U;FdFo#5эL8a2׹lKDm;kzBT>қ*ӉW}ۭt`B=Ct|F;3J#'AhָWb[)|,OMo37#wpci D@8/V.J\ʵ un 9D[VԾ')p\Qt{֝鄉 ^ƚ~6C ӯopZ:vrmg,ϡZs R7?LE>6I1;p\qbzcg7eFt!G;.-W'p~N^/TXvm+e=GD5B- DTK|?1*N2fDd9=0~ Jj"n &x ?čA8Ny=D@~ydsEQ9CzOvnbi ߈$A*C< &Z[ K qWiTYqGIz.Nc L#6dC]^8ÜzCsM|_|I~ڧϣB'>y}/f\L‘(ܣLax}ܩSse ݕE[obB "00D鄴Y&96ta#mPN3 g7p7FN_DP Vx)rEzYƣ5qS6 sDE,QD\ rʌa(Iwo0!wק3 Ϧ2rzD'Bm'U^BVit-nƅJHDUW.믋T [=l<`eSU\e~tnGm>E|@vLYB :`7Xt0ؑm8?[ }=Cꙿ K+UDA_0zgXsX_2xup}iͫ#PE2$KCaUk +>1UIuRj'xs~$=[#gJrqfPX@)O JQN xF}amyx疠J ;YN[rlkOXȖ tRԒt`mrݏ>dMLj!?dNg;vCtڡO7h?֝uϸoI.{vjԤpFR K!X>3)WԊ}Ya~ Eҟ@I*ĥ }iy&"Whi/W3x'St~VbHCeX2@4iw8 d/*HڝTD:-M ˏd+zEQ`JY┒~ɪ3KwOT,*9Gp:L5Due؟G iL:ݍ4#VJo.J"_:qoХ3;.!|25w ܒŮF>,yXO`Tg}ud2 Mߍf}=s[ib?\Js~!rv*nhog! rhܓM!*Bnu%7B<\A(iT!{z~U 5ۼ*%jzz_OVv^X2X h67ޭa}sl8)S%`h(Ajufآ\F|ߎUk[ 0?H/a:#rq)Z0W{h!z-,12lڕW @Tpy 77%WxF|4mAEog0n}>jM /Py$zy26.dx3F$]͒6M UXqXJrC4mƦ7ua[r8 Wy ȴW砬)Zwqp S<|vU^]Yc!ޤZ_>hdN23"KKB=IG(xB fBNIy0\:A=ߖ:bxYJhԠF`|~! 8 ,kۋvK:Oߏm+aWjL/hizJvEV4Rӟ?\zU| K*+$Z`sD\_:囧5^33=ZL{:=Vقk<沲臰'mۙ/@EJi#Jji=HmVܤ6h%\S2(P$_{VD!ZbtWK3%LDf#\}~71aPK+㠣f/xɈ߳O]&CgQj.9c}J5E`@3 6}31Y'xFZy,?Ā=Ӊ #cw3j +);cC=h2۰d4N&;r~|x{8@'YA直_Ddlzb7^Etȯp-No.Zxf#MK<{6wu*#(zpzm0M;n5HrJA {`Qwd,dcBt܇iUkXO<3;)ͣnHº~,#͂2|l&ڒʧ GɣCK(eOsPo\:d0dℂn EϿ|fBbŠWX5Co`x"5^9mkcr OR0U)PCe^ZLtw[/RX /魨ҎB# HlU願ZɸD$ߏ!t\:\1 Rsz׺ 'bXQW̬Moq Br.i$ƈ D%-~3L԰pzp!|̬!8L#.v~|76R~KerB8SMtYIJ%XΒphbRD_av3q"u{Ulw%RG؁w,VhuØ'T13jVBKZ4PvE.0azeu8(X5LkmtVmDŽC&r䪩uhvEXqlo {]dņb ۱صDD97:w&{K Ns*CCd~YەclƠ;[B.C!GD' "f 5%f*ܷ#"@%m,30pŶw:d 潶 K`qh0ڟ'{Yl-bvyX[8Ga}B4=2ʕrhal) u8mG[ |mIN]Ўr1K~=<#f7a͸D@clzp,(6G9;Sj59__N(ݨS փ^! 
b&c߾hҮ8_d!oFoޢn2nűWpw67]bs8g\(4Ü[b=}3lw%X(t#q KY.THד;6cb4Y9pzl"lMWLCcfxL AyW'T2Qr: zˢm3tj&>չ$V"$W=E 9jde#)P|zd9A[lNS@6oOj-CHd2lwVMHW4H_}K\zPB+s&i 2e:9&ĐNjYɛGM0[cIrKؓ85a'^1SZHz;#?[Q'2 ]ʩg 0~t{u큹e ҡAi0sj+,[-R\%C-`mˊ΍PGFD9`bfRym{);!i' y - kCND` DQ1C2BocziJݲzWTcfȲl&KόTgm3= Vm'3M^6*a)hM8틱Lmsq!,ݛiX򉖝ty|V.L@f @#Ch8e6{{]Wg2tbLY#kWC{>q 8G3~aO񋡶ẽ ̢)ĻR:KQ}7gF vF0Js>)l0a'2W$ϻ~V5¹H +&WTsxBFLB0M;([u;<2qM7L֞n p+iqAtQMR1!ۚ~Zj)3c|Z QM;K8Wd;fi*T)@Qs9qay]P V:Gi8F ]L.2%GjNZO+t#{Ύ4'ٓml,qe o#xO ^fjOк7En '2+]Qw;85cBo\B>t.(E`âMM WZ_f)J(Kx;b U؟٦PGȊw[ %$nqcU“Ư1Y7xq[CQC_LN_$nO~xӯ]a%N_7Z W(LYlS\wD&+mddϫЄ|uOXc?N=:}<=~}UGr{5Ļ:ztΎ~}|1DBƴVfHv5O9\' BP@wR'\6& !ii+z%(`d :ǎEe=Pwʖada$[e~T_k_b靦Fȿ@4W(ރ Uՙ}F>%HQ-gLޫӞңbkJm!$BNw9%/çf+iNp> @_tޖވ8Yn!1pOu0x2!u&Z%[_&jЕ| W.ҩg519;NPzΠQY); 撥eWj|*CW;ۏ-xWu#sg)b.#qdKpj8ThVm J6*cW`\A;4A$,S!j~R*O_|3I"R|醒D4 L*رP vqIg\z/Q2nT@C`ni xhcAo*nEU<ff$-Y$,x;+&"p<A>1cDkq 4'e)S?㨤#֬)WHC /t5UX&Ցv?Ϗb8-λ+uq7-vF@? >o_]]L²Ht*$>^ w5Ty|*a:hy`҇"Y;r4 ~sGgoTdͩH2@gַ&ո*S%EHL*.OM~kd 썠KTK5 Q?wƷȿ~ORVYH\T&,Q2x "cU@jk8lxBsP)H|ÍZ;_&miiK,SZ?8b ґV±f )P Αڳy= H xW|;0Dp;&FSEcA40 IG4<A2uiNBWW)K:< @W0՜HkC" ȱ K`Yyp <+ ;OtILVw*{Zs(Ɗ!VOu:XpWh)w>r  }u׃n3gI[nB;8} x6`@6H8?aweٹ|sS<)λk&J"m)b:x3YM?lG>m*zG$fOD}FQ#|W&\Mg5WycoY9=]00ϭk@\_WcB g.P&䬬W}$_ ٿ ;dˉ4 w_̝Lpo`WFpmD{-mubx 2q1{%Q%*Nm r/V);zzA ޭZtx!Ҭݘޝ8%؉s"jnVg"X ffLP_xpf}eC5J+zȼ'#{mc<-c5?Hy1קTk"cxzbY:4Fz4ڭ\fϧ*kZrdzIG (v!K}E;"Xr~Tkc1A{`J>EXXJ M!!mf 6*8b;O}fLȇv5vCF 12%<=,p1VBBmGfr#x),)ixK6/Fw<|[}Nsڞr rBHڛ~q(AEVY;Pv:+.*Mwh ]p"W N]4~{xzxx&%9r[>5&ޘʠvS M\k6駭?C8Wle'۵w1&<~TL -28Wo/LI,^܌S;eՄu#└fe f|d˴WYMWz"lveEFA^9=kF6o uzʹ*-eW]6Bs4ak?|ÎKX?g?˺"=#B&wS ^f0wʨݘ 4SDaeN MPrcmC`v;!{  Xu{ .dNJ(&ykZTQE?NA ft\yl^$b7W&'\1B,̾E4rMF^ƒ}+N Zflq5$БsқdSQd?>oB <<%4Z҉p%kRu:G:7a#ii')qH20=oBqr<٢o=hzxKF>a-NCŊugG<F uk<ي'o@[FQx{|ޣ5MNE{\TR<4l=_y-qWzB)s!&*c6gɾ\W/<w+ye߇xFrA !LUF1my 7IL)Eo46^H\#ZC[7h!UbQ(suh> ;p\dUiƥ{7l z:/ebd0+dT? 6;+ޭ޾SSxČʰF&[/WUhE]7qh`oi5^ݷ (˓*T={^%*޻}34lDwi gozͥAoMs6d3 ]Se:z! 66F̥ siCw:׸hH?(yK>e3qlK?pqSEȗ#'-"#+t{ fCs!#Gq /'Er3#D=,( #Ѫ-IT?}PVؔ&i݅?qϏ#=L'˙s)MO;. 42 _6sot,ROjN:kKjwO| eX f]W㈓6pKJ_RϬpǺp=uE^pҪsq3XkaެCTFRz._ /j'2~ z~wQO׸0l}8:DK$ ˲%=z4)Ax-&3g~! *6UKЃp>4s Q@ en~*gvb^_r13+T+WO5۟+D;m&e,Ȱ/54֭uub LDtQ[.uC\ ,mds$9_R[?:[Oཧٙb\;MoDoV:#Wm6OXBFL&4Zs̭ދ6; 2H#ae2plHD}pPM'RAI+6Tf[ joXƢ]:뾡}O^#qa;#Ll gϻӿ+:T].XHGK=?ʧP E}{,?}Fmzd^rpFD"M #}Ib.QEv@ĐlalQarR]sHPց{*J@..C&gZ1!wȬyGĄPFqm׹2@3&TcyL-VT豻T9Ax+\)~z x<#.1IZӓ*Q9sa]I<5?DHI WX#khbaҘP~:jI8SY۔ ]T0hjS}E>K=r~ U4PC!bz[Rz8r<<]Ӧj/I묝߮3n(V#"8aeY\MQ 9[|jIQRd!+ PTVs3qJ'-@cP| ]w}bM_#: qҵ.]d9YgdP]k-ȃ6Ӆh / הiI&+E~\q=t5qiEh=un^C-0?gàu;:٧qhYc2;@8KKI[XSoH>M.f's؉ԋB#R+?;_ؤD{LbsA;8ސ;Ky_k_TIWP޳,BK5c9T0]I# نr#OMu\n=g޺ nkr $ |2do(L9p՘Q ϊg`r-9}](YѤkr%.6*yZ,')ձOTQqJLHnLEm']8v?"/HTԤխ;FSΖE1Dy&Ec7QJ6QU+vJ ΘMڒF^+hE{Z$g*+UN]>CU FGUiF(Z=7Iԅu~@',G||$> stream xmwcpmlٱtl6w;VGvұm>޺5k֪gQI:ػ{9ؘ٘YjbblVf**1g+4lvVV6*+֔ @`nej ōM/H ZV@VJQ :LlLV@{ `o``of/N.w:%f`PPTRTH.`k_ΞfV=˿Ԓ7wp;l_.Yh*G0vU4h\3=@vjvƶYzQS!jW {;d"i 4Sr+_k}6 @3+7=U3{[iw訪hk0w IK؛:Y[\26v6oտ7?dQ]<zZ} JTӇ `q_\Mݜhwl 4EXYt0Y5KNhX3׉n[ܨo/U靑eO`12"c!e{cɐٴX>Y:^0:#렌7+RiBWhp6z[iπH_or ڮhJrNb@jMxHS+,5U{^e v`&DL5$}O-\ytC{AbW8+LM j]9:TPHd_gCΔ* گI > X秪߸ UEC>5!שBTd؃e_D&qyԁWQSVfڰǑ.͌a6Zݸ)1W1 x^DgD?ūcTvC=U_&񡕱U Bi@Ua_BB[YgүSN bcs{]\3.nWh 1C\R%x)˧E ]G'aُ='ֹ?0_vsPv+Ŝ,˜&WNbbǼE?܊ ngI~-Ut> %i\*x8V`RehFIDCZ$[u%SHx K*4EﺕN&d'1thHF2 EARTTG@Xk-=@b*UPHN|z8px)pz2."2qDdnGxom ra+"BY5P-Gs#4溮>KkifDût1[2vPgZRi2mD^~mipq\Hitu@283vOMmz h~As[Mɺ"ufZs;bYHwiʏW@} (Fwk\J Խ8F}#q6FWZE2qo3y-;}l@SK֦@xsˠ+%ɴC&m @fP}!BϤFtN^+wu]ހq:5fϗCI4,f #? 
ޫ?QWm/aeZ5 |dZP|r3TQVԱҁslj^XW׋Ó RyI`UegO =dLAh}4ԓ7d)H%g}&nl\4`ԦF$cɠשM}68 07D89jyTEfsa)5>YJRa.ͫvrR'~Th!sǔ@fe22jzec3zjx o%sNЄݴ R0Gb2-fg$_4ӨA^%oeϷd*=]~XQ:7{y~B{—%FLڲw?tV{NTT^抱 mEKɤDc~&NUPg"T5HӪL@@KI,26TICw-V֋A\d;K5]K9d^t0tc{f L仍-{9$ +q@p<6Ձ>]rpMYt67'}^TH[楙1)xO("L}})T+wlh{V4Y%Zb% 99+ɟ]U`#3cBiH,qgUB']Ѧ|> s? ZVz<)5mXtWJʫ= :e4Vmz2+Z;}.CYg8I[RC.Ƃ3d> RjE[do.O)Bye3퓋qsagħ7Y`8gIlj X;50+Dr!^Z[cnIt>瓅 䬎+ʮ_%nF!)_8m>>Z}4,nfQkiI9WP٭kFwI`#$zHTFlk ?Cf-ԊPS|Y*]tUI&ؽ3pP+M {.r/+)4|ҸR% /Di:蕠DaNv$:wNP]Q%q(=؋Z®wG#K)ɧ뇎t*wgîrAOnwScLiB})/ڸrҠy)b7Ɔa;oв#IclL@qc*!⫓}|·ڦgY J}G[&o4mY|7+snShR\Yۤ|u2mNU1jgȶKB$o6Gas(9WD4C,dV tq&Mmc$ $Z-) VOU\rO [t|\~5J${l.wd:i'xADDL|ZҠڂaaU ד_zcs mb« 4/| 7wY|H~^9bQpol̏R*=Y6 /m}>}1GO9"W1E&1PKً0B+r] }jUUש;^W*T۴.?M$m7<;ɶմ?>. nؒ⦀v|dm9Qiչstis 1(E\L?2+Ɖ]v2!ٹȩ{}LI6Kowݝh n6+ޱ iOx*#$cSh7@]:tT*~SL=3ݾa#s3kޔbM3K-Fo,?]Mbx#uMj0@ݶbUv&Q"ӬlkC~*\ @ۿ֝vIzJ{x!8zF#JsCCd/|@j0U&ureW)9,Dyb6'*ha)dmPMZ t[.41|ҴvWqi^[(FU9+1^wÙo7%$pg rq;w&{]x$/|-QTףLBT VkB3M<""M@, VllڝI<EP*@iʁ=FV+sϨqC"Aɦ#CK|3i2 Jm;0rwRǕEs.3u 4L 昲oC(Y(fL#.Hi1sZ,y2h԰Ug Io/F)Y 3*yY( Iy;,/6]Z՛iz. [XRŻcQ.R9ռ)^`umVo᢮v]VOdRs9-L(bR[~ > _ɘ2PHzS8OFjKxXJNs5Kfo>$}s ld! asHʓ U}{z!c~ ~>6ٔo`suȤݢ̈J*\1Ђ\u*7xfgvtғ[Vu>8,*#LhURS1P '.]p Q"͆{rzlDh{1`}vc!O)\E7 .BoLp%U 꺓[E=zSX~Ud|҇dM-1;ZCZ߃O:½iM˲!띰Hu\aT r_1} fcMGfxK<S4|VuGAtffKǿa1(39n0qO=y;ᆯ!-OqlK8WVh oΛ,GoČUy z~Bb_EWKꓴz"?;:X_hQDb5Mϒz)I ?GS[vm_diPek"<P3p-HS4瞻SpQNiׇL(=Fiy#>(4_kT _,ʬzkT)B&-)1Os=uZ>f:^o>ހf NwXRfRtrɕ,Mß񳐰Ozō 痋bRrc[Hqk|A|t8etK_PPM4즖Rc}؟YXL1-boz s?rPm½lo@o%x^+ֲq$s&3,ѱٱyrXN zT=j&(èЄ[qO?}{p>WVyi#\dS1PD:zsɼˍqf*{ 1 C-Mi()I1ے7uA-`HzaMs;UxELu~ .sCCCbK=S=W#=a w& $/#f ֩Y4S}u%7f^ǽ1T &?BLIiS ,ikvVqU-da}W5BMUM+ːimZRk2ex}+-eSfāB5s7LQU`j@Cz\4_KivRvxϽRcK>`}em)/9qCe/&DREX4%O`6g Eĩ/,7x8^)2 Py i oeVV.e9ŲL xwfPo@)٭00:dp 5Sbm}^1B?D2qbX&! ,M-BQ Vwk.)G7л*KcNWͺ >:~ye4n`p3PR9LX7p[)#RfTSԓ7.Pmk`ʓW~9 1^YdZDPr#SȗknyC=!#gL:@G&vZ3u1XTG~+ը5%R]Z'"1*Y7h ;yT1)G4=,{ѲPg+?BZ*fL0Ыma6lhpi^·k|1P%礊uCl |c Hai$9=}rD|  `mZ*'Rz$$^HF&)y0Պ& ЭԖ9KtJ5&PgƯawxq@3c$B\zapY$ؓIB4IPKSBH뷨K\1Ɣu`4<ߏ[C9L0z>ͦxSrдBx']ꔥGQCﰔ0-;Ɲ0@y7 亀`\oQ w x`i*<݂񾧇Õ}[M#cDELCNiPӳ.~祘Jly*r"-ӉP( s}O F?mM9Yå0\0ݩiBiXTTN<-v%^J];9@ĚZb%BO-&0?% -5GyMo } )F9NrAq 4}rW{//BSuXpP39Og y$K#; x.M{=a$D%.Bmm/sUP>!*]>b<"+2GRi !+ϙI;7S3.] mo!JZײl0.a&9:9**=TTsRg\zg(Pv?=p=ݦBJL ͷNju-$e&RtMqfm@S **lC*[u`{G]!e'TzM$w?k6QFT1G]0txElcX B3k& Ei{ze"]eC4 Yƕ!@ sCi͌j$v&^+B 2W.6񭗞W[%}&2NkV <ΦX-7-GHm $Qcdj\;Cs]|Z}ǢPj3 e[#΄(%!dUGwhȚc~e񴭩CP(͊`E>b)r(fU7 vto˯I<4ĆO=V^KB/e{` Sp.Q2XgO0*lAsȡp=#(~|غ1 wXu_ ^8G  ͷU= *Q=KY2+Y%6dltbm{Pǫ)16] s$ϟA.İ4Xc=1ѳS|:XG`Trz \U9p3\3qo;qZ H;n15yc)j(J鵶ڞ)]Y ~K%R}cUeѱgS{6*ϊHV\f|xcF0s8> (s?jȑosT(X[J&h#Y8(XݳU^#ah21,%coe oeJ̋m"wu*}`+æ-XCJ"f- hP4:o9_6|?H$s endstream endobj 1298 0 obj << /Length1 722 /Length2 6337 /Length3 0 /Length 6939 /Filter /FlateDecode >> stream xmreTjԵ ]Cw HwHÌ% Rtww{{9>, N3++(E@,, H PBA%@FPH  , 'gO$5 n72[A`0$ y rwwƋt3P [ R}bW~R"H+8HilW( l_\y&p"Q"AZJr: %>T3@Ju7n_d__'A d !pRK a9[C+8@n78\ bWprtvCA -')X9N ;wNor?\ah(D#LFF 93ϯBp]稿)j6N kAQ ZV($ 2"#뒗wB{xA`aAQ "Pk6SN6A)!%J#vuڟƛg0E_*^IE-Hb/GreNi "ͷH\[M'陛%b‘:K [XY+WM:/z{DI)"ezYkkWZK.thW ̿"uv]Y*Uk)\ne]'u}L^$">37@ٮD'Գ2#ra<|24nm-5z'ΛsI:eg EȀos>ahMn=n%Q 49|ddx{nj hX's$b 90BhD1B1j=x9ROXMLa?qb\DYMXPBn(-^DJ7ꕒX@eV%U*k'9rx:=NIhqyYϴmnl5ЛZ0'\- ͇12aDܟ"`_P2) 4~ba&ںPkeɝw ;x{_!M] pv-vkn#IFdm7>b7̂X*W3g{xhގ;6nm p';e}]m6Jjby{ۖ%iAvbdǎJ%P4Վ)іbHBJ.[09 vC+[y- `R o:9&>MgmPD[ _O{tV*]z|!zdys#/ޱr|l@gX:(VƜJmѣa66?noxSlǢ@LdokubR+p8)RN@rMHCAys ~V: ; ӺM6V W.Yńnphx#^JKIV;~BrL hY/uuI{[wшo>vFr3Ls[4 TwMMqEՉ ;f^c>Et52/O*~"^ (;z]+L@>AׄDZDM]EWhN7%'4>n?ˁf:i*y NM;:L/~q!}ur5kt:@p&D6)h#@ //&̤Oe ~*86F,}rځAj(Liniq]jxUJgaO-_҈ Hҽctfd N>?Xqt. 
{rTgg21SIe1J{X_}>7'#ERS&HEf2О5t + ;{5`xNAVBG=OwO%or!|_q!jOWɫq='Ti?'?vXI+-cħU:~=%svQƗ-ghPJmDKmi2+`%(lh7>TiZ-o"ʛȝp aDZe鿷Y#Gv_.Ҧ[h(^ wIGX`,mRϳAMŪWKg8 2!v&!:c[H XWbWJOlsmWpa9qݫAhǸ؟结OAy51l+Cp;+&@+1.][KzBG-D6ˆAԢrixwpE>L>BJٴQKi|,E9f׶\Я[ ddeU\sP\-KL;@JMP6aRX4<&0Qm-5-5p|ɩқcV 6_ʠ<\*sKIx"eݍN ͐}Kx dK p;{ؠ'&Q>XI`"Q <څLnWPt~q$zx7\o͛3Ng#otڭl>_?"7~XL_&{Kd\y@=\u | # G(G9_*V;~{x%z٪䎕graH(S]3  黒].5"T[ G;ς#cTj"sck{>NCk'/=ysy3) d$b1HzZPV9)vLVw`;1ۿIi{Ӣ+0sbڄCN-b$n4@~CBJ,H %l@%Md}1Θ(f\x8$9S+&aQ*|zNfbw:|gU"(k5tq'yXEnm,7aÙ#k`TO7fLL~c)z޻謔O0p_[8eiǥ\BD=_m'ºU[w7[g S.{E1x[ۺOMѦ{-[<$Yd&GuŸ_12#Vջ@W҅q,4DajG[\C+.P?1.6z:sc)_'oSGUΪskxOR:寧P4ůRA7&K7赲/e?缼0˯o1 YVǨ;9S$0k\Pyuh{Kvv޼nߏYx"!7#z*wn+[$(.ȟ1́Zgi3u'K/07T?>s|+{4n@[nFK~e{["a UPRhgZxtn^Fp`j6V c9۪Td3O_(baO_Ml!{QVeAc]oV1)Y5l_V]] c`~Gɝ[Z[jn0ᴌvZK])<4s2;/>t{MtT'EoڍQPƿzhfſlmБuV>x+ԝ%Eǣ^vBG8zTrC]]Q5z (cajIaBÁ MBpFڪ~t^cl`׵)ܡ`}.tNXIךXgAzkCww=!t9ۋ99v;/H7pDzQ*My]X%|@(Zӟ~ۼ]$s ʇΆpÃqm.e8˃S@C/D+pi.XywnOEw%2jG|ԆOA|VVFz^ca#;k:Hхy^'w@0j& vk]g=omqr rc:rPI%>>yD<8[!+O!.~6^OdX;pVQ2SHףYn*.X?@e bSa&R2.lPZo,¶*u Fn8+Ŋ5l^q|DǪSgsω'%C"Ζ(<4Ⱥ%'sBF!Ohڐr!=v.w{]âӭ j.u4Lȃ>gbў&CGo?) ;(og!+",_p:J,^8n;ڢYV{ď{YMGGqѾē19W Lc;?fCK2'H,iq.**y;5-? 35Rx^97h`dF2`caaKG㬫 |Y LGJ OgSI Xi{C&`E, LǬv*{<ˡnS!1ܟ#Qᘳަɞ#^IџZ}{L~K뭑FCpʂ`zm_X*}py'i|ݒӯ3K{XwZ d0bwFeTem0iT;*UĚ/i_0 drRK<M}@w՚M{Or_i'~4Qq](G^VPOzr"7ifmEwO;0/F8ObV. ߻T,<'# }l$e'dOk*& (@R<0ȦbvBG˝ZZ8D.2oŭRsO.O k]6S)Wņºe"F]Qn: ϑ ͝4]BG, ςlW9tjB |}S"jz : % txу<ǃjlZ;+@eM% ~{?cRF"Tйރ[E搗` hs A.Ye>V;=,w郁]+,InX[p'uyPHc endstream endobj 1300 0 obj << /Length1 736 /Length2 12837 /Length3 0 /Length 13432 /Filter /FlateDecode >> stream xmvsp&nlym۶ضmscֆswUg~fI$]< L<U i5f&&&+9^Tf 39@@eJoaklfegpup2𹹹 928 0S \,s+[ @TQI[ZA@%mJ&V9+S3`0u7'gw:C /&,eTۛ)wq'hj.lko?33`cnf?ThH 0jk`lP:9f@'{ww"bOSalgeIGEzf6&VV@3%+wqrfVv>ܰ=}(2aq{S3+{ ?;wX?`3/odeb׸0kY""LzV&3+7 akw? ­8[VUBӮ0N\Z Y^ݩ#^Hߵ0"MNEཔVZn)6\Ahڙ*@\Gܺdl)Le,Gd6*'~m^Z-S O[2gRxe"kв"0LBA5V.@m協8eMu"".c!!r&InPk ac5}">o QnR Nn2$$`dkD*f&@>O)a"iWqbK$S^28P&oDSi%h`vκ>JЃ3ˈq4{GPnИ>sJדϽ FV"Y鹱Q?'Ct[M~ĵNH,Yׇ]6/Ps wYעoF$@PңSf^Iʑ2n Je*dE)d0k6K1 0<3q={p^DI[sRCOM`_FRc G:5O^?Ƣdp$:Π^ QYuRE, Pq:LJ /nF0/Ձ>|>JB kk*`@3}E@,0!ѻilmhjB-d*{cO"<< `~6+[zߖaUs<-*{&o&s֊)\XKy1& `r?.i'Ho-Qz޸[LZHe>u$Šn1u!`Z\QĤ" 0hc@GCLMt\ʦ4H H[>C U fiznKp!8E:+

p^!HYWdɳB] ҿnls-`CF3;Y]/K?Pr8¬ݙփ' "B4p*]~ $Hh'p.{*LH%aS3>V s)LMh jkv[x6P좉Iˊ!?E1+UW?m7jr )Bm&fVP>D_DOūVjW|CGqM~L>SQ" O^7@&G݋eF.qbTž@1~QS#ѥ?1SQ~tV =&7|cy(ym+3C˳WvSK-#0 Ai>SӜ,bz@褤 G|KC8N8W}S̪Uב۩j@WZG%R@8s3\ K|8i hy?Q!%mK‰Hu$l|/}M;.5Eq2 ġ5y],Aw>`ª~.7Wݸ! '(x>_[t+lҞҐ߅ꬓźsm'mmV˄FZ\@Gi+rj *OښtLZNrnHI<{MssFSWRLY_ 1W&$C*F#׶:|D'Z|C\QJA17Чmϔ;d m!?I+gO;1.޺*dV"1~@8 dp>{?~~_Π͜)JY/&ZZ_RIA~ȬO=䍹:TB(b}{SbxG n\Io0~2-!jQ;^+.&1_z(8W hz7'{j˕AUiaFbd7sbrC"^n;iV9#G8Z'- q!0&+e1U=i KfY7+3iW-Gnnh ]”[JHLɪywt𷸼va&RQN)WE 8R3j]| #TӇ,W0e'R:A[*Ϙ #)hxpN|eslv:+FSU+e?^˞2Qn3K%N3CK{*gM~i]@5xm70F^ *jdQLWi;KTb- GS+eU8hԤ}$AX d} 0n4FlOyمӳp.<]Li!KCi(*; h.Zd. );}'*S ,iH]f-~l9r\+rV8!5~ZEw@'(?$wA^J_N,>FDa9ee`DLpG(BZcܤ^ OZc̸:μĶhL* F5[9\Z T>62ĭց/k gf`SW`巟| y8vHϊScl9t5.Kzڱz'. hKIO]S?48. 0Rb 9Fi[͸Q50˼ھ#4.`F(ķ_NF+X`: /_m0ϐ#!c7)(t-Tj&LH ]܈yRiKCn/b~#GnmÏ[T|c%}£0>̔K&иzѮW _RXPiB|<sFY=ѣ~ee0T&SO`tveJ[9>,UʞRdR 2>ўvbNɞzAaOlY-mґ&:Gv Q(&u%<D5/r&BR"llz\{Ky<ڦ{^b\ ) 6Okt6 yݍm^$ 'zә7^YN$~tq^tԸq Ew/'R"xxy&PSky@E)$x_\OiF!7|?ueE/|a@׵%%?~Q$fݺC5)7\xOW+9jj΢G6e⨁'|4[gzŃ (,#ebx( ל s`^oUK͵]dR8nƯz9Alv`]RsY楞h`UJ19mM_}P77`  Yn,il! J۔5}^G[ƚoz:|)5 SV3X3OG)]=32 zl؀`'dZÅԓyjY/qX$ށR[ޢ.UVxT@,SB_o#2ʄݫL]` S}#H4eK?@yGro=>pdkǀRr-*Y ԉRxҢtLoK?9R!.:6{ң>? ld\HF8#9RFSzXd`{H7"u_u/nsWKSx(R!W e~=v7V(;}Z5l"B[і SEҰ6+m[ k @w3B 3/6[Lf#&Ҍٌ)uOk?f>hnB CV7V3)Hzn G>򬜥囻D0FCު!8VȔʝYjHOV "4ΫQLQ(5]g(UT(ܥEx2C|bPk`E ㍭]}e^tWIH&3OkQR6\R*pɆχ-gbZ9ыSPH-p}PC莔 ɚ t'|$;_]睕gfrA~uiS űQa;S.ֆDOH]06Y}dXcK"C5&}:f b+pߊC[Վ<VP=rWi'kyb+Vaoc+vRaTF5SDqvl?\Gv 7n3T^J*G#zIfk0 yy_ dKw ڏG|~4J[㎏ yd`A&k7nKķLy),*PXDҨ3ZxI?÷Ra)ri$ HӴAiylr4tH{PD L  HgۺW.kmDA@D F2A!> Ry&7v-߽KT.5Ov%Kӵuk1}+gί  z' \ + `wb6/qyX]<<2eA$Hb?y$1dr51H4R MH74WQhMjz X>yɠ7AeGDmq4#Wpe17ÈАޛ6:G *ToBtݼ/Af*]y{OFbY%|bn{(DĬgj# |]ݵF_"dDduF2ɮ'nUy]Z%ݝ ~>bU" Y K ]G8>xEՌޖZ#zmL.R>Z>V(wY@$/89b * i ᄥ>q;eqhN5(A1LXnO󾫴Ej㵓j3 WSQnRL^6 doObRex ѹ$ƙh])[Q)UꔔΥ;=#3C T6]<Cb!v2 b57J'{FFk\&3S%3'&+O `鉜t3, <،|VenkYZzo[ˁ/wl9{> 1XyR˲ V~|9HAA6._~(Xb)<;=e%306nI;)Ū1<*ۋ } X5o vgK3ڦ"9b!]EOuƩ*qs8Х23܊MJsAEH0D0b\E}k@P=BN]iv%UdDQ}΅hЭkZl7e0[扰U.IZNGudJT#֮`$ /+ n^S/ѣC bBOl23LjaA|y< x _GT>}wT;E)% /4*')BoBc*APc?kurիo'v4yS$%zóH ʔW8MIYWAL0ҌfbV1@E-b9stJr#Bv}pPmP;fO+ʺJ`pb5XʅTف*gw{ojD 7pk6!|.Kg;nηBi5m^y.+m]0ȎːzŢIYM.6l=3uE*rC}lY"ghr0KNKȿ h,X%VL OҚaRabP.54)o$7qS+/8GGTVclL b_|hMgS W!ỢB]g2ǸQ-1R?B @ ƀa2:[Z弍D[#| Fs̑Zn"f6?n_=p.-K튡ΐԬr6Ir<ҵ}uKAܑ-`BÇ azwn' Ue mr^ԯ6T\+ް\ͫ4M v; uUONH9V_Y*L1m1!MpVey" -L"YQ*7uW2X5D+M)CZ@eaAgd0PW߭iIЮTg6XխxkG`h%m:K,-;2wW<<Ӧ ]+ HvٱvE}@#5Qk,b΢v31Uvi0t>>8A<Ӽ_^gGmՠl'HX*8՛l}&0EHphÔ=+2@}`zas-1m,%yBo2+fQd\ .TE =rCWJj>hlöh ǬYOSC9YVn AE ʔ햊^[߈YE\!ޤ V*!].cRszf JZ;'k( ֛ԓ=Ͳvf2/sES]1|A!2A\o- 7HeF'+5Mcvn2-7[!oM_M}f}AM/g1`죦"e.ƅ´җH F$)tOrdPtӛU_-g_b6u-ġ/$[!WHЛ 5QW65k{$di)چZJ]bvqnPmsO0+ D}#γ]V/G~UC~,Ti\lAQ:l⯔D`6)sTn!֠/7uè7DOix9FWz2% :]=0KHl;Iu&D*&k|Ly7$ E~Uszލw#JK&o҄(oWSfjc2xc!v\u TPZDE/U G yF@.me$ `>^pW֔<^T{}#- G_>QEeIX|!-&KKғxRg0]cZ Ǥ!9uS)"},0Yo2мh;a?(Y'D/mPȯ֛a:| hz[Z ljbI@Bd:4 T+fpZE.{vK<^9,uZ|s.|Di6Z>z} + t$vY/xn '9n$L#A߷;j4E-pjETRE_:T!UsǷ^h|d1۞^\6-.ҜcxˉkwkÆg/^poiDn`eF7Dv)v7>Ze8@A7tiE5LJ֝o#+3ܓu0bJZ". 
8ETil{"ߢ?<-xN\ JPm3 M6o3yȟm:-U 59NVcuaERL/%?=ّJl6r"lVlBbׯ?C+e"wk )ڤĴ+BDzѣtL_CpS̻07._}S i:,,f@H/\z.ƺjҁc( fQ$o[igLj圪uQS^x^~q&*k0dt.}]c⥢MM>vܸeU7Z>l**%iaA F")7K{u3ylBJwmi+k,!GH#ӾHTvBLB8*;[ϥ^Nj"sD%1I 9 cFUS)Z'|>GO_`D'1ZQ>6j0Pj4Z Xz(3sV)pz+Ye2C!X=3q|Q?QЋtd8ላyTJyx}lL9@4J5<PeѶ]7X 2ss•bs+[K߂3IRYgҊ=i=@q昵 Rp]'gV;&e_E;vJ"+4qAX$<jVsqqM{+6[`MZuv|/3]rо:,HD $ʝ+V#)VRaM }G۩zNDkɚu)XU_J+ g?\v ;RV CP7X_Bo=Ü(hZbF h RI wvEѵ'}h'YqJ"FȄf4L!YS:F#/juo*sXяM]AϬvxO=$akTT9 -`@L*&M^T}'A%4g5^\ (4LFQ!2S9f"/uW2!AͪX?kwyCw@"9Qoς/V9TrvVNP|WSKEuN 23jg^!߬vޘ@@=׫wvzdi/QL9\Ad_oT%_yU#3 gr q.HbVI~]§اȸ a"0Sa#/rL-?]@u9}w1J_(ͻoJqDNiO#Pẟ(˞VQC밢<sI(= db ߚQ&7IH?1^T'S;g֌ݪj7$m8g^6WOeyX,\> stream xmzcfAv۶m=m۶m۶iL۞m۶;{w_?Y'ʓQU۹x:213rŔd,0ddN.v".\uSCwt4pPSS P14tػY[xܜ]\)\,Lf6ayMI9q*@jdci 46s6;l5ۙX3?ع:fdo US 3 L2\z;#;Z$HFcuD04v[07I;3{ۿ&6/TSޮ66rJa{[WS'@^6_t1˃_t05QtK_W7׺ȚX+/W?'dPO,jglobigPv˥4+Zz `@ߒy {x131XLLNVVkdjO? fuޘ;*wXhL8̔*d6 K;|w8@#ޝmɻfbxS%b s7ł+05l[c9[̵  ܳ)R^dq>p;q*] ,/8=j_WF/ ;pd5̿X`շz|dZ#81+DK>WgZ3G _NYܡ>S+vmimNF~ٞ!}ЍB糵*c=[`زٸngSc+O31-z\;}*}NZI!ioTs>༱Na~a¿sL573,9L3FHg3Lc#' VUK̤ۍ$xii IX']5kNkS œyt8cVy{<,=ĵ/ /ίGa2y 9!&}NxFokawSFbg_G#B,w+o@LPΠ-}hU?V٢hŃ,Flϸި%y?W-[6w6w''C`ʮvAǘG4 JGpq V~7TT2"⒍uz{ ƯbHf\Q*Cpr"e!;[\+PhEÖy!C2{`˥EB $ŝ[8k $<o5 .JP nb+Cƨ35^oTYA6Rs7OՅ?)\B-|3 |#;$w2r ƥz|dm'L!fx<7wﶜHl0 R#OsJ-:{v1ZMƒӽW)eY2MGK(P~' aJX+-qj`rmKn._ԯ ݾtmn+n5P= ۡJ=Б+c_BUq4dn/vLi8_`:дL[p#?ÿQL}"̀³8֨u vx8= 3%楞fAt"Ya39#{$Gs>.¡%wҨ?X!G~&h &M66'Kb_Oq%Yeyr@I%Wn` "< 1[ic,Pmhܙ@vOX߸!mbQM:kXHPS>ζrH~TdmusENixEjБ󓯉7g̲hki8E`^%Q6b$(Cy:+HBPZ_]zԻ;6G/U/m>F{>7w#snV0絲`fUX˚F"1 ~Z tPꚗ!r+=N:9;GfP-Ocm6 = >4kcBk_ԇGgwzhT~mFcυANQSy|>(.Aj SʠK(y1T&NUhA_~Ljl<]5BTջ{I&Ǵ25/y{tFp?;>8"3Lb/ )ӿq 9L:Λ]leXl +vgZOL3Qv.W5 uMtA0]sÊZ <`x3>ƀ1ؠ/9>;d=.=sÙ3X"f<[(`aJFF+ LD&V D@8}dQ,OiJk;(ҐOƷ[ô6H%SK[9cׁxh+Js-,D c H*Ė`$Tۯ+8w 873~WgY-?R-[@D$ `Aʇ(\x#u̽{d*tŰ"71 B{hE᪼]ݤ!3w.Az{NBeo޵sOq`Acդ!Wuτ. ,=☚e╘t Βz8 wsIR.9SCYvS'2|(trھFJO?epq܌ FFJGֻ-BH6RV#YIP PH@Ai6I` ZT1expAz3 oΔUH,l_sM5^Rh>&JAq'(Ä@av1+)uT gN(W U@~ɬvVMȫ;^c> /g/=9̿tA`PCȆ[JR6JD4& al{zNGri}\ƶOk;]~&˕V̨_P{\lQAL' |0Vt {x8p~4Hpݿ*4$իS6d=>_1sE&gz9R0ZKE),Bܒ+Luu2c[۪1錩C4nZ@#oYh.6E60GU {e_s&\蔠zL6֋ey;r#5^cDmbd"YЄ RfJ:ؓI.K;:gf )J;;WК!"p&4[TJ2fc+xi/mTyvw :{ =k=QCh1R8IKX=EFhw^D?6I>VQE9zۺ$|,]~9ڋACIwVddGMƲZ8}[ ݎ.H VCUڥvя qٸl/fe6SyooR}WeP?FzAeK*<0R$4m;VʌJ)*T!$7H/"cmqUԢ,2y(%<R7섇ss3|uĮYpJe[gn:4I"yee|1G`f͟e4xh#1j`iDBȊx5**gv\LD2ϫqvޥ-o{cBVw ֏ǟKM]Zm-O)_[ȩ,{ dݏ 8G]l֍2Flڗ7GeĿ ǡWͤ4Ͼo"U`ÚNa9# yٸK$A$c|)s A"(rlZ*`EZ{Z% Y_ BBZjJѼ젻Pl2 C&HͣYAcRpTy858P% ?STN7ϼjЎPޯsۘ3^M~M౱ᄍh =v?k0hB`֧4s?ѩb +^],t3{m.hGx2 [60fy:fkGg"}7wɀ'r70vY=WQܟjXV+-qX׆tK;?FjbJp)#nۂ#b2J%ptΫP"9U k-#oP9k5XzG} lDA]f̤7ۆv\ G/x\ٮw@k ɪsw0]v9u9YG+7'a{bsO{J-}JoɆVeb*>B|k=Wk^@^=,d()_5?keĤ6|P͟ĝ[nx"%LHct i\0ai,IIhdZUP#nN?hH&׌.j 2,%M+(2'&_mkM2ifF)k?c 59sKl6]URM̮ã'{-b%C¿A.!ZB]V;sǩj5(Γ-1$" i=Kڦ ?QNoTa 3ŲeO*D/왪zC^LϺ2||rDE_'^RFnPbL93|X@˒3j 0f^ϸTÈ% I^$ v[mOxcV_~a`9nt Mgdz:Ɨhij#sfQF3`\WF lɇ!1.^W kåFVm!dwKX4E]\: Od2#in?KK<4FJ!>qyyYfq.HK<c O̊ =w&a;ݶҏ["&8VX."r)%Đ$X%/Khj>,2~}6 _g^kQbUSCdJ?HGԟTpɻ wNBҌ'}`^Df1yf_} 75B@CyLXSNDȜW!!Dz5@O\D97tƕmp̱3##/̈́lj]C5ߜxepA 7aT>c{dDY~&܁_"Z*~R!x7#4&d@]e*Cuⲳ<"" F]Yb@ƕfUbfx zny.Z(/ t\jjd'CJ2ɟn.?DUlAKEe]ƆZ{ۨIJ_UɩgJ]/h҆MZ#NۅÏc ُx,I!^E`Xif;b~| KnL7+\>-w# LIc5Qhmӗv0"k"BfBvoThQ Y@[w:Cd6V~Cd &ऍ0>4&0 >ù;ΊTN[d*+^ZUhCbǏ.9h F{'nmh;"{kii6*tR訵@?AmB[2(?x`x]%>K@oKUji![':0?` Vp3&zh=A?ݛiLN:q6Nª{cxr#Kݎ"C t=맧ECw=.[f {L)CZU-Fe)0H<ypRktMD쿡#_ `)>X;Qp%[mG8:ޱw:yX8 0[mgpBw#ѫx$Niok'RH#՘wMfyT)Z. 
mWl/.f!]n~"ԇVQ#3ѪPK/XU u}Xv{HAc+(tjj8vfCrm2pD?K{cMiЮ\ۈT3LaD56gҎ}_;]~l|\[㦺ؖ {g.:Y+RV4a~}/]&$V 58C_k6 tHB[̲CX'VEXZ1) `!6 whڒS!:4!s^RF#SNzi6M?PKՉd; l$w^VFRUP- '5 |[?3 O3~fx&b93'Wem]z Nf2gl&4wܳb| $z@:y% P.]|'X-oZ#mr |Ml<_OQ!G2]9F.+3¤6y~)V>x M,lM#*s.vqqK{_dM{УRqؗO$ 贜R0W_AJ(Zg:N!:]SagpEJjk^NBUsL˂U{PϛGܐѥIrNvyJ3=b])I%thj#XG" Lsz$Z5i7o#S \])'{|CG( /v)G U 1^'EUInZ464٥l̡lqG/uP[[*K{Up0޷j\7V 'j>ʪ h:2")eP:}s;JVee ]/ٮ$i%PNTOIiFZ_CTjZ ~|d_)5׃J(Ai]|tnye%-c_~1V#|70j@1./xIOd$Z-KWK9t##xsA\H$flT.*Gh؜BJ`!$9[EW&aAA>Ѵ)W-NNHyQ` y .%E; |n_~%+g3P9iV%e3)c\坃Is1=F3 :=0j&Rv>b Xؽ=x@y҅l|P@d~D5ix@Egߞ(cxc}/fn4_KRÃզ"hfp?өzUhkٔe |ef(8'_Ε8@*hJ(x+ϛ:KH'3n0՛P[u\ם柖=DV1 wwY_]2x ;e1)qCc"mbޭtsZf' RQX<"~޸e9lb@ xwWofL܃r;ud0R??@j^&y[3\ ʌX DDrn¼&Ч˿'ҿJ3 %@g(^Bg_ZΝS㐰9,.4fc!<|[6MrOL|qOsIYO"9F=/59}R-@*vB CA sUȠS{;z7o?*F-mOLo[etʱ66mVyWMQ4lLT_4=Y;6%~oExNV+7,ҟ/~9Q|>k_+XcYlz\jhu{ UO9Uc[,ľ쏂 [%) :˛}s/ƈ}+/RdYCz#b`ڢ-B] "jْJ(^!fi0@4Wȿ842iӤb醫*_F9*OUK0HtB/kڸP&Y׌Rr 9zH˶Jv\+G ; /ؤ-oN-gt T<%܌]W+GIeX=u[ |)ն;^K[TY0Ju:Ly,>aچh%Q#AILSIy:j6ys_WbIqdo<юQ,ɳ-0O=)=(K9$~?i;pJMv 8p˥?cرqޤ[S\RQM9'.4OlL&LH}YEp#ෛ56 ٕaFrΕf8pr{mxQ={]>٘XKK1^&[X[HR釡 fĊ+d0% \++0(# x͢6p-2Wl{uD~ZӸ3Q{ [qhvF b]M%vNQMUřauim-w?ү~%rj6K,Z64p7g EIo9zc/TYZ25 Y"N?_@-[}.#뒅)M.36^a.ȗFfh+k?վC$J7AَHo3FMaξ]&zO5tCY*6g&^<%K"5jhDZϛoT0i#w0;b^J4zb<:\F3I׌|ͺk㯓̇+/ 8ԉhlڴt/kB Aa+6d\z>9M;}C7| ;Pm>w xT_Y4pHDȅa]qI4i CI%so|cWH N)'tEɀOHWQ;iJ~%t h!C*^kX̴܈:qet H[y$`Az?. JJ p,}0&X)_X>Q\lzFHJm(@y8S*K{ {_㭎޺ )t):%wuֲRJ=$@ ϼ2M+uL$%t B~V ʋoz(j乫eY&zk[fCke]}afGQ'is̘h/eXrd^laIS[>u`e)MWgT.sNԷ7U`q׆Mt6*-`b2)Nm$[&r*iPUs t663yٱ/ʱV\f?k|tٌ9.+ ֙&IC@ WKe.c?׎{y -X =RQD;##/bhu$LzQ2$bayf%F~u:S];*dB/뇰8XCVs+]'IUNb>KΘb ;~/jZ`Qwu?;x>bs"-Q>NL >(?KefƓ\sWDacرIǝsIE}u-]sS4.*S;θNf+qlv_<  rIq j >LjCsr;QDMI#P?l\g)UmT6va!uXEU*y !yhY8uZwsv EsYE앩:[LWUrd A&l! +I~ I2Z? n8u<1J}T 鰕iR"QFJOoLM-2V5fF]|_Jȹӗј/-*9`"z#EzVI&+ endstream endobj 1304 0 obj << /Length1 721 /Length2 993 /Length3 0 /Length 1554 /Filter /FlateDecode >> stream xmR{4Ty_d-;% E2fFXb1Ôle^3Wwܙ[j$!Z[Li{Pz 1dՖGCڋ9{~|{C(JX &KLdyD Q#0Bl&enEJe8 `%:#ԨDVjR U )A%C@8!+@€@ q HF5)Cp5BRpW @8 ֬qJɡوnPFah,Q );懇uU-HIFi ^\!qH!|Q,y_~DKiA#XRF[4#C`T%Gh? 2^chs/q\\B(.A%D£PY3"fk΃(`hWʕD'68^ -b\$2e$:$D$  Ĉ̲]Ec5Ŧ5w7u|2s{D ux%qtŬ{qanfWsEvڜMgKO> :%d\7N5 ^n}YiOqtBӾ^O:ꨛP#K* aLEXVPhK]l_UI %|YWU6wg:O5oUtp4-)Vӈ\1ƫUʷ}-%nպ-8#V0FcRU?KMwk~ѽ~ф Br~g⌴J^Sk>xvVozWliOzm*b[lVX[&o_ozt.鬼.6^.uaF}]8i`4K&S&jNMw4욝9` ';nV)S>-kַ|-y/j%skE#Ӄ-,s|˳6 iy7Z?Бv rZEHsTRڸMS6:f?9S-xǴ}+}bwW9tm(-ynKTgP*6.p6d5XxfBy,ů<=f̀Sr5w~ExW|]xv\l= lR?ni$xh3{{֙7u'7 =)= =Cu'74?ͫ42u-i{n}9y}g]Dnbfgi=O԰= endstream endobj 1306 0 obj << /Length1 721 /Length2 28005 /Length3 0 /Length 28592 /Filter /FlateDecode >> stream xlspM-v~mضm۶sb۶m9Ol{gܺU_=l]]T7)=#7@YLIHCF&djbio'bb P75(:̌L0da{O'Ks 1տ5CK[K'Mٕ?Eʦ S)@X^ASRN@). 
73u2(Xd,MMfN8c{;qr7;7S'9dEUT *C;?v.Ml-,Y:yǢabXL-`=?aW!??RLLAښ(m\]L&Nv%{[C m-m<q!bv(?R#ѿYSKWԛ?;jbog/ '(FwZ򏖆N& ;`h/N.NmF㿾Z%$dMcaf0X8|ƮNNv.^{LM=La6VyBZ*D+ig TiVfzo--LpUp.w%[ LME⼖Tn+5D]aˁD޹dRf=r,Cg1(Gډ__zҥ@3=A-6 ރ!ַ(t8 ׼7fU5s^tϔ>9>*FJ.$!ν4zDskdGൂtm RɭD}S|km|D'fݳKOO> )NV\.W_Du IniCN :Z3 &RV4N?U] SRQ[\N!,؍Qj',h39Xz[9@"$C]ej0i!ZL1{`jL_#=P39{q:Dq:- `<]] hNcc<&3(JKt?>ki}j=3U:?{i*j\N5vSV2^E'A*jdmEM.'${{}9t·m?*[spM$/bJ:qДo"C s3)ywq{Gp@zz9w>Z2gT83L3D!Y}eȓ[La[}n4UQg= =J +K8'9o{v=^ ,͂w!Q$}]!آXQQoˋ}ƛ~Ko˜N%?ﰧz쁟%&OwGf08P_EƆ߶~jy(xh )Ya'Q5[n_5ٔ蒂FՂv;漸4WRL74-1;܋oV\ "QFf+b&@(܀_HdPuyjI|l4/me 0 鲑09AmF}U qR~5K:)CG j3; ; RA2oPv&ƃʿ1xq]͓KViV&^z>L\^|B^Sϐ~QڃvdbbjIyzn| mFag3)v&!I*' rj:unğ 5b7C#%RO$W% 0Nz:__PYb1=h@QD:1CCS䧆?s%* [&ʙe;}iUi:!n׻&:3ٜ96U3|욵za#21²>oM%b9fNФ2Ҏ#z3k2H9 BWDzZJQY}(cL5Z-,4[no[2ˑ=IM[/ #^EŭywPPk4cGdo-C,I]3>$ Q4/\5\cc.Ng2z ehMwvKob=Koqf QNNuӫ5i^~qJQ·Jk?ժ<HU F/()3#z *X ^)r ޹1( on"ɥ*.D~$׳%Q8.J$xV+&p h]O ai}FB4*a'ʔ0ye E|gz2-pz8ÇѶGt۝c҇hk¹b킩`4ebGɖ/Ob8f*_4IhʆP=LVMg bI5BCŐl杚e<$ve>@AxyVLYoD{b@M_źOto2.D !tgFE)d9KbEQ\g<!m9tXTs_~cbTվvQ }/W:HYqNKK?C{7˫zBg/LU.ZXwFc}cFĭomFߪB^ mfoZxɓ8<%ҝO|'EL@;5GxH\\|NBSZz@Q HRJ!{/Q('O2rejXy_}EB`{A>+[T8Wheu*R5t7s$yFwh*:4^Lh t&鳟T+bvHwȾy[$-q_Ŏ=\QU)\6HPx#Ѝ lL@|RK:d qb"HpUG.pD>[pfy2@3Jl\vBӕrp0cBaf7ېf[ M"g˛"߹Tn&0\*$w"nr#W"S~oi*'ڌײ!Jp6l/bl^Cd*HS&jv~ƋqB0jh֋$,< /mHO_"c <)U-gP>܂aXmV+ءFÓ6B_,˸k)I!.R]퀦a!=:z'eZ!]=KP{x|b+U뎋R`eٔ@21 /pF-IJD/?Jܖfa-ڤ I߈׆!7! gG axUPbgE3_YOu9%u*#RG4iI^/KA60rS1YW5%ؙuinY)U? 8Fʱʼn! _kUL,9ovv\`ɳbuw̧f`h 1To ;IEϼ"zƏAJKr87D. n'Jl'geJ#M/Tÿ& $.ڇMu&;Fg仗u^ c u$-UYQ`䀚klkgB8-rl2и65B(wz]6x=e!Ϗ`Vs4$ X唪IlUQ(*^1Kaqf&ܳ1DD+jU?\P56B zz<*S'c= g؁]Gq_! _Ei mQǬO;ԘIa7!Ne$[.JyJm `}y"|}kKh)y)Y`gBqaR)X%/.l:IYZRvܛ9y*: ZLd_m627N;V7\YʸןR #㛭p"U)Uqv-gG &[o.[}7a>(0CL7ɎØY#eyuNk eer(;(Ul1QxypaB2ؤ>Qxlr?蠶'z|'љplx鷬MmtjFعY $uwh''ƌ`/*sCQd[OPe̛b^ ymĕL"}],W'_19٣* YJ|>2HD=%&Kta!Oe]>kNl0|RO ,MV=\-WHBbl8 S-A2 3x|z-')wL HN#IBDֿߏqlzH'$\8qH^Ec\1; R4Nv2*Ɛo$Z^`"^At:E8{SXA an5$'u}u.:;WYn3E*nj\ͩ{׋j jڹO*1; =μi?Kǹֿ6NՕ`+L_qfaqE|8vXhwa/j{MBh[֘MH|BPܝ:/>?nwC-<`hX\:h˴M;MFsFG;EtBd9[4 H!d|0' [ ID)o`V%U7+2=&.b #Q՚WczlMpaJzgP4]NLQ&%BVp/78ȕ{,S=Aw1 vr9qlޓ߯DcAd͗f^ =J,U-e~1OElT~tm'Yt-p ,a4 '%!پZ;i E%n'dGxi0;y&87כuzf6yM4s?0~Zn!:~r@V|U_~fY]j:ϕ#uj!:cT;A6ZRNb5Q5,TgP_DhU+M U~ӗ8"(*jBviT {}^oHTwZ㨅OU}' .C !4aa8+@4/;Rҽ@h*u *pqZŐRџU*џ>͉]}v0zP&}&FZGaT6@UChqѕ_@ -34gܢ5)ڠx"==v2CiF_ysgMł (>/nxk}c82qsԢS1#+3|ۓBBJ#UVr%7oG# A0$gC39 [Su#w}dalVy\u]\^ f6zMdʡ=^jT5b!ͪA+奦?'[6ϙOGজUbYMxg UFOv|GL.3"gbgZ v4~5Lb8]dOX*rzP,(%adTP㑥 Ƭ1YbE=/q̧s#&0!?n'w 8xxj}a.FRȅ37TiBgW$f{4s:<Ns0u$!Q*1œskm1OOD[JH/%NPM"4hxA\Ο1  ::p0*YOMݢ#a=I͂7Ff̚OC)7:DYŖ6ם+%sB-{qp.weKC&btՋeɈ{ xN)=%FGޅIN7Y5 WBP3JSRW:iMX;Q_oC]?0rx~S5;Ў68QS-[fS:3p'Dj/m!^WSL_4kxaw"!)OJr4rf~ahҪnԲFUN@G9XNPD 88z{%$>dc!uh550N;YG_1n$A1gJ MbdɺC`*#UЃ8*Q6Ͻ@Ǘv~bϢ@ovIi`>, ?yչ%'+nR>L/"ǃnXBO"n#ipijmn) _:n]$XM^G9E攽/?5+!Z C;G![0id#^N.˹\5IXP0_{:7B]xNv^(R.f 6Tl+7#P2;'tQU\κZO;Iþ+jOۃ+k0PCݭ W/ha Q_ мEN-=i$6Auv__·@ߣ(? ߽Gtر}fm!x})I^@Z4]9gyCwѓDϡr rhŀ5:x6Cc_E VcSW?dNH@kRtA3"QDO,a"nㄐm%O"j&% qO [߽˶љzcvmǨB:>[HL̚nhV$HlR3.SpC #P^&,՘ hΓN5]`^RINwem{+!ӆDhh0vG^A&p\SَzKEe^Ho * {@7҅ @  7sl[<#N@`<Yf-Ra7ŀPa fictߟw2ΛZ0ahϑ_IFvbSTpp ]7v dǏUQd.\.OMipW g1:9 w'^i2K/|3g>k1%L*R2S$>|)yomrt_l V5U? BoMi@kB0e1,eQOU]'N!hh[hrO7I]jb ܘ{"=֢5F.?M$B hO-Bw nQ; لvA:0WP:' m>=:lubމE H~ ]3TBcoizYI!5~ }>b% N 8l36fE"Y>fXs1 p'wd)'b+3A%Jwswm_9F:y[]בd\.0&nA bdГ EVT@M V+M%uGX5m6E'gjßŃQVY#Dw7^Ddo΀]\NWg42`u&DyjGwxt vüOk"" a"n܆,rnuǎzCK+ _K)W1-DVC^̓/ 9PWz|zYDqRcXu1b'.Ftu>v,ު#1ģ|qn0<*kV׃Z? 
q$zbU#A/}w`}i"& @uf7CqN*1Wx$°!HveoҰ 1v!(]P>*VuDҝ 2Y%Nѣn,v.oZè|ƥ9/xvbU# /n\R (X$FOX!j ߝ#Vjg8P v_Ѭ62oZe盄,w-Rӫ&ѵ4lUSes&B_܆$-YxE^-FG;jUbK?-'pCØ0 L@vڣ\@)Ȭ0{4a%6F~ (\V" wV$ aQEK*}-ڈITK d: U+LsDIa㪟bJ'?i^Q'>{t 5#׫`gf (s(V/mݘ23y,`%Nesvyt #>UE}._ 2zr\oD95/t.h*:PnL9,x,Zɒ+y"`1^"t=c^|S?Ee7;;pV "p!S\߯ҁ}R-x1a5~ bi,`geЁ{>H} 0L8X| doY @ÔEch>Vn\reF@ߋ b+ jj݅Iee]|"^! O8KO&\W ‘הm,.tDh[PayQ疜2X)4(9Ł li}R(*lg'<` o›fÎf6?f^x$/iA(iN~gDbe^˖jOÁ@!T0N1&wOV(Uą>= J] LR/:8=$v^Mb/`x+_۳\Ũ4j޽r'q&b5|ednEE'ex#+t^^- +%;`uwM*pI3KZ[gmO{ ٿXU_]_9@ ,I`oܒUϿjzI&[Kyͭ&W% d:f.m. ,S2փMf]> ʮc5h7gԻTꩼp^,)5 | Ҏ^3hAE^=`V4xO$Q`S$*%y iퟎ&=wm}͗K\iVyR*?:l#,F̗tfu%v'UҴS2+&y:Dk mZ3†-r"/zd請B7.&rJ4aZ:q׻/2eKl ZpO L(̯hLdv ;ᑍFQH:ouT_$VJo ?ldNL܄"+,ҔG!%)77 '[=.^ٶ5Fd+k8rôdf'd/4{-6v*7γ,+ސE9v g_MMʃ|yKfNy]YJź e_J_!G? ah-|8kGbClJⶼ(>ɛ[?+z{H[ L 2qKHF1;s:zq>n}gɇ7NXws^#(Y J,0Ye6}- ,Fcz19NM5smN!b= GjDQ)hp OJ<,q*=iܒO 3GzF3:KtlT(𼱋0\O? {D?4&Eґo}e1 o1Nx*E&~2kZF5P$rf`[iFy36úmpvI1-u5y j 4_i[6W'LC,SKlic$Jw};d qlH$D͝^c">.v#Td*бwTϯ^FdhF€ nqTZJe##)Ăc[IFYj#΁4<,C7JZ!BʩÖeMbf/)B̎t}II?XQX caIE!8Aphu];<HVtwU]`oZaLHk nr.vUvZev#NFMsg2M5D&@^N Fi?;3X7Hilb UT7 \GD(LaE[ZsF`sPJOȁ:Ie)݋7BX YSMiD0Ys J̧ /?: WoT>a>Ÿ fq^nn!&*dj^9B MSo{, eT*2L$ؘ?;jG?7)(oA5nLd_AᨒZq_bRoQ Hy~킅FҊ؁MSa߳y6lEɍ޺Q/O7O\.Yuj8sđh$ޡ9%KaBAEWX:}ER7.Xnꪃ2$_1voKKLg:3Či]aB3A%-![0SM++)jm{jS,بYG*&@&Tȥa6㨔W;a[  R !QUBe0֙f_Fi='$"|]@DHZ'fGh\B'ǩzA?PuJ5Ul{#d$3ūIi+ḋƕw\$/ T6p cZQυ*v(>Ff/]U[p3D],s䗥#X?x2?KŚhQ/8x96BvM|dfQ{窠~LRP `v..N"K(+ {4ZJ1O>1Nhb4!;si$T4*}Qs&B 7[ij/Hu+Wlۮ|yˊ%" 0l2 $; sZ+J0N’sRu }{֠`u_Q1]d]\5ҸObGI{o7X8lmCo(UL/kBs#;,wRL2D8~o8'm9*KU,cҁ8 Wh0( w pH~#T֛2n3k).y\QxUE F2L-Me1xt\ ՛@ywޤSCȶmWv-.ʎA)LZ?ρ4./OmdPC.(zyl'3˜eb=,h&bA89*Sko߿ү{Bb"Ǐ%H+Vaۜ-eö6䩖;c.0%o ]}e3eL[)L*1v[@@t]+-%~@pUS*,H`0$)kLST;o/'y-,[:I9H]# ʗ(%9ZzC_Bȕ{zH T#_ eL߇)*#qs(i4$BGv{[%Y߾M0b+`t4I'Z&Jb/ ?۩mgQmn06T]4+P1ʪ 1$F!PS|~g$K0p]^$s×2li<fE^UN ϙ ֯g9ޗ$j2~M%;/S$~5 M(?Nj4T}G|yI`5^*JT.՗U} hŒX֤evױ*A?<$ۗLCNgLqz)j.ẽ$T<28KdR_g:K2y ] k5/i.i3N&Zud!#[uLنd$u!}cihUQb] =ٷϊ~#QC\Xw^}gzZW<涮˻Ymt\>N?OVbҲ!hKxch0X8'a Zsu i&dD I,|F Y%H^MKYFXc@K^)Қ&dAc$i[r}~p_]5 ۋiۢ:P+g.*SMvi3, Pwwj#A+5~#$zy[*.tdre)*'OO`MH zj̖aqjM_IţhCӻ {mNeF>G_Ď_ +,B0*zxjj?k+OEkA݈rZb(TgC:ߊ"i)bFC{SڰR* c}D~;LL\3)Nqr$?1rW1jj{Nt/JVA3>[w5"HO[EBQp)~P"8!Y/"'hk<uf1.³4=ޒ%Zg:]a5X34sK2&mGEg">Cԇ#INY-јH}#I\%XW#Os$m<4cFueωn4G`s2P΁_0p_V e1k3T mQkIB\uc(l:@})c JވMQm.SήwP0n{ȅ<'Rm>1 ^'XGZMl8ǫlȮ<.sX|C fX`Kci L;lT{5OD:I@dTŭ:4 ,'V\2v7 {xJ]Q|O>Zِ >714sꔴ3 Cg& _BX;Bބnnyzjx,cefX }vNT_|AJjUaD50l7n]%L+`WYzd}L:I8ygᦥ;|LaAP{=4̌Ft矱Y.-Rpz|ƔpIqlʝ&VrDt/ඎQQIzY͕0_g{o=3ZUQQx{F(;+&C*CĎh lSJS'7}fZ6W :cN )ol5%P|pz_YuY<"W|2!UQg:M.gS調:j?9+S&zuUEu3n,!d giX4GĿcXΰƣSY+T'O/Z#ՙH! 
Į*%A\;yaCm׹%~ ײ^C9pGu-g;/8RZ\(]d8: &OV -[5+)@Cb]g5SbqL$zkH՟(L+O Ph7 !B ;o;t'E;[LOi&19XF8lb(̨+ߺUp[OsY_.QPi8t klZ;ZZ^:\Aw"??c96Ϋ|OM]bo`VhE !@.k7,l8}dxy-0C1'Q܃ĔB7fFHELSx=eaQ@|?\eU}\''ӎ$Ą9JQU"mmY30Dn10jV!dcʑf35 @י5fx!HKAEGMQ*tfD9G&k( i+F/P:1Y:֭aGՀOAHKr^}+ ~t+8T#V7CSq4a?ergt[{j 7X}-ROH+4AY9Y_c]*ߵ%FS'#ecΗÂX ]& AW_ ^# TL;'`WUb-sk*ɄxG( 8ʡq2H DNQsS yZ%:3pϒBvrX0w+#܄x}w5~'=|ID[AAWsl:SQt2ڧ_t 9Фi}II^JW&G4[[/Sh Fp?B4p s0ܐdS Acn 3զd5Y*]*EɇHbiE/w$H1jXE:gj |i$JWXuœ !%pjt^t[.Lȧ3scںki+#8ơr G7bECH@6xD~]ܡH &[6м暓LEE\ӹK%+@?\U==L f{M ˨usgt}g(P '#sP)>2;kq.$.G;{{舘 zj6oy씆׊d6#Yi^!Y&u?O= g#k%&~upm}}3?&!Lŵ-[%,=rJ=CLΚS4 wi,=>0#0ȵe1;lmqv!,z8ne/mGd8k|Gd$!72R 'dz.7BBPY-[4$)o5TP=t0.W:l!R&)*GY;c,i[+S=_&A[QpWM6i$cqnfKMܝ z!~e SYDݛص85]݄>͆_kc=?y8sEo'T}D[&&NIjż=-]9)t=yاK @XgIt26e40%)JFyWy '?̗_zaϱEvֱ^My=-aj5aX^ 0%];,|(B I a4GJ3cIk iaRG K|qoZ4AubXJx'cCdURa&`Qʬ\ xczؙm>3h$!XŶ!:[AA@Hp27tSa%:3Lq u޴on0>x{5s` .+k)}}a5v-JT& H؏K .$g<tU睐d *L-DcCOv|bїB t?S(kSQn;'kSL/&G+w2kp읨ĪS/~e`WJ0U<+5̿q}M/egec ?u#JEo\˥:{5=ɔrM3#wO *3 C@Vx,iqi?n!AÏ~Ww'C^pɒLd5I٠/,L97m ѷ̹{=ZKD\WHJ]n.]f"h&y>5a aN7=$7+yoy<-2B!=pE{۩@eÄ%&QPVj)gf_QKfV= jp`\k̚I< ,<@Y;{aEQbj ۔OH6 mw LND^!)RH崪A@KpͺS˘u7LlfWI|T 6iWU?AEՙ|:OzBh膘^syYŎ С R:!r[S81jKypֈs`3TQةΩ$Z d; e|@6b2IF ]Lgw뎗@3/"|~rñҐ.ӽzץ nLj0]PF"J ?_ K߿rX+R}ك*画'qlxQJ)}`0=F\\t. :8& Pcj |6-$[{)&\QEC -\E%g7 NCֵ-p:Ɓ'm /]M@h ENsL aQ9/Q'V^Ý,ѠZow/(k4zB|~0Đȓ,cЖo=eh֯LK%*sR5 On;)ރ,㹌!ՕCH;NZ]c#&Vm|>vwcBi"ۿ/[TG^2}/j *rSW)}l-p  E?6)!æse\m).+vF$N*_MGFugJ-ƍTZĘ㻲Twudˮz?=/ =] p?ysB:^Yw3'06" CDsah-hЃ|oeqrT9l>̔8T`~+֔dق,7?=kȵͰؖ'C?PLOX亼fuzVvW/ &⢠R9h hB_:0f91C(ZA=9<$!`Y!*t<˓?R I&4&Q5yvLܰYmJȎuWlx.pk*\Q1g{qw^]ƉFHau*]ۅ91A;kpݚNG[3y1*R#['jcG.J H~PtnCaZ-{EE(M>Yv`,p Rx1a}keЪ9.b mː@2_>h3Y^٨Ӳ7uy] ҹB@Jϫ<}b$giC29'A ߧ^@<*;жte#CⓦjqkMU_G>jD'�ZyPr U6GQ+'k~r eX>ls*'`a"u3{ ~-ckX w W!heч OSz) b~ ©&G(b:Y\VrYɲ 0>ftMhhF*4V5/Хsq384lUH wH!Bf{6.p&WeCvLpvXW߇LMG.TA `Adю+*VCzb n{) N]jƜЖd1u'%z0ܩ7"H )e DsATHD串ƅXAQn$4p 6uKO^ ~TS5e+3`aRq4ƿj*E`u6i \<ȝ:n xgȸ!QliUĵC$Qh46lf2΁yv[$hMrBkM#7(L,QmOQi\+Ƥ-LɮE[ M1)m12l'Ʀu̠8|e=%)k~ A|ged#0 f;xs;;j;kE[۞r3̣H~^\3#b|H6WHᑼF!HT]Gmmgi6c I,ox^^=(ߴl] yÆvDE+tiyL#Z"P]U=Le.t7k[6}LksBlv_'OG8Qp>c-Wnj<ޡ~EKG2=1baUלU5fKV輊^+/ERPWc?=Q\JHv;E}㡸4;de$$o vx{_ \n_h_ihK6rV߱ڏ4nɓ*Bo p!`[Y @#zCxZŧa<LWaLo XiP:æ+ZAro4{ 5;\fE*Yr[f(_s$WǚkbpU8+ W5NJsmR<$ӇG_ef8j]y GwCdaA&[ '.Y uBE-E#l)]ܑXm\K? 3. X'/WAF)n̪y"'/wxsWeDIdB=)Cjlz$*%RM-1@˙UB[X,jnt[JG`ȂOϲ:ڷ6|j&Î{?A/er a&V+J3e&Hޯ+?kqRE8+ܵɱ%lB36]p 5,dkt[a o+ EqІlD32Z|Ft)UuV"~5Mi':3`^Vni)uDoKl=m 4حJ)eVBm2n(>LCp>~=bwӁ}@W>Kerf!pqs3D|R, %=zҌ^@\L܏@I5|Ⱥc#uEa5j,ﶺV~pwdow`o!gŷf\XLI\ &oFbP-$_~ߒvVfOMB3q]wl}y-<;NCSrMi\;񋁺̰taη8TgBQ\a=e$ZlֶUݩ~d\ ?$9xИahRU5;ÂWpT3qeɂՋ{3$dpH]VT?ԥ$%wzIeP<3ky@˜os`|mzm/ڷfWRr[%\_ElXc㚑]jW%Ruv&# wta]p>*7yj-IaOs?j:iZUm5 330]6Id.5z1Srӗ F%|쁸Nh /=ay!6.PȦ q7))4 8Z{he 0D+{ZѼ`h鎲9E^r߅V6\|R;j. ZbyN\ZBLc5$(K!(5z"IըſЃlk 껃M?ŏ ;RuԑF` p!ҫ}`xN~0T9yBTI[bvgd mr4S #p>z+MmA⊫MT0.16%6*zqz]ip7sΪ۽" OMHr_xIG5=ēT`^dkj BQx$l& ?DW|EV=Zʿov^18M[_NW J?)Ԭ?h VLйʊ}ƙ6y+dv]ǵ%$~2Pc38,sAp{qKzDj!ʲ$%|)$ E膴De 3ƕ'#2_j]EN6 9e}QLS"p3;5<.=سyX^^'ɍaNR Aa}ktaXF:eH;s NYK!+X9TK( I#$xyWdXɐh?`xzg%8g{s9PI}ez{6Z sk'㦶~ĭ 0=}ƍh7yqimAF@4޶A'm3e5ۗvve㸌OL٣)և Z^`:]cy}8S|PFif@6( DdYvߣHcyF[.;ݷ;H`H`/BZ̺Lˋ7/yo C}')|݁efެR}r|n &%=)z{f,LwPPC̾6깘A^-{N,v1[b% Qh #M›kyc6llJ$VCv TWN(DjAΤ宀\s` E\')״EsRAmѧ#B :D"%Q`Sr'.)R]wBv`m~RZI՟[ry7dv<^|LSsA/0Ɉm(K\|r|]Q}dJ&k %_&ȓXpB7y;@rdزéx`h d)WZ_V2 'XۚOto:sW^zORI' Z(<$L_po)JMٟS@[pc`I$F<׵r(ddM"E/Y^"ZFhM. 
1<|Mr/7lG¢b$+^lB́F/_ )Uq:ӔtW@G_oL!3%:-%-g\(G;2&NnvCk6:%4<ڟfz[W'b(;,܁ Flۇ;B,xigs{Ċ`^*j[#|7e LJ[kdoz~Ic^6t`Kě1/u?yl8@=06`hJ"0ֆQlż0`m' uG@l9jV{g%EPZjI6ssw8mVߐ!cR͞sZUj{#r\_ᇇؕ;n;Ci|e.RR'.yg p2^wOP)z߳(moQsȷCHTڪgqb~Njb=qw8nUH"dxD8'R$% Oz7 =&3@L_=cj'l~l/dfl^f;AOLx̬0@tfO}ri.iM,a>=ӑOfnG6JӛMg9.Pc" 2`q9 n|uc[XĭƷ]f1ꏹ2]5_Xe%0 c'\RW]+կW8uu5YFC1|V bd*9\S=+ܥUnsQInVUN ј|s"K: KqNj ;2ң/uF+jm,B(K `j/]5}k$h`kWq~F#z 4iDB{oRxU`m_ sۦ͜&4&w OU.v8|U""G!,"O endstream endobj 1308 0 obj << /Length1 721 /Length2 11964 /Length3 0 /Length 12519 /Filter /FlateDecode >> stream xmwL]]-Pܽ8/׋KqECqw(ϛy$'Wr pwQt2330T%TYL   4,fx '͔"@ pup2𹹹 92\.R.@- - &Aƶ%W[+S)H 0wlLͬən@_b ;@\QM`loř/h ttFL w 02u-e3Wr3_M;A;cl=v1_]r)YD5>yݿDm=gCFaI9 9@寖 i+[=!7_.o21Lz?7=3; 蚺@@{;zMLySCK} ʠh&i;/uѬ- n 'L6oHcc$ xOEdeU/?k . 5pL /#]Wh2sNd$JZ{P7f%3t>;S!&OayA[{aڷߚ7Z诤oZ_ty+oPH0@ 浜FNF/ArO6ncұ" I+2pRy.*(ET(#SaXCs2x,#q("M+r5 3HѪyկFG\'>QC:,*: IIox%>^S{~lI5*[!iMTdvld0: ONb+oO ӕꄹ&BjEla4 C=}y`~, ķ11L ׭W&߯0ec8Wɥd| DnZP[9Ps"PY%jX':vdo(],CR|zw?Uo-ǽ"8ҟ_Lcm I`o%{5$xP؄_1yDJ( B&FvMJj ~ZH`3Vxt}p|pJV}uG:N?_9@yDؾ@1@. D,n5.lᶨA@&nwcX}XB?S:Ucr,]I'.ض6 3-qooB?J~J ǧ".Vxu&ہwUq &AQ4 Vaޜ8:zS'ƠМM|&#ABuV}dn&W/6Cb;&l*~bf#! ޙB I[{4g#J 3[u[wΊoШ2[aV7Vz^J𔡖A;0|v gq-V̦d1TR|* ]AQhP)0J|;4Kyb`0=G+B C "G kJ< ;@i[E&#@ĄZADTOjh+r)Nw8Cu:G;vѶz`=:JC> ޕ_Zuۊ(+j6d,4' cΏܵ +J0g&D[+᠐ / sNp۹; CVFb gaW]vA+VĻW:@|k7ވ'8Nw65ufFv0-mߜC,rHT2x) CȢ?aބ9KtWE9lo^\S7tþ7\yBL*rsIx~/ojAdnC"nc\0Q⤁/žAFv j Y?|_Dicp #( G#bj8wC!#nkcZ=HZ jZRx:dmYn796e߄51,'c JaU%\ L8 NӋD+V5Rޒ"$5܊f+95Sۧn$.4Sj |smw]ؤ/u xkJUlit|ȭK>'CrOֻ0I1m/wPb'?֦3hv*{dVjq./ ,x3ӷ5?%7IdrcQ]h߇9uE#eVB#׽Hau bE9k +~e)\z~Aׯ+ nt%;+/}RX& V: 涑X G~?θkDMQu%_aưK]1bnbXRI3w &IMF6!s4RqȼfcqGݤ۶D<*H^pسY_Jِ1;k40.sIU}6!xd-T.X͍A % 7z,VkU\ ޟKm1o:ё훑"SSTTmz0WWʏT) $ޡ/w=f$dtaޮ6}FWOWY0y,$Nt1*KҺ{˶o3YzzK&R՜6e1r2 DB[B֡.˫̮3&db)U Xr7lGm/x @ݓy>6eM~V2aw-o<7G$oI۔)BaB;.ESIM+$p,r@PT@ZDk#k~[nD9{9'u;0Ͳ뜠>G'UkVDyб@k`ryE}yTץRYUUąBj  iɼTRĜCCS4!F|tV;+-y`vJ 4[W4B`v.7A49/4iLu| HY])^]4W/dgjZ9Qߝg1+G5Rp'-:miCU$Z< }Cr֍^q7?}:+d,{B6vUf?^ E.WJJ: RIbquN9n @`K`&:T p`zTTAzQ8ۘ#f  LyR蘒U,)%3dW~XbFvIN̲.~bTpCXj֜2PP dm2|P6s`Kmjv?mжA)\"!YĆ-$>= ї'NW$՚U UdH 5ڔ/a'NJdk6";޾j$ckDTzVK~&JG-h--XMIaU 8ګ6i[5kڛT<4^bHNK;ۜdmYi}_KºR{_0(Vӥ=r̹!}x)v2܇I̤jnk41I8O-* I|RTv~ݾHE]>ݷ>YE:G/uAjW i;eG_B²W'qki¾[jHWzoz9u :{# 1c敽3 4]79*\r6ց7`d`賕 -Y}`Zv]/gf*Xeyƴ Q%f=ۏ<@thGc߂5"%"oΑe SQrB~ 6q0:}V\6H˕5V_A"""4@zԵq{ޏ(v =#tIo¤a)Ϝ%'M?+xv!Е_C`\o4j. mW\YQ:5gW {0nA^u*V#>C~u%m'Ɖ0C_He5ͯأ( ejnaޘH =Ys*~/ ՑBl PUwo*Y`Eg~N8 wTVic`s,dǽVIƟJE=PE(U U~ZrM0; Ec8E{/$i<г(45 sFT¦"7W5Fia_8b>\{_%TNΓʥ&(ÃH3>)d]}Eihwh..0_ |/U%l0I)+!KxO @թx;K՞΂s imte˂[ Wa)c: ,!bs|3mRiJ[/Zo/vӕ_vlsm<$rF{O=v b}`hC@nlgmv\J \Ce N߫9DmyT^B̏IYҸLS_5Y{,i;;i0 p)(>gp?shЋdbð,hu,hPGFkb8J/8'(RW=L]{KyeO)]u"KzCvw ltB$bhE[MnjӰ#en'xpNRP\)$oKNFl9+܌"+\[dRP4eXUӰ=w UNO(mD,H+e~N8TCo$r~\:ǐv51E !'; *8% ꏣn# }MK2Cw>A(x\ s~a@$3u7`I>{OApvDBeu y{2b(-o/w4v:)fnV[y]a8* X7/⤖"[y;wU~A_&M3'`<6OӇSPb.DhFڶΨa -`h!M2#=&!|\2\$甏l9:`h XWVPDcYuCoӣJ;32.MsbrŨ,θ~lDIzC{_lpVDᩪc XiM@+0;Dӯ>dK)8q0CsoP֟lUh;4&@"O jvC}{Iryj2S6.Dd򊟑T_9; E-OhļE@c"f*F0|֬Ñ@#Н=x/tc'9c]9~0$<_-jjRLm.7]W`PI Un?"ͬ㐧 .*~Ƥ"XbI7JZ>c@'\+=ȴ`^zcɮ_ޝ_U5K젳H\ݖpТAOe/>tBIZ,E΂Dݸ14K60isw CK1Ǧ=!EXZκfxݔngœ"HMκ^ƒ-u{';,*<4yC8` lchlK[m}GjcԀY-~@yPz,70: ȀBƲv_MSL6\3#' |cB>dٰH1x)B+% udՏ:|=U .Im6bS1-kUuX\p%GWsvo/b+\oL)]pI_=Ǿ{B&V&X'IB=Z3'*Tev2\bKY 6a3^Ge}z-U SDK?' 
N'u; #n,o*JBK?_ȅl9Qf DjE`;E %|PV)f5RX@$Ғ:"[)tgTi W7Pr'0ሙ6v:VZ };=C*!.@5-EQߥt}>=[؂j5.CbמKNgݘn An[_@YmCʕ;%N";'I!GJMtuĘק rqYА`wؿPBcj_ɈB7'Bj glxm~}֣PD>PG'6NWE09ﭠX*rQvlQ+ ^&PwٓRÑH5L߆ӁQt[meЋx7%YuRi:[&&!roԣa*eg K>sV~LIfOzAkA԰cš{B9q?J0nJ1TE7-+:,ڪM_XM+KI*P9%=IX{UB#nr"`U)Yk,$/^M9-XI R[yLvݮԇϩIVYrτo:$.yNŧ4w[f2;+ɔX5/K'm3Ak327ae;xhI1Kjۨh}"R^JC2I&i??|IPmOU$E'`Uq5:Ōb 5ۜו-TT&6R\% ZX|q56] '#mSouPʂcy_]ڥ;T=l2 Xf)Bw 5`Ͽ{ܭb2s^U% -?SF}OF]B>R=Huĸ)'gk;8{)ibI;+sM%m-lbEtkyн\&c&q1Y6 :)Uz:oK "=fz2wJ:FV&H6jƳzY1Ax!ɴ;,|}!4n8<{2m Rꚉ]1A{57~c $ލjt jU#˲wk=M1gw"=?6tۭm /PL6  Sn]OP{JX+?8.d|h)+Nz6xR: M],'J3'"RA]74kOL6U rbPw[dWP/7.1ߴ/,G`y.AhI\~MrѦgë&i#b);eCt(}>rhn݆Au3k|#%JB )/vrM3t`\'Hk&`W[wwOlvi`/pnJQ2OZ&.AyL_./3eb% 7Ș|v wfWlX %4y HM\|WRWye}-dI3}$72:n}G 5x407jr$V&D'tI yyffh >'U|z`NDlZS|8GZd|\]7/jmWTn6ъ"]=gpy(ݞ|z@؊F'|CXQa3h3Ң& SFYc'=k{.JCk0f<^@Zlh&t$"x u*o"#hmn]63:\vs*HZ2P┫JF͝h]s@%/@bmi˘}0k*N#~3dy;s*-8XDAOj젽/i/p웦_ziR {gE#+BT(xkkI*Yu G#'j2椯/,~\~91m(++ J.CZY0UB}^ bR>FI&Ded]3!{-=B@ۢq%@O[?^-(yxgVL%v U<'^#N M, B׺NQ \t:(XI[0ЙIV$AVgP$bPOCPGNk7u5 Yȑx B \FpjY˰f\p lA!Ż;ot޳a]&t=һPŖ4Фʹdqzs|nSLt1߲wY im0cH]ULv5@XѬ-S@ry3ء\Ȩ\:B"vwܝρ RMiʹˇo~z 3VGz>d0z91Su'W5 #wⶽgn!@3|9u۵Nә (UGW5ue70$E鳹]MzꐏgE}&i,SSXKsGSz }+Q@5HfLP(y{I80399LK+:?^m&AS`76wʘ BƌAϟ鞃w1l[g`BVɪ**$A.?ꧣ.mqd*#0 WrB.ϱYK5]K:ˇ#uV<5#J[A$w3ˏBBtBZ;ܣv]W~|flKBtVGd|?#[YNU$\Wt YοA!"`l1 endstream endobj 1310 0 obj << /Length1 721 /Length2 9517 /Length3 0 /Length 10098 /Filter /FlateDecode >> stream xmvcpmmcǶݱmg'ٱm۶aF6;>޺֘cԤ$twUr22$UXx,Lb 8@V6 +<%@@cJOi 4rh::[Zݿ19 1-R - #(RHA@[)@d;8lLͬɅ g׿̝ "J 1fu1 /Յ/dllLk7o `fe 0YX3K7{s׿fnI%毆3_"ssts9@U;P@;+[/2:[U!+I+O_́%'(̬]5s#ρ2 bJ:fV5׿Z'  `Q>FvV+;7k wGg@ O)ڊ)uFkxD\܌ +tՐ:~"lm 0x49Ki\F^e׫JKC%&"UԍkZ2GNwYohRP[?h ѳu?A{saŗ&ϻk!;WxFŕAVZ&['qgGjGv4^g.N}CM=VPNRyZ*WF.$ƒ=CI H(/&I4[<ߝmɓPځ3m%Է#ω \ljwѻy.'Rh5fJcgKui{嗛<穓󖳖fƽ%L|%yIɺVnɆ" uKý԰a-CFYCI`A~їj Cf(8. JK5eO5xF,j.q:se[J&g5#au݄6ƿ=(g!DK]=bFXAYX [ ᕿ4iXhHN̙u#R$GJ#tFZ N{z1(rxGP=&mNGH %Ox4-cfrl3=^.>:6 !]vLf$e]qovimtsC=|.71l=enHдAEE|O Isl…s_&idI*\줤b-V d+`d{zx>8OtB(:D|v#h)`XR\do7x gWg-C j nC8̣5Gh:A{ \苐FhRurvnzkuٶ٧|rY JMh٢5)fQ ٪z1\\}B7#pe1df܀*`,.h' ~pG?=7/0߳:/>cZqnDP$>Ʈe?Xx,]qnbYX\[MU?)598U =7SaOyC}ߓXѼ( =]xC@SCM}RUacj,{uė[a=h;,}9pqngWv}6q q  Ȃz\W2M붎~zs1qv_P9L6uអ Y(8&4%#oQkZ8+oEDoJ'B,*]>^AMk˜}pAZ2e@kѱ$V!<\cݑnAu7ց]rqXڛqe*rgG}K$a{k|Qh E !TEG{u ^fC3. xN 겚vA{sWUJ)_wYoaz^HƖK {x7I4—3U]u >l?)XWob9[-J;r(`\`оr&]OrBkB{=BU21s0Aш|jv]nt ' 䲴>JD\q'="cq.TNGIBnV6vXW9[nq^^`0ȏ!Ku[[HC>L?-~@E+=g.XoJㄵ%Żx<}j0(8尻"s)Rc9ʰOu.7LSΎ|c6I|DBC,H/h KER?f12 Ģ7mBPɽa۹.݇x>.z.gO.ZKFN;$`>2uw7Z"kNpd`SM!u|e38Quq>U#xFF0}\LXuՔQ;i/|_\ctkCO޳q)zJ㠦3~1yY)TVb/Kw7gBDfo˪DbcDH?3\L?ec'tuzǷGNȤ#1 g#=`SE2+ϼ*$MʋNBNcV#^R|{at։,IAL_VB)4ۅSkIsrjY?Z@-I<deXI̟=gW^>UdYV\'27ݯ xS;DD鈪sSJk"b NG9 :¸!1d}a}U4UL֦QOwasGrϘkh*eL<=GueDRzs-ilgtˏs2qMy%"o!g(mR+dcz =Ajx۵½Ki69҇xS,^C5'o!@f aԨr1Ѿ`᪮oȤE7.OwFSxSzD6'Ҕ: 'rZ^^DȞAz!DfM/> jak_0ﯓu(e (/ oʯj+h>֭d>`.zGg`WR*ѪwT;p selFNn% l±aXB̟4|;gz0q;٠avQ⛲([=YTәDm.D8TGɗZ>㗒yy(F sSjx 4g-aKwbn AW YʌuI^]?4ԔS;6ݞ"bDokz[~o<^c4r3v1EIn Ap̞*NԶ2IgG{`߃Ù<햼-Z' ʅ^'#XKh DE f@f(7|6/;фWB V <)rDi?HīpW7Z1Xb #_< ]30,k.FSIku^Z2v& QYc`m/2ϹFH+_pQ?8-0=E&X隞¢L3lw .H%7e簰73o49ZV"@8p`>)ւKv?FPge I[u>S s~X DҸk]']XƋ#(ӿ;4?P:S F֗I= :ʳ%fWdILLـQss{Z"'Sukϝ'RRCc`:3u|ң9K3#=Ljd2L_` Z*}?`s~ y tSg GF~a{/TnY*i"ZtW-\Ыx V9fk6ynU{ӤE.NZ& ףkS-G"qiܔp8ja1b9F쑱}2fz oOH^t+GY쮤ka4ģ/wQ4*}`atc rR]޾=∏>>- ;NΡ<)*HzYk_~dH9 GU7nщ_W] ?SOk?%Qǡ+V3UJJ3MHy/93`eʱ%O&rr8֪z@"(ebmvZ_GK~<w.˪W}_KŇb_?O_(Q%-n)bBYЌ.9K VҠ1 ^}NO$":a_LwN;ڗK|Xk"CF&.0RSNՌOnZ].jN)aHfС3$lk3Ckiˏpri@)WegGND0TQ-6 q.![f˿.(g3"zŶ׬4-06Kn[NR(. ? 
Coq-Equations-1.3.1-8.20/doc/equations.tex000066400000000000000000000064631463127417400201100ustar00rootroot00000000000000\documentclass{report}
\usepackage[T1]{fontenc}
%\usepackage{fullpage}
\usepackage{amsmath,amssymb}
\usepackage{qsymbols}
\usepackage{code}
\usepackage{coq}
\usepackage[color]{coqdoc}
\usepackage{hyperref}
\usepackage{abbrevs}
\usepackage{natbib}
\usepackage{minitoc}
\usepackage[utf8x]{inputenc}
% \makeatletter
% \renewcommand{\tableofcontents}{\noindent\@starttoc{toc}}
% \makeatother
\def\texttau{\ensuremath{\tau}}
\def\textDelta{\ensuremath{\Delta}}
\def\textGamma{\ensuremath{\Gamma}}
\setlength{\coqdocbaseindent}{1em}
\def\eqnversion{\texttt{1.3beta2}\xspace}
\author{Matthieu Sozeau}
\date{\today}
\title{\Equations \eqnversion Reference Manual}
\begin{document}
\maketitle
\def\coqlibrary#1#2#3{}
\def\Equations{\texorpdfstring{\name{Equations}}{Equations}}
\chapter*{Introduction}
\label{cha:introduction}

\Equations is a toolbox built as a plugin on top of the \Coq proof assistant
to program and reason on programs defined by full \emph{dependent}
pattern-matching and \emph{well-founded} recursion. While the primitive core
calculus of \Coq allows definitions by \emph{simple} pattern-matching on
inductive families and \emph{structural} recursion, \Equations extends the set
of easily definable constants by allowing a richer form of pattern-matching
and arbitrarily complex recursion schemes. It can be thought of as a twin of
the \name{Function} package for Isabelle that implements a \emph{definitional}
translation from partial, well-founded recursive functions to the HOL core
logic. See \cite{BoveKraussSozeau2011} for an overview of tools for defining
recursive definitions in interactive proof assistants like (\Coq, \Agda or
\Isabelle).

The first version of the tool was described in
\cite{sozeau.Coq/Equations/ITP10}, the most recent one is described in
\cite{equationsreloaded}.

This manual provides a documentation of the plugin commands
(\autoref{cha:manual}) followed by a tutorial using basic examples
(\autoref{cha:gentle-intro}). More elaborate examples are available at
\url{http://mattam82.github.io/Coq-Equations/examples}. This manual describes
version \eqnversion of the package.

\section*{Installation}

Equations is available through the \texttt{opam}\footnote{\url{http://opam.ocaml.org}}
package manager as package \texttt{coq-equations}.
To install it on an already existing \texttt{opam} installation with the \Coq repository, simply input the command: \begin{verbatim} # opam install coq-equations \end{verbatim} If you want to use it with the \href{https://github.com/HoTT/HoTT}{HoTT library for Coq}, simply install the \texttt{coq-hott} package before \Equations. The development version and detailed installation instructions are available at \url{http://mattam82.github.io/Coq-Equations}. \doparttoc \parttoc \tableofcontents \chapter{Manual} \label{cha:manual} \input{manual} \chapter{A gentle introduction to Equations} \label{cha:gentle-intro} The source of this chapter that can be run in Coq with Equations installed is available at: \url{https://raw.githubusercontent.com/mattam82/Coq-Equations/main/doc/equations_intro.v} \input{equations_intro} \paragraph{Going further} More examples are available at \url{http://mattam82.github.io/Coq-Equations/examples} \bibliography{biblio} \addcontentsline{toc}{chapter}{Bibliography} \label{cha:bibliography} \bibliographystyle{myplainnat} \end{document} Coq-Equations-1.3.1-8.20/doc/equations_intro.v000066400000000000000000000630741463127417400207730ustar00rootroot00000000000000(** printing funelim %\coqdoctac{funelim}% *) (** printing Derive %\coqdockw{Derive}% *) (** printing Signature %\coqdocind{Signature}% *) (** printing NoConfusion %\coqdocind{NoConfusion}% *) (** printing simp %\coqdoctac{simp}% *) (** printing <= %$\Leftarrow$% *) (** printing <=? %$\le?$% *) (** [Equations] is a plugin for %\cite{Coq}% that comes with a few support modules defining classes and tactics for running it. We will introduce its main features through a handful of examples. We start our Coq primer session by importing the [Equations] module. *) From Coq Require Import Arith Lia Program. From Equations Require Import Equations. (* begin hide *) Check @eq. Require Import Bvector. (* Derive DependentElimination for nat bool option sum Datatypes.prod list *) (* end hide *) (** * Inductive types In its simplest form, [Equations] allows to define functions on inductive datatypes. Take for example the booleans defined as an inductive type with two constructors [true] and [false]: [[ Inductive bool : Set := true : bool | false : bool ]] We can define the boolean negation as follows: *) Equations neg (b : bool) : bool := neg true := false; neg false := true. (* begin hide *) Check neg_graph. Check neg_graph_equation_1. Check neg_graph_equation_2. Lemma neg_inv : forall b, neg (neg b) = b. Proof. intros b. funelim (neg b); now simp neg. Defined. (* end hide *) (** [Equations] declarations are formed by a signature definition and a set of _clauses_ that must form a _covering_ of this signature. The compiler is then expected to automatically find a corresponding case-splitting tree that implements the function. In this case, it simply needs to split on the single variable [b] to produce two new _programming problems_ [neg true] and [neg false] that are directly handled by the user clauses. We will see in more complex examples that this search for a splitting tree may be non-trivial. *) (** * Reasoning principles In the setting of a proof assistant like Coq, we need not only the ability to define complex functions but also get good reasoning support for them. Practically, this translates to the ability to simplify applications of functions appearing in the goal and to give strong enough proof principles for (recursive) definitions. 
[Equations] provides this through an automatic generation of proofs
related to the function. Namely, each defining equation gives rise to a
lemma stating the equality between the left and right hand sides. These
equations can be used as rewrite rules for simplification during proofs,
without having to rely on the fragile simplifications implemented by raw
reduction. We can also generate the inductive graph of any [Equations]
definition, giving the strongest elimination principle on the function.

I.e., for [neg] the inductive graph is defined as:

[[
Inductive neg_ind : bool -> bool -> Prop :=
| neg_ind_equation_1 : neg_ind true false
| neg_ind_equation_2 : neg_ind false true
]]

Along with a proof of [Π b, neg_ind b (neg b)], we can eliminate any
call to [neg] specializing its argument and result in a single command.
Suppose we want to show that [neg] is involutive for example, our goal
will look like:

[[
b : bool
============================
neg (neg b) = b
]]

An application of the tactic [funelim (neg b)] will produce two goals
corresponding to the splitting done in [neg]: [neg false = true] and
[neg true = false]. These correspond exactly to the rewriting lemmas
generated for [neg].

In the following sections we will show how these ideas generalize to
more complex types and definitions involving dependencies, overlapping
clauses and recursion.

* Building up

** Polymorphism

Coq's inductive types can be parameterized by types, giving polymorphic
datatypes. For example the list datatype is defined as: *)

Inductive list {A} : Type := nil : list | cons : A -> list -> list.

Arguments list : clear implicits.
Notation "x :: l" := (cons x l).
Notation "[]" := nil.

(** No special support for polymorphism is needed, as type arguments are
treated like regular arguments in dependent type theories. Note however
that one cannot match on type arguments; there is no intensional type
analysis. We can write the polymorphic [tail] function as follows: *)

Equations tail {A} (l : list A) : list A :=
tail nil := nil ;
tail (cons a v) := v.

(** Note that the argument [{A}] is declared implicit and must hence be
omitted in the defining clauses. In each of the branches it is named
[A]. To specify it explicitly one can use the syntax [(A:=B)], renaming
that implicit argument to [B] in this particular case. *)

(** ** Recursive inductive types

Of course with inductive types comes recursion. Coq accepts a subset of
the structurally recursive definitions by default (it is incomplete due
to its syntactic nature). We will use this as a first step towards a
more robust treatment of recursion via well-founded relations. A
classical example is list concatenation: *)

Equations app {A} (l l' : list A) : list A :=
app nil l' := l' ;
app (cons a l) l' := cons a (app l l').

(** Recursive definitions like [app] can be unfolded easily so proving
the equations as rewrite rules is direct. The induction principle
associated to this definition is more interesting however. We can
derive from it the following _elimination_ principle for calls to
[app]:

[[
app_elim :
forall P : forall (A : Type) (l l' : list A), list A -> Prop,
(forall (A : Type) (l' : list A), P A nil l' l') ->
(forall (A : Type) (a : A) (l l' : list A),
 P A l l' (app l l') -> P A (a :: l) l' (a :: app l l')) ->
forall (A : Type) (l l' : list A), P A l l' (app l l')
]]

Using this eliminator, we can write proofs exactly following the
structure of the function definition, instead of redoing the splitting
by hand.
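
For instance, assuming the [funelim] and [simp] tactics behave as
described above for [neg], a proof of associativity of [app] can
plausibly be written by following the clauses of the definition (the
lemma name [app_assoc] and the exact tactic script are only an
illustrative sketch, not material generated by the plugin):

[[
Lemma app_assoc {A} (l l' l'' : list A) :
  app (app l l') l'' = app l (app l' l'').
Proof.
  funelim (app l l'); simp app; congruence.
Qed.
]]

Here [funelim (app l l')] generates one subgoal per clause of [app],
together with an induction hypothesis for the recursive call; [simp
app] rewrites with the defining equations of [app] and [congruence]
closes both subgoals.
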
This idea is already present in the [Function] package %\cite{Barthe:2006gp}% that derives induction principles from function definitions. *) (* begin hide *) Check app_graph. Check @app_graph_equation_1. Check @app_graph_equation_2. (* end hide *) (** ** Moving to the left The structure of real programs is richer than a simple case tree on the original arguments in general. In the course of a computation, we might want to scrutinize intermediate results (e.g. coming from function calls) to produce an answer. This literally means adding a new pattern to the left of our equations, made available for further refinement. This concept is known as with clauses in the Agda %\cite{norell:thesis}% community and was first presented and implemented in the Epigram language %\cite{DBLP:journals/jfp/McBrideM04}%. The compilation of with clauses and its treatment for generating equations and the induction principle are quite involved in the presence of dependencies, but the basic idea is to add a new case analysis to the program. To compute the type of the new subprogram, we actually abstract the discriminee term from the expected type of the clause, so that the type can get refined in the subprogram. In the non-dependent case this does not change anything though. Each [with] node generates an auxiliary definition from the clauses in the curly brackets, taking the additional object as argument. The equation for the with node will simply be an indirection to the auxiliary definition and simplification will continue as usual with the auxiliary definition's rewrite rules. *) Equations filter {A} (l : list A) (p : A -> bool) : list A := filter nil p := nil ; filter (cons a l) p with p a => { filter (cons a l) p true := a :: filter l p ; filter (cons a l) p false := filter l p }. (** By default, [Equations] makes definitions opaque once they are defined, to avoid spurious unfoldings, but this can be reverted on a case-by-case basis, or using the global [Set Equations Transparent] option. *) Global Transparent filter. (** A more compact syntax can be used to avoid repeating the same patterns in multiple clauses and focus on the patterns that matter. When a clause starts with `|`, a list of patterns separated by "," or "|" can be provided in open syntax, without parentheses. They should match the explicit arguments of the current problem. Under a `with` node, they should match the variable(s) introduced by the `with` construct. When using "|", the ";" at the end of a clause becomes optional. *) Equations filter' {A} (l : list A) (p : A -> bool) : list A := | [], p => [] | a :: l, p with p a => { | true => a :: filter' l p | false => filter' l p }. (** A common use of with clauses is to scrutinize recursive results like the following: *) Equations unzip {A B} (l : list (A * B)) : list A * list B := unzip nil := (nil, nil) ; unzip (cons p l) with unzip l => { unzip (cons (pair a b) l) (pair la lb) := (a :: la, b :: lb) }. (** The real power of with, however, comes when it is used with dependent types. *) (** * Dependent types Coq supports writing dependent functions; in other words, it gives the ability to make the result _type_ depend on actual _values_, like the arguments of the function. A simple example is given below of a function which decides the equality of two natural numbers, returning a sum type carrying proofs of the equality or disequality of the arguments.
The sum type [{ A } + { B }] is a constructive variant of disjunction that can be used in programs to give at the same time algorithmic, boolean information (are we in branch [A] or [B]) and _logical_ information (a proof witness of [A] or [B]). Hence its constructors [left] and [right] take proofs as arguments. The [eq_refl] proof term is the single proof of [x = x] (the [x] is generally inferred automatically). *) Equations equal (n m : nat) : { n = m } + { n <> m } := equal O O := left eq_refl ; equal (S n) (S m) with equal n m := { equal (S n) (S ?(n)) (left eq_refl) := left eq_refl ; equal (S n) (S m) (right p) := right _ } ; equal x y := right _. (** Of particular interest here is the inner program refining the recursive result. As [equal n m] is of type [{ n = m } + { n <> m }] we have two cases to consider: - Either we are in the [left p] case, and we know that [p] is a proof of [n = m], in which case we can do a nested match on [p]. The result of matching this equality proof is to unify [n] and [m], hence the left hand side patterns become [S n] and [S ?(n)] and the return type of this branch is refined to [{ n = n } + { n <> n }]. We can easily provide a proof for the left case. - In the right case, we mark the proof unfilled with an underscore. This will generate an obligation for the hole, that can be filled automatically by a predefined tactic or interactively by the user in proof mode (this uses the same obligation mechanism as the Program extension %\cite{sozeau.Coq/FingerTrees/article}%). In this case the automatic tactic is able to derive by itself that [n <> m -> S n <> S m]. Dependent types are also useful to turn partial functions into total functions by restricting their domain. Typically, we can force the list passed to [head] to be non-empty using the specification: *) Equations head {A} (l : list A) (pf : l <> nil) : A := head nil pf with pf eq_refl := { | ! }; head (cons a v) _ := a. (** We decompose the list and are faced with two cases: - In the first case, the list is empty, hence the proof [pf] of type [nil <> nil] allows us to derive a contradiction by applying it to reflexivity. We make use of another category of left-hand sides, which we call _empty_ patterns, denoted with [!], to inform the compiler that the type of the variable is empty in this case. In general we cannot expect the compiler to find by itself that the context contains a contradiction, as it is undecidable %(\cite{DBLP:conf/plpv/Oury07,DBLP:conf/birthday/GoguenMM06})%. However, in this case, one could also write an empty set of clauses for the [with] subprogram, as Equations applies a heuristic in case of an empty set of clauses: it tries to split each of the variables in the context to find an empty type. - In the second case, we simply return the head of the list, disregarding the proof. *) (** ** Inductive families The next step is to make constraints such as non-emptiness part of the datatype itself. This capability is provided through inductive families in Coq %\cite{paulin93tlca}%, which are a similar concept to the generalization of algebraic datatypes to GADTs in functional languages like Haskell %\cite{GADTcomplete}%. Families provide a way to associate to each constructor a different type, making it possible to give specific information about a value in its type. *** Equality The alma mater of inductive families is the propositional equality [eq] defined as: [[ Inductive eq (A : Type) (x : A) : A -> Prop := eq_refl : eq A x x. ]] Equality is a polymorphic relation on [A].
(The [Prop] sort (or kind) categorizes propositions, while the [Set] sort, equivalent to $\star$ in Haskell, categorizes computational types.) Equality is _parameterized_ by a value [x] of type [A] and _indexed_ by another value of type [A]. Its single constructor states that equality is reflexive, so the only way to build an object of [eq x y] is if [x ~= y], that is if [x] is definitionally equal to [y]. Now what is the elimination principle associated to this inductive family? It is the good old Leibniz substitution principle: [[ forall (A : Type) (x : A) (P : A -> Type), P x -> forall y : A, x = y -> P y ]] Provided a proof that [x = y], we can create an object of type [P y] from an existing object of type [P x]. This substitution principle is enough to show that equality is symmetric and transitive. For example we can use pattern-matching on equality proofs to show: *) Equations eqt {A} (x y z : A) (p : x = y) (q : y = z) : x = z := eqt x ?(x) ?(x) eq_refl eq_refl := eq_refl. (** Let us explain the meaning of the non-linear patterns here, which we already slipped into the [equal] example. By pattern-matching on the equalities, we have unified [x], [y] and [z], hence we determined the _values_ of the patterns for the variables to be [x]. The [?(x)] notation is essentially denoting that the pattern is not a candidate for refinement, as it is determined by another pattern. These particular patterns are called "inaccessible". When they are variables the inaccessibility annotation is optional. *** Indexed datatypes Functions on [vector]s provide more striking examples of this situation. The [vector] family is indexed by a natural number representing the size of the vector: [[ Inductive vector (A : Type) : nat -> Type := | Vnil : vector A O | Vcons : A -> forall n : nat, vector A n -> vector A (S n) ]] The empty vector [Vnil] has size [O] while the cons operation increments the size by one. Now let us define the usual map on vectors: *) Arguments Vector.nil {A}. Arguments Vector.cons {A} a {n} v : rename. Notation vector := Vector.t. Notation Vnil := Vector.nil. Notation Vcons := Vector.cons. Equations vmap {A B} (f : A -> B) {n} (v : vector A n) : vector B n := vmap f (n:=?(0)) Vnil := Vnil ; vmap f (Vcons a v) := Vcons (f a) (vmap f v). (** Here the value of the index representing the size of the vector is directly determined by the constructor, hence in the case tree we have no need to eliminate [n]. This means in particular that the function [vmap] does not do any computation with [n], and the argument could be eliminated in the extracted code. In other words, it provides only _logical_ information about the shape of [v] but no computational information. The [vmap] function works on every member of the [vector] family, but some functions may work only for some subfamilies, for example [vtail]: *) Equations vtail {A n} (v : vector A (S n)) : vector A n := vtail (Vcons a v') := v'. (** The type of [v] ensures that [vtail] can only be applied to non-empty vectors; moreover, the patterns only need to consider constructors that can produce objects in the subfamily [vector A (S n)], excluding [Vnil]. The pattern-matching compiler uses unification with the theory of constructors to discover which cases need to be considered and which are impossible. In this case the failed unification of [0] and [S n] shows that the [Vnil] case is impossible. This powerful unification engine running under the hood makes it possible to write concise code where all uninteresting cases are handled automatically.
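For instance (a small variation on [vtail], added here purely as an extra illustration), a head function restricted to non-empty vectors likewise needs a single clause, the [Vnil] case being ruled out automatically by unification:
[[
Equations vhead {A n} (v : vector A (S n)) : A :=
  vhead (Vcons a _) := a.
]]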
*) (** ** Derived notions, No-Confusion For this to work smoothly, the package requires some derived definitions on each (indexed) family, which can be generated automatically using the generic [Derive] command. Here we ask to generate the signature, heterogeneous no-confusion and homogeneous no-confusion principles for vectors: *) Derive NoConfusion for nat. Derive Signature NoConfusion NoConfusionHom for vector. (** The precise specification of these derived definitions can be found in the manual section %(\S \ref{manual})%. Signature is used to "pack" a value in an inductive family with its index, e.g. the "total space" of every index and value of the family. This can be used to derive the heterogeneous no-confusion principle for the family, which makes it possible to discriminate between objects in potentially different instances/fibers of the family, or to deduce the injectivity of each constructor. The [NoConfusionHom] variant derives the homogeneous no-confusion principle between two objects in the _same_ instance of the family, e.g. to simplify equations of the form [Vnil = Vnil :> vector A 0]. This last principle can only be defined when pattern-matching on the inductive family does not require the [K] axiom and will otherwise fail. ** Unification and indexed datatypes Back to our example: the equations and the induction principle are of course simplified in a similar way. If we encounter a call to [vtail] in a proof, we can use the following elimination principle to simplify both the call and the argument, which will be automatically substituted by an object of the form [Vcons _ _ _]: [[ forall P : forall (A : Type) (n : nat), vector A (S n) -> vector A n -> Prop, (forall (A : Type) (n : nat) (a : A) (v : vector A n), P A n (Vcons a v) v) -> forall (A : Type) (n : nat) (v : vector A (S n)), P A n v (vtail v) ]] As a witness of the power of the unification, consider the following function which computes the diagonal of a square matrix of size [n * n]. *) Equations diag {A n} (v : vector (vector A n) n) : vector A n := diag (n:=O) Vnil := Vnil ; diag (n:=S _) (Vcons (Vcons a v) v') := Vcons a (diag (vmap vtail v')). (** Here in the second equation, we know that the elements of the vector are necessarily of size [S n] too, hence we can do a nested refinement on the first one to find the first element of the diagonal. *) (** ** Recursion Notice how in the [diag] example above we explicitly pattern-matched on the index [n], even though the [Vnil] and [Vcons] pattern matching would have been enough to determine these indices. This is because the following definition, which does not match on [n], fails: *) Fail Equations diag' {A n} (v : vector (vector A n) n) : vector A n := diag' Vnil := Vnil ; diag' (Vcons (Vcons a v) v') := Vcons a (diag' (vmap vtail v')). (** Indeed, Coq cannot guess the decreasing argument of this fixpoint using its limited syntactic guard criterion: [vmap vtail v'] cannot be seen to be a (large) subterm of [v'] using this criterion, even if it is clearly "smaller". In general, it can also be the case that the compilation algorithm introduces decorations to the proof term that prevent the syntactic guard check from seeing that the definition is structurally recursive. To alleviate this problem, [Equations] provides support for _well-founded_ recursive definitions which do not rely on syntactic checks. The simplest example of this is using the [lt] order on natural numbers to give a recursive definition of the identity function: *) Equations id (n : nat) : nat by wf n lt := id 0 := 0; id (S n') := S (id n').
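(** As a quick sanity check (a small example lemma with the hypothetical name [id_is_id], added only for illustration), the rewrite rules registered in the [id] hint database already suffice to reason about this well-founded definition by a plain structural induction: *)

Lemma id_is_id (n : nat) : id n = n.
Proof. induction n; simp id; congruence. Qed.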
(** The function [id] is defined by well-founded recursion on [lt] on the (only) argument [n] using the [by wf] annotation. At recursive calls of [id], obligations are generated to show that the arguments effectively decrease according to this relation. Here the proof that [n' < S n'] is discharged automatically. Well-founded recursion on arbitrary dependent families is not as easy to use, as in general the relations on families are _heterogeneous_: they must relate inhabitants of potentially different instances of the family. [Equations] provides a [Derive] command to generate the subterm relation on any such inductive family and derive the well-foundedness of its transitive closure. This provides course-of-values or so-called "mathematical" induction on these objects, reflecting the structural recursion criterion in the logic. *) Derive Subterm for vector. (** For vectors for example, the relation is defined as: [[ Inductive t_direct_subterm (A : Type) : forall n n0 : nat, vector A n -> vector A n0 -> Prop := t_direct_subterm_1_1 : forall (h : A) (n : nat) (H : vector A n), t_direct_subterm A n (S n) H (Vcons h H) ]] That is, there is only one recursive subterm, for the subvector in the [Vcons] constructor. We also get a proof of: *) Check well_founded_t_subterm : forall A, WellFounded (t_subterm A). (** The relation is actually called [t_subterm] as [vector] is just a notation for [Vector.t]. [t_subterm] itself is the transitive closure of the relation seen as a homogeneous one by packing the indices of the family with the object itself. Once this is derived, we can use it to write recursive definitions on vectors that the guard condition couldn't handle. The signature provides a [signature_pack] function to pack a vector with its index. The well-founded relation is defined on the packed vector type. *) Module UnzipVect. Context {A B : Type}. (** We can use the packed relation to do well-founded recursion on the vector. Note that we do a recursive call on a subterm of type [vector A n] which must be shown smaller than a [vector A (S n)]. They are actually compared at the packed type [{ n : nat & vector A n}]. The default obligation tactic defined in [Equations.Init] includes a proof search for [subterm] proofs, which can resolve the recursive call obligation automatically in this case. *) Equations unzip {n} (v : vector (A * B) n) : vector A n * vector B n by wf (signature_pack v) (@t_subterm (A * B)) := unzip Vnil := (Vnil, Vnil) ; unzip (Vector.cons (pair x y) v) with unzip v := { | pair xs ys := (Vector.cons x xs, Vector.cons y ys) }. End UnzipVect. (** For the diagonal, it is easier to give [n] as the decreasing argument of the function, even if the pattern-matching itself is on vectors: *) Equations diag' {A n} (v : vector (vector A n) n) : vector A n by wf n := diag' Vnil := Vnil ; diag' (Vcons (Vcons a v) v') := Vcons a (diag' (vmap vtail v')). (** One can check using [Extraction diag'] that the computational behavior of [diag'] is indeed not dependent on the index [n]. *) (** *** Pattern-matching and axiom K *) (** To use the K axiom or UIP with [Equations], one _must_ first set an option allowing its use during dependent pattern-matching compilation. *) Module KAxiom. (** By default we disallow the use of UIP, but it can be enabled. *) Set Equations With UIP. Module WithAx. (** The user must declare this axiom themselves, as an instance of the [UIP] class. *) Axiom uipa : forall A, UIP A. Local Existing Instance uipa.
(** In this case the following definition uses the [UIP] axiom just declared. *) Equations K {A} (x : A) (P : x = x -> Type) (p : P eq_refl) (H : x = x) : P H := K x P p eq_refl := p. End WithAx. (** Note that the definition loses its computational content: it will get stuck on an axiom. We hence do not recommend its use. Equations allows however to use constructive proofs of UIP for types enjoying decidable equality. The following example relies on an instance of the [EqDec] typeclass for natural numbers, from which we can automatically derive a [UIP nat] instance. Note that the computational behavior of this definition on open terms is not to reduce to [p] but pattern-matches on the decidable equality proof. However the defining equation still holds as a _propositional_ equality, and the definition of K' is axiom-free. *) Equations K' (x : nat) (P : x = x -> Type) (p : P eq_refl) (H : x = x) : P H := K' x P p eq_refl := p. Print Assumptions K'. (* Closed under the global context *) End KAxiom. (** *** Options [Equations] supports the following attributes: - [universes(polymorphic | monomorphic)] for universe polymorphic or monomorphic definitions (also depending on the global `Universe Polymorphism` flag). - [tactic=tac] for setting the default tactic to try solve obligations/holes. By default this reuses the `Obligation Tactic` of Program. - [derive(eliminator=yes|no, equations=yes|no)] to control the derivation of the graph and elimination principle for the function, and the propositional equalities of the definition. Note that `eliminator=yes` forces `equations=yes`. *)Coq-Equations-1.3.1-8.20/doc/lambda.sty000066400000000000000000000016661463127417400173410ustar00rootroot00000000000000 % Pi-calculus \newcommand{\picalc}{$\pi$-calculus} \newqsymbol{`n}{\boldsymbol{\nu}} \renewcommand{\bar}[1]{{\overline #1}} % Telescopes \def\tele#1{\overrightarrow{#1}} % Pure type systems \newcommand{\PTSsorts}{\mathcal{S}} \newcommand{\PTSaxioms}{\mathcal{A}} \newcommand{\PTSrules}{\mathcal{R}} \newcommand{\hnf}[1]{#1^{\downarrow}} \newcommand{\nf}[1]{#1{\downarrow}} \newcommand{\SP}{\textsc{SP}} \newcommand{\eqbre}{`=_{\beta\pi\eta}} \newcommand{\eqbr}{`=_{\beta\pi}} \newcommand{\eqqbp}{`=^{?}_{\beta\pi}} \newcommand{\eqbp}{`=_{\beta\pi}} \newcommand{\noteqbp}{\not`=_{\beta\pi}} \newcommand{\redbr}{"->"_{\beta\pi}} \newcommand{\redbrt}{"->>"_{\beta\pi}} \newcommand{\redbp}{"->"_{\beta\pi}} \newcommand{\redbpt}{"->>"_{\beta\pi}} \newcommand{\eqbres}{`=_{\beta\pi\eta\SP{}}} \newcommand{\eqbrs}{`=_{\beta\pi\SP{}}} \newcommand{\eqbpers}{`=_{\beta\pi\eta\rho\sigma}} \newcommand{\freevars}[1]{\ensuremath\mathcal{F}\mathcal{V}(#1)} Coq-Equations-1.3.1-8.20/doc/manual.tex000066400000000000000000000523421463127417400173540ustar00rootroot00000000000000\section{The \kw{Equations} Vernacular} \label{manual} \def\cst#1{\coqdoccst{#1}} \subsection{Syntax of programs} \def\kw#1{\coqdockw{#1}} \def\vec#1{\overrightarrow{#1}} \def\vecplus#1{{\overrightarrow{#1}}^{+}} \def\textcoloneq{\texttt{:=}} \def\userref#1#2{\coqdockw{with}~#1~\textcoloneq~#2} \def\var#1{\coqdocvar{#1}} \def\figdefs{\begin{array}{llcl} \texttt{term}, \texttt{type} & t, ~τ & \Coloneqq & \coqdocvar{x} `| \lambda \coqdocvar{x} : \tau, t, R `| ∀ \coqdocvar{x} : \tau, \tau' `| \mathbf{\lambda}\texttt{\{}\,\vecplus{\vec{up} \coloneqq t}\texttt{\}} \cdots \\ \texttt{binding} & d & \Coloneqq & \texttt{(}\coqdocvar{x}~\texttt{:}~\tau\texttt{)} `| \texttt{(}\coqdocvar{x}~\textcoloneq~t~\texttt{:}~\tau\texttt{)} \\ \texttt{context} & Γ, Δ & \Coloneqq & 
\vec{d} \\ \texttt{programs} & progs & \Coloneqq & prog~\overrightarrow{mutual} \texttt{.} \\ \texttt{mutual programs} & mutual & \Coloneqq & \coqdockw{with}~p `| where \\ \texttt{where clause} & where & \Coloneqq & \coqdockw{where}~p `| \coqdockw{where}~not\\ \texttt{notation} & not & \Coloneqq & \texttt{''}string\texttt{''}~\textcoloneq~t~(\texttt{:}~scope)?\\ \texttt{program} & p, prog & \Coloneqq & \coqdoccst{f}(\texttt{@\{} univ\_decl \texttt{\}})?~Γ~\texttt{:}~τ~(\coqdockw{by}~\textit{annot})?~\textcoloneq~clauses \\ \texttt{annotation} & annot & \Coloneqq & \kw{struct}~\var{x}? `| \kw{wf}~t~R? \\ \texttt{user clauses} & clauses & \Coloneqq & \vecplus{cl} `| \texttt{\{}\,\vec{cl}\,\texttt{\}} \\ \texttt{user clause} & cl & \Coloneqq & \coqdoccst{f}~\vec{up}~n?~\texttt{;} `| \texttt{|} up~\vec{(\texttt{,} `| \texttt{|})~up}~n?~\texttt{;}? \\ \texttt{user pattern} & up & \Coloneqq & x `| \_ `| \coqdocconstr{C}~\vec{up} `| \texttt{?(}\,t\,\texttt{)} `| \texttt{!} \\ \texttt{user node} & n & \Coloneqq & \textcoloneq~t~\overrightarrow{where} `|\, \userref{t~\vec{, t}}{clauses} \end{array}} In the grammar, $\vec{t}$ denotes a possibly empty list of $t$, $\vecplus{t}$ a non-empty list. Concrete syntax is in \texttt{typewriter} font. \begin{figure}[h] \centering$\figdefs$ \caption{Definitions and user clauses} \label{fig:usergram} \end{figure} The syntax allows the definition of toplevel mutual (\kw{with}) and nested (\kw{where}) structurally recursive definitions. Notations can be used globally to attach a syntax to a recursive definition, or locally inside a user node. A single program is given as a tuple of a (globally fresh) identifier, an optional universe annotation, a signature and a list of user clauses (order matters), along with an optional recursion annotation (see next section). The signature is simply a list of bindings and a result type. The expected type of the function \cst{f} is then $∀~Γ, τ$. An empty set of clauses denotes that one of the variables has an empty type. Each user clause comprises a list of patterns that will match the bindings $Γ$ and an optional right hand side. Patterns can be named or anonymous variables, constructors applied to patterns, the inaccessible pattern \texttt{?(}t\texttt{)} (a.k.a. "dot" pattern in \Agda) or the empty pattern \texttt{!} indicating a variable has empty type (in this case only, the right hand side must be absent). Patterns are parsed using \Coq's regular term parser, so any term with implicit arguments and notations which desugars to this syntax is also allowed. A right hand side can either be a program node returning a term $t$ potentially relying on auxiliary definitions through local \kw{where} clauses, or a \kw{with} node. Local \kw{where} clauses can be used to define nested programs, as in \Haskell or \Agda, or local notations. They depend on the lexical scope of the enclosing program. As programs, they can be recursive definitions themselves and depend on previous \kw{where} clauses as well: they will be elaborated to dependent let bindings. The syntax permits the use of curly braces around a list of clauses to allow disambiguation of the scope of \kw{where} and \kw{with} clauses. The $\lambda\{$ syntax (using a unicode lambda attached to a curly brace) extends \Coq's term syntax with pattern-matching lambdas, which are elaborated to local \kw{where} clauses. 
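As a small illustrative sketch (the function \cst{rev\_acc} and its helper \cst{go} below are hypothetical, not definitions taken from the library), a local \kw{where} clause introducing an accumulator-passing helper could be written as:
\begin{verbatim}
Equations rev_acc {A} (l : list A) : list A :=
  rev_acc l := go l nil
    where go (l' acc : list A) : list A :=
    go nil acc := acc ;
    go (cons x xs) acc := go xs (cons x acc).
\end{verbatim}
Here \cst{go} is a structurally recursive nested program; it only depends on the type \cst{A} from the lexical scope of the enclosing clause.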
A local \kw{with}~$t$ node essentially desugars to a program node with a local \kw{where} clause taking all the enclosing context as arguments plus a new argument for the term $t$, and whose clauses are the clauses of the \kw{with}. The \kw{with} construct can also be nested by giving multiple terms, in which case the clauses should refine a new problem with as many new patterns. \subsection{Generated definitions} Upon the completion of an \Equations definition, a few supporting lemmas are generated. \subsubsection{Equations} Each compiled clause of the program, or of one of its subprograms defined implicitly by \kw{with} or explicitly by \kw{where} nodes, gives rise to an equation. Note that the clauses correspond to the program's splitting tree, i.e. to the expansion of pattern-matchings, so a single source clause catching multiple cases can correspond to multiple equations. All of these equations are registered as hints in a rewrite hint database named $\cst{f}$, which can be used by the \coqdoctac{simp} or \coqdoctac{autorewrite} tactic afterwards. The $\coqdoctac{simp}~f$ tactic is just an alias to $\coqdoctac{autorewrite with}~f$. The equation lemmas are named after the position at which they appear in the program, and are of the form $\cst{f}\_clause\_n\_equation\_k$. In case the program is defined by well-founded recursion, \Equations first generates an unfolded definition named \cst{f\_unfold} corresponding to the 1-unfolding of the recursive definition and shows that it is extensionally equal to \cst{f}. This unfolding equation is used to generate the equations associated to \cst{f}, which might also refer to the unfolded versions of subprograms. Well-founded recursive definitions can hence generate a set of equations that is not terminating as an unconditional rewrite system. \subsubsection{Elimination principle} \Equations also automatically generates a mutually-inductive relation corresponding to the graph of the programs, whose first inductive is named $\cst{f}\_ind$. It automatically shows that the functions respect their graphs (lemma $\cst{f}\_ind\_fun$) and derives from this proof an elimination principle named $\cst{f}\_elim$. This eliminator can be applied directly using the \tac{apply} tactic to prove goals involving a call to the function(s). One has to provide predicates for each of the toplevel programs and the \kw{where} subprograms (\kw{with} subprograms' predicates follow from their enclosing programs). In case the program requires only a single predicate, one can use the $\tac{funelim}~call$ tactic to launch the elimination by specifying which call of the goal should be used as the elimination target. A most general predicate is inferred in this case. \subsection{Logic parameterization} \Equations comes with three possible instances of its library: one where equality is \Coq's standard equality \ind{eq} in \Prop, another where equality is proof-relevant and defined in \kw{Type}, and finally a last instance which is a variant of the second, reusing the definitions of the \texttt{Coq-HoTT} library. The first can be used simply by requiring \cst{Equations.Prop.Equations} (or \texttt{From Equations Require Import Equations}), while the other two can be used by importing \cst{Equations.Type.All} or \cst{Equations.HoTT.All} instead. The libraries are qualified respectively by the \cst{Equations.Prop}, \cst{Equations.Type} or \cst{Equations.HoTT} prefixes. When referring to classes in the following, one can find their definition in the respective prefix.
In other words, \cst{Classes.EqDec} might refer to \cst{Equations.Prop.Classes.EqDec}, \coqdoccst{Equations.Type.Classes.EqDec} or \coqdoccst{Equations.HoTT.Classes.EqDec} depending on the logic used. \subsection{Local Options} The \kw{Equations} command takes a few options using the syntax \[\texttt{\#[}attributes\texttt{]}~\kw{Equations}\texttt{(}opts\texttt{)} ~progs~\ldots\] One can use the \kw{Equations?} syntax to use the interactive proof mode instead of obligations to resolve holes in the term or obligations coming from well-founded recursive definitions. In \Equations v1.3 and higher, the command supports the following attributes: - \texttt{universes(polymorphic | monomorphic)} for universe polymorphic or monomorphic definitions (also depending on the global \texttt{Universe Polymorphism} flag). - \texttt{tactic=tac} for setting the default tactic to try solve obligations/holes. If not set, the default tactic is the same as for \texttt{Program}, controlled by the \text{Obligation Tactic} command. The \texttt{tac} can be either an unquoted unqualified identifier or a string delimited by quotes to provide a qualified name, as in \[\texttt{\#[tactic="Program.Tactics.program\_simpl"] Equations foo ...}\] - Flags \texttt{derive(equations=yes|no, eliminator=yes|no)}, both enabled by default, to control the derivation of the propositional equalities of the definition and its graph and elimination principle, respectively. Note that \texttt{eliminator = yes} forces \texttt{equations = yes}. The \emph{deprecated} syntax $opts$ is a list of flags among: \begin{itemize} \item $\texttt{ind} `| \texttt{noind}$: Generate (default) or do not generate the inductive graph of the function and the derived eliminator. \item $\texttt{eqns} `| \texttt{noeqns}$: Generate (default) or do not generate the equations corresponding to the (expanded) clauses of the program. \texttt{noeqns} implies \texttt{noind}. \end{itemize} \subsection{Global Options} The \kw{Equations} command obeys a few global options: \begin{itemize} \item \texttt{Equations Transparent}: governs the opacity of definitions generated by \kw{Equations}. By default this is off and means that definitions are declared \emph{opaque} for reduction, avoiding spurious unfoldings of well-founded definitions when using the \texttt{simpl} tactic for example. The \texttt{simp} $\cst{c}$ tactic is favored in this case to do simplifications using the equations generated for $\cst{c}$. One can always use \kw{Transparent} after a definition to get its definitional behavior. \item \texttt{Equations With Funext} (since v1.2): governs the use of the functional extensionality axiom to prove the unfolding lemma of well-founded definitions, which can require extensionality of the functional. By default \emph{on}. When this flag is off, the unfolding lemmas of well-founded definitions might fail to be proven automatically and be left to the user as an obligation. To prove this obligation, the user is encouraged to use the \tac{Equations.Init.unfold\_recursor} tactic to help solve goals of the form \[\cst{FixWf}~x~f = \cst{f\_unfold}~x\]. \item \texttt{Equations With UIP} (since v1.2): governs the use of instances of \texttt{Classes.UIP} derived by the user, or automatically from instances of the decidable equality class \texttt{Classes.EqDec}. By default \emph{off}. When switched on, equations will look for an instance of $\ind{UIP}\~A$ when solving equalities of the form \[\forall (e : x = x :> A), P e\], i.e. 
to apply the deletion rule to such equations, or to unify indices of constructors for inductive families without a \ind{NoConfusionHom} instance. It will report an error if it cannot find any. Note that when this option is on, the computational behavior of \Equations definitions on open terms does not follow the clauses: it might block on the UIP proof (for example if it is a decidable equality test). The rewriting equations and functional elimination principle can still be derived, though, and are the preferred way to reason about the definition. \item \texttt{Equations WithK} DEPRECATED. Use \texttt{With UIP} and declare your own version of the \cst{UIP} axiom as a typeclass instance. Governs the use of the \texttt{K} axiom. By default \emph{off}. The computational behavior of definitions using axioms changes entirely: their reduction will get stuck even on closed terms. It is advised to keep such definitions opaque and use the derived rewriting equations and functional elimination principle to reason on them. \item \texttt{Equations Derive Equations} (since v1.2) This sets the default for the generation of equations, governed by the local \texttt{derive(equations)} attribute. \item \texttt{Equations Derive Eliminator} (since v1.2) This sets the default for the generation of the graph and functional elimination principle associated to a definition, governed locally by the \texttt{derive(eliminator)} attribute. \end{itemize} \section{Derive} \Equations comes with a suite of deriving commands that take inductive families and generate definitions based on them. The common syntax for these is: \[(\mathtt{Equations})?~\mathtt{Derive}~\ind{C}_1 \ldots \ind{C}_n~\mathtt{for}~\ind{ind}_1 \ldots \ind{ind}_n.\] This will try to generate an instance of each type class $\ind{C}_i$ for each of the inductive types $\ind{ind}_j$. \paragraph{Note} The \texttt{Equations} prefix can be used to disambiguate from other plugins defining \texttt{Derive} commands such as \texttt{QuickChick}. We assume $\ind{ind}_i : \Pi \Delta. s$. The derivations provided by \Equations are: \begin{itemize} \item \ind{DependentEliminationPackage}: generates the dependent elimination principle for the given inductive type, which can differ from the standard one generated by \Coq. It derives an instance of the class \texttt{DepElim.DependentEliminationPackage}. \item \ind{Signature}: generates the signature of the inductive, as a sigma type packing the indices $\Delta$ (again as a sigma type) and an object of the inductive type. This is used to produce homogeneous constructions on inductive families, by working on their packed version (total space in HoTT lingo). It derives an instance of the class \texttt{Equations.Signature.Signature}. \item \ind{NoConfusion}: generates the no-confusion principle for the given family, as a heterogeneous relation. It embodies the discrimination and injectivity principles for the total space of the given inductive family: i.e. $\Sigma \Delta, \ind{I}~\bar{\Gamma}~\Delta$ for a family $\ind{I} : \forall \Gamma, \Delta "->" \kw{Type}$ where $\Gamma$ are (uniform) parameters of the inductive and $\Delta$ its indices. It derives an instance of the class \texttt{Classes.NoConfusionPackage}. \item \ind{NoConfusionHom}: generates the \emph{homogeneous} no-confusion principle for the given family, which embodies the discrimination and injectivity principles for (non-propositional) inductive types.
This principle can be derived if and only if the no-confusion property on the inductive family instance reduces to equality of the non-forced arguments of the constructors. In case of success, it generates an instance of the class \texttt{Classes.NoConfusionPackage} for the type $\ind{I}~ \Delta~\Gamma$ applicable to equalities of two objects in the \emph{same} instance of the family $\ind{I}$. \item \ind{EqDec}: this derives a decidable equality on $C$, assuming decidable equality instances for the parameters and supposing any primitive inductive type used in the definition also has decidable equality. If successful, it generates an instance of the class (in \texttt{Classes.EqDec}): \begin{verbatim} Class EqDec (A : Type) := eq_dec : forall x y : A, { x = y } + { x <> y }. \end{verbatim} \item \ind{Subterm}: this generates the direct subterm relation for the inductive (assuming it is in \kw{Set} or \kw{Type}) as an inductive family. It then derives the well-foundedness of this relation and wraps it as a homogeneous relation on the signature of the datatype (in case it is indexed). These relations can be used with the \texttt{by wf} clause of \Equations definitions. It derives an instance of the class \texttt{Classes.WellFounded}. \end{itemize} \section{\texttt{dependent elimination}} The \tac{dependent elimination} tactic can be used to do dependent pattern-matching during a proof, using the same engine as Equations. Its syntax is: \begin{figure}[h] \tac{dependent elimination} \textit{ident} \texttt{as} [ up | .. | up ]. \end{figure} It takes a list of patterns (see figure \ref{fig:usergram}) that should cover the type of \textit{ident} and generates the corresponding subgoals. \section{\texttt{simp}} The $\tac{simp}~\cst{f}_1 \ldots \cst{f}_n$ tactic is an alias to \[\begin{array}{l} \tac{autorewrite}~\tac{with}~\cst{f}_1 \ldots \cst{f}_n ; \\ \tac{try typeclasses eauto with subterm\_relation simp rec\_decision} \cst{f}_1 \ldots \cst{f}_n \end{array}\] It can be used to simplify goals involving \Equations definitions $\cst{f}_1 \ldots \cst{f}_n$, by rewriting with the equations declared for the constants in the associated rewrite hint database and trying to solve the goal using the hints declared in the associated ``auto'' hint database, both named $\cst{f}$. \section{Functional elimination} The $\tac{funelim}~t$ tactic can be used to launch a functional elimination proof on a call $t$ of an Equations-defined function $t = \cst{f}~args$ (the eliminator is named $\cst{f\_elim}$). By default, it will generalize the goal by an equality between \texttt{f args} and a fresh call to the function \texttt{f args'}, keeping information about the initial arguments of the function before doing elimination. This ensures that subgoals do not become unprovable due to generalization. This might produce complex induction hypotheses that are guarded by dependent equalities between the initial arguments and the recursive call arguments. These can be simplified by instantiating the induction hypotheses sufficiently and providing reflexive equality proofs to instantiate the equalities. A variant $\tac{apply\_funelim}~t$ simply applies the eliminator without generalization, avoiding the generation of (dependent) equalities. Note that in this case (as when using \Coq's built-in \tac{induction} tactic) one may have to explicitly generalize the goal by equalities (e.g. using the \tac{remember} tactic) if the function call being eliminated is not made of distinct variables; otherwise it can produce unprovable subgoals.
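As a basic example of \tac{funelim} in action (reusing the \cst{neg} function from the introduction, chapter~\ref{cha:gentle-intro}), the involutivity of \cst{neg} can be proved by functional elimination followed by simplification with the generated equations:
\begin{verbatim}
Lemma neg_involutive b : neg (neg b) = b.
Proof. funelim (neg b); now simp neg. Qed.
\end{verbatim}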
Finally, for mutual or nested programs, no automation is provided yet. The user has to invoke the functional elimination directly, e.g. using \[\tac{eapply}~(\cst{f\_elim}~P_1 \ldots P_n)\] providing predicates for each of the nested or mutual function definitions in \cst{f} (use \kw{About} \cst{f\_elim} to figure out the predicates to be provided). \newpage \section{Changes} \def\issue#1{\href{https://github.com/mattam82/Coq-Equations/issues/#1}{\texttt{\##1}}} \begin{itemize} \item Version 1.3: \begin{itemize} \item \textbf{Incompatible change} Support for the Coq-HoTT library, out of the box. This required a slight reorganization of the directories. To use this, simply install the \texttt{opam} \texttt{coq-hott} package before \texttt{coq-equations} and \[\texttt{From Equations.HoTT Require Import All}\] Now \[\texttt{Require Import Equations.Equations}\] no longer works, use \texttt{Require Import Equations.Prop.Equations} (absolute path, future-proof) or \texttt{From Equations Require Import Equations} (relative path) to use the (default) \texttt{Prop} instance. \item Complete support for building with \texttt{dune}. \item New attributes \texttt{tactic = tac} and \texttt{derive(eliminator, equations)}. \item Improved syntax for concise clauses: no ";" separator needed for clauses already separated by \texttt{|}. Useful in with clauses but also at the toplevel, e.g. the following definition is now accepted: \begin{verbatim} Equations foo (x : nat) : nat := | 0 => 1 | S n => S (foo n). \end{verbatim} \item Fixed issues: \begin{itemize} \item \issue{329}: improved strengthening avoiding to abstract over recursive definitions which would not pass the guardness checker. This can simplify the produced terms, avoiding unnecessary "commutative cuts". \item \issue{321}: warn rather than error when using \texttt{Equations?} and no subgoals are left. This will leave a proof state with no subgoals, that \emph{must} be closed using a \texttt{Qed} or \texttt{Defined} (similarly to \Coq's \texttt{\#[refine] Instance} command). \item \issue{372}, \issue{194}: funelim applied to a partial application failing \item \issue{354}: support for building values in \texttt{SProp} \item \issue{353}: name capture problem in presence of modules \item \issue{335}: provide an alias \texttt{Equations Derive} to not conflict with QuickChick's \texttt{Derive} \item \issue{325}: properly inline all Equations helper constants during Extraction \end{itemize} \end{itemize} \item Version 1.2.4: Fixed issues: \begin{itemize} \item \issue{338}: incompatibility with ssreflect's \texttt{rewrite -!} \item \issue{346}: Derive not working when the inductive type's sort is hidden under a constant. \item \issue{228}, \issue{306}, \issue{349}: functional elimination proofs were failing for no good reason. Improved automation when functional extensionality is disabled. \item \issue{328}: uncaught \texttt{Not\_found} exception raised while printing an error message. 
\end{itemize} \end{itemize} %%% Local Variables: %%% mode: latex %%% TeX-master: "equations" %%% TeX-PDF-mode: t %%% End: Coq-Equations-1.3.1-8.20/doc/ml.sty000066400000000000000000000016001463127417400165150ustar00rootroot00000000000000% ML syntax \newcommand{\mlkw}[1]{\kw{#1}\xspace} \newcommand{\letml}{\mlkw{let}} \newcommand{\andml}{\mlkw{and}} \newcommand{\letcci}{\mlkw{let}_{CCI}} \newcommand{\inml}{\mlkw{in}} \newcommand{\ifml}{\mlkw{if}} \newcommand{\thenml}{\mlkw{then}} \newcommand{\elseml}{\mlkw{else}} \newcommand{\funml}{\mlkw{fun}} \newcommand{\boolml}{\mlkw{bool}} \newcommand{\natml}{\mlkw{nat}} \newcommand{\listml}{\mlkw{list}} \newcommand{\typeml}{\mlkw{type}} \newcommand{\valml}{\mlkw{val}} \newcommand{\matchml}{\mlkw{match}} \newcommand{\withml}{\mlkw{with}} \newcommand{\moduleml}{\mlkw{module}} \newcommand{\sigml}{\mlkw{sig}} \newcommand{\structml}{\mlkw{struct}} \newcommand{\enml}{\mlkw{end}} \newcommand{\returnml}{\mlkw{return}} \newcommand{\asml}{\mlkw{as}} \newcommand{\sdef}[1]{\ensuremath{\mlkw{#1}}} \newcommand{\sref}[1]{\ensuremath{\mlkw{#1}}} % concatenation \def\app{\mathbin{+\!\!\!+}} Coq-Equations-1.3.1-8.20/doc/myplainnat.bst000066400000000000000000000643131463127417400202440ustar00rootroot00000000000000%% File: `plainnat.bst' %% A modification of `plain.bst' for use with natbib package %% %% Copyright 1993-1999 Patrick W Daly %% Max-Planck-Institut f\"ur Aeronomie %% Max-Planck-Str. 2 %% D-37191 Katlenburg-Lindau %% Germany %% E-mail: daly@linmpi.mpg.de %% %% This program can be redistributed and/or modified under the terms %% of the LaTeX Project Public License Distributed from CTAN %% archives in directory macros/latex/base/lppl.txt; either %% version 1 of the License, or any later version. %% % Version and source file information: % \ProvidesFile{natbst.mbs}[1999/05/11 1.6 (PWD)] % % BibTeX `plainnat' family % version 0.99b for BibTeX versions 0.99a or later, % for LaTeX versions 2.09 and 2e. % % For use with the `natbib.sty' package; emulates the corresponding % member of the `plain' family, but with author-year citations. % % With version 6.0 of `natbib.sty', it may also be used for numerical % citations, while retaining the commands \citeauthor, \citefullauthor, % and \citeyear to print the corresponding information. % % For version 7.0 of `natbib.sty', the KEY field replaces missing % authors/editors, and the date is left blank in \bibitem. % % Includes fields ISBN and ISSN. 
% % Includes field URL for Internet addresses (best used with % with the url.sty package of Donald Arseneau % ENTRY { address author booktitle chapter edition editor howpublished institution isbn issn journal key month note number organization pages publisher school series title type url web volume year pdf ee doi } {} { label extra.label sort.label short.list } INTEGERS { output.state before.all mid.sentence after.sentence after.block } FUNCTION {init.state.consts} { #0 'before.all := #1 'mid.sentence := #2 'after.sentence := #3 'after.block := } STRINGS { s t } FUNCTION {output.nonnull} { 's := output.state mid.sentence = { ", " * write$ } { output.state after.block = { add.period$ write$ newline$ "\newblock " write$ } { output.state before.all = 'write$ { add.period$ " " * write$ } if$ } if$ mid.sentence 'output.state := } if$ s } FUNCTION {output} { duplicate$ empty$ 'pop$ 'output.nonnull if$ } FUNCTION {output.check} { 't := duplicate$ empty$ { pop$ "empty " t * " in " * cite$ * warning$ } 'output.nonnull if$ } FUNCTION {fin.entry} { add.period$ write$ newline$ } FUNCTION {new.block} { output.state before.all = 'skip$ { after.block 'output.state := } if$ } FUNCTION {new.sentence} { output.state after.block = 'skip$ { output.state before.all = 'skip$ { after.sentence 'output.state := } if$ } if$ } FUNCTION {not} { { #0 } { #1 } if$ } FUNCTION {and} { 'skip$ { pop$ #0 } if$ } FUNCTION {or} { { pop$ #1 } 'skip$ if$ } FUNCTION {new.block.checka} { empty$ 'skip$ 'new.block if$ } FUNCTION {new.block.checkb} { empty$ swap$ empty$ and 'skip$ 'new.block if$ } FUNCTION {new.sentence.checka} { empty$ 'skip$ 'new.sentence if$ } FUNCTION {new.sentence.checkb} { empty$ swap$ empty$ and 'skip$ 'new.sentence if$ } FUNCTION {field.or.null} { duplicate$ empty$ { pop$ "" } 'skip$ if$ } FUNCTION {emphasize} { duplicate$ empty$ { pop$ "" } { "{\em " swap$ * "}" * } if$ } INTEGERS { nameptr namesleft numnames } FUNCTION {format.names} { 's := #1 'nameptr := s num.names$ 'numnames := numnames 'namesleft := { namesleft #0 > } { s nameptr "{ff~}{vv~}{ll}{, jj}" format.name$ 't := nameptr #1 > { namesleft #1 > { ", " * t * } { numnames #2 > { "," * } 'skip$ if$ t "others" = { " et~al." * } { " and " * t * } if$ } if$ } 't if$ nameptr #1 + 'nameptr := namesleft #1 - 'namesleft := } while$ } FUNCTION {format.key} { empty$ { key field.or.null } { "" } if$ } FUNCTION {format.authors} { author empty$ { "" } { author format.names } if$ } FUNCTION {format.editors} { editor empty$ { "" } { editor format.names editor num.names$ #1 > { ", editors" * } { ", editor" * } if$ } if$ } FUNCTION {format.isbn} { isbn empty$ { "" } { "" } %new.block "ISBN " isbn * } if$ } FUNCTION {format.issn} { issn empty$ { "" } { new.block "ISSN " issn * } if$ } FUNCTION {format.web} { web empty$ { "" } { new.block " \url{" web * "}" * } if$ } FUNCTION {format.url} { url empty$ { "" } { "" } if$ } FUNCTION {format.title} { title empty$ { "" } { url empty$ { pdf empty$ { doi empty$ { ee empty$ { title "t" pop$ } { "\href{" ee * "}" * "{" * title "t" pop$ * "}" *} if$ } { "\href{" doi * "}" * "{" * title "t" pop$ * "}" *} if$ } { "\href{" pdf * "}" * "{" * title "t" pop$ * "}" *} if$ } { "\href{" url * "}" * "{" * title "t" pop$ * "}" *} if$ } if$ } FUNCTION {format.full.names} {'s := #1 'nameptr := s num.names$ 'numnames := numnames 'namesleft := { namesleft #0 > } { s nameptr "{vv~}{ll}" format.name$ 't := nameptr #1 > { namesleft #1 > { ", " * t * } { numnames #2 > { "," * } 'skip$ if$ t "others" = { " et~al." 
* } { " and " * t * } if$ } if$ } 't if$ nameptr #1 + 'nameptr := namesleft #1 - 'namesleft := } while$ } FUNCTION {author.editor.full} { author empty$ { editor empty$ { "" } { editor format.full.names } if$ } { author format.full.names } if$ } FUNCTION {author.full} { author empty$ { "" } { author format.full.names } if$ } FUNCTION {editor.full} { editor empty$ { "" } { editor format.full.names } if$ } FUNCTION {make.full.names} { type$ "book" = type$ "inbook" = or 'author.editor.full { type$ "proceedings" = 'editor.full 'author.full if$ } if$ } FUNCTION {output.bibitem} { newline$ "\bibitem[" write$ label write$ ")" make.full.names duplicate$ short.list = { pop$ } { * } if$ "]{" * write$ cite$ write$ "}" write$ newline$ "" before.all 'output.state := } FUNCTION {n.dashify} { 't := "" { t empty$ not } { t #1 #1 substring$ "-" = { t #1 #2 substring$ "--" = not { "--" * t #2 global.max$ substring$ 't := } { { t #1 #1 substring$ "-" = } { "-" * t #2 global.max$ substring$ 't := } while$ } if$ } { t #1 #1 substring$ * t #2 global.max$ substring$ 't := } if$ } while$ } FUNCTION {format.date} { year duplicate$ empty$ { "empty year in " cite$ * warning$ pop$ "" } 'skip$ if$ month empty$ 'skip$ { month " " * swap$ * } if$ extra.label * } FUNCTION {format.btitle} { title empty$ { emphasize } { url empty$ { pdf empty$ { doi empty$ { ee empty$ { title "t" change.case$ } { "\href{" ee * "}" * "{" * title emphasize "t" change.case$ * "}" *} if$ } { "\href{" doi * "}" * "{" * title emphasize "t" change.case$ * "}" *} if$ } { "\href{" pdf * "}" * "{" * title emphasize "t" change.case$ * "}" *} if$ } { "\href{" url * "}" * "{" * title emphasize "t" change.case$ * "}" *} if$ } if$ } FUNCTION {tie.or.space.connect} { duplicate$ text.length$ #3 < { "~" } { " " } if$ swap$ * * } FUNCTION {either.or.check} { empty$ 'pop$ { "can't use both " swap$ * " fields in " * cite$ * warning$ } if$ } FUNCTION {format.bvolume} { volume empty$ { "" } { "volume" volume tie.or.space.connect series empty$ 'skip$ { " of " * series emphasize * } if$ "volume and number" number either.or.check } if$ } FUNCTION {format.number.series} { volume empty$ { number empty$ { series field.or.null } { output.state mid.sentence = { "number" } { "Number" } if$ number tie.or.space.connect series empty$ { "there's a number but no series in " cite$ * warning$ } { " in " * series * } if$ } if$ } { "" } if$ } FUNCTION {format.edition} { edition empty$ { "" } { output.state mid.sentence = { edition "l" change.case$ " edition" * } { edition "t" change.case$ " edition" * } if$ } if$ } INTEGERS { multiresult } FUNCTION {multi.page.check} { 't := #0 'multiresult := { multiresult not t empty$ not and } { t #1 #1 substring$ duplicate$ "-" = swap$ duplicate$ "," = swap$ "+" = or or { #1 'multiresult := } { t #2 global.max$ substring$ 't := } if$ } while$ multiresult } FUNCTION {format.pages} { pages empty$ { "" } { pages multi.page.check { "pages" pages n.dashify tie.or.space.connect } { "page" pages tie.or.space.connect } if$ } if$ } FUNCTION {format.vol.num.pages} { volume field.or.null number empty$ 'skip$ { "\penalty0 (" number * ")" * * volume empty$ { "there's a number but no volume in " cite$ * warning$ } 'skip$ if$ } if$ pages empty$ 'skip$ { duplicate$ empty$ { pop$ format.pages } { ":\penalty0 " * pages n.dashify * } if$ } if$ } FUNCTION {format.chapter.pages} { chapter empty$ 'format.pages { type empty$ { "chapter" } { type "l" change.case$ } if$ chapter tie.or.space.connect pages empty$ 'skip$ { ", " * format.pages * } if$ } if$ } FUNCTION 
{format.in.ed.booktitle} { booktitle empty$ { "" } { editor empty$ { "In " booktitle emphasize * } { "In " format.editors * ", " * booktitle emphasize * } if$ } if$ } FUNCTION {empty.misc.check} { author empty$ title empty$ howpublished empty$ month empty$ year empty$ note empty$ and and and and and key empty$ not and { "all relevant fields are empty in " cite$ * warning$ } 'skip$ if$ } FUNCTION {format.thesis.type} { type empty$ 'skip$ { pop$ type "t" change.case$ } if$ } FUNCTION {format.tr.number} { type empty$ { "Technical Report" } 'type if$ number empty$ { "t" change.case$ } { number tie.or.space.connect } if$ } FUNCTION {format.article.crossref} { key empty$ { journal empty$ { "need key or journal for " cite$ * " to crossref " * crossref * warning$ "" } { "In {\em " journal * "\/}" * } if$ } { "In " key * } if$ " \citep{" * crossref * "}" * } FUNCTION {format.book.crossref} { volume empty$ { "empty volume in " cite$ * "'s crossref of " * crossref * warning$ "In " } { "Volume" volume tie.or.space.connect " of " * } if$ editor empty$ editor field.or.null author field.or.null = or { key empty$ { series empty$ { "need editor, key, or series for " cite$ * " to crossref " * crossref * warning$ "" * } { "{\em " * series * "\/}" * } if$ } { key * } if$ } 'skip$ if$ ", \citet{" * crossref * "}" * } FUNCTION {format.incoll.inproc.crossref} { editor empty$ editor field.or.null author field.or.null = or { key empty$ { booktitle empty$ { "need editor, key, or booktitle for " cite$ * " to crossref " * crossref * warning$ "" } { "In {\em " booktitle * "\/}" * } if$ } { "In " key * } if$ } { "In " } if$ " \citet{" * crossref * "}" * } FUNCTION {article} { output.bibitem format.authors "author" output.check author format.key output new.block format.btitle "title" output.check new.block crossref missing$ { journal emphasize "journal" output.check format.vol.num.pages output format.date "year" output.check } { format.article.crossref output.nonnull format.pages output } if$ format.issn output format.url output new.block note output fin.entry } FUNCTION {book} { output.bibitem author empty$ { format.editors "author and editor" output.check editor format.key output } { format.authors output.nonnull crossref missing$ { "author and editor" editor either.or.check } 'skip$ if$ } if$ new.block format.btitle "title" output.check crossref missing$ { format.bvolume output new.block format.number.series output new.sentence publisher "publisher" output.check address output } { new.block format.book.crossref output.nonnull } if$ format.edition output format.date "year" output.check format.isbn output format.url output new.block note output fin.entry } FUNCTION {booklet} { output.bibitem format.authors output author format.key output new.block format.title "title" output.check howpublished address new.block.checkb howpublished output address output format.date output format.isbn output format.url output format.web output new.block note output fin.entry } FUNCTION {inbook} { output.bibitem author empty$ { format.editors "author and editor" output.check editor format.key output } { format.authors output.nonnull crossref missing$ { "author and editor" editor either.or.check } 'skip$ if$ } if$ new.block format.btitle "title" output.check crossref missing$ { format.bvolume output format.chapter.pages "chapter and pages" output.check new.block format.number.series output new.sentence publisher "publisher" output.check address output } { format.chapter.pages "chapter and pages" output.check new.block format.book.crossref 
output.nonnull } if$ format.edition output format.date "year" output.check format.isbn output format.url output format.web output new.block note output fin.entry } FUNCTION {incollection} { output.bibitem format.authors "author" output.check author format.key output new.block format.title "title" output.check new.block crossref missing$ { format.in.ed.booktitle "booktitle" output.check format.bvolume output format.number.series output format.chapter.pages output new.sentence publisher "publisher" output.check address output format.edition output format.date "year" output.check } { format.incoll.inproc.crossref output.nonnull format.chapter.pages output } if$ format.isbn output format.url output format.web output new.block note output fin.entry } FUNCTION {inproceedings} { output.bibitem format.authors "author" output.check author format.key output new.block format.title "title" output.check new.block crossref missing$ { format.in.ed.booktitle "booktitle" output.check format.bvolume output format.number.series output format.pages output address empty$ { organization publisher new.sentence.checkb organization output publisher output format.date "year" output.check } { address output.nonnull format.date "year" output.check new.sentence organization output publisher output } if$ } { format.incoll.inproc.crossref output.nonnull format.pages output } if$ format.isbn output format.url output format.web output new.block note output fin.entry } FUNCTION {conference} { inproceedings } FUNCTION {manual} { output.bibitem format.authors output author format.key output new.block format.btitle "title" output.check organization address new.block.checkb organization output address output format.edition output format.date output format.url output format.web output new.block note output fin.entry } FUNCTION {mastersthesis} { output.bibitem format.authors "author" output.check author format.key output new.block format.title "title" output.check new.block "Master's thesis" format.thesis.type output.nonnull school "school" output.check address output format.date "year" output.check format.url output format.web output new.block note output fin.entry } FUNCTION {misc} { output.bibitem format.authors output author format.key output title howpublished new.block.checkb format.title output howpublished new.block.checka howpublished output format.date output format.issn output format.url output format.web output new.block note output fin.entry empty.misc.check } FUNCTION {phdthesis} { output.bibitem format.authors "author" output.check author format.key output new.block format.btitle "title" output.check new.block "PhD thesis" format.thesis.type output.nonnull school "school" output.check address output format.date "year" output.check format.url output format.web output new.block note output fin.entry } FUNCTION {proceedings} { output.bibitem format.editors output editor format.key output new.block format.btitle "title" output.check format.bvolume output format.number.series output address output format.date "year" output.check new.sentence organization output publisher output format.isbn output format.url output format.web output new.block note output fin.entry } FUNCTION {techreport} { output.bibitem format.authors "author" output.check author format.key output new.block format.title "title" output.check new.block format.tr.number output.nonnull institution "institution" output.check address output format.date "year" output.check format.url output format.web output new.block note output fin.entry } FUNCTION 
{unpublished} { output.bibitem format.authors "author" output.check author format.key output new.block format.title "title" output.check format.url output format.web output new.block note "note" output.check format.date output fin.entry } FUNCTION {webpage} { output.bibitem format.authors output author format.key output title howpublished new.block.checkb format.title output howpublished new.block.checka howpublished output format.url output format.web output new.block note output fin.entry empty.misc.check } FUNCTION {default.type} { misc } MACRO {jan} {"January"} MACRO {feb} {"February"} MACRO {mar} {"March"} MACRO {apr} {"April"} MACRO {may} {"May"} MACRO {jun} {"June"} MACRO {jul} {"July"} MACRO {aug} {"August"} MACRO {sep} {"September"} MACRO {oct} {"October"} MACRO {nov} {"November"} MACRO {dec} {"December"} MACRO {acmcs} {"ACM Computing Surveys"} MACRO {acta} {"Acta Informatica"} MACRO {cacm} {"Communications of the ACM"} MACRO {ibmjrd} {"IBM Journal of Research and Development"} MACRO {ibmsj} {"IBM Systems Journal"} MACRO {ieeese} {"IEEE Transactions on Software Engineering"} MACRO {ieeetc} {"IEEE Transactions on Computers"} MACRO {ieeetcad} {"IEEE Transactions on Computer-Aided Design of Integrated Circuits"} MACRO {ipl} {"Information Processing Letters"} MACRO {jacm} {"Journal of the ACM"} MACRO {jcss} {"Journal of Computer and System Sciences"} MACRO {scp} {"Science of Computer Programming"} MACRO {sicomp} {"SIAM Journal on Computing"} MACRO {tocs} {"ACM Transactions on Computer Systems"} MACRO {tods} {"ACM Transactions on Database Systems"} MACRO {tog} {"ACM Transactions on Graphics"} MACRO {toms} {"ACM Transactions on Mathematical Software"} MACRO {toois} {"ACM Transactions on Office Information Systems"} MACRO {toplas} {"ACM Transactions on Programming Languages and Systems"} MACRO {tcs} {"Theoretical Computer Science"} READ FUNCTION {sortify} { purify$ "l" change.case$ } INTEGERS { len } FUNCTION {chop.word} { 's := 'len := s #1 len substring$ = { s len #1 + global.max$ substring$ } 's if$ } FUNCTION {format.lab.names} { 's := s #1 "{vv~}{ll}" format.name$ s num.names$ duplicate$ #2 > { pop$ " et~al." * } { #2 < 'skip$ { s #2 "{ff }{vv }{ll}{ jj}" format.name$ "others" = { " et~al." 
* } { " and " * s #2 "{vv~}{ll}" format.name$ * } if$ } if$ } if$ } FUNCTION {author.key.label} { author empty$ { key empty$ { cite$ #1 #3 substring$ } 'key if$ } { author format.lab.names } if$ } FUNCTION {author.editor.key.label} { author empty$ { editor empty$ { key empty$ { cite$ #1 #3 substring$ } 'key if$ } { editor format.lab.names } if$ } { author format.lab.names } if$ } FUNCTION {author.key.organization.label} { author empty$ { key empty$ { organization empty$ { cite$ #1 #3 substring$ } { "The " #4 organization chop.word #3 text.prefix$ } if$ } 'key if$ } { author format.lab.names } if$ } FUNCTION {editor.key.organization.label} { editor empty$ { key empty$ { organization empty$ { cite$ #1 #3 substring$ } { "The " #4 organization chop.word #3 text.prefix$ } if$ } 'key if$ } { editor format.lab.names } if$ } FUNCTION {calc.short.authors} { type$ "book" = type$ "inbook" = or 'author.editor.key.label { type$ "proceedings" = 'editor.key.organization.label { type$ "manual" = 'author.key.organization.label 'author.key.label if$ } if$ } if$ 'short.list := } FUNCTION {calc.label} { calc.short.authors short.list "(" * year duplicate$ empty$ short.list key field.or.null = or { pop$ "" } 'skip$ if$ * 'label := } FUNCTION {sort.format.names} { 's := #1 'nameptr := "" s num.names$ 'numnames := numnames 'namesleft := { namesleft #0 > } { nameptr #1 > { " " * } 'skip$ if$ s nameptr "{vv{ } }{ll{ }}{ ff{ }}{ jj{ }}" format.name$ 't := nameptr numnames = t "others" = and { "et al" * } { t sortify * } if$ nameptr #1 + 'nameptr := namesleft #1 - 'namesleft := } while$ } FUNCTION {sort.format.title} { 't := "A " #2 "An " #3 "The " #4 t chop.word chop.word chop.word sortify #1 global.max$ substring$ } FUNCTION {author.sort} { author empty$ { key empty$ { "to sort, need author or key in " cite$ * warning$ "" } { key sortify } if$ } { author sort.format.names } if$ } FUNCTION {author.editor.sort} { author empty$ { editor empty$ { key empty$ { "to sort, need author, editor, or key in " cite$ * warning$ "" } { key sortify } if$ } { editor sort.format.names } if$ } { author sort.format.names } if$ } FUNCTION {author.organization.sort} { author empty$ { organization empty$ { key empty$ { "to sort, need author, organization, or key in " cite$ * warning$ "" } { key sortify } if$ } { "The " #4 organization chop.word sortify } if$ } { author sort.format.names } if$ } FUNCTION {editor.organization.sort} { editor empty$ { organization empty$ { key empty$ { "to sort, need editor, organization, or key in " cite$ * warning$ "" } { key sortify } if$ } { "The " #4 organization chop.word sortify } if$ } { editor sort.format.names } if$ } FUNCTION {presort} { calc.label label sortify " " * type$ "book" = type$ "inbook" = or 'author.editor.sort { type$ "proceedings" = 'editor.organization.sort { type$ "manual" = 'author.organization.sort 'author.sort if$ } if$ } if$ " " * year field.or.null sortify * " " * title field.or.null sort.format.title * #1 entry.max$ substring$ 'sort.label := sort.label * #1 entry.max$ substring$ 'sort.key$ := } ITERATE {presort} SORT STRINGS { longest.label last.label next.extra } INTEGERS { longest.label.width last.extra.num number.label } FUNCTION {initialize.longest.label} { "" 'longest.label := #0 int.to.chr$ 'last.label := "" 'next.extra := #0 'longest.label.width := #0 'last.extra.num := #0 'number.label := } FUNCTION {forward.pass} { last.label label = { last.extra.num #1 + 'last.extra.num := last.extra.num int.to.chr$ 'extra.label := } { "a" chr.to.int$ 'last.extra.num := "" 
'extra.label := label 'last.label := } if$ number.label #1 + 'number.label := } FUNCTION {reverse.pass} { next.extra "b" = { "a" 'extra.label := } 'skip$ if$ extra.label 'next.extra := extra.label duplicate$ empty$ 'skip$ { "{\natexlab{" swap$ * "}}" * } if$ 'extra.label := label extra.label * 'label := } EXECUTE {initialize.longest.label} ITERATE {forward.pass} REVERSE {reverse.pass} FUNCTION {bib.sort.order} { sort.label 'sort.key$ := } ITERATE {bib.sort.order} SORT FUNCTION {begin.bib} { preamble$ empty$ 'skip$ { preamble$ write$ newline$ } if$ "\begin{thebibliography}{" number.label int.to.str$ * "}" * write$ newline$ "\expandafter\ifx\csname natexlab\endcsname\relax\def\natexlab#1{#1}\fi" write$ newline$ "\expandafter\ifx\csname url\endcsname\relax" write$ newline$ " \def\url#1{{\tt #1}}\fi" write$ newline$ } EXECUTE {begin.bib} EXECUTE {init.state.consts} ITERATE {call.type$} FUNCTION {end.bib} { newline$ "\end{thebibliography}" write$ newline$ } EXECUTE {end.bib} Coq-Equations-1.3.1-8.20/doc/qsymbols.sty000066400000000000000000000646651463127417400200020ustar00rootroot00000000000000% $Id: qsymbols.sty,v 1.12 1997/01/07 23:56:44 krisrose Exp $ % % `Quoted math symbol abbreviations' package for LaTeX2e. % Copyright 1994 Kristoffer H. Rose % % This package is free software; you can redistribute it and/or modify % it under the terms of the GNU General Public License as published by the % Free Software Foundation; either version 2 of the License, or (at your % option) any later version. % % This package is distributed in the hope that it will be useful, but % WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY % or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License % for more details. % % You should have received a copy of the GNU General Public License along % with this package; if not, write to the Free Software Foundation, Inc., % 675 Mass Ave, Cambridge, MA 02139, USA. % \NeedsTeXFormat{LaTeX2e} \def\tmp#1Revision: #2 $,#3Date: #4 #5 ${% \ProvidesPackage{qsymbols}[#4 Quoted math symbol abbreviations v.#2] \def\qsymbolsversion{#2}\def\qsymbolsdate{#4}} \tmp$Revision: 1.12 $,$Date: 1997/01/07 23:56:44 $ % Process Options such that \xyloaded is indicator... \DeclareOption{noams}{\let\qsym@noams=\relax} \DeclareOption{nostmary}{\let\qsym@nostmary=\relax} \DeclareOption{dvips}{\PassOptionsToPackage{dvips}{xy}} \newif\ifpurexy@ \DeclareOption{xy}{\let\qsym@xy=\relax \purexy@false} \DeclareOption{purexy}{\let\qsym@xy=\relax \purexy@true} \newif\ifoldcm@ \DeclareOption{oldcm}{\oldcm@true} \def\qsymsmash@@{} \DeclareOption{smash}{\gdef\qsymsmash@@{\ht\z@=\z@ \dp\z@=\z@}} \def\qW@@#1#2{} \def\qW@debug#1#2{\begingroup#1\immediate\write16{#2}\endgroup} \DeclareOption{debug}{\let\qW@@=\qW@debug} \DeclareOption*{% \expandafter\PassOptionsToPackage\expandafter{\CurrentOption}{xy}} \ProcessOptions\relax % Import of symbols. \ifnum\the\catcode`\"=\active \def\restoredbl@{\catcode`\"=\active}% \else \let\restoredbl@=\relax \fi \catcode`\"=12 \ifx\qsym@noams\undefined \let\blacktriangle=\undefined % Sic. \RequirePackage{amssymb,amsbsy} \fi \ifx\qsym@nostmary\undefined \RequirePackage{stmaryrd} \fi \ifx\qsym@xy\relax \ifx\xyloaded\undefined \RequirePackage[arrow,cmtip]{xy}% \else \xyoption{arrow}\xyoption{cmtip}\makeatletter \fi \fi \ifx\UseTips\undefined \let\qTips=\UseComputerModernTips \let\qTips=\NoComputerModernTips \else \let\qTips=\UseTips \let\qNoTips=\NoTips \fi %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % SETUP. 
% Activate ` in math mode to look up qsymbol. \def\active@q{\futurelet\tmp\qsym@} {\catcode`\`=\active \global\let`=\active@q} \mathcode`\`="8000 % Definition commands. \def\qsym@#1{\ifx\bgroup\tmp \def\tmp{`{#1}}\else\def\tmp{`#1}\fi \expandafter\let\expandafter\@tmp\csname\space\codeof\tmp\endcsname \avoidrelax@\@tmp{\@warning{Unknown symbol \codeof\tmp}}} \def\avoidrelax@#1#2{\ifx#1\relax\def\av@id{#2}\else\let\av@id=#1\fi \av@id} \def\newqsymbol#1{\def\tmp{#1}\edef\tmp{\codeof\tmp}% \expandafter\let\expandafter\@tmp\csname\space\tmp\endcsname \ifx\@tmp\relax\else \@warning{Redefining qsymbol \tmp}\fi \expandafter\def\csname\space\tmp\endcsname} \def\newqsymbol@#1#2{\def\tmp{#1}% \expandafter\def\csname\space\codeof\tmp\endcsname{#2}} \def\newqsymbol@@#1#2{\ifx#2\undefined \else \newqsymbol@{#1}{#2}\fi} % Boxification...not really an optimisation anymore. \newcommand{\boxifyqsymbol}[2][]{\def\tmp{#2}\edef\tmp{\codeof\tmp}% \edef\tmp{{\expandafter\noexpand\csname\space\tmp @box\endcsname}{\tmp}}% \DN@##1##2{\AtBeginDocument{% \newsavebox{##1}\sbox{##1}{$#2$}% \expandafter\def\csname\space##2\endcsname{#1{\usebox{##1}}}}}% \expandafter\next@\tmp} % Auxiliaries (from Xy-pic)... %\ifx\xyloaded\undefined {\catcode`\:=12 \catcode`\>=12 % to ensure that all of :->< are other... \gdef\codeof#1{\expandafter\codeof@\meaning#1<-:} \gdef\codeof@#1:->#2<-:{#2}} \def\space@.{\futurelet\space@\relax}\space@. % \def\DN@{\def\next@} \def\DNii@{\def\nextii@} %\fi %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % SINGLE SYMBOLS. % Letters: \newqsymbol@{`a}\alpha \newqsymbol@{`b}\beta \newqsymbol@{`c}\chi \newqsymbol@{`d}\delta \newqsymbol@{`D}\Delta \newqsymbol@{`e}\epsilon \newqsymbol@{`f}\phi \newqsymbol@{`F}\Phi \newqsymbol@{`g}\gamma \newqsymbol@{`G}\Gamma \newqsymbol@{`h}\eta \newqsymbol@{`i}\iota \newqsymbol@{`j}\psi \newqsymbol@{`J}\Psi \newqsymbol@{`k}\kappa \newqsymbol@{`l}\lambda \newqsymbol@{`L}\Lambda \newqsymbol@{`m}\mu \newqsymbol@{`n}\nu \newqsymbol@{`p}\pi \newqsymbol@{`P}\Pi \newqsymbol@{`q}\theta \newqsymbol@{`Q}\Theta \newqsymbol@{`r}\rho \newqsymbol@{`s}\sigma \newqsymbol@{`S}\Sigma \newqsymbol@{`t}\tau \newqsymbol@{`u}\varrho \newqsymbol@{`v}\varphi \newqsymbol@{`w}\omega \newqsymbol@{`W}\Omega \newqsymbol@{`x}\xi \newqsymbol@{`X}\Xi \newqsymbol@{`y}\upsilon \newqsymbol@{`Y}\Upsilon \newqsymbol@{`z}\zeta % Symbols: \newqsymbol@{`+}\pm \newqsymbol@{`*}\times \newqsymbol@{`:}\in \newqsymbol@{`;}\notin \newqsymbol@{`/}\setminus \newqsymbol@{`U}\cup \newqsymbol@@{`C}\complement \newqsymbol@{`_}\bot \newqsymbol@{`T}\top \newqsymbol@{`o}\circ \newqsymbol@{`.}\cdot \newqsymbol@{`=}\equiv \newqsymbol@{`~}\simeq \newqsymbol@{`E}\exists \newqsymbol@{`A}\forall \newqsymbol@{`!}\lnot \newqsymbol@{`^}\land \newqsymbol@{`V}\lor %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % CIRCLED & BOXED OPERATIONS. 
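% Informal usage sketch (not taken from the package documentation, untested):
% with the setup above, the back-quote acts as an abbreviation prefix in math
% mode, so for instance
%   $`a `: `G$   abbreviates   $\alpha \in \Gamma$
%   $`A x$       abbreviates   $\forall x$
% and further abbreviations can be declared with \newqsymbol, e.g. the
% hypothetical \newqsymbol{`N}{\mathbb{N}} would let one write $`N$.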
% Circled: \newqsymbol@{`(}{\oqsym@} \def\oqsym@#1){\def\tmp{`(#1)}% \expandafter\let\expandafter\@tmp\csname\space\codeof\tmp\endcsname\relax \avoidrelax@\@tmp{\encircle{#1}}} \ifx\varbigcirc\undefined \let\varbigcirc=\bigcirc \fi \def\encircle#1{\ensuremath{% \def\nextii@{\kern-.1em % \ooalign{\hfil\hbox{$\mathstrut\scriptstyle#1$}\hfil\crcr $\varbigcirc$}\kern-.1em}% \mathchoice{\hbox{\nextii@}}{\hbox{\nextii@}}% {\hbox{\scriptsize\nextii@}}{\hbox{\tiny\nextii@}}}} \newqsymbol@{`()}\bigcirc \newqsymbol@{`(?)}\circ \newqsymbol@{`(!)}\bullet \newqsymbol@@{`(R)}\circledR \newqsymbol@{`(C)}\copyright % Circled symbols overridden by stmaryrd (bold) variants if available. \newqsymbol@{`(+)}\oplus \newqsymbol@@{`(+)}\varoplus \newqsymbol@{`(-)}\ominus \newqsymbol@@{`(-)}\varominus \newqsymbol@{`(`*)}\otimes \newqsymbol@@{`(`*)}\varotimes \newqsymbol@@{`(/)}\oslash \newqsymbol@@{`(/)}\varoslash \newqsymbol@@{`(|)}\obar \newqsymbol@@{`(|)}\varobar \newqsymbol@@{`(`/)}\obslash \newqsymbol@@{`(`/)}\varobslash \newqsymbol@{`(`.)}\odot \newqsymbol@@{`(`.)}\varodot \newqsymbol@@{`(*)}\oast \newqsymbol@@{`(*)}\varoast \newqsymbol@@{`(`o)}\ocircle \newqsymbol@@{`(`o)}\varocircle \newqsymbol@@{`(`^)}\owedge \newqsymbol@@{`(`^)}\varowedge \newqsymbol@@{`(`V)}\ovee \newqsymbol@@{`(`V)}\varovee \newqsymbol@@{`(<)}\olessthan \newqsymbol@@{`(<)}\varolessthan \newqsymbol@@{`(>)}\ogreaterthan \newqsymbol@@{`(>)}\varogreaterthan % Boxed: \newqsymbol@{`[}{\boxedqsym@} \def\boxedqsym@#1]{\def\tmp{`[#1]}% \expandafter\let\expandafter\@tmp\csname\space\codeof\tmp\endcsname\relax \avoidrelax@\@tmp{\enbox{#1}}} \def\enbox#1{\ensuremath{% \def\nextii@{\kern-.1em \hbox{% \ooalign{\hfil\raise.1em\hbox{$\scriptstyle#1$}\hfil\crcr $\square$}\kern-.1em}}% \mathchoice{\hbox{\nextii@}}{\hbox{\nextii@}}% {\hbox{\scriptsize\nextii@}}{\hbox{\tiny\nextii@}}}} \newqsymbol@@{`[+]}\boxplus \newqsymbol@@{`[-]}\boxminus \newqsymbol@@{`[`*]}\boxtimes \newqsymbol@@{`[/]}\boxslash \newqsymbol@@{`[|]}\boxbar \newqsymbol@@{`[`/]}\boxbslash \newqsymbol@@{`[`.]}\boxdot \newqsymbol@@{`[*]}\boxast \newqsymbol@@{`[`o]}\boxcircle \def\bull{% \mathbin{\vcenter{% \setbox0=\hbox{\kern1pt \vrule height.4ex depth.4ex width.8ex }% \ht0=.5ex \dp0=.5ex \wd0=1ex \box0}}} \def\whitebull{% \mathbin{\vcenter{% \setbox0=\hbox{\kern1pt % \vrule height.4ex depth.4ex width.1ex % \rlap{\vrule height.4ex depth -.3ex width.6ex }% \vrule height-.3ex depth.4ex width.6ex % \vrule height .4ex depth.4ex width .1ex }% \ht0=.5ex \dp0=.5ex \wd0=1ex \box0}}} \newqsymbol@{`[]}\square \newqsymbol@{`[?]}\whitebull \newqsymbol@{`[!]}\bull % Angled: \newqsymbol@{`<}\angledqsym@ \def\angledqsym@#1>{\def\tmp{`<#1>}% \csname\space\codeof\tmp\endcsname\relax} \newqsymbol@{`<>}\lozenge \newqsymbol@{`}\diamond % Braced: \newqsymbol@{`{}}\emptyset \newqsymbol@@{`{}}\varnothing % Faces: \def\smiley{{\hbox{\rlap{$\varbigcirc$}\kern.13em$\scriptstyle\ddot\smile$}}} \def\frowny{{\hbox{\rlap{$\varbigcirc$}\kern.13em$\scriptstyle\ddot\frown$}}} \def\weeny{{\hbox{\rlap{$\varbigcirc$}\kern.20em$% \scriptstyle\ddot{\mkern.5mu\smash{-}\mkern-.5mu}$}}} \def\oopsy{{\hbox{\rlap{$\bigcirc$}\kern.5ex$\scriptstyle\ddot\sim$}}} \newqsymbol@{`(:-)}\smiley \newqsymbol@{`[:-]}\frowny \newqsymbol@{`<:->}\weeny \newqsymbol@{`{:-}}\oopsy %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % BOLD SYMBOLS \newqsymbol@{`@}{\futurelet\tmp\boldqsymbol} \def\boldqsymbol{% \ifx `\tmp \DN@`{\futurelet\tmp\boldqsymbol@}% \else \let\next@=\boldsymbol \fi \next@} \def\boldqsymbol@{% \ifx [\tmp 
\DN@[##1]{\boldsymbol{`[##1]}}%] \else\ifx (\tmp \DN@(##1){\boldsymbol{`(##1)}}%) \else\ifx <\tmp \DN@<##1>{\boldsymbol{`<##1>}}%> \else\ifx \bgroup\tmp \DN@##1{\boldsymbol{`{##1}}}% \else \DN@##1{\boldsymbol{`##1}}% \fi\fi\fi\fi \next@} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % ORDERINGS. \newqsymbol@{``}{\qsymord@} \def\xqsym@@#1#2{% \expandafter\let\expandafter\1\csname qsym@@#1\endcsname \def\2{\expandafter\def\csname qsym@@#1\endcsname}% \expandafter\2\expandafter{\1#2}} \def\qsymord@{\def\qsym@@@{``}% \let\qsym@@=\empty\let\qsym@@x=\empty\let\qsym@@v=\empty\let\qsym@@m=\empty \futurelet\tmp\qsymord@i} {\restoredbl@ \gdef\qsymord@i{% \ifx `\tmp \def\@tmp##1{\futurelet\tmp\qsymord@i}\xqsym@@{@}`\xqsym@@{x}x% \else\ifx "\tmp \def\@tmp"##1"{\xqsym@@{@}{"##1"}% \mathbin{\vcenter{\hbox{% $\let\undefinedarrow@=\undefinedarrowtrue@ \def\qrightdirection@@{(0,-1):0}\qsymar##1 $}}}}% \else\ifx\undefined\tmp \let\@tmp=\qsymord@ii \else\ifcat A\tmp \def\@tmp##1{\csname##1@ord\endcsname}% \else \let\@tmp=\qsymord@ii \fi\fi\fi\fi \@tmp}} \def\qsymord@ii{\def\@tmp##1{\xqsym@@{@}{##1}\futurelet\tmp\qsymord@ii}% \ifx /\tmp \xqsym@@{x}n% \else\ifx =\tmp \def\qsym@@m{eq}% \else\ifx -\tmp \def\qsym@@v{dash}% \else\ifx +\tmp \def\qsym@@v{plus}% \else\ifx <\tmp \def\qsym@@{less}% \else\ifx >\tmp \def\qsym@@{gtr}% \else\ifx (\tmp \def\qsym@@{subset}% \else\ifx )\tmp \def\qsym@@{supset}% \else\ifx [\tmp \def\qsym@@{sqsubset}% \else\ifx ]\tmp \def\qsym@@{sqsupset}% \else\ifx \{\tmp\def\qsym@@{prec}% \else\ifx \}\tmp\def\qsym@@{succ}% \else\ifx \<\tmp\def\qsym@@{triangleleft}% \else\ifx \>\tmp\def\qsym@@{triangleright}% \else\ifx ~\tmp \def\qsym@@{sim}% \else\ifx \~\tmp\def\qsym@@{backsim}% \else \let\@tmp=\qsymord@x \fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi \@tmp} \def\qsymord@x{% \expandafter\let\expandafter\@tmp\csname\qsym@@x\qsym@@\qsym@@v\qsym@@m\endcsname \ifx\@tmp\relax \@warning{Undefined ordering \codeof\qsym@@@}% \else\qW@@{}{Q \codeof\qsym@@@\space : \expandafter\string\csname\qsym@@x\qsym@@\qsym@@v\qsym@@m\endcsname}\fi \@tmp} \mathchardef\less="313C % < \let\lesseq=\leq \let\nlesseq=\nleq \def\xless{\mathrel{\wedge}} \let\xxless=\bigwedge \mathchardef\gtr="313E % > \let\gtreq=\geq \let\ngtreq=\ngeq \def\xgtr{\mathrel{\vee}} \let\xxgtr=\bigvee \let\subsetdash=\in % `set inclusion' relations \def\nsubset{\not\subset} \let\nsubsetdash=\notin \def\nsubseteq{\not\subseteq} \def\nsubsetplus{\not\subsetplus} \def\nsubsetpluseq{\not\subsetpluseq} \let\xsubset=\cap \let\xxsubset=\bigcap \let\xsubsetplus=\nplus \let\xxsubsetplus=\bignplus \let\xsubsetdash=\pitchfork \let\supsetdash=\ni \let\xsupset=\cup \let\xxsupset=\bigcup \let\xsupsetplus=\uplus \let\xxsupsetplus=\biguplus \def\nsupset{\not\supset} \def\nsupseteq{\not\supseteq} \def\nsupsetplus{\not\supsetplus} \def\nsupsetpluseq{\not\supsetpluseq} \def\nsqsubset{\not\sqsubset} % `open square' relations \def\nsqsubseteq{\not\sqsubseteq} \def\nsqsupset{\not\sqsupset} \def\nsqsupseteq{\not\sqsupseteq} \let\xsqsubset=\sqcap \let\xsqsupset=\sqcup \let\xxsqsubset=\bigsqcap \let\xxsqsupset=\bigsqcup \let\xprec=\curlywedge % `curly' relations \let\xsucc=\curlyvee \let\xxprec=\bigcurlywedge \let\xxsucc=\bigcurlyvee \let\triangleleft=\vartriangleleft % `triangle' relations \let\triangleright=\vartriangleright \let\xtriangleleft=\vartriangle \let\xtriangleright=\triangledown \let\xxtriangleleft=\bigtriangleup \let\xxtriangleright=\bigtriangledown \def\nsim{\not\sim} % ~ \def\nsimeq{\not\simeq} \let\xsim=\wr 
\def\nbacksim{\not\backsim} \def\nbacksimeq{\not\backsimeq} \def\xbacksim{\smallint} \def\xxbacksim{\int} \def\U@ord{\bigcup} % Abbreviations... ``U \expandafter\def\csname\string^@ord\endcsname{\bigwedge} % ``^ \def\V@ord{\bigvee} % ``V \def\S@ord{\sum} % ``S \def\P@ord{\prod} % ``P %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % ARROWS. \newif\ifqsym@@conn \newif\ifqsym@@not \newif\ifqsym@@ss % Activate "..." in math mode to look up qsymbol arrow. {\restoredbl@ \gdef\active@dq#1"{\def\qsym@@@{"#1"}\qsymar#1 }} {\catcode`\"=\active \global\let"=\active@dq} \mathcode`\"="8000 \newqsymbol@{`"}{\active@dq} \def\qsymar{\let\qsym@@=\empty \let\qsym@@v=\empty \let\qsym@@m=\empty \let\qsym@@x=\empty \def\qsym@@w{{10.1pt}}% \qsym@@connfalse \qsym@@notfalse \setbox\qsymar@labelbox=\copy\voidb@x \qsym@@sstrue \let\qsym@@sb=\empty \let\qsym@@sp=\empty \futurelet\tmp\qsymar@} \newbox\qsymar@labelbox \def\qsymar@hasconn{% \ifqsym@@conn \qsymar@expand \else \qsym@@conntrue \fi} {% REPAIR bug of the 2e version of AMS-LaTeX where \makeatother is broken... \ifx\@ifpackageloaded\undefined \makeatother \else \@ifpackageloaded{amstex}{\catcode`\@=\active}{\makeatother}\fi \gdef\addATto#1{#1@}} \def\qsym@@labelmargin{\labelmargin{1pt}} \def\qsymarv@#1{\def\qsym@@v{#1}\def\qsym@@labelmargin{\labelmargin{#1pt}}} \def\qsymar@{% \DN@##1{\xqsym@@{}{##1}\xqsym@@{m}{##1}\futurelet\tmp\qsymar@}%default \ifx\space@\tmp \expandafter\DN@\space{\qsymar@i}%terminate at space \else\ifx2\tmp \qsymarv@{2}\xqsym@@{}2\DN@2{\futurelet\tmp\qsymar@}% \else\ifx3\tmp \qsymarv@{3}\xqsym@@{}3\DN@3{\futurelet\tmp\qsymar@}% \else\ifx^\tmp \def\qsym@@v{^}\xqsym@@{}^\DN@^{\futurelet\tmp\qsymar@}% \else\ifx_\tmp \def\qsym@@v{_}\xqsym@@{}_\DN@_{\futurelet\tmp\qsymar@}% \else\ifx<\tmp \else\ifx>\tmp \else\ifx|\tmp \else\ifx-\tmp \qsymar@hasconn \else\ifx=\tmp \qsymar@hasconn \xqsym@@{m}-\qsymarv@{2}% \xqsym@@{}=\DN@={\futurelet\tmp\qsymar@}% \else\ifx.\tmp \qsymar@hasconn \else\ifx:\tmp \qsymar@hasconn\xqsym@@{m}.\qsymarv@{2}% \xqsym@@{}:\DN@:{\futurelet\tmp\qsymar@}% \else\ifx~\tmp \qsymar@hasconn \qsymar@expand \else\ifx x\tmp \else\ifx o\tmp \else\ifx`\tmp \else\ifx'\tmp \else\ifx!\tmp \qsymar@expand \DN@!{\futurelet\tmp\qsymar@}% \else\ifx \bgroup\tmp \DN@##1{\futurelet\tmp\qsymar@group##1@@}% \else\addATto\ifx\tmp \DN@##1##2{\xqsym@@{x}{##1##2}\futurelet\tmp\qsymar@}% \else\ifx (\tmp \qsymar@expand\qsymar@expand \DN@(##1){\xqsym@@{}{(##1)}% \xqsym@@{x}{|*=/2\R@c/\hbox{\encircle{##1}}}% \futurelet\tmp\qsymar@}% \else\ifx [\tmp \qsymar@expand\qsymar@expand \DN@[##1]{\xqsym@@{}{[##1]}\xqsym@@{x}{|*\hbox{\enbox{##1}}}% \futurelet\tmp\qsymar@}% \else\ifx *\tmp \DN@*{\qsymar@star}% \else\ifx /\tmp \xqsym@@{}{/}% \ifqsym@@not \DN@/{\xqsym@@{x}{|-/.7pt/*\dir2{/}}\futurelet\tmp\qsymar@}% \else \DN@/{\xqsym@@{x}{|-/-.7pt/*\dir2{/}}\futurelet\tmp\qsymar@}% \qsym@@nottrue \fi \else\ifcat A\noexpand\tmp \DN@##1{\expandafter\let\expandafter\next@\csname qac@##1\endcsname \ifx\next@\relax \DN@{\qsymar@badescape{##1}}\fi \next@}% \else\ifcat .\noexpand\tmp \DN@##1{\expandafter\let\expandafter\next@\csname qac@##1\endcsname \ifx\next@\relax \DN@{\qsymar@badescape{##1}}\fi \next@}% \else \let\next@=\qsymar@i \@warning{Ignored weird token in arrow: \meaning\tmp}% \fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi\fi \next@} \def\qsymar@badescape#1{\DN@{\futurelet\tmp\qsymar@}% \@warning{Undefined qsymbol arrow macro #1}} \def\qsymar@expand{\DNii@##1{\dimen@=##1}\expandafter\nextii@\qsym@@w\relax 
\ifdim\dimen@<11pt \def\qsym@@w{{15.1pt}}% \else\ifdim\dimen@<16pt \def\qsym@@w{{20.1pt}}% \else\ifdim\dimen@<21pt \def\qsym@@w{{25.1pt}}% \else \advance\dimen@5pt \edef\qsym@@w{{\the\dimen@}}\fi\fi\fi} % Measure/keep label \def\qsymar@group#1@@{% \ifx ^\tmp \let\next@=\qsymar@label \else\ifx _\tmp \let\next@=\qsymar@label \else \qsym@@ssfalse \ifx |\tmp \let\next@=\qsymar@label \else \DN@##1@@{}\fi \fi\fi \next@#1{}@@% \xqsym@@{x}{#1}\futurelet\tmp\qsymar@} \def\qsymar@label#1#2#3@@{% \DN@{#3}\ifx\next@\empty \ifqsym@@ss \qsym@@ssfalse \ifx ^\tmp \ifx\qsym@@sp\empty \qsym@@sstrue \def\qsym@@sp{#2}\fi \else\ifx _\tmp \ifx\qsym@@sb\empty \qsym@@sstrue \def\qsym@@sb{#2}\fi \fi\fi \fi \DN@{\qsymar@label@@{#2}}% \else \qsym@@ssfalse \DN@{\qsymar@label@#3@@}\fi \next@} \def\qsymar@label@#1#2@@{\DN@{#2}% \ifx\next@\empty \DN@{\qsymar@label@@{#1}}% \else \DN@{\qsymar@label@#2@@}\fi \next@} \def\qsymar@label@@#1{\setbox\z@=\hbox{$\m@th\scriptstyle\;{#1}\;$}% \DNii@##1{\dimen@=##1}\expandafter\nextii@\qsym@@w\relax \loop\ifdim\dimen@<\wd\z@ \qsymar@expand \repeat} \def\qsymar@star#1#{\qsymar@star@{#1}} \def\qsymar@star@#1#2{\xqsym@@{}{*#1{#2}}\xqsym@@{m}{*#1{#2}}% \futurelet\tmp\qsymar@} \def\qsymar@i{% \ifingraph@ \DN@{\ingraph@false \qsymar@x\GRAPHar@}% \else\if\inxy@ \DN@{\qsymar@x\ar}% \else \edef\nextii@{\codeof\qsym@@}% \expandafter\let\expandafter\tmp\csname\space"\nextii@"\endcsname \if\undefinedarrow@\tmp \ifx\xyloaded\undefined \DN@{\@warning{arrow "\nextii@" undefined}}% \else \DNii@##1{\setboxz@h{\kern\p@\qTips\qsym@@labelmargin $\expandafter\xy\qrightdirection@@ \qsymar@x\ar*{}+/v(1,0)##1/*{}\relax \endxy$\kern\p@}\mathrel{\qsymsmash@@\boxz@}}% \DN@{\expandafter\nextii@\qsym@@w}% \qW@@{}{Q \codeof\qsym@@@\space : Xy-pic}\fi \else \ifx\qsym@@sp\empty \ifx\qsym@@sb\empty \qsym@@ssfalse \fi\fi \ifqsym@@ss \xqsym@@{}{@}% \expandafter\let\expandafter\@tmp\csname\space"\codeof\qsym@@"\endcsname \ifx\@tmp\relax \expandafter\def\expandafter\tmp\expandafter{\expandafter$\tmp$}% \else \let\tmp=\@tmp \fi \let\next@=\qsymar@stretch \qW@@{\DN@##1{\dimen@=##1}\expandafter\next@\qsym@@w}% {Q \codeof\qsym@@@\space : \codeof\tmp\space FROM \the\dimen@\space LIMITS \string_{\codeof\qsym@@sb} \string^{\codeof\qsym@@sp}}% \else \let\next@=\tmp \qW@@{}{Q \codeof\qsym@@@\space : \codeof\tmp\space LIMITS \string_{\codeof\qsym@@sb} \string^{\codeof\qsym@@sp}}% \fi \fi \fi\fi \next@} \def\qrightdirection@@{} \def\undefinedarrow@#1{\ifx#1\relax TT\else TF\fi} \def\undefinedarrowtrue@#1{TT} \def\qsymar@x#1{% %% Eval #1 @\qsym@@v{\qsym@@m}\qsym@@x ... \DN@{\addAT@#1}% \DNii@{\expandafter\expandafter\expandafter\next@ \expandafter\qsym@@v\expandafter{\qsym@@m}}% \expandafter\nextii@\qsym@@x}% % Automatic stretching to fit a label: \def\qsymar@stretch{\mathrel{\m@th \DN@##1{\dimen@=##1}\expandafter\next@\qsym@@w\relax \setbox\z@=\hbox{\tmp}\ifdim\dimen@<\wd\z@ \dimen@=\wd\z@ \fi \setbox2=\hbox{$\m@th\scriptstyle\;{\qsym@@sp}\;$}% \ifdim\dimen@<\wd2 \dimen@=\wd2 \fi \setbox4=\hbox{$\m@th\scriptstyle\;{\qsym@@sb}\;$}% \ifdim\dimen@<\wd4 \dimen@=\wd4 \fi \setbox\z@=\hbox to \dimen@{\hfil\tmp\hfil}% \dimen@=\ht\z@ \advance\dimen@-.3ex \ht\z@=\dimen@ \dimen@=\dp\z@ \advance\dimen@-.3ex \dp\z@=\dimen@ \qsymsmash@@{\textstyle\mathop{\box\z@}\limits^{\box2}_{\box4}}}} %% User's additional constructions...and the two (only) original ones. 
\def\newqsymbolarrowcharxy#1#2{% \DN@{#1}\expandafter\def\csname qac@\codeof\next@\endcsname{% \futurelet\tmp\qsymar@group#2@@}} \newqsymbolarrowcharxy{?}{^-*-<3pt,1pt>{\scriptstyle=}} \newqsymbolarrowcharxy{+}{^-*-<3pt,1pt>{\scriptstyle+}} \newif\ifingraph@ % `Canned' arrows; single arrows avoided when `oldcm' option specified (they % were ugly before getting fixed -- this is *still* bad with Y&Ys PS fonts). \ifpurexy@\else \newqsymbol@{"-"}{-} \newqsymbol@{"-@"}{\genericarrowfill---} \newqsymbol@{"/-"}{\not-} \newqsymbol@{"="}{=} \newqsymbol@{"=@"}{\genericarrowfill===} \newqsymbol@{"/="}{\not=} \newqsymbol@{"3-"}\equiv \newqsymbol@{"3-@"}{\genericarrowfill\equiv\equiv\equiv} \newqsymbol@{"3/-"}{\not\equiv} \newqsymbol@{"~"}\sim \newqsymbol@{"2~"}\approx \ifoldcm@\else \newqsymbol@{"<-"}\leftarrow \newqsymbol@{"<-!"}\longleftarrow \newqsymbol@{"<-@"}\leftarrowfill \fi \newqsymbol@{"<="}\Leftarrow \newqsymbol@{"<=!"}\Longleftarrow \newqsymbol@{"<=@"}{\genericarrowfill\Leftarrow==} \newqsymbol@@{"<3"}\Lleftarrow \ifx\Llefttarrow\undefined\else \newqsymbol@{"<3@"}{\genericarrowfill\Lleftarrow\equiv\equiv}\fi \ifoldcm@\else \newqsymbol@{""}\rightarrow \newqsymbol@{"-!>"}\longrightarrow \newqsymbol@{"->@"}\rightarrowfill \fi \newqsymbol@{"=>"}\Rightarrow \newqsymbol@{"=!>"}\Longrightarrow \newqsymbol@{"=>@"}{\genericarrowfill==\Rightarrow} \newqsymbol@@{"3>"}\Rrightarrow \ifx\Rrightarrow\undefined\else \newqsymbol@{"3>@"}{\genericarrowfill\equiv\equiv\Rrightarrow}\fi \ifoldcm@\else \newqsymbol@@{"-/>"}\nrightarrow \fi \newqsymbol@@{"=/>"}\nRightarrow \ifoldcm@\else \newqsymbol@{"<->"}\leftrightarrow \newqsymbol@{"<-!>"}\longleftrightarrow \newqsymbol@{"<->@"}{\genericarrowfill\leftarrowfill-\rightarrowfill} \fi \newqsymbol@{"<=>"}\Leftrightarrow \newqsymbol@{"<=!>"}\Longleftrightarrow \newqsymbol@{"<=>@"}{\genericarrowfill\Leftarrowfill=\Rightarrowfill} \ifoldcm@\else \newqsymbol@@{""}\nleftrightarrow \fi \newqsymbol@@{""}\nLeftrightarrow \ifoldcm@\else \newqsymbol@@{"<-|"}\mapsfrom \newqsymbol@@{"<-!|"}\longmapsfrom \ifx\mapsfrom\undefined \newqsymbol@{"<-|@"}{\genericarrowfill\leftarrow-\mapsfromchar}\fi \fi \newqsymbol@@{"<=|"}\Mapsfrom \newqsymbol@@{"<=!|"}\Longmapsfrom \ifx\Mapsfrom\undefined \newqsymbol@{"<=|@"}{\genericarrowfill\Leftarrow=\Mapsfromchar}\fi \ifoldcm@\else \newqsymbol@{"|->"}\mapsto \newqsymbol@@{"|-!>"}\longmapsto \newqsymbol@{"|->@"}{\genericarrowfill\mapstochar-\rightarrow} \fi \newqsymbol@@{"|=>"}\Mapsto \newqsymbol@@{"|=!>"}\Longmapsto \ifx\Mapsto\undefined \newqsymbol@{"|=>@"}{\genericarrowfill\Mapstochar=\Rightarrow}\fi \ifoldcm@\else \newqsymbol@{"<-'"}\hookleftarrow \newqsymbol@{"<-'@"}{\genericarrowfill\leftarrow-\rhook} \newqsymbol@{"`->"}\hookrightarrow \newqsymbol@{"`->@"}{\genericarrowfill\lhook-\rightarrow} \newqsymbol@{"^<-"}\leftharpoonup \newqsymbol@{"^->"}\rightharpoonup \newqsymbol@{"_<-"}\leftharpoondown \newqsymbol@{"_->"}\rightharpoondown \fi \newqsymbol@@{"<--"}\dashleftarrow \newqsymbol@@{"-->"}\dashrightarrow \ifoldcm@\else \newqsymbol@{"<<-"}{\leftarrow\mkern-15mu\leftarrow} \newqsymbol@{"<<-@"}{\genericarrowfill{\leftarrow\mkern-15mu\leftarrow}--} \fi \newqsymbol@{"<<="}{\Leftarrow\mkern-15mu\Leftarrow} \newqsymbol@{"<<=@"}{\genericarrowfill{\Leftarrow\mkern-15mu\Leftarrow}==} \ifoldcm@\else \newqsymbol@{"->>"}{\rightarrow\mkern-15mu\rightarrow} \newqsymbol@{"->>@"}{\genericarrowfill--{\rightarrow\mkern-15mu\rightarrow}} \fi \newqsymbol@{"=>>"}{\Rightarrow\mkern-15mu\Rightarrow} 
\newqsymbol@{"=>>@"}{\genericarrowfill=={\Rightarrow\mkern-15mu\Rightarrow}} \newqsymbol@@{"<~"}\leftsquigarrow \newqsymbol@@{"~>"}\rightsquigarrow \newqsymbol@@{"<~>"}\leftrightsquigarrow \ifoldcm@\else \newqsymbol@@{"<-<"}\leftarrowtail \newqsymbol@@{">->"}\rightarrowtail \fi \newqsymbol@@{"<|-"}\leftarrowtriangle \newqsymbol@@{"<|-@"}{\genericarrowfill\leftarrowtriangle--} \newqsymbol@@{"-|>"}\rightarrowtriangle \newqsymbol@@{"-|>@"}{\genericarrowfill--\rightarrowtriangle} \newqsymbol@@{"<|-|>"}\leftrightarrowtriangle \newqsymbol@@{"<|-|>@"}{\genericarrowfill\leftarrowtriangle-\rightarrowtriangle} \newqsymbol@@{"-o"}\multimap \newqsymbol@@{"-o@"}{\genericarrowfill--\multimap} \newqsymbol@@{"|-"}\vdash \newqsymbol@@{"|/-"}\nvdash \newqsymbol@@{"||-"}\Vdash \newqsymbol@@{"||/-"}\nVdash \newqsymbol@@{"|="}\vDash \newqsymbol@@{"|/="}\nvDash \newqsymbol@@{"-|"}\dashv \def\genericarrowfill#1#2#3{$\m@th\mathord{#1}\mkern-6mu% \cleaders\hbox{$\mkern-2mu\mathord{#2}\mkern-2mu$}\hfill \mkern-6mu\mathord{#3}$} \fi % Xy-pic
additions... \ifx\xyloaded\undefined \def\inxy@{TF}\else \newdir{|>}{!/+4.6pt/\dir{|}*:(1,-.2)\hbox{\qNoTips\dir^{>}}% *:(1,+.2)\hbox{\qNoTips\dir_{>}}} \newdir{<|}{!/-4.6pt/\dir{|}*:(1,-.2)\hbox{\qNoTips\dir_{<}}% *:(1,+.2)\hbox{\qNoTips\dir^{<}}} \newdir{`}{\dir^{(}} \newdir{'}{\dir^{)}} \newdir{/}{:(1,-.3)\dir3{|}} % DIAGRAMS: make `... work in Xy-graphs also: \let\qsymbolsGRAPH@=\GRAPH@ \def\GRAPH@{% \ifx `\next \DN@`{\ingraph@true\active@q}% \else \let\next@=\qsymbolsGRAPH@ \fi \next@} \fi %% Restore possibly active " :-) \restoredbl@ %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% % END. \endinput % $Log: qsymbols.sty,v $ % Revision 1.12 1997/01/07 23:56:44 krisrose % Spacing bug with debugging fixed. % % Revision 1.11 1997/01/06 18:55:01 krisrose % Deactivate " after ``. % % Revision 1.10 1996/12/17 18:26:33 krisrose % Fixed bugs with new stretchable arrows. % % Revision 1.9 1996/12/05 04:29:15 krisrose % Handles sub/superscripts without Xy-pic. % Cleaned up & ready for Xy-pic version 3.3. % % Revision 1.8 1995/05/09 17:27:33 kris % Intermediate. % Arrow group decorations now properly measured. % Bug fixes. % % Revision 1.7 1994/12/12 01:29:28 kris % Updates for Xy-pic v3 in progress... % % Revision 1.6 1994/11/16 14:05:40 kris % Intermediate version fits internal Xy-pic... % % Revision 1.5 1994/10/28 18:19:36 kris % Added '{..} and documented "...{...}..." . % % Revision 1.4 1994/10/28 15:08:17 kris % Added boldsymbol support. % % Revision 1.3 1994/10/26 16:47:02 kris % Fixed a few things :-) % % Revision 1.2 1994/10/26 02:10:17 kris % Integrated qarrow; use Xy-pic is an option. % % Revision 1.1 1994/10/24 22:55:12 kris % Initial revision % % Extracted from kris.sty 1.9. % Tell Emacs what this is... % Local Variables: % mode:latex % fill-column:77 % End: Coq-Equations-1.3.1-8.20/doc/stlc.tex000066400000000000000000000000001463127417400170240ustar00rootroot00000000000000Coq-Equations-1.3.1-8.20/doc/utf.sty000066400000000000000000000005141463127417400167060ustar00rootroot00000000000000\usepackage[autogenerated]{ucs} \usepackage[OT1]{fontenc} \usepackage{textcomp} \usepackage[utf8x]{inputenc} \DeclareUnicodeCharacter{8797}{\eqbydef} \DeclareUnicodeCharacter{12314}{\ensuremath{\llbracket}} \DeclareUnicodeCharacter{12315}{\ensuremath{\rrbracket}} \PrerenderUnicode{é} \PrerenderUnicode{è} \usepackage{utfmacros}Coq-Equations-1.3.1-8.20/doc/utfmacros.sty000066400000000000000000000017361463127417400201220ustar00rootroot00000000000000\def\textalpha{\ensuremath{\alpha}} \def\textbeta{\ensuremath{\beta}} \def\mathbbm#1{\ensuremath{\mathbb{#1}}} %\def\textalpha{\ensuremath{\alpha}} \def\texttau{\ensuremath{\tau}} \def\textSigma{\ensuremath{\Sigma}} \def\textsigma{\ensuremath{\sigma}} \def\textphi{\ensuremath{\phi}} \def\textlambda{\ensuremath{\lambda}} \def\textLambda{\ensuremath{\Lambda}} \def\textepsilon{\ensuremath{\epsilon}} \def\textPi{\ensuremath{\Pi}} \def\textGamma{\ensuremath{\Gamma}} \def\textDelta{\ensuremath{\Delta}} \def\textPhi{\ensuremath{\Phi}} \def\texteta{\ensuremath{\eta}} \def\textrho{\ensuremath{\rho}} \def\textpsi{\ensuremath{\psi}} \def\textdelta{\ensuremath{\delta}} \def\textpi{\ensuremath{\pi}} \def\textPsi{\ensuremath{\Psi}} \def\textiota{\ensuremath{\iota}} \def\textomega{\ensuremath{\omega}} \def\textOmega{\ensuremath{\Omega}} \def\textmu{\ensuremath{\mu}} \def\textTheta{\ensuremath{\Theta}} \def\texttheta{\ensuremath{\theta}} \def\eqbydef{\ensuremath{\stackrel{\mathrm{def}}{=}}} 
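% Informal note (an interpretation, not stated in the sources): these \text...
% macros supply the targets that the ucs/utf8x input-encoding setup in utf.sty
% is assumed to resolve Greek letters to, so Greek characters appearing in the
% coqdoc sources end up typeset through the definitions above.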
Coq-Equations-1.3.1-8.20/dune-project000066400000000000000000000000671463127417400171270ustar00rootroot00000000000000(lang dune 2.5) (using coq 0.2) (name coq-equations) Coq-Equations-1.3.1-8.20/examples/000077500000000000000000000000001463127417400164205ustar00rootroot00000000000000Coq-Equations-1.3.1-8.20/examples/AlmostFull.v000066400000000000000000001572511463127417400207040ustar00rootroot00000000000000From Equations Require Import Equations. Require Import Examples.Fin. Require Import Relations Utf8. Require Import Relations Wellfounded. Require Import Setoid RelationClasses Morphisms. Require Import Lia. Require Import Bool. Require Import List Arith String. From Coq Require Import FunctionalExtensionality. Set Equations Transparent. Set Keyed Unification. Set Asymmetric Patterns. Section Equality. Class Eq (A : Type) := { eqb : A -> A -> bool; eqb_spec : forall x y, reflect (x = y) (eqb x y) }. Equations fin_eq {k} (f f' : fin k) : bool := fin_eq fz fz => true; fin_eq (fs f) (fs f') => fin_eq f f'; fin_eq _ _ => false. Global Instance fin_Eq k : Eq (fin k). Proof. exists fin_eq. intros x y. induction x; depelim y; simp fin_eq; try constructor; auto. intro H; noconf H. intro H; noconf H. destruct (IHx y). subst x; now constructor. constructor. intro H; noconf H. now apply n0. Defined. Global Instance bool_Eq : Eq bool. Proof. exists bool_eq. intros [] []; now constructor. Defined. Global Instance prod_eq A B : Eq A -> Eq B -> Eq (A * B). Proof. intros. exists (fun '(x, y) '(x', y') => eqb x x' && eqb y y'). intros [] []. destruct (eqb_spec a a0); subst. destruct (eqb_spec b b0); subst. constructor; auto. constructor; auto. intro H; noconf H. now elim n. constructor; auto. simplify *. now elim n. Defined. Equations option_eq {A : Type} {E:Eq A} (o o' : option A) : bool := option_eq None None := true; option_eq (Some o) (Some o') := eqb o o'; option_eq _ _ := false. Global Instance option_Eq A : Eq A -> Eq (option A). Proof. intros A_Eq. exists option_eq. intros [] []; simp option_eq; try constructor. destruct (eqb_spec a a0); subst. now constructor. constructor. intro H; noconf H. now elim n. simplify *. simplify *. constructor. Defined. Section EqFin_fn. Context {A} `{Eq A}. Equations eq_fin_fn {k} (f g : fin k -> A) : bool := eq_fin_fn (k:=0) f g := true; eq_fin_fn (k:=S k) f g := eqb (f fz) (g fz) && eq_fin_fn (fun n => f (fs n)) (fun n => g (fs n)). Global Instance Eq_graph k : Eq (fin k -> A). Proof. exists eq_fin_fn. induction k; intros; simp eq_fin_fn. constructor; auto. extensionality i. depelim i. destruct (eqb_spec (x fz) (y fz)). simpl. destruct (IHk (fun n => x (fs n)) (fun n => y (fs n))). constructor; auto. extensionality n. depelim n. auto. eapply equal_f in e0. eauto. constructor. intro H'. subst. elim n. extensionality n'. reflexivity. simpl. constructor. intros H'; elim n. subst. reflexivity. Defined. End EqFin_fn. End Equality. Definition dec_rel {X:Type} (R : X → X → Prop) := ∀ x y, {R x y} + {not (R x y)}. Section AlmostFull. Context {X : Type}. Inductive WFT : Type := | ZT : WFT | SUP : (X -> WFT) -> WFT. Derive NoConfusion Subterm for WFT. Definition sec_disj (R : X -> X -> Prop) x y z := R y z \/ R x y. Fixpoint SecureBy (R : X -> X -> Prop) (p : WFT) : Prop := match p with | ZT => forall x y, R x y | SUP f => forall x, SecureBy (fun y z => R y z \/ R x y) (f x) end. Lemma SecureBy_mon p (R' S : X -> X -> Prop) (H : forall x y, R' x y -> S x y) : SecureBy R' p -> SecureBy S p. Proof. revert R' S H. induction p. simpl. intros. apply H. apply H0. simpl. intros. 
eapply H. 2:apply H1. intros. simpl in H2. intuition. Defined. Definition almost_full (R : X -> X -> Prop) := exists p, SecureBy R p. Context (R : X -> X -> Prop) (decR : dec_rel R). Fixpoint af_tree_iter {x : X} (accX : Acc R x) := match accX with | Acc_intro f => SUP (fun y => match decR y x with | left Ry => af_tree_iter (f y Ry) | right _ => ZT end) end. Context (wfR : well_founded R). Definition af_tree : X → WFT := fun x => af_tree_iter (wfR x). Scheme Acc_ind_dep := Induction for Acc Sort Prop. Lemma secure_from_wf : SecureBy (fun x y => not (R y x)) (SUP af_tree). Proof. intro x. unfold af_tree. generalize (wfR x). induction a using Acc_ind_dep. simpl. intros y. destruct (decR y x). simpl. eapply SecureBy_mon; eauto. simpl; intros. intuition. simpl. intros. intuition auto. Defined. Corollary af_from_wf : almost_full (fun x y => not (R y x)). Proof. exists (SUP af_tree). apply secure_from_wf. Defined. End AlmostFull. Class AlmostFull {X} (R : X -> X -> Prop) := is_almost_full : almost_full R. #[export] Instance proper_af X : Proper (relation_equivalence ==> iff) (@AlmostFull X). Proof. intros R S eqRS. split; intros. destruct H as [p Hp]. exists p. revert R S eqRS Hp. induction p; simpl in *; intros. now apply eqRS. apply (H x (fun y z => R y z \/ R x y)). repeat red; intuition. apply Hp. destruct H as [p Hp]. exists p. revert R S eqRS Hp. induction p; simpl in *; intros. now apply eqRS. apply (H x _ (fun y z => S y z \/ S x y)). repeat red; intuition. apply Hp. Qed. Arguments WFT _ : clear implicits. #[export] Instance almost_full_le : AlmostFull Peano.le. Proof. assert (relation_equivalence Peano.le (fun x y => ~ (y < x))) as ->. { cbn. intros x y. intuition auto. red in H0. lia. lia. } red. eapply af_from_wf. 2:apply lt_wf. intros x y. apply lt_dec. Defined. Arguments WFT X : clear implicits. Section WfFromAF. Context {X : Type}. Lemma clos_trans_n1_left {R : X -> X -> Prop} x y z : R x y -> clos_trans_n1 _ R y z -> clos_trans_n1 _ R x z. Proof. induction 2. econstructor 2; eauto. constructor; auto. econstructor 2. eauto. auto. Defined. Lemma clos_trans_1n_n1 {R : X -> X -> Prop} x y : clos_trans_1n _ R x y -> clos_trans_n1 _ R x y. Proof. induction 1. now constructor. eapply clos_trans_n1_left; eauto. Defined. Lemma clos_refl_trans_right {R : X -> X -> Prop} x y z : R y z -> clos_refl_trans _ R x y -> clos_trans_n1 _ R x z. Proof. intros Ryz Rxy. apply clos_rt_rtn1_iff in Rxy. induction Rxy in Ryz, z |- *. econstructor 1; eauto. econstructor 2; eauto. Defined. Lemma clos_trans_1n_right {R : X -> X -> Prop} x y z : R y z -> clos_trans_1n _ R x y -> clos_trans_1n _ R x z. Proof. induction 2. econstructor 2; eauto. constructor; auto. econstructor 2. eauto. auto. Defined. Lemma clos_trans_n1_1n {R : X -> X -> Prop} x y : clos_trans_n1 _ R x y -> clos_trans_1n _ R x y. Proof. induction 1. now constructor. eapply clos_trans_1n_right; eauto. Defined. Lemma acc_from_af (p : WFT X) (T R : X → X → Prop) y : (∀ x z, clos_refl_trans X T z y -> clos_trans_1n X T x z ∧ R z x → False) → SecureBy R p → Acc T y. Proof. induction p as [|p IHp] in T, R, y |- * . + simpl. intros. constructor. intros z Tz. specialize (H z y). elim H. constructor 2. split; auto. constructor. auto. + intros cond secure. constructor. intros z Tzy. simpl in secure. specialize (IHp y T (fun y0 z0 => R y0 z0 \/ R y y0) z). apply IHp; auto. intros x w wz. assert(wy: clos_refl_trans X T w y). { econstructor 3. eauto. now constructor. } pose proof (cond x w wy). intuition. specialize (cond w y). apply cond. constructor 2. 
intuition. apply clos_trans_n1_1n. eapply clos_refl_trans_right; eauto. Defined. Lemma wf_from_af (p : WFT X) (T R : X → X → Prop) : (∀ x y, clos_trans_1n X T x y ∧ R y x → False) → SecureBy R p → well_founded T. Proof. intros. intro x. eapply acc_from_af;eauto. Defined. Definition compose_rel {X} (R S : X -> X -> Prop) : relation X := fun x y => exists z, R x z /\ S z y. Equations power (k : nat) (T : X -> X -> Prop) : X -> X -> Prop := power 0 T := T; power (S k) T := fun x y => exists z, power k T x z /\ T z y. Transparent power. Lemma acc_incl (T T' : X -> X -> Prop) x : (forall x y, T' x y -> T x y) -> Acc T x -> Acc T' x. Proof. intros HT H; induction H in |- *. constructor. intros. apply HT in H1. now apply H0. Qed. Lemma power_clos_trans (T : X -> X -> Prop) k : inclusion _ (power k T) (clos_trans _ T). Proof. intros x y. induction k in x, y |- *. simpl. now constructor. simpl. intros [z [Pxz Tzy]]. econstructor 2. apply IHk; eauto. constructor. auto. Qed. Lemma clos_trans_power (T : X -> X -> Prop) x y : clos_trans _ T x y -> exists k, power k T x y. Proof. rewrite clos_trans_tn1_iff. induction 1. exists 0; auto. destruct IHclos_trans_n1 as [k pkyz]. exists (S k). simp power. now exists y. Qed. Lemma acc_power (T : X -> X -> Prop) x k : Acc T x -> Acc (power k T) x. Proof. intros. apply Acc_clos_trans in H. revert H. apply acc_incl. intros. now apply (power_clos_trans _ k). Qed. Equations secure_power (k : nat) (p : WFT X) : WFT X := secure_power 0 p := p; secure_power (S k) p := SUP (fun x => secure_power k p). Transparent secure_power. Lemma secure_by_power R p (H : SecureBy R p) k : SecureBy R (secure_power k p). Proof. induction k in R, p, H |- *; trivial. induction p. simpl in *. intros. apply IHk. simpl. intuition. simpl. intros. apply IHk. simpl. intros. simpl in H0. simpl in H. specialize (H x0). eapply SecureBy_mon. 2:eauto. simpl. intuition. Qed. Lemma acc_from_power_af (p : WFT X) (T R : X → X → Prop) y k : (∀ x z, clos_refl_trans _ T z y -> clos_trans_1n X (power k T) x z ∧ R z x → False) → SecureBy R (secure_power k p) → Acc T y. Proof. (* induction k in T, R, y |- *. simpl. intros. simp secure_power in H0. eapply acc_from_af; eauto. admit. *) (* intros. *) (* simp secure_power in H0. simpl in H0. *) (* constructor. intros x Txy. *) (* specialize (IHk T (sec_disj R x)). specialize (H0 x). *) (* apply IHk; auto. *) (* intros. destruct H1; subst. unfold sec_disj in *. *) (* intuition. *) (* intuition. red in H4. apply (H x z). *) induction p as [|p IHp] in T, R, y, k |- * . (* + simpl. intros. constructor. *) (* intros z Tz. specialize (H z y). elim H. constructor 2. split; auto. *) (* constructor. auto. *) (* + intros cond secure. constructor. intros z Tzy. *) (* simpl in secure. *) (* specialize (IHp y T (fun y0 z0 => R y0 z0 \/ R y y0) z). eapply (IHp k); auto. *) (* intros x w wz. *) (* assert(wy: clos_refl_trans X (power (S k) T) w y). *) (* { econstructor 3. eauto. constructor. now constructor. } *) (* pose proof (cond x w wy). intuition. *) (* specialize (cond w y). apply cond. constructor 2. *) (* intuition. apply clos_trans_n1_1n. eapply clos_refl_trans_right; eauto. *) (* Defined. *) Admitted. (* induction k. eapply acc_from_af; eauto. *) (* apply IHk. intros. eapply H. eauto. *) (* constructor. intros. *) (* apply IHk; auto. intros x z Hzy [Hxz Rzx]. *) (* specialize (H x z). *) (* specialize (H x _ H2). apply H. split; intuition. *) (* induction H4. constructor. simpl. exists *) (* eapply acc_power with 0. eapply acc_from_af; eauto. intros. intuition. 
*) (* simpl power in *. *) (* apply clos_trans_1n_n1 in H3. destruct H3. *) (* specialize (H x _ H1). apply H; split; auto. *) (* specialize (H *) (* induction p as [|p IHp] in T, R, y |- * . *) (* + simpl. intros. constructor. *) (* intros z Tz. specialize (H z y). elim H. constructor 2. split; auto. *) (* constructor. auto. *) (* + intros cond secure. constructor. intros z Tzy. *) (* simpl in secure. *) (* specialize (IHp y T (fun y0 z0 => R y0 z0 \/ R y y0) z). apply IHp; auto. *) (* intros x w wz. *) (* assert(wy: clos_refl_trans X T w y). *) (* { econstructor 3. eauto. now constructor. } *) (* pose proof (cond x w wy). intuition. *) (* specialize (cond w y). apply cond. constructor 2. *) (* intuition. apply clos_trans_n1_1n. eapply clos_refl_trans_right; eauto. *) (* Defined. *) Lemma wf_from_power_af (p : WFT X) (T R : X → X → Prop) k : (∀ x y, clos_trans_1n X (power k T) x y ∧ R y x → False) → SecureBy R p → well_founded T. Proof. intros. intro x. eapply acc_from_power_af; eauto. apply secure_by_power. apply H0. Defined. End WfFromAF. Section FixAF. Context {X : Type} (T R : X -> X -> Prop). Context {af : AlmostFull R}. Context (H : forall x y, clos_trans_1n X T x y /\ R y x -> False). Global Instance af_wf : WellFounded T. Proof. red. destruct af. eapply wf_from_af; eauto. Defined. End FixAF. Section PowerAf. Context {X : Type}. Context (T R : X -> X -> Prop). Context {af : AlmostFull R}. Context (k : nat). Context (H : forall x y, clos_trans_1n X (power k T) x y /\ R y x -> False). Global Instance af_power_wf : WellFounded T. Proof. destruct af as [p Sp]. eapply wf_from_power_af; eauto. Defined. End PowerAf. Equations cofmap {X Y : Type} (f : Y -> X) (p : WFT X) : WFT Y := cofmap f ZT := ZT; cofmap f (SUP w) := SUP (fun y => cofmap f (w (f y))). Transparent cofmap. Lemma cofmap_secures {X Y : Type} (f : Y -> X) (p : WFT X) (R : X -> X -> Prop) : SecureBy R p -> SecureBy (fun x y => R (f x) (f y)) (cofmap f p). Proof. induction p in R |- *; simpl; auto. intros. specialize (H (f x) (fun y z : X => R y z \/ R (f x) y)). simpl in H. apply H. apply H0. Defined. #[export] Instance AlmostFull_MR {X Y} R (f : Y -> X) : AlmostFull R -> AlmostFull (Wf.MR R f). Proof. intros [p sec]. exists (cofmap f p). apply (cofmap_secures f p _ sec). Defined. Fixpoint oplus_nullary {X:Type} (p:WFT X) (q:WFT X) := match p with | ZT => q | SUP f => SUP (fun x => oplus_nullary (f x) q) end. Lemma oplus_nullary_sec_intersection {X} (p : WFT X) (q: WFT X) (C : X → X → Prop) (A : Prop) (B : Prop) : SecureBy (fun y z => C y z ∨ A) p → SecureBy (fun y z => C y z ∨ B) q → SecureBy (fun y z => C y z ∨ (A ∧ B)) (oplus_nullary p q). Proof. revert C q. induction p; simpl; intros; auto. induction q in C, H, H0 |- *; simpl in *; intuition. specialize (H x y). specialize (H0 x y). intuition. specialize (H1 x (fun y z => (C y z \/ A /\ B) \/ C x y)). simpl in *. eapply SecureBy_mon. 2:eapply H1. simpl. intuition. intuition. firstorder auto. eapply SecureBy_mon. 2:eapply H0. simpl. firstorder auto. specialize (H x (fun y z => C y z \/ C x y)). simpl in *. eapply SecureBy_mon. 2:eapply H. all:simpl. intros. firstorder auto. eapply SecureBy_mon. 2:eapply H0. simpl. intros. intuition. eapply SecureBy_mon. 2:eapply H1. simpl. intros. intuition. Qed. Section OplusUnary. Context {X : Type}. 
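(* Note (informal summary of the construction below): [oplus_unary] and
   [oplus_binary] merge two witness trees.  They are defined by well-founded
   recursion on the lexicographic product of [WFT_subterm] with itself, so
   each [SUP] case may recurse either into a subtree of the first argument or
   into a subtree of the second.  Together with [oplus_nullary] above, they
   give closure of almost-full relations under intersection (see
   [oplus_binary_sec_intersection] and [af_interesection] further down). *)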
(* Nested version is harder to work with *) (* Equations oplus_unary (p : WFT X) (q : WFT X) : WFT X by struct p := *) (* oplus_unary ZT q := q; *) (* oplus_unary (SUP f) g := SUP (fun x => oplus_unary_right g x) *) (* where oplus_unary_right (q : WFT X) (x : X) : WFT X by struct q := *) (* { oplus_unary_right ZT x := f x; *) (* oplus_unary_right (SUP g) x := *) (* oplus_nullary (oplus_unary (f x) (SUP g)) *) (* (oplus_unary_right (g x) x) }. *) Equations? oplus_unary (p : WFT X) (q : WFT X) : WFT X by wf (p, q) (Subterm.lexprod _ _ WFT_subterm WFT_subterm) := oplus_unary ZT q := q; oplus_unary p ZT := p; oplus_unary (SUP f) (SUP g) := SUP (fun x => oplus_nullary (oplus_unary (f x) (SUP g)) (oplus_unary (SUP f) (g x))). Proof. repeat constructor. constructor 2. repeat constructor. Defined. Equations? oplus_binary (p : WFT X) (q : WFT X) : WFT X by wf (p, q) (Subterm.lexprod _ _ WFT_subterm WFT_subterm) := oplus_binary ZT q := q; oplus_binary p ZT := p; oplus_binary (SUP f) (SUP g) := SUP (fun x => oplus_unary (oplus_binary (f x) (SUP g)) (oplus_binary (SUP f) (g x))). Proof. repeat constructor. constructor 2. repeat constructor. Defined. End OplusUnary. Set Firstorder Solver auto. (* Lemma oplus_unary_sec_intersection {X} (p q : WFT X) *) (* (C : X -> X -> Prop) (A B : X -> Prop) : *) (* SecureBy (fun y z => C y z \/ A y) p -> *) (* SecureBy (fun y z => C y z \/ B y) q -> *) (* SecureBy (fun y z => C y z \/ (A y /\ B y)) (oplus_unary p q). *) (* Proof. *) (* intros. *) (* revert H H0. revert q C. induction p. *) (* intros. simp oplus_unary. eapply SecureBy_mon; [|eapply H0]. firstorder. *) (* simpl. induction q. simpl. intros. *) (* eapply SecureBy_mon; [|eapply H0]. simpl. firstorder auto. *) (* intros. *) (* simp oplus_unary. simpl. intros x. *) (* eapply SecureBy_mon; [|eapply (oplus_nullary_sec_intersection _ _ _ (A x) (B x))]. simpl. *) (* intros. destruct H3; [|intuition]. rewrite <- or_assoc. left. eapply H3. *) (* - simpl. simpl in H2. *) (* eapply SecureBy_mon; [|eapply (H _ _ (fun y z => C y z \/ C x y \/ A x))]; auto. simpl; intros. *) (* intuition auto. *) (* eapply SecureBy_mon; [|eapply H1]; simpl. intros. intuition auto. *) (* simpl. intros. *) (* eapply SecureBy_mon; [|eapply H2]; simpl. intros. intuition auto. *) (* - simpl. eapply SecureBy_mon; [|eapply (H0 _ (fun y z => C y z \/ C x y \/ B x))]; simpl. intuition auto. *) (* intuition. simpl in H2. eapply SecureBy_mon; [|eapply H1]; simpl. intuition auto. *) (* eapply SecureBy_mon; [|eapply H2]; simpl. intros. intuition auto. *) (* Defined. *) Lemma oplus_unary_sec_intersection {X} (p q : WFT X) (C : X -> X -> Prop) (A B : X -> Prop) : SecureBy (fun y z => C y z \/ A y) p -> SecureBy (fun y z => C y z \/ B y) q -> SecureBy (fun y z => C y z \/ (A y /\ B y)) (oplus_unary p q). Proof. funelim (oplus_unary p q); simpl; intros. - eapply SecureBy_mon; [|eapply H0]; simpl; firstorder. - eapply SecureBy_mon; [|eapply H]. simpl; firstorder. - eapply SecureBy_mon. 2:eapply (oplus_nullary_sec_intersection _ _ _ (A x) (B x)). simpl. intros. destruct H3; [|intuition auto]. rewrite <- or_assoc. left. eapply H3. -- simpl. eapply SecureBy_mon; [|eapply (H _ (fun y z => C y z \/ C x y \/ A x) A B)]. simpl. intuition auto. eapply SecureBy_mon; [|eapply H1]; simpl. intros. intuition auto. simpl. intros. eapply SecureBy_mon; [|eapply H2]; simpl. intros. intuition auto. -- simpl. specialize (H0 x). eapply SecureBy_mon; [|eapply (H0 (fun y z => C y z \/ C x y \/ B x) A B)]; simpl. intuition auto. intuition. simpl in H2. 
eapply SecureBy_mon; [|eapply H1]; simpl. intuition auto. eapply SecureBy_mon; [|eapply H2]; simpl. intros. intuition auto. Qed. Lemma oplus_binary_sec_intersection' {X} (p q : WFT X) (C : X -> X -> Prop) (A B : X -> X -> Prop) : SecureBy (fun y z => C y z \/ A y z) p -> SecureBy (fun y z => C y z \/ B y z) q -> SecureBy (fun y z => C y z \/ (A y z /\ B y z)) (oplus_binary p q). Proof. funelim (oplus_binary p q); simpl; intros. eapply SecureBy_mon. 2:eapply H0. simpl. firstorder. eapply SecureBy_mon; [|eapply H]. simpl; firstorder. eapply SecureBy_mon. 2:eapply (oplus_unary_sec_intersection _ _ _ (A x) (B x)). simpl. intros. destruct H3; [|intuition auto]. rewrite <- or_assoc. left. eapply H3. - simpl. eapply SecureBy_mon; [|eapply (H _ (fun y z => C y z \/ C x y \/ A x y) A B)]. simpl. intuition auto. eapply SecureBy_mon; [|eapply H1]; simpl. intros. intuition auto. simpl. intros. eapply SecureBy_mon; [|eapply H2]; simpl. intros. intuition auto. - simpl. eapply SecureBy_mon; [|eapply (H0 x (fun y z => C y z \/ C x y \/ B x y) A B)]; simpl. intuition auto. intuition. simpl in H2. eapply SecureBy_mon; [|eapply H1]; simpl. intuition auto. eapply SecureBy_mon; [|eapply H2]; simpl. intros. intuition auto. Defined. Lemma oplus_binary_sec_intersection {X} (p q : WFT X) (A B : X -> X -> Prop) : SecureBy A p -> SecureBy B q -> SecureBy (fun y z => A y z /\ B y z) (oplus_binary p q). Proof. revert p q A B; intros p q. funelim (oplus_binary p q); simpl; intros. eapply SecureBy_mon. 2:eapply H0. simpl. firstorder. eapply SecureBy_mon; [|eapply H]. simpl; firstorder. eapply SecureBy_mon. 2:eapply (oplus_unary_sec_intersection _ _ _ (A x) (B x)). simpl. intros. destruct H3; [|intuition auto]. left. eapply H3. - simpl. eapply SecureBy_mon; [|eapply (H _ (fun y z => A y z \/ A x y) B)]; simpl. unfold sec_disj. intuition auto. eapply SecureBy_mon; [|eapply H1]; simpl. intros. intuition auto. simpl. intros. eapply SecureBy_mon; [|eapply H2]; simpl. intros. intuition auto. - simpl. specialize (H0 x). eapply SecureBy_mon; [|eapply (H0 A (fun y z => B y z \/ B x y))]; simpl. intuition auto. intuition. simpl in H2. apply H2. Defined. Definition inter_rel {X : Type} (A B : X -> X -> Prop) := fun x y => A x y /\ B x y. Corollary af_interesection {X : Type} (A B : X -> X -> Prop) : AlmostFull A -> AlmostFull B -> AlmostFull (inter_rel A B). Proof. intros [pa Ha] [pb Hb]. exists (oplus_binary pa pb). now apply oplus_binary_sec_intersection. Defined. (* Non-functional construction in intuition auto! *) (* Lemma oplus_unary_sec_intersection' {X} (p q : WFT X) *) (* (C : X -> X -> Prop) (A B : X -> Prop) : *) (* SecureBy (fun y z => C y z \/ A y) p -> *) (* SecureBy (fun y z => C y z \/ B y) q -> *) (* SecureBy (fun y z => C y z \/ (A y /\ B y)) (oplus_unary p q). *) (* Proof. *) (* revert p q C; intros p q. funelim (oplus_unary p q); simpl; intros. *) (* eapply SecureBy_mon. 2:eapply H0. simpl. firstorder. *) (* eapply SecureBy_mon; [|eapply H]. simpl; firstorder. *) (* eapply SecureBy_mon. 2:eapply (oplus_nullary_sec_intersection _ _ _ (A x) (B x)). simpl. *) (* intros. destruct H3; [|intuition auto]. rewrite <- or_assoc. left. eapply H3. *) (* - simpl. *) (* eapply SecureBy_mon; [|eapply (H _ (fun y z => C y z \/ C x y \/ A x))]. simpl. *) (* clear H H0. (* BUG *) intuition auto. *) (* eapply SecureBy_mon; [|eapply H1]; simpl. intros. intuition auto. *) (* simpl. intros. *) (* eapply SecureBy_mon; [|eapply H2]; simpl. intros. intuition auto. *) (* - simpl. 
eapply SecureBy_mon; [|eapply (H0 _ (fun y z => C y z \/ C x y \/ B x))]; simpl. intuition auto. *) (* intuition. simpl in H2. eapply SecureBy_mon; [|eapply H1]; simpl. intuition auto. *) (* eapply SecureBy_mon; [|eapply H2]; simpl. intros. intuition auto. *) (* Qed. *) Definition af_bool : AlmostFull (@eq bool). Proof. exists (SUP (fun _ => SUP (fun _ => ZT))). simpl. intros x y z w. destruct x, y, z, w; intuition. Defined. Definition product_rel {X Y : Type} (A : X -> X -> Prop) (B : Y -> Y -> Prop) := fun x y => A (fst x) (fst y) /\ B (snd x) (snd y). #[export] Instance af_product {X Y : Type} (A : X -> X -> Prop) (B : Y -> Y -> Prop) : AlmostFull A -> AlmostFull B -> AlmostFull (product_rel A B). Proof. intros. pose (af_interesection (Wf.MR A fst) (Wf.MR B snd)). assert (relation_equivalence (inter_rel (Wf.MR A fst) (Wf.MR B snd)) (product_rel A B)). repeat red; intuition. rewrite <- H1. apply a; typeclasses eauto. Defined. Definition T (x y : nat * nat) : Prop := (fst x = snd y /\ snd x < snd y) \/ (fst x = snd y /\ snd x < fst y). Definition Tl (x y : nat * (nat * unit)) : Prop := (fst x = fst (snd y) /\ fst (snd x) < fst (snd y)). Definition Tr (x y : nat * (nat * unit)) : Prop := (fst x = fst (snd y) /\ fst (snd x) < fst y). Ltac destruct_pairs := repeat match goal with [ x : _ * _ |- _ ] => let x0 := fresh x in let x1 := fresh x in destruct x as [x0 x1]; simpl in * | [ x : exists _ : _, _ |- _ ] => destruct x | [ x : _ /\ _ |- _ ] => destruct x end. Require Import ssreflect. Section SCT. Definition subgraph k k' := fin k -> option (bool * fin k'). Definition graph k := subgraph k k. Definition strict {k} (f : fin k) := Some (true, f). Definition large {k} (f : fin k) := Some (false, f). Declare Scope fin_scope. Delimit Scope fin_scope with fin. Bind Scope fin_scope with fin. Notation "0" := fz : fin_scope. Notation "1" := (fs 0) : fin_scope. Open Scope fin_scope. (* bug scopes not handled well *) Equations T_graph_l (x : fin 2) : option (bool * fin 2) := { T_graph_l fz := large (fs fz); T_graph_l (fs fz) := strict (fs fz) }. Equations T_graph_r (x : fin 2) : option (bool * fin 2) := { T_graph_r fz := large (fs fz); T_graph_r (fs fz) := strict fz }. Equations graph_compose {k} (g1 g2 : graph k) : graph k := graph_compose g1 g2 arg0 with g1 arg0 := { | Some (weight, arg1) with g2 arg1 := { | Some (weight', arg2) => Some (weight || weight', arg2); | None => None }; | None => None }. Infix "⋅" := graph_compose (at level 90). Eval compute in (T_graph_l ⋅ T_graph_r) fz. Equations k_tuple_type (k : nat) : Type := k_tuple_type 0 := unit; k_tuple_type (S n) := nat * k_tuple_type n. Equations k_tuple_val {k : nat} (f : fin k) (t : k_tuple_type k) : nat := k_tuple_val fz (x, _) := x; k_tuple_val (fs f) (_, t) := k_tuple_val f t. Equations k_related (k k' : nat) (G : subgraph k k') : k_tuple_type k -> k_tuple_type k' -> Prop := k_related 0 k' g := fun _ _ => True; k_related (S k) k' g with g fz := { | Some (weight, d) := fun x y => (if weight then k_tuple_val fz x < k_tuple_val d y else (Peano.le (k_tuple_val fz x) (k_tuple_val d y))) /\ k_related k k' (fun f => g (fs f)) (snd x) y; | None => fun _ _ => False }. Definition graph_relation {k : nat} (G : graph k) : relation (k_tuple_type k) := k_related k k G. Lemma k_related_spec {k k' : nat} (G : subgraph k k') : forall x y, k_related k k' G x y <-> (forall f : fin k, match G f with | Some (weight, d) => if weight then k_tuple_val f x < k_tuple_val d y else Peano.le (k_tuple_val f x) (k_tuple_val d y) | None => False end). Proof. 
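(* Informal reading of the statement above: an arc [Some (true, d)] at source
   index [f] constrains coordinate [f] of the first tuple to lie strictly
   below coordinate [d] of the second; with weight [false] the constraint is
   only [<=]; and any [None] arc collapses the whole relation to [False]. *)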
eapply k_related_elim. split; auto. intros []. intros f. depelim f. intros n k'' g b f. intros H. intros gz [x rx] y. specialize (H (x, rx) y rx y). simpl. rewrite H. clear H. split. + intros [xlt relr]. intros f'. depelim f'. rewrite gz. apply xlt. apply relr. + intros Hf. split. specialize (Hf fz). rewrite gz in Hf. apply Hf. intros f'. apply Hf. + intros n k'' g gf x y. split. intros []. intros f. specialize (f fz). now rewrite gf in f. Qed. Lemma graph_relation_spec {k : nat} (G : graph k) : forall x y, graph_relation G x y <-> (forall f : fin k, match G f with | Some (weight, d) => if weight then k_tuple_val f x < k_tuple_val d y else Peano.le (k_tuple_val f x) (k_tuple_val d y) | None => False end). Proof. unfold graph_relation. intros x y. now rewrite k_related_spec. Qed. Definition approximates {k} (G : graph k) (R : relation (k_tuple_type k)) := inclusion _ R (graph_relation G). Eval compute in k_tuple_type 2. Example approximates_T_l : @approximates 2 T_graph_l Tl. Proof. intros x y Tlr. red in x, y, Tlr. destruct_pairs. unfold graph_relation. simp k_related T_graph_l; simpl. lia. Qed. Example approximates_T_r : @approximates 2 T_graph_r Tr. Proof. intros x y Tlr. red in x, y, Tlr. destruct_pairs. subst. unfold graph_relation. simp k_related T_graph_l; simpl. intuition. Qed. Lemma compose_approximates {k} (G0 G1 : graph k) (R0 R1 : relation (k_tuple_type k)) : approximates G0 R0 -> approximates G1 R1 -> approximates (G0 ⋅ G1) (compose_rel R0 R1). Proof. unfold approximates. intros ag0 ag1. intros x z [y [Hxy Hyz]]. rewrite graph_relation_spec. intros f. specialize (ag0 _ _ Hxy). specialize (ag1 _ _ Hyz). rewrite -> graph_relation_spec in ag0, ag1. specialize (ag0 f). funelim (graph_compose G0 G1 f). now rewrite Heq in ag0. rewrite Heq0 in ag0. specialize (ag1 arg1). rewrite Heq in ag1. destruct weight, weight'; simpl; try lia. specialize (ag1 arg1). now rewrite Heq in ag1. Qed. Equations fin_union {A n} (f : fin n -> relation A) : relation A := fin_union (n:=0) f := fun _ _ => False; fin_union (n:=S n) f := fun x y => f fz x y \/ fin_union (fun f' => f (fs f')) x y. Lemma fin_union_spec {A n} (f : fin n -> relation A) : forall x y, fin_union f x y <-> exists k, f k x y. Proof. intros x y. funelim (fin_union f). split. intros []. intros [k _]. depelim k. split. intros [Hfz|Hfs]. now exists fz. specialize (H x y x y). rewrite -> H in Hfs. destruct Hfs. now exists (fs x0). intros [k Hk]. depelim k. now left. right. rewrite (H x y). now exists k. Qed. Equations fin_all k (p : fin k -> bool) : bool := fin_all 0 _ := true; fin_all (S k) p := p fz && fin_all k (fun f => p (fs f)). Lemma fin_all_spec k (p : fin k -> bool) : reflect (forall f, p f = true) (fin_all k p). Proof. induction k; simp fin_all. constructor. intros f; depelim f. destruct (p fz) eqn:pfz. simpl. specialize (IHk (fun f => p (fs f))). simpl in IHk. destruct IHk; constructor. intros f; depelim f; auto. intro Hf. apply n. intros f'. apply Hf. simpl. constructor. intros H. specialize (H fz). rewrite pfz in H. discriminate. Qed. Definition graph_eq {k} (g g' : graph k) : bool := fin_all k (fun f => eqb (g f) (g' f)). Equations power_graph_n {k} (n : nat) (g : graph k) : graph k := power_graph_n 0 g := g; power_graph_n (S n) g := power_graph_n n g ⋅ g. Lemma approximates_power {k} (n : nat) (g : graph k) T : approximates g T -> approximates (power_graph_n n g) (power n T). Proof. induction n; simp power power_graph_n; auto. intros. specialize (IHn H). now apply compose_approximates. Qed. 
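(** A small added sanity check (not part of the original development):
    unfolding the defining equations, the first power of a graph is its
    composition with itself.  We write [S O] rather than the numeral [1],
    which is rebound to [fs 0] in [fin_scope] above. *)
Example power_graph_n_step_example {k} (g : graph k) :
  power_graph_n (S O) g = (power_graph_n O g ⋅ g).
Proof. now simp power_graph_n. Qed.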
Equations list_union {A} (rs : list (relation A)) : relation A := list_union nil := fun _ _ => False; list_union (cons r rs) := fun x y => r x y \/ list_union rs x y. Definition approximates_family {k} (graphs : list (graph k)) (R : relation (k_tuple_type k)) := inclusion _ R (list_union (List.map graph_relation graphs)). Equations fin_all_compose {k n} (g : graph k) (p : fin n -> graph k) : fin n -> graph k := fin_all_compose g p f := p f ⋅ g. Equations compose_family {k} (g : list (graph k)) (g' : list (graph k)) : list (graph k) := compose_family nil _ := nil; compose_family (cons g gs) g' := app (map (fun g' => g ⋅ g') g') (compose_family gs g'). Definition is_transitive_closure {k} (gs : list (graph k)) (l : list (graph k)) : Prop := (forall g, In g gs -> In g l) /\ forall g g', In g l /\ In g' l -> In (g ⋅ g') l. Definition graphs_relation {k} (gs : list (graph k)) : relation (k_tuple_type k) := list_union (map graph_relation gs). Lemma list_union_app {A} (rs rs' : list (relation A)) : forall x y, list_union (rs ++ rs') x y <-> list_union rs x y \/ list_union rs' x y. Proof. induction rs; intros; simpl; simp list_union; intuition. rewrite -> IHrs in H0. intuition. Qed. Equations map_k_tuple k (p : k_tuple_type k) (f : fin k -> nat) : k_tuple_type k := map_k_tuple 0 p f := p; map_k_tuple (S n) (x, xs) f := (f fz, map_k_tuple n xs (fun i => f (fs i))). Lemma graph_relation_compose {k} x y (g g' : graph k) z : graph_relation g x z -> graph_relation g' z y -> graph_relation (g ⋅ g') x y. Proof. intros gzx g'zy. rewrite -> graph_relation_spec in gzx, g'zy |- *. intros f. specialize (gzx f). simp graph_compose. destruct (g f) as [[[] d]|]; simpl. specialize (g'zy d); destruct (g' d) as [[[] d']|]; simp graph_compose; simpl; lia. specialize (g'zy d). destruct (g' d) as [[[] d']|]; simpl in *. lia. lia. lia. lia. Qed. Lemma union_graph_relation_compose {k} x y a (g : list (graph k)) z: graph_relation a x z -> list_union (map graph_relation g) z y -> list_union (map (fun x => graph_relation (a ⋅ x)) g) x y. Proof. induction g; simpl. 1:firstorder. intros Hxz Hzy. destruct Hzy. specialize (graph_relation_compose x y a a0). left; firstorder. firstorder. Qed. Lemma graphs_relation_compose {k} (g g' : list (graph k)) x y z : graphs_relation g x z -> graphs_relation g' z y -> graphs_relation (compose_family g g') x y. Proof. intros gxz gzy. induction g in g', x, y, z, gxz, gzy |- *; simp compose_family; auto. unfold graphs_relation in gxz. simpl in gxz. destruct gxz. unfold graphs_relation. unfold compose_family. rewrite -> map_app, map_map, list_union_app. left. apply (union_graph_relation_compose _ _ _ _ z H gzy). specialize (IHg g' x y z H gzy). unfold graphs_relation. rewrite -> map_app, map_map, list_union_app. right; auto. Qed. Lemma in_transitive_closure {k} (g : list (graph k)) (l : list (graph k)) (T : relation (k_tuple_type k)) (approx : approximates_family g T) : is_transitive_closure g l -> forall i, exists fam : list (graph k), (forall g, In g fam -> In g l) /\ approximates_family fam (power i T). Proof. unfold is_transitive_closure. intros. destruct H as [inS inScomp]. (* assert (exists G, approximates G (power i (fin_union T)) /\ In G l). *) induction i in |- *; simp power. unfold approximates. - exists g. intuition. - destruct IHi as [famgi [Ingi gixz]]. exists (compose_family famgi g). intuition. + revert H. clear -inS inScomp Ingi. induction famgi. simpl. intros []. simpl. rewrite in_app_iff in_map_iff. intros [[x [<- Inx]]| Ing]. apply inScomp. intuition auto. apply Ingi. 
constructor. auto. apply IHfamgi; auto. intros. apply Ingi; eauto with datatypes. + red. intros x y [z [powxz Tzy]]. do 2 red in approx, gixz. specialize (gixz _ _ powxz). specialize (approx _ _ Tzy). eapply graphs_relation_compose; intuition. Qed. Equations fin_list {n A} (f : fin n -> A) : list A := fin_list (n:=0) f := nil; fin_list (n:=S n) f := f fz :: fin_list (fun i => f (fs i)). Lemma size_change_wf {k} (n : nat) (T : fin n -> relation (k_tuple_type k)) (graphs : list (graph k)) (approx : approximates_family graphs (fin_union T)) (S : list (graph k)) (Strans : is_transitive_closure graphs S) (R : relation (k_tuple_type k)) (AF : AlmostFull R) : (forall G, In G S -> forall x y, graph_relation G x y /\ R y x -> False) -> well_founded (fin_union T). Proof. intros H. apply (af_wf _ R). intros x y [Txy Ryx]. rewrite <- clos_trans_t1n_iff in Txy. apply clos_trans_power in Txy as [k' Tkxy]. red in Strans. destruct (in_transitive_closure graphs S (fin_union T) approx Strans k') as [g' [Ings Hg]]. apply Hg in Tkxy. clear Hg. induction g'; simpl in Tkxy. elim Tkxy. destruct Tkxy. specialize (Ings a). forward Ings by constructor; auto. now apply (H a Ings x y). apply IHg'; auto. intros. apply Ings; intuition. Defined. Lemma size_change_wf_fam {k} (n : nat) (T : fin n -> relation (k_tuple_type k)) (graphs : fin n -> graph k) (approx : forall f, approximates (graphs f) (T f)) (S : list (graph k)) (Strans : is_transitive_closure (fin_list graphs) S) (R : relation (k_tuple_type k)) (AF : AlmostFull R) : (forall G, In G S -> forall x y, graph_relation G x y /\ R y x -> False) -> well_founded (fin_union T). Proof. intros H. apply (af_wf _ R). intros x y [Txy Ryx]. rewrite <- clos_trans_t1n_iff in Txy. apply clos_trans_power in Txy as [k' Tkxy]. red in Strans. assert(approximates_family (fin_list graphs) (fin_union T)). { clear -approx. induction n. simpl. red. auto. red. simpl. auto. red. intros x y Rxy. specialize (IHn (fun f => T (fs f)) (fun f => graphs (fs f)) (fun f => approx (fs f))). do 2 red in IHn. specialize (IHn x y). destruct Rxy. simp fin_list. simpl. left. now apply approx. intuition. simp fin_list. simpl. intuition. } destruct (in_transitive_closure (fin_list graphs) S (fin_union T) H0 Strans k') as [g' [Ings Hg]]. apply Hg in Tkxy. clear Hg. induction g'; simpl in Tkxy. elim Tkxy. destruct Tkxy. specialize (Ings a). forward Ings by constructor; auto. now apply (H a Ings x y). apply IHg'; auto. intros. apply Ings; intuition. Defined. Equations TI_graph k : graph k := TI_graph 0 := λ{ | ! } ; TI_graph (S n) := fun f => Some (false, f). Lemma TI_compose k (G : graph k) : forall f, (G ⋅ TI_graph k) f = G f. Proof. induction k. unfold TI_graph. do 2 red in G. intros f; depelim f. intros f. depelim f. simp TI_graph graph_compose. destruct (G fz) as [[weight d]|]; simpl; try easy. now rewrite orb_false_r. simp TI_graph graph_compose. destruct (G (fs f)) as [[weight d]|]; simpl; trivial. now rewrite orb_false_r. Qed. Definition TI k : relation (k_tuple_type k) := graph_relation (TI_graph k). Equations intersection k : relation (k_tuple_type k) := intersection 0 := fun x y => True; intersection (S n) := fun x y => Nat.le (fst x) (fst y) /\ intersection n (snd x) (snd y). Lemma TI_intersection_equiv k : relation_equivalence (TI k) (intersection k). induction k. - intros x y. red. split. intros []. exact I. intros. exact I. - intros [x rx] [y ry]. simpl. split. + unfold TI. intros Hg. pose proof Hg. rewrite -> graph_relation_spec in H. intros. pose (H fz). simpl in y0. intuition. 
assert (graph_relation (TI_graph k) rx ry). rewrite graph_relation_spec. intros. clear y0. specialize (H (fs f)). simpl in H. unfold TI_graph. destruct k. depelim f. auto. do 2 red in IHk. simpl in IHk. rewrite <- IHk. apply H0. + intros [Hle Hi]. unfold TI. rewrite graph_relation_spec. intros. depelim f. simpl. auto. simpl. do 2 red in IHk. simpl in IHk. rewrite <- IHk in Hi. red in Hi. rewrite -> graph_relation_spec in Hi. clear -Hi. induction k. depelim f. specialize (Hi f). simpl in Hi. auto. Qed. #[global] Instance TI_AlmostFull k : AlmostFull (TI k). Proof. rewrite TI_intersection_equiv. induction k. simpl. red. red. exists ZT. simpl. intros. exact I. simpl. apply af_interesection. apply (AlmostFull_MR Nat.le). apply almost_full_le. apply (AlmostFull_MR (intersection k)). apply IHk. Qed. Lemma TI_compose' k (G : graph k) : (G ⋅ TI_graph k) = G. Proof. extensionality f. apply TI_compose. Qed. Lemma sct_power_check {k} G (T : relation (k_tuple_type k)) : approximates G T -> (exists n f, power_graph_n n G f = Some (true, f)) -> (forall x y, T x y -> TI _ y x -> False). Proof. intros approx [n [f eqpow]] x y Txy TIyx. assert (compose_rel T (TI k) x x). exists y; easy. assert (power n (compose_rel T (TI k)) x x). { clear -H. induction n; simp power; auto. exists x. intuition. } pose (compose_approximates G (TI_graph k) T (TI k)). forward a; auto. forward a; auto. unfold TI. red. intuition. rewrite TI_compose' in a. apply (approximates_power n) in a. specialize (a x x). specialize (a H0). rewrite -> graph_relation_spec in a. specialize (a f). rewrite eqpow in a. lia. Qed. Theorem size_change_termination {k} (n : nat) (T : fin n -> relation (k_tuple_type k)) (graphs : fin n -> graph k) (approx : forall f, approximates (graphs f) (T f)) (S : list (graph k)) (Strans : is_transitive_closure (fin_list graphs) S) (haspow : forall G, In G S -> exists n f, power_graph_n n G f = Some (true, f)) : well_founded (fin_union T). Proof. apply size_change_wf_fam with graphs S (TI k); auto. - apply TI_AlmostFull. - intros. specialize (haspow G H). destruct Strans. refine (sct_power_check G (graph_relation G) _ haspow x y _ _). red. firstorder. intuition. intuition. Qed. Import ListNotations. Inductive trans_clos_answer (k : nat) : Set := | OutOfFuel | Finished (l : list (graph k)). Derive NoConfusion for trans_clos_answer. Section find_opt. Context {A : Type}. Equations find_opt (l : list A) (f : A -> option A) : option A := find_opt [] f := None; find_opt (x :: xs) f with f x := { | Some y => Some y; | None => find_opt xs f }. End find_opt. Lemma find_opt_spec {A} (l : list A) (f : A -> option A) : match find_opt l f with | Some x => exists a, In a l /\ f a = Some x | None => forall a, In a l -> f a = None end. Proof. funelim (find_opt l f); cbn; intros. elim H. exists x; simpl; intuition eauto. destruct (find_opt xs f); now firstorder subst. Qed. 
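(** A small corollary of [find_opt_spec], added here for illustration: a
    successful search returns the image under [f] of some element of the
    list. *)
Lemma find_opt_in {A} (l : list A) (f : A -> option A) (x : A) :
  find_opt l f = Some x -> exists a, In a l /\ f a = Some x.
Proof.
  intros Heq. pose proof (find_opt_spec l f) as Hspec.
  rewrite Heq in Hspec. exact Hspec.
Qed.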
Equations compute_transitive_closure {k} (n : nat) (gs : list (graph k)) : trans_clos_answer k by struct n := compute_transitive_closure 0 _ := OutOfFuel _; compute_transitive_closure (S n) gs := aux gs [] where aux (l : list (graph k)) (acc : list (graph k)) : trans_clos_answer k by struct l := aux nil acc := Finished _ acc; aux (g :: gs') acc := with_new_candidate new_candidate where with_new_candidate : option (graph k) -> trans_clos_answer k := { | Some newg => compute_transitive_closure n (newg :: g :: gs' ++ acc); | None => aux gs' (g :: acc) } where new_candidate : option (graph k) := new_candidate := let gs'' := g :: acc in find_opt gs'' (fun g' => let gcomp := g' ⋅ g in (* if eqb g gcomp then None else *) if List.existsb (eqb gcomp) gs'' then let gcomp' := g ⋅ g' in if List.existsb (eqb gcomp') gs'' then None else Some gcomp' else Some gcomp). (* Hint Extern 10 => progress simpl : rec_decision. *) (* FIXME bug when using with *) (* Equations compute_transitive_closure {k} (n : nat) (gs : list (graph k)) : trans_clos_answer k *) (* by wf n lt := *) (* compute_transitive_closure 0 _ := OutOfFuel _; *) (* compute_transitive_closure (S n) gs := aux gs [] *) (* where aux (l : list (graph k)) (acc : list (graph k)) : trans_clos_answer k by struct l := *) (* aux nil acc := Finished _ acc; *) (* aux (g :: gs') acc := *) (* let gs'' := g :: gs' ++ acc in *) (* let new_candidate := *) (* find_opt gs'' (fun g' => *) (* let gcomp := g' ⋅ g in *) (* if eqb g gcomp then None else *) (* if List.existsb (eqb gcomp) gs'' then *) (* let gcomp' := g ⋅ g' in *) (* if List.existsb (eqb gcomp') gs'' then None *) (* else Some gcomp' *) (* else Some gcomp) *) (* in *) (* match new_candidate with *) (* | Some newg => compute_transitive_closure n (newg :: gs'') *) (* | None => aux gs' (g :: acc) *) (* end. *) Definition becoming_transitive_closure {k} (acc : list (graph k)) (l : list (graph k)) : Prop := (forall g, In g acc -> In g l) /\ forall g g', In g acc -> In g' acc -> In (g ⋅ g') l. Definition transitive_closure_of {k} (g : graph k) (l : list (graph k)) : Prop := forall g', In g' l -> In (g ⋅ g') l /\ In (g' ⋅ g) l. Definition incl_transitive_closure_of {k} (l : list (graph k)) (l' : list (graph k)) : Prop := forall trl, is_transitive_closure l trl -> incl l' trl. Definition new_candid k n gs g l acc := (compute_transitive_closure_clause_2_aux_clause_2_new_candidate (@compute_transitive_closure) k n gs (compute_transitive_closure_clause_2_aux (@compute_transitive_closure) k n gs) g l acc). Notation aux := compute_transitive_closure_clause_2_aux. Definition with_new_candid k n gs g l acc := (compute_transitive_closure_clause_2_aux_clause_2_with_new_candidate (@compute_transitive_closure) k n gs (aux (@compute_transitive_closure) k n gs) g l acc). Lemma is_transitive_closure_incl: ∀ (k : nat) (gs : list (graph k)) (g : graph k) (l acc l' : list (graph k)), is_transitive_closure gs l' → incl_transitive_closure_of gs ((g :: l) ++ acc) → incl_transitive_closure_of ((g :: l) ++ acc) l'. Proof. intros k gs g l acc l' H0 H3. red in H3. pose proof (H3 _ H0). red. intros. assert (is_transitive_closure (g :: l ++ acc) l'). red. intuition. red in H0. intuition. red in H1, H2 |- *. intuition. Admitted. Ltac apply_find_opt_spec := match goal with | |- context [find_opt ?g ?f] => pose proof (find_opt_spec g f) end. Lemma existsb_spec {A} (p : A -> bool) l : reflect (exists x, In x l /\ p x = true) (existsb p l). Proof. destruct existsb eqn:Heq. constructor. now apply existsb_exists in Heq. constructor. intro. 
apply existsb_exists in H. rewrite Heq in H; discriminate. Qed. Lemma eqb_refl {A} `{E:Eq A} (a : A) : eqb a a = true. Proof. destruct (eqb_spec a a); intuition. Qed. Lemma eqb_eq {A} `{E:Eq A} (a b : A) : eqb a b = true <-> a = b. Proof. destruct (eqb_spec a b); intuition. discriminate. Qed. Lemma eqb_neq {A} `{E:Eq A} (a b : A) : eqb a b = false <-> a <> b. Proof. destruct (eqb_spec a b); intuition. Qed. Lemma incl_transitive_closure: ∀ (k : nat) (l l' trl : list (graph k)), is_transitive_closure l' trl -> incl l l' -> incl_transitive_closure_of l l' → is_transitive_closure l trl. Proof. intros. red in H, H0, H1 |- *. intuition. Qed. Lemma incl_switch_head {A} (x : A) (y l r : list A) : incl (x :: y ++ l) r -> incl (y ++ x :: l) r. Proof. unfold incl in *. intuition auto. specialize (H a). apply H. simpl in *; rewrite -> in_app_iff in *. simpl in *. intuition auto. Qed. Lemma incl_switch_head' {A} (x : A) (y l r : list A) : incl r (x :: y ++ l) -> incl r (y ++ x :: l). Proof. unfold incl in *. intuition. specialize (H a H0). simpl in *; rewrite -> in_app_iff in *. simpl in *. intuition auto. Qed. Hint Resolve incl_switch_head' : core. Lemma tr_clos_incl {k} {l l' : list (graph k)} : is_transitive_closure l l' -> incl l l'. Proof. unfold is_transitive_closure. intuition. Qed. Hint Resolve tr_clos_incl : core. Lemma becoming_empty {k} {l: list (graph k)} : becoming_transitive_closure [] l. Proof. unfold becoming_transitive_closure. intuition. inversion H. inversion H. Qed. Hint Resolve becoming_empty : core. Lemma compute_transitive_closure_spec {k} n (gs : list (graph k)) l : compute_transitive_closure n gs = Finished _ l -> is_transitive_closure gs l. Proof. eapply (compute_transitive_closure_elim (fun k n gs r => forall l, r = Finished k l -> is_transitive_closure gs l) (fun k n gs l acc res => is_transitive_closure acc acc -> forall l', res = Finished k l' -> incl (l ++ acc) l' /\ (incl gs (l ++ acc) -> incl_transitive_closure_of gs (l ++ acc) -> becoming_transitive_closure acc l' -> incl (l ++ acc) l' /\ incl_transitive_closure_of (l ++ acc) l' /\ (* becoming_transitive_closure l l' *) is_transitive_closure gs l')) (fun k n gs g l acc newc => let gs' := (g :: acc) in match newc with | None => is_transitive_closure gs' gs' /\ transitive_closure_of g gs' | Some g' => ~ In g' gs' /\ exists a, In a gs' /\ (g' = (g ⋅ a) \/ g' = (a ⋅ g)) end) (fun k n gs g l acc newc res => forall l', res = Finished k l' -> incl (l ++ acc) l' /\ (match newc with | Some c => incl gs (c :: g :: l ++ acc) /\ incl_transitive_closure_of gs (c :: g :: l ++ acc) | None => incl gs (g :: l ++ acc) /\ incl_transitive_closure_of gs (g :: l ++ acc) /\ transitive_closure_of g (g :: acc) /\ becoming_transitive_closure (g :: acc) l' end -> is_transitive_closure gs l'))); clear. all:try discriminate; eauto. + intros * H l Haux. (* forward H. red. intuition. specialize (H l Haux). apply H. auto with datatypes. rewrite app_nil_r. red. intuition. intuition. + intros * n * tracc l' [= <-]. simpl. split. intuition. intros inclgs incltrgs becacc. intuition. red. intuition. red. intuition. + intros *. simpl. fold (new_candid k n gs g l acc). fold (with_new_candid k n gs g l acc). intros Hcand IH tracc l' Hnewc. split. admit. intros inclgs incltrgs becaccl'. specialize (IH _ Hnewc). destruct IH as [incll' IH]. split. admit. forward IH. ++ destruct new_candid. destruct Hcand as [ng [a [Ina Ha]]]. split. intuition. +++ intros trl Htrl. specialize (incltrgs trl Htrl). intros x Inx. destruct Inx. subst x. 
destruct Ha as [-> | ->]; apply Htrl; intuition. now apply incltrgs. +++ intuition. clear H2. red. split. intros x Inx. simpl in Inx. intuition. subst x. admit. admit. (* destruct Ing' as [<-|Inlacc]. red in becaccl'. *) ++ intuition. clear Hcand. eapply is_transitive_closure_incl; eauto. + intros. remember (g :: acc) as gs''. cbv zeta. apply_find_opt_spec. destruct find_opt. ++ destruct H as [a [Inags'' Ineq]]. subst gs'0. destruct (existsb_spec (eqb (a ⋅ g)) gs''). destruct (existsb_spec (eqb (g ⋅ a)) gs''). all:admit. *) (* discriminate. *) (* noconf Ineq. split. *) (* +++ intros Inga. apply n0. subst gs''. *) (* exists (g ⋅ a). intuition auto. subst. apply eqb_refl. *) (* +++ exists a. intuition. *) (* +++ noconf Ineq. split. *) (* intros Inag. apply n0. exists (a ⋅ g). intuition. apply eqb_refl. *) (* exists a. intuition. *) (* ++ split; intuition. subst gs'0. split; intros g' ?g''; auto. *) (* intros [ing' ing'']. *) (* specialize (H g' ing'). *) (* destruct (existsb_spec (eqb (g' ⋅ g)) gs''). *) (* destruct (existsb_spec (eqb (g ⋅ g')) gs''). *) (* destruct e, e0; destruct_pairs. apply (eqb_eq _ x) in H3. apply (eqb_eq _ x0) in H2. *) (* subst. admit. admit. admit. admit. *) (* + intros * IH * Hf. specialize (IH _ Hf). clear Hf. *) (* split. intros x Inx. destruct IH as [IH _]. intuition. *) (* intros [inclgs incltrgs]. *) (* intuition. eapply incl_transitive_closure; eauto. *) (* + intros * IH * Hf. split. admit. *) (* intros [inclgs [incltrgs [trg trg']]]. *) (* apply IH. admit. auto. *) (* red in trg. now eapply incl_switch_head' in inclgs. *) (* red in incltrgs |- *. intros trl Htrl. specialize (incltrgs _ Htrl). *) (* now apply incl_switch_head in incltrgs. apply trg'. *) Admitted. Definition gn_set : list (graph 2) := [ T_graph_l; T_graph_r ]. Transparent compute_transitive_closure. (* I so wish we could get the scope without everything *) Local Open Scope string_scope. Equations print_fin {k} (f : fin k) : String.string := print_fin fz := "0"; print_fin (fs fz) := "1"; print_fin (fs (fs fz)) := "2"; print_fin (fs (fs (fs fz))) := "3"; print_fin f := "> 3". Equations print_nat (n : nat) : string := print_nat 0 := "0"; print_nat 1 := "1"; print_nat 2 := "2"; print_nat 3 := "3"; print_nat 4 := "4"; print_nat x := "5". Equations print_node {k'} (f : nat) (data : option (bool * fin k')) : string := print_node f None := ""; print_node f (Some (weight, f')) := print_nat f ++ (if weight then "<" else "<=") ++ print_fin f'. Equations print_fin_fn {k k'} (f : fin k -> option (bool * fin k')) : string := print_fin_fn (k := 0) f := ""; print_fin_fn (k := S k) f := print_node (k' - S k) (f fz) ++ " , " ++ print_fin_fn (fun n => f (fs n)). Equations print_graph {k} (f : graph k) : string := print_graph (k := 0) f := "empty"; print_graph (k := S n) f := print_fin_fn f. Equations print_list {A} (f : A -> string) (l : list A) : string := print_list f nil := ""; print_list f (cons a b) := f a ++ " ; " ++ print_list f b. Eval compute in print_list print_graph gn_set. Definition eq_dec_graph {k} : forall x y : graph k, { x = y } + { x <> y }. Proof. intros x y. destruct (eqb x y) eqn:Heq. left. destruct (eqb_spec x y). auto. discriminate. right. destruct (eqb_spec x y). auto. discriminate. auto. Defined. Definition uniquize {k} (l : list (graph k)) := nodup (@eq_dec_graph k) l. Definition T_trans_clos := match compute_transitive_closure 10 gn_set with | Finished l => l | OutOfFuel => gn_set end. (* Eval compute in T_trans_clos. 
*) Eval compute in match compute_transitive_closure 10 gn_set with | Finished l => print_list print_graph (uniquize l) | OutOfFuel => "outoffuel"%string end. End SCT. Eval compute in match compute_transitive_closure 10 gn_set with | Finished l => print_list print_graph (uniquize l) | OutOfFuel => "outoffuel"%string end. Print Assumptions size_change_wf. Print Assumptions size_change_termination. Definition R := product_rel Nat.le Nat.le. Require Import Lia. Derive Signature for clos_trans_1n. Definition antisym {X} (R : X -> X -> Prop) := forall x y, R x y -> R y x -> False. Equations T_rel (f : fin 2) : relation (k_tuple_type 2) := T_rel fz := Tl; T_rel (fs fz) := Tr. Equations T_graphs (f : fin 2) : graph 2 := T_graphs fz := T_graph_l; T_graphs (fs fz) := T_graph_r. Definition gnlex : (nat * nat) -> nat. assert (sct:=size_change_termination 2 T_rel T_graphs). forward sct. intros f; depelim f. apply approximates_T_l. depelim f. apply approximates_T_r. depelim f. specialize (sct T_trans_clos). forward sct. apply (compute_transitive_closure_spec 10). reflexivity. forward sct. intros. simpl in H. intuition; subst G; solve [exists 1; (exists fz ; reflexivity) || (exists (fs fz) ; reflexivity) ]. set (rel :=(Wf.MR (fin_union T_rel) (fun x => (fst x, (snd x, tt))))). assert (WellFounded rel). apply Wf.measure_wf. apply sct. refine (Subterm.FixWf (WF:=H) (fun x => nat) _). refine (fun x => match x as w return ((forall y, rel y w -> nat) -> nat) with | (0, _) => fun _ => 1 | (_, 0) => fun _ => 1 | (S x, S y) => fun frec => frec (S y, y) _ + frec (S y, x) _ end). red. simpl. red. compute. simpl. intuition lia. red. simpl. compute. intuition lia. Defined. Require Import ExtrOcamlBasic. (* Extraction gnlex. Print Assumptions gnlex. *) (* Eval native_compute in gnlex (4, 3). *) Lemma gnlex_0_l y : gnlex (0, y) = 1. Admitted. Lemma gnlex_0_r x : gnlex (x, 0) = 1. Admitted. Lemma gnlex_S x y : gnlex (S x, S y) = gnlex (S y, y) + gnlex (S y, x). Admitted. Lemma gnlex_S_test x y : exists foo, gnlex (S x, S y) = foo. eexists. rewrite gnlex_S. destruct y. rewrite gnlex_0_r. destruct x. rewrite gnlex_0_r. reflexivity. rewrite gnlex_S. rewrite gnlex_0_r. destruct x. admit. rewrite gnlex_S. Admitted. Coq-Equations-1.3.1-8.20/examples/Basics.v000066400000000000000000000456101463127417400200210ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (** printing elimination %\coqdoctac{elimination}% *) (** printing noconf %\coqdoctac{noconf}% *) (** printing simp %\coqdoctac{simp}% *) (** printing by %\coqdockw{by}% *) (** printing rec %\coqdockw{rec}% *) (** printing Coq %\Coq{}% *) (** printing funelim %\coqdoctac{funelim}% *) (** printing Derive %\coqdockw{Derive}% *) (** printing Signature %\coqdocclass{Signature}% *) (** printing Subterm %\coqdocclass{Subterm}% *) (** printing NoConfusion %\coqdocclass{NoConfusion}% *) (** * Basic examples This file containts various examples demonstrating the features of Equations. If running this interactively you can ignore the printing and hide directives which are just used to instruct coqdoc. *) Require Import Program Bvector List Relations. From Equations Require Import Equations Signature. Require Import Utf8. 
Set Keyed Unification. (** Just pattern-matching *) Equations neg (b : bool) : bool := neg true := false ; neg false := true. (** A proof using the functional elimination principle derived for [neg]. *) Lemma neg_inv : forall b, neg (neg b) = b. Proof. intros b. funelim (neg b); auto. Qed. Module Obligations. (** One can use equations similarly to Program or the [refine] tactic, putting underscores [_] for subgoals to be filled separately using the tactic mode. *) Equations? f (n : nat) : nat := f 0 := 42 ; f (S m) with f m := { f (S m) IH := _ }. Proof. intros. exact IH. Defined. End Obligations. (** Structural recursion and use of the [with] feature to look at the result of a recursive call (here with a trivial pattern-matching. *) Import List. Equations app_with {A} (l l' : list A) : list A := app_with nil l := l ; app_with (cons a v) l with app_with v l => { | vl := cons a vl }. (** Structurally recursive function on natural numbers, with inspection of a recursive call result. We use [auto with arith] to discharge the obligations. *) Obligation Tactic := program_simpl ; try CoreTactics.solve_wf ; auto with arith. Equations equal (n m : nat) : { n = m } + { n <> m } := equal O O := in_left ; equal (S n) (S m) with equal n m => { equal (S n) (S ?(n)) (left eq_refl) := left eq_refl ; equal (S n) (S m) (right p) := in_right } ; equal x y := in_right. (** Pattern-matching on the indexed equality type. *) Equations eq_sym {A} (x y : A) (H : x = y) : y = x := eq_sym x _ eq_refl := eq_refl. Equations eq_trans {A} (x y z : A) (p : x = y) (q : y = z) : x = z := eq_trans x _ _ eq_refl eq_refl := eq_refl. Notation vector := Vector.t. Derive Signature for eq vector. Module KAxiom. (** By default we disallow the K axiom, but it can be set. *) (** In this case the following definition fails as [K] is not derivable on type [A]. *) Fail Equations K {A} (x : A) (P : x = x -> Type) (p : P eq_refl) (H : x = x) : P H := K x P p eq_refl := p. Set Equations With UIP. Axiom uip : forall A, UIP A. Local Existing Instance uip. Equations K_ax {A} (x : A) (P : x = x -> Type) (p : P eq_refl) (H : x = x) : P H := K_ax x P p eq_refl := p. (** The definition is however using an axiom equivalent to [K], so it cannot reduce on closed or open terms. *) End KAxiom. Module KDec. (** However, types enjoying a provable instance of the [K] principle are fine using the [With UIP] option. Note that the following definition does *not* reduce according to its single clause on open terms, it instead computes using the decidable equality proof on natural numbers. *) Set Equations With UIP. Fail Equations K {A} (x : A) (P : x = x -> Type) (p : P eq_refl) (H : x = x) : P H := K x P p eq_refl := p. Equations K (x : nat) (P : x = x -> Type) (p : P eq_refl) (H : x = x) : P H := K x P p eq_refl := p. Print Assumptions K. (* Closed under the global context *) End KDec. (** The [with] construct allows to pattern-match on an intermediary computation. The "|" syntax provides a shortcut to repeating the previous patterns. *) Section FilterDef. Context {A} (p : A -> bool). Equations filter (l : list A) : list A := filter nil := nil ; filter (cons a l) with p a => { | true := a :: filter l ; | false := filter l }. (** By default, equations makes definitions opaque after definition, to avoid spurious unfoldings, but this can be reverted on a case by case basis, or using the global [Set Equations Transparent] option. *) Global Transparent filter. End FilterDef. 
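(** A quick concrete check of [filter], added here for illustration; we go
    through [vm_compute], which evaluates regardless of opacity hints. *)
Example filter_even_example :
  filter Nat.even (1 :: 2 :: 3 :: 4 :: nil) = 2 :: 4 :: nil.
Proof. vm_compute. reflexivity. Qed.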
(** We define inclusion of a list in another one, to specify the behavior of [filter] *) Inductive incl {A} : relation (list A) := stop : incl nil nil | keep {x : A} {xs ys : list A} : incl xs ys -> incl (x :: xs) (x :: ys) | skip {x : A} {xs ys : list A} : incl xs ys -> incl (xs) (x :: ys). (** Using [with] again, we can produce a proof that the filtered list is a sublist of the original list. *) Equations sublist {A} (p : A -> bool) (xs : list A) : incl (filter p xs) xs := sublist p nil := stop ; sublist p (cons x xs) with p x := { | true := keep (sublist p xs) ; | false := skip (sublist p xs) }. (** Well-founded definitions: *) Require Import Arith Wf_nat. (** One can declare new well-founded relations using instances of the [WellFounded] typeclass. *) #[local] Instance wf_nat : WellFounded lt := lt_wf. #[local] Hint Resolve Nat.lt_succ_diag_r : lt. (** The [by wf n lt] annotation indicates the kind of well-founded recursion we want. *) Equations testn (n : nat) : nat by wf n lt := testn 0 := 0 ; testn (S n) with testn n => { | 0 := S 0 ; | (S n') := S n' }. (** Notations for vectors *) Equations Derive NoConfusion NoConfusionHom for vector. Arguments Vector.nil {A}. Arguments Vector.cons {A} _ {n}. Declare Scope vect_scope. Notation " x |:| y " := (@Vector.cons _ x _ y) (at level 20, right associativity) : vect_scope. Notation " x |: n :| y " := (@Vector.cons _ x n y) (at level 20, right associativity) : vect_scope. Notation "[]v" := Vector.nil (at level 0) : vect_scope. Local Open Scope vect_scope. (** We can define functions by structural recursion on indexed datatypes like vectors. *) Equations vapp {A} {n m} (v : vector A n) (w : vector A m) : vector A (n + m) := vapp []v w := w ; vapp (Vector.cons a v) w := a |:| vapp v w. (** We can also support well-founded recursion on indexed datatypes. *) (** We show that decidable equality of the elements type implied decidable equality of vectors. *) #[local] Instance vector_eqdec {A n} `(EqDec A) : EqDec (vector A n). Proof. intros. intros x. induction x. left. now depelim y. intro y; depelim y. destruct (eq_dec h h0); subst. destruct (IHx y). subst. left; reflexivity. right. intro. apply n0. noconf H0. constructor. right. intro. apply n0. noconf H0. constructor. Defined. Print Assumptions vector_eqdec. (** We automatically derive the signature and subterm relation for vectors and prove it's well-foundedness. The signature provides a [signature_pack] function to pack a vector with its index. The well-founded relation is defined on the packed vector type. *) Derive Subterm for vector. (** The relation is actually called [t_subterm] as [vector] is just a notation for [Vector.t]. *) Section foo. Context {A B : Type}. (** We can use the packed relation to do well-founded recursion on the vector. Note that we do a recursive call on a substerm of type [vector A n] which must be shown smaller than a [vector A (S n)]. They are actually compared at the packed type [{ n : nat & vector A n}]. *) Equations unzip {n} (v : vector (A * B) n) : vector A n * vector B n by wf (signature_pack v) (@t_subterm (A * B)) := unzip []v := ([]v, []v) ; unzip (Vector.cons (x, y) v) with unzip v := { | pair xs ys := (Vector.cons x xs, Vector.cons y ys) }. End foo. (** Playing with lists and functional induction, we define a tail-recursive version of [rev] and show its equivalence with the "naïve" [rev]. *) Equations app {A} (l l' : list A) : list A := app nil l := l; app (cons a v) l := cons a (app v l). 
Infix "++" := app (right associativity, at level 60) : list_scope. Equations rev_acc {A} (l : list A) (acc : list A) : list A := rev_acc nil acc := acc; rev_acc (cons a v) acc := rev_acc v (a :: acc). Equations rev {A} (l : list A) : list A := rev nil := nil; rev (cons a v) := rev v ++ (cons a nil). Notation " [] " := List.nil. Lemma app_nil : forall {A} (l : list A), l ++ [] = l. Proof. intros. funelim (app l []); simpl. reflexivity. now rewrite H. Qed. Lemma app_assoc : forall {A} (l l' l'' : list A), (l ++ l') ++ l'' = l ++ (l' ++ l''). Proof. intros. revert l''. funelim (l ++ l'); intros; simp app; trivial. now rewrite H. Qed. Lemma rev_rev_acc : forall {A} (l : list A), rev_acc l [] = rev l. Proof. intros. replace (rev l) with (rev l ++ []) by apply app_nil. generalize (@nil A). funelim (rev l). reflexivity. intros l'. simp rev_acc; trivial. rewrite H. rewrite app_assoc. reflexivity. Qed. #[local] Hint Rewrite @rev_rev_acc : rev_acc. #[local] Hint Rewrite @app_nil @app_assoc : app. Lemma rev_app : forall {A} (l l' : list A), rev (l ++ l') = rev l' ++ rev l. Proof. intros. funelim (l ++ l'); simp rev app; trivial. now (rewrite H, <- app_assoc). Qed. Equations zip' {A} (f : A -> A -> A) (l l' : list A) : list A := zip' f nil nil := nil ; zip' f (cons a v) (cons b w) := cons (f a b) (zip' f v w) ; zip' f x y := nil. Equations zip'' {A} (f : A -> A -> A) (l l' : list A) (def : list A) : list A := zip'' f nil nil def := nil ; zip'' f (cons a v) (cons b w) def := cons (f a b) (zip'' f v w def) ; zip'' f nil (cons b w) def := def ; zip'' f (cons a v) nil def := def. Import Vector. (** Vectors *) Equations vector_append_one {A n} (v : vector A n) (a : A) : vector A (S n) := vector_append_one nil a := cons a nil; vector_append_one (cons a' v) a := cons a' (vector_append_one v a). Equations vrev {A n} (v : vector A n) : vector A n := vrev nil := nil; vrev (cons a v) := vector_append_one (vrev v) a. Definition cast_vector {A n m} (v : vector A n) (H : n = m) : vector A m. intros; subst; assumption. Defined. Equations vrev_acc {A n m} (v : vector A n) (w : vector A m) : vector A (n + m) := vrev_acc nil w := w; vrev_acc (cons a v) w := cast_vector (vrev_acc v (cons a w)) _. (* About vapp'. *) Record vect {A} := mkVect { vect_len : nat; vect_vector : vector A vect_len }. Coercion mkVect : vector >-> vect. Derive NoConfusion for vect. (** Splitting a vector into two parts. *) Inductive Split {X : Type}{m n : nat} : vector X (m + n) -> Type := append : ∀ (xs : vector X m)(ys : vector X n), Split (vapp xs ys). Arguments Split [ X ]. (** We split by well-founded recursion on the index [m] here. *) Equations split {X : Type} {m n} (xs : vector X (m + n)) : Split m n xs by wf m := split (m:=O) xs := append nil xs ; split (m:=S m) (cons x xs) with split xs => { | append xs' ys' := append (cons x xs') ys' }. (** The [split] and [vapp] functions are inverses. *) Lemma split_vapp : ∀ (X : Type) m n (v : vector X m) (w : vector X n), let 'append v' w' := split (vapp v w) in v = v' /\ w = w'. Proof. intros. funelim (vapp v w). destruct split. depelim xs; intuition. simp split in *. destruct split. simpl. intuition congruence. Qed. (* Eval compute in @zip''. *) Require Import Bvector. (** This function can also be defined by structural recursion on [m]. 
*) Equations split_struct {X : Type} {m n} (xs : vector X (m + n)) : Split m n xs := split_struct (m:=0) xs := append nil xs ; split_struct (m:=(S m)) (cons x xs) with split_struct xs => { split_struct (m:=(S m)) (cons x xs) (append xs' ys') := append (cons x xs') ys' }. Lemma split_struct_vapp : ∀ (X : Type) m n (v : vector X m) (w : vector X n), let 'append v' w' := split_struct (vapp v w) in v = v' /\ w = w'. Proof. intros. funelim (vapp v w); simp split_struct in *. destruct split_struct. depelim xs; intuition. destruct (split_struct (vapp v _)); simpl. intuition congruence. Qed. (** Taking the head of a non-empty vector. *) Equations vhead {A n} (v : vector A (S n)) : A := vhead (cons a v) := a. (** Mapping over a vector. *) Equations vmap' {A B} (f : A -> B) {n} (v : vector A n) : vector B n := vmap' f nil := nil ; vmap' f (cons a v) := cons (f a) (vmap' f v). #[local] Hint Resolve Nat.lt_succ_diag_r : subterm_relation. Transparent vmap'. (** The same, using well-founded recursion on [n]. *) Equations vmap {A B} (f : A -> B) {n} (v : vector A n) : vector B n by wf n := vmap f (n:=?(O)) nil := nil ; vmap f (cons a v) := cons (f a) (vmap f v). Transparent vmap. Eval compute in (vmap' id (@nil nat)). Eval compute in (vmap' id (@cons nat 2 _ nil)). (** The image of a function. *) Section Image. Context {S T : Type}. Variable f : S -> T. Inductive Imf : T -> Type := imf (s : S) : Imf (f s). (** Here [(f s)] is innaccessible. *) Equations inv (t : T) (im : Imf t) : S := inv ?(f s) (imf s) := s. End Image. (** Working with a universe of types with an interpretation function. *) Section Univ. Inductive univ : Set := | ubool | unat | uarrow (from:univ) (to:univ). Equations interp (u : univ) : Set := interp ubool := bool; interp unat := nat; interp (uarrow from to) := interp from -> interp to. Transparent interp. Definition interp' := Eval compute in @interp. Equations foo (u : univ) (el : interp' u) : interp' u := foo ubool true := false ; foo ubool false := true ; foo unat t := t ; foo (uarrow from to) f := id ∘ f. Transparent foo. (* Eval lazy beta delta [ foo foo_obligation_1 foo_obligation_2 ] iota zeta in foo. *) End Univ. Equations vlast {A} {n} (v : vector A (S n)) : A by struct v := vlast (@cons a O _) := a ; vlast (@cons a (S n) v) := vlast v. Transparent vlast. (** The parity predicate embeds a divisor of n or n-1 *) Inductive Parity : nat -> Set := | even : forall n, Parity (mult 2 n) | odd : forall n, Parity (S (mult 2 n)). (* Eval compute in (fun n => mult 2 (S n)). *) Definition cast {A B : Type} (a : A) (p : A = B) : B. intros. subst. exact a. Defined. Equations parity (n : nat) : Parity n := parity O := even 0 ; parity (S n) with parity n => { parity (S ?(mult 2 k)) (even k) := odd k ; parity (S ?(S (mult 2 k))) (odd k) := cast (even (S k)) _ }. (** We can halve a natural looking at its parity and using the lower truncation. *) Equations half (n : nat) : nat := half n with parity n => { half ?(S (mult 2 k)) (odd k) := k ; half ?(mult 2 k) (even k) := k }. Equations vtail {A n} (v : vector A (S n)) : vector A n := vtail (cons a v') := v'. Equations diag {A n} (v : vector (vector A n) n) : vector A n := diag (n:=O) nil := nil ; diag (n:=S ?(n)) (cons (@cons a n v) v') := cons a (diag (vmap vtail v')). Transparent diag. Definition mat A n m := vector (vector A m) n. Equations vmake {A} (n : nat) (a : A) : vector A n := vmake O a := nil ; vmake (S n) a := cons a (vmake n a). 
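(** A quick added check (not in the original file): [vmake] builds a constant
    vector of the requested length, here unfolded via its generated
    equations. *)
Example vmake_example : vmake 2 true = cons true (cons true nil).
Proof. now simp vmake. Qed.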
Equations vfold_right {A : nat -> Type} {B} (f : ∀ n, B -> A n -> A (S n)) (e : A 0) {n} (v : vector B n) : A n := vfold_right f e nil := e ; vfold_right f e (@cons a n v) := f n a (vfold_right f e v). Equations vzip {A B C n} (f : A -> B -> C) (v : vector A n) (w : vector B n) : vector C n := vzip f nil _ := nil ; vzip f (cons a v) (cons a' v') := cons (f a a') (vzip f v v'). Definition transpose {A m n} : mat A m n -> mat A n m := vfold_right (A:=λ m, mat A n m) (λ m', vzip (λ a, cons a)) (vmake n nil). Require Import Examples.Fin. Generalizable All Variables. Opaque vmap. Opaque vtail. Opaque nth. Lemma nth_vmap `(v : vector A n) `(fn : A -> B) (f : fin n) : nth (vmap fn v) f = fn (nth v f). Proof. revert B fn. funelim (nth v f); intros; now simp nth vmap. Qed. Lemma nth_vtail `(v : vector A (S n)) (f : fin n) : nth (vtail v) f = nth v (fs f). Proof. funelim (vtail v); intros; now simp nth. Qed. #[local] Hint Rewrite @nth_vmap @nth_vtail : nth. Lemma diag_nth `(v : vector (vector A n) n) (f : fin n) : nth (diag v) f = nth (nth v f) f. Proof. revert f. funelim (diag v); intros f. depelim f. depelim f; simp nth; trivial. rewrite H. now simp nth. Qed. Equations assoc (x y z : nat) : x + y + z = x + (y + z) := assoc 0 y z := eq_refl; assoc (S x) y z with assoc x y z, x + (y + z) => { assoc (S x) y z eq_refl _ := eq_refl }. Section well_founded_recursion_and_auxiliary_function. (** When recursive calls are made on results pattern-matching the output of auxiliary functions, you need enough information to prove that the argument of recursive calls are smaller. This is usually granted by the specification of the auxiliary function (see function pivot in the quicksort example). When the type of the recursive function is not informative enough, we can use an inspect pattern as illustrated in the following example. *) Context {A : Type} (f : A -> option A) {lt : A -> A -> Prop} `{WellFounded A lt}. Hypothesis decr_f : forall n p, f n = Some p -> lt p n. (** The [inspect] definition is used to pack a value with a proof of an equality to itself. When pattern matching on the first component in this existential type, we keep information about the origin of the pattern available in the second component, the equality. *) Definition inspect {A} (a : A) : {b | a = b} := exist _ a eq_refl. Notation "x 'eqn:' p" := (exist _ x p) (only parsing, at level 20). (** If one uses [f n] instead of [inspect (f n)] in the following definition, patterns should be patterns for the option type, but then there is an unprovable obligation that is generated as we don't keep information about the call to [f n] being equal to [Some p] to justify the recursive call to [f_sequence]. *) Equations f_sequence (n : A) : list A by wf n lt := f_sequence n with inspect (f n) := { | Some p eqn: eq1 => p :: f_sequence p; | None eqn:_ => List.nil }. (** The following is an illustration of a theorem on f_sequence. *) Lemma in_seq_image (n p : A) : List.In p (f_sequence n) -> exists k, f k = Some p. Proof. funelim (f_sequence n);[ | now intros abs; elim abs]. now simpl; intros [p_is_a | p_in_seq];[rewrite <- p_is_a; exists n | auto]. Qed. End well_founded_recursion_and_auxiliary_function. Module IdElim. Import Sigma_Notations. Set Equations Transparent. Equations transport {A : Type} (P : A -> Type) {x y : A} (p : x = y) (u : P x) : P y := transport P eq_refl u := u. Notation "p # x" := (transport _ p x) (right associativity, at level 65, only parsing). 
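(** A small added example: transporting along reflexivity is definitionally
    the identity, so [reflexivity] applies thanks to
    [Set Equations Transparent] above. *)
Example transport_refl {A : Type} (P : A -> Type) (x : A) (u : P x) :
  transport P (@eq_refl A x) u = u.
Proof. reflexivity. Qed.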
Equations path_sigma {A : Type} {P : A -> Type} (u v : sigma P) (p : u.1 = v.1) (q : p # u.2 = v.2) : u = v := path_sigma (_ , _) (_ , _) eq_refl eq_refl := eq_refl. Example foo := path_sigma_elim. End IdElim. Coq-Equations-1.3.1-8.20/examples/Fin.v000066400000000000000000000077121463127417400173320ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (** An example development of the [fin] datatype using [equations]. *) Require Import Program.Basics Program.Combinators. From Equations Require Import Equations. Open Scope equations_scope. (** [fin n] is the type of naturals smaller than [n]. *) Inductive fin : nat -> Set := | fz : forall {n}, fin (S n) | fs : forall {n}, fin n -> fin (S n). Derive Signature for fin. (** NoConfusion For [fin]. *) Derive NoConfusion NoConfusionHom for fin. (** We can inject it into [nat]. *) Equations fog {n} (f : fin n) : nat := fog (n:=?(S n)) (@fz n) := 0 ; fog (fs f) := S (fog f). (** The injection preserves the number: *) Lemma fog_inj {n} (f : fin n) : fog f < n. Proof with auto with arith. intros. depind f; simp fog... Qed. (** Of course it has an inverse. *) Equations gof n : fin (S n) := gof O := fz ; gof (S n) := fs (gof n). Lemma fog_gof n : fog (gof n) = n. Proof with auto with arith. intros. funelim (gof n)... simp fog; congruence. Qed. Equations fin_inj_one {n} (f : fin n) : fin (S n) := fin_inj_one fz := fz; fin_inj_one (fs f) := fs (fin_inj_one f). Inductive le : nat -> nat -> Type := | le_O n : 0 <= n | le_S {n m} : n <= m -> S n <= S m where "n <= m" := (le n m). Derive Signature for le. Equations le_S_inv {n m} (p : S n <= S m) : n <= m := le_S_inv (le_S p) := p. Equations fin_inj {n} {m} (f : fin n) (k : n <= m) : fin m := fin_inj fz (le_S p) := fz; fin_inj (fs f) (le_S p) := fs (fin_inj f p). (** Let's do some arithmetic on [fin] *) (* Equations fin_plus {n m} (x : fin n) (y : fin m) : fin (n + m) := *) (* fin_plus (fz n) f := fin_inj f _ ; *) (* fin_plus (fs n x) y := fs (fin_plus x y). *) (* Next Obligation. destruct n; try constructor. *) (** Won't pass the guardness check which diverges anyway. *) Inductive finle : forall (n : nat) (x : fin n) (y : fin n), Prop := | leqz : forall {n j}, finle (S n) fz j | leqs : forall {n i j}, finle n i j -> finle (S n) (fs i) (fs j). Scheme finle_ind_dep := Induction for finle Sort Prop. #[export] Instance finle_ind_pack n x y : DepElim.DependentEliminationPackage (finle n x y) := { elim_type := _ ; elim := finle_ind_dep }. Arguments finle {n}. Require Vectors.Vector. Arguments Vector.nil {A}. Arguments Vector.cons {A} _ {n}. Notation vnil := Vector.nil. Notation vcons := Vector.cons. Equations nth {A} {n} (v : Vector.t A n) (f : fin n) : A := nth (vcons a v) fz := a ; nth (vcons a v) (fs f) := nth v f. Equations tabulate {A} {n} (f : fin n -> A) : Vector.t A n by struct n := tabulate (n:=O) f := vnil ; tabulate (n:=(S n)) f := vcons (f fz) (tabulate (f ∘ fs)). (** [Below] recursor for [fin]. *) Equations Below_fin (P : forall n, fin n -> Type) {n} (v : fin n) : Type := Below_fin P fz := unit ; Below_fin P (fs f) := (P _ f * Below_fin P f)%type. #[export] Hint Rewrite Below_fin_equation_2 (* Below_fin_equation_3 *) : rec_decision. 
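(** A small added example (not in the original file): computing with [nth] on
    a concrete vector, using the generated rewrite rules rather than
    unfolding. *)
Example nth_example : nth (vcons true (vcons false vnil)) (fs fz) = false.
Proof. now simp nth. Qed.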
Equations(noeqns noind) below_fin (P : forall n, fin n -> Type) (step : forall n (v : fin n), Below_fin P v -> P n v) {n} (v : fin n) : Below_fin P v := below_fin P step fz := tt ; below_fin P step (@fs n f) := let bf := below_fin P step f in (step n f bf, bf). Global Opaque Below_fin. Definition rec_fin (P : forall n, fin n -> Type) {n} v (step : forall n (v : fin n), Below_fin P v -> P n v) : P n v := step n v (below_fin P step v). (* Import Equations.Below. *) (* Instance fin_Recursor n : Recursor (fin n) := *) (* { rec_type := fun v => forall (P : forall n, fin n -> Type) step, P n v; *) (* rec := fun v P step => rec_fin P v step }. *) Coq-Equations-1.3.1-8.20/examples/HoTT_light.v000066400000000000000000000500141463127417400206140ustar00rootroot00000000000000 (* begin hide *) (**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (* end hide *) (** * HoTT-light ** A lightweight version of the Homotopy Type Theory library prelude. *) Set Warnings "-notation-overridden". Require Export Unicode.Utf8. Require Import Coq.Program.Tactics Setoid. Require Import Relations. (** Switches to constants in Type *) Require Import Equations.Type.All. (** This imports the polymorphic identity and sigma types in Type and their associated notations. *) Import Id_Notations. Import Sigma_Notations. Local Open Scope equations_scope. Set Warnings "-deprecated-option". Set Universe Polymorphism. Set Primitive Projections. (** We want our definitions to stay transparent. *) Set Equations Transparent. Set Implicit Arguments. (** Redefine a polymorphic identity function *) Definition id {A : Type} (a : A) : A := a. Require Import CRelationClasses CMorphisms. #[local] Instance id_reflexive A : Reflexive (@Id A). Proof. exact (@id_refl A). Defined. #[local] Instance eq_symmetric A : Symmetric (@Id A). Proof. exact (@id_sym A). Defined. #[local] Instance eq_transitive A : Transitive (@Id A). Proof. exact (@id_trans A). Defined. (** Non-dependent cartesian products are just sigma types. *) Equations fst {A B} (p : A * B) : A := fst (a, b) := a. Equations snd {A B} (p : A * B) : B := snd (a, b) := b. Definition Sect {A B : Type} (s : A -> B) (r : B -> A) := forall x : A, r (s x) = x. Equations ap {A B : Type} (f : A -> B) {x y : A} (p : x = y) : f x = f y := ap f id_refl := id_refl. (** We define [transport] with the arguments in the order we like. *) Equations transport {A : Type} (P : A -> Type) {x y : A} (p : x = y) (u : P x) : P y := transport P id_refl u := u. Notation "p # x" := (transport _ p x) (right associativity, at level 65, only parsing). (** We use [1] to refer to the reflexivity proof. *) Notation "1" := id_refl : equations_scope. (** Notation for the inverse *) Reserved Notation "p ^" (at level 3, format "p '^'"). Notation "p ^" := (id_sym p%equations) : equations_scope. Equations apd {A} {B : A -> Type} (f : forall x : A, B x) {x y : A} (p : x = y) : p # f x = f y := apd f 1 := 1. (** *** Equivalence *) (** A typeclass that includes the data making [f] into an adjoint equivalence*) Class IsEquiv {A B : Type} (f : A -> B) := BuildIsEquiv { equiv_inv : B -> A ; eisretr : Sect equiv_inv f; eissect : Sect f equiv_inv; eisadj : forall x : A, eisretr (f x) = ap f (eissect x) }. 
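(** A small added example: [ap] maps the reflexivity proof to reflexivity,
    definitionally, since definitions are kept transparent in this file. *)
Example ap_id_refl {A B : Type} (f : A -> B) (x : A) : ap f (@id_refl A x) = 1.
Proof. reflexivity. Qed.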
Arguments eisretr {A B}%type_scope f%function_scope {_} _. Arguments eissect {A B}%type_scope f%function_scope {_} _. Arguments eisadj {A B}%type_scope f%function_scope {_} _. Arguments IsEquiv {A B}%type_scope f%function_scope. (** A record that includes all the data of an adjoint equivalence. *) Record Equiv A B := BuildEquiv { equiv_fun : A -> B ; equiv_isequiv : IsEquiv equiv_fun }. Coercion equiv_fun : Equiv >-> Funclass. Global Existing Instance equiv_isequiv. Arguments equiv_fun {A B} _ _. Arguments equiv_isequiv {A B} _. Bind Scope equiv_scope with Equiv. Reserved Infix "<~>" (at level 85). Notation "A <~> B" := (Equiv A B) (at level 85) : type_scope. Notation "f ^^-1" := (@equiv_inv _ _ f _) (at level 3). (** *** Functional extensionality *) Definition pointwise_paths {A} {P:A->Type} (f g:forall x:A, P x) := forall x:A, f x = g x. #[export] Hint Unfold pointwise_paths : typeclass_instances. Notation "f == g" := (pointwise_paths f g) (at level 70, no associativity) : type_scope. (** This definition has slightly changed: the match on the Id is external to the function. *) Equations apD10 {A} {B : A -> Type} {f g : forall x, B x} (h : f = g) : f == g := apD10 1 := fun h => 1. Class Funext := { isequiv_apD10 :: forall (A : Type) (P : A -> Type) f g, IsEquiv (@apD10 A P f g) }. Axiom funext : Funext. #[local] Existing Instance funext. Definition path_forall `{Funext} {A : Type} {P : A -> Type} (f g : forall x : A, P x) : f == g -> f = g := (@apD10 A P f g)^^-1. (** *** Path spaces in sigma types and product types *) Equations path_sigma {A : Type} (P : A -> Type) (u v : sigma P) (p : u.1 = v.1) (q : p # u.2 = v.2) : u = v := path_sigma (_, _) (_, _) 1 1 := 1. Equations path_prod_uncurried {A B : Type} (z z' : A * B) (pq : (z.1 = z'.1) * (z.2 = z'.2)): z = z' := path_prod_uncurried (_, _) (_, _) (1, 1) := 1. Definition path_prod {A B : Type} (z z' : A * B) (e : z.1 = z'.1) (f : z.2 = z'.2) : z = z' := path_prod_uncurried _ _ (e, f). Equations path_prod_eq {A B : Type} (z z' : A * B) (e : z.1 = z'.1) (f : z.2 = z'.2) : z = z' := path_prod_eq (_, _) (_, _) 1 1 := 1. Equations eta_path_prod {A B : Type} {z z' : A * B} (p : z = z') : path_prod _ _ (ap pr1 p) (ap (fun x : A * B => pr2 x) p) = p := eta_path_prod 1 := 1. Definition path_prod' {A B : Type} {x x' : A} {y y' : B} : (x = x') -> (y = y') -> ((x,y) = (x',y')) := fun p q => path_prod (x, y) (x', y') p q. Equations ap_fst_path_prod {A B : Type} {z z' : A * B} (p : z.1 = z'.1) (q : z.2 = z'.2) : ap fst (path_prod _ _ p q) = p := ap_fst_path_prod (z:=(_, _)) (z':=(_, _)) 1 1 := 1. Equations ap_snd_path_prod {A B : Type} {z z' : A * B} (p : z.1 = z'.1) (q : z.2 = z'.2) : ap snd (path_prod _ _ p q) = q := ap_snd_path_prod (z:=(_, _)) (z':=(_, _)) 1 1 := 1. #[local] Instance isequiv_path_prod {A B : Type} {z z' : A * B} : IsEquiv (path_prod_uncurried z z'). Proof. unshelve refine (BuildIsEquiv _ _ _). - exact (fun r => (ap fst r, ap snd r)). - intro. apply eta_path_prod. - intros [p q]. exact (path_prod' (ap_fst_path_prod p q) (ap_snd_path_prod p q)). - destruct z as [x y], z' as [x' y']. intros [p q]; simpl in p, q. destruct p, q; apply 1. Defined. Equations path_sigma_uncurried {A : Type} {P : A -> Type} (u v : sigma P) (pq : Σ p, p # u.2 = v.2) : u = v := path_sigma_uncurried (u1, u2) (_, _) (1, 1) := 1. Definition pr1_path {A} {P : A -> Type} {u v : sigma P} (p : u = v) : u.1 = v.1 := ap (@pr1 _ _) p. Notation "p ..1" := (pr1_path p) (at level 3). Definition pr2_path {A} `{P : A -> Type} {u v : sigma P} (p : u = v) : p..1 # u.2 = v.2. 
destruct p. apply 1. Defined. Notation "p ..2" := (pr2_path p) (at level 3). Definition eta_path_sigma_uncurried {A} `{P : A -> Type} {u v : sigma P} (p : u = v) : path_sigma_uncurried _ _ (p..1, p..2) = p. destruct p. apply 1. Defined. Definition eta_path_sigma A `{P : A -> Type} {u v : sigma P} (p : u = v) : path_sigma _ _ (p..1) (p..2) = p := eta_path_sigma_uncurried p. Definition path_sigma_equiv {A : Type} (P : A -> Type) (u v : sigma P): IsEquiv (path_sigma_uncurried u v). unshelve refine (BuildIsEquiv _ _ _). - exact (fun r => (r..1, r..2)). - intro. apply eta_path_sigma_uncurried. - destruct u, v; intros [p q]; simpl in *. destruct p. simpl in *. destruct q. reflexivity. - destruct u, v; intros [p q]; simpl in *; destruct p. simpl in *. destruct q; simpl in *. apply 1. Defined. (** *** Groupoid laws for equality *) Equations concat {A} {x y z : A} (e : x = y) (e' : y = z) : x = z := concat 1 q := q. Notation "p @ q" := (concat p q) (at level 20). Definition concat2 {A} {x y z : A} {p p' : x = y} {q q' : y = z} (h : p = p') (h' : q = q') : p @ q = p' @ q' := match h, h' with 1, 1 => 1 end. Reserved Notation "p @@ q" (at level 20). Notation "p @@ q" := (concat2 p q)%equations : equations_scope. Definition moveR_E A B (f:A -> B) {H : IsEquiv f} (x : A) (y : B) (p : x = f^^-1 y) : (f x = y) := ap f p @ (@eisretr _ _ f _ y). (** One can use the shortcut notation [| p] to give patterns for the explicit arguments without repeating the function name. *) Equations concat_1p {A : Type} {x y : A} (p : x = y) : 1 @ p = p := | 1 := 1. Equations concat_p1 {A : Type} {x y : A} (p : x = y) : p @ 1 = p := | 1 := 1. Equations concat_Vp {A : Type} {x y : A} (p : x = y) : p^ @ p = 1 := | 1 := 1. Equations concat_pV {A : Type} {x y : A} (p : x = y) : p @ p^ = 1 := | 1 := 1. Equations concat_p_pp {A : Type} {x y z t : A} (p : x = y) (q : y = z) (r : z = t) : p @ (q @ r) = (p @ q) @ r := concat_p_pp 1 _ _ := 1. #[export] Hint Rewrite @concat_p1 @concat_Vp @concat_pV : concat. #[local] Instance Id_equiv A : Equivalence (@Id A) := {}. #[local] Instance concat_morphism (A : Type) x y z : Proper (Id ==> Id ==> Id) (@concat A x y z). Proof. reduce. destruct X. destruct X0. destruct x0. reflexivity. Defined. Definition trans_co_eq_inv_arrow_morphism@{i j k} : ∀ (A : Type@{i}) (R : crelation@{i j} A), Transitive R → Proper@{k j} (respectful@{i j k j k j} R (respectful@{i j k j k j} Id (@flip@{k k k} _ _ Type@{j} arrow))) R. Proof. reduce. transitivity y. assumption. now destruct X1. Defined. #[local] Existing Instance trans_co_eq_inv_arrow_morphism. Equations concat_pp_A1 {A : Type} {g : A -> A} (p : forall x, x = g x) {x y : A} (q : x = y) {w : A} (r : w = x) : (r @ p x) @ ap g q = (r @ q) @ p y := concat_pp_A1 _ 1 1 := concat_p1 _. Equations whiskerL {A : Type} {x y z : A} (p : x = y) {q r : y = z} (h : q = r) : p @ q = p @ r := whiskerL _ 1 := 1. Equations whiskerR {A : Type} {x y z : A} {p q : x = y} (h : p = q) (r : y = z) : p @ r = q @ r := whiskerR 1 _ := 1. Equations moveL_M1 {A : Type} {x y : A} (p q : x = y) : id_sym q @ p = 1 -> p = q := moveL_M1 _ 1 := fun e => e. Definition inverse2 {A : Type} {x y : A} {p q : x = y} (h : p = q) : id_sym p = id_sym q := ap (@id_sym _ _ _) h. Equations ap02 {A B : Type} (f:A->B) {x y:A} {p q:x=y} (r:p=q) : ap f p = ap f q := ap02 f 1 := 1. Equations ap_p_pp {A B : Type} (f : A -> B) {w : B} {x y z : A} (r : w = f x) (p : x = y) (q : y = z) : r @ (ap f (p @ q)) = (r @ ap f p) @ (ap f q) := ap_p_pp f _ 1 _ := concat_p_pp _ 1 _. 
Equations ap_compose {A B C : Type} (f : A -> B) (g : B -> C) {x y : A} (p : x = y) : ap (fun x => g (f x)) p = ap g (ap f p) := ap_compose f g 1 := 1. (** An example of the [with] construct doing abstraction in the context and conclusion. Here [p x] is abstracted first, then [g x], resulting in a goal [gx : A, px : gx = x]. *) Equations concat_A1p {A : Type} {g : A -> A} (p : forall x, g x = x) {x y : A} (q : x = y) : (ap g q) @ (p y) = (p x) @ q := concat_A1p p 1 with p x, g x := { concat_A1p p (x:=?(x)) 1 1 x := 1 }. (** Dummy example using functional elimination on a proof-relevant function using [with]. *) Lemma concat_A1p_lemma {A} (f : A -> A) (p : forall x, f x = x) {x y : A} (q : x = y) : (concat_A1p p q) = (concat_A1p p q). Proof. apply_funelim (concat_A1p p q). clear; intros. simpl. elim Heq0 using Logic.Id_rect_r. simpl. reflexivity. Qed. Equations ap_pp {A B : Type} (f : A -> B) {x y z : A} (p : x = y) (q : y = z) : ap f (p @ q) = (ap f p) @ (ap f q) := ap_pp _ 1 1 => 1. Equations concat_pp_V {A : Type} {x y z : A} (p : x = y) (q : y = z) : (p @ q) @ id_sym q = p := concat_pp_V 1 1 => 1. Equations ap_V {A B : Type} (f : A -> B) {x y : A} (p : x = y) : ap f (id_sym p) = id_sym (ap f p) := ap_V f 1 => 1. #[export] Hint Rewrite @ap_pp @ap_V : ap. #[export] Hint Rewrite @concat_pp_V : concat. Equations concat_pA1 {A : Type} {f : A -> A} (p : forall x, x = f x) {x y : A} (q : x = y) : (p x) @ (ap f q) = q @ (p y) := concat_pA1 p 1 := concat_p1 (p _). Equations concat_p_Vp {A : Type} {x y z : A} (p : x = y) (q : x = z) : p @ (id_sym p @ q) = q := concat_p_Vp 1 1 := 1. Equations concat_pV_p {A : Type} {x y z : A} (p : x = z) (q : y = z) : (p @ id_sym q) @ q = p := concat_pV_p 1 1 := 1. #[export] Hint Rewrite @concat_pA1 @concat_p_Vp @concat_pV_p : concat. Definition concat_pA1_p {A : Type} {f : A -> A} (p : forall x, f x = x) {x y : A} (q : x = y) {w : A} (r : w = f x) : (r @ ap f q) @ p y = (r @ p x) @ q. Proof. destruct q; simpl. now rewrite !concat_p1. (* now simp concat. *) Defined. Equations ap_p {A B : Type} (f : A -> B) {x y : A} (p q: x = y) (e : p = q) : ap f p = ap f q := ap_p f 1 := 1. #[local] Instance ap_morphism (A : Type) (B : Type) x y f : Proper (@Id (@Id A x y) ==> @Id (@Id B (f x) (f y))) (@ap A B f x y). Proof. reduce. now apply ap_p. Defined. #[local] Instance reflexive_proper_proxy : ∀ (A : Type) (R : crelation A), Reflexive R → ∀ x : A, ProperProxy R x. Proof. intros. reduce. apply X. Defined. #[local] Instance isequiv_inverse A B (f:A -> B) (H:IsEquiv f) : IsEquiv (f^^-1) | 1000. Proof. refine (BuildIsEquiv (@eissect _ _ f _) (@eisretr _ _ f _) _). intros b. rewrite <- (concat_1p (eissect _ _)). rewrite <- (concat_Vp (ap f^^-1 (eisretr _ (f (f^^-1 b))))). rewrite (whiskerR (inverse2 (ap02 f^^-1 (eisadj _ (f^^-1 b)))) _). refine (whiskerL _ (id_sym (concat_1p (eissect _ _))) @ _). rewrite <- (concat_Vp (eissect _ (f^^-1 (f (f^^-1 b))))). rewrite <- (whiskerL _ (concat_1p (eissect _ (f^^-1 (f (f^^-1 b)))))). rewrite <- (concat_pV (ap f^^-1 (eisretr _ (f (f^^-1 b))))). apply moveL_M1. repeat rewrite concat_p_pp. (* Now we apply lots of naturality and cancel things. *) rewrite <- (concat_pp_A1 (fun a => id_sym (eissect _ a)) _ _). rewrite (ap_compose f f^^-1). rewrite <- (ap_p_pp _ _ (ap f (ap f^^-1 (eisretr _ (f (f^^-1 b))))) _). rewrite <- (ap_compose f^^-1 f). rewrite (concat_A1p (@eisretr _ _ f _) _). rewrite ap_pp, concat_p_pp. rewrite (concat_pp_V _ (ap f^^-1 (eisretr _ (f (f^^-1 b))))). repeat rewrite <- ap_V. rewrite <- ap_pp. 
rewrite <- (concat_pA1 (fun y => id_sym (eissect _ y)) _). rewrite ap_compose, <- (ap_compose f^^-1 f). rewrite <- ap_p_pp. rewrite (concat_A1p (@eisretr _ _ f _) _). rewrite concat_p_Vp. rewrite <- ap_compose. rewrite (concat_pA1_p (@eissect _ _ f _) _). rewrite concat_pV_p; apply concat_Vp. Defined. Definition transport_inv A {P : A -> Type} (x y :A) (e : x = y) (u:P x) v: u = e^ # v -> e # u = v. destruct e. exact id. Defined. Definition moveR_M1 {A : Type} {x y : A} (p q : x = y) : 1 = p^ @ q -> p = q. Proof. destruct p. intro h. exact (h @ (concat_1p _)). Defined. Definition moveL_Vp {A : Type} {x y z : A} (p : x = z) (q : y = z) (r : x = y) : r @ q = p -> q = r^ @ p. Proof. destruct r. intro h. exact ((concat_1p _)^ @ h @ (concat_1p _)^). Defined. (** *** Contractibility *) Class Contr (A : Type) := BuildContr { center : A ; contr : (forall y : A, center = y) }. Arguments center A {Contr}. Lemma contr_equiv A B (f : A -> B) `{IsEquiv A B f} `{Contr A} : Contr B. Proof. exists (f (center A)). intro y. eapply moveR_E. apply contr. Qed. Global Instance contr_forall A {P : A -> Type} {H : forall a, Contr (P a)} : Contr (forall a, P a) | 100. Proof. exists (fun a => @center _ (H a)). intro f. apply path_forall. intro a. apply contr. Defined. Global Instance contr_unit : Contr unit | 0 := {| center := tt; contr := fun t : unit => match t with tt => 1 end |}. Definition path_contr {A} {H:Contr A} (x y : A) : x = y := concat (id_sym (@contr _ H x)) (@contr _ H y). Definition path2_contr {A} {H:Contr A} {x y : A} (p q : x = y) : p = q. assert (K : forall (r : x = y), r = path_contr x y). intro r; destruct r; symmetry; now apply concat_Vp. apply (transitivity (y:=path_contr x y)). - apply K. - symmetry; apply K. Defined. #[local] Instance contr_paths_contr A {H:Contr A} (x y : A) : Contr (x = y) | 10000 := let c := {| center := concat (id_sym (contr x)) (contr y); contr := path2_contr (concat (id_sym (contr x)) (contr y)) |} in c. Global Program Instance contr_prod A B {CA : Contr A} {CB : Contr B} : Contr (prod A B). Next Obligation. exact (@center _ CA, @center _ CB). Defined. Next Obligation. apply path_prod; apply contr. Defined. Equations singletons_contr {A : Type} (x : A) : Contr (Σ y : A, x = y) := singletons_contr x := {| center := (x, 1); contr := contr |} where contr : forall y : (Σ y : A, x = y), (x, 1) = y := contr (x, 1) := 1. #[local] Existing Instance singletons_contr. Notation " 'rew' H 'in' c " := (@Logic.Id_rew_r _ _ _ c _ H) (at level 20). Notation " 'rewd' H 'in' c " := (@Logic.Id_rect_r _ _ _ c _ H) (at level 20). (** *** Singletons are contractible as a no-confusion principle The (heterogeneous) NoConfusion principle for equality, i.e. [NoConfusion (Σ y, x = y)] is equivalent to the proof that singletons are contractible, i.e. that this type has a definitional equivalence with [unit]. *) Definition NoConfusion_singleton {A : Type} (x : A) (p q : Σ y : A, x = y) : Type := unit. Unset Implicit Arguments. Equations noConfusion_singleton {A} (x : A) (p q : Σ y : A, x = y) : NoConfusion_singleton p q -> p = q := noConfusion_singleton x (x, 1) (x, 1) tt => 1. Equations noConfusion_singleton_inv {A} (x : A) (p q : Σ y : A, x = y) : p = q -> NoConfusion_singleton p q := noConfusion_singleton_inv x (x, 1) ?((x, 1)) 1 => tt. Definition NoConfusionIdPackage_singleton {A} (x : A) : NoConfusionPackage (Σ y : A, x = y). Proof. refine {| NoConfusion := @NoConfusion_singleton _ x; noConfusion := noConfusion_singleton x; noConfusion_inv := noConfusion_singleton_inv x |}. - intros a b e.
(* also, this is an equality in the unit type... *) dependent elimination a as [(a, 1)]. dependent elimination b as [(a, 1)]. hnf in e. destruct e. reflexivity. - intros a b e. (* apply path2_contr. *) dependent elimination e as [1]. dependent elimination a as [(a, 1)]. reflexivity. Defined. Definition contr_sigma A {P : A -> Type} {H : Contr A} `{H0 : forall a, Contr (P a)} : Contr (sigma P). Proof. exists (center A, center (P (center A))). intros [a Ha]. unshelve refine (path_sigma _ _ _ _). simpl. apply H. simpl. apply transport_inv. apply (H0 (center A)). Defined. (** *** Adjointification: producing an equivalence from an iso *) Section Adjointify. Context {A B : Type} (f : A -> B) (g : B -> A). Context (isretr : Sect g f) (issect : Sect f g). (* This is the modified [eissect]. *) Let issect' := fun x => ap g (ap f (issect x)^) @ ap g (isretr (f x)) @ issect x. Let is_adjoint' (a : A) : isretr (f a) = ap f (issect' a). Proof. unfold issect'. apply moveR_M1. repeat rewrite ap_pp, concat_p_pp; rewrite <- ap_compose. rewrite (concat_pA1 (fun b => (isretr b)^) (ap f (issect a)^)). repeat rewrite concat_pp_p; rewrite ap_V. rewrite <- concat_p_pp. rewrite <- concat_p_pp. apply moveL_Vp. rewrite concat_p1. rewrite concat_p_pp, <- ap_compose. rewrite (concat_pA1 (fun b => (isretr b)^) (isretr (f a))). rewrite concat_pV, concat_1p; reflexivity. Qed. (** We don't make this a typeclass instance, because we want to control when we are applying it. *) Definition isequiv_adjointify : IsEquiv f := @BuildIsEquiv A B f g isretr issect' is_adjoint'. Definition equiv_adjointify : A <~> B := @BuildEquiv A B f isequiv_adjointify. End Adjointify. Arguments isequiv_adjointify {A B}%type_scope (f g)%function_scope isretr issect. Arguments equiv_adjointify {A B}%type_scope (f g)%function_scope isretr issect. (** *** Congruence preserves equivalence If [f] is an equivalence, then so is [ap f]. We are lazy and use [adjointify]. *) Global Instance isequiv_ap {A B} f `{IsEquiv A B f} (x y : A) : IsEquiv (@ap A B f x y) | 1000 := isequiv_adjointify (ap f) (fun q => (eissect f x)^ @ ap f^^-1 q @ eissect f y) (fun q => ap_pp f _ _ @ whiskerR (ap_pp f _ _) _ @ ((ap_V f _ @ inverse2 (eisadj f _)^) @@ (ap_compose f^^-1 f _)^ @@ (eisadj f _)^) @ concat_pA1_p (eisretr f) _ _ @ whiskerR (concat_Vp _) _ @ concat_1p _) (fun p => whiskerR (whiskerL _ (ap_compose f f^^-1 _)^) _ @ concat_pA1_p (eissect f) _ _ @ whiskerR (concat_Vp _) _ @ concat_1p _). (** The definition of homotopy fiber. *) Definition hfiber {A B : Type} (f : A -> B) (y : B) := Σ (x : A), f x = y. Global Arguments hfiber {A B}%type_scope f%function_scope y. Coq-Equations-1.3.1-8.20/examples/Makefile.local000066400000000000000000000001331463127417400211460ustar00rootroot00000000000000COQEXTRAFLAGS += -w "-notation-overridden" # post-all:: # $(MAKE) -f $(SELF) $(TEXFILES) Coq-Equations-1.3.1-8.20/examples/MoreDep.v000066400000000000000000000244451463127417400201530ustar00rootroot00000000000000(** * MoreDep Porting a chapter of Adam Chlipala's Certified Programming with Dependent Types, #More Dependent Types#. *) Require Import Bool Arith List Program. From Equations Require Import Equations. Set Equations Transparent. Set Keyed Unification. Set Implicit Arguments. Section ilist. Variable A : Set. Inductive ilist : nat -> Set := | Nil : ilist O | Cons : forall n, A -> ilist n -> ilist (S n). Derive Signature for ilist. Arguments Cons {n}. 
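(** The length index is maintained by the constructors: consing one element onto [Nil] yields an [ilist 1]. (Illustrative check added here; it is not part of Chlipala's original chapter.) *)
Check ((fun a : A => Cons a Nil) : A -> ilist 1).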
Equations app n1 (ls1 : ilist n1) n2 (ls2 : ilist n2) : ilist (n1 + n2) := app Nil ls2 := ls2; app (Cons x ls1) ls2 := Cons x (app ls1 ls2). Equations inject (ls : list A) : ilist (length ls) := inject nil := Nil; inject (cons h t) := Cons h (inject t). Equations unject n (ls : ilist n) : list A := unject Nil := nil; unject (Cons x ls) := cons x (unject ls). Theorem unject_inverse : forall ls, unject (inject ls) = ls. Proof. intros. funelim (inject ls); simp unject; congruence. Qed. Equations hd n (ls : ilist (S n)) : A := hd (Cons x _) := x. End ilist. Inductive type : Set := | Nat : type | Bool : type | Prod : type -> type -> type. Derive NoConfusion EqDec for type. Inductive exp : type -> Set := | NConst : nat -> exp Nat | Plus : exp Nat -> exp Nat -> exp Nat | Eq : exp Nat -> exp Nat -> exp Bool | BConst : bool -> exp Bool | And : exp Bool -> exp Bool -> exp Bool | If : forall t, exp Bool -> exp t -> exp t -> exp t | Pair : forall t1 t2, exp t1 -> exp t2 -> exp (Prod t1 t2) | Fst : forall t1 t2, exp (Prod t1 t2) -> exp t1 | Snd : forall t1 t2, exp (Prod t1 t2) -> exp t2. Derive Signature NoConfusion for exp. Derive NoConfusionHom for exp. Derive Subterm for exp. Equations typeDenote (t : type) : Set := typeDenote Nat := nat; typeDenote Bool := bool; typeDenote (Prod t1 t2) := (typeDenote t1 * typeDenote t2)%type. Equations expDenote t (e : exp t) : typeDenote t := expDenote (NConst n) := n; expDenote (Plus e1 e2) := expDenote e1 + expDenote e2; expDenote (Eq e1 e2) := Nat.eqb (expDenote e1) (expDenote e2); expDenote (BConst b) := b; expDenote (And e1 e2) := expDenote e1 && expDenote e2; expDenote (If e e1 e2) with expDenote e => { | true := expDenote e1; | false := expDenote e2 }; expDenote (Pair e1 e2) := (expDenote e1, expDenote e2); expDenote (Fst e) := fst (expDenote e); expDenote (Snd e) := snd (expDenote e). Equations pairOutType2 (t : type) : Set := pairOutType2 (Prod t1 t2) := option (exp t1 * exp t2); pairOutType2 _ := option unit. Equations pairOutTypeDef (t : type) : Set := pairOutTypeDef (Prod t1 t2) := exp t1 * exp t2; pairOutTypeDef _ := unit. Transparent pairOutTypeDef. Definition pairOutType' (t : type) := option (match t with | Prod t1 t2 => exp t1 * exp t2 | _ => unit end). Equations pairOut t (e : exp t) : option (pairOutTypeDef t) := pairOut (Pair e1 e2) => Some (e1, e2); pairOut _ => None. Set Printing Depth 1000000. Require Import Wellfounded. Equations cfold {t} (e : exp t) : exp t := (* Works with well-foundedness too: cfold e by wf (signature_pack e) exp_subterm := *) cfold (NConst n) => NConst n; cfold (Plus e1 e2) with (cfold e1, cfold e2) => { | pair (NConst n1) (NConst n2) := NConst (n1 + n2); | pair e1' e2' := Plus e1' e2' }; cfold (Eq e1 e2) with (cfold e1, cfold e2) => { | pair (NConst n1) (NConst n2) := BConst (Nat.eqb n1 n2); | pair e1' e2' => Eq e1' e2' }; cfold (BConst b) := BConst b; cfold (And e1 e2) with (cfold e1, cfold e2) => { | pair (BConst b1) (BConst b2) := BConst (b1 && b2); | pair e1' e2' := And e1' e2' }; cfold (If e e1 e2) with cfold e => { | BConst true => cfold e1; | BConst false => cfold e2; | _ => If e (cfold e1) (cfold e2) } ; cfold (Pair e1 e2) := Pair (cfold e1) (cfold e2); cfold (Fst e) with cfold e => { | e' with pairOut e' => { | Some p := fst p; | None := Fst e' } }; cfold (Snd e) with cfold e => { | e' with pairOut e' => { | Some p := snd p; | None := Snd e' } }. Inductive color : Set := Red | Black. Derive NoConfusion for color. 
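(** A small sanity check for the constant folder [cfold] defined above (added example; the name [cfold_plus_sketch] is ours). Since [Set Equations Transparent] is on, the folded expression computes, so its denotation is [3] by [reflexivity]. *)
Example cfold_plus_sketch : expDenote (cfold (Plus (NConst 1) (NConst 2))) = 3.
Proof. reflexivity. Qed.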
Inductive rbtree : color -> nat -> Set := | Leaf : rbtree Black 0 | RedNode : forall n, rbtree Black n -> nat -> rbtree Black n -> rbtree Red n | BlackNode : forall c1 c2 n, rbtree c1 n -> nat -> rbtree c2 n -> rbtree Black (S n). Derive Signature NoConfusion for rbtree. Require Import Arith Lia. Section depth. Variable f : nat -> nat -> nat. Equations depth {c n} (t : rbtree c n) : nat := depth Leaf := 0; depth (RedNode t1 _ t2) := S (f (depth t1) (depth t2)); depth (BlackNode t1 _ t2) := S (f (depth t1) (depth t2)). End depth. Theorem depth_min : forall c n (t : rbtree c n), depth min t >= n. Proof. intros. funelim (depth Nat.min t); cbn; auto; match goal with | [ |- context[min ?X ?Y] ] => let H := fresh in destruct (Nat.min_dec X Y) as [H|H]; rewrite H end; lia. Qed. Lemma depth_max' : forall c n (t : rbtree c n), match c with | Red => depth max t <= 2 * n + 1 | Black => depth max t <= 2 * n end. Proof. intros; funelim (depth Nat.max t); cbn; auto; match goal with | [ |- context[max ?X ?Y] ] => let H := fresh in destruct (Nat.max_dec X Y) as [H|H]; rewrite H end; repeat match goal with | [ H : context[match ?C with Red => _ | Black => _ end] |- _ ] => destruct C end; lia. Qed. Theorem depth_max : forall c n (t : rbtree c n), depth max t <= 2 * n + 1. Proof. intros; generalize (depth_max' t); destruct c; lia. Qed. Theorem balanced : forall c n (t : rbtree c n), 2 * depth min t + 1 >= depth max t. Proof. intros; generalize (depth_min t); generalize (depth_max t); lia. Qed. Inductive rtree : nat -> Set := | RedNode' : forall c1 c2 n, rbtree c1 n -> nat -> rbtree c2 n -> rtree n. Section present. Variable x : nat. Equations present {c n} (t : rbtree c n) : Prop := present Leaf := False; present (RedNode a y b) := present a \/ x = y \/ present b; present (BlackNode a y b) := present a \/ x = y \/ present b. Equations rpresent {n} (t : rtree n) : Prop := rpresent (RedNode' a y b) => present a \/ x = y \/ present b. End present. Notation "{< x >}" := (sigmaI _ _ x). Import Sigma_Notations. (* No need for convoy pattern! *) Equations balance1 n (a : rtree n) (data : nat) c2 (b : rbtree c2 n) : Σ c, rbtree c (S n) := balance1 (RedNode' t1 y t2) data d with t1 => { | RedNode a x b := {}; | _ with t2 => { | RedNode b x c := {}; | b := {} } }. Equations balance2 n (a : rtree n) (data : nat) c2 (b : rbtree c2 n) : Σ c, rbtree c (S n) := balance2 (RedNode' (c2:=c0) t1 z t2) data a with t1 => { | RedNode b y c := {}; | _ with t2 => { | RedNode c z' d := {}; | _ := {} } }. Section insert. Variable x : nat. Equations insResult (c : color) (n : nat) : Set := insResult Red n := rtree n; insResult Black n := Σ c', rbtree c' n. Transparent insResult. Equations ins {c n} (t : rbtree c n) : insResult c n := ins Leaf := {< RedNode Leaf x Leaf >}; ins (RedNode a y b) with le_lt_dec x y => { | left _ := RedNode' (pr2 (ins a)) y b; | right _ := RedNode' a y (pr2 (ins b)) }; ins (@BlackNode c1 c2 _ a y b) with le_lt_dec x y => { | left _ with c1 => { | Red := balance1 (ins a) y b; | Black := {} }; | right _ with c2 => { | Red := balance2 (ins b) y a; | Black := {< BlackNode a y (pr2 (ins b))>} } }. Equations insertResult (c : color) (n : nat) : Set := insertResult Red n := rbtree Black (S n); insertResult Black n := Σ c', rbtree c' n. Transparent insertResult. Equations makeRbtree c n (r : insResult c n) : insertResult c n := makeRbtree Red _ (RedNode' a x b) := BlackNode a x b; makeRbtree Black _ r := r. Arguments makeRbtree [c n] _. 
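(** The return type of [ins] is computed by [insResult]: on a black-rooted tree it is a dependent pair packaging the (possibly changed) root color with the new tree (illustrative check, added; it relies on [insResult] being [Transparent]). *)
Check (ins Leaf : Σ c', rbtree c' 0).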
Equations insert {c n} (t : rbtree c n) : insertResult c n := insert t := makeRbtree (ins t). Section present. Variable z : nat. Lemma present_balance1 : forall n (a : rtree n) (y : nat) c2 (b : rbtree c2 n), present z (pr2 (balance1 a y b)) <-> rpresent z a \/ z = y \/ present z b. Proof. intros. funelim (balance1 a y b); subst; simpl in *; tauto. Qed. Lemma present_balance2 : forall n (a : rtree n) (y : nat) c2 (b : rbtree c2 n), present z (pr2 (balance2 a y b)) <-> rpresent z a \/ z = y \/ present z b. Proof. intros. funelim (balance2 a y b); subst; simpl in *; tauto. Qed. Equations present_insResult (c : color) (n : nat) (t : rbtree c n) (r : insResult c n): Prop := @present_insResult Red n t r := rpresent z r <-> z = x \/ present z t; @present_insResult Black n t r := present z (pr2 r) <-> z = x \/ present z t. Theorem present_ins : forall c n (t : rbtree c n), present_insResult t (ins t). Proof. intros. funelim (ins t); simp present_insResult in *; simpl in *; try match goal with [ |- context [balance1 ?A ?B ?C] ] => generalize (present_balance1 A B C) end; try match goal with [ |- context [balance2 ?A ?B ?C] ] => generalize (present_balance2 A B C) end; try tauto. Qed. Ltac present_insert t t0 := intros; funelim (insert t); cbn; generalize (present_ins t0); try rewrite present_insResult_equation_1; try rewrite present_insResult_equation_2; funelim (ins t0); intro; assumption. Theorem present_insert_Red : forall n (t : rbtree Red n), present z (insert t) <-> (z = x \/ present z t). Proof. intros. funelim (insert t). generalize (present_ins t). simpl. try rewrite present_insResult_equation_1; try rewrite present_insResult_equation_2. funelim (ins t). intros; assumption. intros; assumption. Qed. Theorem present_insert_Black : forall n (t : rbtree Black n), present z (pr2 (insert t)) <-> (z = x \/ present z t). Proof. present_insert t t. Qed. End present. End insert. Coq-Equations-1.3.1-8.20/examples/POPLMark1a.v000066400000000000000000000304021463127417400204150ustar00rootroot00000000000000(** ** POPLMark 1a solution Original development by Rafaël Bocquet: POPLmark part 1A with inductive definition of scope and well-scoped variables (and terms, types and environments). *) Require Import Program. From Equations Require Import Equations. Require Import Coq.Classes.EquivDec. Require Import Arith. Definition scope := nat. Inductive var : scope -> Set := | FO : forall {n}, var (S n) | FS : forall {n}, var n -> var (S n) . Derive Signature NoConfusion NoConfusionHom for var. Inductive scope_le : scope -> scope -> Set := (* We use an equality in the constructor here to avoid requiring UIP on [nat]. *) | scope_le_n : forall {n m}, n = m -> scope_le n m | scope_le_S : forall {n m}, scope_le n m -> scope_le n (S m) | scope_le_map : forall {n m}, scope_le n m -> scope_le (S n) (S m). Derive Signature NoConfusion NoConfusionHom Subterm for scope_le. Equations scope_le_app {a b c} (p : scope_le a b) (q : scope_le b c) : scope_le a c := (* by wf (signature_pack q) scope_le_subterm := *) scope_le_app p (scope_le_n eq_refl) := p; scope_le_app p (scope_le_S q) := scope_le_S (scope_le_app p q); scope_le_app p (scope_le_map q) with p := { | scope_le_n eq_refl := scope_le_map q; | scope_le_S p' := scope_le_S (scope_le_app p' q); | (scope_le_map p') := scope_le_map (scope_le_app p' q) }. (* Proof. all:repeat constructor. Defined. *) Lemma scope_le_app_len n m (q : scope_le n m) : scope_le_app (scope_le_n eq_refl) q = q. Proof. depind q; simp scope_le_app; trivial. destruct e; reflexivity. now rewrite IHq. 
Qed. #[local] Hint Rewrite scope_le_app_len : scope_le_app. Inductive type : scope -> Type := | tvar : forall {n}, var n -> type n | ttop : forall {n}, type n | tarr : forall {n}, type n -> type n -> type n | tall : forall {n}, type n -> type (S n) -> type n . Derive Signature NoConfusion NoConfusionHom for type. Inductive env : scope -> scope -> Set := | empty : forall {n m}, n = m -> env n m | cons : forall {n m}, type m -> env n m -> env n (S m) . Derive Signature NoConfusion NoConfusionHom for env. Lemma env_scope_le : forall {n m}, env n m -> scope_le n m. Proof. intros n m Γ; depind Γ. constructor; auto. constructor 2; auto. Defined. Equations env_app {a b c} (Γ : env a b) (Δ : env b c) : env a c := env_app Γ (empty eq_refl) := Γ; env_app Γ (cons t Δ) := cons t (env_app Γ Δ). Lemma cons_app : forall {a b c} (Γ : env a b) (Δ : env b c) t, cons t (env_app Γ Δ) = env_app Γ (cons t Δ). Proof. intros. autorewrite with env_app. reflexivity. Qed. #[local] Hint Rewrite @cons_app. Equations map_var {n m} (f : var n -> var m) (t : var (S n)) : var (S m) := map_var f FO := FO; map_var f (FS x) := FS (f x). Lemma map_var_a : forall {n m o} f g a, @map_var n o (fun t => f (g t)) a = @map_var m o f (@map_var n m g a). Proof. depind a; autorewrite with map_var; auto. Qed. Lemma map_var_b : forall {n m} (f g : var n -> var m), (forall x, f x = g x) -> forall a, map_var f a = map_var g a. Proof. depind a; autorewrite with map_var; try f_equal; auto. Qed. Equations lift_var_by {n m} (p : scope_le n m) : var n -> var m := lift_var_by (scope_le_n eq_refl) := fun t => t; lift_var_by (scope_le_S p) := fun t => FS (lift_var_by p t); lift_var_by (scope_le_map p) := map_var (lift_var_by p). Equations lift_type_by {n m} (f : scope_le n m) (t : type n) : type m := lift_type_by f (tvar x) := tvar (lift_var_by f x); lift_type_by f ttop := ttop; lift_type_by f (tarr a b) := tarr (lift_type_by f a) (lift_type_by f b); lift_type_by f (tall a b) := tall (lift_type_by f a) (lift_type_by (scope_le_map f) b). Lemma lift_var_by_app : forall {b c} (p : scope_le b c) {a} (q : scope_le a b) t, lift_var_by p (lift_var_by q t) = lift_var_by (scope_le_app q p) t. Proof with autorewrite with lift_var_by map_var scope_le_app in *; auto. intros b c p; induction p; intros a q t; try destruct e... - rewrite IHp; auto. - generalize dependent p. generalize dependent t. depind q; subst; intros... rewrite IHp... specialize (IHp _ q). rewrite (map_var_b (lift_var_by (scope_le_app q p)) (fun t => lift_var_by p (lift_var_by q t))); eauto. rewrite <- map_var_a; auto. Qed. #[local] Hint Rewrite @lift_var_by_app : lift_var_by. Lemma lift_type_by_id : forall {n} (t : type n) P, (forall x, lift_var_by P x = x) -> lift_type_by P t = t. Proof. depind t; intros; autorewrite with lift_type_by; rewrite ?H, ?IHt1, ?IHt2; auto. intros; depelim x; autorewrite with lift_var_by map_var; try f_equal; auto. Qed. Lemma lift_type_by_n : forall {n} (t : type n), lift_type_by (scope_le_n eq_refl) t = t. Proof. intros; eapply lift_type_by_id; intros; autorewrite with lift_var_by; auto. Qed. #[local] Hint Rewrite @lift_type_by_n : lift_type_by. Lemma lift_type_by_app : forall {a} t {b c} (p : scope_le b c) (q : scope_le a b), lift_type_by p (lift_type_by q t) = lift_type_by (scope_le_app q p) t. Proof. depind t; intros b c p; depind p; intros q; repeat (autorewrite with scope_le_app lift_var_by lift_type_by; rewrite ?IHt1, ?IHt2; auto). Qed. #[local] Hint Rewrite @lift_type_by_app : lift_type_by. 
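(** A concrete instance of variable weakening (added example; the name [lift_var_sketch] is ours): lifting the single variable of [var 1] through a [scope_le_S] step inserts one [FS]. *)
Example lift_var_sketch : lift_var_by (scope_le_S (scope_le_n eq_refl)) (@FO 0) = FS FO.
Proof. reflexivity. Qed.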
Equations lookup {n} (Γ : env O n) (x : var n) : type n := lookup (n:=(S _)) (cons a Γ) FO := lift_type_by (scope_le_S (scope_le_n eq_refl)) a; lookup (n:=(S _)) (cons a Γ) (FS x) := lift_type_by (scope_le_S (scope_le_n eq_refl)) (lookup Γ x) . Lemma lookup_app {n} (Γ : env O (S n)) {m} (Δ : env (S n) (S m)) x : lookup (env_app Γ Δ) (lift_var_by (env_scope_le Δ) x) = lift_type_by (env_scope_le Δ) (lookup Γ x). Proof with autorewrite with lookup scope_le_app env_app lift_var_by lift_type_by; auto. induction Δ; subst; simpl... rewrite IHΔ... Qed. #[local] Hint Rewrite @lookup_app : lookup. (** The subtyping judgment *) Inductive sa : forall {n}, env O n -> type n -> type n -> Prop := | sa_top : forall {n} (Γ : env O n) s, sa Γ s ttop | sa_var_refl : forall {n} (Γ : env O n) x, sa Γ (tvar x) (tvar x) | sa_var_trans : forall {n} (Γ : env O (S n)) x t, sa Γ (lookup Γ x) t -> sa Γ (tvar x) t | sa_arr : forall {n} {Γ : env O n} {t1 t2 s1 s2}, sa Γ t1 s1 -> sa Γ s2 t2 -> sa Γ (tarr s1 s2) (tarr t1 t2) | sa_all : forall {n} {Γ : env O n} {t1 t2 s1 s2}, sa Γ t1 s1 -> sa (cons t1 Γ) s2 t2 -> sa Γ (tall s1 s2) (tall t1 t2) . Derive Signature for sa. Inductive sa_env : forall {n}, env O n -> env O n -> Prop := | sa_empty : sa_env (empty eq_refl) (empty eq_refl) | sa_cons : forall {n} (Γ Δ : env O n) a b, sa Γ a b -> sa_env Γ Δ -> sa_env (cons a Γ) (cons b Δ) . Derive Signature for sa_env. Lemma sa_refl : forall {n} (Γ : env O n) x, sa Γ x x. Proof. depind x; constructor; auto. Qed. Lemma sa_env_refl : forall {n} (Γ : env O n), sa_env Γ Γ. Proof. depind Γ; subst; constructor; auto using sa_refl. Qed. Inductive env_extend : forall {b c}, env O b -> env O c -> scope_le b c -> Prop := | env_extend_refl : forall {b} (Γ : env O b), env_extend Γ Γ (scope_le_n eq_refl) | env_extend_cons : forall {b c} (Γ : env O b) (Δ : env O c) p a, env_extend Γ Δ p -> env_extend (cons a Γ) (cons (lift_type_by p a) Δ) (scope_le_map p) | env_extend_2 : forall {b c} (Γ : env O b) (Δ : env O c) p a, env_extend Γ Δ p -> env_extend Γ (cons a Δ) (scope_le_S p) . Derive Signature for env_extend. Lemma env_app_extend {b c} (Γ : env O b) (Δ : env b c) : env_extend Γ (env_app Γ Δ) (env_scope_le Δ). Proof. depind Δ; subst; intros; autorewrite with env_app scope_le_app in *; simpl; constructor; auto. Qed. Lemma env_extend_lookup {b c} (Γ : env O b) (Δ : env O c) P : env_extend Γ Δ P -> forall x, lift_type_by P (lookup Γ x) = lookup Δ (lift_var_by P x). Proof with autorewrite with lift_type_by lift_var_by map_var lookup scope_le_app; auto. intros A; depind A; intros x; depelim x... all:rewrite <- IHA... Qed. Lemma sa_weakening {b} (Γ : env O b) p q (A : sa Γ p q) : forall {c P} (Δ : env O c) (B : env_extend Γ Δ P), sa Δ (lift_type_by P p) (lift_type_by P q). Proof. induction A; intros c P Δ B; autorewrite with lift_type_by in *; try (auto; constructor; auto; fail). - depelim c; [depelim B|]. constructor; rewrite <- (env_extend_lookup _ _ _ B); auto. - constructor; auto. eapply IHA2. constructor. auto. Qed. Lemma sa_weakening_app {b} (Γ : env O b) p q (A : sa Γ p q) {c} (Δ : env b c) : sa (env_app Γ Δ) (lift_type_by (env_scope_le Δ) p) (lift_type_by (env_scope_le Δ) q). Proof. intros; eapply sa_weakening. exact A. auto using env_app_extend. Qed. Lemma sa_toname {n m} Γ (Δ : env (S n) m) x : x <> lift_var_by (env_scope_le Δ) FO -> forall p q, lookup (env_app (cons p Γ) Δ) x = lookup (env_app (cons q Γ) Δ) x. Proof. depind Δ; subst; intros A p q; depelim x; simpl in *; autorewrite with env_app lookup lift_var_by in *; auto. - exfalso; auto. 
- specialize (IHΔ Γ x). forward IHΔ by intro; subst; auto. now rewrite (IHΔ p q). Qed. Lemma var_dec_eq : forall {n} (x y : var n), {x = y} + {x <> y}. Proof. depind x; depelim y. - left; reflexivity. - right; intro H; depelim H. - right; intro H; depelim H. - destruct (IHx y); subst. + left; reflexivity. + right; intro H; depelim H. contradiction. Qed. Lemma sa_narrowing {s} q : (forall {s'} (P : scope_le s s') (Γ : env O s') p (A : sa Γ p (lift_type_by P q)) s'' (Δ : env (S s') s'') a b (B : sa (env_app (cons (lift_type_by P q) Γ) Δ) a b), sa (env_app (cons p Γ) Δ) a b) /\ (forall {s'} (A : scope_le s s') (Γ : env O s') p (B : sa Γ p (lift_type_by A q)) r (C : sa Γ (lift_type_by A q) r), sa Γ p r). Proof. induction q; match goal with | [ |- _ /\ ?Q ] => assert (PLOP:Q); [ intros s' A Γ p B; depind B; subst; intros r C; autorewrite with lift_type_by lift_var_by in *; try noconf H; try (constructor; auto; fail); try (constructor; eapply IHB; autorewrite with lift_type_by; auto; fail); try (depelim C; subst; constructor; destruct_pairs; try noconf H; eauto; fail); try (specialize (IHB _ _ _ IHq1 IHq2 A); destruct_pairs; try noconf H; constructor; eauto; fail); auto | split; [ intros s' P Γ p A; depind A; subst; intros s'' Δ a b B; destruct_pairs; remember (env_app (cons _ Γ) Δ) as PPP; depind B; try (subst; constructor; auto; autorewrite with core; auto; fail); clear B; constructor; specialize (IHB _ HeqPPP); subst *; try (noconf H; auto); match goal with | [ IHB : sa _ (lookup (env_app (cons ?a _) _) ?x) _ |- sa _ (lookup (env_app (cons ?b _) _) _) _ ] => destruct (var_dec_eq x (lift_var_by (env_scope_le Δ) FO)) as [Heq|Hneq] ; [ subst; autorewrite with lookup lift_type_by lift_var_by in *; try (noconf H; auto); autorewrite with lookup lift_type_by lift_var_by scope_le_app in *; try solve [auto; depelim IHB; autorewrite with lookup lift_type_by lift_var_by scope_le_app in *; auto; constructor; auto; fail]; try solve [(apply sa_var_trans in A || assert (A := sa_arr A1 A2) || assert (A := sa_all A1 A2)); match goal with | [ A : sa _ ?p _ |- _ ] => (apply @sa_weakening_app with (Δ:=cons p (empty eq_refl)) in A; apply @sa_weakening_app with (Δ:=Δ) in A; autorewrite with lookup env_app lift_var_by lift_type_by in *; simpl in *; eapply PLOP; [exact A | exact IHB]) end; fail] | rewrite sa_toname with (p:=b) (q:=a); auto ] end | assumption ] ] end. - clear IHB1 IHB2. depelim C; [constructor|]; destruct_pairs. constructor; eauto. simpl in H. simpl in H0. apply (H1 _ A Γ _ C1 _ (empty eq_refl) _ _) in B2; autorewrite with env_app in B2; eauto. Qed. Print Assumptions sa_narrowing. (* Closed under the global context *) Coq-Equations-1.3.1-8.20/examples/RoseTree.v000066400000000000000000000150431463127417400203420ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Require Import Program. From Equations Require Import Equations. Require Import Utf8 Lia Arith. Require Import List. Import ListNotations. Set Keyed Unification. Equations map_In {A B : Type} (l : list A) (f : forall (x : A), In x l -> B) : list B := | nil, _ := nil | cons x xs, f := cons (f x _) (map_In xs (fun x H => f x _)). 
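(** [map_In] behaves like [List.map] except that the function also receives a membership proof for each element (added example; the name [map_In_sketch] is ours). *)
Example map_In_sketch : map_In [1; 2] (fun x _ => x + 1) = [2; 3].
Proof. reflexivity. Qed.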
Lemma map_In_spec {A B : Type} (f : A -> B) (l : list A) : map_In l (fun (x : A) (_ : In x l) => f x) = List.map f l. Proof. funelim (map_In l _); rewrite ?H; trivial. Qed. Section list_size. Context {A : Type} (f : A -> nat). Equations list_size (l : list A) : nat := | nil := 0 | x :: xs := S (f x + list_size xs). Lemma In_list_size: forall x xs, In x xs -> f x < S (list_size xs). Proof. intros. funelim (list_size xs); simpl in *. destruct H. destruct H0. * subst; lia. * specialize (H _ H0). intuition. lia. Qed. End list_size. Transparent list_size. Module RoseTree. Section roserec. Context {A : Set}. Inductive t : Set := | leaf (a : A) : t | node (l : list t) : t. Derive NoConfusion for t. Fixpoint size (r : t) := match r with | leaf a => 0 | node l => S (list_size size l) end. Equations? elements (r : t) : list A by wf (size r) lt := elements (leaf a) := [a]; elements (node l) := concat (map_In l (fun x H => elements x)). Proof. red. now apply In_list_size. Qed. Equations elements_def (r : t) : list A := elements_def (leaf a) := [a]; elements_def (node l) := concat (List.map elements_def l). Lemma elements_equation (r : t) : elements r = elements_def r. Proof. funelim (elements r); simp elements_def; trivial. f_equal. rewrite map_In_spec. clear Heqcall. induction l; simpl; auto. rewrite H. rewrite IHl; auto. intros. apply H. now constructor 2. now constructor. Qed. (** To solve measure subgoals *) Hint Extern 4 (_ < _) => simpl; lia : rec_decision. Hint Extern 4 (MR _ _ _ _) => repeat red; simpl in *; lia : rec_decision. Obligation Tactic := simpl in *; program_simpl; try typeclasses eauto with subterm_relation simp rec_decision. (* Nested rec *) Equations elements_acc (r : t) (acc : list A) : list A by wf (size r) lt := elements_acc (leaf a) acc := a :: acc; elements_acc (node l) acc := aux l _ where aux (x : list t) (H : list_size size x < size (node l)) : list A by wf (list_size size x) lt := aux nil _ := acc; aux (cons x xs) H := elements_acc x (aux xs _). Definition elements2 (r : t) : list A := elements_acc r []. Lemma elements2_equation r acc : elements_acc r acc = elements_def r ++ acc. Proof. revert r acc. let t := constr:(fun_elim (f:=elements_acc)) in apply (t (fun r acc res => res = elements_def r ++ acc) (fun r acc x H res => res = concat (List.map elements_def x) ++ acc)); intros; simp elements; trivial. rewrite H1. clear H1. rewrite H0. simpl. now rewrite app_assoc. Qed. Equations elements' (r : t) : list A by wf r (MR lt size) := | leaf a := [a] | node l := fn l hidebody where fn (x : list t) (H : list_size size x < size (node l)) : list A by wf x (MR lt (list_size size)) := | nil, _ := nil; | cons x xs, _ := elements' x ++ fn xs hidebody. Equations elements'_def (r : t) : list A := elements'_def (leaf a) := [a]; elements'_def (node l) := concat (List.map elements' l). Lemma elements'_equation (r : t) : elements' r = elements'_def r. Proof. pose (fun_elim (f:=elements')). apply (p (fun r f => f = elements'_def r) (fun l x H r => r = concat (List.map elements' x))); clear p; intros; simp elements'_def; trivial. simpl. f_equal. apply H1. Qed. End roserec. Arguments t : clear implicits. Section AltSize. Context {A : Set}. (** Let's use an alternative size criterion allowing to make recursive calls on non-strict subterms of the initial list: we just count the maximal depth of [node] constructors among all forests. 
*) Equations alt_size (r : t A) : nat := { alt_size (leaf _) => 0; alt_size (node l) => S (max_size l) } where max_size (l : list (t A)) : nat := { max_size nil := 0; max_size (cons a t) := Nat.max (alt_size a) (max_size t) }. (** This has the property that the maximal size of two appended lists is the maximal size of the separate lists. *) Lemma max_size_app l l' : max_size (l ++ l') = Nat.max (max_size l) (max_size l'). Proof. induction l; simp max_size. reflexivity. simpl. rewrite <- Nat.max_assoc. f_equal. apply IHl. Qed. Context {B : Set} (f : A -> B). (** It hence becomes possible to recurse on an arbitrary list as long as the depth decreases, for example by appending the subforest to itself in the [node] case. The same is possible with sized types where node has type [j < i -> list^i (t^j) -> t^(S i)]. *) Equations? map_t (r : t A) : t B by wf (alt_size r) lt := map_t (leaf a) := leaf (f a); map_t (node l) := node (map_list (l ++ l) _) where map_list (l' : list (t A)) (H : max_size l' ≤ max_size l) : list (t B) by struct l' := map_list nil _ := nil; map_list (cons a t) Hl' := cons (map_t a) (map_list t _). Proof. simp alt_size. apply Nat.lt_succ_r. now apply Nat.max_lub_l in Hl'. now apply Nat.max_lub_r in Hl'. clear map_list. rewrite max_size_app. now rewrite Nat.max_id. Defined. End AltSize. Section fns. Context {A B : Set} (f : A -> B) (g : B -> A -> B) (h : A -> B -> B). Equations map (r : t A) : t B := | leaf a := leaf (f a) | node l := node (List.map map l). Equations fold (acc : B) (r : t A) : B := fold acc (leaf a) := g acc a; fold acc (node l) := List.fold_left fold l acc. Equations fold_right (r : t A) (acc : B) : B := fold_right (leaf a) acc := h a acc; fold_right (node l) acc := List.fold_right fold_right acc l. End fns. End RoseTree. Coq-Equations-1.3.1-8.20/examples/STLC.v000066400000000000000000001100641463127417400173560ustar00rootroot00000000000000(* begin hide *) (**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (* end hide *) (** * Normalization of Simply Typed Lambda-Calculus through Hereditary Substitutions. Uses extrinsic encoding of terms, with de Bruijn indices, lifting and substitution. Derive hereditary substitution function justified by a well-founded order on typable terms and conclude with a normalizer building beta-short eta-long normal forms, typable in a bidirectional type system. *) Require Program. From Equations Require Import Equations. Require Import Lia. Require Import List Utf8. Import ListNotations. Set Keyed Unification. Derive Signature for le CompareSpec. Inductive term := | Var (n : nat) | Lambda (t : term) | App (t u : term) | Pair (t u : term) | Fst (t : term) | Snd (t : term) | Tt. Derive NoConfusion Subterm EqDec for term. Coercion Var : nat >-> term. Declare Scope term_scope. Delimit Scope term_scope with term. Bind Scope term_scope with term. Notation " @( f , x ) " := (App (f%term) (x%term)). Notation " 'λ' t " := (Lambda (t%term)) (at level 10). Notation " << t , u >> " := (Pair (t%term) (u%term)). Parameter atomic_type : Set. Parameter atomic_type_eqdec : EqDec atomic_type. #[export] Existing Instance atomic_type_eqdec. 
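(** With the notations above, the β-redex (λx. x) tt is written as an application of a [Lambda] to [Tt]; thanks to [Coercion Var], bare numerals stand for de Bruijn variables (illustrative check, added). *)
Check (@(Lambda 0, Tt) : term).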
Inductive type := | atom (a : atomic_type) | product (a b : type) | unit | arrow (a b : type). Derive NoConfusion Subterm EqDec for type. Coercion atom : atomic_type >-> type. Notation " x × y " := (product x y) (at level 90). Notation " x ---> y " := (arrow x y) (at level 30). Require Import Arith. Equations lift (k n : nat) (t : term) : term := lift k n (Var i) with Nat.compare i k := { | Lt := Var i ; | _ := Var (i + n) } ; lift k n (Lambda t) := Lambda (lift (S k) n t) ; lift k n (App t u) := @(lift k n t, lift k n u) ; lift k n (Pair t u) := << lift k n t, lift k n u >> ; lift k n (Fst t) := Fst (lift k n t) ; lift k n (Snd t) := Snd (lift k n t) ; lift k n Tt := Tt. Tactic Notation "absurd" tactic(tac) := exfalso; tac. Ltac term_eq := match goal with | |- Var _ = Var _ => f_equal ; lia | |- @eq nat _ _ => lia || absurd lia | |- lt _ _ => lia || absurd lia | |- le _ _ => lia || absurd lia | |- gt _ _ => lia || absurd lia | |- ge _ _ => lia || absurd lia end. #[local] Hint Extern 4 => term_eq : term. Ltac term := typeclasses eauto with term core arith. Ltac do_rewrites := repeat match goal with H : ?lhs = ?rhs |- context [?lhs] => rewrite H; clear H end. Ltac crush := do_rewrites; auto; try term. Lemma lift0 k t : lift k 0 t = t. Proof. funelim (lift k 0 t); term || rewrite ?H; crush. Qed. #[local] Hint Rewrite lift0 : lift. Require Import Lia. Lemma lift_k_lift_k k n m t : lift k n (lift k m t) = lift k (n + m) t. Proof. funelim (lift k m t); intros; simp lift; try rewrite H ; try rewrite H0; auto. destruct (Nat.compare_spec i k); try discriminate. subst. case_eq (Nat.compare (k + n) k); intro H; simp lift; try term. rewrite Nat.compare_lt_iff in H; term. rewrite Heq; simp lift; term. rewrite Heq. rewrite Nat.compare_gt_iff in Heq. simp lift. destruct (Nat.compare_spec (i + n) k); try discriminate; simp lift; term. Qed. #[local] Hint Rewrite lift_k_lift_k : lift. Equations subst (k : nat) (t : term) (u : term) : term := subst k (Var i) u with Nat.compare i k := { | Eq := lift 0 k u ; | Lt := i ; | Gt := Var (pred i) } ; subst k (Lambda t) u := Lambda (subst (S k) t u) ; subst k (App a b) u := @(subst k a u, subst k b u) ; subst k (Pair a b) u := << subst k a u, subst k b u >> ; subst k (Fst t) u := Fst (subst k t u) ; subst k (Snd t) u := Snd (subst k t u) ; subst k Tt _ := Tt. Lemma substnn n t : subst n n t = lift 0 n t. Proof. funelim (subst n n t) ; try rewrite H ; try rewrite H0; simp lift; auto. rewrite Nat.compare_lt_iff in Heq; absurd lia. rewrite Nat.compare_gt_iff in Heq; absurd lia. Qed. #[local] Hint Rewrite substnn : subst. Notation ctx := (list type). Reserved Notation " Γ |-- t : A " (at level 70, t, A at next level). Inductive types : ctx -> term -> type -> Prop := | axiom Γ i : i < length Γ -> (Γ |-- i : nth i Γ unit) | abstraction Γ A B t : A :: Γ |-- t : B -> Γ |-- λ t : A ---> B | application Γ A B t u : Γ |-- t : A ---> B -> Γ |-- u : A -> Γ |-- @(t, u) : B | unit_intro Γ : Γ |-- Tt : unit | pair_intro Γ A B t u : Γ |-- t : A -> Γ |-- u : B -> Γ |-- << t , u >> : (A × B) | pair_elim_fst Γ A B t : Γ |-- t : (A × B) -> Γ |-- Fst t : A | pair_elim_snd Γ A B t : Γ |-- t : (A × B) -> Γ |-- Snd t : B where "Γ |-- i : A " := (types Γ i A). Derive Signature for types. Notation " [ t ] u " := (subst 0 u t) (at level 10). Notation " x @ y " := (app x y) (at level 30, right associativity). Lemma nth_length {A} x t (l l' : list A) : nth (length l) (l @ (t :: l')) x = t. Proof. induction l; simpl; auto. Qed. #[local] Hint Constructors types : term. 
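(** A tiny derivation in the typing judgment just defined (added example; the name [types_id_sketch] is ours): the identity function is typable at [unit ---> unit] in the empty context. *)
Example types_id_sketch : [] |-- λ 0 : unit ---> unit.
Proof. repeat constructor. Qed.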
Lemma nat_compare_elim (P : nat -> nat -> comparison -> Prop) (PEq : forall i, P i i Eq) (PLt : forall i j, i < j -> P i j Lt) (PGt : forall i j, i > j -> P i j Gt) : forall i j, P i j (Nat.compare i j). Proof. intros. case (Nat.compare_spec i j); intros; subst; auto. Qed. Lemma nth_extend_left {A} (a : A) n (l l' : list A) : nth n l a = nth (length l' + n) (l' @ l) a. Proof. induction l'; auto. Qed. Lemma nth_app_l {A} (a : A) {n} (l l' : list A) : n < length l -> nth n (l @ l') a = nth n l a. Proof. revert l l' n; induction l; intros; auto. depelim H. destruct n; trivial. simpl. eapply IHl. simpl in H. lia. Qed. Lemma nth_app_r {A} (a : A) {n} (l l' : list A) : length l <= n -> nth n (l @ l') a = nth (n - length l) l' a. Proof. revert l l' n; induction l; intros; auto. simpl in H. depelim H; auto. destruct n; simpl in H. depelim H. simpl; apply IHl; lia. Qed. Lemma nth_extend_middle {A} (a : A) n (l l' l'' : list A) : match Nat.compare n (length l') with | Lt => nth n (l' @ l) a = nth n (l' @ l'' @ l) a | _ => nth n (l' @ l) a = nth (n + length l'') (l' @ l'' @ l) a end. Proof. assert (foo:=Nat.compare_spec n (length l')). depelim foo; fold (length l') in H; try rewrite H0; try rewrite H. rewrite <- nth_extend_left. replace (length l'') with (length l'' + 0) by auto with arith. rewrite <- nth_extend_left. replace (length l') with (length l' + 0) by auto with arith. now rewrite <- nth_extend_left. clear H0. now rewrite !nth_app_l by trivial. clear H0. rewrite !nth_app_r by lia. f_equal. lia. Qed. #[local] Hint Rewrite <- app_assoc in_app_iff in_inv : list. Lemma type_lift Γ t T Γ' : Γ' @ Γ |-- t : T -> forall Γ'', Γ' @ Γ'' @ Γ |-- lift (length Γ') (length Γ'') t : T. Proof. intros H. depind H; intros; simp lift; eauto with term. generalize (nth_extend_middle unit i Γ0 Γ' Γ''). destruct Nat.compare; intros H'; rewrite H'; simp lift; apply axiom; autorewrite with list in H |- *; lia. apply abstraction. rewrite app_comm_cons. now apply IHtypes. Qed. Lemma type_lift1 Γ t T A : Γ |-- t : T -> A :: Γ |-- lift 0 1 t : T. Proof. intros. apply (type_lift Γ t T [] H [A]). Qed. Lemma type_liftn Γ Γ' t T : Γ |-- t : T -> Γ' @ Γ |-- lift 0 (length Γ') t : T. Proof. intros. apply (type_lift Γ t T [] H Γ'). Qed. #[local] Hint Resolve type_lift1 type_lift type_liftn : term. Ltac crush ::= do_rewrites; simpl; do_rewrites; auto; try term. Lemma app_cons_snoc_app {A} l (a : A) l' : l ++ (a :: l') = (l ++ a :: nil) ++ l'. Proof. induction l; crush. Qed. #[local] Hint Extern 5 => progress (simpl ; autorewrite with list) : term. Ltac term ::= simp lift subst; try typeclasses eauto with core term. Lemma substitutive Γ t T Γ' u U : (Γ' @ (U :: Γ)) |-- t : T -> Γ |-- u : U -> Γ' @ Γ |-- subst (length Γ') t u : T. Proof with term. intros H. depind H; term. intros. (* Var *) assert (spec:=Nat.compare_spec i (length Γ')). depelim spec; try fold (length Γ') in H1; subst; try rewrite H1; try rewrite H2 ; simp subst. (* Eq *) generalize (type_lift Γ0 u U [] H0 Γ'); simpl; intros. rewrite app_cons_snoc_app, app_nth1, app_nth2; try (simpl; lia). now rewrite Nat.sub_diag. term. (* Lt *) rewrite app_nth1 by lia. rewrite <- (app_nth1 _ Γ0); term. (* Gt *) rewrite app_nth2; term. change (U :: Γ0) with ((cons U nil) @ Γ0). rewrite app_nth2; term. simpl. rewrite (nth_extend_left unit _ Γ0 Γ'). replace (length Γ' + (i - length Γ' - 1)) with (pred i); term. apply axiom. autorewrite with list in H |- *. simpl in H. lia. (* Abstraction *) intros. apply abstraction. now eapply (IHtypes _ _ _ (A :: Γ')). Qed. 
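(** De Bruijn substitution at work (added example; the name [subst_app_sketch] is ours): substituting [Tt] for variable [0] in [@(0, 1)] replaces the first variable and shifts the remaining one down. *)
Example subst_app_sketch : subst 0 (@(0, 1)) Tt = @(Tt, 0).
Proof. reflexivity. Qed.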
Lemma subst1 Γ t T u U : U :: Γ |-- t : T -> Γ |-- u : U -> Γ |-- subst 0 t u : T. Proof. intros; now apply (substitutive Γ t T [] u U). Qed. Reserved Notation " t --> u " (at level 55, right associativity). Inductive reduce : term -> term -> Prop := | red_beta t u : @((Lambda t) , u) --> subst 0 t u | red_fst t u : Fst << t , u >> --> t | red_snd t u : Snd << t , u >> --> u where " t --> u " := (reduce t u). Derive Signature for reduce. Require Import Relations. Definition reduces := clos_refl_trans term reduce. Notation " t -->* u " := (reduces t u) (at level 55). Require Import Setoid. #[local] Instance: Transitive reduces. Proof. red; intros. econstructor 3; eauto. Qed. #[local] Instance: Reflexive reduces. Proof. red; intros. econstructor 2; eauto. Qed. Inductive value : term -> Prop := | val_var (i : nat) : value i | val_unit : value Tt | val_pair a b : value a -> value b -> value << a, b >> | val_lambda t : value (λ t). Derive Signature for value. #[local] Hint Constructors value : term. Inductive reduce_congr : relation term := | reduce1 t u : reduce t u -> reduce_congr t u | reduce_app_l t t' u : reduce_congr t t' -> reduce_congr (@(t, u)) (@(t', u)) | reduce_app_r t u u' : reduce_congr u u' -> reduce_congr (@(t, u)) (@(t, u')) | reduce_pair_l t t' u : reduce_congr t t' -> reduce_congr (<< t, u >>) (<< t', u >>) | reduce_pair_r t u u' : reduce_congr u u' -> reduce_congr (<< t, u >>) (<< t, u' >>) | reduce_fst t t' : reduce_congr t t' -> reduce_congr (Fst t) (Fst t') | reduce_snd t t' : reduce_congr t t' -> reduce_congr (Snd t) (Snd t'). Derive Signature for reduce_congr. Ltac find_empty := match goal with [ H : _ |- _ ] => solve [ depelim H ] end. Lemma preserves_red1 Γ t τ : Γ |-- t : τ → forall u, t --> u → Γ |-- u : τ. Proof. intros H; induction H; intros t' redtt'; term; try find_empty; depelim redtt'. apply subst1 with A. now depelim H. apply H0. now depelim H. now depelim H. Qed. Lemma preserves_redpar Γ t τ : Γ |-- t : τ → forall u, reduce_congr t u → Γ |-- u : τ. Proof. intros H. induction H; intros t' rtt'; depelim rtt'; term; try find_empty. depelim H1. depelim H. eapply subst1; eauto. depelim H0; depelim H; term. depelim H0; depelim H; term. Qed. Lemma subject_reduction Γ t τ : Γ |-- t : τ → forall u, t -->* u → Γ |-- u : τ. Proof. induction 2; eauto using preserves_red1. Qed. #[local] Hint Constructors reduce reduce_congr : term. Lemma progress_ t τ : nil |-- t : τ → (exists t', reduce_congr t t') \/ value t. Proof. intros H; depind H; auto with term. destruct IHtypes1 as [[t' tt']|vt]. left; eauto with term. destruct IHtypes2 as [[u' uu']|vu]. left; eauto with term. depelim H; [depelim H|depelim vt..]. left. exists ([u]t0). eauto with term. destruct IHtypes1 as [[t' tt']|vt]; eauto with term. destruct IHtypes2 as [[u' uu']|vu]; eauto with term. destruct IHtypes as [[t' tt']|vt]; eauto with term. depelim vt; depelim H; eauto with term. depelim H. destruct IHtypes as [[t' tt']|vt]; eauto with term. depelim vt; depelim H; eauto with term. depelim H. Qed. Reserved Notation " Γ |-- t => A " (at level 70, t, A at next level). Reserved Notation " Γ |-- t <= A " (at level 70, t, A at next level). Inductive atomic : type -> Prop := | atomic_atom a : atomic (atom a). Derive Signature for atomic. #[local] Hint Constructors atomic : term. (* FIXME bug *) Equations? atomic_dec (t : type) : { atomic t } + { ~ atomic t } := atomic_dec (atom a) := left (atomic_atom a) ; atomic_dec t := right _. Proof. all:(intro H; depelim H). Qed. 
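(** The β-rule in action (added example; the name [beta_step_sketch] is ours): the redex [@(Lambda 0, Tt)] steps to [Tt], because [subst 0 (Var 0) Tt] computes to [Tt]. *)
Example beta_step_sketch : @(Lambda 0, Tt) --> Tt.
Proof. exact (red_beta (Var 0) Tt). Qed.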
Inductive check : ctx -> term -> type -> Prop := | abstraction_check Γ A B t : A :: Γ |-- t <= B -> Γ |-- λ t <= A ---> B | pair_intro_check Γ A B t u : Γ |-- t <= A -> Γ |-- u <= B -> Γ |-- << t , u >> <= (A × B) | unit_intro_check Γ : Γ |-- Tt <= unit | check_synth Γ t T : atomic T -> Γ |-- t => T -> Γ |-- t <= T with synthetize : ctx -> term -> type -> Prop := | axiom_synth Γ i : i < length Γ -> Γ |-- i => nth i Γ unit | application_synth {Γ A B t u} : Γ |-- t => A ---> B -> Γ |-- u <= A -> Γ |-- @(t, u) => B | pair_elim_fst_synth {Γ A B t} : Γ |-- t => (A × B) -> Γ |-- Fst t => A | pair_elim_snd_synth {Γ A B t} : Γ |-- t => (A × B) -> Γ |-- Snd t => B where "Γ |-- i => A " := (synthetize Γ i A) and "Γ |-- i <= A " := (check Γ i A). Derive Signature for check synthetize. #[local] Hint Constructors synthetize check : term. Scheme check_mut_ind := Induction for check Sort Prop with synthetize_mut_ind := Induction for synthetize Sort Prop. Combined Scheme check_synthetize from check_mut_ind, synthetize_mut_ind. Lemma synth_arrow {Γ t T} : forall A : Prop, Γ |-- λ (t) => T -> A. Proof. intros A H. depelim H. Qed. Lemma synth_pair {Γ t u T} : forall A : Prop, Γ |-- << t, u >> => T -> A. Proof. intros A H. depelim H. Qed. Lemma synth_unit {Γ T} : forall A : Prop, Γ |-- Tt => T -> A. Proof. intros A H. depelim H. Qed. #[local] Hint Extern 3 => match goal with | H : ?Γ |-- ?t => ?T |- _ => apply (synth_arrow _ H) || apply (synth_pair _ H) || apply (synth_unit _ H) end : term. Lemma check_types : (forall Γ t T, Γ |-- t <= T -> Γ |-- t : T) with synthetizes_types : (forall Γ t T, Γ |-- t => T -> Γ |-- t : T). Proof. intros. destruct H; try econstructor; term. intros. destruct H; try solve [ econstructor; term ]. Qed. #[local] Hint Resolve check_types synthetizes_types : term. Inductive normal : term -> Prop := | normal_unit : normal Tt | normal_pair a b : normal a -> normal b -> normal << a, b >> | normal_abs t : normal t -> normal (λ t) | normal_neutral r : neutral r -> normal r with neutral : term -> Prop := | neutral_var i : neutral (Var i) | neutral_fst t : neutral t -> neutral (Fst t) | neutral_snd t : neutral t -> neutral (Snd t) | neutral_app t n : neutral t -> normal n -> neutral (@(t, n)). Derive Signature for normal neutral. #[local] Hint Constructors normal neutral : term. Lemma check_lift_gen Δ t T (H : Δ |-- t <= T) : forall Γ Γ', Δ = Γ' @ Γ -> forall Γ'', Γ' @ Γ'' @ Γ |-- lift (length Γ') (length Γ'') t <= T with synthetize_lift_gen Δ t T (H : Δ |-- t => T) : forall Γ Γ', Δ = Γ' @ Γ -> forall Γ'', Γ' @ Γ'' @ Γ |-- lift (length Γ') (length Γ'') t => T. Proof. destruct H; intros; simp lift. econstructor. change (S (length Γ')) with (length (A :: Γ')). change (A :: Γ' @ Γ'' @ Γ0) with ((A :: Γ') @ Γ'' @ Γ0). eapply check_lift_gen; try eassumption. subst. rewrite app_comm_cons; subst; try eassumption; trivial. econstructor; eapply check_lift_gen; eassumption. econstructor. econstructor. eassumption. eapply synthetize_lift_gen; eassumption. destruct H; intros; simp lift; try solve [econstructor; term]. clear check_lift_gen synthetize_lift_gen. subst. generalize (nth_extend_middle unit i Γ0 Γ' Γ''). destruct Nat.compare; intros H'; rewrite H'; simp lift; apply axiom_synth; autorewrite with list in H |- *; lia. Qed. Definition check_lift Γ t T Γ' (H : Γ' @ Γ |-- t <= T) : forall Γ'', Γ' @ Γ'' @ Γ |-- lift (length Γ') (length Γ'') t <= T := check_lift_gen (Γ' @ Γ) _ _ H _ _ eq_refl. 
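(** Introduction forms live in checking mode (added example; the name [check_pair_sketch] is ours): the pair of units checks against the product type in the empty context. *)
Example check_pair_sketch : [] |-- << Tt, Tt >> <= (unit × unit).
Proof. repeat constructor. Qed.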
Definition synthetize_lift Γ t T Γ' (H : Γ' @ Γ |-- t => T) : forall Γ'', Γ' @ Γ'' @ Γ |-- lift (length Γ') (length Γ'') t => T := synthetize_lift_gen (Γ' @ Γ) _ _ H _ _ eq_refl. Lemma check_lift1 {Γ t T A} : Γ |-- t <= T -> A :: Γ |-- lift 0 1 t <= T. Proof. intros. apply (check_lift Γ t T [] H [A]). Qed. Lemma synth_lift1 {Γ t T A} : Γ |-- t => T -> A :: Γ |-- lift 0 1 t => T. Proof. intros. apply (synthetize_lift Γ t T [] H [A]). Qed. #[local] Hint Resolve check_lift1 synth_lift1 : term. Lemma check_lift_ctx {Γ t T Γ'} : Γ |-- t <= T -> Γ' @ Γ |-- lift 0 (length Γ') t <= T. Proof. intros. apply (check_lift Γ t T [] H Γ'). Qed. Lemma synth_lift_ctx {Γ t T Γ'} : Γ |-- t => T -> Γ' @ Γ |-- lift 0 (length Γ') t => T. Proof. intros. apply (synthetize_lift Γ t T [] H Γ'). Qed. #[local] Hint Resolve check_lift_ctx synth_lift_ctx : term. Equations η (a : type) (t : term) : term := η (atom _) t := t ; η (product a b) t := << η a (Fst t), η b (Snd t) >> ; η (arrow a b) t := (Lambda (η b @(lift 0 1 t, η a 0)))%term ; η unit t := Tt. Lemma checks_arrow Γ t A B : Γ |-- t <= A ---> B → ∃ t', t = λ t' ∧ A :: Γ |-- t' <= B. Proof. intros H; inversion H; subst. exists t0; term. inversion H0. Qed. Lemma normal_lift {t k n} : normal t → normal (lift k n t) with neutral_lift {t k n} : neutral t -> neutral (lift k n t). Proof. destruct 1; simp lift; constructor; term. destruct 1; simp lift; try (constructor; term). destruct Nat.compare; term. Qed. #[local] Hint Resolve normal_lift neutral_lift : term. Lemma check_normal {Γ t T} : Γ |-- t <= T -> normal t with synth_neutral {Γ t T} : Γ |-- t => T -> neutral t. Proof. destruct 1; constructor; term. destruct 1; constructor; term. Qed. #[local] Hint Resolve check_normal synth_neutral : term. Lemma eta_expand Γ t A : neutral t → Γ |-- t => A -> Γ |-- η A t <= A. Proof. revert Γ t; induction A; intros; simp η; constructor; term. assert(0 < length (A1 :: Γ)) by (simpl; lia). specialize (IHA1 (A1 :: Γ) 0 (neutral_var _) (axiom_synth (A1 :: Γ) 0 H1)). apply (IHA2 (A1 :: Γ) @(lift 0 1 t, η A1 0)); term. Qed. Lemma η_normal : forall Γ A t, neutral t -> Γ |-- t => A -> normal (η A t). Proof. intros. now apply eta_expand in H0; term. Qed. (** Going to use the subterm order *) Require Import Arith Wf_nat. #[export] Instance wf_nat : Classes.WellFounded lt := lt_wf. #[local] Hint Constructors Subterm.lexprod : subterm_relation. Derive Signature for Acc. Notation lexicographic R S := (Subterm.lexprod _ _ R S). Definition her_order : relation (type * term * term) := lexicographic (lexicographic type_subterm term_subterm) term_subterm. #[local] Hint Unfold her_order : subterm_relation. Import Program.Tactics. Local Obligation Tactic := program_simpl. Arguments exist [A] [P]. Definition hereditary_type (t : type * term * term) := (term * option { u : type | u = (fst (fst t)) \/ type_subterm u (fst (fst t)) })%type. Inductive IsLambda {t} : hereditary_type t -> Set := | isLambda abs a b prf : IsLambda (Lambda abs, Some (exist (arrow a b) prf)). Equations is_lambda {t} (h : hereditary_type t) : IsLambda h + term := is_lambda (pair (Lambda abs) (Some (exist (arrow a b) prf))) := inl (isLambda abs a b prf) ; is_lambda (pair t' _) := inr t'. Arguments is_lambda : simpl never. Lemma is_lambda_inr {t} (h : hereditary_type t) : forall t', is_lambda h = inr t' -> fst h = t'. Proof. let elim := constr:(fun_elim (f:=@is_lambda)) in apply elim; simpl; intros; try congruence. Qed. 
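(** The [is_lambda] view either exposes a λ-abstraction at arrow type or returns the term untouched; on a variable it falls through to [inr] (added example; the index [(unit, Tt, Tt)] is an arbitrary placeholder and the name is ours). *)
Example is_lambda_var_sketch :
  is_lambda (t:=(unit, Tt, Tt)) (Var 0, None) = inr (Var 0).
Proof. reflexivity. Qed.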
Inductive IsPair {t} : hereditary_type t -> Set := | isPair u v a b prf : IsPair (Pair u v, Some (exist (product a b) prf)). Equations is_pair {t} (h : hereditary_type t) : IsPair h + term := is_pair (pair (Pair u v) (Some (exist (product a b) prf))) := inl (isPair u v a b prf) ; is_pair (pair t' _) := inr t'. Arguments is_pair : simpl never. Lemma is_pair_inr {t} (h : hereditary_type t) : forall t', is_pair h = inr t' -> fst h = t'. Proof. let elim := constr:(fun_elim (f:=@is_pair)) in apply elim; simpl; intros; try congruence. Qed. Lemma nth_extend_right {A} (a : A) n (l l' : list A) : n < length l -> nth n l a = nth n (l @ l') a. Proof. revert n l'. induction l; simpl; intros; auto. depelim H. destruct n; auto. apply IHl. auto with arith. Qed. Definition her_type (t : type * term * term) := let u' := fst (fst t) in { u : type | u = u' \/ type_subterm u u' }. #[local] Remove Hints t_step : subterm_relation. #[local] Remove Hints Subterm.clos_trans_stepr : subterm_relation. Ltac apply_step := match goal with |- clos_trans ?A ?R ?x ?y => not_evar y; eapply t_step end. #[local] Hint Extern 30 (clos_trans _ _ _ _) => apply_step : subterm_relation. Lemma clos_trans_inv {A} R (x y z : A) : clos_trans A R y z → clos_trans A R x y → clos_trans A R x z. Proof. eauto using t_trans. Qed. Ltac apply_transitivity := match goal with |- clos_trans ?A ?R ?x ?y => not_evar x; not_evar y; eapply clos_trans_inv end. #[local] Hint Extern 31 (clos_trans _ _ _ _) => apply_transitivity : subterm_relation. Equations? hereditary_subst (t : type * term * term) (k : nat) : term * option (her_type t) by wf t her_order := hereditary_subst (pair (pair A a) t) k with t := { | Var i with Nat.compare i k := { | Eq := (lift 0 k a, Some (exist A _)) ; | Lt := (Var i, None) ; | Gt := (Var (pred i), None) } ; | Lambda t' := (Lambda (fst (hereditary_subst (A, a, t') (S k))), None) ; | App f arg with hereditary_subst (A, a, f) k := { | p with is_lambda p := { | inl (isLambda f' A' B' prf) := let (f'', y) := hereditary_subst (A', fst (hereditary_subst (A, a, arg) k), f') 0 in (f'', Some (exist B' _)) ; | inr f' := (@(f', fst (hereditary_subst (A, a, arg) k)), None) } } ; | Pair i j := (<< fst (hereditary_subst (A, a, i) k), fst (hereditary_subst (A, a, j) k) >>, None) ; | Fst t' with hereditary_subst (A, a, t') k := { | p with is_pair p := { | inl (isPair u v a' b' prf) := (u, Some (exist a' _)) ; | inr p' := (Fst p', None) } } ; | Snd t' with hereditary_subst (A, a, t') k := { | p with is_pair p := { | inl (isPair u v a' b' prf) := (v, Some (exist b' _)) ; | inr p' := (Snd p', None) } } ; | Tt := (Tt, None) }. Proof. all:(try match goal with |- her_order _ _ => unfold her_type in *; simpl in *; try (clear; constructor 2; do 2 constructor) end). 1:(destruct prf; subst; eauto 10 with subterm_relation). all:(clear -prf; simpl in *; destruct prf; subst; eauto 5 with subterm_relation). Defined. #[local] Hint Unfold her_type : subterm_relation. #[local] Hint Unfold Program.Basics.const : subterm_relation. Ltac autoh := unfold type_subterm in * ; try typeclasses eauto with hereditary_subst subterm_relation. Ltac simph := try (rewrite_strat (innermost (hints hereditary_subst))); autoh. #[local] Hint Transparent type_subterm : subterm_relation. 
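(** Calling convention of the hereditary substitution defined above (a shape-only illustration, added; it does not compute anything): the function takes the type of the substituted term, the term itself and the target term, plus the de Bruijn level at which to substitute, and returns the reduct together with optional type information. *)
Check (hereditary_subst (unit ---> unit, Lambda 0, @(0, Tt)) 0
       : term * option (her_type (unit ---> unit, Lambda 0, @(0, Tt)))).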
Ltac invert_term := match goal with | [ H : check _ (Lambda _) _ |- _ ] => depelim H | [ H : check _ (Pair _ _) _ |- _ ] => depelim H | [ H : check _ Tt _ |- _ ] => depelim H | [ H : types _ ?t _ |- _ ] => match t with | Var _ => depelim H | Lambda _ => depelim H | App _ _ => depelim H | Pair _ _ => depelim H | Fst _ => depelim H | Snd _ => depelim H | Tt => depelim H end end. Lemma hereditary_subst_type Γ Γ' t T u U : Γ |-- u : U -> Γ' @ (U :: Γ) |-- t : T -> let (t', o) := hereditary_subst (U, u, t) (length Γ') in (Γ' @ Γ |-- t' : T /\ (forall ty prf, o = Some (exist ty prf) -> ty = T)). Proof. intros. funelim (hereditary_subst (U, u, t) (length Γ')) Heqcall; cbn in Heqcall |- *; DepElim.simpl_dep_elim; subst; try (split; [ (intros; try discriminate) | solve [ intros; discriminate ] ]); DepElim.simplify_dep_elim. invert_term. simpl in *. apply abstraction. specialize (H Γ (A0 :: Γ')). simpl in H. eqns_specialize_eqs H. simpl in H. on_call hereditary_subst ltac:(fun c => remember c as hsubst; destruct hsubst; simpl in *). specialize (H _ H0 H1). apply H; auto. on_call hereditary_subst ltac:(fun c => remember c as hsubst; destruct hsubst; simpl in *). on_call hereditary_subst ltac:(fun c => remember c as hsubst; destruct hsubst; simpl in *). depelim H2. constructor. now apply H. now apply H0. depelim H0. term. (* Var *) simpl. apply Nat.compare_eq in Heq; subst. depelim H0. rewrite !nth_length. split. term. intros. noconf H1. auto. (* Lt *) apply Nat.compare_lt_iff in Heq. depelim H0. replace (nth i (Γ' @ (_ :: Γ)) unit) with (nth i (Γ' @ Γ) unit). constructor. rewrite app_length. auto with arith. now do 2 rewrite <- nth_extend_right by auto. (* Gt *) pose (substitutive _ _ _ _ _ _ H0 H). simp subst in t. rewrite Heq in t. simp subst in t. (* App *) simpl in *. - on_call (hereditary_subst (A, a, arg)) ltac:(fun c => remember c as hsubst; destruct hsubst; simpl in *). on_call hereditary_subst ltac:(fun c => remember c as hsubst; destruct hsubst; simpl in * ). dependent elimination H2 as [application _ U T f arg tyfn tyu]. specialize (H _ _ H1 tyu). specialize (Hind _ _ H1 tyfn). cbn in Heqcall. rewrite Heq0 in Hind. destruct Hind as [Ht' Ht'']. dependent elimination Ht' as [abstraction _ U T abs tyabs]. eqns_specialize_eqs Ht''. noconf Ht''. destruct H as [Ht tty]. specialize (H0 _ [] _ _ _ _ Ht tyabs eq_refl Heqhsubst0). cbn in H0. rewrite <- Heqhsubst0 in H0. destruct H0 as [H0 H5]. split; auto. intros ty prf0 Heq'. noconf Heq'. auto. (* App no redex *) - apply is_lambda_inr in Heq. revert Heq. intros <-. depelim H1. specialize (H _ _ H0 H1_0). specialize (Hind _ _ H0 H1_). on_call hereditary_subst ltac:(fun c => remember c as hsubst; destruct hsubst; simpl in * ). on_call (hereditary_subst (A, a, arg)) ltac:(fun c => remember c as hsubst; destruct hsubst; simpl in * ). destruct H, Hind. econstructor; eauto. (* Fst redex *) - simpl in *. depelim H0. specialize (Hind _ _ H H0). cbn in Heqcall. rewrite Heq0 in Hind. destruct Hind. depelim H1. intuition auto. eqns_specialize_eqs H2. noconf H2. now noconf H1. (* Fst no redex *) - apply is_pair_inr in Heq. revert Heq. on_call hereditary_subst ltac:(fun c => remember c as hsubst; destruct hsubst; simpl in * ). depelim H0. intros <-. specialize (Hind _ _ H H0); eauto. destruct Hind. now apply pair_elim_fst with B. (* Snd redex *) - simpl. depelim H0. specialize (Hind _ _ H H0). rewrite Heq0 in Hind. destruct Hind. depelim H1. intuition auto. eqns_specialize_eqs H2. noconf H2. now noconf H1. (* Snd no redex *) - apply is_pair_inr in Heq. 
revert Heq. on_call hereditary_subst ltac:(fun c => remember c as hsubst; destruct hsubst; simpl in * ). intros Ht2; subst t. simpl in *. depelim H0. specialize (Hind _ _ H H0); eauto. now apply pair_elim_snd with A0. Qed. Print Assumptions hereditary_subst_type. Import Program.Basics. #[export] Instance: subrelation eq (flip impl). Proof. reduce. subst; auto. Qed. Lemma nth_pred Γ' Γ U n : n > length Γ' -> nth (pred n) (Γ' @ Γ) unit = nth n (Γ' @ (U :: Γ)) unit. Proof. revert_until Γ'. induction Γ'; intros. destruct n; auto. depelim H. destruct n; auto. simpl pred. simpl. rewrite <- IHΓ'. destruct n; auto. simpl in H. depelim H. depelim H. simpl in *; lia. Qed. Lemma hereditary_subst_subst U u t Γ' : (forall Γ T, Γ |-- u <= U -> match hereditary_subst (U, u, t) (length Γ') with | (t', Some (exist ty _)) => ((Γ' @ (U :: Γ) |-- t <= T -> Γ' @ Γ |-- t' <= T /\ ty = T) /\ (Γ' @ (U :: Γ) |-- t => T -> Γ' @ Γ |-- t' <= T /\ ty = T)) | (t', None) => (Γ' @ (U :: Γ) |-- t <= T -> Γ' @ Γ |-- t' <= T) /\ (Γ' @ (U :: Γ) |-- t => T -> Γ' @ Γ |-- t' => T) end). Proof. funelim (hereditary_subst (U, u, t) (length Γ')); simpl in *. let Hind := fresh "Hind" in rename H into Hind; intros ?? Hu. simpl. simpl in *. (** Lambda *) - cbn in *. on_call hereditary_subst ltac:(fun c => remember c as hsubst; destruct hsubst; simpl in *). split; intros Hsyn; [| elim (synth_arrow False Hsyn)]. invert_term. constructor. specialize (Hind _ _ _ (A0 :: Γ') eq_refl). simpl in *. specialize (Hind Heqhsubst _ B Hu). rewrite <- Heqhsubst in Hind. destruct o as [[ty prf]|], Hind as [Hind0 Hind1]. apply Hind0; eauto. eauto. elim (synth_arrow False H0). (** Pairs *) - do 2 on_call hereditary_subst ltac:(fun c => remember c as hsubst; destruct hsubst; simpl in *). split; intros Hsyn; [|elim (synth_pair False Hsyn)]. invert_term. specialize (H0 _ B H1). specialize (H _ A0 H1). destruct o as [[ty prf]|], o0 as [[ty' prf']|], H, H0; destruct_conjs; constructor; eauto. now apply H. now apply H0. now apply H. now apply H0. elim (synth_pair False H3). (* Unit *) - split; intros Hsyn; [|elim (synth_unit False Hsyn)]. depelim Hsyn. term. elim (synth_unit False H1). (* Var: eq *) - apply Nat.compare_eq in Heq; subst i. split; intros Hsyn; depelim Hsyn; rewrite ?nth_length. depelim H1; rewrite !nth_length. now split; term. split; term. (* Lt *) - apply Nat.compare_lt_iff in Heq. split; intros Hsyn; depelim Hsyn; [depelim H1;constructor;auto|]; (rewrite nth_app_l by lia; rewrite <- nth_app_l with (l':=Γ) by lia; constructor; rewrite app_length; auto with arith). (* Gt *) - apply Nat.compare_gt_iff in Heq. split; intros Hsyn; depelim Hsyn. depelim H1. constructor. auto. replace (nth i (Γ' @ (A :: Γ)) unit) with (nth (pred i) (Γ' @ Γ) unit). constructor. rewrite app_length in *. simpl in H1. lia. now apply nth_pred. replace (nth _ (Γ' @ (_ :: _)) unit) with (nth (pred i) (Γ' @ Γ) unit). constructor. rewrite app_length in *. simpl in H0. lia. now apply nth_pred. (* App *) - cbn. on_call (hereditary_subst (A,a,arg)) ltac:(fun c => remember c as hsubst; destruct hsubst; simpl in *). specialize (H0 _ _ _ [] eq_refl). rewrite Heq0 in Hind. revert H0. on_call hereditary_subst ltac:(fun c => remember c as hsubst; destruct hsubst; simpl in *). intros. (* Redex *) assert((Γ' @ (A :: Γ) |-- @(f, arg) => T → Γ' @ Γ |-- t0 <= T ∧ B' = T)). intros Ht; depelim Ht. destruct (Hind Γ (A0 ---> T) H1). specialize (H _ A' H1). destruct (H4 Ht). noconf H6. depelim H5. split; auto. destruct o; try destruct h; destruct H. destruct (H H2). subst x. 
specialize (H0 Heqhsubst0 _ B' H7). rewrite <- Heqhsubst0 in H0. destruct o0 as [[ty typrf]|]; destruct H0 as [Hcheck Hinf]. now apply Hcheck. now apply Hcheck. specialize (H0 Heqhsubst0 _ B' (H H2)). rewrite <- Heqhsubst0 in H0. destruct o0 as [[ty typrf]|]; destruct H0 as [Hcheck Hinf]. now apply Hcheck. now apply Hcheck. split; auto. depelim H6. split; eauto. intros Ht3u; apply H2. now depelim Ht3u. (* No redex *) - intros Γ T Hu. assert(Γ' @ (A :: Γ) |-- @( f, arg) => T → Γ' @ Γ |-- @( f', fst (hereditary_subst (A, a, arg) (length Γ'))) => T). intros Ht; depelim Ht. on_call hereditary_subst ltac:(fun c => remember c as hsubst; destruct hsubst; simpl in *). revert Heq. on_call hereditary_subst ltac:(fun c => remember c as hsubst; destruct hsubst; simpl in *). intros. pose (Hind _ (A0 ---> T) Hu). destruct o0 as [[ty prf']|]. + destruct y as [Hind' Hind'']. specialize (Hind'' Ht). destruct Hind''; subst ty. specialize (H _ A0 Hu). destruct o as [[ty' prf'']|]. ++ destruct H as [Hind0 Hind0']. specialize (Hind0 H0). destruct Hind0. subst ty'. eapply application_synth; eauto. simpl in *. depelim H1. simp is_lambda in Heq. noconf Heq. depelim H1. ++ depelim H1. simp is_lambda in Heq. noconf Heq. depelim H1. + clear y. specialize (H _ A0 Hu). destruct (Hind _ (A0 ---> T) Hu). apply is_lambda_inr in Heq. cbn in Heq; subst t0. simpl. destruct o as [[ty prf]|]; destruct H as [Hindt0 Hindt0']. eapply application_synth; eauto. now apply Hindt0. eapply application_synth; eauto. + split; auto. intros H2. depelim H2. constructor; auto. (* Pair *) - simpl in Heq0. autorewrite with is_pair in Heq. simpl in prf. intros Γ T Hu. assert( (Γ' @ (A :: Γ) |-- Fst t' => T → Γ' @ Γ |-- u <= T ∧ a' = T)). intros Ht; depelim Ht. specialize (Hind _ (T × B) Hu). revert Hind. on_call hereditary_subst ltac:(fun c => remember c as hsubst; destruct hsubst; simpl in *). noconf Heq0. cbn in Heqcall. intros [Hind Hind']. specialize (Hind' Ht). destruct Hind' as [H0 H1]. noconf H1. depelim H0. split; auto. depelim H0. split; auto. intros H1. depelim H1. intuition. - intros Γ T Hu. assert (Γ' @ (A :: Γ) |-- Fst t' => T → Γ' @ Γ |-- Fst p' => T). intros Ht; depelim Ht. specialize (Hind _ (T × B) Hu). revert Hind. on_call hereditary_subst ltac:(fun c => remember c as hsubst; destruct hsubst; simpl in *). destruct o as [[ty prf]|]. intros [Hind Hind']. destruct (Hind' Ht). subst ty. depelim H. simp is_pair in Heq. discriminate. depelim H. apply is_pair_inr in Heq. simpl in Heq ; subst p'. intros [Hind Hind']. eapply pair_elim_fst_synth. now apply Hind'. split; auto. intros H2. depelim H2. intuition auto with term. (* Snd *) - intros Γ T Hu. assert((Γ' @ (A :: Γ) |-- Snd t' => T → Γ' @ Γ |-- v <= T ∧ b' = T)). intros Ht; depelim Ht. specialize (Hind _ (A0 × T) Hu). revert Hind. on_call hereditary_subst ltac:(fun c => remember c as hsubst; destruct hsubst; simpl in *). noconf Heq0. intros [Hind Hind']. specialize (Hind' Ht). destruct Hind' as [H0 H1]. noconf H1. depelim H0. split; auto. depelim H0. split; auto. intros H1. depelim H1. intuition auto with term. - intros Γ T Hu. assert (Γ' @ (A :: Γ) |-- Snd t' => T → Γ' @ Γ |-- Snd p' => T). intros Ht; depelim Ht. specialize (Hind _ (A0 × T) Hu). revert Hind. on_call hereditary_subst ltac:(fun c => remember c as hsubst; destruct hsubst; simpl in *). destruct o as [[ty prf]|]. intros [Hind Hind']. destruct (Hind' Ht). subst ty. depelim H. simp is_pair in Heq. discriminate. depelim H. intros [Hind Hind']. apply is_pair_inr in Heq. subst p'. simpl in *. specialize (Hind' Ht). 
econstructor; eauto. split; auto. intros H1. depelim H1. term. Qed. Print Assumptions hereditary_subst_subst. Lemma check_liftn {Γ Γ' t T} : Γ |-- t <= T -> Γ' @ Γ |-- lift 0 (length Γ') t <= T. Proof. intros. apply (check_lift Γ t T [] H Γ'). Qed. Lemma synth_liftn {Γ Γ' t T} : Γ |-- t => T -> Γ' @ Γ |-- lift 0 (length Γ') t => T. Proof. intros. apply (synthetize_lift Γ t T [] H Γ'). Qed. #[local] Hint Resolve check_liftn synth_liftn : term. (* Write normalization function *) Lemma types_normalizes Γ t T : Γ |-- t : T → ∃ u, Γ |-- u <= T. Proof. induction 1. (* eta-exp *) exists (η (nth i Γ unit) i). apply (eta_expand Γ i (nth i Γ unit) (neutral_var _)); term. destruct IHtypes as [t' tt']. exists (λ t'); term. destruct IHtypes1 as [t' tt']. destruct IHtypes2 as [u' uu']. (* Hereditary substitution *) apply checks_arrow in tt'. destruct tt' as [t'' [t't'' t'B]]. subst. generalize (hereditary_subst_subst _ _ t'' [] Γ B uu'). destruct_call hereditary_subst. destruct o. destruct h. simpl in *. intros. destruct H1. exists t0; intuition. simpl in *. intros. destruct H1. exists t0; intuition. (* Unit *) exists Tt; term. (* Pair *) destruct IHtypes1 as [t' tt']. destruct IHtypes2 as [u' uu']. exists << t' , u' >>. term. (* Fst *) destruct IHtypes as [t' tt']. depelim tt'. exists t0; term. depelim H0. (* Snd *) destruct IHtypes as [t' tt']. depelim tt'. exists u; term. depelim H0. Qed. Print Assumptions types_normalizes. Coq-Equations-1.3.1-8.20/examples/_CoqProject000066400000000000000000000005411463127417400205530ustar00rootroot00000000000000-I ../src -R ../theories Equations -Q . Examples INSTALLDEFAULTROOT = Equations Fin.v HoTT_light.v RoseTree.v graph_complete.v STLC.v polynomials.v string_matching.v nested_mut_rec.v MoreDep.v general_recursion.v Basics.v definterp.v ordinals.v ho_finite_branching.v mutualwfrec.v accumulator.v views.v wfrec.v AlmostFull.v POPLMark1a.v bove_capretta.v Coq-Equations-1.3.1-8.20/examples/_HoTTProject000066400000000000000000000005761463127417400206570ustar00rootroot00000000000000-I ../src -R ../theories Equations -Q . Examples INSTALLDEFAULTROOT = Equations COQC = hoqc COQTOP = hoqtop Fin.v HoTT_light.v RoseTree.v graph_complete.v STLC.v polynomials.v string_matching.v nested_mut_rec.v MoreDep.v general_recursion.v Basics.v definterp.v ordinals.v ho_finite_branching.v mutualwfrec.v accumulator.v views.v wfrec.v AlmostFull.v POPLMark1a.v bove_capretta.v Coq-Equations-1.3.1-8.20/examples/accumulator.v000066400000000000000000000141001463127417400211220ustar00rootroot00000000000000(* begin hide *) (**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (* end hide *) (** * Working with accumulators A standard pattern of functional programming involves writting tail-recursive versions of an algorithm using an accumulator (also called wrapper/worker), we show how Equations makes this pattern easy to express and reason about using where clauses, well-founded recursion and function eliminators. *) From Equations Require Import Equations. From Coq Require Import List Program.Syntax Arith Lia. Import ListNotations. (** ** Worker/wrapper The most standard example is an efficient implementation of list reversal. 
Instead of growing the stack by the size of the list, we accumulate a partially reverted list as a new argument of our function. We implement this using a [go] auxilliary function defined recursively and pattern matching on the list. *) Equations rev_acc {A} (l : list A) : list A := rev_acc l := go [] l where go : list A -> list A -> list A := go acc [] := acc; go acc (hd :: tl) := go (hd :: acc) tl. (** A typical issue with such accumulating functions is that one has to write lemmas in two versions, once about the internal [go] function and then on its wrapper. Using the functional elimination principle associated to [rev_acc], we can show both properties simultaneously. *) Lemma rev_acc_eq : forall {A} (l : list A), rev_acc l = rev l. Proof. (** We apply functional elimination on the [rev_acc l] call. The eliminator expects two predicates: one specifying the wrapper and another for the worker. For the wrapper, we give the expected final goal but for the worker we have to invent a kind of loop invariant: here that the result of the whole [go acc l] call is equal to [rev l ++ acc]. *) apply (rev_acc_elim (fun A l revaccl => revaccl = rev l) (fun A _ acc l go_res => go_res = rev l ++ acc)); intros; simpl. (** Functional elimination provides us with the worker property for the initial [go [] l] call, i.e. that it is equal to [rev l ++ []], hence the proof: *) + now rewrite app_nil_r in H. (** For the worker proofs themselves, we use standard reasoning. *) + reflexivity. + now rewrite H, <- app_assoc. Qed. (** ** The worker/wrapper and well-founded recursion Sometimes the natural expression of an algorithm in the worker/wrapper pattern requires well-founded recursion: here we take an example algorithm from Haskell whose termination is justified by a measure. Note that the [worker] subprogram's termination measure and implementation depends on the enclosing [k] argument which is captured in the where clause. *) Obligation Tactic := idtac. Equations? isPrime (n : nat) : bool := isPrime 0 := false; isPrime 1 := false; isPrime 2 := true; isPrime 3 := true; isPrime k := worker 2 where worker (n' : nat) : bool by wf (k - n') lt := worker n' with ge_dec n' k := { | left H := true; | right H := if Nat.eqb (Nat.modulo k n') 0 then false else worker (S n') }. Proof. lia. Defined. (* Require Import ExtrOcamlBasic. *) (* Extraction isPrime. *) (** ** Programm equivalence with worker/wrappers Finally we show how the eliminator can be used to prove program equivalences involving a worker/wrapper definition. Here [indexes l] computes the list [0..|l|-1] of valid indexes in the list [l]. *) Equations indexes : list nat -> list nat := indexes l := go [] (length l) where go : list nat -> nat -> list nat := go acc 0 := acc; go acc (S n) := go (n :: acc) n. (** Clearly, all indexes in the resulting list should be smaller than [length l]: *) Lemma indexes_spec (l : list nat) : Forall (fun x => x < length l) (indexes l). Proof. (** We apply the eliminator, giving a predicate that specifies preservation of the property from the accumulator to the end result for [go]'s specification. The rest of the proof uses simple reasoning. *) apply (indexes_elim (fun l indexesl => Forall (fun x => x < length l) indexesl) (fun l acc n indexesl => n <= length l -> Forall (fun x => x < length l) acc -> Forall (fun x => x < length l) indexesl)); clear l; intros. + apply H; constructor. + apply H0. + apply H. lia. constructor. lia. apply H1. Qed. 
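(** As a small sanity check (this corollary and its name are additions for
    illustration; it follows directly from [indexes_spec]), membership in
    [indexes l] indeed implies being a valid position in [l]: *)

Corollary In_indexes_lt (l : list nat) (n : nat) :
  In n (indexes l) -> n < length l.
Proof.
  pose proof (indexes_spec l) as Hall.
  revert Hall. generalize (indexes l). intros xs Hall.
  induction Hall; simpl; intros Hn.
  + contradiction.
  + destruct Hn as [Heq | Hn]; subst; auto.
Qed.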
(** Using well-founded recursion we can also define an [interval x y] function producing the interval [x..y-1] *) Equations? interval x y : list nat by wf (y - x) lt := interval x y with lt_dec x y := { | left ltxy => x :: interval (S x) y; | right nltxy => [] }. Proof. lia. Defined. (** We prove a simple lemmas on [interval]: *) Lemma interval_large x y : ~ x < y -> interval x y = []. Proof. funelim (interval x y); clear Heq; intros; now try lia. Qed. (** One can show that [indexes l] produces the interval [0..|l|-1] using [indexes_elim]. The recursion invariant for [indexes_go] records that [acc] corresponds to a partial interval [n..|l|-1] during the computation, and is finally completed into [0..|l|-1] by the end of the computation. We use the previous lemmas as helpers. *) Lemma indexes_interval l : indexes l = interval 0 (length l). Proof. set (P := fun start (l indexesl : list nat) => indexesl = interval start (length l)). revert l. apply (indexes_elim (P 0) (fun l acc n indexesl => n <= length l -> P n l acc -> P 0 l indexesl)); subst P; simpl. intros l. + intros H. apply H; auto. rewrite interval_large; trivial; lia. + intros; trivial. + intros l ? n H Hn ->. apply H. lia. rewrite (interval_equation_1 n). destruct lt_dec. reflexivity. elim n0. lia. Qed. Coq-Equations-1.3.1-8.20/examples/bove_capretta.v000066400000000000000000000116511463127417400214310ustar00rootroot00000000000000(** * The Bove-Cappretta method This method involves building the graph and/or domain of a recursive definition and to define it by recursion + inversion on that graph, but not direct pattern matching. We show a difficult example involving nested recursive calls. *) From Equations Require Import Equations. Require Import Arith Lia Relations Utf8. Import Sigma_Notations. (** The graph of the [f91] function. *) Inductive f91_graph : nat -> nat -> Prop := | f91_gt n : n > 100 -> f91_graph n (n - 10) | f91_le n nest res : n <= 100 -> f91_graph (n + 11) nest -> f91_graph nest res -> f91_graph n res. Derive Signature for f91_graph. (** It is easy to derive the spec of [f91] from it, by induction. *) Lemma f91_spec n m : f91_graph n m -> if le_lt_dec n 100 then m = 91 else m = n - 10. Proof. induction 1; repeat destruct le_lt_dec; try lia; auto. Qed. (* Do not [simpl] the (101 - n) call *) Arguments minus : simpl never. (** One can construct the graph using a (relatively) complex termination argument. Note that it is required to know that the result is in the graph to show termination at the second, nested recursive call to [f91_exists]. *) Equations? f91_exists n : Σ r, f91_graph n r by wf (101 - n) lt := f91_exists n with le_lt_dec n 100 := { | left H := ((f91_exists (f91_exists (n + 11)).1).1, _) ; | right H := (n - 10, _) }. Proof. all:hnf. 2-3:edestruct f91_exists; cbn. 3:destruct f91_exists. lia. apply f91_spec in pr2. destruct le_lt_dec; subst; lia. econstructor 2; eauto. constructor. lia. Defined. (** Combining these two things allow us to derive the spec of [f91]. *) Lemma f91 n : (f91_exists n).1 = if le_lt_dec n 100 then 91 else n - 10. Proof. destruct f91_exists. simpl. generalize (f91_spec _ _ pr2). destruct le_lt_dec; auto. Qed. (** This extracts to [f91]. *) (* Extraction f91_exists. *) (** An alternative is to use the domain of [f91] instead, which for nested recursive calls requires a quantification on the graph relation. 
*) Inductive f91_dom : nat -> Prop := | f91_dom_gt n : n > 100 -> f91_dom n | f91_dom_le n : n <= 100 -> f91_dom (n + 11) -> (forall n', f91_graph (n + 11) n' -> f91_dom n') -> f91_dom n. Lemma le_nle n : n <= 100 -> 100 < n -> False. Proof. lia. Qed. (** These two structural inversion lemmas are essential: we rely on the fact that they return subterms of their [prf] argument below to define [f91_ongraph] by _structural_ recursion. *) Equations f91_dom_le_inv_l {n} (prf : f91_dom n) (Hle : n <= 100) : f91_dom (n + 11) := | f91_dom_gt n H | Hle := ltac:(exfalso; eauto using le_nle); | f91_dom_le n H Hd Hg | Hle := Hd. Equations f91_dom_le_inv_r {n} (prf : f91_dom n) (Hle : n <= 100) : (forall n', f91_graph (n + 11) n' -> f91_dom n') := | f91_dom_gt n H | Hle := ltac:(exfalso; eauto using le_nle); | f91_dom_le n H Hd Hg | Hle := Hg. Module WithSigma. (** In this case, [f91_ongraph] is recursive on the domain proof, but only does inversion of it, not direct pattern-matching which would be forbidden as it lives in [Prop]. *) Equations? f91_ongraph n (prf : f91_dom n) : Σ r, f91_graph n r by struct prf := f91_ongraph n prf with le_lt_dec n 100 := { | left H => ((f91_ongraph (f91_ongraph (n + 11) _).1 _).1, _); | right H => (n - 10, _) }. Proof. clear f91_ongraph. destruct prf. exfalso; lia. apply prf. destruct f91_ongraph. clear f91_ongraph. simpl. eapply f91_dom_le_inv_r in prf; eauto. destruct f91_ongraph. simpl in *. destruct f91_ongraph. clear f91_ongraph. simpl in *. econstructor 2; eauto. constructor. lia. Defined. (** The proof witness [f91_dom n] disappears at extraction time. But the polymorphic sigma type makes it leave a dummy unit value on the side. *) (* Extraction f91_ongraph. *) End WithSigma. Module WithSubset. (** Same thing with a subset type for cleaner extraction. We use [Program Mode] to avoid explicit projections/injections into subset types. *) Set Program Mode. Equations? f91_ongraph n (prf : f91_dom n) : { r | f91_graph n r } by struct prf := f91_ongraph n prf with le_lt_dec n 100 := { | left H => f91_ongraph (f91_ongraph (n + 11) _) _; | right H => n - 10 }. Proof. clear f91_ongraph. destruct prf. exfalso; lia. apply prf. destruct f91_ongraph. clear f91_ongraph. simpl. eapply f91_dom_le_inv_r in prf; eauto. destruct f91_ongraph. simpl in *. destruct f91_ongraph. clear f91_ongraph. simpl in *. econstructor 2; eauto. constructor. lia. Defined. Lemma f91_ongraph_spec n dom : proj1_sig (f91_ongraph n dom) = if le_lt_dec n 100 then 91 else n - 10. Proof. destruct f91_ongraph. simpl. generalize (f91_spec _ _ f). destruct le_lt_dec; auto. Qed. (** All proof witnesses [f91_dom n] and [f91_graph n r] disappear at extraction time, giving the "real" [f91] implementation. *) (* Extraction f91_ongraph. *) End WithSubset. Coq-Equations-1.3.1-8.20/examples/definterp.v000066400000000000000000000363061463127417400205770ustar00rootroot00000000000000(* begin hide *) (**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (* end hide *) (** * Definitional interpreter for STLC extended with references This is a port of the first part of "Intrinsically-Typed Definitional Interpreters for Imperative Languages", Poulsen, Rouvoet, Tolmach, Krebbers and Visser. POPL'18. 
It uses well-typed and well-scoped syntax and a monad indexed over an indexed set of stores to define an interpreter for an imperative programming language. This showcases the use of dependent pattern-matching and pattern-matching lambdas in Equations. We implement a variant where store extension is resolved using type class resolution as well as the dependent-passing style version. *) Require Import Program.Basics Program.Tactics. Require Import Coq.Vectors.VectorDef. Require Import List. Import ListNotations. Require Import Utf8. From Equations Require Import Equations. Set Warnings "-notation-overridden". (** The Σ notation of equations clashes with the Σ's used below, so we redefine the Σ notation using ∃ instead. *) Notation "'∃' x .. y , P" := (sigma (fun x => .. (sigma (fun y => P)) ..)) : type_scope. Notation "( x , .. , y , z )" := (@sigmaI _ _ x .. (@sigmaI _ _ y z) ..) (right associativity, at level 0, format "( x , .. , y , z )") : equations_scope. Notation " x .1 " := (pr1 x) : equations_scope. Notation " x .2 " := (pr2 x) : equations_scope. Local Open Scope equations_scope. Set Equations Transparent. (** [t] is just [Vector.t] here. *) Derive Signature NoConfusion NoConfusionHom for t. (** Types include unit, bool, function types and references *) Inductive Ty : Set := | unit : Ty | bool : Ty | arrow (t u : Ty) : Ty | ref : Ty -> Ty. Derive NoConfusion for Ty. Infix "⇒" := arrow (at level 80). Definition Ctx := list Ty. Reserved Notation " x ∈ s " (at level 70, s at level 10). #[universes(template)] Inductive In {A} (x : A) : list A -> Type := | here {xs} : x ∈ (x :: xs) | there {y xs} : x ∈ xs -> x ∈ (y :: xs) where " x ∈ s " := (In x s). Derive Signature NoConfusion for In. Arguments here {A x xs}. Arguments there {A x y xs} _. Inductive Expr : Ctx -> Ty -> Set := | tt {Γ} : Expr Γ unit | true {Γ} : Expr Γ bool | false {Γ} : Expr Γ bool | ite {Γ t} : Expr Γ bool -> Expr Γ t -> Expr Γ t -> Expr Γ t | var {Γ} {t} : In t Γ -> Expr Γ t | abs {Γ} {t u} : Expr (t :: Γ) u -> Expr Γ (t ⇒ u) | app {Γ} {t u} : Expr Γ (t ⇒ u) -> Expr Γ t -> Expr Γ u | new {Γ t} : Expr Γ t -> Expr Γ (ref t) | deref {Γ t} : Expr Γ (ref t) -> Expr Γ t | assign {Γ t} : Expr Γ (ref t) -> Expr Γ t -> Expr Γ unit. (** We derive both [NoConfusion] and [NoConfusionHom] principles here, the later allows to simplify pattern-matching problems on [Expr] which would otherwise require K. It relies on an inversion analysis of every constructor, showing that the context and type indexes in the conclusions of every constructor are forced arguments. *) Derive Signature NoConfusion NoConfusionHom for Expr. #[universes(template)] Inductive All {A} (P : A -> Type) : list A -> Type := | all_nil : All P [] | all_cons {x xs} : P x -> All P xs -> All P (x :: xs). Arguments all_nil {A} {P}. Arguments all_cons {A P x xs} _ _. Derive Signature NoConfusion NoConfusionHom for All. Section MapAll. Context {A} {P Q : A -> Type} (f : forall x, P x -> Q x). Equations map_all {l : list A} : All P l -> All Q l := | all_nil := all_nil | all_cons p ps := all_cons (f _ p) (map_all ps). Equations map_all_in {l : list A} (f : forall x, x ∈ l -> P x -> Q x) : All P l -> All Q l := | f, all_nil := all_nil | f, all_cons p ps := all_cons (f _ here p) (map_all_in (fun x inl => f x (there inl)) ps). End MapAll. Definition StoreTy := list Ty. 
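(** A store typing [StoreTy] assigns a type to each location allocated so far.
    Values are indexed both by their type and by the current store typing, so
    that a location value can only point to a cell holding the expected type,
    and closures capture an environment that is well-typed in the same store
    typing. *)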
Inductive Val : Ty -> StoreTy -> Set := | val_unit {Σ} : Val unit Σ | val_true {Σ} : Val bool Σ | val_false {Σ} : Val bool Σ | val_closure {Σ Γ t u} : Expr (t :: Γ) u -> All (fun t => Val t Σ) Γ -> Val (t ⇒ u) Σ | val_loc {Σ t} : t ∈ Σ -> Val (ref t) Σ. Derive Signature NoConfusion NoConfusionHom for Val. Definition Env (Γ : Ctx) (Σ : StoreTy) : Set := All (fun t => Val t Σ) Γ. Definition Store (Σ : StoreTy) := All (fun t => Val t Σ) Σ. Equations lookup : forall {A P xs} {x : A}, All P xs -> x ∈ xs -> P x := lookup (all_cons p _) here := p; lookup (all_cons _ ps) (there ins) := lookup ps ins. Equations update : forall {A P xs} {x : A}, All P xs -> x ∈ xs -> P x -> All P xs := update (all_cons p ps) here p' := all_cons p' ps; update (all_cons p ps) (there ins) p' := all_cons p (update ps ins p'). Equations lookup_store {Σ t} : t ∈ Σ -> Store Σ -> Val t Σ := lookup_store l σ := lookup σ l. Equations update_store {Σ t} : t ∈ Σ -> Val t Σ -> Store Σ -> Store Σ := update_store l v σ := update σ l v. Definition store_incl (Σ Σ' : StoreTy) := sigma (fun Σ'' => Σ' = Σ'' ++ Σ). Infix "⊑" := store_incl (at level 10). Equations app_assoc {A} (x y z : list A) : x ++ y ++ z = (x ++ y) ++ z := app_assoc nil y z := eq_refl; app_assoc (cons x xs) y z := f_equal (cons x) (app_assoc xs y z). Section StoreIncl. Equations pres_in {Σ Σ'} (incl : Σ ⊑ Σ') t (p : t ∈ Σ) : t ∈ Σ' := pres_in (Σ'', eq_refl) t p := aux Σ'' where aux Σ'' : t ∈ (Σ'' ++ Σ) := aux nil := p; aux (cons ty tys) := there (aux tys). Equations refl_incl {Σ} : Σ ⊑ Σ := refl_incl := ([], eq_refl). Equations trans_incl {Σ Σ' Σ''} (incl : Σ ⊑ Σ') (incl' : Σ' ⊑ Σ'') : Σ ⊑ Σ'' := trans_incl (p, eq_refl) (q, eq_refl) := (q ++ p, app_assoc _ _ _). Equations store_ext_incl {Σ t} : Σ ⊑ (t :: Σ) := store_ext_incl := ([t], eq_refl). Context {Σ Σ'} (incl : Σ ⊑ Σ'). Equations weaken_val {t} (v : Val t Σ) : Val t Σ' := { weaken_val (@val_unit ?(Σ)) := val_unit; weaken_val val_true := val_true; weaken_val val_false := val_false; weaken_val (val_closure b e) := val_closure b (weaken_vals e); weaken_val (val_loc H) := val_loc (pres_in incl _ H) } where weaken_vals {l} (a : All (fun t => Val t Σ) l) : All (fun t => Val t Σ') l := weaken_vals all_nil := all_nil; weaken_vals (all_cons p ps) := all_cons (weaken_val p) (weaken_vals ps). Equations weakenv_vals {l} a : @weaken_vals l a = map_all (fun t v => weaken_val v) a := weakenv_vals all_nil := eq_refl; weakenv_vals (all_cons p ps) := f_equal (all_cons (weaken_val p)) (weakenv_vals ps). Definition weaken_env {Γ} (v : Env Γ Σ) : Env Γ Σ' := map_all (@weaken_val) v. End StoreIncl. Infix "⊚" := trans_incl (at level 10). Equations M : forall (Γ : Ctx) (P : StoreTy -> Type) (Σ : StoreTy), Type := M Γ P Σ := forall (E : Env Γ Σ) (μ : Store Σ), option (∃ Σ' (μ' : Store Σ') (_ : P Σ'), Σ ⊑ Σ'). Equations bind {Σ Γ} {P Q : StoreTy -> Type} (f : M Γ P Σ) (g : ∀ {Σ'}, P Σ' -> M Γ Q Σ') : M Γ Q Σ := bind f g E μ with f E μ := | None := None | Some (Σ', μ', x, ext) with g _ x (weaken_env ext E) μ' := | None := None; | Some (_, μ'', y, ext') := Some (_, μ'', y, ext ⊚ ext'). Infix ">>=" := bind (at level 20, left associativity). Definition transp_op {Γ Σ P} (x : Store Σ -> P Σ) : M Γ P Σ := fun E μ => Some (Σ, μ, x μ, refl_incl). Equations ret : ∀ {Γ Σ P}, P Σ → M Γ P Σ := ret (Σ:=Σ) a E μ := Some (Σ, μ, a, refl_incl). Equations getEnv : ∀ {Γ Σ}, M Γ (Env Γ) Σ := getEnv (Σ:=Σ) E μ := Some (Σ, μ, E, refl_incl). Equations usingEnv {Γ Γ' Σ P} (E : Env Γ Σ) (m : M Γ P Σ) : M Γ' P Σ := usingEnv E m E' μ := m E μ. 
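(** The monad [M Γ P Σ] above packages a reader for the typed environment, a
    state for the typed store and the possibility of failure; a successful
    computation returns its result over a possibly extended store typing,
    together with an extension witness [Σ ⊑ Σ'] that [bind] uses to weaken the
    environment before running the continuation. As a tiny sanity check (the
    names [example_store] and [example_loc] are illustrative additions), a
    one-cell store typing is easily inhabited: *)

Example example_store : Store [unit] := all_cons val_unit all_nil.
Example example_loc : Val (ref unit) [unit] := val_loc here.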
Equations timeout : ∀ {Γ Σ P}, M Γ P Σ := timeout _ _ := None. Section StoreOps. Context {Σ : StoreTy} {Γ : Ctx} {t : Ty}. Equations storeM (v : Val t Σ) : M Γ (Val (ref t)) Σ := storeM v E μ := let v : Val t (t :: Σ) := weaken_val store_ext_incl v in let μ' := map_all (fun t' => weaken_val store_ext_incl) μ in Some (t :: Σ, all_cons v μ', val_loc here, store_ext_incl). Equations derefM (l : t ∈ Σ) : M Γ (Val t) Σ := derefM l := transp_op (lookup_store l). Equations updateM (l : t ∈ Σ) (v : Val t Σ) : M Γ (Val unit) Σ := updateM l v E μ := Some (Σ, update_store l v μ, val_unit, refl_incl). End StoreOps. Reserved Notation "P ⊛ Q" (at level 10). Inductive storepred_prod (P Q : StoreTy -> Type) : StoreTy -> Type := | storepred_pair {Σ} : P Σ -> Q Σ -> (P ⊛ Q) Σ where "P ⊛ Q" := (storepred_prod P Q). Arguments storepred_pair {P Q Σ}. Class Weakenable (P : StoreTy -> Type) : Type := weaken : forall {Σ Σ'}, Σ ⊑ Σ' -> P Σ -> P Σ'. #[local] Instance val_weaken {t} : Weakenable (Val t) := fun Σ Σ' incl => weaken_val incl. #[local] Instance env_weaken {Γ} : Weakenable (Env Γ) := fun Σ Σ' incl => weaken_env incl. #[local] Instance loc_weaken (t : Ty) : Weakenable (In t) := fun Σ Σ' incl => pres_in incl t. Class IsIncludedOnce (Σ Σ' : StoreTy) : Type := is_included_once : Σ ⊑ Σ'. #[local] Hint Mode IsIncludedOnce + + : typeclass_instances. #[local] Instance IsIncludedOnce_ext {T} Σ : IsIncludedOnce Σ (T :: Σ) := store_ext_incl. Class IsIncluded (Σ Σ' : StoreTy) : Type := is_included : Σ ⊑ Σ'. #[local] Hint Mode IsIncluded + + : typeclass_instances. #[local] Instance IsIncluded_refl Σ : IsIncluded Σ Σ := refl_incl. #[local] Instance IsIncluded_trans Σ Σ' Σ'' : IsIncludedOnce Σ Σ' -> IsIncluded Σ' Σ'' -> IsIncluded Σ Σ'' := fun H H' => trans_incl H H'. Equations wk {Σ Σ' P} {W : Weakenable P} (p : P Σ) {incl : IsIncluded Σ Σ'} : P Σ' := wk p := weaken incl p. Equations bind_ext {Σ Γ} {P Q : StoreTy -> Type} (f : M Γ P Σ) (g : ∀ {Σ'} `{IsIncluded Σ Σ'}, P Σ' -> M Γ Q Σ') : M Γ Q Σ := bind_ext f g E μ with f E μ := { | None := None; | Some (Σ', μ', x, ext) with g _ ext x (weaken_env ext E) μ' := { | None := None; | Some (_, μ'', y, ext') := Some (_, μ'', y, ext ⊚ ext') } }. Infix ">>='" := bind_ext (at level 20, left associativity). Equations eval_ext (n : nat) {Γ Σ t} (e : Expr Γ t) : M Γ (Val t) Σ := | 0, _ := timeout | S k, tt := ret val_unit | S k, true := ret val_true | S k, false := ret val_false | S k, ite b tr fa := eval_ext k b >>=' λ{ | _ | ext | val_true => eval_ext k tr; | _ | ext | val_false => eval_ext k fa } | S k, var x := getEnv >>=' fun {Σ ext} E => ret (lookup E x) | S k, abs x := getEnv >>=' fun {Σ ext} E => ret (val_closure x E) | S k, @app Γ A B e1 e2 := eval_ext k e1 >>=' λ{ | _ | ext | val_closure e' E => eval_ext k e2 >>=' fun {Σ' ext'} v => usingEnv (all_cons v (wk (P:=Env _) E)) (eval_ext k e')} | S k, new e := eval_ext k e >>=' fun {Σ ext} v => storeM v | S k, deref l := eval_ext k l >>=' λ{ | _ | ext | val_loc l' => derefM l' } | S k, assign l e := eval_ext k l >>=' λ{ | _ | ext | val_loc l' => eval_ext k e >>=' λ{ | _ | ext' | v => updateM (wk l') (wk v) }}. Equations strength {Σ Γ} {P Q : StoreTy -> Type} {w : Weakenable Q} (m : M Γ P Σ) (q : Q Σ) : M Γ (P ⊛ Q) Σ := strength m q E μ with m E μ => { | None => None | Some (Σ', μ', p, ext) => Some (Σ', μ', storepred_pair p (weaken ext q), ext) }. Infix "^" := strength. (* Issue: improve pattern matching lambda to have implicit arguments implicit. Hard because Coq does not keep the implicit status of bind's [g] argument. 
*) Equations eval (n : nat) {Γ Σ t} (e : Expr Γ t) : M Γ (Val t) Σ := eval 0 _ := timeout; eval (S k) tt := ret val_unit; eval (S k) true := ret val_true; eval (S k) false := ret val_false; eval (S k) (ite b tr fa) := eval k b >>= λ{ | _ | val_true => eval k tr; | _ | val_false => eval k fa }; eval (S k) (var x) := getEnv >>= fun Σ E => ret (lookup E x); eval (S k) (abs x) := getEnv >>= fun Σ E => ret (val_closure x E); eval (S k) (app e1 e2) := eval k e1 >>= λ{ | _ | val_closure e' E => (eval k e2 ^ E) >>= fun Σ' '(storepred_pair v E) => usingEnv (all_cons v E) (eval k e')}; eval (S k) (new e) := eval k e >>= fun Σ v => storeM v; eval (S k) (deref l) := eval k l >>= λ{ | _ | val_loc l' => derefM l' }; eval (S k) (assign l e) := eval k l >>= λ{ | _ | val_loc l' => (eval k e ^ l') >>= λ{ | _ | storepred_pair v l'' => updateM l'' v }}. Definition idu : Expr [] (unit ⇒ unit) := abs (var here). Definition idapp : Expr [] unit := app idu tt. (** All definitions are axiom-free (and actually not even dependent on a provable UIP instance), so everything computes. *) Eval vm_compute in eval 100 idapp all_nil all_nil. Definition neg : Expr [] (bool ⇒ bool) := abs (ite (var here) false true). Definition letref {t u} (v : Expr [] t) (b : Expr [ref t] u) : Expr [] u := app (abs b) (new v). Obligation Tactic := idtac. Equations in_app_weaken {Σ Σ' Σ'' : StoreTy} {t} (p : t ∈ (Σ ++ Σ'')) : t ∈ (Σ ++ Σ' ++ Σ'') by struct Σ := in_app_weaken (Σ:=nil) p := pres_in (Σ', eq_refl) t p; in_app_weaken (Σ:=cons _ tys) here := here; in_app_weaken (Σ:=cons _ tys) (there p) := there (in_app_weaken p). Equations pres_in_prefix {Σ Σ' Σ''} (incl : Σ' ⊑ Σ'') {t} (p : t ∈ (Σ ++ Σ')) : t ∈ (Σ ++ Σ'') := pres_in_prefix (Σ'', eq_refl) p := in_app_weaken p. (** [Equations?] enters refinement mode, which can be used to solve the case of variables in proof mode. *) Equations? weaken_expr {Γ Γ' t u} (e1 : Expr (Γ ++ Γ') t) : Expr (Γ ++ u :: Γ') t := weaken_expr tt := tt; weaken_expr true := true; weaken_expr false := false; weaken_expr (ite b tr fa) := ite (weaken_expr b) (weaken_expr tr) (weaken_expr fa); weaken_expr (var (t:=ty) x) := var _; weaken_expr (abs (t:=t) x) := abs (weaken_expr (Γ := t :: Γ) x); weaken_expr (app e1 e2) := app (weaken_expr e1) (weaken_expr e2); weaken_expr (new e) := new (weaken_expr e); weaken_expr (deref l) := deref (weaken_expr l); weaken_expr (assign l e) := assign (weaken_expr l) (weaken_expr e). Proof. clear weaken_expr. apply (pres_in_prefix (Σ' := Γ') ([u], eq_refl) x). Defined. Definition seq {Γ u} (e1 : Expr Γ unit) (e2 : Expr Γ u) : Expr Γ u := app (abs (weaken_expr (Γ := []) e2)) e1. (* let x = ref true in x := false; !x *) Definition letupdate : Expr [] bool := letref true (seq (assign (var here) false) (deref (var here))). Eval vm_compute in eval 100 letupdate all_nil all_nil. (** [[ = Some ([bool], all_cons val_false all_nil, val_false, [bool], eq_refl) : option (∃ (Σ' : StoreTy) (_ : Store Σ') (_ : Val bool Σ'), [] ⊑ Σ') ]] *) Coq-Equations-1.3.1-8.20/examples/definterp_scope.v000066400000000000000000000345711463127417400217720ustar00rootroot00000000000000Require Import Program.Basics Program.Tactics. Require Import Equations.Equations. Require Import Coq.Vectors.VectorDef. Require Import List. Import ListNotations. Set Equations Transparent. Derive Signature NoConfusion NoConfusionHom for t. Inductive Ty : Set := | unit : Ty | bool : Ty | arrow (t u : Ty) : Ty | ref : Ty -> Ty. Derive NoConfusion for Ty. Infix "⇒" := arrow (at level 80). 
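(** This variant replaces the flat list context of [definterp.v] by a scope
    graph: a scope carries a list of declarations and edges towards other
    scopes, and a variable occurrence is a resolution path from its scope to a
    declaration (see the [ScopeGraphs] section below). *)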
Reserved Notation " x ∈ s " (at level 70, s at level 10). #[universes(template)] Inductive In {A} (x : A) : list A -> Type := | here {xs} : x ∈ (x :: xs) | there {y xs} : x ∈ xs -> x ∈ (y :: xs) where " x ∈ s " := (In x s). Derive Signature NoConfusion for In. Arguments here {A x xs}. Arguments there {A x y xs} _. From Equations Require Import Fin. Section ScopeGraphs. Context (k : nat). Definition Scope := fin k. Definition Graph := Scope -> (list Ty * list Scope). Context (g : Graph). Definition declsOf (s : Scope) := fst (g s). Definition edgesOf (s : Scope) := snd (g s). Inductive sedge (s : Scope) : Scope -> Set := | nilp : sedge s s | consp {s' s''} : s' ∈ edgesOf s -> sedge s' s'' -> sedge s s''. Notation "s ⟶ s'" := (sedge s s') (at level 20). Inductive resolve (s : Scope) (t : Ty) := | path {s'} : s ⟶ s' -> t ∈ declsOf s' -> resolve s t. Notation "s ↦ t" := (resolve s t) (at level 20). End ScopeGraphs. Arguments resolve {k g}. Notation "s ↦ t" := (resolve s t) (at level 20). Section Expr. Context (k : nat). Context (g : Graph k). Inductive Expr (Γ : Scope k) : Ty -> Set := | tt : Expr Γ unit | true : Expr Γ bool | false : Expr Γ bool | ite {t} : Expr Γ bool -> Expr Γ t -> Expr Γ t -> Expr Γ t | var {t} : Γ ↦ t -> Expr Γ t | abs {t u} : Expr (t :: Γ) u -> Expr Γ (t ⇒ u) | app {t u} : Expr Γ (t ⇒ u) -> Expr Γ t -> Expr Γ u | new {t} : Expr Γ t -> Expr Γ (ref t) | deref {t} : Expr Γ (ref t) -> Expr Γ t | assign {t} : Expr Γ (ref t) -> Expr Γ t -> Expr Γ unit. Derive Signature NoConfusion NoConfusionHom for Expr. #[universes(template)] Inductive All {A} (P : A -> Type) : list A -> Type := | all_nil : All P [] | all_cons {x xs} : P x -> All P xs -> All P (x :: xs). Arguments all_nil {A} {P}. Arguments all_cons {A P x xs} _ _. Derive Signature NoConfusion NoConfusionHom for All. Section MapAll. Context {A} {P Q : A -> Type} (f : forall x, P x -> Q x). Equations map_all {l : list A} : All P l -> All Q l := map_all all_nil := all_nil; map_all (all_cons p ps) := all_cons (f _ p) (map_all ps). End MapAll. Definition StoreTy := list Ty. Inductive Val : Ty -> StoreTy -> Set := | val_unit {Σ} : Val unit Σ | val_true {Σ} : Val bool Σ | val_false {Σ} : Val bool Σ | val_closure {Σ Γ t u} : Expr (t :: Γ) u -> All (fun t => Val t Σ) Γ -> Val (t ⇒ u) Σ | val_loc {Σ t} : t ∈ Σ -> Val (ref t) Σ. Derive Signature NoConfusion NoConfusionHom for Val. Definition Env (Γ : Ctx) (Σ : StoreTy) : Set := All (fun t => Val t Σ) Γ. Definition Store (Σ : StoreTy) := All (fun t => Val t Σ) Σ. Equations lookup : forall {A P xs} {x : A}, All P xs -> x ∈ xs -> P x := lookup (all_cons p _) here := p; lookup (all_cons _ ps) (there ins) := lookup ps ins. Equations update : forall {A P xs} {x : A}, All P xs -> x ∈ xs -> P x -> All P xs := update (all_cons p ps) here p' := all_cons p' ps; update (all_cons p ps) (there ins) p' := all_cons p (update ps ins p'). Equations lookup_store {Σ t} : t ∈ Σ -> Store Σ -> Val t Σ := lookup_store l σ := lookup σ l. Equations update_store {Σ t} : t ∈ Σ -> Val t Σ -> Store Σ -> Store Σ := update_store l v σ := update σ l v. Import Sigma_Notations. Definition store_incl (Σ Σ' : StoreTy) := &{ Σ'' : _ & Σ' = Σ'' ++ Σ }. Infix "⊑" := store_incl (at level 10). Lemma app_assoc {A} (x y z : list A) : x ++ y ++ z = (x ++ y) ++ z. Proof. induction x; simpl; auto. now rewrite IHx. Defined. Section StoreIncl. Context {Σ Σ' : StoreTy} (incl : Σ ⊑ Σ'). Lemma pres_in t : t ∈ Σ -> t ∈ Σ'. Proof. destruct incl. subst. clear. induction pr1. intros. exact H. intros H. specialize (IHpr1 H). constructor 2. 
apply IHpr1. Defined. Equations(noind) weaken_val {t} (v : Val t Σ) : Val t Σ' := { weaken_val val_unit := val_unit; weaken_val val_true := val_true; weaken_val val_false := val_false; weaken_val (val_closure b e) := val_closure b (map_all (fun t v => weaken_val v) e); (* (weaken_vals e); *) weaken_val (val_loc H) := val_loc (pres_in _ H) }. (* where weaken_vals {l} (a : All (fun t => Val t Σ) l) : All (fun t => Val t Σ') l by struct a := *) (* weaken_vals all_nil := all_nil; *) (* weaken_vals (all_cons p ps) := all_cons (weaken_val p) (weaken_vals ps). *) Definition weaken_env {Γ} (v : Env Γ Σ) : Env Γ Σ' := map_all (@weaken_val) v. Lemma refl_incl : Σ ⊑ Σ. Proof. exists []. reflexivity. Defined. Lemma trans_incl {Σ''} (incl' : Σ' ⊑ Σ'') : Σ ⊑ Σ''. Proof. destruct incl as [? ->], incl' as [? ->]. exists (pr0 ++ pr1). now rewrite app_assoc. Defined. Lemma store_ext_incl {t} : Σ ⊑ (t :: Σ). Proof. now exists [t]. Defined. End StoreIncl. Infix "⊚" := trans_incl (at level 10). Equations M : forall (Γ : Ctx) (P : StoreTy -> Type) (Σ : StoreTy), Type := M Γ P Σ := forall (E : Env Γ Σ) (μ : Store Σ), option &{ Σ' : _ & &{ _ : Store Σ' & &{ _ : P Σ' & Σ ⊑ Σ'}}}. Require Import Utf8. Notation "( x , .. , y , z )" := (sigmaI _ x .. (sigmaI _ y z) ..) : core_scope. Equations bind {Σ Γ} {P Q : StoreTy -> Type} (f : M Γ P Σ) (g : ∀ {Σ'}, P Σ' -> M Γ Q Σ') : M Γ Q Σ := bind f g E μ with f E μ := { | None := None; | Some (Σ', μ', x, ext) with g _ x (weaken_env ext E) μ' := { | None := None; | Some (_, μ'', y, ext') := Some (_, μ'', y, ext ⊚ ext') } }. Infix ">>=" := bind (at level 20, left associativity). Definition transp_op {Γ Σ P} (x : Store Σ -> P Σ) : M Γ P Σ := fun E μ => Some (Σ, μ, x μ, refl_incl). Equations ret : ∀ {Γ Σ P}, P Σ → M Γ P Σ := ret (Σ:=Σ) a E μ := Some (Σ, μ, a, refl_incl). Equations getEnv : ∀ {Γ Σ}, M Γ (Env Γ) Σ := getEnv (Σ:=Σ) E μ := Some (Σ, μ, E, refl_incl). Equations usingEnv {Γ Γ' Σ P} (E : Env Γ Σ) (m : M Γ P Σ) : M Γ' P Σ := usingEnv E m E' μ := m E μ. Equations timeout : ∀ {Γ Σ P}, M Γ P Σ := timeout _ _ := None. Section StoreOps. Context {Σ : StoreTy} {Γ : Ctx} {t : Ty}. Equations storeM (v : Val t Σ) : M Γ (Val (ref t)) Σ := storeM v E μ := let v : Val t (t :: Σ) := weaken_val store_ext_incl v in let μ' := map_all (fun t' => weaken_val store_ext_incl) μ in Some (t :: Σ, all_cons v μ', val_loc here, store_ext_incl). Equations derefM (l : t ∈ Σ) : M Γ (Val t) Σ := derefM l := transp_op (lookup_store l). Equations updateM (l : t ∈ Σ) (v : Val t Σ) : M Γ (Val unit) Σ := updateM l v E μ := Some (Σ, update_store l v μ, val_unit, refl_incl). End StoreOps. Reserved Notation "P ⊛ Q" (at level 10). Inductive storepred_prod (P Q : StoreTy -> Type) : StoreTy -> Type := | storepred_pair {Σ} : P Σ -> Q Σ -> (P ⊛ Q) Σ where "P ⊛ Q" := (storepred_prod P Q). Arguments storepred_pair {P Q Σ}. Class Weakenable (P : StoreTy -> Type) : Type := weaken : forall {Σ Σ'}, Σ ⊑ Σ' -> P Σ -> P Σ'. Instance val_weaken {t} : Weakenable (Val t). Proof. intros Σ Σ' incl. apply (weaken_val incl). Defined. Instance env_weaken {Γ} : Weakenable (Env Γ). Proof. intros Σ Σ' incl. apply (weaken_env incl). Defined. Instance loc_weaken (t : Ty) : Weakenable (In t). Proof. intros Σ Σ' incl. apply (pres_in incl). Defined. Class IsIncludedOnce (Σ Σ' : StoreTy) : Type := is_included_once : Σ ⊑ Σ'. Hint Mode IsIncludedOnce + + : typeclass_instances. Instance IsIncludedOnce_ext {T} Σ : IsIncludedOnce Σ (T :: Σ). Proof. apply store_ext_incl. Defined. Class IsIncluded (Σ Σ' : StoreTy) : Type := is_included : Σ ⊑ Σ'. 
Hint Mode IsIncluded + + : typeclass_instances. Instance IsIncluded_refl Σ : IsIncluded Σ Σ := refl_incl. Instance IsIncluded_trans Σ Σ' Σ'' : IsIncludedOnce Σ Σ' -> IsIncluded Σ' Σ'' -> IsIncluded Σ Σ''. Proof. intros H H'. exact (trans_incl H H'). Defined. Equations wk {Σ Σ' P} {W : Weakenable P} (p : P Σ) {incl : IsIncluded Σ Σ'} : P Σ' := wk p := weaken incl p. Equations bind_ext {Σ Γ} {P Q : StoreTy -> Type} (f : M Γ P Σ) (g : ∀ {Σ'} `{IsIncluded Σ Σ'}, P Σ' -> M Γ Q Σ') : M Γ Q Σ := bind_ext f g E μ with f E μ := { | None := None; | Some (Σ', μ', x, ext) with g _ ext x (weaken_env ext E) μ' := { | None := None; | Some (_, μ'', y, ext') := Some (_, μ'', y, ext ⊚ ext') } }. Infix ">>='" := bind_ext (at level 20, left associativity). Equations eval_ext (n : nat) {Γ Σ t} (e : Expr Γ t) : M Γ (Val t) Σ := eval_ext 0 _ := timeout; eval_ext (S k) tt := ret val_unit; eval_ext (S k) true := ret val_true; eval_ext (S k) false := ret val_false; eval_ext (S k) (ite b t f) := eval_ext k b >>=' λ{ | _ | ext | val_true => eval_ext k t; | _ | ext | val_false => eval_ext k f }; eval_ext (S k) (var x) := getEnv >>=' fun {Σ ext} E => ret (lookup E x); eval_ext (S k) (abs x) := getEnv >>=' fun {Σ ext} E => ret (val_closure x E); eval_ext (S k) (app (Γ:=Γ) e1 e2) := eval_ext k e1 >>=' λ{ | _ | ext | val_closure e' E => eval_ext k e2 >>=' fun {Σ' ext'} v => usingEnv (all_cons v (wk E)) (eval_ext k e')}; eval_ext (S k) (new e) := eval_ext k e >>=' fun {Σ ext} v => storeM v; eval_ext (S k) (deref l) := eval_ext k l >>=' λ{ | _ | ext | val_loc l => derefM l }; eval_ext (S k) (assign l e) := eval_ext k l >>=' λ{ | _ | ext | val_loc l => eval_ext k e >>=' λ{ | _ | ext | v => updateM (wk l) (wk v) }}. Equations strength {Σ Γ} {P Q : StoreTy -> Type} {w : Weakenable Q} (m : M Γ P Σ) (q : Q Σ) : M Γ (P ⊛ Q) Σ := strength m q E μ with m E μ => { | None => None; | Some (Σ, μ', p, ext) => Some (Σ, μ', storepred_pair p (weaken ext q), ext) }. Infix "^" := strength. (* TODO improve pattern matching lambda to have implicit arguments implicit. Hard because Coq does not keep the implicit status of bind's [g] argument. *) Equations eval (n : nat) {Γ Σ t} (e : Expr Γ t) : M Γ (Val t) Σ := eval 0 _ := timeout; eval (S k) tt := ret val_unit; eval (S k) true := ret val_true; eval (S k) false := ret val_false; eval (S k) (ite b t f) := eval k b >>= λ{ | _ | val_true => eval k t; | _ | val_false => eval k f }; eval (S k) (var x) := getEnv >>= fun Σ E => ret (lookup E x); eval (S k) (abs x) := getEnv >>= fun Σ E => ret (val_closure x E); eval (S k) (app (Γ:=Γ) e1 e2) := eval k e1 >>= λ{ | _ | val_closure e' E => (eval k e2 ^ E) >>= fun Σ' '(storepred_pair v E) => usingEnv (all_cons v E) (eval k e')}; eval (S k) (new e) := eval k e >>= fun Σ v => storeM v; eval (S k) (deref l) := eval k l >>= λ{ | _ | val_loc l => derefM l }; eval (S k) (assign l e) := eval k l >>= λ{ | _ | val_loc l => (eval k e ^ l) >>= λ{ | _ | storepred_pair v l => updateM l v }}. Definition idu : Expr [] (unit ⇒ unit) := abs (var here). Definition idapp : Expr [] unit := app idu tt. Eval vm_compute in eval 100 idapp all_nil all_nil. Definition neg : Expr [] (bool ⇒ bool) := abs (ite (var here) false true). Definition letref {t u} (v : Expr [] t) (b : Expr [ref t] u) : Expr [] u := app (abs b) (new v). 
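(** [weaken_expr] below inserts an unused binding in the middle of a context;
    it is what allows sequencing ([seq]) to be encoded as the application of a
    one-argument abstraction to the first expression. *)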
Equations weaken_expr {Γ Γ' t u} (e1 : Expr (Γ ++ Γ') t) : Expr (Γ ++ u :: Γ') t := weaken_expr tt := tt; weaken_expr true := true; weaken_expr false := false; weaken_expr (ite b t f) := ite (weaken_expr b) (weaken_expr t) (weaken_expr f); weaken_expr (var x) := var _; weaken_expr (abs (t:=t) x) := abs (weaken_expr (Γ := t :: Γ) x); weaken_expr (app e1 e2) := app (weaken_expr e1) (weaken_expr e2); weaken_expr (new e) := new (weaken_expr e); weaken_expr (deref l) := deref (weaken_expr l); weaken_expr (assign l e) := assign (weaken_expr l) (weaken_expr e). Next Obligation. clear weaken_expr. induction Γ in Γ', u, x |- *. now apply there. simpl. depelim x. constructor. apply there. apply IHΓ. apply x. Defined. Definition seq {Γ u} (e1 : Expr Γ unit) (e2 : Expr Γ u) : Expr Γ u := app (abs (weaken_expr (Γ := []) e2)) e1. (* let x = ref true in x := false; !x *) Definition letupdate : Expr [] bool := letref true (seq (assign (var here) false) (deref (var here))). Eval vm_compute in eval 100 letupdate all_nil all_nil. (* Inductive eval_sem {Γ : Ctx} {Σ} {env : Env Γ Σ} : forall {t : Ty}, Expr Γ t -> Val t Σ -> Prop := | eval_tt (e : Expr Γ unit) : eval_sem e val_unit | eval_var t (i : t ∈ Γ) : eval_sem (var i) (lookup env i) | eval_abs {t u} (b : Expr (t :: Γ) u) : eval_sem (abs b) (val_closure b env) | eval_app {t u} (f : Expr Γ (t ⇒ u)) b' (a : Expr Γ t) v : eval_sem f (val_closure b' env) -> eval_sem a v -> forall u, @eval_sem (t :: Γ) _ (all_cons v env) _ b' u -> eval_sem (app f a) u. Lemma eval_correct {n} Γ Σ (μ : Store Σ) t (e : Expr Γ t) env v : eval n e env μ = Some v -> @eval_sem _ _ (weaken v.2.2.2 env) _ e (v.2.2.1). Proof. induction n. intros; discriminate. destruct e; simp eval; try intros [= <-]; simpl; try constructor. admit. admit. pose proof (fun_elim (f:=eval)). specialize (H (fun n Γ Σ t e m => forall env v μ, m env μ = Some v -> @eval_sem _ _ (weaken v.2.2.2 env) _ e v.2.2.1) (fun n Γ Σ t u f a v m => forall env v', @eval_sem _ _ env _ f v.2.2.1 -> m env = Some v' -> @eval_sem _ env _ (app f a) v')). rapply H; clear; intros. discriminate. noconf H. constructor. noconf H. constructor. noconf H. constructor. unfold bind in H1. destruct (eval n e0 env) eqn:Heq. specialize (H _ _ Heq). specialize (H0 v0 _ _ H H1). apply H0. discriminate. unfold bind in H2. destruct (eval k arg env) eqn:Heq. specialize (H _ _ Heq). unfold usingEnv in H2. specialize (H0 v (all_cons v a) v'). econstructor; eauto. Admitted.*)Coq-Equations-1.3.1-8.20/examples/definterp_simple.v000066400000000000000000000107031463127417400221410ustar00rootroot00000000000000Require Import Program.Basics Program.Tactics. Require Import Equations.Equations. Require Import Coq.Vectors.VectorDef. Require Import List. Import ListNotations. Set Equations Transparent. Derive Signature NoConfusion NoConfusionHom for t. Inductive Ty : Set := | unit : Ty | arrow (t u : Ty) : Ty. Derive NoConfusion for Ty. Infix "⇒" := arrow (at level 80). Definition Ctx := list Ty. Reserved Notation " x ∈ s " (at level 70, s at level 10). Inductive In {A} (x : A) : list A -> Type := | here {xs} : x ∈ (x :: xs) | there {y xs} : x ∈ xs -> x ∈ (y :: xs) where " x ∈ s " := (In x s). Arguments here {A x xs}. Arguments there {A x y xs} _. Inductive Expr : Ctx -> Ty -> Set := | tt {Γ} : Expr Γ unit | var {Γ} {t} : In t Γ -> Expr Γ t | abs {Γ} {t u} : Expr (t :: Γ) u -> Expr Γ (t ⇒ u) | app {Γ} {t u} : Expr Γ (t ⇒ u) -> Expr Γ t -> Expr Γ u. Derive Signature NoConfusion NoConfusionHom for Expr. 
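(** The syntax above is the purely functional fragment of [definterp.v]: with
    only unit and arrow types, values will not need a store-typing index. As a
    small illustration (the terms [id_unit] and [id_app] are additions), the
    identity on [unit] and its application are easily written: *)

Example id_unit : Expr [] (unit ⇒ unit) := abs (var here).
Example id_app : Expr [] unit := app id_unit tt.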
Inductive All {A} (P : A -> Type) : list A -> Type := | all_nil : All P [] | all_cons {x xs} : P x -> All P xs -> All P (x :: xs). Arguments all_nil {A} {P}. Arguments all_cons {A P x xs} _ _. Derive Signature NoConfusion NoConfusionHom for All. Section MapAll. Context {A} {P Q : A -> Type} (f : forall x, P x -> Q x). Equations map_all {l : list A} : All P l -> All Q l := map_all all_nil := all_nil; map_all (all_cons p ps) := all_cons (f _ p) (map_all ps). End MapAll. Inductive Val : Ty -> Set := | val_unit : Val unit | val_closure {Γ t u} : Expr (t :: Γ) u -> All Val Γ -> Val (t ⇒ u). Derive Signature NoConfusion NoConfusionHom for Val. Definition Env (Γ : Ctx) : Set := All Val Γ. Equations lookup : forall {A P xs} {x : A}, All P xs -> x ∈ xs -> P x := lookup (all_cons p _) here := p; lookup (all_cons _ ps) (there ins) := lookup ps ins. Equations update : forall {A P xs} {x : A}, All P xs -> x ∈ xs -> P x -> All P xs := update (all_cons p ps) here p' := all_cons p' ps; update (all_cons p ps) (there ins) p' := all_cons p (update ps ins p'). Equations M : Ctx -> Type -> Type := M Γ A := Env Γ -> option A. Require Import Utf8. Equations bind : ∀ {Γ A B}, M Γ A → (A → M Γ B) → M Γ B := bind m f γ := match m γ with | None => None | Some x => f x γ end. Infix ">>=" := bind (at level 20, left associativity). Equations ret : ∀ {Γ A}, A → M Γ A := ret a γ := Some a. Equations getEnv : ∀ {Γ}, M Γ (Env Γ) := getEnv γ := Some γ. Equations usingEnv : ∀ {Γ Γ' A}, Env Γ → M Γ A → M Γ' A := usingEnv γ m γ' := m γ. Equations timeout : ∀ {Γ A}, M Γ A := timeout _ := None. Equations eval : ∀ (n : nat) {Γ t} (e : Expr Γ t), M Γ (Val t) := eval 0 _ := timeout; eval (S k) tt := ret val_unit; eval (S k) (var x) := getEnv >>= fun E => ret (lookup E x); eval (S k) (abs be) := getEnv >>= fun E => ret (val_closure be E); eval (S k) (app fe xe) := eval k fe >>= λ{ | val_closure be E => eval k xe >>= fun xv => usingEnv (all_cons xv E) (eval k be)}. Inductive eval_sem {Γ : Ctx} {env : Env Γ} : forall {t : Ty}, Expr Γ t -> Val t -> Prop := | eval_tt (e : Expr Γ unit) : eval_sem e val_unit | eval_var t (i : t ∈ Γ) : eval_sem (var i) (lookup env i) | eval_abs {t u} (b : Expr (t :: Γ) u) : eval_sem (abs b) (val_closure b env) | eval_app {xt bt Γ' env'} (fe : Expr Γ (xt ⇒ bt)) (be : Expr (xt :: Γ') bt) bv (xe : Expr Γ xt) xv : eval_sem fe (val_closure be env') -> eval_sem xe xv -> @eval_sem (xt :: Γ') (all_cons xv env') _ be bv -> eval_sem (app fe xe) bv. Lemma eval_correct {n} Γ t (e : Expr Γ t) env v : eval n e env = Some v -> @eval_sem _ env _ e v. Proof. pose proof (fun_elim (f:=eval)) as H. specialize (H (fun n Γ t e m => forall env v, m env = Some v -> @eval_sem _ env _ e v) (fun n Γ t u f a v m => forall env v', @eval_sem _ env _ f v -> m env = Some v' -> @eval_sem _ env _ (app f a) v')). - rapply H; clear; intros. + discriminate. + noconf H; constructor. + noconf H; constructor. + noconf H; constructor. + unfold bind in H1. destruct (eval n e0 env) eqn:Heq; try discriminate. specialize (H _ _ Heq). specialize (H0 v0 _ _ H H1). apply H0. + unfold bind in H2. destruct (eval k xe env) eqn:Heq; try discriminate. unfold usingEnv in H2. specialize (H _ _ Heq). specialize (H0 v (all_cons v a) v'). econstructor; eauto. Qed. Coq-Equations-1.3.1-8.20/examples/function_iter_style.v000066400000000000000000000030731463127417400227020ustar00rootroot00000000000000From Equations Require Import Equations. Require Import List Program.Syntax Arith Lia. 
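(** This example contrasts two ways of defining a general recursive function,
    here a truncated binary logarithm based on [div2]: first as the limit of
    iterating its one-step functional [log_iter] (termination is then the
    statement that the iterates stabilize, cf. [f_terminates] and
    [log_terminates]), and at the end directly by well-founded recursion
    ([log']). *)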
Equations div2 (n : nat) : nat := div2 0 := 0; div2 1 := 0; div2 (S (S n)) := S (div2 n). Lemma div2_le : forall x, div2 x <= x. Proof. intros x. funelim (div2 x); auto with arith. Defined. Transparent div2. Equations log_iter (fn : nat -> nat) (n : nat) : nat := log_iter fn 0 := 0; log_iter fn 1 := 0; log_iter fn n := S (fn (div2 n)). Transparent log_iter. Equations iter {A} (n : nat) (f : A -> A) : A -> A := iter 0 f x := x; iter (S n) f x := f (iter n f x). Transparent iter. Definition f_terminates {A} {B : A -> Type} (fn : (forall x : A, B x) -> (forall x : A, B x)) := forall x : A, { y : B x | (exists p, forall k, p < k -> forall g : forall x : A, B x, iter k fn g x = y) }. Lemma log_terminates : f_terminates log_iter. Proof. intro. Subterm.rec_wf_rel IH x lt. destruct x. exists 0. exists 0. intros. inversion H. simpl. reflexivity. simpl. reflexivity. destruct x. exists 0. exists 0. intros. inversion H; simpl; reflexivity. specialize (IH (div2 (S (S x)))). forward IH. simpl. auto using div2_le with arith. destruct IH as [y Hy]. exists (S y). destruct Hy as [p Hp]. exists (S p). intros. destruct k. inversion H. simpl. rewrite Hp. auto. auto with arith. Defined. Definition log x := proj1_sig (log_terminates x). Eval compute in log 109. Equations? log' (n : nat) : nat by wf n lt := log' 0 := 0; log' 1 := 0; log' n := S (log' (div2 n)). Proof. subst n. simpl. auto using div2_le with arith. Defined. Coq-Equations-1.3.1-8.20/examples/general_recursion.v000066400000000000000000000200371463127417400223170ustar00rootroot00000000000000(** * General recursive functions This file explores a way to formalize general recursive functions without worrying about termination proofs, counters or monads. The definitions are actually defined by well-founded recursion on the total relation (which isn't well-founded). Using fueling of the accessibility proof, we can however compute with these definitions inside Coq and reason on them independently of the termination proof. *) (* begin hide *) (** printing elimination %\coqdoctac{elimination}% *) (** printing noconf %\coqdoctac{noconf}% *) (** printing simp %\coqdoctac{simp}% *) (** printing by %\coqdockw{by}% *) (** printing rec %\coqdockw{rec}% *) (** printing Coq %\Coq{}% *) (** printing funelim %\coqdoctac{funelim}% *) (** printing Derive %\coqdockw{Derive}% *) (** printing Signature %\coqdocclass{Signature}% *) (** printing Subterm %\coqdocclass{Subterm}% *) (** printing NoConfusion %\coqdocclass{NoConfusion}% *) From Equations Require Import Equations. Require Import ZArith Lia. Require Import Program. Require Import Psatz. Require Import Nat. Require Import Coq.Vectors.VectorDef. Require Import Relations. Set Keyed Unification. Set Equations Transparent. (* end hide *) (** The total relation. *) Definition total_relation {A : Type} : A -> A -> Prop := fun x y => True. (** We assume an inconsistent axiom here, one should be added function per function. *) Axiom wf_total_init : forall {A}, WellFounded (@total_relation A). #[local] Remove Hints wf_total_init : typeclass_instances. (** We fuel it with some Acc_intro constructors so that definitions relying on it can unfold a fixed number of times still. *) #[local] Instance wf_total_init_compute : forall {A}, WellFounded (@total_relation A). exact (fun A => Acc_intro_generator 10 wf_total_init). Defined. (** Now we define an obviously non-terminating function. *) Equations? nonterm (n : nat) : nat by wf n (@total_relation nat) := nonterm 0 := 0; nonterm (S n) := S (nonterm (S n)). Proof. 
(* Every pair of arguments is in the total relation: so [total_relation (S n) (S n)] *) red. constructor. Defined. Local Obligation Tactic := idtac. (** The automation has a little trouble here as it assumes well-founded definitions implicitly. We show the second equation: [nonterm (S n) = S (nonterm (S n))] using the unfolding equation. *) Next Obligation. intros. now rewrite nonterm_unfold_eq at 1. Defined. (* Note this is a dangerous rewrite rule, so we should remove it from the hints *) (* Print Rewrite HintDb nonterm. *) (** Make nonterm transparent anyway so we can compute with it *) Transparent nonterm. (** We can compute with it (for closed natural numbers) *) Definition at_least_five (n : nat) : bool := match n with | S (S (S (S (S x)))) => true | _ => false end. Transparent wf_total_init_compute. (** Indeed it unfolds enough so that [at_least_five] gives back a result. *) Example check_10 := eq_refl : at_least_five (nonterm 10) = true. Example check_0 := eq_refl : at_least_five (nonterm 0) = false. (** The elimination principle completely abstracts away from the termination argument as well *) Lemma nonterm_ge n : n <= nonterm n. Proof. funelim (nonterm n). reflexivity. lia. Defined. (** We can go as far as defining the (call-by-name) Y combinator and computing with it. *) Section Y. Context {A : Type}. Equations? Y (f : A -> A) : A by wf f (@total_relation (A -> A)) := Y f := f (Y f). Proof. (* Every pair of arguments is in the total relation: so [total_relation f f] *) red. constructor. Defined. Obligation Tactic := idtac. (** The automation has a little trouble here as it assumes well-founded definitions implicitly. We show the equation [Y f = f (Y f)] using the unfolding equation. *) Next Obligation. intros. now rewrite Y_unfold_eq at 1. Defined. End Y. (** Using [Y], we can easily define any general recursive function. *) Definition fact := Y (fun (fact : nat -> nat) (x : nat) => match x with | 0 => 1 | S n => S n * fact n end). Check eq_refl : fact 8 = 40320. (** [Y] is only good in call-by-name or call-by-need, so we switch to Haskell *) Extraction Language Haskell. Recursive Extraction fact. (* y :: (a1 -> a1) -> a1 y x = x (y x) fact :: Nat -> Nat fact = y (\fact0 x -> case x of { O -> S O; S n -> mul (S n) (fact0 n)}) *) (** Let's define an efficient version whose termination is not entirely obvious. *) Definition factN := Y (fun (fact : N -> N) (x : N) => match x with | N0 => 1%N | Npos n => (Npos n * fact (Pos.pred_N n))%N end). (** [1001!] is pretty large. 
*) Definition fact1001 := 402789647337170867317246136356926989705094239074925347176343710340368450911027649612636252695456374205280468598807393254690298539867803367460225153499614535588421928591160833678742451354915921252299285456946271396995850437959540645019696372741142787347450281325324373824456300226871609431497826989489109522725791691167945698509282421538632966523376679891823696900982075223188279465194065489111498586522997573307838057934994706212934291477882221464914058745808179795130018969175605739824237247684512790169648013778158661520384916357285547219660337504067910087936301580874662367543921288988208261944834178369169805682489420504038334529389177845089679546075023305854006141256288633820079940395329251563788399404652902154519302928365169452383531030755684578503851488154092323576150311569325891190105926118761607100286827930472944913272420825078912158741589850136017030887975452922434889688775883386977825215904423682478943313806072144097432418695807412571292308739802481089407002523955080148184062810447564594783139830113821372260474145316521647368313934670783858482781506915288378941348078689691815657785305896912277993200639858696294199549107738635599538328374931258525869323348477334798827676297868823693023377418942304272267800509765805435653787530370118261219994752588866451072715583785495394684524593296728611334955079882857173250037068541860372512693170819259309411027837176612444692649174536429745421086287708588130082168792750697158901737130221751430550976429258055277255676893874108456870904122902259417224707137723406125811549952159629766771063079472679280213882978523785424760309678138268708239764925768714349554665438389311198715040908077757086900159389712443987670244241787904585093011546861502058550090914877900852701619648229332192401075747543562989953271508977501771085759521631427816116191761031257454497039673414248149210836002497114107565960458576525212556159634975715552638678172137468172843066451093984443636560722213668172225585711566558134467392654185460222589723312097599987253417831473939565071006344352518096564427781204200068323913056897090916602712260306869786107237077572445866572945760977721639408338430009976028970539150822336553856613962747814621747092348996915755983464741082000337526945990059365493439921937093368896754791416759604324895514660325913157843796039917819613717350380997781225472000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000%N. (** We can still compute with our [Y] combinator *) Time Check (@eq_refl _ (fact1001) <: factN 1001 = fact1001). (** It takes [0.8sec] using [vm_compute]. *) (** An alternative is to use the total relation directly. *) Equations factN' (n : N) : N by wf n (@total_relation N) := | N0 => 1; | Npos n => (Npos n * factN' (Pos.pred_N n)). Next Obligation. constructor. Defined. (** Unsurprisingly, this computes just as well *) Time Check (@eq_refl _ (fact1001) <: factN' 1001 = fact1001). (** It takes [0.8sec] using [vm_compute] as well. *) (** [factN'] also makes sense in a strict/call-by-value language like OCaml. *) Extraction Language OCaml. Extraction factN'. 
(* (** val factN' : n -> n **) let rec factN' = function | N0 -> Npos XH | Npos p -> N.mul (Npos p) (let y = Pos.pred_N p in factN' y) *) Coq-Equations-1.3.1-8.20/examples/graph_complete.v000066400000000000000000000025251463127417400216040ustar00rootroot00000000000000Set Warnings "-notation-overridden". Require Import Equations.Type.All. Require Import Examples.HoTT_light. Set Universe Polymorphism. Require Import Relations. Import Id_Notations. Import Sigma_Notations. Derive Signature for Id. Equations neg (b : bool) : bool := neg true := false; neg false := true. Definition neg_fib (x : bool) := Σ a : bool, neg_graph a x. #[local] Hint Resolve neg_graph_correct : core. Definition neg_graph_rec := neg_graph_rect. Scheme neg_graph_rect_dep := Induction for neg_graph Sort Type. Lemma hfiber_graph : (Σ x : bool, hfiber neg x) <~> Σ x : bool, neg_fib x. Proof. unshelve refine {| equiv_fun := fun h => (h.1, _) |}. red. destruct h as [res [arg Heq]]. exists arg. simpl. destruct Heq. auto. simpl. unshelve refine {| equiv_inv h := (h.1, _) |}. red. destruct h as [res [arg Heq]]. exists arg. simpl. induction Heq; reflexivity. red. - intros [x [res Hind]]. simpl. induction Hind using neg_graph_rect_dep; simpl; reflexivity. - intros [res [arg Heq]]. simpl. destruct Heq; simpl. apply path_sigma_uncurried. simpl. exists id_refl. simpl. apply path_sigma_uncurried. simpl. exists id_refl. simpl. destruct arg. simpl. reflexivity. simpl. reflexivity. - simpl. intros [res [arg Heq]]. destruct Heq. destruct arg. simpl. reflexivity. simpl. reflexivity. Qed. Coq-Equations-1.3.1-8.20/examples/ho_finite_branching.v000066400000000000000000000015571463127417400225760ustar00rootroot00000000000000(** * Higher-order recursion, an example with finite branching trees *) From Equations Require Import Equations. Require Import Examples.Fin. Inductive ho : Set := | base : nat -> ho | lim : forall n : nat, (fin n -> ho) -> ho. Derive NoConfusion for ho. Equations lift_fin {n : nat} (f : fin n) : fin (S n) := lift_fin fz := fz; lift_fin (fs f) := fs (lift_fin f). Equations maxf (n : nat) (f : fin n -> nat) : nat := maxf 0 f := 0; maxf (S n) f := max (f (gof n)) (maxf n (fun y : fin n => f (lift_fin y))). Equations horec_struct (x : ho) : nat := horec_struct (base n) := n; horec_struct (lim k f) := maxf k (fun x => horec_struct (f x)). Derive Subterm for ho. Equations horec (x : ho) : nat by wf x ho_subterm := horec (base n) := n; horec (lim k f) := maxf k (fun x => horec (f x)). Definition horec_test : horec (lim 7 (fun fs => base (fog fs))) = 6 := eq_refl. Coq-Equations-1.3.1-8.20/examples/misc.v000066400000000000000000000026771463127417400175560ustar00rootroot00000000000000 (** In general, one can require more elaborate loop invariants. This [fast_length] function computes the length of a list using tail recursion: *) Equations fast_length {A} (l : list A) : nat := fast_length l := go 0 l where go : nat -> list A -> nat := go n [] := n; go n (_ :: l) := go (S n) l. (** To prove its correctness, we show its pointwise equivalence to regular [length l]. *) Lemma fast_length_length : forall {A} (l : list A), length l = fast_length l. Proof. (** Applying the eliminator is a bit more tricky in this case: we must the length *) apply (fast_length_elim (fun A l res => length l = res) (fun A l res l' lenl => res + length l' = lenl)); intros l H; simpl in *; intuition auto with arith; lia. Qed. Equations list_init {A} (n : nat) (a : A) : list A := list_init 0 _ := []; list_init (S n) x := x :: list_init n x. Require Import NArith. 
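(** A quick reminder, added as an illustration (not part of the original file):
    [positive] numerals are binary, e.g. [6%positive] is [(xH~1)~0], and the
    clauses of [pos_list_init] below recurse on exactly this structure. *)
Example positive_six : 6%positive = ((xH~1)~0)%positive.
Proof. reflexivity. Qed.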
Equations pos_list_init {A} (n : positive) (a : A) : list A := pos_list_init xH x := [x]; pos_list_init (n~1) x := let l := pos_list_init n x in x :: l ++ l; pos_list_init (n~0) x := let l := pos_list_init n x in x :: l ++ l. (* Time Definition big_interval := Eval vm_compute in pos_list_init 20000 true. *) (* Extraction length. *) (* Extraction fast_length. *) (* Time Definition slow := Eval vm_compute in length big_interval. *) (* Time Definition fast := Eval vm_compute in fast_length big_interval. *) Coq-Equations-1.3.1-8.20/examples/mutualwfrec.v000066400000000000000000000113531463127417400211500ustar00rootroot00000000000000(* begin hide *) (**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) From Equations Require Import Equations. From Coq Require Import List Program.Syntax Arith Lia. Require Import List Wellfounded. Import ListNotations. (* end hide *) (** * Mutual well-founded recursion using dependent pattern-matching We present a simple encoding of mutual recursion through the use of dependent pattern-matching on a GADT-like representation of the functions prototypes or just using strong elimination on an enumerated type. We use a simple toy measure here to justify termination, but more elaborate well-founded relations can be used as well. *) Set Equations Transparent. Import Sigma_Notations. (** We first declare the prototypes ouf our mutual definitions. *) Set Universe Polymorphism. Inductive ty : forall (A : Type) (P : A -> Type), Set := | ty0 : ty nat (fun _ => nat) | ty1 : ty (list nat) (fun _ => bool). Derive Signature NoConfusion for ty. (** Our measure is simple, just the natural number or length of the list argument. *) Equations measure : (Σ A P (_ : A), ty A P) -> nat := measure (_, _, a, ty0) => a; measure (_, _, a, ty1) => length a. Definition rel := Program.Wf.MR lt measure. #[local] Instance: WellFounded rel. Proof. red. apply Wf.measure_wf. apply Wf_nat.lt_wf. Defined. Definition pack {A} {P} (x : A) (t : ty A P) := (A, P, x, t) : (Σ A P (_ : A), ty A P). (** We define the function by recursion on the abstract packed argument. Using dependent pattern matching, the clauses for [ty0] refine the argument and return type to [nat] and similarly for [ty1], we can hence do pattern-matching as usual on each separate definition. *) Equations? double_fn {A} {P} (t : ty A P) (x : A) : P x by wf (pack x t) rel := double_fn ty0 n := n + 0; double_fn ty1 nil := true; double_fn ty1 (x :: xs) := 0 true | x :: xs => 0 nat). Equations list_size (l : list A) : nat := list_size nil := 0; list_size (cons x xs) := S (f x + list_size xs). End list_size. Transparent list_size. Section RoseMut. Context {A : Set}. Inductive t : Set := | leaf (a : A) : t | node (l : list t) : t. Derive NoConfusion for t. Equations size (r : t) : nat := size (leaf _) := 0; size (node l) := S (list_size size l). (** An alternative way to define mutual definitions on nested types *) Equations mutmeasure (b : bool) (arg : if b then t else list t) : nat := mutmeasure true t := size t; mutmeasure false lt := list_size size lt. (** The argument and return type depend on the function label ([true] or [false] here) and any well-founded recursive call is allowed. *) Equations? 
elements (b : bool) (x : if b then t else list t) : if b then list A else list A by wf (mutmeasure b x) lt := elements true (leaf a) := [a]; elements true (node l) := elements false l; elements false nil := nil; elements false (cons t ts) := elements true t ++ elements false ts. Proof. all:lia. Qed. (** Dependent return types are trickier but possible: *) Equations? elements_dep (b : bool) (x : if b then t else list t) : (if b as b' return (if b' then t else list t) -> Set then fun x : t => list A else fun x : list t => list A) x by wf (mutmeasure b x) lt := elements_dep true (leaf a) := [a]; elements_dep true (node l) := elements_dep false l; elements_dep false nil := nil; elements_dep false (cons t ts) := elements_dep true t ++ elements_dep false ts. Proof. all:lia. Qed. End RoseMut. Coq-Equations-1.3.1-8.20/examples/nested_mut_rec.v000066400000000000000000000073501463127417400216140ustar00rootroot00000000000000(** printing elimination %\coqdoctac{elimination}% *) (** printing <= %\rightarrow% #⇐# *) (** * Nested and mutual structurally recursive definitions Example of a term structure with two constructors taking lists of terms. *) From Equations Require Import Equations. Require Import Program Arith List Compare_dec. Import ListNotations. (** A nested recursive definition of terms with lists of terms *) Inductive term : Set := | Var (n : nat) | Lam (t : term) | App (t : term) (l : list term) | MetaVar (n : nat) (l : list term). (** Defining capture-avoiding substitution for this language: the case of variables. *) Equations subst_var (k : nat) (u : term) (t : nat) : term := subst_var k u n with k ?= n => { | Eq => u; | Gt => Var n; | Lt => Var (pred n) }. (** Nested recursive definition using a top-level [where] definition. The nested recursive fixpoint defined by [subst_tlist] can be used multiple time in [subst_term], and of course recursively call itself and [subst_term]. The regular structural guardedness check is run on this definition to check that it is terminating. Note that one can optionally add a [struct x] annotation to [where] clauses to indicate which arguments decreases explicitely, otherwise _only the last argument_ is tried. *) Equations subst_term (k : nat) (u : term) (t : term) : term := { subst_term k u (Var n) => subst_var k u n; subst_term k u (Lam t) => Lam (subst_term (S k) u t); subst_term k u (App t l) => App (subst_term k u t) (subst_tlist k u l); subst_term k u (MetaVar t l) => MetaVar t (subst_tlist k u l) } where subst_tlist (k : nat) (u : term) (t : list term) : list term := { subst_tlist k u nil => nil; subst_tlist k u (cons t ts) => cons (subst_term k u t) (subst_tlist k u ts) }. (** Remark that our definition of [subst_tlist] is equivalent to a [List.map]: but we need the "expanded" version to properly recognize recursive calls. *) Lemma nested_map k u t : subst_tlist k u t = List.map (subst_term k u) t. Proof. induction t; simpl; rewrite ?IHt; trivial. Qed. (** The elimination principle generated from this definition is giving a conjunction of two predicates as result. One may want to instantiate [P0] with [Forall P] to recover a [map]-like elimination principle. 
*) Check subst_term_elim : forall (P : nat -> term -> term -> term -> Type) (P0 : nat -> term -> list term -> list term -> Type), (forall (k : nat) (u : term) (n : nat), P k u (Var n) (subst_var k u n)) -> (forall (k : nat) (u t : term), P (S k) u t (subst_term (S k) u t) -> P k u (Lam t) (Lam (subst_term (S k) u t))) -> (forall (k : nat) (u t0 : term) (l : list term), P k u t0 (subst_term k u t0) -> P0 k u l (subst_tlist k u l) -> P k u (App t0 l) (App (subst_term k u t0) (subst_tlist k u l))) -> (forall (k : nat) (u : term) (n0 : nat) (l0 : list term), P0 k u l0 (subst_tlist k u l0) -> P k u (MetaVar n0 l0) (MetaVar n0 (subst_tlist k u l0))) -> (forall (k : nat) (u : term), P0 k u []%list []%list) -> (forall (k : nat) (u t : term) (l : list term), P k u t (subst_term k u t) -> P0 k u l (subst_tlist k u l) -> P0 k u (t :: l)%list (subst_term k u t :: subst_tlist k u l)%list) -> (forall (k : nat) (u t : term), P k u t (subst_term k u t)) * (forall (k : nat) (u : term) (t : list term), P0 k u t (subst_tlist k u t)). (** One can experiment to see that this provides the right induction hypotheses for App and MetaVar *) Lemma subst_subst k u t : subst_term k u t = subst_term k u t. Proof. revert k u t. refine (fst (subst_term_elim (fun k u t c => c = c) (fun k u l c => c = c) _ _ _ _ _ _)); trivial. Qed. Coq-Equations-1.3.1-8.20/examples/nm.v000066400000000000000000000064541463127417400172320ustar00rootroot00000000000000(** Proving Termination of Normalization Functions for Conditional Expressions, L. Paulson *) From Equations Require Import Equations. Require Import List Program.Syntax Arith Lia. Inductive exp := At | If : exp -> exp -> exp -> exp. Equations exp_size : exp -> nat := exp_size At := 1; exp_size (If e1 e2 e3) := exp_size e1 * (1 + exp_size e2 + exp_size e3). Transparent exp_size. Lemma exp_size_pos (x : exp) : (0 < exp_size x)%nat. Proof. funelim (exp_size x); auto; try lia. apply Nat.mul_pos_pos; auto. lia. Qed. Hint Resolve exp_size_pos : core. Set Program Mode. Lemma size_1: forall y z : exp, exp_size y < S (exp_size y + exp_size z + 0). Proof. intros y z. lia. Qed. Lemma size_2: forall y z : exp, exp_size z < S (exp_size y + exp_size z + 0). Proof. intros y z. lia. Qed. Lemma size_3 u v w y z : exp_size (If v y z) < exp_size (If (If u v w) y z). Proof. simp exp_size. assert (1 + exp_size y + exp_size z > 0) by lia. revert H. generalize (1 + exp_size y + exp_size z). intros n Hn. generalize (exp_size_pos u). intros. rewrite 2 Nat.mul_add_distr_l. nia. Qed. Lemma size_4 u v w y z : exp_size (If w y z) < exp_size (If (If u v w) y z). Proof. simp exp_size. assert (1 + exp_size y + exp_size z > 0) by lia. revert H. generalize (1 + exp_size y + exp_size z). intros n Hn. generalize (exp_size_pos u). intros. rewrite 2 Nat.mul_add_distr_l. nia. Qed. Lemma size_5: forall u v w y z x : exp, exp_size x <= exp_size (If v y z) -> forall x0 : exp, exp_size x0 <= exp_size (If w y z) -> exp_size (If u x x0) < exp_size (If (If u v w) y z). Proof. intros u v w y z x l x0 l0. simp exp_size. rewrite <- Nat.mul_assoc. apply mult_lt_compat_l; auto. eapply Nat.le_lt_trans with (1 + exp_size (If v y z) + exp_size (If w y z)). lia. simp exp_size. simpl. rewrite Nat.mul_add_distr_r. generalize (exp_size_pos y). lia. Qed. Lemma size_6: forall u v w y z x : exp, exp_size x <= exp_size (If v y z) -> forall x0 : exp, exp_size x0 <= exp_size (If w y z) -> forall x1 : exp, exp_size x1 <= exp_size (If u x x0) -> exp_size x1 <= exp_size (If (If u v w) y z). Proof. intros u v w y z x l x0 l0 x1 l1. 
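(* The bound follows by transitivity through [exp_size (If u x x0)] (hypothesis [l1]),
   then by monotonicity of multiplication in its right factor. *)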
simp exp_size. transitivity (exp_size (If u x x0)); auto. simp exp_size. simp exp_size in *. rewrite <- Nat.mul_assoc. apply mult_le_compat_l. transitivity (1 + (exp_size v * (1 + exp_size y + exp_size z)) + (exp_size w * (1 + exp_size y + exp_size z))). lia. lia. Defined. Equations? nm_dep (e : exp) : { e' : exp | exp_size e' <= exp_size e } by wf (exp_size e) lt := nm_dep At := At; nm_dep (If At y z) := If At (nm_dep y) (nm_dep z); nm_dep (If (If u v w) y z) with nm_dep (If v y z), nm_dep (If w y z) := { | exist _ t Ht | exist _ e He := nm_dep (If u t e) }. Proof. apply size_1. apply size_2. all:repeat destruct nm_dep; simpl; try solve [simp exp_size; simpl; try lia]. apply size_3. apply size_4. apply size_5; auto. simpl in *. eapply size_6; eauto. Defined. Equations nm (e : exp) : exp := nm At := At; nm (If At y z) := If At (nm_dep y) (nm_dep z); nm (If (If u v w) y z) := nm_dep (If u (nm_dep (If v y z)) (nm_dep (If w y z))). Lemma nm_eq e : nm e = nm_dep e. Proof. funelim (nm_dep e); simp nm. simpl. rewrite Heq, Heq0. simpl. reflexivity. Qed. Extraction nm_dep. Transparent nm. Eval vm_compute in nm (If (If At At At) At At). Coq-Equations-1.3.1-8.20/examples/ordinals.v000066400000000000000000000654731463127417400204410ustar00rootroot00000000000000Require Import Arith. Require Import Coq.Logic.Eqdep_dec. Require Import Coq.Arith.Peano_dec. Require Import List. Require Import Recdef. (** Arithmétique *) (* Sur la soustraction (entière) *) Lemma minus_Sn_n : forall (n:nat), (minus (S n) n) = (S 0). induction n; auto. Qed. Lemma lt_S_r : forall (n1 n2:nat), (lt n1 n2) -> exists (n:nat), n2 = (S n). destruct n2. intro. exfalso. apply (Nat.nlt_0_r n1). assumption. intro. exists n2. trivial. Qed. Lemma minus_lt_S : forall (n1 n2:nat), (lt n1 n2) -> exists (n:nat), (minus n2 n1) = (S n). intros. elim (lt_S_r n1 n2 H). intros n H1. rewrite H1. exists (minus n n1). rewrite Nat.sub_succ_l. trivial. apply le_S_n. rewrite H1 in H. auto. Qed. (* Sur l'ordre strict 'lt' *) Lemma lt_1_0 : forall (n:nat), (lt n 1) -> (n=0). destruct n. auto. intro. inversion H. exfalso. apply (Nat.nle_succ_0 (S n)). assumption. Qed. Lemma lt_S_case : forall (m n:nat), (lt m (S n)) -> (lt m n) \/ (m=n). intros m n. generalize m. induction n. intros. rewrite (lt_1_0 m0 H). tauto. destruct m0. intro. auto with arith. intro. elim IHn with (m:=m0); auto with arith. Qed. Lemma not_lt_Sn_n : forall (n:nat), not (lt (S n) n). induction n. auto with arith. intro. auto with arith. Qed. (* Sur l'ordre large 'le' *) Lemma not_le_Sn_n : forall (n:nat), not (le (S n) n). induction n. auto with arith. intro. auto with arith. Qed. (* Cas sur les entiers *) Lemma nat_compare_case : forall (n1 n2:nat), (lt n1 n2) \/ (n1=n2) \/ (lt n2 n1). induction n1. destruct n2. tauto. left. auto with arith. destruct n2. right. right. auto with arith. elim (IHn1 n2). intro. left. auto with arith. intro. elim H. intro. right. left. auto. intro. right. right. auto with arith. Qed. (** Sur les listes *) (* Sur la longueur. *) Lemma length_0_nil : forall (w:(list nat)), (length w)=0 -> w=nil. destruct w. auto. intro. discriminate H. Qed. Lemma length_Sn_cons : forall (w:(list nat)) (n:nat), (length w)=(S n) -> exists (a:nat) (w':(list nat)), w = (cons a w'). destruct w. intros. discriminate H. intros. exists n. exists w. trivial. Qed. 
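(* A concrete instance of [minus_Sn_n], added as an illustration
   (not part of the original file). *)
Example minus_Sn_n_instance : 3 - 2 = 1.
Proof. exact (minus_Sn_n 2). Qed.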
(* Principe d'induction sur la longueur des listes *) Lemma list_length_ind_S : forall (P: (list nat) -> Prop), (P nil) -> (forall (n:nat), (forall (xs:(list nat)), (lt (length xs) (S n)) -> (P xs)) -> forall (xs:(list nat)), (length xs)=(S n) -> (P xs)) -> forall (n:nat) (xs:(list nat)), (lt (length xs) (S n) -> (P xs)). intros P P0 Plt. induction n. intros. assert (xs=nil). apply length_0_nil. apply lt_1_0. assumption. rewrite H0. assumption. intros. elim (lt_S_case (length xs) (S n) H). auto. intro. apply (Plt n); auto. Qed. Lemma list_length_ind : forall (P: (list nat) -> Prop), (P nil) -> (forall (n:nat), (forall (xs:(list nat)), (lt (length xs) (S n)) -> (P xs)) -> forall (xs:(list nat)), (length xs)=(S n) -> (P xs)) -> forall (xs:(list nat)), (P xs). intros. apply list_length_ind_S with (n:=(length xs)). assumption. assumption. auto with arith. Qed. (* Extension d'une liste avec des 0 (en tête) *) Fixpoint zs (n:nat) : (list nat) := match n with 0 => nil | (S n) => (cons 0 (zs n)) end. Lemma zs_len: forall (n:nat), (length (zs n))=n. induction n. auto. simpl. rewrite IHn. trivial. Qed. (* Complétion d'une liste en fonction d'une autre. Le résultat est une liste de la longueur de la plus grande. *) Definition dist (w1:(list nat)) (w2:(list nat)) := (minus (length w1) (length w2)). Definition padd (w1:(list nat)) (w2:(list nat)) := (app (zs (dist w2 w1)) w1). Lemma padd_len_lt_cons : forall (w1 w2:(list nat)), (lt (length w1) (length w2)) -> exists (w:(list nat)), (padd w1 w2)=(cons 0 w). intros. unfold padd. unfold dist. elim (minus_lt_S (length w1) (length w2) H). intros. rewrite H0. simpl. exists (app (zs x) w1). trivial. Qed. Lemma padd_len_le_len : forall (w1 w2:(list nat)), (le (length w1) (length w2)) -> (length (padd w1 w2)) = (length w2). intros. unfold padd. unfold dist. rewrite app_length. rewrite zs_len. rewrite Nat.sub_add by (exact H); reflexivity. Qed. Lemma padd_cons_0 : forall (w1 w2:(list nat)) (a:nat), (length w1) = (length w2) -> (padd w1 (cons a w2)) = (cons 0 w1). intros. unfold padd. unfold dist. rewrite H. simpl length. rewrite (minus_Sn_n (length w2)). simpl. trivial. Qed. (** Sur l'accessibilité. Tribute to P. Casteran: http://www.labri.fr/perso/casteran/Cantor/HTML/AccP.html#AccElim3 *) Theorem AccElim2 : forall (A B:Set) (RA: A -> A -> Prop) (RB: B -> B -> Prop), forall (P : A -> B -> Prop), (forall x y, (forall (t : A), RA t x -> forall y', Acc RB y' -> P t y') -> (forall (t : B), RB t y -> P x t) -> (P x y)) -> forall x y, Acc RA x -> Acc RB y -> P x y. Proof. intros A B RA RB P H x y Ax; generalize y; clear y. elim Ax. clear Ax x; intros x HAccx Hrecx y Ay. elim Ay. clear Ay y. intros y HAccy Hrecy. apply H. auto. auto. Qed. (** Relation d'ordre sur les listes d'entiers considérés comme des ordinaux (formes normales de Cantor à exposants finis). *) Inductive wlt : (list nat) -> (list nat) -> Prop := wlt_nil : forall (a:nat)(w:(list nat)), (wlt nil (cons (S a) w)) | wlt_0_w : forall (w1 w2:(list nat)), (wlt w1 w2) -> (wlt (cons 0 w1) w2) | wlt_w_0 : forall (w1 w2:(list nat)), (wlt w1 w2) -> (wlt w1 (cons 0 w2)) | wlt_len : forall (w1 w2:(list nat)) (a1 a2:nat), (length w1 < length w2) -> (wlt (cons (S a1) w1) (cons (S a2) w2)) | wlt_lt : forall (w1 w2:(list nat)) (a1 a2:nat), (length w1 = length w2) -> (lt a1 a2) -> (wlt (cons (S a1) w1) (cons (S a2) w2)) | wlt_wlt : forall (w1 w2:(list nat)) (a:nat), (length w1 = length w2) -> (wlt w1 w2) -> (wlt (cons (S a) w1) (cons (S a) w2)). 
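(* A concrete instance, added as an illustration (not part of the original file):
   with equal lengths, [wlt] compares the head coefficients, so [cons 1 nil] is
   below [cons 2 nil]. *)
Example wlt_one_two : wlt (cons 1 nil) (cons 2 nil).
Proof. apply wlt_lt. reflexivity. auto with arith. Qed.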
(* 'nil' est minimal *) Lemma not_wlt_nil : forall (w:(list nat)), not (wlt w nil). induction w. intro. inversion H. case a. intro. inversion H. auto. intro. intro. inversion H. Qed. (* Lemmes d'inversion *) Lemma wlt_0_w_inv: forall (w1 w2:(list nat)), (wlt (cons 0 w1) w2) -> (wlt w1 w2). induction w2. intros. absurd (wlt (cons 0 w1) nil). apply (not_wlt_nil (cons 0 w1)). assumption. intro. inversion H. assumption. apply wlt_w_0. auto. Qed. Lemma wlt_w_0_inv: forall (w1 w2:(list nat)), (wlt w1 (cons 0 w2)) -> (wlt w1 w2). induction w1. intros. inversion H. assumption. intros. inversion H. apply wlt_0_w. auto. assumption. Qed. (* Autres résultats négatifs *) Lemma not_wlt_len_left : forall (w1 w2:(list nat)) (a:nat), (le (length w2) (length w1)) -> not (wlt (cons (S a) w1) w2). induction w2. intros. apply not_wlt_nil. intros. destruct a. intro. absurd (wlt (cons (S a0) w1) w2). apply IHw2. apply Nat.le_trans with (m:=(length (cons 0 w2))). auto with arith. assumption. apply wlt_w_0_inv. assumption. intro. inversion H0. assert (lt (length (cons (S a) w2)) (length w2)). apply Nat.le_lt_trans with (m := (length w1)); assumption. apply (not_lt_Sn_n (length w2)). assumption. rewrite H4 in H. apply (not_le_Sn_n (length w2)); assumption. rewrite H4 in H. apply (not_le_Sn_n (length w2)); assumption. Qed. Lemma not_wlt_Sn_0 : forall (w1 w2:(list nat)) (a:nat), (length w1) = (length w2) -> not (wlt (cons (S a) w1) (cons 0 w2)). intros. intro. inversion H0. apply (not_wlt_len_left w1 w2 a). rewrite H. auto with arith. apply wlt_w_0_inv. assumption. Qed. Lemma not_wlt_len: forall (w1 w2:(list nat)) (a:nat), (length w2 <= length w1) -> not (wlt (cons (S a) w1) w2). induction w2. intros. intro. exfalso. apply (not_wlt_nil (cons (S a) w1)). assumption. intro. case a. intro. intro. apply IHw2 with (a:=a0). apply Nat.le_trans with (m:=(length (cons 0 w2))). simpl. auto with arith. assumption. apply wlt_w_0_inv. assumption. intros. intro. inversion H0. apply (Nat.lt_irrefl (length w1)). simpl in H. apply Nat.lt_trans with (m:=(length w2)); assumption. rewrite H4 in H. apply (Nat.nle_succ_diag_l (length w2)). assumption. rewrite H4 in H. apply (Nat.nle_succ_diag_l (length w2)). assumption. Qed. (* Invariance de 'wlt' pour la complétion à 0 (en tête) *) Lemma wlt_wlt_zs_right : forall (n:nat) (w1 w2:(list nat)), (wlt w1 w2) -> (wlt w1 (app (zs n) w2)). induction n. auto. intros. simpl. apply wlt_w_0. auto. Qed. Lemma wlt_zs_wlt_right : forall (n:nat) (w1 w2:(list nat)), (wlt w1 (app (zs n) w2)) -> (wlt w1 w2). induction n. auto. simpl. intros. apply IHn. apply wlt_w_0_inv. assumption. Qed. Lemma wlt_wlt_zs_left : forall (n:nat) (w1 w2:(list nat)), (wlt w1 w2) -> (wlt (app (zs n) w1) (w2)). induction n. auto. intros. simpl. apply wlt_0_w. auto. Qed. (* Caractérisation en fonction de la longueur: si '(wlt w1 w2)' et '#w2 < #w1' alors 'w1' commence par des 0 *) Lemma wlt_gt_length : forall (w1 w2:(list nat)), (wlt w1 w2) -> (lt (length w2) (length w1)) -> exists (n:nat) (w:(list nat)), (w1 = (app (zs n) w)) /\ (length w)=(length w2) /\ (wlt w w2). induction w1 as [| a w1 IHw1]; [intros w2 _ []%Nat.nlt_0_r |]. intros w2 H Hl; simpl in Hl; apply <-Nat.succ_le_mono in Hl. destruct a as [| a]. - apply Nat.le_lteq in Hl as [Hl | Hl]. + apply wlt_0_w_inv in H; specialize (IHw1 w2 H Hl) as [n [w' [H'1 [H'2 H'3]]]]. now exists (S n); exists w'; split; [rewrite H'1 | split]. + exists (S 0); exists w1; split; [reflexivity | split]; [now symmetry |]. exact (wlt_0_w_inv _ _ H). 
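(* Remaining case: the head of [w1] is a successor, which is impossible since [w1]
   is at least as long as [w2] ([not_wlt_len_left]). *)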
- now exfalso; apply not_wlt_len_left with (2 := H). Qed. (** Restriction de l'ordre aux listes de même longueur. (avec complémentation possible à 0): c'est l'ordre lexicographique. *) (* La relation sur les listes de même longueur *) Inductive wlt_pad : (list nat) -> (list nat) -> Prop := wlt_pad_len : forall (a:nat) (w1 w2:(list nat)), (le (length w1) (length w2)) -> (wlt_pad (padd w1 (cons (S a) w2)) (cons (S a) w2)) | wlt_pad_lt : forall (a1 a2:nat) (w1 w2:(list nat)), (length w1) = (length w2) -> (lt a1 a2) -> (wlt_pad (cons (S a1) w1) (cons (S a2) w2)) | wlt_pad_wlt_pad : forall (a:nat) (w1 w2:(list nat)), (length w1) = (length w2) -> (wlt_pad w1 w2) -> (wlt_pad (cons a w1) (cons a w2)). (* Relations entre l'ordre sur toute liste et l'ordre restreint. *) Lemma wlt_wlt_pad : forall (w1 w2:(list nat)), (length w1) = (length w2) -> (wlt w1 w2) -> (wlt_pad w1 w2). intros w1 w2. generalize w1. clear w1. induction w2. intros. exfalso. apply (not_wlt_nil w1). assumption. destruct w1. intros. discriminate H. intros. inversion H0. (* 1: wlt_0_w *) destruct a. apply wlt_pad_wlt_pad. auto. apply IHw2. auto. apply wlt_w_0_inv. assumption. rewrite <- (padd_cons_0 w1 w2 a). apply wlt_pad_len. injection H. intro. rewrite H5. auto with arith. auto. (* 2: wlt_w_0 *) destruct n. apply wlt_pad_wlt_pad. auto with arith. apply IHw2. auto with arith. apply wlt_w_0_inv. apply wlt_0_w_inv. rewrite <- H1 in H0. assumption. exfalso. apply (not_wlt_Sn_0 w1 w2 n). auto with arith. rewrite <- H1 in H0. assumption. (* 3 *) exfalso. apply (Nat.lt_irrefl (length w2)). injection H. intro. rewrite H6 in H2. assumption. (* 4 *) apply wlt_pad_lt. auto with arith. assumption. (* 5 *) apply wlt_pad_wlt_pad. auto with arith. apply IHw2. auto with arith. assumption. Qed. Lemma wlt_wlt_pad_zs : forall (w1 w2:(list nat)), (length w1) < (length w2) -> (wlt w1 w2) -> (wlt_pad (padd w1 w2) w2). intros. apply wlt_wlt_pad. apply padd_len_le_len. auto with arith. apply wlt_wlt_zs_left. assumption. Qed. (** Accessibilité pour l'ordre restreint. *) Lemma Acc_wlt_pad_ind : forall (n:nat), (forall (w:(list nat)), (lt (length w) (S n)) -> Acc wlt_pad w) -> forall (w:(list nat)), (length w)=(S n) -> Acc wlt_pad w. intros. elim (length_Sn_cons w n H0). intros a H1. elim H1. clear H1. intros w' H1. rewrite H1. rewrite H1 in H0. clear H1. generalize H0. pattern a, w'. apply AccElim2 with (RA:=lt) (RB:=wlt_pad). intros a' w'' H1 H2 H3. apply Acc_intro. intros w''' H4. inversion H4. elim (padd_len_lt_cons w1 (cons (S a0) w'')). intros w0 H9. rewrite H9. apply H1. rewrite <- H5. auto with arith. apply H. assert (lt (length (cons 0 w0)) (S (S n))). rewrite <- H9. rewrite padd_len_le_len. rewrite H5. rewrite H3. auto with arith. simpl. auto with arith. auto with arith. rewrite <- H9. rewrite padd_len_le_len. rewrite H5. assumption. simpl. auto with arith. simpl. auto with arith. apply H1. rewrite <- H5. auto with arith. apply H. rewrite H8. rewrite <- H3. auto with arith. simpl. rewrite H8. auto. apply H2. assumption. simpl. rewrite H8. auto. apply lt_wf. apply H. rewrite <- H0. auto with arith. Qed. Lemma Acc_wlt_pad_nil : (Acc wlt_pad nil). apply Acc_intro. intros. inversion H. Qed. Lemma Acc_wlt_pad : forall (w:(list nat)), (Acc wlt_pad w). induction w using list_length_ind. apply Acc_wlt_pad_nil. apply Acc_wlt_pad_ind with (n:=n); assumption. Qed. (** De l'accessibilté pour l'ordre restreint à l'accessibilité pour l'ordre sur tout liste. *) Lemma Acc_wlt_zs_Acc_wlt : forall (n:nat) (w:(list nat)), (Acc wlt (app (zs n) w)) -> (Acc wlt w). 
intros. apply Acc_intro. intros w' H0. apply H. apply wlt_wlt_zs_right. assumption. Qed. Lemma Acc_wlt_Acc_wlt_zs : forall (n:nat) (w:(list nat)), (Acc wlt w) -> (Acc wlt (app (zs n) w)). intros. apply Acc_intro. intros w' H0. apply H. apply wlt_zs_wlt_right with (n:=n). assumption. Qed. Lemma Acc_wlt_pad_Acc_wlt : forall (w:(list nat)), (Acc wlt_pad w) -> (Acc wlt w). intros. elim H. intros w' H0 H1. apply Acc_intro. intros w'' H2. elim (nat_compare_case (length w'') (length w')). (* #w'' < #w' *) intro. apply Acc_wlt_zs_Acc_wlt with (n:=(dist w' w'')). apply H1. apply wlt_wlt_pad_zs; assumption. (* #w'' = #w4 \/ #w' < #w'' *) intro. elim H3. (* #w'' = #w' *) intro. apply H1. apply wlt_wlt_pad; assumption. (* #w' < #w'' *) intro. elim (wlt_gt_length w'' w' H2 H4). intros a H5. elim H5. intro w0. intro. decompose [and] H6. rewrite H7. apply Acc_wlt_Acc_wlt_zs. apply H1. apply wlt_wlt_pad; assumption. Qed. (** L'ordre sur toute liste est bien fondé ! *) Theorem Acc_wlt : forall (w:(list nat)), (Acc wlt w). intro. apply Acc_wlt_pad_Acc_wlt. apply Acc_wlt_pad. Qed. (** Sur wlt *) Lemma wlt_len_gen : forall (w1 w2:list nat) (a:nat), (lt (length w1) (length (cons (S a) w2))) -> (wlt w1 (cons (S a) w2)). induction w1. intros. apply wlt_nil. intros. destruct a. apply wlt_0_w. apply IHw1. apply Nat.lt_trans with (m:=(length (cons 0 w1))). auto with arith. assumption. apply wlt_len. auto with arith. Qed. Lemma wlt_lt_gen : forall (a1 a2:nat) (w1 w2:list nat), (length w1) = (length w2) -> (lt a1 a2) -> (wlt (cons a1 w1) (cons a2 w2)). intros. destruct a2. exfalso. apply (Nat.nlt_0_r a1). assumption. destruct a1. apply wlt_0_w. apply wlt_len_gen. rewrite H. auto with arith. apply wlt_lt; auto with arith. Qed. Lemma wlt_wlt_gen : forall (a:nat) (w1 w2:list nat), (length w1) = (length w2) -> (wlt w1 w2) -> (wlt (cons a w1) (cons a w2)). destruct a. intros. apply wlt_0_w. apply wlt_w_0. assumption. intros. apply wlt_wlt; assumption. Qed. Lemma wlt_wf_ind : forall (P: (list nat) -> Prop), (forall (w1:list nat), (forall (w2:list nat), (wlt w2 w1) -> P w2) -> P w1) -> forall (w:list nat), P w. intros. apply well_founded_ind with (R:=wlt). unfold well_founded. apply Acc_wlt. intros. apply H. assumption. Qed. (* Utilitaire pour la définition des ordres *) Definition make_mwlt (A:Set) (m : A -> list nat) (a1 a2:A) := (wlt (m a1) (m a2)). (** Un ordre basé sur une mesure ordinale est bien fondé *) Lemma Acc_wlt_eq : forall (A:Set) (m: A -> list nat) (w:list nat) (x:A) , w = (m x) -> (Acc (fun x1 x2 : A => wlt (m x1) (m x2)) x). induction w using wlt_wf_ind. intros. apply Acc_intro. intros. apply H with (w2:=(m y)). rewrite H0. assumption. trivial. Qed. Lemma Acc_mwlt : forall (A:Set) (m: A -> list nat), forall (x:A), (Acc (fun x1 x2 => (wlt (m x1) (m x2))) x). intros. apply Acc_wlt_eq with (w:=(m x)). trivial. Qed. (** Applications avec Program Fixpoint *) Require Coq.Program.Wf. (* Tactique pour les preuves de bonne fondation. *) Ltac by_Acc_mwlt mwlt := unfold Wf.MR; unfold well_founded; intros; unfold mwlt; apply Acc_mwlt. (* Ordre lexicographique sur les entiers *) Definition wm_natxnat (xy:nat*nat) := match xy with (x,y) => (cons x (cons y nil)) end. Definition lex_natxnat := (make_mwlt (nat*nat) wm_natxnat). Lemma lex_natxnat_fst : forall (x1 y1 x2 y2:nat), (lt x1 x2) -> (lex_natxnat (x1,y1) (x2,y2)). intros. unfold lex_natxnat. unfold make_mwlt. simpl. apply wlt_lt_gen; auto. Qed. Lemma lex_natxnat_snd : forall (x y1 y2:nat), (lt y1 y2) -> (lex_natxnat (x,y1) (x,y2)). intros. unfold lex_natxnat. 
unfold make_mwlt. simpl. apply wlt_wlt_gen. auto. apply wlt_lt_gen; auto. Qed. Program Fixpoint ack_like (xy:nat*nat) {wf lex_natxnat xy} : nat := match xy with (0, y) => (S y) | (S x, 0) => (ack_like (x, S 0)) | (S x, S y) => (ack_like (x, (x + y))) + (ack_like (S x, y)) end. Obligation 1. apply lex_natxnat_fst. auto with arith. Qed. Obligation 2. apply lex_natxnat_fst. auto with arith. Qed. Obligation 3. apply lex_natxnat_snd. auto with arith. Qed. Obligation 4. by_Acc_mwlt lex_natxnat. Defined. Program Fixpoint ack (xy:nat*nat) {wf lex_natxnat xy} : nat := match xy with (0, y) => (S y) | (S x, 0) => (ack (x, S 0)) | (S x, S y) => (ack (x, ack (S x, y))) end. Obligation 1. apply lex_natxnat_fst. auto with arith. Qed. Obligation 2. apply lex_natxnat_snd. auto with arith. Qed. Obligation 3. apply lex_natxnat_fst. inversion Heq_xy. auto with arith. Qed. Obligation 4. by_Acc_mwlt lex_natxnat. Defined. (* Ordre lexicographique sur les longueurs des listes *) Definition wm_listxlist (A:Set) (xys: list A * list A) := match xys with (xs,ys) => (wm_natxnat (length xs, length ys)) end. Definition lex_listxlist (A:Set) := (make_mwlt (list A * list A) (wm_listxlist A)). Parameter ltb : nat -> nat -> bool. Program Fixpoint merge (xys: list nat * list nat) {wf (lex_listxlist nat) xys} : list nat := match xys with (nil, ys) => ys | (xs, nil) => xs | (cons x xs, cons y ys) => if (ltb x y) then (cons x (merge (xs, (cons y ys)))) else (cons y (merge ((cons x xs), ys))) end. Obligation 1. unfold lex_listxlist. unfold make_mwlt. simpl. apply wlt_lt_gen; auto with arith. Qed. Obligation 2. unfold lex_listxlist. unfold make_mwlt. simpl. apply wlt_wlt_gen. auto. apply wlt_lt_gen; auto with arith. Qed. Obligation 4. by_Acc_mwlt lex_natxnat. Defined. (* Ordre sur les listes d'entiers: ordre lexicographique sur la taille et le premier élément *) Definition m_list (xs:list nat) := match xs with nil => nil | (cons x xs) => (cons (length (cons x xs)) (cons x nil)) end. Definition lt_list := (make_mwlt (list nat) m_list). Program Fixpoint sum_list (xs:list nat) {wf lt_list xs} : nat := match xs with nil => 0 | (cons 0 xs) => (sum_list xs) | (cons (S x) xs) => S (sum_list (cons x xs)) end. Obligation 1. unfold lt_list. unfold make_mwlt. simpl. destruct xs. simpl. apply wlt_nil. simpl. apply wlt_lt_gen; auto with arith. Qed. Obligation 2. unfold lt_list. unfold make_mwlt. simpl. apply wlt_wlt_gen. auto. apply wlt_lt_gen; auto with arith. Qed. Obligation 3. by_Acc_mwlt lex_natxnat. Defined. (* Analogue sur les listes de listes *) Definition m_listlist (A:Set) (xss : list (list A)) := match xss with nil => nil | (cons xs _) => (cons (length xss) (cons (length xs) nil)) end. Definition lt_listlist (A:Set) (xss yss : list (list A)) := (wlt (m_listlist A xss) (m_listlist A yss)). Parameter A:Set. Program Fixpoint list_concat (xss : list (list A)) {wf (lt_listlist A) xss} : list A := match xss with nil => nil | (cons nil xss) => (list_concat xss) | (cons (cons x xs) xss) => (cons x (list_concat (cons xs xss))) end. Obligation 1. unfold lt_listlist. destruct xss. simpl. apply wlt_nil. simpl. apply wlt_lt_gen; auto with arith. Qed. Obligation 2. unfold lt_listlist. simpl. apply wlt_wlt_gen. auto. apply wlt_lt_gen; auto with arith. Qed. Obligation 3. by_Acc_mwlt lex_natxnat. Defined. (* Sur la longueur des listes *) Definition mw_list (A:Set) (xs: list A) := (cons (length xs) nil). Definition lt_len_list (A:Set) := (make_mwlt (list A) (mw_list A)). 
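(* A small check, added as an illustration (not part of the original file): for
   [lt_len_list], a strictly shorter list is below a longer one. *)
Example lt_len_list_example : lt_len_list nat (cons 1 nil) (cons 1 (cons 2 nil)).
Proof. unfold lt_len_list, make_mwlt, mw_list. simpl. apply wlt_lt_gen. reflexivity. auto with arith. Qed.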
Program Fixpoint bubble (xs:list nat) {wf (lt_len_list nat) xs} : list nat := match xs with nil => nil | (cons x nil) => (cons x nil) | (cons x1 (cons x2 xs)) => if (ltb x1 x2) then (cons x1 (bubble (cons x2 xs))) else (cons x2 (bubble (cons x1 xs))) end. Obligation 1. unfold lt_len_list. unfold make_mwlt. unfold mw_list. simpl. apply wlt_lt_gen; auto with arith. Qed. Obligation 2. unfold lt_len_list. unfold make_mwlt. unfold mw_list. simpl. apply wlt_lt_gen; auto with arith. Qed. Obligation 3. by_Acc_mwlt lt_len_list. Defined. (* Le peigne *) Inductive btree (A:Set) : Set := Empty : (btree A) | Node : (btree A) -> A -> (btree A) -> (btree A). Arguments Empty {A}. Arguments Node [A] _ _ _. Fixpoint btree_size (A:Set) (bt:btree A) := match bt with Empty => 0 | (Node bt1 x bt2) => S (plus (btree_size A bt1) (btree_size A bt2)) end. Definition m_btree (A:Set) (bt:btree A) := match bt with Empty => nil | (Node bt1 x bt2) => (cons (btree_size A bt) (cons (btree_size A bt1) nil)) end. Definition lt_btree (A:Set) (bt1 bt2:btree A) := (wlt (m_btree A bt1) (m_btree A bt2)). Program Fixpoint to_list (bt:btree A) {wf (lt_btree A) bt} : list A := match bt with Empty => nil | (Node Empty x bt) => (cons x (to_list bt)) | (Node (Node bt1 x1 bt2) x2 bt3) => (to_list (Node bt1 x1 (Node bt2 x2 bt3))) end. Obligation 1. unfold lt_btree. destruct bt. simpl. apply wlt_nil. simpl. apply wlt_lt_gen; auto with arith. Qed. Obligation 2. unfold lt_btree. simpl. rewrite <-Nat.add_succ_comm. simpl. rewrite Nat.add_assoc. apply wlt_wlt_gen. auto. apply wlt_lt_gen; auto with arith. Qed. Obligation 3. by_Acc_mwlt lt_btree. Defined. (* Theory *) Axiom g1 : nat -> nat. Axiom g2 : nat -> nat -> nat. Axiom g3 : nat -> nat -> nat. Axiom g4 : nat -> nat -> nat. Axiom g5 : nat -> nat -> nat -> nat. Axiom g6 : nat -> nat -> nat -> nat. Axiom g7 : nat -> nat -> nat -> nat. Axiom h1 : nat -> nat -> nat -> nat. Axiom h2 : nat -> nat -> nat -> nat. Axiom h3 : nat -> nat -> nat -> nat -> nat -> nat. Definition wm_nat3 (xyz:nat * nat * nat) := match xyz with (0, y, 0) => nil | (0, y, S z) => (cons (S z) nil) | (S x, 0, z) => (cons (S x) (cons 0 nil)) | (S x, S y, z) => (cons (S x) (cons (S y) nil)) end. Definition rlex_nat3 := (make_mwlt (nat * nat * nat) wm_nat3). Program Fixpoint f (xyz : nat * nat * nat) {wf rlex_nat3 xyz} : nat := match xyz with (0, y, 0) => (g1 y) | (0, y, S z) => (h1 y z (f (0, (g2 y z), z))) | (S x, 0, z) => (h2 x z (f (x, (g3 x z), (g4 x z)))) | (S x, S y, z) => (h3 x y z (f (x, (g5 x y z), (g6 x y z))) (f (S x, y, (g7 x y z)))) end. Lemma rlex_nat3_1 : forall (y z m : nat), (rlex_nat3 (0, y, z) (0, m, S z)). intros. unfold rlex_nat3. unfold make_mwlt. destruct z. simpl. apply wlt_nil. simpl. apply wlt_lt; auto with arith. Qed. Lemma rlex_nat3_2 : forall (x z m1 m2 : nat), (rlex_nat3 (x, m1, m2) (S x, 0, z)). unfold rlex_nat3. unfold make_mwlt. destruct x. destruct m2. simpl. apply wlt_nil. simpl. apply wlt_len; auto with arith. intros. destruct m1. simpl. apply wlt_lt; auto with arith. simpl. apply wlt_lt; auto with arith. Qed. Lemma rlex_nat3_3 : forall (x y z m1 m2 : nat), (rlex_nat3 (x, m1, m2) (S x, S y, z)). unfold rlex_nat3. unfold make_mwlt. destruct x. intros. destruct m2. simpl. apply wlt_nil. simpl. apply wlt_len; auto with arith. intros. destruct m1. simpl. apply wlt_lt; auto with arith. simpl. apply wlt_lt; auto with arith. Qed. Lemma rlex_nat3_4 : forall (x y z m : nat), (rlex_nat3 (S x, y, m) (S x, S y, z)). unfold rlex_nat3. unfold make_mwlt. destruct y. intros. simpl. apply wlt_wlt. 
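(* The heads agree ([S x]), so [wlt_wlt] leaves the equal-length side condition
   and the comparison of the tails. *)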
auto with arith. simpl. apply wlt_0_w. apply wlt_nil. intros. simpl. apply wlt_wlt. auto with arith. apply wlt_lt; auto with arith. Qed. Obligation 1. apply rlex_nat3_1. Qed. Obligation 2. apply rlex_nat3_2. Qed. Obligation 3. apply rlex_nat3_3. Qed. Obligation 4. apply rlex_nat3_4. Qed. Obligation 5. by_Acc_mwlt wm_nat3. Defined. (* Bootstrap *) Parameter eqb : nat -> nat -> bool. Program Fixpoint listordi (xsys : list nat * list nat) {wf (lex_listxlist nat) xsys} : bool := match xsys with (_, nil) => false | (xs, (cons 0 ys)) => (listordi (xs, ys)) | (nil, (cons (S y) ys)) => true | ((cons 0 xs), (cons (S y) ys)) => (listordi (xs, (cons (S y) ys))) | ((cons (S x) xs), (cons (S y) ys)) => (orb (ltb (length xs) (length ys)) (andb (eqb (length xs) (length ys)) (orb (ltb x y) (listordi (xs, ys))))) end. Obligation 1. unfold lex_listxlist. unfold make_mwlt. simpl. apply wlt_wlt_gen. auto. apply wlt_lt_gen; auto with arith. Qed. Obligation 2. unfold lex_listxlist. unfold make_mwlt. simpl. apply wlt_lt_gen; auto with arith. Qed. Obligation 3. unfold lex_listxlist. unfold make_mwlt. simpl. apply wlt_lt_gen; auto with arith. Qed. Obligation 4. by_Acc_mwlt lex_natxnat. Defined. (* Dershowitz/Manna: "counting tips of binary trees" *) Fixpoint list_btree_size (A:Set) (bts:list (btree A)) : nat := match bts with nil => 0 | (cons bt bts) => (plus (btree_size A bt) (list_btree_size A bts)) end. Definition wm_list_btree (A:Set) (bts:list (btree A)) : (list nat) := (cons (list_btree_size A bts) (cons (length bts) nil)). Definition lt_list_btree (A:Set) := (make_mwlt (list (btree A)) (wm_list_btree A)). Program Fixpoint count_tips (bts:(list (btree A))) {wf (lt_list_btree A) bts} : nat := match bts with nil => 0 | (cons Empty bts) => S (count_tips bts) | (cons (Node bt1 x bt2) bts) => (count_tips (cons bt1 (cons bt2 bts))) end. Obligation 1. unfold lt_list_btree. unfold make_mwlt. unfold wm_list_btree. simpl. apply wlt_wlt_gen. auto. apply wlt_lt_gen; auto with arith. Qed. Obligation 2. unfold lt_list_btree. unfold make_mwlt. unfold wm_list_btree. apply wlt_lt_gen. auto. simpl. rewrite Nat.add_assoc. auto with arith. Qed. Obligation 3. by_Acc_mwlt lt_list_btree. Defined. Print Assumptions count_tips. Coq-Equations-1.3.1-8.20/examples/polynomials.v000066400000000000000000001076721463127417400211720ustar00rootroot00000000000000(** printing elimination %\coqdoctac{elimination}% *) (** printing noconf %\coqdoctac{noconf}% *) (** printing simp %\coqdoctac{simp}% *) (** printing by %\coqdockw{by}% *) (** printing rec %\coqdockw{rec}% *) (** printing Coq %\Coq{}% *) (** printing funelim %\coqdoctac{funelim}% *) (** printing Derive %\coqdockw{Derive}% *) (** printing Signature %\coqdocclass{Signature}% *) (** printing Subterm %\coqdocclass{Subterm}% *) (** printing NoConfusion %\coqdocclass{NoConfusion}% *) (** * Polynomials Polynomials and a reflexive tactic for solving boolean goals (using heyting or classical boolean algebra). Original version by Rafael Bocquet, 2016. Updated to use Equations for all definitions by M. Sozeau, 2016-2017. If running this interactively you can ignore the printing and hide directives which are just used to instruct coqdoc. *) (* begin hide *) Require Import Program.Basics Program.Tactics. From Equations Require Import Equations. Require Import ZArith Lia. Require Import Psatz. Require Import Nat. Require Import Coq.Vectors.VectorDef. Set Keyed Unification. Notation vector := Vector.t. Arguments nil {A}. Arguments cons {A} _ {n}. Derive Signature for vector eq. 
Coercion Bool.Is_true : bool >-> Sortclass. Notation pack := Signature.signature_pack. Lemma Is_true_irrel (b : bool) (p q : b) : p = q. Proof. destruct b. destruct p, q. reflexivity. destruct p. Defined. #[local] Hint Resolve Is_true_irrel : core. Check Zpos. Check Zneg. Check positive. Check NoConfusion. About Signature. Check Signature.signature_pack. (* end hide *) (** We start with a simple definition deciding if some integer is equal to [0] or not. Integers are encoded using an inductive type [Z] with three constructors [Z0], [Zpos] and [Zneg], the latter two taking [positive] numbers as arguments. There is a single representant of [0] which we discriminate here. The second clause actually captures both the [Zpos] and [Zneg] constructors. *) Equations IsNZ (z : Z) : bool := IsNZ Z0 := false; IsNZ _ := true. (** The specification of this test is that it returns true iff the variable is indeed different from [0] w.r.t. the standard Leibniz equality. We elide a simple proof by case analysis. Note that we use an implicit coercion from [bool] to [Prop] here, as is usual when doing boolean reflection. *) Lemma IsNZ_spec z : IsNZ z <-> (z <> 0)%Z. Proof. funelim (IsNZ z); unfold not; split; intros; (discriminate || contradiction || constructor). Qed. (** *** Multivariate polynomials Using an indexed inductive type, we ensure that polynomials of %$\mathbb{Z}[(X_i)_{i \in \mathbb{N}}]$% have a unique representation. The first index indicates that the polynom is null. The second index gives the number of free variables. *) Inductive poly : bool -> nat -> Type := | poly_z : poly true O | poly_c (z : Z) : IsNZ z -> poly false O | poly_l {n b} (Q : poly b n) : poly b (S n) | poly_s {n b} (P : poly b n) (Q : poly false (S n)) : poly false (S n). (** - [poly_z] represents the null polynomial. - [poly_c c] represents the constant polynomial [c] where [c] is non-zero (i.e. has a proof of [IsNZ c]). - [poly_l n Q] represents the injection of [Q], a polynomial on [n] variables, as a polynomial on [n+1] variables. - Finally, [poly_s P Q : poly _ (S n)] represents $P + X_n * Q$ where [P] cannot mention the variable $X_n$ but [Q] can mention the variables up to and including $X_n$, and the multiplication is not trivial as [Q] is non-null. These indices enforce a canonical representation by ordering the multiplications of the variables. A similar encoding is actually used in the [ring] tactic of [Coq]. *) Derive Signature NoConfusion NoConfusionHom for poly. Derive Subterm for poly. (** In addition to the usual eliminators of the inductive type generated by [Coq], we automatically derive a few constructions on this [poly] datatype, and the [mono] datatype that follows, that will be used by the [Equations] command: - Its [Signature]: as described earlier %(\S \ref{sec:deppat})%, this is the packing of a polynomial with its two indices, a boolean and a natural number in this case. - Its [NoConfusion] property used to simplify equalities between constructors of the [poly] type (equation %\ref{eqn:noconf}%). - Finally, its [Subterm] relation, to be used when performing well-founded recursion on [poly]. *) (** *** Monomials Monomials represent parts of polynoms, and one can compute the coefficient constant by which each monomial is multiplied in a given polynom. Again the index of a [mono] gives the number of its free variables. *) Inductive mono : nat -> Type := | mono_z : mono O | mono_l : forall {n}, mono n -> mono (S n) | mono_s : forall {n}, mono (S n) -> mono (S n). 
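(** For instance (an illustration added here, not part of the original file), with a
    single variable [X0]: [mono_l mono_z] is the constant monomial, one [mono_s] on
    top of it stands for [X0], and each further [mono_s] multiplies by [X0] again. *)
Example mono_X0 : mono 1 := mono_s (mono_l mono_z).
Example mono_X0_squared : mono 1 := mono_s mono_X0.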
Derive Signature NoConfusion NoConfusionHom Subterm for mono. (** Our first interesting definition computes the coefficient in [Z] by which a monomial [m] is multiplied in a polynomial [p]. *) Equations get_coef {n} (m : mono n) {b} (p : poly b n) : Z by wf (pack m) mono_subterm := get_coef mono_z poly_z := 0%Z; get_coef mono_z (poly_c z _) := z; get_coef (mono_l m) (poly_l p) := get_coef m p; get_coef (mono_l m) (poly_s p _) := get_coef m p; get_coef (mono_s m) (poly_l _) := 0%Z; get_coef (mono_s m) (poly_s p1 p2) := get_coef m p2. (** The definition can be done using either the usual structural recursion of [Coq] or well-founded recursion. If we use structural recursion, the guardness check might not be able to verify the automatically generated proof that the function respects its graph, as it involves too much rewriting due to dependent pattern-matching. We could prove it using a dependent induction instead of using the raw fixpoint combinator as the recursion is on direct subterms of the monomial, but in general it could be arbitrarily complicated, so we present a version allowing deep pattern-matching and recursion. Note that this means we lose the definitional behavior of [get_coef] during proofs on open terms, but this can advantageously be replaced using explicit [rewrite] calls, providing much more control over simplification than the reduction tactics, especially in presence of recursive functions. The [get_coef] function still uses no axioms, so it can be used to compute as part of a reflexive tactic for example. We want to do recursion on the (dependent) [m : mono n] argument, using the derived [mono_subterm] relation, which expects an element in the signature of [mono], [{ n : nat & mono n }], so we use [pack m] to lift [m] into its signature type ([pack] is just an abbreviation for the [signature_pack] overloaded constant defined in %\S \ref{sec:deppat}%). The rest of the definition is standard: to fetch a monomial coefficient, we simultaneously pattern-match on the monomial and polynomial. Note that many cases are impossible due to the invariants enforced in [poly] and [mono]. For example [mono_z] can only match polynomials built from [poly_z] or [poly_c], etc. *) (** *** Two detailed proofs The monomial decomposition is actually a complete characterization of a polynomial: two polynomials with the same coefficients for every monomial are the same. *) (** To show this, we need a lemma that shows that every non-null polynomial, has a monomial with non-null coefficient: this proof is done by dependent induction on the polynomial [p]. Note that the index of [p] rules out the [poly_z] case. *) Lemma poly_nz {n} (p : poly false n) : exists m, IsNZ (get_coef m p). Proof with (autorewrite with get_coef; auto). intros. depind p. exists mono_z... destruct IHp. exists (mono_l x)... destruct IHp2. exists (mono_s x)... Qed. Notation " ( x ; p ) " := (existT _ x p). Theorem get_coef_eq {n} b1 b2 (p1 : poly b1 n) (p2 : poly b2 n) : (forall (m : mono n), get_coef m p1 = get_coef m p2) -> (b1 ; p1) = (b2 ; p2) :> { null : _ & poly null n}. Proof with (simp get_coef in *; auto). (** Throughout the proof, we use the [simp] tactic defined by %\Equations% which is a wrapper around [autorewrite] using the hint database associated to the constant [get_coef]: the database contains the defining equations of [get_coef] as rewrite rules that can be used to simplify calls to [get_coef] in the goal. *) intros Hcoef. 
induction p1 as [ | z Hz | n b p1 | n b p1 IHp q1 IHq ] in b2, p2, Hcoef |- *; [dependent elimination p2 as [poly_z | poly_c z i] | dependent elimination p2 as [poly_z | poly_c z' i'] | dependent elimination p2 as [@poly_l n b' p2 | @poly_s n b' p2 q2] ..]. all:(intros; try rename n0 into n; auto; try (specialize (Hcoef mono_z); simp get_coef in Hcoef; subst z; (elim i || elim Hz || ltac:(repeat f_equal; auto)); fail)). - specialize (IHp1 _ p2). forward IHp1. intro m. specialize (Hcoef (mono_l m))... clear Hcoef. (** We first do an induction on [p1] and then eliminate (dependently) [p2], the first two branches need to consider variable-closed [p2]s while the next two branches have [p2 : poly _ (S n)], hence the [poly_l] and [poly_s] patterns. The elided rest of the tactic solves simple subgoals. We now focus on the case for [poly_l] on both sides. After some simplifications of the induction hypothesis using the [Hcoef] hypothesis, we get to the following goal: [[ (b, b' : bool) (n : nat) (p1 : poly b n) (p2 : poly b' n) IHp1 : (b; p1) = (b'; p2) ============================ (b; poly_l p1) = (b'; poly_l p2) ]] The [IHp1] hypothesis, as a general equality between dependent pairs can again be eliminated dependently to substitute [b'] by [b] and [p2] by [p1] simultaneously, using [dependent elimination IHp1 as [eq_refl]], leaving us with a trivial subgoal. *) (* begin hide *) dependent elimination IHp1 as [eq_refl]. reflexivity. - destruct (poly_nz q2) as [m HNZ]. specialize (Hcoef (mono_s m))... rewrite <- Hcoef in HNZ; elim HNZ. - destruct (poly_nz q1) as [m HNZ]. specialize (Hcoef (mono_s m))... rewrite Hcoef in HNZ; elim HNZ. - forward (IHq _ q2). intro m. specialize (Hcoef (mono_s m))... apply f_equal. forward (IHp _ p2). intro. specialize (Hcoef (mono_l m))... depelim IHp. now depelim IHq. Qed. (* end hide *) (** The next step is to give an evaluation semantics to polynomials. We program [eval p v] where [v] is a valuation in [Z] for all the variables in [p : poly _ n]. *) Equations eval {n} {b} (p : poly b n) (v : Vector.t Z n) : Z := eval poly_z nil := 0%Z; eval (poly_c z _) nil := z; eval (poly_l p) (cons _ xs) := eval p xs; eval (poly_s p1 p2) (cons y ys) := (eval p1 ys + y * eval p2 (cons y ys))%Z. (** It is quite clear that two equal polynomials should have the same value for any valuation. To show this, we first need to prove that evaluating a null polynomial always computes to [0], whichever valuation is used. *) (* begin hide *) Check eval. Lemma poly_nz_eval' : forall {n}, (forall (p : poly false n), exists v, IsNZ (eval p v)) -> (forall (p : poly false (S n)), exists v, forall m, exists x, IsNZ x /\ (Z.abs (x * eval p (Vector.cons x v)) > Z.abs m)%Z). Proof with (simp eval). depind p. - destruct (H p) as [v Hv]. exists v; intros; exists (1 + Z.abs m)%Z... rewrite IsNZ_spec in Hv |- *. nia. - destruct (IHp2 H) as [v Hv]; exists v; intros. destruct (Hv (Z.abs (eval p1 v) + Z.abs m)%Z) as [x [Hx0 Hx1]]; exists x... split; auto. rewrite IsNZ_spec in Hx0. nia. Qed. Lemma poly_nz_eval : forall {n}, (forall (p : poly false n), exists v, IsNZ (eval p v)) /\ (forall (p : poly false (S n)), exists v, forall m, exists x, IsNZ x /\ (Z.abs (x * eval p (Vector.cons x v)) > Z.abs m)%Z). Proof with (autorewrite with eval; auto using poly_nz_eval'). depind n; match goal with | [ |- ?P /\ ?Q ] => assert (HP : P); [|split;[auto|]] end... depelim p; exists Vector.nil... - destruct IHn as [IHn1 IHn2]; depelim p. + destruct (IHn1 p) as [v Hv]; exists (Vector.cons 0%Z v)... 
+ destruct (IHn2 p2) as [v Hv]. destruct (Hv (eval p1 v)) as [x [_ Hx]]. exists (Vector.cons x v)... rewrite IsNZ_spec; nia. Qed. (* end hide *) (** This is a typical case where the proof directly follows the definition of [eval]. Instead of redoing the same case splits and induction that the function performs, we can directly appeal to its elimination principle using the [funelim] tactic. *) Lemma poly_z_eval {n} (p : poly true n) v : eval p v = 0%Z. Proof. funelim (eval p v); [ reflexivity | assumption ]. Qed. (** This leaves us with two goals as the [true] index in [p] implies that the [poly_c] and [poly_s] clauses do not need to be considered. We have to show [0 = 0] for the case [p = poly_z] and [eval q v = 0] for the [poly_l] recursive constructor, in which case the conclusion directly follows from the induction hypothesis correspondinng to the recursive call. The second subgoal is hence discharged with an [assumption] call. Addition is defined on two polynomials with the same number of variables and returns a (possibly null) polynomial with the same number of variables. We define an injection function to constructs objects in the dependent pair type [{b : bool & poly b n}]. *) Definition apoly {n b} := existT (fun b => poly b n) b. (** The definition shows the [with] feature of Equations, allowing to add a nested pattern-matching while defining the function, here in one case to inject an integer into a polynomial and in the [poly_s], [poly_s] case to inspect a recursive call. *) Notation " x .1 " := (projT1 x). Notation " x .2 " := (projT2 x). Equations plus {n} {b1} (p1 : poly b1 n) {b2} (p2 : poly b2 n) : { b : bool & poly b n } := plus poly_z poly_z := apoly poly_z; plus poly_z (poly_c y ny) := apoly (poly_c y ny); plus (poly_c x nx) poly_z := apoly (poly_c x nx); plus (poly_c x nx) (poly_c y ny) with (x + y)%Z => { | Z0 => apoly poly_z ; | Zpos z' => apoly (poly_c (Zpos z') I) ; | Zneg z' => apoly (poly_c (Zneg z') I) }; plus (poly_l p1) (poly_l p2) := apoly (poly_l (plus p1 p2).2); plus (poly_l p1) (poly_s p2 q2) := apoly (poly_s (plus p1 p2).2 q2); plus (poly_s p1 q1) (poly_l p2) := apoly (poly_s (plus p1 p2).2 q1); plus (poly_s p1 q1) (poly_s p2 q2) with plus q1 q2 => { | (false ; q3) => apoly (poly_s (plus p1 p2).2 q3); | (true ; _) => apoly (poly_l (plus p1 p2).2) }. (** The functional elimination principle can be derived all the same for [plus], allowing us to make quick work of the proof that it is a morphism for evaluation: *) Lemma plus_eval : forall {n} {b1} (p1 : poly b1 n) {b2} (p2 : poly b2 n) v, (eval p1 v + eval p2 v)%Z = eval (plus p1 p2).2 v. Proof with (simp plus eval; auto with zarith). Ltac X := (simp plus eval; auto with zarith). intros until p2. let f := constr:(fun_elim (f:=@plus)) in apply f; intros; depelim v; X; try rewrite <- H; X. - rewrite Heq in Hind. specialize (Hind (Vector.cons h v)). rewrite poly_z_eval in Hind. nia. - rewrite Heq in Hind. rewrite <- Hind. nia. Qed. #[local] Hint Rewrite <- @plus_eval : eval. (** We skip the rest of the operations definition, [poly_mult], [poly_neg] and [poly_substract]. *) Equations poly_neg {n} {b} (p : poly b n) : poly b n := poly_neg poly_z := poly_z; poly_neg (poly_c (Z.pos a) p) := poly_c (Z.neg a) p; poly_neg (poly_c (Z.neg a) p) := poly_c (Z.pos a) p; poly_neg (poly_l p) := poly_l (poly_neg p); poly_neg (poly_s p q) := poly_s (poly_neg p) (poly_neg q). Lemma neg_eval : forall {n} {b1} (p1 : poly b1 n) v, (- eval p1 v)%Z = eval (poly_neg p1) v. Proof. 
Ltac XX := (autorewrite with poly_neg plus eval; auto with zarith). depind p1; depelim v; XX. destruct z; depelim i; XX. rewrite <- IHp1_1; rewrite <- IHp1_2; nia. Qed. #[local] Hint Rewrite <- @neg_eval : eval. (** Equality can be decided using the difference of polynoms *) Lemma poly_diff_z_eq : forall {n} {b1} (p1 : poly b1 n) {b2} (p2 : poly b2 n), (plus p1 (poly_neg p2)).1 = true -> (_ ; p1) = (_; p2) :> { null : bool & poly null n }. Proof. intros. depind p1; depelim p2; auto; try (autorewrite with poly_neg plus in H; discriminate; fail). - destruct z; destruct i; autorewrite with poly_neg plus in *; discriminate. - f_equal; destruct z as [ | z | z], z0 as [ | z0 | z0 ]; depelim i; depelim i0; autorewrite with poly_neg plus in H. assert (z = z0). remember (Z.pos z + Z.neg z0)%Z as z1; destruct z1; try discriminate; simpl in H; nia. subst; auto. remember (Z.pos z + Z.pos z0)%Z as z1; destruct z1; try discriminate. remember (Z.neg z + Z.neg z0)%Z as z1; destruct z1; try discriminate. assert (z = z0). remember (Z.neg z + Z.pos z0)%Z as z1; destruct z1; try discriminate; simpl in H; nia. subst; auto. - autorewrite with poly_neg plus in H. specialize (IHp1 _ p2 H). depelim IHp1. auto. - autorewrite with poly_neg plus in H. specialize (IHp1_1 _ p2_1); specialize (IHp1_2 _ p2_2). remember (plus p1_2 (poly_neg p2_2)) as P; remember (plus p1_1 (poly_neg p2_1)) as Q. destruct P as [bP P]; destruct Q as [bQ Q]. destruct bP; destruct bQ; simpl in H; try rewrite <- HeqQ in H; try discriminate. specialize (IHp1_1 eq_refl); specialize (IHp1_2 eq_refl). depelim IHp1_1; try depelim IHp1_2; auto. Qed. (** *** Two polynomials with the same values are syntacically equal. This is shown using [poly_nz_eval]: the difference of two polynomials with the same values is null. Then use [poly_diff_z_eq] *) Theorem poly_eval_eq : forall {n} {b1} (p1 : poly b1 n) {b2} (p2 : poly b2 n), (forall v, eval p1 v = eval p2 v) -> (b1 ; p1) = (b2; p2) :> { b : bool & poly b n}. Proof. intros. remember (plus p1 (poly_neg p2)) as P; destruct P as [b P]; destruct b. - apply poly_diff_z_eq; inversion HeqP; auto. - exfalso. destruct (@poly_nz_eval n) as [H0 _]; destruct (H0 P) as [v H1]. assert (eval P v = eval (plus p1 (poly_neg p2)).2 v); [inversion HeqP; auto|]. rewrite H2 in H1; autorewrite with eval in H1; rewrite (H v) in H1. rewrite IsNZ_spec in H1. nia. Qed. (** *** Multiplication of polynomials This definition is a bit more laborious as there are inductive cases to treat on the second argument: it is not a simple structurally recursive definition. *) (** The [poly_l_or_s] definition is a smart constructor to construct [p + X * q] when [q] can be null. *) Equations poly_l_or_s {n} {b1} (p1 : poly b1 n) {b2} (p2 : poly b2 (S n)) : {b : bool & poly b (S n)} := poly_l_or_s p1 (b2 := true) p2 := apoly (poly_l p1); poly_l_or_s p1 (b2 := false) p2 := apoly (poly_s p1 p2). Lemma poly_l_or_s_eval : forall {n} {b1} (p1 : poly b1 n) {b2} (p2 : poly b2 (S n)) h v, eval (poly_l_or_s p1 p2).2 (Vector.cons h v) = (eval p1 v + h * eval p2 (Vector.cons h v))%Z. Proof. intros. funelim (poly_l_or_s p1 p2); simp eval; trivial. rewrite poly_z_eval. nia. Qed. #[local] Hint Rewrite @poly_l_or_s_eval : eval. (* [mult (poly_l p) q = mult_l q (mult p)] *) Equations mult_l {n} {b2} (p2 : poly b2 (S n)) (m : forall {b2} (p2 : poly b2 n), { b : bool & poly b n }) : { b : bool & poly b (S n) } := mult_l (poly_l p2) m := apoly (poly_l (m _ p2).2); mult_l (poly_s p1 p2) m := poly_l_or_s (m _ p1).2 (mult_l p2 m).2. 
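(** Before going on with multiplication, here is a small illustration (a hypothetical sanity check, not part of the original development) of the [apoly]/[.2] packaging and of the evaluation morphism lemmas [plus_eval] and [neg_eval] proved above: the sum of a polynomial and its negation always evaluates to [0]. This is the semantic counterpart of deciding equality through differences, as done in [poly_diff_z_eq]. *)

Example plus_neg_eval {n b} (p : poly b n) (v : Vector.t Z n) :
  eval (plus p (poly_neg p)).2 v = 0%Z.
Proof. rewrite <- plus_eval, <- neg_eval. nia. Qed.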
(* [mult (poly_s p1 p2) q = mult_s q (mult p1) (mult p2)] *) Equations mult_s {n} {b2} (p2 : poly b2 (S n)) (m1 : forall {b2} (p2 : poly b2 n), { b : bool & poly b n }) (m2 : forall {b2} (p2 : poly b2 (S n)), { b : bool & poly b (S n) }) : { b : bool & poly b (S n) } := mult_s (poly_l p1) m1 m2 := poly_l_or_s (m1 _ p1).2 (m2 _ (poly_l p1)).2; mult_s (poly_s p2 q2) m1 m2 := poly_l_or_s (m1 _ p2).2 (plus (m2 _ (poly_l p2)).2 (mult_s q2 m1 m2).2).2. (** Finally, the multiplication definition. This relies on the guard condition being able to unfold the definitions of [mult_l] and [mult_s] to see that multiplication is well-guarded. *) Equations mult n b1 (p1 : poly b1 n) b2 (p2 : poly b2 n) : { b : bool & poly b n } := mult ?(0) ?(true) poly_z b2 _ := apoly poly_z; mult ?(0) ?(false) (poly_c x nx) ?(true) poly_z := apoly poly_z; mult ?(0) ?(false) (poly_c x nx) ?(false) (poly_c y ny) := match (x * y)%Z with | Z0 => apoly poly_z | Zpos z' => apoly (poly_c (Zpos z') I) | Zneg z' => apoly (poly_c (Zneg z') I) end; mult ?(S n) ?(b) (@poly_l n b p1) b2 q := mult_l q (mult _ _ p1); mult ?(S n) ?(false) (@poly_s n b p1 q1) b2 q := mult_s q (mult _ _ p1) (mult _ _ q1). Arguments mult {n} {b1} p1 {b2} p2. (** The proof that multiplication is a morphism for evaluation works as usual by induction, using previously proved lemma to get equations in [Z] that the [nia] tactic can handle. *) Lemma mult_eval : forall {n} {b1} (p1 : poly b1 n) {b2} (p2 : poly b2 n) v, (eval p1 v * eval p2 v)%Z = eval (mult p1 p2).2 v. Proof with (autorewrite with mult mult_l mult_s eval; auto with zarith). Ltac Y := (autorewrite with mult mult_l mult_s eval; auto with zarith). depind p1; try (depind p2; intros; depelim v; Y; simpl; Y; fail). depind p2; intros; depelim v; Y; simpl; Y; destruct (z * z0)%Z; simpl... - assert (mult_l_eval : forall {b2} (q : poly b2 (S n)) v h, eval (mult_l q (@mult _ _ p1)).2 (Vector.cons h v) = (eval q (Vector.cons h v) * eval p1 v)%Z). + depind q; intros; Y; rewrite <- IHp1... rewrite IHq2; auto; nia. + intros; depelim v; Y; simpl; Y; rewrite mult_l_eval... - assert (mult_s_eval : forall {b2} (q : poly b2 (S n)) v h, let mp := mult_s q (@mult _ _ p1_1) (@mult _ _ p1_2) in eval mp.2 (Vector.cons h v) = (eval q (Vector.cons h v) * (eval p1_1 v + h * eval p1_2 (Vector.cons h v)))%Z). + depind q; intros; Y; simpl; Y. rewrite <- IHp1_1, <- IHp1_2; Y; nia. rewrite <- IHp1_1. rewrite IHq2, <- IHp1_2; auto; Y; nia. + intros; depelim v; Y; simpl; Y; rewrite mult_s_eval... Qed. #[local] Hint Rewrite <- @mult_eval : eval. (** ** Boolean formulas Armed with these definitions, we can define a reflexive tactic that solves boolean tautologies using a translation into polynomials on [Z]. We start with the syntax of our formulas, including variables of some type [A], constants, conjunction disjunction and negation: *) Inductive formula {A} := | f_var : A -> formula | f_const : bool -> formula | f_and : formula -> formula -> formula | f_or : formula -> formula -> formula | f_not : formula -> formula. (** The have a straightforward evaluation semantics to booleans, assuming an interpretation of the variables into booleans. *) Equations eval_formula {A} (v : A -> bool) (f : @formula A) : bool := eval_formula f (f_var v) := f v; eval_formula f (f_const b) := b; eval_formula f (f_and a b) := andb (eval_formula f a) (eval_formula f b); eval_formula f (f_or a b) := orb (eval_formula f a) (eval_formula f b); eval_formula f (f_not v) := negb (eval_formula f v). 
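(** A small usage sketch (hypothetical, not in the original file): on a concrete formula, the defining equations stored in the [eval_formula] rewrite database let [simp] compute the evaluation symbolically, leaving only a boolean case analysis. *)

Example eval_formula_and (v : nat -> bool) (x : nat) :
  eval_formula v (f_and (f_var x) (f_const true)) = v x.
Proof. simp eval_formula. destruct (v x); reflexivity. Qed.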
(** [close_formula] allows one to obtain a formula with a fixed, finite number of free variables from a formula with variables in [nat]. *) Definition close_formula : @formula nat -> { n : nat & forall m, m >= n -> @formula (Fin.t m) }. Proof. intro f; depind f. - unshelve eapply (S a ; _); intros m p; apply f_var. apply @Fin.of_nat_lt with (p := a). lia. - exact (O ; (fun _ _ => f_const b)). - destruct IHf1 as [n1 e1]; destruct IHf2 as [n2 e2]. apply (existT _ (max n1 n2)); intros m p; apply f_and; [apply e1|apply e2]; nia. - destruct IHf1 as [n1 e1]; destruct IHf2 as [n2 e2]. apply (existT _ (max n1 n2)); intros m p; apply f_or; [apply e1|apply e2]; nia. - destruct IHf as [n e]. apply (existT _ n); intros m p; apply f_not; apply e; nia. Defined. Definition close_formulas (f1 f2 : @formula nat) : { n : nat & (@formula (Fin.t n) * @formula (Fin.t n))%type }. Proof. destruct (close_formula f1) as [n1 e1]; destruct (close_formula f2) as [n2 e2]. apply (existT _ (max n1 n2)); apply pair; [apply e1|apply e2]; nia. Defined. (** Definitions of the constant 0 [poly_zero] and 1 [poly_one] polynomials along with variable polynomials [poly_var] and the corresponding evaluation lemmas. *) Fixpoint poly_zero {n} : poly true n := match n with | O => poly_z | S m => poly_l poly_zero end. Lemma zero_eval : forall n v, 0%Z = eval (@poly_zero n) v. Proof. intros; rewrite poly_z_eval; auto. Qed. #[local] Hint Rewrite <- @zero_eval : eval. Fixpoint poly_one {n} : poly false n := match n with | O => poly_c 1%Z I | S m => poly_l poly_one end. Lemma one_eval : forall n v, 1%Z = eval (@poly_one n) v. Proof. depind n; depelim v; intros; simpl; autorewrite with eval; auto. Qed. #[local] Hint Rewrite <- @one_eval : eval. (** We define an injection of variables represented as indices in [Fin.t n] into non-null polynomials in [n] variables: *) Equations poly_var {n} (f : Fin.t n) : poly false n := poly_var Fin.F1 := poly_s poly_zero poly_one; poly_var (Fin.FS f) := poly_l (poly_var f). (** We can show that evaluating the resulting polynomial amounts to simply fetching the value at the corresponding index in the valuation. *) Lemma var_eval : forall n f v, Vector.nth v f = eval (@poly_var n f) v. Proof with autorewrite with poly_var eval in *; simpl; auto with zarith. induction f; depelim v; intros... Qed. #[local] Hint Rewrite <- @var_eval : eval. (** Finally, we explain our interpretation of formulas as polynomials: *) Equations poly_of_formula {n} (f : @formula (Fin.t n)) : { b : bool & poly b n } := poly_of_formula (f_var v) := apoly (poly_var v); poly_of_formula (f_const false) := apoly poly_zero; poly_of_formula (f_const true) := apoly poly_one; poly_of_formula (f_not a) := plus poly_one (poly_neg (poly_of_formula a).2); poly_of_formula (f_and a b) := mult (poly_of_formula a).2 (poly_of_formula b).2; poly_of_formula (f_or a b) := plus (poly_of_formula a).2 (plus (poly_of_formula b).2 (poly_neg (mult (poly_of_formula a).2 (poly_of_formula b).2).2)).2. (** The central theorem is that evaluating the formula in some valuation is the same as evaluating the translated polynomial. *) Theorem poly_of_formula_eval : forall {n} (f : @formula (Fin.t n)) (v : Vector.t bool n), (if eval_formula (Vector.nth v) f then 1%Z else 0%Z) = eval (poly_of_formula f).2 (Vector.map (fun x : bool => if x then 1%Z else 0%Z) v). (* begin hide *) Proof. intros. funelim (poly_of_formula f); intros; autorewrite with eval_formula poly_of_formula eval in *; trivial. - erewrite Vector.nth_map; auto.
- rewrite <- H, <- H0; destruct (eval_formula (Vector.nth v) a); destruct (eval_formula (Vector.nth v) b); auto. - rewrite <- H, <- H0; destruct (eval_formula (Vector.nth v) a); destruct (eval_formula (Vector.nth v) b); auto. - rewrite <- H; destruct (eval_formula (Vector.nth v) a); auto. Qed. (* end hide *) (** From this, we can derive that two boolean formulas are equivalent if the translated polynomials are themselves _syntactically_ equal, thanks to their canonical representation. *) Lemma correctness_heyting : forall {n} (f1 f2 : @formula (Fin.t n)), poly_of_formula f1 = poly_of_formula f2 -> forall v, eval_formula (Vector.nth v) f1 = eval_formula (Vector.nth v) f2. Proof. intros n f1 f2 H v. assert (H1 := poly_of_formula_eval f1 v); assert (H2 := poly_of_formula_eval f2 v). remember (eval_formula (Vector.nth v) f1) as b1; remember (eval_formula (Vector.nth v) f2) as b2. rewrite H in H1; rewrite <- H1 in H2. destruct b1; destruct b2; simpl in *; (discriminate || auto). Qed. (** *** Completeness For which theory do we have completeness? If you were attentive you might have guessed that the encodings of disjunction and conjunction are only complete for heyting boolean algebras but not classical boolean algebra, where negation is involutive. One can avoid this problem by doing a reduction transformation on polynomials. The interested reader can look at the development for that part. Completeness can be derived for the reducing version of the translation. *) Equations reduce_aux {n} {b1} (p1 : poly b1 n) {b2} (p2 : poly b2 (S n)) : { b : bool & poly b (S n) } := reduce_aux p1 (poly_l p2) := poly_l_or_s p1 (poly_l p2); reduce_aux p1 (poly_s p2_1 p2_2) := poly_l_or_s p1 (plus (poly_l p2_1) p2_2).2. Equations reduce {n} {b} (p : poly b n) : { b : bool & poly b n } := reduce poly_z := apoly poly_z; reduce (poly_c x y) := apoly (poly_c x y); reduce (poly_l p) := apoly (poly_l (reduce p).2); reduce (poly_s p q) := reduce_aux (reduce p).2 (reduce q).2. Theorem reduce_eval : forall {n} {b} (p : poly b n) (v : Vector.t bool n), eval p (Vector.map (fun x : bool => if x then 1%Z else 0%Z) v) = eval (reduce p).2 (Vector.map (fun x : bool => if x then 1%Z else 0%Z) v). Proof. Ltac YY := autorewrite with reduce reduce_aux eval; auto. depind p; intros; depelim v; YY. - rewrite IHp1, (IHp2 (Vector.cons h v)). remember (reduce p2) as P. destruct P as [bP P]. simpl. depelim P; simpl; YY. destruct h; nia. Qed. Inductive is_reduced : forall {b} {n}, poly b n -> Prop := | is_reduced_z : is_reduced poly_z | is_reduced_c : forall {z} {i}, is_reduced (poly_c z i) | is_reduced_l : forall {b} {n} (p : poly b n), is_reduced p -> is_reduced (poly_l p) | is_reduced_s : forall {b1} {n} (p : poly b1 n) (q : poly false n), is_reduced p -> is_reduced q -> is_reduced (poly_s p (poly_l q)) . Derive Signature for is_reduced. Lemma is_reduced_compat_plus : forall {n} {b1} (p1 : poly b1 n) (Hp1 : is_reduced p1) {b2} (p2 : poly b2 n) (Hp2 : is_reduced p2), is_reduced (plus p1 p2).2. Proof. intros. depind Hp1; depelim Hp2; autorewrite with plus; unfold apoly; cbn; try constructor; auto. remember (z+z0)%Z as Z; destruct Z; constructor. specialize (IHHp1_2 _ q0 Hp2_2). remember (plus q q0) as Q; destruct Q as [bQ Q]. destruct bQ; simpl. constructor; auto. constructor; auto. Qed. Lemma is_reduced_compat_neg : forall {n} {b1} (p1 : poly b1 n) (Hp1 : is_reduced p1), is_reduced (poly_neg p1). Proof. intros. depind Hp1; try destruct z, i; autorewrite with poly_neg; try constructor; auto. Qed. 
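(** For instance (a hypothetical sanity check, not part of the original development), the constant-one polynomial [poly_one] is already in reduced form, whatever the number of variables: *)

Example poly_one_is_reduced n : is_reduced (@poly_one n).
Proof. induction n; simpl; constructor; auto. Qed.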
Lemma is_reduced_ok : forall {b} {n} (p : poly b n), is_reduced (reduce p).2. Proof. depind p; try constructor; auto. autorewrite with reduce reduce_aux. remember (reduce p2) as P2; destruct P2 as [bP2 P2]; depelim P2. destruct bP2; simpl. constructor. auto. constructor; auto. depelim IHp2. auto. depelim IHp2. autorewrite with reduce_aux plus. unfold apoly. simpl. assert (R := is_reduced_compat_plus _ IHp2_1 _ IHp2_2). remember (plus P2_1 q) as P3; destruct P3 as [bP3 P3]. simpl. simpl in *. destruct bP3; simpl; constructor; auto. Qed. Lemma red_ok : forall {n} {b} (p : poly b n), is_reduced p -> (forall v, eval p (Vector.map (fun x : bool => if x then 1%Z else 0%Z) v) = 0%Z) -> b = true. Proof. intros n b p Hp H; depind Hp. - auto. - specialize (H Vector.nil); autorewrite with eval in H; destruct z, i; discriminate. - apply IHHp. intro v. specialize (H (Vector.cons false v)). autorewrite with eval in H. auto. - assert (b1 = true). + apply IHHp1. intro v. specialize (H (Vector.cons false v)). autorewrite with eval in H. simpl in H. rewrite Z.add_0_r in H. auto. + subst. apply IHHp2. intro v. specialize (H (Vector.cons true v)). simpl in H. autorewrite with eval in H. rewrite poly_z_eval in H. nia. Qed. (** We have completeness for this form: *) Lemma correctness_classical : forall {n} (f1 f2 : @formula (Fin.t n)), reduce (poly_of_formula f1).2 = reduce (poly_of_formula f2).2 <-> forall v, eval_formula (Vector.nth v) f1 = eval_formula (Vector.nth v) f2. Proof. intros n f1 f2; split. - intros H v. assert (H1 := poly_of_formula_eval f1 v); assert (H2 := poly_of_formula_eval f2 v). rewrite reduce_eval in H1; rewrite reduce_eval in H2. remember (eval_formula (Vector.nth v) f1) as b1; remember (eval_formula (Vector.nth v) f2) as b2. rewrite H in H1; rewrite <- H1 in H2. destruct b1; destruct b2; simpl in *; (discriminate || auto). - intros. assert ((plus (reduce (poly_of_formula f1).2).2 (poly_neg (reduce (poly_of_formula f2).2).2)).1 = true). + apply red_ok with (p := (plus (reduce (poly_of_formula f1).2).2 (poly_neg (reduce (poly_of_formula f2).2).2)).2). * auto using is_reduced_compat_plus, is_reduced_ok, is_reduced_compat_neg. * intro; autorewrite with eval. assert (H1 := poly_of_formula_eval f1 v); assert (H2 := poly_of_formula_eval f2 v). rewrite <- !reduce_eval, <- H1, <- H2, (H v); nia. + apply poly_diff_z_eq in H0. remember (reduce (poly_of_formula f1).2) as P1; destruct P1 as [bP1 P1]. remember (reduce (poly_of_formula f2).2) as P2; destruct P2 as [bP2 P2]. destruct bP1; destruct bP2; auto; simpl in H0; depelim H0; auto. Qed. (** One can check that all definitions here are axiom-free, and only the proofs which depend on unfolding lemmas use the [functional_extensionality_dep] axiom. *) (** *** Reflexive tactic From this it is possible to derive a tactic for checking equivalence of boolean formulas. We skip the standard reification machinery and check on a few examples that indeed our tactic computes. *) Ltac list_add a l := let rec aux a l n := match l with | nil => constr:((n, cons a l)) | cons a _ => constr:((n, l)) | cons ?x ?l => match aux a l (S n) with (?n, ?l) => constr:((n, cons x l)) end end in aux a l 0. Ltac vector_of_list l := match l with | nil => constr:(Vector.nil) | cons ?x ?xs => (* Recursively convert the tail so that the result is a well-typed vector. *) let xs' := vector_of_list xs in constr:(Vector.cons x xs') end.
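(** Before assembling the reflexive tactic, we can already exercise [correctness_classical] directly on a hand-written formula (a hypothetical example, not part of the original development). The excluded-middle law, on which the Heyting-level check fails (see the [Fail] in the [neg_involutive] example below), is validated here by computing and comparing the reduced polynomial translations of both sides: *)

Example excluded_middle_classical (v : Vector.t bool 1) :
  eval_formula (Vector.nth v) (f_or (f_not (f_var Fin.F1)) (f_var Fin.F1)) =
  eval_formula (Vector.nth v) (f_const true).
Proof. apply correctness_classical. vm_compute. reflexivity. Qed.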
(** Reify boolean formulas with variables in [nat] *) Ltac read_formula f l := match f with | true => constr:((@f_const nat true, l)) | false => constr:((@f_const nat false, l)) | orb ?x ?y => match read_formula x l with (?x', ?l') => match read_formula y l' with (?y', ?l'') => constr:((f_or x' y', l'')) end end | andb ?x ?y => match read_formula x l with (?x', ?l') => match read_formula y l' with (?y', ?l'') => constr:((f_and x' y', l'')) end end | negb ?x => match read_formula x l with (?x', ?l') => constr:((f_not x', l')) end | _ => match list_add f l with (?n, ?l') => constr:((f_var n, l')) end end. Ltac read_formulas x y := match read_formula x (@nil bool) with (?x', ?l) => match read_formula y l with (?y', ?l') => constr:(((x', y'), l')) end end. (** The final reflexive tactic, taking either of the correctness lemmas as argument. *) Ltac bool_tauto_with f := intros; match goal with | [ |- ?x = ?y ] => match read_formulas x y with | ((?x', ?y'), ?l) => let ln := fresh "l" in let xyn := fresh "xy" in let nn := fresh "n" in let xn := fresh "x" in let yn := fresh "y" in match vector_of_list l with ?lv => pose (ln := lv) end; pose (xyn := close_formulas x' y'); pose (n := xyn.1); pose (xn := fst xyn.2); pose (yn := snd xyn.2); cbv in xyn, n, xn, yn; assert (H : eval_formula (Vector.nth ln) xn = eval_formula (Vector.nth ln) yn); [ apply f; vm_compute; reflexivity | exact H ] end end. (** Examples *) Goal forall a b, andb a b = andb b a. bool_tauto_with @correctness_heyting. Qed. Goal forall a b, andb (negb a) (negb b) = negb (orb a b). bool_tauto_with @correctness_heyting. Qed. Goal forall a b, orb (negb a) (negb b) = negb (andb a b). bool_tauto_with @correctness_heyting. Qed. Example neg_involutive: forall a, orb (negb a) a = true. Fail bool_tauto_with @correctness_heyting. bool_tauto_with @correctness_classical. Qed. Coq-Equations-1.3.1-8.20/examples/quicksort.v000066400000000000000000000172701463127417400206420ustar00rootroot00000000000000From Equations Require Import Equations. Require Import Vector. Notation vector := t. Derive NoConfusion NoConfusionHom for vector. Set Equations Transparent. Arguments Vector.nil {A}. Arguments Vector.cons {A} _ {n}. Notation " x |:| y " := (@Vector.cons _ x _ y) (at level 20, right associativity) : vect_scope. Notation " x |: n :| y " := (@Vector.cons _ x n y) (at level 20, right associativity) : vect_scope. Notation "[]v" := Vector.nil (at level 0) : vect_scope. Local Open Scope vect_scope. Equations app {A} {n m} (v : vector A n) (w : vector A m) : vector A (n + m) := app []v w := w ; app (cons a v) w := a |:| app v w. Equations In {A n} (x : A) (v : vector A n) : Prop := In x nil := False; In x (cons a v) := (x = a) \/ In x v. Inductive All {A : Type} (P : A -> Prop) : forall {n}, vector A n -> Prop := | All_nil : All P nil | All_cons {a n} {v : vector A n} : P a -> All P v -> All P (a |:| v). Lemma All_impl {A : Type} (P Q : A -> Prop) {n} (v : vector A n) : (forall x, P x -> Q x) -> All P v -> All Q v. Proof. induction 2; constructor; auto. Qed. Derive Signature for All. Lemma All_app {A P n m} (v : vector A n) (w : vector A m) : @All A P _ v -> All P w -> All P (app v w). Proof. funelim (app v w). auto. intros. depelim H0; simp app in *. econstructor; auto. Qed. Lemma In_All {A P n} (v : vector A n) : All P v <-> (forall x, In x v -> P x). Proof. split. induction 1. intros. depelim H. auto. intros x; simpl. simp In. intuition. subst; auto. induction v; simpl; intros; auto; constructor. apply H; simp In; auto. firstorder. Qed. 
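(* A tiny sanity check, not in the original file, exercising the [In] and
   [All] predicates just defined: every element of a small concrete vector
   is bounded by 2. *)
Example All_le_two : All (fun x => x <= 2) (1 |:| 2 |:| []v).
Proof. repeat constructor. Qed.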
(* Lemma All_In_All {A P n m} (v : vector A n) (v' : vector A m) : *) (* All (fun x => In x v) v' -> All P v -> All P v'. *) (* Proof. *) (* induction 1. simpl. constructor. *) (* intros. constructor; auto. *) (* eapply In_All; eauto. *) (* Qed. *) Inductive Sorted {A : Type} (R : A -> A -> Prop) : forall {n}, vector A n -> Prop := | Sorted_nil : Sorted R nil | Sorted_cons {a n} {v : vector A n} : All (R a) v -> Sorted R v -> Sorted R (a |:| v). Import Sigma_Notations. Derive Signature for Sorted. Lemma Sorted_app {A R n m} (v : vector A n) (w : vector A m) : @Sorted A R _ v -> Sorted R w -> Sorted R (app v w). Proof. Admitted. Lemma In_app {A n m} (v : vector A n) (w : vector A m) (a : A) : In a v \/ In a w <-> In a (app v w). Proof. funelim (app v w). intuition. depelim H0. split; intros; depelim H0; cbn; simp In app in *; intuition eauto with *; simp In in *. apply H in H0. intuition. Qed. Require Import Sumbool. Notation dec x := (sumbool_of_bool x). Section QuickSort. Context {A : Type} (leb : A -> A -> bool). Context (leb_inverse : forall x y, leb x y = false -> leb y x = true). Local Definition sorted {n} (v : vector A n) := Sorted (fun x y => leb x y = true) v. Set Program Mode. Equations? filter {n} (v : vector A n) (f : A -> bool) : Σ (k : nat), { v : vector A k | k <= n /\ All (fun x => f x = true) v } := filter nil f := (0, nil); filter (cons a v') f with dec (f a) := { | left H => (_, cons a (filter v' f).2); | right H => (_, (filter v' f).2) }. Proof. split; auto. constructor. destruct filter as [n' [v'' [Hn' Hv']]]. simpl. split; auto with arith. constructor; auto. destruct filter as [n' [v'' [Hn' Hv']]]. simpl. split; auto with arith. Defined. Equations? pivot {n} (v : vector A n) (f : A -> bool) : Σ (k : nat) (l : nat) (v' : vector A k), { w : vector A l | (k + l = n)%nat /\ forall x, In x v <-> (if f x then In x v' else In x w) } := (* All (fun x => In x v /\ f x = true) v' *) (* /\ All (fun x => In x v /\ f x = false) w } } } } := *) pivot nil f := (0 , 0 , nil, nil); pivot (cons a v') f with dec (f a), pivot v' f := { | left H | (k, l, v, w) => (_ , _, cons a v, w); | right H | (k, l, v, w) => (_ , _, v, cons a w) }. Proof. split; intros; simp In; auto. intuition. destruct (f x); auto. simpl. split; auto with arith. intros x. simp In. split; intros Hx. intuition; subst; try rewrite H; intuition. specialize (proj1 (i _) H0). destruct (f x); intuition. specialize (i x). destruct (f x); intuition. split; auto with arith. intros x. constructor; simp In; intuition auto. subst. rewrite H. auto. specialize (proj1 (i _) H1). destruct (f x); intuition. specialize (i x). destruct (f x); intuition. Defined. Equations? qs {n} (l : vector A n) : { v : vector A n | sorted v /\ (forall x, In x l <-> In x v) } by wf n lt := qs nil := nil ; qs (cons a v) with pivot v (fun x => leb x a) := { | (k, l, lower, higher) => app (qs lower) (a |:| qs higher) }. Proof. all:simpl. all:repeat (destruct qs; simpl). repeat constructor; trivial. auto with arith. auto with arith. simpl. destruct (eq_sym (plus_n_Sm k l)). simpl. intuition. apply Sorted_app; auto. constructor. apply In_All. intros x1 Inx1. apply H2 in Inx1. specialize (i x1). eapply leb_inverse. eapply All_In_All; eauto. eapply All_impl; eauto. simpl. intros x1 [inx1 lebx1]. apply leb_inverse; assumption. intuition. eapply All_app. eapply All_In_All; eauto. eapply All_impl; eauto. simpl. intros x1 [inx1 lebx1]. constructor; auto. constructor; auto. constructor. eapply All_In_All; eauto. eapply All_impl; eauto. simpl. 
intros x1 [Inx1 lebx1]. constructor; auto. Defined. Definition qs_forget {n} (l : vector A n) : vector A n := qs l. (* Proof after the definition. *) Lemma All_In {n} (v : vector A n) P : All P v -> (forall x, In x v -> P x). Proof. induction 1. intros x Hx. depelim Hx. intros x Hx. depelim Hx. auto. auto. Qed. Lemma All_In_self {n} (v : vector A n) : All (fun x => In x v) v. Proof. induction v. constructor. constructor. constructor. eapply All_impl; eauto. simpl. intros. constructor; auto. Qed. Local Open Scope program_scope. Local Open Scope program_scope. Lemma qs_all {n} (l : vector A n) : forall x, In x (qs_forget l) -> In x l. Proof. intros x. unfold qs_forget. destruct (qs l). simpl; eauto. destruct a. eapply (All_In _ _ H0). Qed. Lemma all_qs {n} (l : vector A n) : forall x, In x l -> In x (qs_forget l). Proof. intros x. unfold qs_forget. funelim (qs l); simpl. + trivial. + destruct qs_obligation_4. simpl. clear H1. intros Hx. eapply In_app. destruct Hx. destruct pr6. simpl in *. destruct a. destruct a. simpl in *. constructor. clear Heq. eapply All_In in a. intuition eauto. auto. clear Heq; intuition; destruct pr6; intuition; simpl in *. depelim H1. constructor. constructor. simpl in H1. apply H0 in H1. intuition. eapply All_In in H5. intuition eauto. auto. Qed. Lemma qs_all {n} (l : vector A n) : forall x, In x (qs_forget l) -> In x l. Proof. intros x. unfold qs_forget. funelim (qs l); simpl. + trivial. + destruct qs_obligation_4. simpl. clear H1. intros Hx. apply In_app in Hx. destruct Hx. apply H in H1. destruct pr6. simpl in *. destruct a. destruct a. simpl in *. constructor. clear Heq. eapply All_In in a. intuition eauto. auto. clear Heq; intuition; destruct pr6; intuition; simpl in *. depelim H1. constructor. constructor. simpl in H1. apply H0 in H1. intuition. eapply All_In in H5. intuition eauto. auto. Qed. Lemma qs_equiv {n} (l : vector A n) : forall x, In x l <-> In x (qs_forget l). Proof. split; auto using qs_all. intros. unfold qs_forget. funelim (destruct qs. simpl. intuition. eapply (All_In in H1. pose (All_In_self x0). eapply All_In_All in a; eauto. End QuickSort. Extraction Inline pivot. Extraction qs. Coq-Equations-1.3.1-8.20/examples/string_matching.v000066400000000000000000000145351463127417400217770ustar00rootroot00000000000000(** Example by Nicky Vazou, unfinished *) Require Import Arith. Require Import Coq.Classes.DecidableClass. Require Import Coq.Program.Wf. Require Import List Lia. Require Import PeanoNat. Require Import Program. From Equations Require Import Equations. Import ListNotations. Set Keyed Unification. Class Associative {T: Type} (op: T -> T -> T) := { associativity: forall x y z, op x (op y z) = op (op x y) z; }. Class Monoid (T: Type) := MkMonoid { unit: T; op: T -> T -> T; monoid_associative: Associative op; monoid_left_identity: forall x, op unit x = x; monoid_right_identity: forall x, op x unit = x; }. #[export] Instance app_Associative: forall T, Associative (@app T). Proof. intro T. constructor. induction x. { reflexivity. } { simpl. congruence. } Defined. #[export] Instance list_Monoid: forall T, Monoid (list T). Proof. intro T. apply MkMonoid with (unit := []) (op := @app T). { auto with typeclass_instances. } { reflexivity. } { induction x. { reflexivity. } { simpl. congruence. } } Defined. Notation "a ** b" := (op a b) (at level 50). Class MonoidMorphism {Tn Tm: Type} `{Mn: Monoid Tn} `{Mm: Monoid Tm} (f: Tn -> Tm) := { morphism_unit: f unit = unit; morphism_op: forall x y, f (x ** y) = f x ** f y; }. 
Class ChunkableMonoid (T: Type) `{Monoid T} := MkChunkableMonoid { length: T -> nat; drop: nat -> T -> T; take: nat -> T -> T; drop_spec: forall i x, length (drop i x) = length x - i; take_spec: forall i x, i <= length x -> length (take i x) = i; take_drop_spec: forall i x, x = take i x ** drop i x; }. Fixpoint list_take {T: Type} i (l: list T) := match i, l with | 0, _ => [] | _, [] => [] | S i, h::t => h :: list_take i t end. Fixpoint list_drop {T: Type} i (l: list T) := match i, l with | 0, _ => l | _, [] => [] | S i, h::t => list_drop i t end. Ltac intuition_solver ::= auto with core arith datatypes solve_subterm. #[export] Instance list_ChunkableMonoid: forall T, ChunkableMonoid (list T). Proof. intro T. apply MkChunkableMonoid with (length := @List.length T) (drop := list_drop) (take := list_take); intros. { generalize dependent x. induction i, x; intros; intuition. } { generalize dependent x. induction i, x; intros; intuition. simpl in *. rewrite IHi; intuition. } { generalize dependent x. induction i, x; intros; intuition. simpl in *. rewrite <- IHi. reflexivity. } Defined. Section Chunk. Context{T : Type} `{M : ChunkableMonoid T}. Set Program Mode. Equations? chunk (i: { i : nat | i > 0 }) (x : T) : list T by wf (length x) lt := chunk i x with dec (length x <=? i) := { | left _ => [x] ; | right p => take i x :: chunk i (drop i x) }. Proof. apply leb_complete_conv in p. rewrite drop_spec. lia. Qed. End Chunk. Theorem if_flip_helper {B: Type} {b: bool} (C E: true = b -> B) (D F: false = b -> B): (forall (r: true = b), C r = E r) -> (forall (r: false = b), D r = F r) -> (if b as an return an = b -> B then C else D) eq_refl = (if b as an return an = b -> B then E else F) eq_refl. Proof. intros. destruct b. apply H. apply H0. Qed. (* Transparent chunk. Eval compute in (chunk (exist _ 3 _) [0; 1; 2; 3; 4; 5; 6; 7; 8; 9]). *) (* = [[0; 1; 2]; [3; 4; 5]; [6; 7; 8]; [9]] : list (list nat) *) Section mconcat. Context {M : Type} `{Monoid M}. Equations mconcat (l: list M): M := mconcat [] := unit; mconcat (cons x xs) := x ** mconcat xs. End mconcat. Transparent mconcat. Theorem morphism_distribution: forall {M N: Type} `{MM: Monoid M} `{MN: Monoid N} `{CMM: @ChunkableMonoid N MN} (f: N -> M) `{MMf: @MonoidMorphism _ _ MN MM f}, forall i x, f x = mconcat (map f (chunk i x)). Proof. intros. funelim (chunk i x). { simpl. simp mconcat. now rewrite monoid_right_identity. } simpl. simp mconcat. rewrite <- H; auto. rewrite <- morphism_op. now rewrite <- take_drop_spec. Qed. Lemma length_list_drop: forall {T: Type} i (x: list T), i < Datatypes.length x -> Datatypes.length (list_drop i x) = Datatypes.length x - i. Proof. intros. generalize dependent i. induction x; destruct i; simpl; intros. { reflexivity. } { reflexivity. } { reflexivity. } { apply IHx. intuition. } Qed. Lemma length_chunk_base: forall {T: Type} I (x: list T), let i := proj1_sig I in i > 1 -> Datatypes.length x <= i -> Datatypes.length (chunk I x) = 1. Proof. intros; subst i. funelim (chunk I x). reflexivity. simpl. apply leb_correct in H1. rewrite p in H1. discriminate. Qed. Ltac feed H := match type of H with | ?foo -> _ => let FOO := fresh in assert foo as FOO; [|specialize (H FOO); clear FOO] end. Lemma length_chunk_lt: forall {T: Type} I (x: list T), let i := proj1_sig I in i > 1 -> Datatypes.length x > i -> Datatypes.length (chunk I x) < Datatypes.length x. Proof. intros; subst i. funelim (chunk I x). simpl. lia. simpl. specialize (H H0). revert H. unfold drop. simpl. pose proof (drop_spec (` i) x). simpl in H. 
rewrite H by lia. clear H. simp chunk. clear Heq. destruct dec. simp chunk; simpl; intros; try lia. intros. feed H. clear H. apply leb_complete_conv in e. pose proof (drop_spec (` i) x). rewrite H in e; try lia; unfold length in *; simpl in *; lia. lia. Qed. Section pmconcat. Context {M : Type} `{ChunkableMonoid M}. Equations? pmconcat (I : { i : nat | i > 0 }) (x : list M) : M by wf (length x) lt := pmconcat i x with dec ((` i <=? 1) || (length x <=? ` i))%bool => { | left H => mconcat x ; | right Hd => pmconcat i (map mconcat (chunk i x)) }. Proof. clear pmconcat. rewrite map_length. rewrite Bool.orb_false_iff in Hd. destruct Hd. apply leb_complete_conv in H2. apply leb_complete_conv in H3. apply length_chunk_lt; simpl; auto. Qed. (* 0.264s from 1.571s *) End pmconcat. #[export] Instance mconcat_mon T : MonoidMorphism (@mconcat (list T) _). Next Obligation. Proof. funelim (mconcat x). reflexivity. simpl. rewrite H. now rewrite <- app_assoc. Qed. Theorem concatEquivalence: forall {T: Type} i (x: list (list T)), pmconcat i x = mconcat x. Proof. intros. funelim (pmconcat i x). reflexivity. rewrite H. now rewrite <- (morphism_distribution mconcat). Qed. Coq-Equations-1.3.1-8.20/examples/views.v000066400000000000000000000077431463127417400177570ustar00rootroot00000000000000(* begin hide *) (**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (* end hide *) From Equations Require Import Equations. From Coq Require Import List Program.Syntax Arith Lia. (** * Views using dependent pattern-matching The standard notion of pattern-matching in type theory requires to give a case for each constructor of the inductive type we are working on, even if the function acts the same on a subset of distinct constructors. The reason is that due to dependencies, it is not clear that two branches for distinct constructors can be factorized in general: the return types of branches could be unifiable or not, depending on the branch body. Rather than trying to internalise this notion in the pattern-matching algorithm we propose the use of views to encode this phenomenon. Suppose that we want to do case analysis on an inductive with three constructors but only want to single out the [cone] constructor during pattern-matching: *) Inductive three := cone | ctwo | cthree. (** The user can write a view function to implement this. First one needs to write a discriminator for the inductive type, indicating which cases are to be merged together: *) Equations discr_three (c : three) : Prop := discr_three cone := False; discr_three _ := True. (** One can derive an inductive representing the view of [three] as [cone] and the two other [ctwo] and [cthree] cases lumbed together. *) Inductive three_two_view : three -> Set := | three_one : three_two_view cone | three_other c : discr_three c -> three_two_view c. (** This view is obviously inhabited for any element in [three]. *) Equations three_viewc c : three_two_view c := three_viewc cone := three_one; three_viewc c := three_other c I. (** Using a [with] clause one can pattern-match on the view argument to do case splitting on [three] using only two cases. In each branch, one can see that the [three] variable is determined by the view pattern. 
*) Equations onthree (c : three) : three := onthree c with three_viewc c := onthree ?(cone) three_one := cone; onthree ?(c) (three_other c Hc) := c. (** A similar example discriminating [10] from the rest of natural numbers. *) Equations discr_10 (x : nat) : Prop := discr_10 10 := False; discr_10 x := True. (** First alternative: using an inductive view *) Module View. Inductive view_discr_10 : nat -> Set := | view_discr_10_10 : view_discr_10 10 | view_discr_10_other c : discr_10 c -> view_discr_10 c. (** This view is obviously inhabited for any element in [three]. *) Equations discr_10_view c : view_discr_10 c := discr_10_view 10 := view_discr_10_10; discr_10_view c := view_discr_10_other c I. Equations f (n:nat) : nat := f n with discr_10_view n := f ?(10) view_discr_10_10 := 0; f ?(n) (view_discr_10_other n Hn) := n + 1. Goal forall n, n <> 10 -> f n = n + 1. intros n; apply f_elim. (* 2 cases: 10 and not 10 *) all:simpl; congruence. Qed. End View. (** Second alternative: using the discriminator directly. This currently requires massaging the eliminator a bit *) Equations f (n:nat) : nat by struct n (* FIXME *) := { f 10 := 0; f x := brx x (I : discr_10 x) } where brx (x : nat) (H : discr_10 x) : nat := brx x H := x + 1. Lemma f_elim' : forall (P : nat -> nat -> Prop), P 10 0 -> (forall (x : nat) (H : discr_10 x), P x (x + 1)) -> (forall n : nat, P n (f n)). Proof. intros. apply (f_elim P (fun x H r => P x r)); auto. Defined. Goal forall n, n <> 10 -> f n = n + 1. intros n; apply f_elim'. (* 2 cases: 10 and not 10 *) all:simpl; congruence. Qed.Coq-Equations-1.3.1-8.20/examples/wfrec.v000066400000000000000000000064351463127417400177250ustar00rootroot00000000000000(* begin hide *) (**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) From Equations Require Import Equations. From Coq Require Import List Program.Syntax Arith Lia. Require Import List. Import ListNotations. (* end hide *) (** * Well-founded recursion Equations provide support for well-founded recursion, potentially nested and mutual. Here we show a standard example of the [nubBy] function from Haskell's prelude, which is naturally expressed using well-founded recursion on the length of the list. To show this, we however use the argument that [filter f l] always returns a sublist of [l]. *) Equations filter_length {A} (l : list A) (f : A -> bool) : length (filter f l) <= length l := filter_length [] f := le_n 0; filter_length (x :: xs) f with f x := { | true => le_n_S _ _ (filter_length xs f); | false => le_S _ _ (filter_length xs f) }. Section nubBy. Context {A} (eq : A -> A -> bool). (** The proof that this function is well-founded uses simply the lemma [filter_length] and standard arithmetic reasoning. *) Equations? nubBy (l : list A) : list A by wf (length l) lt := nubBy [] => []; nubBy (x :: xs) => x :: nubBy (filter (fun y => negb (eq x y)) xs). Proof. simpl. auto using filter_length with arith. Defined. End nubBy. (** Using functional elimination, we can show standard properties of [nubBy], without having to repeat the well-founded induction principle *) Lemma nubBy_length {A} (eq : A -> A -> bool) (l : list A) : length (nubBy eq l) <= length l. Proof. funelim (nubBy eq l); simpl; trivial. 
rewrite filter_length in H. auto with arith. Qed. Lemma In_filter {A} (f : A -> bool) (l : list A) a : In a (filter f l) -> In a l /\ f a = true. Proof. induction l; simpl. intros []. destruct (f a0) eqn:Heq. simpl. intuition auto. now subst a0. intuition auto. Qed. Lemma In_nubBy {A} (eq : A -> A -> bool) (l : list A) (a : A) : In a (nubBy eq l) -> In a l. Proof. funelim (nubBy eq l). + trivial. + intros H0. destruct H0 as [->|H0]; auto. simpl. auto. specialize (H _ H0). apply In_filter in H as [Inal eqa]. right; auto. Qed. (** This allows to show that [nubBy] returns a list without duplicates in a few lines of proof. *) Lemma nuBy_nodup {A} (eq : A -> A -> bool) (l : list A) : (forall x y, (eq x y = true) <-> (x = y)) -> NoDup (nubBy eq l). Proof. funelim (nubBy eq l). constructor. intros Heq; specialize (H Heq). constructor. intros Hi. apply In_nubBy, In_filter in Hi as [_ eqaa]. specialize (Heq x x). destruct (eq x x). discriminate. destruct (proj2 Heq). reflexivity. discriminate. auto. Qed. Equations ack (m n : nat) : nat by wf (m, n) (Equations.Prop.Subterm.lexprod _ _ lt lt) := ack 0 0 := 1; ack 0 (S n) := S (S n); ack (S m) 0 := ack m 1; ack (S m) (S n) := ack m (ack (S m) n). Coq-Equations-1.3.1-8.20/makedoc.sh000077500000000000000000000000271463127417400165430ustar00rootroot00000000000000#!/bin/sh cd doc make Coq-Equations-1.3.1-8.20/siteexamples.sh000066400000000000000000000005141463127417400176410ustar00rootroot00000000000000#/bin/sh cd examples for i in *.v do coqdoc -s --no-lib-name -parse-comments --no-index --utf8 --interpolate --html \ --external http://github.com/mattam82/Coq-Equations/tree/main Equations \ -Q ../theories Equations -R . Examples -d ../../equations-www/examples $i done cd .. git checkout doc/examples/coqdoc.css Coq-Equations-1.3.1-8.20/src/000077500000000000000000000000001463127417400153715ustar00rootroot00000000000000Coq-Equations-1.3.1-8.20/src/META.coq-equations.in000066400000000000000000000005241463127417400212370ustar00rootroot00000000000000package "plugin" ( directory = "." description = "Coq equations" requires = "coq-core.plugins.ltac coq-core.plugins.extraction coq-core.plugins.cc" archive(byte) = "equations_plugin.cma" archive(native) = "equations_plugin.cmxa" plugin(byte) = "equations_plugin.cma" plugin(native) = "equations_plugin.cmxs" ) directory = "." Coq-Equations-1.3.1-8.20/src/context_map.ml000066400000000000000000000650511463127417400202530ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) open Util open Names open Nameops open Constr open Context open Reductionops open Pp open List open EConstr open EConstr.Vars open Equations_common (** Abstract syntax for dependent pattern-matching. 
*) type peconstructor = Names.constructor peuniverses type pat = | PRel of int | PCstr of peconstructor * pat list | PInac of constr | PHide of int type context_map = { src_ctx : rel_context; map_inst : pat list; tgt_ctx : rel_context; } let type_of_refresh env evdref c = let ty = Retyping.get_type_of env !evdref c in let sigma, ty = Evarsolve.refresh_universes ~status:Evd.univ_flexible ~onlyalg:true (Some false) env !evdref ty in evdref := sigma; ty let mkInac env evdref c = mkApp (e_new_global evdref (Lazy.force coq_inacc), [| type_of_refresh env evdref c ; c |]) let mkHide env evdref c = mkApp (e_new_global evdref (Lazy.force coq_hide), [| type_of_refresh env evdref c ; c |]) let rec pat_constr = function | PRel i -> mkRel i | PCstr (c, p) -> let c' = mkConstructU c in mkApp (c', Array.of_list (List.map pat_constr p)) | PInac r -> r | PHide c -> mkRel c let rec constr_of_pat inacc_and_hide env evdref = function | PRel i -> mkRel i | PCstr (c, p) -> let c' = mkConstructU c in mkApp (c', Array.of_list (constrs_of_pats inacc_and_hide env evdref p)) | PInac r -> if inacc_and_hide then try mkInac env evdref r with _ -> r else r | PHide i -> if inacc_and_hide then mkHide env evdref (mkRel i) else mkRel i and constrs_of_pats inacc_and_hide env evdref l = List.map (constr_of_pat inacc_and_hide env evdref) l let constr_of_pat ?(inacc_and_hide=true) env sigma p = let evdref = ref sigma in let pc = constr_of_pat inacc_and_hide env evdref p in !evdref, pc let constrs_of_pats ?(inacc_and_hide=true) env sigma ps = let evdref = ref sigma in let pcs = constrs_of_pats inacc_and_hide env evdref ps in !evdref, pcs let rec pat_vars = function | PRel i -> Int.Set.singleton i | PCstr (c, p) -> pats_vars p | PInac _ -> Int.Set.empty | PHide _ -> Int.Set.empty and pats_vars l = fold_left (fun vars p -> let pvars = pat_vars p in let inter = Int.Set.inter pvars vars in if Int.Set.is_empty inter then Int.Set.union pvars vars else error ("Non-linear pattern: variable " ^ string_of_int (Int.Set.choose inter) ^ " appears twice")) Int.Set.empty l let inaccs_of_constrs l = List.map (fun x -> PInac x) l let rec pats_of_constrs env sigma l = List.map (pat_of_constr env sigma) l and pat_of_constr env sigma c = match kind sigma c with | Rel i -> PRel i | App (f, [| a ; c |]) when is_global env sigma (Lazy.force coq_inacc) f -> PInac c | App (f, [| a ; c |]) when is_global env sigma (Lazy.force coq_hide) f -> PHide (destRel sigma c) | App (f, args) when isConstruct sigma f -> let ((ind,_),_ as cstr) = destConstruct sigma f in let nparams = Inductiveops.inductive_nparams (Global.env()) ind in let params, args = Array.chop nparams args in PCstr (cstr, inaccs_of_constrs (Array.to_list params) @ pats_of_constrs env sigma (Array.to_list args)) | Construct f -> PCstr (f, []) | _ -> PInac c let rec pat_to_user_pat ?(avoid = ref Id.Set.empty) ?loc ctx = function | PRel i -> let decl = List.nth ctx (pred i) in let name = Context.Rel.Declaration.get_name decl in let id = Namegen.next_name_away name !avoid in avoid := Id.Set.add id !avoid; Some (DAst.make ?loc (Syntax.(PUVar (id, User)))) | PCstr (((ind, _ as cstr), _), pats) -> let n = Inductiveops.inductive_nparams (Global.env()) ind in let _, pats = List.chop n pats in Some (DAst.make ?loc (Syntax.PUCstr (cstr, n, pats_to_lhs ~avoid ?loc ctx pats))) | PInac c -> let id = Namegen.next_ident_away (Id.of_string "wildcard") !avoid in avoid := Id.Set.add id !avoid; Some (DAst.make ?loc (Syntax.(PUVar (id, Generated)))) | PHide i -> None and pats_to_lhs ?(avoid = ref Id.Set.empty) ?loc 
ctx pats = List.map_filter (pat_to_user_pat ~avoid ?loc ctx) pats let context_map_to_lhs ?(avoid = Id.Set.empty) ?loc map = let avoid = ref avoid in List.rev (pats_to_lhs ~avoid ?loc map.src_ctx map.map_inst) let do_renamings env sigma ctx = let avoid, ctx' = List.fold_right (fun decl (ids, acc) -> let (n, b, t) = to_tuple decl in match n.binder_name with | Name id -> let id' = Namegen.next_ident_away id ids in let decl' = make_def {n with binder_name = Name id'} b t in (Id.Set.add id' ids, decl' :: acc) | Anonymous -> let id' = Namegen.id_of_name_using_hdchar (push_rel_context acc env) sigma t Anonymous in let id' = Namegen.next_ident_away id' ids in let decl' = make_def {n with binder_name = Name id'} b t in (Id.Set.add id' ids, decl' :: acc)) ctx (Id.Set.empty, []) in ctx' (** Pretty-printing *) let pr_constr_pat env sigma c = Printer.pr_econstr_env env sigma c (* match kind sigma c with * | App _ -> str "(" ++ pr ++ str ")" * | _ -> pr *) let pr_pat env sigma c = let sigma, patc = constr_of_pat ~inacc_and_hide:true env sigma c in pr_constr_pat env sigma patc let pr_pats env sigma patcs = prlist_with_sep (fun _ -> str " ") (pr_pat env sigma) (List.rev patcs) let pr_context env sigma ctx = let _, pp = Context.Rel.fold_outside (fun d (env, pps) -> (push_rel d env, pps ++ ws 2 ++ try Printer.pr_rel_decl env sigma (EConstr.Unsafe.to_rel_decl d) with e -> str"")) ctx ~init:(env, mt ()) in hv 0 pp let ppcontext = ppenv_sigma pr_context let pr_context_map env sigma { src_ctx = delta; map_inst = patcs; tgt_ctx = gamma } = let env' = push_rel_context delta env in let ctx = pr_context env sigma delta in let ctx' = pr_context env sigma gamma in v 0 (v 0 ((if List.is_empty delta then ctx else ctx) ++ cut () ++ str "============================" ++ cut () ++ pr_pats env' sigma patcs) ++ cut () ++ str "============================" ++ cut () ++ ctx') let ppcontext_map env sigma context_map = pp (pr_context_map env sigma context_map) let ppcontext_map_empty = ppenv_sigma pr_context_map (** Debugging functions *) let typecheck_map env evars { src_ctx = ctx; map_inst = subst; tgt_ctx = ctx' } = typecheck_rel_context env evars ctx; typecheck_rel_context env evars ctx'; let env = push_rel_context ctx env in let _ = List.fold_right2 (fun decl p (evars, subst) -> let (na, b, t) = to_tuple decl in let evars, c = constr_of_pat ~inacc_and_hide:false env evars p in check_term env evars c (substl subst t); (evars, c :: subst)) ctx' subst (evars, []) in () let check_ctx_map ?(unsafe = false) env evars map = if !Equations_common.debug && not unsafe then try typecheck_map env evars map; map with Pretype_errors.PretypeError (env, sigma, Pretype_errors.TypingError e) -> errorlabstrm (str"Type error while building context map: " ++ pr_context_map env evars map ++ spc () ++ Himsg.explain_type_error env evars e) | Invalid_argument s -> errorlabstrm (str"Type error while building context map: " ++ pr_context_map env evars map ++ spc () ++ str"Invalid_argument: " ++ str s) | e when is_anomaly e -> errorlabstrm (str"Type error while building context map: " ++ pr_context_map env evars map ++ spc () ++ str"Anomaly: " ++ CErrors.print e) else map let mk_ctx_map ?(unsafe = false) env evars ctx subst ctx' = let map = { src_ctx = ctx; map_inst = subst; tgt_ctx = ctx'; } in check_ctx_map ~unsafe env evars map let rec map_patterns f ps = List.map (function | PCstr (c, pl) -> let c' = destConstruct Evd.empty (f (mkConstructU c)) in PCstr (c', map_patterns f pl) | PInac c -> PInac (f c) | x -> x) ps let map_ctx_map f map = { src_ctx 
= map_rel_context f map.src_ctx; map_inst = map_patterns f map.map_inst; tgt_ctx = map_rel_context f map.tgt_ctx; } (** Specialize by a substitution. *) let subst_pats_constr sigma k s c = let rec aux depth c = match kind sigma c with | Rel n -> let k = n - depth in if k > 0 then try lift depth (pat_constr (nth s (pred k))) with Failure _ (* "nth"*) -> c else c | _ -> map_with_binders sigma succ aux depth c in aux k c let subst_context sigma s ctx = let (_, ctx') = fold_right (fun decl (k, ctx') -> (succ k, map_rel_declaration (subst_pats_constr sigma k s) decl :: ctx')) ctx (0, []) in ctx' let rec specialize sigma s p = match p with | PRel i -> (try nth s (pred i) with Failure _ (* "nth" *) -> p) | PCstr(c, pl) -> PCstr (c, specialize_pats sigma s pl) | PInac r -> PInac (specialize_constr sigma s r) | PHide i -> (match nth s (pred i) (* FIXME *) with | PRel i -> PHide i | PHide i -> PHide i | PInac r -> PInac r | _ -> assert(false)) and specialize_constr sigma s c = subst_pats_constr sigma 0 s c and specialize_pats sigma s = List.map (specialize sigma s) let specialize_rel_context sigma s ctx = let subst, res = List.fold_right (fun decl (k, acc) -> let decl = map_rel_declaration (subst_pats_constr sigma k s) decl in (succ k, decl :: acc)) ctx (0, []) in res let mapping_constr sigma (s : context_map) c = specialize_constr sigma s.map_inst c (* Substitute a Constr.t in a pattern. *) let rec subst_constr_pat sigma k t p = match p with | PRel i -> if i == k then PInac t else if i > k then PRel (pred i) else p | PCstr(c, pl) -> PCstr (c, subst_constr_pats sigma k t pl) | PInac r -> PInac (substnl [t] (pred k) r) | PHide i -> PHide (destRel sigma (substnl [t] (pred k) (mkRel i))) and subst_constr_pats sigma k t = List.map (subst_constr_pat sigma k t) (* Lifting patterns. 
*) let rec lift_patn n k p = match p with | PRel i -> if i >= k then PRel (i + n) else p | PCstr(c, pl) -> PCstr (c, lift_patns n k pl) | PInac r -> PInac (liftn n (succ k) r) | PHide r -> PHide (destRel Evd.empty (liftn n (succ k) (mkRel r))) and lift_patns n k = List.map (lift_patn n k) let lift_pat n p = lift_patn n 0 p let lift_pats n p = lift_patns n 0 p let rec eq_pat env sigma p1 p2 = match p1, p2 with | PRel i, PRel i' -> Int.equal i i' | PHide i, PHide i' -> Int.equal i i' | PCstr (c, pl), PCstr (c', pl') -> Environ.QConstruct.equal env (fst c) (fst c') && List.for_all2 (eq_pat env sigma) pl pl' | PInac c, PInac c' -> EConstr.eq_constr sigma c c' | _, _ -> false let make_permutation ?(env = Global.env ()) (sigma : Evd.evar_map) map1 map2 : context_map = let ctx1 = map1.src_ctx in let pats1 = map1.map_inst in let ctx2 = map2.src_ctx in let pats2 = map2.map_inst in let len = List.length ctx1 in let perm = Array.make len None in let merge_rels i1 i2 = match perm.(pred i2) with | None -> perm.(pred i2) <- Some i1 | Some j when Int.equal i1 j -> () | Some k -> let rel_id i ctx = Pp.int i ++ str " = " ++ Names.Name.print (Equations_common.(get_name (lookup_rel i ctx))) in failwith (Pp.string_of_ppcmds (str "Could not generate a permutation: two different instances:" ++ rel_id i2 ctx2 ++ str" in ctx2 is invertible to " ++ rel_id k ctx1 ++ str" and " ++ rel_id i1 ctx1)) in let reduce env sigma c = let nenv = Environ.pop_rel_context (Environ.nb_rel env) env in let ctx = List.map Context.Rel.Declaration.drop_body (Environ.rel_context env) in let nenv = Environ.push_rel_context ctx nenv in let c' = Reductionops.clos_whd_flags RedFlags.all nenv sigma c in c' in let env1 = push_rel_context ctx1 env in let rec merge_pats p1 p2 = match p1, p2 with | _, PInac p2 -> () | PCstr (p, ps), PCstr (_, ps') -> List.iter2 (fun p1 p2 -> merge_pats p1 p2) ps ps' | PHide i, _ -> merge_pats (PRel i) p2 | _, PHide i -> merge_pats p1 (PRel i) | PRel i1, PRel i2 -> if i1 <= len then try merge_rels i1 i2 with Invalid_argument _ -> failwith "Could not generate a permutation: different variables" else () | PInac c, _ -> let p1' = pat_of_constr env1 sigma (reduce env1 sigma c) in if eq_pat env sigma p1 p1' then failwith "Could not generate a permutation: irreducible inaccessible" else merge_pats p1' p2 | _, _ -> failwith (Pp.string_of_ppcmds (str"Could not generate a permutation, patterns differ: " ++ pr_pat env sigma p1 ++ str " and " ++ pr_pat env sigma p2)) in (* FIXME This function could also check that constructors are the same and * so on. It also need better error handling. *) List.iter2 merge_pats pats1 pats2; let pats = let rec aux k pats = if k = 0 then pats else match perm.(pred k) with | None -> let decl = try lookup_rel k ctx2 with Not_found -> assert false in (match Context.Rel.Declaration.get_value decl with | Some body -> (* body lives in cxt2|k, pats is a substitution for it into ctx1. 
*) aux (pred k) (PInac (specialize_constr sigma pats body) :: pats) | None -> failwith "Could not generate a permutation") | Some i -> aux (pred k) (PRel i :: pats) in aux len [] in let ctxmap = mk_ctx_map env sigma ctx1 pats ctx2 in if !debug then Feedback.msg_debug Pp.(str"Permutation ctxmap: " ++ pr_context_map env sigma ctxmap ++ str" of " ++ pr_context_map env sigma map1 ++ str " and " ++ pr_context_map env sigma map2); ctxmap let specialize_mapping_constr sigma (m : context_map) c = specialize_constr sigma m.map_inst c let rels_of_ctx ?(with_lets=true) ctx = let len = List.length ctx in if with_lets then Termops.rel_list 0 len (* len first *) else List.rev (CList.map_filter_i (fun i d -> if Context.Rel.Declaration.is_local_assum d then Some (mkRel (succ i)) else None) ctx) let patvars_of_ctx ?(with_lets=true) ctx = let len = List.length ctx in if with_lets then CList.init len (fun i -> PRel (len - i)) else CList.rev (CList.map_filter_i (fun i d -> if Context.Rel.Declaration.is_local_assum d then Some (PRel (succ i)) else None) ctx) let pat_vars_list n = CList.init n (fun i -> PRel (succ i)) let intset_of_list = fold_left (fun s x -> Int.Set.add x s) Int.Set.empty let split_context n c = let after, before = List.chop n c in match before with | hd :: tl -> after, hd, tl | [] -> raise (Invalid_argument "split_context") let split_tele n (ctx : rel_context) = let rec aux after n l = match n, l with | 0, decl :: before -> before, decl, List.rev after | n, decl :: before -> aux (decl :: after) (pred n) before | _ -> raise (Invalid_argument "split_tele") in aux [] n ctx (* Compute the transitive closure of the dependency relation for a term in a context *) let rels_above ctx x = let len = List.length ctx in intset_of_list (CList.init (len - x) (fun i -> x + succ i)) let is_fix_proto env sigma t = match kind sigma t with | LetIn (_, f, _, _) -> is_global env sigma (Lazy.force coq_fix_proto) f | _ -> false let fix_rels env sigma ctx = List.fold_left_i (fun i acc decl -> if is_fix_proto env sigma (get_type decl) then Int.Set.add i acc else acc) 1 Int.Set.empty ctx let rec dependencies_of_rel ~with_red env evd ctx k x = let (n,b,t) = to_tuple (nth ctx (pred k)) in let b = Option.map (lift k) b and t = lift k t in let bdeps = match b with Some b -> dependencies_of_term ~with_red env evd ctx b x | None -> Int.Set.empty in Int.Set.union (Int.Set.singleton k) (Int.Set.union bdeps (dependencies_of_term ~with_red env evd ctx t x)) and dependencies_of_term ~with_red env evd ctx t x = (* First we get the syntactic dependencies of t. *) let rels = Termops.free_rels evd t in let rels = (* We check if it mentions x. If it does, we reduce t because we know it should not. 
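     The idea is that normalization removes such spurious occurrences of [x]
     (e.g. a redex whose normal form drops [x]), and the free variables of the
     normal form are then taken as the actual dependencies.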
*) if with_red && Int.Set.mem x rels then Termops.free_rels evd (nf_betadeltaiota env evd t) else rels in Int.Set.fold (fun i -> Int.Set.union (dependencies_of_rel ~with_red env evd ctx i x)) rels Int.Set.empty let non_dependent evd ctx c = List.fold_left_i (fun i acc (_, _, t) -> if not (Termops.dependent evd (lift (-i) c) t) then Int.Set.add i acc else acc) 1 Int.Set.empty ctx let subst_term_in_context sigma t ctx = let (term, rel, newctx) = List.fold_right (fun decl (term, rel, newctx) -> let (n, b, t) = to_tuple decl in let decl' = make_def n b (Termops.replace_term sigma term (mkRel rel) t) in (lift 1 term, succ rel, decl' :: newctx)) ctx (t, 1, []) in newctx let strengthen ?(full=true) ?(abstract=false) env evd (ctx : rel_context) x (t : constr) = let rels = Int.Set.union (if full then rels_above ctx x else Int.Set.singleton x) (Int.Set.union (dependencies_of_term ~with_red:true env evd ctx t x) (Int.Set.remove x (fix_rels env evd ctx))) in (* For each variable that we need to push under x, we check if its type or body mentions x syntactically. If it does, we normalize it. *) let maybe_reduce k t = if Int.Set.mem k (Termops.free_rels evd t) then nf_betadeltaiota env evd t else t in let ctx = List.map_i (fun k decl -> if Int.Set.mem k rels && k < x then map_rel_declaration (maybe_reduce (x - k)) decl else decl) 1 ctx in let len = length ctx in let nbdeps = Int.Set.cardinal rels in let lifting = len - nbdeps in (* Number of variables not linked to t *) let rec aux k n acc m rest s = function | decl :: ctx' -> if Int.Set.mem k rels then let rest' = subst_telescope (mkRel (nbdeps + lifting - pred m)) rest in aux (succ k) (succ n) (decl :: acc) m rest' (Inl n :: s) ctx' else aux (succ k) n (subst_telescope mkProp acc) (succ m) (decl :: rest) (Inr m :: s) ctx' | [] -> rev acc, rev rest, s in let (min, rest, subst) = aux 1 1 [] 1 [] [] ctx in let lenrest = length rest in let subst = rev subst in let reorder = List.map_i (fun i -> function Inl x -> (x + lenrest, i) | Inr x -> (x, i)) 1 subst in let subst = List.map (function Inl x -> PRel (x + lenrest) | Inr x -> PRel x) subst in let ctx' = if abstract then subst_term_in_context evd (lift (-lenrest) (specialize_constr evd subst t)) rest @ min else rest @ min in mk_ctx_map env evd ctx' subst ctx, mk_ctx_map env evd ctx (List.map (fun (i, j) -> PRel i) reorder) ctx' (* TODO Merge both strengthening functions. Bottom one might be better. *) (* Return a substitution (and its inverse) which is just a permutation * of the variables in the context which is well-typed, and such that * all variables in [t] (and their own dependencies) are now declared * before [x] in the context. *) let new_strengthen (env : Environ.env) (evd : Evd.evar_map) (ctx : rel_context) (x : int) ?(rels = rels_above ctx x) (t : constr) : context_map * context_map = let rels = Int.Set.union rels (Int.Set.union (dependencies_of_term ~with_red:true env evd ctx t x) (fix_rels env evd ctx)) in let maybe_reduce k t = if Int.Set.mem k (Termops.free_rels evd t) then Equations_common.nf_betadeltaiota env evd t else t in (* We may have to normalize some declarations in the context if they * mention [x] syntactically when they shouldn't. *) let ctx = CList.map_i (fun k decl -> if Int.Set.mem k rels && k < x then Equations_common.map_rel_declaration (maybe_reduce (x - k)) decl else decl) 1 ctx in (* Now we want to put everything in [rels] as the oldest part of the context, * and everything else after. 
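   * For instance, schematically, with closed types and no local definitions
   * or fix protos: if [ctx] declares [a], [b], [c] in that order (so [c] is
   * [Rel 1]), [x = 2] (that is [b]) and [t = c], then [rels = {1; 3}] and the
   * reordered context declares [a], then [c], then [b], with [b] innermost.
   * The first returned substitution then instantiates [Rel 1], [Rel 2] and
   * [Rel 3] of [ctx] by [PRel 2], [PRel 1] and [PRel 3] respectively.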
*) let len_ctx = Context.Rel.length ctx in let lifting = len_ctx - Int.Set.cardinal rels in let rev_subst = Array.make len_ctx (PRel 0) in (* [k] is the current rel in [ctx]. * [n] is the position of the next rel that should be in the newer part of [ctx']. * [lifting] is the number of rels that will end in this newer part. * [before] and [after] are the older and newer parts of [ctx']. *) let rec aux k before after n subst = function | decl :: ctx -> (* We just lift the declaration so that it is typed under the whole * context [ctx]. We will perform the proper substitution right after. *) let decl = Equations_common.map_rel_declaration (Vars.lift k) decl in if Int.Set.mem k rels then (* [k - n + 1] is the position of this rel in the older part of [ctx'], which * is shifted by [lifting]. *) let subst = PRel (lifting + k - n + 1) :: subst in rev_subst.(k + lifting - n) <- PRel k; aux (succ k) (decl :: before) after n subst ctx else let subst = PRel n :: subst in rev_subst.(n - 1) <- PRel k; aux (succ k) before (decl :: after) (succ n) subst ctx | [] -> CList.rev (before @ after), CList.rev subst in (* Now [subst] is a list of indices which represents the substitution * that we must apply. *) (* Right now, [ctx'] is an ill-typed rel_context, we need to apply [subst]. *) let (ctx', subst) = aux 1 [] [] 1 [] ctx in let rev_subst = Array.to_list rev_subst in (* Fix the context [ctx'] by using [subst]. *) (* Currently, each declaration in [ctx'] is actually typed under [ctx]. *) (* We can apply the substitution to get a declaration typed under [ctx'], * and lift it back to its place in [ctx']. *) let do_subst k c = Vars.lift (-k) (specialize_constr evd subst c) in let ctx' = CList.map_i (fun k decl -> Equations_common.map_rel_declaration (do_subst k) decl) 1 ctx' in (* Now we have everything need to build the two substitutions. 
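     [s] goes from the reordered context [ctx'] to the original [ctx] and
     [rev_s] is its inverse; both are mere permutations, built only from
     [PRel] patterns.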
*) let s = mk_ctx_map env evd ctx' subst ctx in let rev_s = mk_ctx_map env evd ctx rev_subst ctx' in s, rev_s let id_pats g = List.rev (patvars_of_ctx g) let id_subst g = { src_ctx = g; map_inst = id_pats g; tgt_ctx = g } let eq_context_nolet env sigma (g : rel_context) (d : rel_context) = try snd (List.fold_right2 (fun decl decl' (env, acc) -> if acc then let t = get_type decl and t' = get_type decl' in let res = (eq_constr sigma t t' || (* FIXME: is_conv is not respecting some universe equalities in sigma *) let t = Evarutil.nf_evar sigma t in let t' = Evarutil.nf_evar sigma t' in is_conv env sigma t t') in if res = false then Printf.eprintf "While comparing contexts: %s and %s : %s\n" (Pp.string_of_ppcmds (Printer.pr_constr_env env sigma (EConstr.Unsafe.to_constr t))) (Pp.string_of_ppcmds (Printer.pr_constr_env env sigma (EConstr.Unsafe.to_constr t'))) (* (Pp.string_of_ppcmds (UGraph.pr_universes Univ.Level.pr (Evd.universes sigma))); *) (Pp.string_of_ppcmds (Termops.pr_evar_map ~with_univs:true None env sigma)); (push_rel decl env, res) else env, acc) g d (env, true)) with Invalid_argument _ (* "List.fold_right2" *) -> false | e -> Printf.eprintf "Exception while comparing contexts %s and %s : %s\n" (Pp.string_of_ppcmds (Termops.Internal.print_rel_context (push_rel_context g env) sigma)) (Pp.string_of_ppcmds (Termops.Internal.print_rel_context (push_rel_context d env) sigma)) (Printexc.to_string e); raise e let check_eq_context_nolet env sigma snd fst = let g = snd.tgt_ctx in let d = fst.src_ctx in if eq_context_nolet env sigma g d then () else errorlabstrm (str "Contexts do not agree for composition: " ++ pr_context_map env sigma snd ++ str " and " ++ pr_context_map env sigma fst) let compose_subst ?(unsafe = false) env ?(sigma=Evd.empty) snd fst = let { src_ctx = g; map_inst = p; tgt_ctx = d } = fst in let { src_ctx = g'; map_inst = p'; tgt_ctx = d' } = snd in if !Equations_common.debug && not unsafe then check_eq_context_nolet env sigma snd fst; mk_ctx_map ~unsafe env sigma g' (specialize_pats sigma p' p) d (* (g', (specialize_pats p' p), d) *) let push_mapping_context sigma decl subs = let { src_ctx = g; map_inst = p; tgt_ctx = d } = subs in let decl' = map_rel_declaration (specialize_constr sigma p) decl in { src_ctx = decl' :: g; map_inst = (PRel 1 :: List.map (lift_pat 1) p); tgt_ctx = decl :: d } let lift_subst env evd (ctx : context_map) (g : rel_context) = let map = List.fold_right (fun decl acc -> push_mapping_context evd decl acc) g ctx in check_ctx_map env evd map let single_subst ?(unsafe = false) env evd x p g = let t = pat_constr p in if eq_constr evd t (mkRel x) then id_subst g else if noccur_between evd 1 x t then (* The term to substitute refers only to previous variables. *) let substctx = subst_in_ctx x t g in let pats = CList.init (List.length g) (fun i -> let k = succ i in if k == x then (lift_pat (-1) p) else if k > x then PRel (pred k) else PRel k) (* let substctx = set_in_ctx x t g in *) (* let pats = list_tabulate *) (* (fun i -> let k = succ i in if k = x then p else PRel k) *) (* (List.length g) *) in mk_ctx_map ~unsafe env evd substctx pats g else let { src_ctx = ctx; map_inst = s; tgt_ctx = g }, invstr = new_strengthen env evd g x t in let x' = match nth s (pred x) with PRel i -> i | _ -> error "Occurs check singleton subst" and t' = specialize_constr evd s t in (* t' is in ctx. Do the substitution of [x'] by [t] now in the context and the patterns. 
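     More precisely, [x'] is replaced by [t'], the image of [t] under the
     strengthening substitution, both of which live in [ctx].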
*) let substctx = subst_in_ctx x' t' ctx in let pats = List.map_i (fun i p -> subst_constr_pat evd x' (lift (-1) t') p) 1 s in mk_ctx_map ~unsafe env evd substctx pats g let pr_rel_name env i = Name.print (get_name (EConstr. lookup_rel i env)) let is_local_def i ctx = let decl = List.nth ctx (pred i) in Context.Rel.Declaration.is_local_def decl let filter_def_pats map = CList.map_filter (function | PRel i when is_local_def i map.src_ctx -> None | PHide i when is_local_def i map.src_ctx -> None | p -> Some p) map.map_inst Coq-Equations-1.3.1-8.20/src/context_map.mli000066400000000000000000000145141463127417400204220ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) open Environ open Names open EConstr open Equations_common type peconstructor = Names.constructor peuniverses (** Internal patterns *) type pat = PRel of int | PCstr of peconstructor * pat list | PInac of constr | PHide of int (** Substitutions: src_ctx ⊢ map_inst : tgt_ctx *) type context_map = { src_ctx : rel_context; map_inst : pat list; tgt_ctx : rel_context; } (** Tag with a Constant.t application (needs env to infer type) *) val mkInac : env -> esigma -> constr -> constr val mkHide : env -> esigma -> constr -> constr (* Constr of a pattern *) val pat_constr : pat -> constr val eq_pat : env -> Evd.evar_map -> pat -> pat -> bool (* Constr of a pattern optionally marking innaccessibles and hidden patterns and modifying the evar_map in this case only. *) val constr_of_pat : ?inacc_and_hide:bool -> env -> Evd.evar_map -> pat -> Evd.evar_map * constr val constrs_of_pats : ?inacc_and_hide:bool -> env -> Evd.evar_map -> pat list -> Evd.evar_map * constr list (** Free pattern variables (excluding inaccessibles and hiddens) *) val pat_vars : pat -> Int.Set.t val pats_vars : pat list -> Int.Set.t (** Make the terms inaccessible *) val inaccs_of_constrs : constr list -> pat list (** Reverse of constr_of_pat turning applications of innac/hide into the proper patterns *) val pats_of_constrs : Environ.env -> Evd.evar_map -> constr list -> pat list val pat_of_constr : Environ.env -> Evd.evar_map -> constr -> pat (** Translating back to user patterns. 
*) val context_map_to_lhs : ?avoid:Id.Set.t -> ?loc:Loc.t -> context_map -> Syntax.lhs (** Pretty-printing *) val pr_constr_pat : env -> Evd.evar_map -> constr -> Pp.t val pr_pat : env -> Evd.evar_map -> pat -> Pp.t val pr_pats : env -> Evd.evar_map -> pat list -> Pp.t val pr_context : env -> Evd.evar_map -> rel_context -> Pp.t val ppcontext : rel_context -> unit val pr_context_map : env -> Evd.evar_map -> context_map -> Pp.t val ppcontext_map : env -> Evd.evar_map -> context_map -> unit val ppcontext_map_empty : context_map -> unit val pr_rel_name : env -> int -> Pp.t (** Rename de Bruijn variables with fresh, distinct names *) val do_renamings : env -> Evd.evar_map -> rel_context -> rel_context val typecheck_map : Environ.env -> Evd.evar_map -> context_map -> unit val check_ctx_map : ?unsafe:bool -> Environ.env -> Evd.evar_map -> context_map -> context_map (** Smart constructor (doing runtime checks) *) val mk_ctx_map : ?unsafe:bool -> Environ.env -> Evd.evar_map -> rel_context -> pat list -> rel_context -> context_map val map_ctx_map : (EConstr.t -> EConstr.t) -> context_map -> context_map (** Substitution and specialization *) val subst_pats_constr : Evd.evar_map -> int -> pat list -> constr -> constr val subst_context : Evd.evar_map -> pat list -> rel_context -> rel_context val specialize : Evd.evar_map -> pat list -> pat -> pat val specialize_constr : Evd.evar_map -> pat list -> constr -> constr val specialize_pats : Evd.evar_map -> pat list -> pat list -> pat list val specialize_rel_context : Evd.evar_map -> pat list -> rel_context -> rel_context val mapping_constr : Evd.evar_map -> context_map -> constr -> constr val subst_constr_pat : Evd.evar_map -> int -> constr -> pat -> pat val subst_constr_pats : Evd.evar_map -> int -> constr -> pat list -> pat list val lift_patn : int -> int -> pat -> pat val lift_patns : int -> int -> pat list -> pat list val lift_pat : int -> pat -> pat val lift_pats : int -> pat list -> pat list val make_permutation : ?env:Environ.env -> Evd.evar_map -> context_map -> context_map -> context_map val specialize_mapping_constr : Evd.evar_map -> context_map -> constr -> constr val rels_of_ctx : ?with_lets:bool -> ('a,'b,'c) Context.Rel.pt -> constr list val patvars_of_ctx : ?with_lets:bool -> ('a,'b,'c) Context.Rel.pt -> pat list (** Includes lets by default *) val pat_vars_list : int -> pat list val intset_of_list : Int.Set.elt list -> Int.Set.t val split_context : int -> 'a list -> 'a list * 'a * 'a list val split_tele : int -> rel_context -> rel_context * rel_declaration * rel_context val rels_above : 'a list -> int -> Int.Set.t val is_fix_proto : Environ.env -> Evd.evar_map -> constr -> bool val fix_rels : Environ.env -> Evd.evar_map -> rel_context -> Int.Set.t val dependencies_of_rel : with_red:bool -> env -> Evd.evar_map -> rel_context -> Int.Set.elt -> Int.Set.elt -> Int.Set.t val dependencies_of_term : with_red:bool -> env -> Evd.evar_map -> rel_context -> constr -> Int.Set.elt -> Int.Set.t val non_dependent : Evd.evar_map -> ('a * 'b * constr) list -> constr -> Int.Set.t val subst_term_in_context : Evd.evar_map -> constr -> rel_context -> rel_context val strengthen : ?full:bool -> ?abstract:bool -> env -> Evd.evar_map -> rel_context -> Int.Set.elt -> constr -> context_map * context_map (* Return a substitution and its inverse. *) (* For more flexibility, [rels] is a set of indices which are to be * moved before the variable. By default, this is everything already before * the variable. 
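   * The dependencies of [t] and the fix protos are always added to [rels].
   * The first component goes from the reordered context to the original one,
   * the second one is its inverse, and both are permutations made only of
   * [PRel] patterns.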
*) val new_strengthen : Environ.env -> Evd.evar_map -> rel_context -> int -> ?rels:Int.Set.t -> constr -> context_map * context_map val id_pats : ('a, 'b,'c) Context.Rel.pt -> pat list val id_subst : rel_context -> context_map val eq_context_nolet : env -> Evd.evar_map -> rel_context -> rel_context -> bool val check_eq_context_nolet : env -> Evd.evar_map -> context_map -> context_map -> unit val compose_subst : ?unsafe:bool -> Environ.env -> ?sigma:Evd.evar_map -> context_map -> context_map -> context_map val push_mapping_context : Evd.evar_map -> rel_declaration -> context_map -> context_map val lift_subst : Environ.env -> Evd.evar_map -> context_map -> rel_context -> context_map val single_subst : ?unsafe:bool -> env -> Evd.evar_map -> Int.Set.elt -> pat -> rel_context -> context_map val filter_def_pats : context_map -> pat list Coq-Equations-1.3.1-8.20/src/covering.ml000066400000000000000000002062161463127417400175460ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) open Util open Names open Nameops open Context open Constr open Reductionops open Pp open List open Evarutil open Termops open Equations_common open Syntax open Context_map open Splitting open EConstr open EConstr.Vars type int_data = { rec_type : rec_type; fixdecls : rel_context; flags : flags; program_mode : bool; intenv : Constrintern.internalization_env; notations : Vernacexpr.notation_declaration list } exception Conflict exception Stuck type 'a unif_result = UnifSuccess of 'a | UnifFailure | UnifStuck type unification_result = (context_map * int * constr * pat) option (* let isConstruct_app_or_Rel env sigma c = let hd, args = decompose_app sigma c in isRel sigma hd || isConstruct sigma hd *) (* let maybe_reduce_to_hnf env sigma c = if isConstruct_app_or_Rel env sigma c then c, false else let c' = Tacred.hnf_constr env sigma c in if isConstruct_app_or_Rel env sigma c' then c', true else c, false *) let decompose_rel evd c = match kind evd c with | Rel i -> Some i | _ -> None let rec unify env evd flex g x y = if is_conv_leq env evd x y then id_subst g else match decompose_rel evd x with | Some i -> if not (isRel evd y) && not (noccurn evd i y) then raise Conflict (* Occur check *) else if Int.Set.mem i flex then single_subst env evd i (PInac y) g else raise Stuck | None -> match decompose_rel evd y with | Some i -> if (* not (isRel evd x) && *)not (noccurn evd i x) then raise Conflict (* Occur check *) else if Int.Set.mem i flex then single_subst env evd i (PInac x) g else raise Stuck | None -> let (c, l) = decompose_app_list evd x and (c', l') = decompose_app_list evd y in if isConstruct evd c && isConstruct evd c' then if eq_constr evd c c' then unify_constrs env evd flex g l l' else raise Conflict else raise Stuck (* and unify env evd flex g x y = match unify_hnfs env evd flex g x y with | exception e -> let x, modified = maybe_reduce_to_hnf env evd x in let y, modified' = maybe_reduce_to_hnf env evd y in if modified || modified' then unify_hnfs env evd flex g x y else raise e | s -> s *) and unify_constrs env evd flex g l l' = match l, l' with | [], [] -> id_subst g | hd :: tl, hd' :: tl' -> (try let hdunif = unify env evd flex g hd hd' in let { src_ctx = 
d; map_inst = s } = hdunif in let specrest = List.map (specialize_constr evd s) in let tl = specrest tl and tl' = specrest tl' in let tlunif = unify_constrs env evd flex d tl tl' in compose_subst env ~sigma:evd tlunif hdunif with Stuck -> let tlunif = unify_constrs env evd flex g tl tl' in let spec = specialize_constr evd (tlunif.map_inst) in let hd = spec hd and hd' = spec hd' in let hdunif = unify env evd flex (tlunif.src_ctx) hd hd' in compose_subst env ~sigma:evd hdunif tlunif) | _, _ -> raise Conflict let flexible pats gamma = let rec aux (k,flex) pats decls = match decls, pats with | Context.Rel.Declaration.LocalAssum _ :: decls, pat :: pats -> (match pat with | PInac _ -> aux (succ k, Int.Set.add k flex) pats decls | p -> aux (succ k, flex) pats decls) | _ :: decls, pats -> aux (succ k, flex) pats decls | [], [] -> flex | _ -> assert false in aux (1, Int.Set.empty) pats gamma let rec accessible = function | PRel i -> Int.Set.singleton i | PCstr (c, l) -> accessibles l | PInac _ | PHide _ -> Int.Set.empty and accessibles l = fold_left (fun acc p -> Int.Set.union acc (accessible p)) Int.Set.empty l let hidden = function PHide _ -> true | _ -> false type match_subst = ((Loc.t option * identifier * provenance) * pat) list * (Glob_term.glob_constr * pat) list * (user_pat_loc * constr) list * ((Loc.t option * pat) list) let rec match_pattern env sigma p c = match DAst.get p, c with | PUVar (i,gen), (PCstr _ | PRel _ | PHide _) -> [DAst.with_loc_val (fun ?loc _ -> (loc, i, gen)) p, c], [], [], [] | PUCstr (c, i, pl), PCstr ((c',u), pl') -> if Environ.QConstruct.equal env c c' then let params, args = List.chop i pl' in match_patterns env sigma pl args else raise Conflict | PUInac t, t' -> [], [t, t'], [], [] | PUVar (i, gen), PInac t when isRel sigma t -> [DAst.with_loc_val (fun ?loc _ -> (loc, i, gen)) p, c], [], [], [] | _, PInac t -> [], [], [p, t], [] | PUEmpty, _ -> [], [], [], [DAst.with_loc_val (fun ?loc _ -> (loc, c)) p] | _, _ -> raise Stuck and match_patterns env sigma pl l = match pl, l with | [], [] -> [], [], [], [] | hd :: tl, hd' :: tl' -> let l = try Some (match_pattern env sigma hd hd') with Stuck -> None in let l' = try Some (match_patterns env sigma tl tl') with Stuck -> None in (match l, l' with | Some (l, li, ri, ei), Some (l', li', ri', ei') -> l @ l', li @ li', ri @ ri', ei @ ei' | _, _ -> raise Stuck) | _ -> raise Conflict open Constrintern let matches env sigma (p : user_pats) subst = try let p' = filter (fun x -> not (hidden x)) (rev subst.map_inst) in UnifSuccess (match_patterns env sigma p p') with Conflict -> UnifFailure | Stuck -> UnifStuck let rec match_user_pattern env p c = match p, DAst.get c with | PRel i, t -> [i, t], [] | PCstr ((c',_), pl'), PUCstr (c, i, pl) -> if Environ.QConstruct.equal env c c' then let params, args = List.chop i pl' in match_user_patterns env args pl else raise Conflict | PCstr _, PUVar (n,gen) -> [], [n, p] | PInac _, _ -> [], [] | _, _ -> raise Stuck and match_user_patterns env pl l = match pl, l with | [], [] -> [], [] | hd :: tl, hd' :: tl' -> let l = try Some (match_user_pattern env hd hd') with Stuck -> None in let l' = try Some (match_user_patterns env tl tl') with Stuck -> None in (match l, l' with | Some (l1, l2), Some (l1', l2') -> l1 @ l1', l2 @ l2' | _, _ -> raise Stuck) | _ -> raise Conflict let matches_user env subst (p : user_pats) = try UnifSuccess (match_user_patterns env (filter (fun x -> not (hidden x)) (rev subst.map_inst)) p) with Conflict -> UnifFailure | Stuck -> UnifStuck let refine_arg idx ctx = let before, 
after = List.chop idx ctx in let lenafter = List.length after in let lets_in_ctx = List.count (fun x -> Context.Rel.Declaration.is_local_def x) after in lenafter, lenafter - lets_in_ctx let adjust_sign_arity env evars p clauses = let max_args = match clauses with | [] -> Context.Rel.nhyps p.program_sign | _ -> List.fold_left (fun acc (Pre_clause (_, lhs, rhs)) -> let len = List.length lhs in max acc len) 0 clauses in let fullty = it_mkProd_or_subst env evars p.program_arity p.program_sign in let evars, sign, ty = let rec aux evars args sign ty = match args with | 0 -> evars, sign, ty | n -> match EConstr.kind evars (whd_all (push_rel_context sign env) evars ty) with | Prod (na, t, b) -> aux evars (n - 1) (Context.Rel.Declaration.LocalAssum (na, t) :: sign) b | Evar e -> let evars', t = Evardefine.define_evar_as_product env evars e in aux evars' args sign t | _ -> user_err_loc (None, str "Too many patterns in clauses for this type") in aux evars max_args [] fullty in let check_clause (Pre_clause (loc, lhs, rhs)) = if List.length lhs < max_args then user_err_loc (loc, str "This clause has not enough arguments") else () in List.iter check_clause clauses; let sign = do_renamings env evars sign in let p = { p with program_sign = sign; program_arity = ty } in evars, p let lets_of_ctx env ctx evars s = let envctx = push_rel_context ctx env in let ctxs, pats, varsubst, len, ids = fold_left (fun (ctx', cs, varsubst, k, ids) (id, pat) -> let c = pat_constr pat in match pat with | PRel i -> (ctx', cs, (i, id) :: varsubst, k, Id.Set.add id ids) | _ -> let ty = e_type_of envctx evars c in (make_def (nameR id) (Some (lift k c)) (lift k ty) :: ctx', (c :: cs), varsubst, succ k, Id.Set.add id ids)) ([],[],[],0,Id.Set.empty) s in let _, _, ctx' = List.fold_right (fun decl (ids, i, ctx') -> let (n, b, t) = to_tuple decl in try ids, pred i, (make_def (nameR (List.assoc i varsubst)) b t :: ctx') with Not_found -> let id' = Namegen.next_name_away n.Context.binder_name ids in Id.Set.add id' ids, pred i, (make_def (nameR id') b t :: ctx')) ctx (ids, List.length ctx, []) in pats, ctxs, ctx' let env_of_rhs evars ctx env s lets = let envctx = push_rel_context ctx env in let patslets, letslen = fold_right (fun decl (acc, len) -> let (_, b, _) = to_tuple decl in (lift (-len) (Option.get b) :: acc, succ len)) lets ([], 0) in let pats, ctx, len = let (pats, x, y) = lets_of_ctx env (lets @ ctx) evars (List.map (fun (id, pat) -> id, lift_pat letslen pat) s) in pats, x @ y, List.length x in let pats = List.map (lift (-letslen)) pats @ patslets in ctx, envctx, len + letslen, pats (* let _rename_prob_subst env ctx s = * let _avoid = List.fold_left (fun avoid decl -> * match get_name decl with * | Anonymous -> avoid * | Name id -> Id.Set.add id avoid) Id.Set.empty ctx * in * let varsubst, rest = * fold_left (fun (varsubst, rest) ((id, gen), pat) -> * match pat with * | PRel i when gen = false -> ((i, id) :: varsubst, rest) * | _ -> (varsubst, (id, pat) :: rest)) * ([], []) s * in * let ctx' = * List.fold_left_i (fun i acc decl -> * try let id = List.assoc i varsubst in * Context.Rel.Declaration.set_name (Name id) decl :: acc * with Not_found -> decl :: acc) * 1 [] ctx * in * (List.rev ctx', id_pats ctx, ctx) *) let is_wf_ref id rec_type = let aux = function | Some (Logical (nargs, (_, id'))) -> if Id.equal id id' then Some nargs else None | _ -> None in CList.find_map_exn aux rec_type let add_wfrec_implicits rec_type c = let open Glob_term in if has_logical rec_type then let rec aux c = let maprec a = 
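          (* Apply [aux] to the immediate subterms of [a], left to right. *)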
Glob_ops.map_glob_constr_left_to_right aux a in let mapargs ts = List.map aux ts in DAst.with_loc_val (fun ?loc g -> match g with | GApp (fn, args) -> DAst.with_loc_val (fun ?loc gfn -> match gfn with | GVar fid -> (match is_wf_ref fid rec_type with | exception Not_found -> maprec c | nargs -> let kind = Evar_kinds.{ qm_obligation = Define false; qm_name = Anonymous; qm_record_field = None } in let newarg = GHole (GQuestionMark kind) in let newarg = DAst.make ?loc newarg in let before, after = List.chop nargs (mapargs args) in let args' = List.append before (newarg :: after) in DAst.make ?loc (GApp (fn, args'))) | _ -> maprec c) fn | _ -> maprec c) c in aux c else c let interp_constr_evars_impls env sigma data expected_type c = let c = add_wfrec_implicits data.rec_type c in let imps = Implicit_quantifiers.implicits_of_glob_constr ~with_products:(expected_type == Pretyping.IsType) c in let flags = Pretyping.{ all_no_fail_flags with program_mode = data.program_mode } in let sigma, c = Pretyping.understand_tcc ~flags env sigma ~expected_type c in sigma, (c, imps) let interp_glob_constr_evars_impls env sigma ctx data expected_type c = let sigma, (c, _) = interp_constr_evars_impls env sigma data expected_type c in sigma, c let expected_type = function | Some ty -> Pretyping.OfType ty | None -> Pretyping.WithoutTypeConstraint let interp_program_body env sigma ctx data body ty = match body with | ConstrExpr c -> let env = push_rel_context ctx env in let expected_type = expected_type ty in let c = intern_gen expected_type ~impls:data.intenv env sigma c in interp_glob_constr_evars_impls env sigma ctx data expected_type c | GlobConstr c -> let env = push_rel_context ctx env in interp_glob_constr_evars_impls env sigma ctx data (expected_type ty) c | Constr c -> let env = Environ.reset_with_named_context (Environ.named_context_val env) env in let env = push_rel_context ctx env in let subst = List.fold_left_i (fun i subst decl -> match get_name decl with | Name na -> (na, mkRel i) :: subst | _ -> subst) 1 [] ctx in let c = Vars.replace_vars sigma subst c in let sigma = match ty with | None -> fst (Typing.type_of env sigma c) | Some ty -> Typing.check env sigma c ty in sigma, c let interp_program_body env evars ctx data c ty = let notations = List.map Metasyntax.prepare_where_notation data.notations in Metasyntax.with_syntax_protection (fun () -> let ctx' = List.map EConstr.Unsafe.to_rel_decl ctx in List.iter (Metasyntax.set_notation_for_interpretation (Environ.push_rel_context ctx' env) data.intenv) notations; interp_program_body env evars ctx data c ty) () (* try with PretypeError (env, evm, e) -> * user_err_loc (dummy_loc, * str "Typechecking failed: " ++ Himsg.explain_pretype_error env evm e) *) (* | e -> * user_err_loc (dummy_loc, * str "Unexpected exception raised while typing body: " ++ * (match c with ConstrExpr c -> Ppconstr.pr_constr_expr c * | Constr c -> Printer.pr_econstr_env env evars c) ++ * str " in environment " ++ Printer.pr_rel_context_of (push_rel_context ctx env) evars ++ * str ":" ++ * str (Printexc.to_string e)) *) let interp_constr_in_rhs_env env evars data (ctx, envctx, liftn, subst) substlift c ty = match ty with | None -> let sigma, c = interp_program_body env !evars ctx data c None in let c' = substnl subst substlift c in let sigma = Typeclasses.resolve_typeclasses ~filter:Typeclasses.all_evars env sigma in let c' = nf_evar sigma c' in evars := sigma; c', Retyping.get_type_of envctx sigma c' | Some ty -> let ty' = lift liftn ty in let ty' = nf_evar !evars ty' in let sigma, c 
= interp_program_body env !evars ctx data c (Some ty') in evars := Typeclasses.resolve_typeclasses ~filter:Typeclasses.all_evars env sigma; let c' = nf_evar !evars (substnl subst substlift c) in c', nf_evar !evars (substnl subst substlift ty') let interp_constr_in_rhs env ctx evars data ty s lets c = try let env' = env_of_rhs evars ctx env s lets in interp_constr_in_rhs_env env evars data env' 0 c ty with Evarsolve.IllTypedInstance _ -> anomaly (str"Ill-typed instance in interp_constr_in_rhs") let unify_type env evars before id ty after = try let next_ident_away = let ctxids = ref (ids_of_rel_context before @ ids_of_rel_context after) in let avoid = fun id -> is_global id || List.mem id !ctxids in function id -> let id' = Namegen.next_ident_away_from id avoid in ctxids := id' :: !ctxids; id' in let envb = push_rel_context before env in let ty = nf_evar !evars ty in let (indf, args) = find_rectype envb !evars ty in let ind, params = dest_ind_family indf in let vs = List.map (Tacred.whd_simpl envb !evars) args in let params = List.map (Tacred.whd_simpl envb !evars) params in let cstrs = Inductiveops.type_of_constructors envb ind in let cstrs = Array.mapi (fun i ty -> let ty = prod_applist !evars ty params in let ctx, ty = decompose_prod_decls !evars ty in let ctx = fold_right (fun decl acc -> let open Context.Rel.Declaration in let id = match get_name decl with | Name id -> next_ident_away id | Anonymous -> let x = Namegen.id_of_name_using_hdchar (push_rel_context acc envb) !evars (get_type decl) Anonymous in next_ident_away x in (set_name (Name id) decl :: acc)) ctx [] in let env' = push_rel_context ctx env in let (indf, args) = find_rectype env' !evars ty in let ind, params = dest_ind_family indf in let realargs = rels_of_ctx ~with_lets:false ctx in let constr = applist (mkConstructUi (ind, succ i), params @ realargs) in let q = inaccs_of_constrs realargs in let constrpat = PCstr (((fst ind, succ i), snd ind), inaccs_of_constrs params @ patvars_of_ctx ~with_lets:false ctx) in env', ctx, constr, constrpat, q, args) cstrs in let unify vs = Array.map (fun (env', ctxc, c, cpat, q, us) -> let _beforelen = length before and ctxclen = length ctxc in let fullctx = ctxc @ before in try let vs' = List.map (lift ctxclen) vs in let p1 = lift_pats ctxclen (inaccs_of_constrs (rels_of_ctx ~with_lets:false before)) in let flex = flexible (p1 @ q) fullctx in let env = push_rel_context fullctx env in equations_debug Pp.(fun () -> str"Unifying " ++ prlist_with_sep spc (Printer.pr_econstr_env env !evars) vs' ++ spc () ++ str"and" ++ spc () ++ prlist_with_sep spc (Printer.pr_econstr_env env !evars) us); let s = unify_constrs env !evars flex fullctx vs' us in equations_debug Pp.(fun () -> str"Unification success"); UnifSuccess (s, ctxclen, c, cpat) with Conflict -> equations_debug Pp.(fun () -> str"Unification raised a conflict"); UnifFailure | Stuck -> equations_debug Pp.(fun () -> str"Unification got stuck"); UnifStuck) cstrs in let res = unify vs in if Array.exists (fun x -> x == UnifStuck) res then let vs' = List.map (Tacred.hnf_constr envb !evars) args in let res' = unify vs' in if not (Array.exists (fun x -> x == UnifStuck) res') then let newty = applist (mkIndU ind, params @ vs') in Some (newty, res') else let newty = applist (mkIndU ind, params @ vs) in Some (newty, res) else let newty = applist (mkIndU ind, params @ vs) in Some (newty, res) with Not_found -> (* not an inductive type *) equations_debug Pp.(fun () -> str"Unification raised Not_found"); None let blockers env curpats (subst : context_map) 
= let rec pattern_blockers p c = match DAst.get p, c with | PUVar (i, _), t -> [] | PUCstr (c, i, pl), PCstr ((c',_), pl') -> if Environ.QConstruct.equal env c c' then patterns_blockers pl (snd (List.chop i pl')) else [] | PUInac _, _ -> [] | _, PRel i -> [i] | _, _ -> [] and patterns_blockers pl l = match pl, l with | [], [] -> [] | hd :: tl, PHide _ :: tl' -> patterns_blockers pl tl' | hd :: tl, hd' :: tl' -> (pattern_blockers hd hd') @ (patterns_blockers tl tl') | _ -> [] in patterns_blockers curpats (rev subst.map_inst) let subst_matches_constr sigma k s c = let rec aux depth c = match kind sigma c with | Rel n -> let k = n - depth in if k >= 0 then try lift depth (assoc k s) with Not_found -> c else c | _ -> map_with_binders sigma succ aux depth c in aux k c let is_all_variables (delta, pats, gamma) = List.for_all (function PInac _ | PHide _ -> true | PRel _ -> true | PCstr _ -> false) pats type 'a split_var_result = | Splitted of 'a | CannotSplit of Names.Name.t * rel_context * constr let split_var (env,evars) var delta = (* delta = before; id; after |- curpats : gamma *) let before, decl, after = split_tele (pred var) delta in let (id, b, ty) = to_tuple decl in let unify = unify_type env evars before id ty after in let branch = function | UnifFailure -> None | UnifStuck -> assert false | UnifSuccess (map, ctxlen, cstr, cstrpat) -> let { src_ctx = ctx'; map_inst = s; tgt_ctx = ctx } = map in (* ctx' |- s : before ; ctxc *) (* ctx' |- cpat : ty *) if !debug then Feedback.msg_debug Pp.(str"cpat: " ++ pr_pat env !evars cstrpat); let cpat = specialize !evars s cstrpat in let ctx' = do_renamings env !evars ctx' in (* ctx' |- spat : before ; id *) let spat = let ctxcsubst, beforesubst = List.chop ctxlen s in let map = { src_ctx = ctx'; map_inst = cpat :: beforesubst; tgt_ctx = decl :: before } in check_ctx_map env !evars map in (* ctx' ; after |- safter : before ; id ; after = delta *) Some (lift_subst env !evars spat after) in match unify with | None -> None | Some (newty, unify) -> (* Some constructor's type is not refined enough to match ty *) if Array.exists (fun x -> x == UnifStuck) unify then Some (CannotSplit (id.binder_name, before, newty)) else let newdelta = after @ (make_def id b newty :: before) in Some (Splitted (var, do_renamings env !evars newdelta, Array.map branch unify)) let prove_empty env delta v = match split_var env v delta with | None -> None | Some (CannotSplit _) -> None | Some (Splitted (v, i, r)) -> if CArray.for_all (fun x -> x == None) r then Some (v, i, CArray.map (fun _ -> None) r) else None let find_empty env delta = let r = List.map_filter (fun v -> prove_empty env delta v) (CList.init (List.length delta) succ) in match r with x :: _ -> Some x | _ -> None (* The list of variables appearing in a list of patterns, ordered increasingly. 
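   Each variable is paired with a boolean recording whether it occurred as a
   [PHide]; inaccessible subpatterns contribute no variables.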
*) let variables_of_pats pats = let rec aux acc pats = List.fold_right (fun p acc -> match p with | PRel i -> (i, false) :: acc | PCstr (c, ps) -> aux [] (rev ps) @ acc | PInac c -> acc | PHide i -> (i, true) :: acc) pats acc in List.sort (fun (i, _) (i', _) -> i - i') (aux [] pats) let pats_of_variables = List.map (fun (i, hide) -> if hide then PHide i else PRel i) let lift_rel_declaration k decl = map_rel_declaration (lift k) decl let lookup_named_i id = let rec aux i = function | decl :: _ when Id.equal id (get_id decl) -> i, decl | _ :: sign -> aux (succ i) sign | [] -> raise Not_found in aux 1 let instance_of_pats env evars (ctx : rel_context) (pats : (int * bool) list) = let subst, _, nctx = named_of_rel_context (fun () -> raise (Invalid_argument "named_of_rel_context")) ctx in let subst = List.map (destVar evars) subst in let ctx' = List.fold_right (fun (i, hide) ctx' -> let decl = let id = List.nth subst (pred i) in let i, decl = lookup_named_i id nctx in decl in decl :: ctx') pats [] in let pats' = List.map_i (fun i id -> let i', _ = lookup_named_i id ctx' in CList.find_map_exn (fun (i'', hide) -> if i'' == i then Some (if hide then PHide i' else PRel i') else None) pats) 1 subst in let pats'' = List.map_i (fun i decl -> let (id, b, t) = to_named_tuple decl in let i', _ = lookup_named_i id.binder_name nctx in CList.find_map_exn (fun (i'', hide) -> if i'' == i' then Some (if hide then PHide i else PRel i) else None) pats) 1 ctx' in fst (rel_of_named_context evars ctx'), pats', pats'' let push_rel_context_eos ctx env evars = if named_context env <> [] then let env' = push_named (make_named_def (annotR coq_end_of_section_id) (Some (get_efresh coq_the_end_of_the_section evars)) (get_efresh coq_end_of_section evars)) env in push_rel_context ctx env' else push_rel_context ctx env let split_at_eos env sigma ctx = List.split_when (fun decl -> is_lglobal env sigma coq_end_of_section (get_named_type decl)) ctx let pr_problem p env sigma { src_ctx = delta; map_inst = patcs }= let env' = push_rel_context delta env in let ctx = pr_context env sigma delta in Id.print p.program_id ++ str" " ++ pr_pats env' sigma patcs ++ (if List.is_empty delta then ctx else fnl () ++ str "In context: " ++ fnl () ++ ctx) let rel_id ctx n = Nameops.Name.get_id (pi1 (List.nth ctx (pred n))) let push_named_context = List.fold_right push_named let check_unused_clauses env sigma cl = let unused = List.filter (fun (_, (_, used)) -> used = 0) cl in match unused with | (Pre_clause (loc, lhs, _) as cl, _) :: cls -> user_err_loc (loc, str "Unused clause " ++ pr_preclause env sigma cl) | [] -> () let compute_rec_type context programs = if List.for_all (fun p -> match p.Syntax.program_rec with | None -> true | Some _ -> false) programs then None :: context else if List.for_all (fun p -> match p.Syntax.program_rec with | Some (Structural _) | None -> true | _ -> false) programs then let recids = List.map (fun p -> p.program_id, match p.program_rec with | Some (Structural ann) -> ann | None -> NestedNonRec | _ -> assert false) programs in Some (Guarded recids) :: context else begin if List.length programs != 1 then user_err_loc (None, Pp.str "Mutual well-founded definitions are not supported"); let p = List.hd programs in match p.program_rec with | Some (WellFounded (_, _, id)) -> let nargs = Context.Rel.nhyps p.program_sign in Some (Logical (nargs, id)) :: context | _ -> assert false end let print_program_info env sigma programs = let open Pp in if !Equations_common.debug then Feedback.msg_debug (str "Programs: " ++ 
prlist_with_sep fnl (pr_program_info env sigma) programs) let make_fix_proto env sigma ty = let relevance = Retyping.relevance_of_type env sigma ty in let _, fixproto = get_fresh sigma coq_fix_proto in let r = Retyping.relevance_of_term env sigma fixproto in let na = make_annot Anonymous r in relevance, mkLetIn (na, fixproto, Retyping.get_type_of env sigma fixproto, lift 1 ty) let compute_fixdecls_data env evd ?data programs = let protos = List.map (fun p -> let ty = it_mkProd_or_LetIn p.program_arity p.program_sign in (p.program_id, ty, p.program_impls)) programs in let names, tys, impls = List.split3 protos in let data = Constrintern.compute_internalization_env ?impls:data env !evd Constrintern.Recursive names tys impls in let fixprots = List.map (fun ty -> make_fix_proto env !evd ty) tys in let fixdecls = List.map2 (fun i (relevance, fixprot) -> of_tuple (make_annot (Name i) relevance, None, fixprot)) names fixprots in data, List.rev fixdecls, fixprots let interp_arity env evd ~poly ~is_rec ~with_evars notations (((loc,i),udecl,rec_annot,l,t,by),clauses as ieqs) = let ienv, ((env', sign), impls) = Equations_common.evd_comb1 (interp_context_evars env) evd l in let (arity, impls') = let ty = match t with | Some ty -> ty | None -> CAst.make ?loc (Constrexpr.CHole None) in Equations_common.evd_comb1 (interp_type_evars_impls env' ?impls:None) evd ty in let impls = impls @ impls' in let sign = nf_rel_context_evar ( !evd) sign in let arity = nf_evar ( !evd) arity in let interp_reca k i = match k with | None | Some Syntax.Mutual -> MutualOn i | Some Nested -> NestedOn i in let rec_annot = match by with | None -> (if is_rec then if rec_annot = Some Syntax.Nested && not (is_recursive i ([ieqs], notations)) then (* Nested but not recursive in in its own body *) Some (Structural NestedNonRec) else Some (Structural (interp_reca rec_annot None)) else None) | Some (Structural lid) -> (match lid with | Some lid -> (try let k, _, _ = lookup_rel_id (snd lid) sign in Some (Structural (interp_reca rec_annot (Some (List.length sign - k, Some lid)))) with Not_found -> user_err_loc (fst lid, Pp.(str"No argument named " ++ Id.print (snd lid) ++ str" found"))) | None -> Some (Structural (interp_reca rec_annot None))) | Some (WellFounded (c, r)) -> Some (WellFounded (c, r)) in let body = it_mkLambda_or_LetIn arity sign in let _ = if not with_evars then Pretyping.check_evars env !evd body in let program_orig_type = it_mkProd_or_LetIn arity sign in let program_sort = let u = Retyping.get_sort_of env !evd program_orig_type in let sigma, sortl, sortu = nonalgebraic_universe_level_of_universe env !evd u in evd := sigma; ESorts.kind sigma sortu in let program_implicits = Impargs.compute_implicits_with_manual env !evd program_orig_type false impls in let () = evd := Evd.minimize_universes !evd in match rec_annot with | None -> { program_loc = loc; program_id = i; program_orig_type; program_sort; program_sign = sign; program_arity = arity; program_rec = None; program_impls = impls; program_implicits } | Some (Structural ann) -> { program_loc = loc; program_id = i; program_orig_type; program_sort; program_sign = sign; program_arity = arity; program_rec = Some (Structural ann); program_impls = impls; program_implicits } | Some (WellFounded (c, r)) -> let compinfo = (loc, i) in { program_loc = loc; program_id = i; program_orig_type; program_sort; program_sign = sign; program_arity = arity; program_rec = Some (WellFounded (c, r, compinfo)); program_impls = impls; program_implicits } let recursive_patterns env progid 
rec_info = match rec_info with | Some (Guarded l) :: _ -> let addpat (id, k) = match k with | NestedNonRec when Id.equal id progid -> None | _ -> Some (DAst.make (PUVar (id, User))) in let structpats = List.map_filter addpat l in structpats | _ -> [] let destPRel = function PRel i -> i | _ -> assert false let pats_of_sign sign = List.rev_map (fun decl -> DAst.make (PUVar (Name.get_id (Context.Rel.Declaration.get_name decl), Implicit))) sign let abstract_term_in_context env evars idx t map = let before, after = CList.chop (pred idx) map.src_ctx in let before' = subst_term_in_context evars (lift (- pred idx) t) before in { src_ctx = before' @ after; map_inst = map.map_inst; tgt_ctx = map.tgt_ctx } let wf_fix_constr env evars sign arity sort carrier cterm crel = let sigma, tele, telety = Sigma_types.telescope_of_context env !evars sign in let () = evars := sigma in let concl = it_mkLambda_or_LetIn arity sign in let crel = mkapp env evars logic_tele_measure [| tele; carrier; cterm; crel |] in let wfty = mkapp env evars logic_wellfounded_class [| telety; crel |] in let sigma, wf = new_evar env !evars wfty in let sigma = Typeclasses.resolve_typeclasses env sigma in let () = evars := sigma in let fix = (* let _, tyrelu = destConst sigma (fst (decompose_app sigma wfty)) in *) (* if not (EInstance.is_empty tyrelu) then * let sigma, inst, glu = Equations_common.instance_of env !evars ~argu:tyrelu sort in * let () = evars := sigma in * mkApp (EConstr.mkRef (Lazy.force logic_tele_fix, inst), [| tele; crel; wf; concl|]) * else *) mkapp env evars logic_tele_fix [| tele; crel; wf; concl|] in let sigma, fixty = Typing.type_of env !evars fix in let () = evars := sigma in let reds = let flags = RedFlags.betaiotazeta in let csts = let ts = TransparentState.empty in let tr_prj = Names.PRpred.add (Projection.repr (Lazy.force coq_pr1)) Names.PRpred.empty in let tr_prj = Names.PRpred.add (Projection.repr (Lazy.force coq_pr2)) tr_prj in let cst = Names.Cpred.empty in let add_ts cst t = Names.Cpred.add (Globnames.destConstRef (Lazy.force t)) cst in let tr_cst = List.fold_left add_ts cst [logic_tele_interp; logic_tele_measure; logic_tele_fix; logic_tele_MR; logic_tele_fix_functional_type; logic_tele_type_app; logic_tele_forall_type_app; logic_tele_forall_uncurry; logic_tele_forall; logic_tele_forall_pack; logic_tele_forall_unpack] in { ts with TransparentState.tr_cst; TransparentState.tr_prj } in RedFlags.red_add_transparent flags csts in let norm env = let infos = Cbv.create_cbv_infos reds ~strong:true env !evars in Cbv.cbv_norm infos in let fixty = norm env fixty in (* let prc = Printer.pr_econstr_env env !evars in * Feedback.msg_debug (str" fix ty" ++ prc fixty); *) let functional_type, concl = match kind !evars fixty with | Prod (na, fnty, concl) -> let concl = subst1 mkProp concl in fnty, concl | _ -> assert false in let fix = norm env fix in let functional_type, full_functional_type = let ctx, rest = Reductionops.whd_decompose_prod_n_assum env !evars (Context.Rel.nhyps sign) functional_type in match kind !evars (whd_all (push_rel_context ctx env) !evars rest) with | Prod (na, b, concl) -> let ctx', rest = Reductionops.whd_decompose_prod_decls (push_rel_context ctx env) !evars b in let infos = Cbv.create_cbv_infos reds ~strong:true (push_rel_context ctx env) !evars in let norm = Cbv.cbv_norm infos in let fn_type = it_mkProd_or_LetIn rest ctx' in let fn_type = norm fn_type in fn_type, it_mkProd_or_LetIn (mkProd (na, fn_type, concl)) ctx | _ -> assert false in (* let sigma, functional_evar = new_evar env !evars 
functional_type in *) (* let fix = mkApp (fix, [| functional_evar |]) in *) (* Feedback.msg_debug (str" rec definition" ++ * str" fix: " ++ prc fix ++ * str " functional type : " ++ prc functional_type ++ * str " full functional type : " ++ prc full_functional_type ++ * str " conclusion type : " ++ prc concl); *) functional_type, full_functional_type, fix let wf_fix env evars subst sign arity sort term rel = let envsign = push_rel_context sign env in let sigma, cterm = interp_constr_evars envsign !evars term in let na, carrier = let r = Retyping.relevance_of_term envsign sigma cterm in let ty = Retyping.get_type_of envsign sigma cterm in let ty = nf_all envsign sigma ty in if noccur_between sigma 1 (length sign) ty then make_annot Anonymous r, lift (- length sign) ty else user_err_loc (Constrexpr_ops.constr_loc term, str"The carrier type of the recursion order cannot depend on the arguments") in let cterm = it_mkLambda_or_LetIn cterm sign in (* let cterm = substl subst cterm in *) let sigma, rsort = Evd.fresh_sort_in_family sigma (Lazy.force Equations_common.logic_sort) in let sigma, crel = let relty = (mkProd (na, carrier, mkProd (na, lift 1 carrier, mkSort rsort))) in match rel with | Some rel -> interp_casted_constr_evars env sigma rel relty | None -> new_evar env sigma relty in let () = evars := sigma in let res = wf_fix_constr env evars sign arity sort carrier cterm crel in nf_evar !evars cterm, nf_evar !evars crel, res let compute_rec_data env evars data lets subst p = match p.Syntax.program_rec with | Some (Structural ann) -> let reclen, sign = match ann with | NestedNonRec -> (* Actually the definition is not self-recursive *) let fixdecls = List.filter (fun decl -> let na = Context.Rel.Declaration.get_name decl in let id = Nameops.Name.get_id na in not (Id.equal id p.program_id)) data.fixdecls in let len = length fixdecls in len, lift_rel_context len p.program_sign @ fixdecls | _ -> let len = length data.fixdecls in len, lift_rel_context len p.program_sign @ data.fixdecls in let extpats = recursive_patterns env p.program_id data.rec_type in let sign, ctxpats = let sign = sign @ lets in let extpats' = pats_of_sign lets in sign, extpats' @ extpats in let rec_node = let info = { struct_rec_arg = ann; struct_rec_protos = List.length extpats } in StructRec info in let rec_info = { rec_sign = sign; rec_lets = lets; rec_prob = id_subst sign; rec_arity = liftn reclen (succ (length p.program_sign)) p.program_arity; rec_args = List.length p.program_sign; rec_node } in p, rec_info.rec_prob, rec_info.rec_arity, ctxpats, (Some rec_info) | Some (WellFounded (term, rel, _)) -> let arg, rel, (functional_type, _full_functional_type, fix) = wf_fix env evars subst p.program_sign p.program_arity p.program_sort term rel in let ctxpats = pats_of_sign lets in let rec_args = List.length p.program_sign in let decl = make_def (nameR p.program_id) None functional_type in let rec_sign = p.program_sign @ lets in let lhs = decl :: rec_sign in let pats = PHide 1 :: lift_pats 1 (id_pats rec_sign) in let rec_prob = { src_ctx = lhs; map_inst = pats; tgt_ctx = lhs } in let rec_node = { wf_rec_term = fix; wf_rec_functional = None; wf_rec_arg = arg; wf_rec_rel = rel} in let rec_info = { rec_sign; rec_lets = lets; rec_arity = lift 1 p.program_arity; rec_prob; rec_args; rec_node = WfRec rec_node } in let p = { p with program_sign = p.program_sign @ lets } in p, rec_info.rec_prob, rec_info.rec_arity, ctxpats, Some rec_info | _ -> let p = { p with program_sign = p.program_sign @ lets } in p, id_subst p.program_sign, 
p.program_arity, pats_of_sign lets, None exception UnfaithfulSplit of (Loc.t option * Pp.t) let rename_domain env sigma bindings map = let { src_ctx = ctx; map_inst = p; tgt_ctx = ctx' } = map in let fn rel decl = match Int.Map.find rel bindings with | exception Not_found -> decl | (id, _, gen) -> if gen != Generated then Context.Rel.Declaration.set_name (Name id) decl else decl in let rctx = CList.map_i fn 1 ctx in mk_ctx_map env sigma rctx p ctx' let rec eq_pat_mod_inacc env sigma p1 p2 = match p1, p2 with | (PRel i | PHide i), (PRel i' | PHide i') -> Int.equal i i' | PCstr (c, pl), PCstr (c', pl') -> Environ.QConstruct.equal env (fst c) (fst c') && List.for_all2 (eq_pat_mod_inacc env sigma) pl pl' | PRel i, PInac c when isRel sigma c -> Int.equal i (destRel sigma c) | PInac c, PRel i when isRel sigma c -> Int.equal i (destRel sigma c) | PInac c, PInac c' -> EConstr.eq_constr sigma c c' | _, _ -> false let loc_before loc loc' = match loc, loc' with | None, Some _ -> true | Some _, None -> false | None, None -> true | Some loc, Some loc' -> let start, end_ = Loc.unloc loc in let start', end' = Loc.unloc loc' in end_ < end' || (end_ = end' && start <= start') let rec covering_aux env evars p data prev (clauses : (pre_clause * (int * int)) list) path prob extpats lets ty = let { src_ctx = ctx; map_inst = pats; tgt_ctx = ctx' } = prob in if !Equations_common.debug then Feedback.msg_debug Pp.(str"Launching covering on "++ pr_preclauses env !evars (List.map fst clauses) ++ str " with problem " ++ pr_problem p env !evars prob ++ str " extpats " ++ pr_user_pats env !evars extpats); match clauses with | (Pre_clause (loc, lhs, rhs), (idx, cnt) as clause) :: clauses' -> if !Equations_common.debug then Feedback.msg_debug (str "Matching " ++ pr_user_pats env !evars (extpats @ lhs) ++ str " with " ++ pr_problem p env !evars prob); (match matches env !evars (extpats @ lhs) prob with | UnifSuccess (s, uacc, acc, empties) -> if !Equations_common.debug then Feedback.msg_debug (str "succeeded with substitution: " ++ prlist_with_sep spc (fun ((loc, x, prov), pat) -> hov 2 (pr_provenance ~with_gen:true (Id.print x) prov ++ str" = " ++ pr_pat env !evars pat ++ spc ())) s); let _check_aliases = let check acc ((loc, x, gen), pat) = match Id.Map.find x acc with | exception Not_found -> Id.Map.add x (loc, gen, pat) acc | (loc', gen', pat') -> if eq_pat_mod_inacc env !evars pat pat' then if gen == Generated then Id.Map.add x (loc', gen', pat') acc else Id.Map.add x (loc, gen, pat) acc else if data.flags.allow_aliases then acc else let env = push_rel_context prob.src_ctx env in let loc, pat, pat' = if loc_before loc loc' then loc', pat, pat' else loc, pat', pat in user_err_loc (loc, Pp.(str "The pattern " ++ Id.print x ++ str " would shadow a variable." 
++ fnl () ++ str "The full patterns are: " ++ pr_user_pats ~with_gen:false env !evars (extpats @ lhs) ++ fnl () ++ str"After interpretation, in context " ++ spc () ++ pr_context env !evars (rel_context env) ++ spc () ++ Id.print x ++ str " refers to " ++ pr_pat env !evars pat ++ str " while the pattern refers to " ++ pr_pat env !evars pat')) in ignore (List.fold_left check Id.Map.empty s) in let bindings, s = CList.fold_left (fun (bindings, s) ((loc, x, gen), y) -> let pat = match y with | PRel i -> Some (i, false) | PInac i when isRel !evars i -> Some (destRel !evars i, true) | _ -> None in match pat with | Some (i, inacc) -> begin match Int.Map.find i bindings with | exception Not_found -> (Int.Map.add i (x, inacc, gen) bindings, s) | (x', inacc', gen') -> begin match gen, gen' with | Generated, (User | Implicit) -> (Int.Map.add i (x', inacc && inacc', gen') bindings, s) | Generated, Generated -> (Int.Map.add i (x, inacc && inacc', gen) bindings, s) | _, Generated -> (Int.Map.add i (x, inacc && inacc', gen) bindings, s) | _, _ -> if not (Id.equal x x') then (* We allow aliasing of implicit variable names resulting from forcing a pattern *) if not data.flags.allow_aliases && (gen == User && gen' == User) then user_err_loc (loc, Pp.(str "The pattern " ++ Id.print x ++ str " should be equal to " ++ Id.print x' ++ str", it is forced by typing")) else (bindings, (x, y) :: s) else (bindings, s) end end | None -> (bindings, (x, y) :: s)) (Int.Map.empty, []) s (* @ List.filter_map (fun (x, y) -> match DAst.get x with | PUVar (id, gen) -> Some ((DAst.with_loc_val (fun ?loc _ -> loc) x, id, gen), PInac y) | _ -> None) acc) *) in if !Equations_common.debug then Feedback.msg_debug (str "Renaming problem: " ++ hov 2 (pr_context_map env !evars prob) ++ str " with bindings " ++ prlist_with_sep spc (fun (i, (x, inacc, gen)) -> str "Rel " ++ int i ++ str" = " ++ pr_provenance ~with_gen:true (Id.print x) gen ++ str", inacc = " ++ bool inacc) (Int.Map.bindings bindings)); let prob = rename_domain env !evars bindings prob in if !Equations_common.debug then Feedback.msg_debug (str "Renamed problem: " ++ hov 2 (pr_context_map env !evars prob)); (* let sext = List.filter_map (fun (i, (x, inacc, gen)) -> if gen then None else Some (x, if inacc then PInac (mkRel i) else PRel i)) (Int.Map.bindings bindings) in *) let s = (s, uacc, acc) in if !Equations_common.debug then Feedback.msg_debug (str "Substitution: " ++ prlist_with_sep spc (fun (x, t) -> str "var " ++ Id.print x ++ str" = " ++ pr_pat env !evars t) (pi1 s)); (* let prob = compose_subst env ~sigma:!evars renaming prob in *) let clauseid = Id.of_string ("clause_" ^ string_of_int idx ^ (if cnt = 0 then "" else "_" ^ string_of_int cnt)) in (match empties, rhs with | ([], None) -> user_err_loc (loc, (str "Empty clauses should have at least one empty pattern.")) | (_ :: _, Some _) -> user_err_loc (loc, (str "This clause has an empty pattern, it cannot have a right hand side.")) | (loc, c) :: _, None -> (match c with | PCstr _ | PInac _ | PHide _ -> user_err_loc (loc, (str "This pattern cannot be empty, it matches value " ++ fnl () ++ pr_pat env !evars c)) | PRel i -> match prove_empty (env,evars) prob.src_ctx i with | Some (i, ctx, s) -> Some (List.rev prev @ ((Pre_clause (loc, lhs, rhs),(idx, cnt+1)) :: clauses'), Compute (prob, [], ty, REmpty (i, s))) | None -> user_err_loc (loc, (str "This variable does not have empty type in current problem" ++ fnl () ++ pr_problem p env !evars prob))) | [], Some rhs -> let interp = interp_clause env evars p data prev 
clauses' (clauseid :: path) prob extpats lets ty ((loc,lhs,rhs), cnt) s in (match interp with | None -> user_err_loc (dummy_loc, str"Clause " ++ pr_preclause env !evars (Pre_clause (loc, lhs, Some rhs)) ++ str" matched but its interpretation failed") | Some s -> Some (List.rev prev @ (Pre_clause (loc,lhs,Some rhs),(idx, cnt+1)) :: clauses', s))) | UnifFailure -> if !Equations_common.debug then Feedback.msg_debug (str "failed"); covering_aux env evars p data (clause :: prev) clauses' path prob extpats lets ty | UnifStuck -> if !Equations_common.debug then Feedback.msg_debug (str "got stuck"); let blocks = blockers env (extpats @ lhs) prob in equations_debug (fun () -> str "blockers are: " ++ prlist_with_sep spc (pr_rel_name (push_rel_context prob.src_ctx env)) blocks); let rec try_split acc vars = match vars with | [] -> None | var :: vars -> equations_debug (fun () -> str "trying next blocker " ++ pr_rel_name (push_rel_context prob.src_ctx env) var); match split_var (env,evars) var prob.src_ctx with | Some (Splitted (var, newctx, s)) -> equations_debug (fun () -> str "splitting succeeded for " ++ pr_rel_name (push_rel_context prob.src_ctx env) var); let prob' = { src_ctx = newctx; map_inst = pats; tgt_ctx = ctx' } in let coverrec clauses s = covering_aux env evars p data [] clauses path (compose_subst env ~sigma:!evars s prob') extpats (specialize_rel_context !evars s.map_inst lets) (specialize_constr !evars s.map_inst ty) in (try let rec_call clauses x = match x with (* Successful split on this blocker *) | Some s -> (match coverrec clauses s with | None -> raise Not_found | Some (clauses, s) -> equations_debug (fun _ -> str "covering succeeded"); clauses, Some s) | None -> clauses, None in let clauses, rest = Array.fold_left_map rec_call (List.rev prev @ clauses) s in Some (Splitted (clauses, Split (prob', var, ty, rest))) with | Not_found -> equations_debug (fun _ -> str "covering failed to produce a splitting in one of the branches,\ trying the next one"); try_split acc vars | UnfaithfulSplit (loc, pp) -> equations_debug (fun _ -> str "covering is not faithful to user clauses, trying the next one"); try_split acc vars) | Some (CannotSplit _) as x -> equations_debug (fun () -> str "splitting failed for " ++ pr_rel_name (push_rel_context prob.src_ctx env) var); let acc = match acc with | None -> x | _ -> acc in try_split acc vars | None -> (* Not inductive *) try_split acc vars in let result = try_split None blocks in (match result with | Some (Splitted (clauses, s)) -> Some (clauses, s) | Some (CannotSplit (id, before, newty)) -> user_err_loc (loc, str"Unable to split variable " ++ Name.print id ++ str" of (reduced) type " ++ Printer.pr_econstr_env (push_rel_context before env) !evars newty ++ str" to match a user pattern."
++ fnl () ++ str "Maybe unification is stuck as it cannot refine a context/section variable.") | None -> None)) | [] -> (* Every clause failed for the problem, it's either uninhabited or the clauses are not exhaustive *) match find_empty (env,evars) prob.src_ctx with | Some (i, ctx, s) -> Some (List.rev prev @ clauses, (* Split (prob, i, ty, s)) *) Compute (prob, [], ty, REmpty (i, s))) | None -> user_err_loc (p.program_loc, (str "Non-exhaustive pattern-matching, no clause found for:" ++ fnl () ++ pr_problem p env !evars prob)) and interp_clause env evars p data prev clauses' path prob extpats lets ty ((loc,lhs,rhs), used) (s, uinnacs, innacs) = let { src_ctx = ctx; map_inst = pats; tgt_ctx = ctx' } = prob in let env' = push_rel_context_eos ctx env evars in let get_var loc i s = match assoc i s with | PRel i -> i | _ -> user_err_loc (loc, str"Unbound variable " ++ Id.print i) in let () = (* Check inaccessibles are correct *) let check_uinnac (user, t) = (* let ty = * let t = pat_constr t in * let ty = Retyping.get_type_of env !evars t in *) let userc, usercty = interp_constr_in_rhs env ctx evars data None s lets (GlobConstr user) in match t with | PInac t -> begin match Evarconv.unify env' !evars Conversion.CONV userc t with | evars' -> evars := evars' | exception Pretype_errors.PretypeError (env, sigma, e) -> DAst.with_loc_val (fun ?loc _ -> CErrors.user_err ?loc (hov 0 (str "Incompatible inaccessible pattern " ++ Printer.pr_econstr_env env' !evars userc ++ cut () ++ spc () ++ str "should be unifiable with " ++ Printer.pr_econstr_env env' !evars t ++ cut () ++ str"Unification failed with " ++ Himsg.explain_pretype_error env sigma e))) user end | _ -> let t = pat_constr t in let msg = str "Pattern " ++ Printer.pr_econstr_env env' !evars userc ++ spc () ++ str "is not inaccessible, but should refine pattern " ++ Printer.pr_econstr_env env' !evars t in DAst.with_loc_val (fun ?loc _ -> raise (UnfaithfulSplit (loc, msg))) user in let check_innac (user, forced) = DAst.with_loc_val (fun ?loc user -> (* Allow patterns not written by the user to be forced inaccessible silently *) if Option.is_empty loc then () else match user with | PUVar (i, _) -> (* If the pattern comes from a wildcard or is a variable, allow forcing inaccessibles too *) () | _ -> let ctx, envctx, liftn, subst = env_of_rhs evars ctx env s lets in let forcedsubst = substnl subst 0 forced in CErrors.user_err ?loc (str "This pattern must be inaccessible and equal to " ++ Printer.pr_econstr_env (push_rel_context ctx env) !evars forcedsubst)) user in List.iter check_uinnac uinnacs; List.iter check_innac innacs in match rhs with | Program (c,w) -> let (ctx, envctx, liftn, subst as letctx) = env_of_rhs evars ctx env s lets in let data, envctx, lets, nwheres, env', coverings, lift, subst = interp_wheres env ctx evars path data s lets letctx w in let c', ty' = interp_constr_in_rhs_env env evars data (lets, envctx, lift, subst) nwheres c (Some (Vars.lift nwheres ty)) in (* Compute the coverings using type information from the term using the where clauses *) let coverings = List.map (fun c -> Lazy.force c) coverings in let res = Compute (prob, coverings, ty', RProgram c') in Some res | Empty (loc,i) -> (match prove_empty (env, evars) prob.src_ctx (get_var loc i s) with | None -> user_err_loc (loc, str"Cannot show that " ++ Id.print i ++ str"'s type is empty") | Some (i, ctx, s) -> Some (Compute (prob, [], ty, REmpty (i, s)))) | Refine (cs, cls) -> (* The refined term and its type *) let c, cs = match cs with | [c] -> c, [] | c :: cs
-> c, cs | [] -> assert false in let cconstr, cty = interp_constr_in_rhs env ctx evars data None s lets (ConstrExpr c) in let vars = variables_of_pats pats in let newctx, pats', pats'' = instance_of_pats env !evars ctx vars in (* revctx is a variable substitution from a reordered context to the current context *) let revctx = check_ctx_map env !evars { src_ctx = newctx; map_inst = pats'; tgt_ctx = ctx } in let idref = Namegen.next_ident_away (Id.of_string "refine") (Id.Set.of_list (ids_of_rel_context newctx)) in let refterm (* in newctx *) = mapping_constr !evars revctx cconstr in let refty = mapping_constr !evars revctx cty in let decl = make_assum (nameR idref) refty in let extnewctx = decl :: newctx in (* cmap : Δ -> ctx, cty, strinv associates indexes in the strengthened context to variables in the original context. *) let ty_min_fv = let fvs = Int.Set.union (free_rels !evars refty) (free_rels !evars refterm) in match Int.Set.min_elt_opt fvs with | None -> 1 | Some m -> m (* Forces to declare the refined variable below its type and term dependencies for well-formedness *) in (* equations_debug Pp.(fun () -> str"Moving refine variable decl to: " ++ int ty_min_fv); *) let tytop, tytopinv = let before, after = List.chop (pred ty_min_fv) newctx in let newdecl' = make_assum (nameR idref) (lift (- (pred ty_min_fv)) refty) in let newctx = lift_rel_context 1 before @ newdecl' :: after in mk_ctx_map env !evars newctx (PRel ty_min_fv :: List.rev (patvars_of_ctx before) @ List.rev (lift_pats ty_min_fv (patvars_of_ctx after))) extnewctx, mk_ctx_map env !evars extnewctx (lift_pats 1 (List.rev (patvars_of_ctx before)) @ (PRel 1 :: List.rev (lift_pats ty_min_fv (patvars_of_ctx after)))) newctx in let refterm (* in extnewctx *) = lift 1 refterm in equations_debug Pp.(fun () -> str" Strengthening variable decl: " ++ pr_context_map env !evars tytop); equations_debug Pp.(fun () -> str" Strengthening variable decl inv: " ++ pr_context_map env !evars tytopinv); let cmap, strinv = new_strengthen env !evars tytop.src_ctx 1 refterm in let cmap = compose_subst env ~sigma:!evars cmap tytop in let strinv = compose_subst env ~sigma:!evars tytopinv strinv in let strinv_map = List.map_i (fun i -> function (PRel j) -> i, j | _ -> assert false) 1 strinv.map_inst in equations_debug Pp.(fun () -> str" Strengthening: " ++ pr_context_map env !evars cmap); equations_debug Pp.(fun () -> str" Strengthening inverse: " ++ pr_context_map env !evars strinv); let idx_of_refined = destPRel (CList.hd cmap.map_inst) in equations_debug Pp.(fun () -> str" idx_of_refined: " ++ int idx_of_refined); let cmap = abstract_term_in_context env !evars idx_of_refined (mapping_constr !evars cmap refterm) cmap in equations_debug Pp.(fun () -> str" Strengthening + abstraction: " ++ pr_context_map env !evars cmap); let newprob_to_lhs = let inst_refctx = set_in_ctx idx_of_refined (mapping_constr !evars cmap refterm) cmap.src_ctx in let str_to_new = { src_ctx = inst_refctx; map_inst = specialize_pats !evars cmap.map_inst (lift_pats 1 pats'); tgt_ctx = newctx; } in equations_debug Pp.(fun () -> str" Strengthening + abstraction + instantiation: " ++ pr_context_map env !evars str_to_new); compose_subst env ~sigma:!evars str_to_new revctx in equations_debug Pp.(fun () -> str" Strengthening + abstraction + instantiation: " ++ pr_context_map env !evars newprob_to_lhs); let newprob = let ctx = cmap.src_ctx in let pats = rev_map (fun c -> let idx = destRel !evars c in (* find out if idx in ctx should be hidden depending on its use in newprob_to_lhs *) if
List.exists (function PHide idx' -> idx == idx' | _ -> false) newprob_to_lhs.map_inst then PHide idx else PRel idx) (rels_of_ctx ctx) in { src_ctx = ctx; map_inst = pats; tgt_ctx = ctx } in let newty = let env' = push_rel_context extnewctx env in let refterm = Tacred.simpl env' !evars refterm in subst_term !evars refterm (Tacred.simpl env' !evars (lift 1 (mapping_constr !evars revctx ty))) in let newty = mapping_constr !evars cmap newty in (* The new problem forces a reordering of patterns under the refinement to make them match up to the context map. *) let sortinv = List.sort (fun (i, _) (i', _) -> i' - i) strinv_map in let vars' = List.rev_map snd sortinv in let cls = match cs with | [] -> cls | _ :: _ -> [Pre_clause (loc, lhs @ [DAst.make ?loc (PUVar (idref, Generated))], Some (Refine (cs, cls)))] in let rec cls' n cls = let next_unknown = let str = Id.of_string "unknown" in let i = ref (-1) in fun () -> incr i; add_suffix str (string_of_int !i) in List.map_filter (fun (Pre_clause (loc, lhs, rhs)) -> let oldpats, newpats = List.chop (List.length lhs - n) lhs in let newref, nextrefs = match newpats with hd :: tl -> hd, tl | [] -> assert false in match matches_user env prob (extpats @ oldpats) with | UnifSuccess (s, alias) -> (* A substitution from the problem variables to user patterns and from user pattern variables to patterns instantiating problem variables. *) let newlhs = List.map_filter (fun i -> if i == 1 then Some newref else if List.exists (fun (i', b) -> i' == pred i && b) vars then None else try Some (DAst.make (List.assoc (pred i) s)) with Not_found -> (* The problem is more refined than the user vars*) Some (DAst.make (PUVar (next_unknown (), Generated)))) vars' in let newrhs = match rhs with | Some (Refine (cs', cls)) -> Some (Refine (cs', cls' (List.length cs' + n) cls)) | _ -> rhs in Some (Pre_clause (loc, rev newlhs @ nextrefs, newrhs)) | _ -> CErrors.user_err ?loc (str "Non-matching clause in with subprogram:" ++ fnl () ++ int n ++ str"Problem is " ++ spc () ++ pr_context_map env !evars prob ++ fnl () ++ str"And the user patterns are: " ++ spc () ++ pr_user_pats env !evars lhs)) cls in let cls' = cls' 1 cls in let strength_app = let sortinv = List.sort (fun (i, _) (i', _) -> i' - i) strinv_map in let args = List.map (fun (i, j) (* i variable in strengthened context, j variable in the original one *) -> if j == 1 then (cconstr) else let (var, _) = List.nth vars (pred (pred j)) in mkRel var) sortinv in args in let strength_app = List.map_filter (fun t -> if isRel !evars t then let i = destRel !evars t in let decl = List.nth prob.src_ctx (pred i) in if Context.Rel.Declaration.is_local_def decl then None else Some t else Some t) strength_app in let path' = path in let lets' = let letslen = length lets in let _, ctxs, _ = lets_of_ctx env ctx evars s in let newlets = (lift_rel_context (succ letslen) ctxs) @ (lift_rel_context 1 lets) in specialize_rel_context !evars cmap.map_inst newlets in let clauses' = List.mapi (fun i x -> x, (succ i, 0)) cls' in match covering_aux env evars p data [] clauses' path' newprob [] lets' newty with | None -> errorlabstrm (str "Unable to build a covering for with subprogram:" ++ fnl () ++ pr_problem p env !evars newprob ++ fnl () ++ str "And clauses: " ++ pr_preclauses env !evars cls') | Some (clauses, s) -> let () = check_unused_clauses env !evars clauses in let term, _ = term_of_tree env evars (ESorts.make p.program_sort) s in let info = { refined_obj = (idref, cconstr, cty); refined_rettyp = ty; refined_arg = refine_arg idx_of_refined 
cmap.src_ctx; refined_path = path'; refined_term = term; refined_filter = None; refined_args = strength_app; (* refined_args = (mkEvar (evar, secvars), strength_app); *) refined_revctx = revctx; refined_newprob = newprob; refined_newprob_to_lhs = newprob_to_lhs; refined_newty = newty } in Some (Refined (prob, info, s)) (* else *) (* anomaly ~label:"covering" *) (* (str "Found overlapping clauses:" ++ fnl () ++ pr_clauses env (map fst prevmatch) ++ *) (* spc () ++ str"refining" ++ spc () ++ pr_context_map env prob) *) and interp_wheres env0 ctx evars path data s lets (ctx, envctx, liftn, subst) (w : (pre_prototype * pre_equation list) list * Vernacexpr.notation_declaration list) = let notations = snd w in let aux (data,lets,nlets,coverings,env) (((loc,id),udecl,nested,b,t,reca),clauses as eqs) = let is_rec = is_recursive id ([eqs], notations) in let p = interp_arity env evars ~poly:false ~is_rec ~with_evars:true notations eqs in let clauses = List.map (interp_eqn env !evars (data.notations @ notations) p ~avoid:Id.Set.empty) clauses in let sigma, p = adjust_sign_arity env !evars p clauses in let () = evars := sigma in let pre_type = Syntax.program_type p in let rel, fixp = if is_rec then make_fix_proto env !evars pre_type else Retyping.relevance_of_type env !evars pre_type, pre_type in let fixdecls = [Context.Rel.Declaration.LocalAssum (make_annot (Name id) rel, fixp)] in let rec_type = compute_rec_type data.rec_type [p] in let rec_data = {data with rec_type; fixdecls} in let p, problem, arity, extpats, rec_info = compute_rec_data env evars rec_data lets subst p in let intenv = Constrintern.compute_internalization_env ~impls:data.intenv env !evars Constrintern.Recursive [id] [pre_type] [p.program_impls] in let rec_data = { rec_data with intenv; notations = data.notations @ notations } in let data = { data with intenv; notations = data.notations @ notations } in let path = id :: path in let where_args = extended_rel_list 0 lets in let where_args = List.map (substnl subst nlets) where_args in let w' program = {where_program = program; where_program_orig = p; where_program_args = where_args; where_path = path; where_orig = path; where_context_length = List.length extpats; where_type = pre_type } in let program, term = match t with | Some ty (* non-delayed where clause, compute term right away *) -> let splitting = covering env0 evars p rec_data clauses path problem extpats arity in let program = make_single_program env0 evars data.flags p problem splitting rec_info in Lazy.from_val (w' program), program.program_term | None -> let relty = Syntax.program_type p in let src = (loc, Evar_kinds.(QuestionMark { qm_obligation=Define false; qm_name=Name id; qm_record_field=None; })) in let sigma, term = Equations_common.new_evar env0 !evars ~src relty in let () = evars := sigma in let ev = destEvar !evars term in let cover () = let splitting = covering env0 evars p rec_data clauses path problem extpats arity in let program = make_single_program env0 evars data.flags p problem splitting rec_info in evars := Evd.define (fst ev) program.program_term !evars; w' program in Lazy.from_fun cover, term in let decl = make_def (nameR id) (Some (applistc term where_args)) pre_type in (data, decl :: lets, succ nlets, program :: coverings, push_rel decl envctx) in let (data, lets, nlets, coverings, envctx') = List.fold_left aux (data, ctx, 0, [], push_rel_context ctx env0) (fst w) in (data, envctx, lets, nlets, push_rel_context ctx env0, coverings, liftn, subst) and covering ?(check_unused=true) env evars p data 
(clauses : pre_clause list) path prob extpats ty = let clauses = (List.mapi (fun i x -> (x,(succ i,0))) clauses) in (*TODO eta-expand clauses or type *) match covering_aux env evars p data [] clauses path prob extpats [] ty with | Some (clauses, cov) -> let () = if check_unused then check_unused_clauses env !evars clauses in cov | None -> errorlabstrm (str "Unable to build a covering for:" ++ fnl () ++ pr_problem p env !evars prob) let program_covering env evd data p clauses = let clauses = List.map (interp_eqn (push_rel_context data.fixdecls env) !evd data.notations p ~avoid:Id.Set.empty) clauses in let sigma, p = adjust_sign_arity env !evd p clauses in let () = evd := sigma in let p', prob, arity, extpats, rec_node = compute_rec_data env evd data [] [] p in let splitting = covering env evd p data clauses [p.program_id] prob extpats arity in (p', prob, splitting, rec_node) let coverings env evd data programs equations = let splittings = List.map2 (program_covering env evd data) programs equations in make_programs env evd data.flags splittings Coq-Equations-1.3.1-8.20/src/covering.mli000066400000000000000000000157261463127417400177230ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) open Environ open Names open EConstr open Equations_common open Syntax open Context_map open Splitting (* Unification *) exception Conflict exception Stuck type 'a unif_result = UnifSuccess of 'a | UnifFailure | UnifStuck type unification_result = (context_map * int * constr * pat) option val unify : env -> Evd.evar_map -> Int.Set.t -> rel_context -> constr -> constr -> context_map val unify_constrs : env -> Evd.evar_map -> Int.Set.t -> rel_context -> constr list -> constr list -> context_map val flexible : pat list -> ('a,'b,'c) Context.Rel.pt -> Int.Set.t val accessible : pat -> Int.Set.t val accessibles : pat list -> Int.Set.t val hidden : pat -> bool type match_subst = ((Loc.t option * identifier * provenance) * pat) list * (Glob_term.glob_constr * pat) list * (user_pat_loc * constr) list * ((Loc.t option * pat) list) val match_pattern : Environ.env -> Evd.evar_map -> user_pat_loc -> pat -> match_subst val match_patterns : Environ.env -> Evd.evar_map -> user_pats -> pat list -> match_subst val matches : Environ.env -> Evd.evar_map -> user_pats -> context_map -> match_subst unif_result val match_user_pattern : Environ.env -> pat -> user_pat_loc -> (int * user_pat) list * (identifier * pat) list val match_user_patterns : Environ.env -> pat list -> user_pats -> (int * user_pat) list * (identifier * pat) list val matches_user : Environ.env -> context_map -> user_pats -> ((int * user_pat) list * (identifier * pat) list) unif_result val lets_of_ctx : env -> rel_context -> Evd.evar_map ref -> (Id.t * pat) list -> constr list * rel_context * rel_context type int_data = { rec_type : rec_type; fixdecls : rel_context; flags : flags; program_mode : bool; intenv : Constrintern.internalization_env; notations : Vernacexpr.notation_declaration list } val add_wfrec_implicits : Syntax.rec_type -> Glob_term.glob_constr -> Glob_term.glob_constr val interp_program_body : Environ.env -> Evd.evar_map -> EConstr.rel_context -> int_data -> Syntax.program_body -> 
EConstr.types option -> Evd.evar_map * EConstr.constr val interp_constr_in_rhs_env : Environ.env -> Evd.evar_map ref -> int_data -> EConstr.rel_context * Environ.env * int * EConstr.Vars.substl -> int -> Syntax.program_body -> EConstr.t option -> EConstr.constr * EConstr.types val interp_constr_in_rhs : env -> rel_context -> Evd.evar_map ref -> int_data -> constr option -> (Id.t * pat) list -> rel_context -> program_body -> constr * types val unify_type : env -> Evd.evar_map ref -> rel_context -> 'a -> types -> rel_context -> (constr * ((context_map) * int * constr * pat) unif_result array) option val blockers : Environ.env -> user_pats -> context_map -> int list val subst_matches_constr : Evd.evar_map -> int -> (int * constr) list -> constr -> constr val is_all_variables : 'a * pat list * 'b -> bool type 'a split_var_result = | Splitted of 'a | CannotSplit of Names.Name.t * rel_context * constr val split_var : env * Evd.evar_map ref -> int -> rel_context -> (int * rel_context * context_map option array) split_var_result option val find_empty : env * Evd.evar_map ref -> rel_context -> (int * rel_context * splitting option array) option val variables_of_pats : pat list -> (int * bool) list val pats_of_variables : (int * bool) list -> pat list val lift_rel_declaration : int -> rel_declaration -> rel_declaration val lookup_named_i : Id.t -> named_context -> int * named_declaration val instance_of_pats : Environ.env -> Evd.evar_map -> rel_context -> (int * bool) list -> rel_context * pat list * pat list val push_rel_context_eos : rel_context -> env -> esigma -> env val split_at_eos : Environ.env -> Evd.evar_map -> named_context -> named_context * named_context val pr_problem : program_info -> env -> Evd.evar_map -> context_map -> Pp.t val rel_id : (Name.t * 'a * 'b) list -> int -> Id.t val push_named_context : named_context -> env -> env val refine_arg : int -> rel_context -> int * int val env_of_rhs : Evd.evar_map ref -> rel_context -> Environ.env -> (Names.Id.t * pat) list -> rel_declaration list -> rel_context * Environ.env * int * constr list (** Covering computation *) val covering_aux : env -> Evd.evar_map ref -> program_info -> int_data -> (pre_clause * (int * int)) list -> (pre_clause * (int * int)) list -> path -> context_map -> user_pats -> rel_context -> constr -> ((pre_clause * (int * int)) list * splitting) option val covering : ?check_unused:bool -> env -> Evd.evar_map ref -> program_info -> int_data -> pre_clause list -> path -> context_map -> user_pats -> constr -> splitting val adjust_sign_arity : Environ.env -> Evd.evar_map -> program_info -> Syntax.pre_clause list -> Evd.evar_map * program_info val compute_rec_type : rec_type -> program_info list -> rec_type val print_program_info : env -> Evd.evar_map -> program_info list -> unit val compute_fixdecls_data : Environ.env -> Evd.evar_map ref -> ?data:Constrintern.internalization_env -> Syntax.program_info list -> Constrintern.internalization_env * Equations_common.rel_declaration list * (ERelevance.t * EConstr.t) list val wf_fix_constr : Environ.env -> Evd.evar_map ref -> EConstr.rel_context -> EConstr.t -> Sorts.t -> EConstr.t -> EConstr.t -> EConstr.t -> EConstr.t * EConstr.t * EConstr.t val wf_fix : Environ.env -> Evd.evar_map ref -> Vars.substl -> EConstr.rel_context -> EConstr.t -> Sorts.t -> Constrexpr.constr_expr -> Constrexpr.constr_expr option -> EConstr.t (* term *) * EConstr.t (* rel *) * (EConstr.t (* functional type *) * EConstr.t (* full functional type *) * EConstr.t (* fixpoint combinator *)) val 
compute_rec_data : Environ.env -> Evd.evar_map ref -> int_data -> Equations_common.rel_declaration list -> EConstr.Vars.substl -> Syntax.program_info -> Syntax.program_info * Context_map.context_map * EConstr.constr * (Syntax.user_pat, 'a) DAst.t list * Splitting.rec_info option val interp_arity : Environ.env -> Evd.evar_map ref -> poly:bool -> is_rec:bool -> with_evars:bool -> Vernacexpr.notation_declaration list -> pre_equation Syntax.where_clause -> program_info val coverings : Environ.env -> Evd.evar_map ref -> int_data -> Syntax.program_info list -> pre_equation list list -> Splitting.program list Coq-Equations-1.3.1-8.20/src/depelim.ml000066400000000000000000000477431463127417400173610ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (*i camlp4deps: "parsing/grammar.cma" i*) (*i camlp4use: "pa_extend.cmo" i*) open Util open Names open Nameops open Constr open Context open Termops open Declarations open Inductiveops open Reductionops open Pp open Evarutil open Namegen open Tactics open EConstr open Equations_common open Vars let hyps_of_vars env sigma sign nogen hyps = if Id.Set.is_empty hyps then [] else let (_,lh) = fold_named_context_reverse (fun (hs,hl) decl -> let x = get_id decl in if Id.Set.mem x nogen then (hs,hl) else if Id.Set.mem x hs then (hs,x::hl) else let xvars = global_vars_set_of_decl env sigma decl in if not (Id.Set.equal (Id.Set.diff xvars hs) Id.Set.empty) then (Id.Set.add x hs, x :: hl) else (hs, hl)) ~init:(hyps,[]) sign in lh exception Seen let linear sigma vars args = let seen = ref vars in try Array.iter (fun i -> let rels = ids_of_constr ~all:true sigma Id.Set.empty i in let seen' = Id.Set.fold (fun id acc -> if Id.Set.mem id acc then raise Seen else Id.Set.add id acc) rels !seen in seen := seen') args; true with Seen -> false let needs_generalization gl id = let open Tacmach in let open Proofview.Goal in let sigma = sigma gl in let f, args, def, id, oldid = let oldid = pf_get_new_id id gl in let (_, b, t) = to_named_tuple (pf_get_hyp id gl) in match b with | None -> let f, args = decompose_app sigma t in f, args, false, id, oldid | Some t -> let f, args = decompose_app sigma t in f, args, true, id, oldid in if Array.is_empty args then false else let f', args' = decompose_indapp sigma f args in let parvars = ids_of_constr ~all:true sigma Id.Set.empty f' in if not (linear sigma parvars args') then true else Array.exists (fun x -> not (isVar sigma x) || is_section_variable (Global.env ()) (destVar sigma x)) args' let dependent_pattern ?(pattern_term=true) c = let open Tacmach in Proofview.Goal.enter (fun gl -> let sigma = Proofview.Goal.sigma gl in let cr = Retyping.relevance_of_term (pf_env gl) sigma c in let cty = Retyping.get_type_of (pf_env gl) sigma c in let deps = match kind sigma cty with | App (f, args) -> let f', args' = decompose_indapp sigma f args in Array.to_list args' | _ -> [] in let varname c = match kind sigma c with | Var id -> id | _ -> pf_get_new_id (Id.of_string (hdchar (pf_env gl) (project gl) c)) gl in let env = pf_env gl in let mklambda (ty, sigma) (c, id, r, cty) = let conclvar, sigma = Find_subterm.subst_closed_term_occ env sigma (Locus.AtOccs Locus.AllOccurrences) c ty in 
mkNamedLambda sigma (make_annot id r) cty conclvar, sigma in let subst = let map c = (c, varname c, Retyping.relevance_of_term env sigma c, Retyping.get_type_of env sigma c) in let deps = List.rev_map map deps in if pattern_term then (c, varname c, cr, cty) :: deps else deps in let concllda, evd = List.fold_left mklambda (pf_concl gl, project gl) subst in let conclapp = applistc concllda (List.rev_map (fun (c, _, _, _) -> c) subst) in convert_concl ~cast:false ~check:false conclapp DEFAULTcast) let annot_of_context ctx = Array.map_of_list Context.Rel.Declaration.get_annot (List.rev ctx) let depcase ~poly ((mind, i as ind), u) = let indid = Nametab.basename_of_global (GlobRef.IndRef ind) in let mindb, oneind = Global.lookup_inductive ind in let relevance = ERelevance.make oneind.mind_relevance in let indna x = make_annot x relevance in let ctx = oneind.mind_arity_ctxt in let nparams = mindb.mind_nparams in let ctx = List.map of_rel_decl ctx in let args, params = List.chop (List.length ctx - nparams) ctx in let nargs = List.length args in let indapp = mkApp (mkIndU (ind,u), extended_rel_vect 0 ctx) in let evd = ref (Evd.from_env (Global.env())) in let s = evd_comb0 (Evd.new_sort_variable Evd.univ_flexible) evd in let pred = it_mkProd_or_LetIn (mkSort s) (make_assum (indna Anonymous) indapp :: args) in let nconstrs = Array.length oneind.mind_nf_lc in let mkbody i (ctx, ty) = let args = Context.Rel.instance mkRel 0 ctx in annot_of_context (EConstr.of_rel_context ctx), mkApp (mkRel (1 + nconstrs + List.length ctx - i), args) in let bodies = Array.mapi mkbody oneind.mind_nf_lc in let branches = Array.map2_i (fun i id (ctx, cty) -> let cty = Term.it_mkProd_or_LetIn cty ctx in let substcty = Vars.subst_instance_constr u (of_constr cty) in let (args, arity) = decompose_prod_decls !evd substcty in let _, indices = decompose_app !evd arity in let _, indices = Array.chop nparams indices in let ncargs = List.length args - nparams in let realargs, pars = List.chop ncargs args in let realargs = lift_rel_context (i + 1) realargs in let arity = applistc (mkRel (ncargs + i + 1)) (Array.to_list indices @ [mkApp (mkConstructU ((ind, succ i), u), Array.append (extended_rel_vect (ncargs + i + 1) params) (extended_rel_vect 0 realargs))]) in let br = it_mkProd_or_LetIn arity realargs in (make_assum (nameR (Id.of_string ("P" ^ string_of_int i))) br)) oneind.mind_consnames oneind.mind_nf_lc in let ci = make_case_info (Global.env ()) ind RegularStyle in let obj i = mkApp (mkIndU (ind,u), (Array.append (extended_rel_vect (nargs + nconstrs + i) params) (extended_rel_vect 0 args))) in let ctxpred = make_assum (indna Anonymous) (obj (2 + nargs)) :: args in let app = mkApp (mkRel (nargs + nconstrs + 3), (extended_rel_vect 0 ctxpred)) in let paramsinst = extended_rel_vect (2 + nargs + nconstrs) params in let ty = (annot_of_context ctxpred, app) in let case = mkCase (ci, EInstance.empty, paramsinst, (ty, relevance), NoInvert, mkRel 1, bodies) in let xty = obj 1 in let xid = Namegen.named_hd (Global.env ()) !evd xty Anonymous in let body = let len = 1 (* P *) + Array.length branches in it_mkLambda_or_LetIn case (make_assum (indna xid) (lift len indapp) :: ((Array.rev_to_list branches) @ (make_assum (make_annot (Name (Id.of_string "P")) (Retyping.relevance_of_sort s)) pred :: ctx))) in let () = evd := Evd.minimize_universes !evd in let univs = Evd.univ_entry ~poly !evd in let ce = Declare.definition_entry ~univs (EConstr.to_constr !evd body) in let kn = let id = add_suffix indid "_dep_elim" in GlobRef.ConstRef 
(Declare.declare_constant ~name:id (Declare.DefinitionEntry ce) ~kind:Decls.(IsDefinition Scheme)) in let env = (Global.env ()) in (* Refresh after declare constant *) env, Evd.from_env env, ctx, indapp, kn let derive_dep_elimination env sigma ~poly (i,u) = let env, evd, ctx, ty, gref = depcase ~poly (i,u) in let indid = Nametab.basename_of_global (GlobRef.IndRef i) in let id = add_prefix "DependentElimination_" indid in let evdref = ref evd in let cl = dependent_elimination_class evdref in let caseterm = e_new_global evdref gref in let casety = Retyping.get_type_of env !evdref caseterm in let args = extended_rel_vect 0 ctx in Equations_common.declare_instance id ~poly !evdref ctx cl [ty; prod_appvect sigma casety args; mkApp (caseterm, args)] let () = let fn ~pm env sigma ~poly c = let _ = derive_dep_elimination env sigma ~poly c in pm in Ederive.(register_derive { derive_name = "DependentElimination" ; derive_fn = make_derive_ind fn }) let pattern_call ?(pattern_term=true) c = let open Tacmach in Proofview.Goal.enter (fun gl -> let env = pf_env gl in let sigma = project gl in let cr = Retyping.relevance_of_term env sigma c in let cty = Retyping.get_type_of env sigma c in let ids = Id.Set.of_list (ids_of_named_context (Proofview.Goal.hyps gl)) in let deps = match kind sigma c with | App (f, args) -> Array.to_list args | _ -> [] in let varname c = match kind sigma c with | Var id -> id | _ -> Namegen.next_ident_away (Id.of_string (Namegen.hdchar env sigma c)) ids in let mklambda ty (c, id, cty) = let conclvar, _ = Find_subterm.subst_closed_term_occ env (project gl) (Locus.AtOccs Locus.AllOccurrences) c ty in mkNamedLambda sigma (make_annot id cr) cty conclvar in let subst = let deps = List.rev_map (fun c -> (c, varname c, pf_get_type_of gl c)) deps in if pattern_term then (c, varname c, cty) :: deps else deps in let concllda = List.fold_left mklambda (pf_concl gl) subst in let conclapp = applistc concllda (List.rev_map pi1 subst) in (convert_concl ~cast:false ~check:false conclapp DEFAULTcast)) let destPolyRef sigma c = let open GlobRef in match kind sigma c with | Ind (ind, u) -> IndRef ind, u | Const (c, u) -> ConstRef c, u | Construct (cstr, u) -> ConstructRef cstr, u | _ -> raise (Invalid_argument "destPolyRef") (** Compare up-to variables in v, skipping parameters of inductive constructors. 
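Any variable or de Bruijn index on the [v] side matches anything; applications of the same constructor are compared recursively on their real arguments only, skipping the parameter instances; otherwise the comparison descends through both terms generically.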
[t] is closed *) let rec compare_upto_variables sigma t v = if (isVar sigma v || isRel sigma v) then true else match kind sigma t, kind sigma v with | App (cnstr, args), App (cnstr', args') when eq_constr_nounivs sigma cnstr cnstr' && isConstruct sigma cnstr -> let cnstr, _u = destConstruct sigma cnstr in let real = constructor_nrealargs (Global.env()) cnstr in if real <= Array.length args && real <= Array.length args' then let args = CArray.sub args (Array.length args - real) real in let args' = CArray.sub args' (Array.length args' - real) real in CArray.for_all2 (compare_upto_variables sigma) args args' else compare_constr sigma (compare_upto_variables sigma) t v | _, _ -> compare_constr sigma (compare_upto_variables sigma) t v let whd_head env sigma t = match kind sigma t with | App (eq, args) -> mkApp (eq, Array.map (Tacred.whd_simpl env sigma) args) | _ -> t let specialize_eqs ?with_block id = Proofview.Goal.enter begin fun gl -> let open Tacticals in let open Tacmach in let env = pf_env gl in let ty = pf_get_hyp_typ id gl in let evars = ref (project gl) in let unif env ctx evars c1 c2 = match Evarconv.unify env !evars Conversion.CONV (it_mkLambda_or_subst env c1 ctx) (it_mkLambda_or_subst env c2 ctx) with | exception Evarconv.UnableToUnify _ -> false | evm -> evars := evm; true in let rec aux block_count in_block in_eqs ctx subst acc ty = match kind !evars ty with | LetIn (na, b, t, ty) -> if is_global env !evars (Lazy.force coq_block) b then if with_block = None then aux block_count in_block in_eqs ctx subst acc (subst1 mkProp ty) else if (in_block || in_eqs) && Int.equal block_count 0 then acc, in_eqs, ctx, subst, (subst1 mkProp ty) else aux (block_count - 1) true in_eqs ctx subst acc (subst1 mkProp ty) else if not in_block then aux block_count in_block in_eqs (make_def na (Some b) t :: ctx) subst (lift 1 acc) ty else aux block_count in_block in_eqs ctx (make_def na (Some b) t :: subst) acc ty | Prod (na, t, b) when not in_block -> aux block_count false in_eqs (make_def na None t :: ctx) subst (mkApp (lift 1 acc, [| mkRel 1 |])) b | Prod (na, t, b) -> let env' = push_rel_context ctx env in let env' = push_rel_context subst env' in (* Feedback.msg_debug (str"Reducing" ++ Printer.pr_econstr_env env' !evars t ++ str " in env " ++ Printer.pr_rel_context_of env' !evars); *) let t' = whd_head env' !evars t in (* Feedback.msg_debug (str"Reduced" ++ Printer.pr_econstr_env env' !evars t ++ str " to " ++ Printer.pr_econstr_env env' !evars t'); *) (match kind !evars t' with | App (eq, [| eqty; x; y |]) when (is_global env !evars (Lazy.force logic_eq_type) eq && (noccur_between !evars 1 (List.length subst) x || noccur_between !evars 1 (List.length subst) y)) -> let _, u = destPolyRef !evars eq in let c, o = if noccur_between !evars 1 (List.length subst) x then x, y else y, x in let eqr = constr_of_global_univ !evars (Lazy.force logic_eq_refl, u) in let p = mkApp (eqr, [| eqty; c |]) in if ((Option.equal Int.equal with_block (Some 2) && Int.equal block_count 0) || compare_upto_variables !evars c o) && unif (push_rel_context ctx env) subst evars o c then aux block_count in_block true ctx subst (mkApp (acc, [| p |])) (subst1 p b) else acc, in_eqs, ctx, subst, ty | _ -> if in_eqs then (* aux in_block false ctx (make_def na None t :: subst) (mkApp (lift 1 acc, [| mkRel 1 |])) b *) acc, in_eqs, ctx, subst, ty else let e = evd_comb1 (Evarutil.new_evar env') evars t in aux block_count in_block false ctx (make_def na (Some e) t :: subst) (mkApp (lift 1 acc, [| mkRel 1 |])) b) | t -> acc, in_eqs, ctx, 
subst, ty in let acc, worked, ctx, subst, ty = aux (match with_block with None -> 0 | Some n -> n) (match with_block with None -> true | Some _ -> false) false [] [] (mkVar id) ty in let subst' = nf_rel_context_evar !evars subst in let subst'' = List.map (fun decl -> let (n,b,t) = to_tuple decl in match b with | Some k when isEvar !evars k -> make_assum n t | b -> decl) subst' in let ty = it_mkProd_or_LetIn ty subst'' in let acc = it_mkLambda_or_LetIn acc subst'' in let ty = it_mkProd_or_LetIn ty ctx in let acc = it_mkLambda_or_LetIn acc ctx in let ty = Evarutil.nf_evar !evars ty in let acc = Evarutil.nf_evar !evars acc in if worked then assert_replacing id acc ty else tclFAIL (str "Nothing to do in hypothesis " ++ Id.print id ++ Printer.pr_econstr_env env !evars ty ) end exception Specialize open Proofview.Notations let specialize_eqs ?with_block id = let open Tacticals in Proofview.Goal.enter begin fun gl -> Proofview.tclORELSE (clear [id] <*> Proofview.tclZERO Specialize) begin function | (Specialize, _) -> specialize_eqs ?with_block id | e -> tclFAIL (str "Specialization not allowed on dependent hypotheses") end end (* Dependent elimination using Equations. *) let dependent_elim_tac ?patterns id : unit Proofview.tactic = Proofview.Goal.enter begin fun gl -> let env = Proofview.Goal.env gl in let concl = Proofview.Goal.concl gl in let sigma = Proofview.Goal.sigma gl in let sort = Retyping.get_sort_of env sigma concl in let env = Environ.reset_context env in let hyps = Proofview.Goal.hyps gl in let default_loc, id = id in (* Keep aside the section variables. *) let loc_hyps, sec_hyps = CList.split_when (fun decl -> let id = Context.Named.Declaration.get_id decl in Termops.is_section_variable (Global.env ()) id) hyps in let env = push_named_context sec_hyps env in (* Check that [id] exists in the current context. *) begin try let rec lookup k = function | decl :: _ when Id.equal id (Context.Named.Declaration.get_id decl) -> k | _ :: sign -> lookup (succ k) sign | [] -> raise Not_found in Proofview.tclUNIT (lookup 1 loc_hyps) with Not_found -> Tacticals.tclZEROMSG (str "No such hypothesis: " ++ Id.print id) end >>= fun rel -> (* We want to work in a [rel_context], not a [named_context]. *) let ctx, subst = Equations_common.rel_of_named_context sigma loc_hyps in let _, rev_subst, _ = let err () = assert false in Equations_common.named_of_rel_context ~keeplets:true err ctx in (* We also need to convert the goal for it to be well-typed in * the [rel_context]. *) let ty = Vars.subst_vars sigma subst concl in let rhs = let prog = Constrexpr.CHole None in Syntax.Program (Syntax.ConstrExpr (CAst.make prog), ([], [])) in begin match patterns with | None -> (* Produce default clauses from the variable to split. *) let evd = ref sigma in begin match Covering.split_var (env, evd) rel ctx with | None | Some (Covering.CannotSplit _) -> Tacticals.tclZEROMSG (str "Could not eliminate variable " ++ Id.print id) | Some (Covering.Splitted (_, newctx, brs)) -> let brs = Option.List.flatten (Array.to_list brs) in let clauses_lhs = List.map Context_map.context_map_to_lhs brs in let clauses = List.map (fun lhs -> Syntax.Pre_clause (default_loc, lhs, Some rhs)) clauses_lhs in Proofview.tclUNIT clauses end | Some patterns -> (* For each pattern, produce a clause. 
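The user-given pattern is placed at the position of the hypothesis [id] being eliminated, every other hypothesis of the local context becomes a generated pattern variable carrying its own name, and the right-hand side is left as a hole to be solved by the subsequent refinement.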
*) let make_clause : (Syntax.user_pat_loc) -> Syntax.pre_clause = DAst.with_loc_val (fun ?loc pat -> let lhs = List.rev_map (fun decl -> let decl_id = Context.Named.Declaration.get_id decl in if Names.Id.equal decl_id id then DAst.make ?loc pat else DAst.make Syntax.(PUVar (decl_id, Generated))) loc_hyps in Syntax.Pre_clause (loc, lhs, Some rhs)) in Proofview.tclUNIT (List.map make_clause patterns) end >>= fun clauses -> if !debug then Feedback.msg_info (str "Generated clauses: " ++ fnl() ++ Syntax.pr_preclauses env sigma clauses); (* Produce dummy data for covering. *) (* FIXME Not very clean. *) let data = Covering.{ rec_type = [None]; flags = { polymorphic = true; open_proof = false; with_eqns = false; with_ind = false; allow_aliases = false; tactic = !Declare.Obls.default_tactic}; program_mode = false; fixdecls = []; intenv = Constrintern.empty_internalization_env; notations = [] } in let program_orig_type = it_mkProd_or_LetIn ty ctx in let p = Syntax.{program_loc = default_loc; program_id = Names.Id.of_string "dummy"; program_orig_type; program_sort = (ESorts.kind sigma sort); program_impls = []; program_implicits = []; program_rec = None; program_sign = ctx; program_arity = ty} in (* Initial problem. *) let prob = Context_map.id_subst ctx in let args = Context.Rel.instance_list mkRel 0 ctx in Refine.refine ~typecheck:true begin fun evars -> let evd = ref evars in (* Produce a splitting tree. *) let split : Splitting.splitting = Covering.covering env evd p data clauses [] prob [] ty in let c, ty = Splitting.term_of_tree env evd sort split in let c = beta_applist !evd (c, args) in let c = Vars.substl (List.rev rev_subst) c in if !Equations_common.debug then Feedback.msg_debug (str "refining with" ++ Printer.pr_econstr_env env !evd c); (!evd, c) end end let dependent_elim_tac_expr ?patterns id : unit Proofview.tactic = Proofview.Goal.enter begin fun gl -> let env = Proofview.Goal.env gl in let sigma = Proofview.Goal.sigma gl in (* Interpret each pattern to then produce clauses. 
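Each pattern arrives as a constr_expr and is elaborated through Syntax.interp_pat, avoiding the identifiers the patterns already bind, before being handed to dependent_elim_tac which builds the clauses.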
*) let patterns = match patterns with | None -> None | Some p -> let avoid = Syntax.ids_of_pats None p in Some (List.map (fun x -> List.hd (snd (Syntax.interp_pat env sigma [] ~avoid None x))) p) in dependent_elim_tac ?patterns id end Coq-Equations-1.3.1-8.20/src/depelim.mli000066400000000000000000000031321463127417400175120ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) open Environ open Names open EConstr val hyps_of_vars : env -> Evd.evar_map -> named_context -> Id.Set.t -> Id.Set.t -> Id.Set.elt list exception Seen val linear : Evd.evar_map -> Id.Set.t -> constr array -> bool val needs_generalization : Proofview.Goal.t -> Id.t -> bool val dependent_pattern : ?pattern_term:bool -> constr -> unit Proofview.tactic val depcase : poly:bool -> inductive * EInstance.t -> Environ.env * Evd.evar_map * rel_context * constr * Names.GlobRef.t val derive_dep_elimination : Environ.env -> Evd.evar_map -> poly:bool -> inductive * EInstance.t -> Constant.t * (Evd.evar_map * constr) val pattern_call : ?pattern_term:bool -> constr -> unit Proofview.tactic val specialize_eqs : ?with_block:int -> Names.Id.t -> unit Proofview.tactic val compare_upto_variables : Evd.evar_map -> constr -> constr -> bool val dependent_elim_tac : ?patterns:Syntax.user_pat_loc list -> Names.Id.t Syntax.with_loc -> unit Proofview.tactic val dependent_elim_tac_expr : ?patterns:Constrexpr.constr_expr list -> Names.Id.t Syntax.with_loc -> unit Proofview.tactic Coq-Equations-1.3.1-8.20/src/dune000066400000000000000000000003401463127417400162440ustar00rootroot00000000000000(library (name equations_plugin) (public_name coq-equations.plugin) (flags :standard -w -9-27+40+60 -warn-error -3-9-32-33-50) (libraries coq-core.plugins.cc coq-core.plugins.extraction)) (coq.pp (modules g_equations)) Coq-Equations-1.3.1-8.20/src/ederive.ml000066400000000000000000000061261463127417400173530ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) open Constr type derive_fn_ty = pm:Declare.OblState.t -> poly:bool -> Names.GlobRef.t -> Declare.OblState.t type derive_record = { derive_name : string; derive_fn : derive_fn_ty } let make_derive fn ~pm ~poly s = let env = Global.env () in let sigma = Evd.from_env env in let sigma, c = Evd.fresh_global ~rigid:Evd.univ_rigid env sigma s in fn ~pm env sigma ~poly c let make_derive_ind fn ~pm ~poly s = let fn ~pm env sigma ~poly c = match EConstr.kind sigma c with | Ind (i,u) -> fn ~pm env sigma ~poly (i,u) | _ -> CErrors.user_err (Pp.str"Expected an inductive type") in make_derive fn ~pm ~poly s let table = ref (CString.Map.empty : derive_fn_ty CString.Map.t) let register_derive d = table := CString.Map.add d.derive_name d.derive_fn !table let get_derive d = try CString.Map.find d !table with Not_found -> CErrors.user_err Pp.(str"No derive declared for " ++ str d) module StringOrd = struct type t 
= string let compare = String.compare end module StringSet = Set.Make(StringOrd) (** We keep a table of which derives have been performed yet for a given global reference. *) type derive_instance = (string * Names.GlobRef.t) type derive_instance_map = StringSet.t Names.GlobRef.Map.t let derived_instances : derive_instance_map ref = Summary.ref Names.GlobRef.Map.empty ~name:"derived-instances" let cache_instance (derive, gr) = let grderives = match Names.GlobRef.Map.find_opt gr !derived_instances with | Some s -> s | None -> StringSet.empty in derived_instances := Names.GlobRef.Map.add gr (StringSet.add derive grderives) !derived_instances let subst_instance (subst, (derive, gr)) = (derive, fst (Globnames.subst_global subst gr)) let discharge_instance (derive, gr as o) = if Globnames.isVarRef gr then None else Some o let derive_instance_input : derive_instance -> Libobject.obj = let decl = Libobject.superglobal_object "derive instances state" ~cache:cache_instance ~discharge:discharge_instance ~subst:(Some subst_instance) in Libobject.declare_object decl let register_instance decl = Lib.add_leaf (derive_instance_input decl) let check_derive s gr = try let grds = Names.GlobRef.Map.find gr !derived_instances in StringSet.mem s grds with Not_found -> false let derive_one ~pm poly d grs = let fn = get_derive d in List.fold_left (fun pm x -> let pm = fn ~pm ~poly x in register_instance (d, x); pm) pm grs let derive ~pm ~poly ds grs = let grs = List.map (fun (loc, gr) -> Dumpglob.add_glob ?loc gr; gr) grs in List.fold_left (fun pm d -> derive_one ~pm poly d grs) pm ds Coq-Equations-1.3.1-8.20/src/ederive.mli000066400000000000000000000027551463127417400175300ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) type derive_record = { derive_name : string; derive_fn : pm:Declare.OblState.t -> poly:bool -> Names.GlobRef.t -> Declare.OblState.t } (** When the Derive expects a constr. *) val make_derive : (pm:Declare.OblState.t -> Environ.env -> Evd.evar_map -> poly:bool -> EConstr.constr -> Declare.OblState.t) -> pm:Declare.OblState.t -> poly:bool -> Names.GlobRef.t -> Declare.OblState.t (** When the Derive works on inductive types only. *) val make_derive_ind : (pm:Declare.OblState.t -> Environ.env -> Evd.evar_map -> poly:bool -> Names.inductive * EConstr.EInstance.t -> Declare.OblState.t) -> pm:Declare.OblState.t -> poly:bool -> Names.GlobRef.t -> Declare.OblState.t val register_derive : derive_record -> unit (** Check if a given notion has been derived already for a given global reference. 
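The lookup consults the summary table of derive instances, which maps each global reference to the set of derive names already registered for it; references with no recorded derives simply yield [false].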
*) val check_derive : string -> Names.GlobRef.t -> bool val derive : pm:Declare.OblState.t -> poly:bool -> string list -> Names.GlobRef.t Loc.located list -> Declare.OblState.t Coq-Equations-1.3.1-8.20/src/eqdec.ml000066400000000000000000000147421463127417400170140ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (* Statements: forall Δ, EqDec Δ -> EqDec (I Δ) Proofs: intros; intro x y; depind x; depelim y. { c ts = c us } + { c ts <> c us }. Takes ts, us and recurse: case (eq_dec t u) ; [ rec ts us | right; intro Heq; noconf Heq; apply Hneq; reflexivity ] *) open Util open Names open Nameops open Declarations open Inductiveops open Vars open EConstr open Equations_common type one_inductive_info = { ind_name : identifier; ind_c : constr; (* Inductive type, applied to parameters (named variables) *) ind_args : rel_context; (* Arguments, as a rel_context typed in env with named variables *) ind_constr : (rel_context * types) array; (* Constructor types as a context and an arity, with parameters instantiated by variables *) ind_case : constr -> types -> constr array -> constr; (* Case construct closure taking the target, predicate and branches *) } type mutual_inductive_info = { mutind_params : named_context; (* Mutual parameters as a named context *) mutind_inds : one_inductive_info array; (* Each inductive. *) } let erel_context = List.map of_rel_decl let inductive_info sigma ((mind, _ as ind),u) = let mindb, oneind = Global.lookup_inductive ind in let params_ctxt = subst_instance_context (EInstance.kind sigma u) mindb.mind_params_ctxt in let subst, paramargs, params = named_of_rel_context (fun () -> Id.of_string "param") (erel_context params_ctxt) in let nparams = List.length params in let env = List.fold_right push_named params (Global.env ()) in let info_of_ind i ind = let ctx = ind.mind_arity_ctxt in let args, _ = List.chop ind.mind_nrealargs ctx in let args' = subst_rel_context 0 subst (erel_context args) in let induct = ((mind, i),u) in let indname = Nametab.basename_of_global (GlobRef.IndRef (mind,i)) in let indapp = applist (mkIndU induct, paramargs) in let arities = arities_of_constructors env induct in let constrs = Array.map (fun ty -> let _, rest = decompose_prod_n_decls sigma nparams ty in let constrty = Vars.substl subst rest in decompose_prod_decls sigma constrty) arities in let case c pred brs = let ci = make_case_info (Global.env ()) (mind,i) Constr.RegularStyle in mkCase (EConstr.contract_case env sigma (ci, (pred, ERelevance.relevant), Constr.NoInvert, c, brs)) (* TODO relevance / case inversion *) in { ind_name = indname; ind_c = indapp; ind_args = args'; ind_constr = constrs; ind_case = case } in let inds = Array.mapi info_of_ind mindb.mind_packets in { mutind_params = params; mutind_inds = inds } let eq_dec_class evd = Option.get (Typeclasses.class_of_constr (Global.env()) !evd (get_efresh logic_eqdec_class evd)) let dec_eq evd = get_efresh logic_eqdec_dec_eq evd let vars_of_pars pars = Array.of_list (List.map (fun x -> mkVar (get_id x)) pars) open EConstr.Vars let derive_eq_dec ~pm env sigma ~poly ind = let info = inductive_info sigma ind in let () = if Ederive.check_derive "NoConfusion" (Names.GlobRef.IndRef 
(fst ind)) || Ederive.check_derive "NoConfusionHom" (Names.GlobRef.IndRef (fst ind)) then () else user_err_loc (None, Pp.(str "[Derive EqDec] requires a [NoConfusion] " ++ str"or a [NoConfusionHom] instance for type " ++ Printer.pr_inductive env (fst ind) ++ str " to be derived first.")) in let ctx = info.mutind_params in let evdref = ref sigma in let cl = fst (snd (eq_dec_class evdref)) in let info_of ind = let argsvect = extended_rel_vect 0 ind.ind_args in let indapp = mkApp (ind.ind_c, argsvect) in let app = mkApp (dec_eq evdref, [| indapp |]) in let app = let xname = nameR (Id.of_string "x") in let yname = nameR (Id.of_string "y") in mkProd (xname, indapp, mkProd (yname, lift 1 indapp, mkApp (lift 2 app, [| mkRel 2; mkRel 1 |]))) in let typ = it_mkProd_or_LetIn app ind.ind_args in let full = it_mkNamedProd_or_LetIn !evdref typ ctx in let evm, _ = Typing.solve_evars (Global.env ()) !evdref full in let () = evdref := evm in let tc gr = let b, ty = Typeclasses.instance_constructor cl [indapp; mkapp (Global.env ()) evdref (Lazy.from_val gr) (Array.append (vars_of_pars ctx) argsvect) ] in let body = it_mkNamedLambda_or_LetIn !evdref (it_mkLambda_or_LetIn (Option.get b) ind.ind_args) ctx in let types = it_mkNamedProd_or_LetIn !evdref (it_mkProd_or_LetIn ty ind.ind_args) ctx in let evm, _ = Typing.solve_evars (Global.env ()) !evdref (mkCast (body, Constr.DEFAULTcast, types)) in let () = evdref := evm in let types = to_constr !evdref types in let body = to_constr !evdref body in let univs = Evd.univ_entry ~poly !evdref in let ce = Declare.definition_entry ~univs ~types body in ce in full, tc in let indsl = Array.to_list info.mutind_inds in let indsl = List.map (fun ind -> ind, info_of ind) indsl in let hook { Declare.Hook.S.dref; _ } = List.iter (fun (ind, (stmt, tc)) -> let ce = tc dref in let kind = Decls.(IsDefinition Instance) in let entry = Declare.DefinitionEntry ce in let inst = Declare.declare_constant ~name:(add_suffix ind.ind_name "_EqDec") ~kind entry in Classes.declare_instance (Global.env ()) !evdref (Some Hints.empty_hint_info) Hints.SuperGlobal (GlobRef.ConstRef inst)) indsl in let hook = Declare.Hook.make hook in List.fold_left (fun pm (ind, (stmt, tc)) -> let id = add_suffix ind.ind_name "_eqdec" in let cinfo = Declare.CInfo.make ~name:id ~typ:(to_constr !evdref stmt) () in let info = Declare.Info.make ~poly ~hook () in let pm = Declare.Obls.add_definition ~pm ~cinfo ~info ~uctx:(Evd.evar_universe_context !evdref) ~opaque:false ~tactic:(eqdec_tac ()) [||] in fst pm) pm indsl let () = Ederive.(register_derive { derive_name = "EqDec"; derive_fn = make_derive_ind derive_eq_dec }) Coq-Equations-1.3.1-8.20/src/eqdec.mli000066400000000000000000000024121463127417400171540ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) open Equations_common open EConstr type one_inductive_info = { ind_name : identifier; ind_c : constr; ind_args : rel_context; ind_constr : (rel_context * types) array; ind_case : constr -> types -> constr array -> constr; } type mutual_inductive_info = { mutind_params : named_context; mutind_inds : one_inductive_info array; } val inductive_info : Evd.evar_map -> (Names.MutInd.t * int) * EInstance.t -> 
mutual_inductive_info val eq_dec_class : esigma -> rel_context * (Typeclasses.typeclass peuniverses * Constr.t list) val dec_eq : esigma -> constr val vars_of_pars : named_context -> constr array val derive_eq_dec : pm:Declare.OblState.t -> Environ.env -> Evd.evar_map -> poly:bool -> Names.inductive * EInstance.t -> Declare.OblState.t Coq-Equations-1.3.1-8.20/src/equations.ml000066400000000000000000000316651463127417400177460ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) open Util open Names open Nameops open Constr open Context open Termops open Environ open Libnames open Vars open Tactics open Tacticals open Tacmach open Evarutil open Equations_common open Syntax open Covering open Splitting open Principles open EConstr open Extraction_plugin let inline_helpers i = let l = List.map (fun (cst, _) -> Nametab.shortest_qualid_of_global Id.Set.empty (GlobRef.ConstRef cst)) i.helpers_info in Table.extraction_inline true l let define_unfolding_eq ~pm env evd flags p unfp prog prog' ei hook = let info' = prog'.program_split_info in let info = { info' with base_id = prog.program_split_info.base_id; helpers_info = prog.program_split_info.helpers_info @ info'.helpers_info; user_obls = Id.Set.union prog.program_split_info.user_obls info'.user_obls } in let () = inline_helpers info in let funf_cst = match info'.term_id with GlobRef.ConstRef c -> c | _ -> assert false in let () = if flags.polymorphic then evd := Evd.from_ctx info'.term_ustate in let funfc = e_new_global evd info'.term_id in let unfold_eq_id = add_suffix (program_id unfp) "_eq" in let hook_eqs _ pm = Global.set_strategy (Conv_oracle.EvalConstRef funf_cst) Conv_oracle.transparent; let () = (* Declare the subproofs of unfolding for where as rewrite rules *) let decl _ (_, id, _) = let gr = try Nametab.locate_constant (qualid_of_ident id) with Not_found -> anomaly Pp.(str "Could not find where clause unfolding lemma " ++ Names.Id.print id) in let gr = GlobRef.ConstRef gr in Principles.add_rew_rule ~l2r:true ~base:(info.base_id ^ "_where") gr; Principles.add_rew_rule ~l2r:false ~base:(info.base_id ^ "_where_rev") gr in PathMap.iter decl ei.Principles_proofs.equations_where_map in let env = Global.env () in let () = if not flags.polymorphic then evd := (Evd.from_env env) in let prog' = { program_cst = funf_cst; program_split_info = info } in let unfp = { unfp with program_info = { unfp.program_info with program_id = program_id p } } in let eqninfo = Principles_proofs.{ equations_id = program_id p; equations_where_map = ei.equations_where_map; equations_f = funfc; equations_prob = ei.equations_prob } in hook ~pm (p, Some unfp, prog', eqninfo) unfold_eq_id in let () = if not flags.polymorphic then (evd := Evd.from_env (Global.env ())) in let sign = program_sign unfp in let arity = program_arity unfp in let stmt = it_mkProd_or_LetIn (mkEq (Global.env ()) evd arity (mkApp (p.program_term, extended_rel_vect 0 sign)) (mkApp (funfc, extended_rel_vect 0 sign))) sign in let evd, stmt = Typing.solve_evars (Global.env ()) !evd stmt in let subproofs = Principles_proofs.extract_subprograms env evd ei.equations_where_map p unfp in let fold pm (name, subst, uctx, typ) = let typ = 
EConstr.Unsafe.to_constr typ in let typ = collapse_term_qualities uctx typ in let tac = Principles_proofs.prove_unfolding_sublemma info ei.equations_where_map prog.program_cst funf_cst subst in let cinfo = Declare.CInfo.make ~name ~typ () in let info = Declare.Info.make ~poly:info.poly ~scope:info.scope ~kind:(Decls.IsDefinition info.decl_kind) () in let pm, _ = Declare.Obls.add_definition ~pm ~cinfo ~info ~tactic:tac ~uctx ~opaque:false [||] in pm in let pm = List.fold_left fold pm subproofs in let tac = Principles_proofs.(prove_unfolding_lemma info ei.equations_where_map prog.program_cst funf_cst p unfp) in let stmt = collapse_term_qualities (Evd.evar_universe_context evd) (EConstr.to_constr evd stmt) in let cinfo = Declare.CInfo.make ~name:unfold_eq_id ~typ:stmt ~impargs:(program_impls p) () in let info = Declare.Info.make ~poly:info.poly ~scope:info.scope ~kind:(Decls.IsDefinition info.decl_kind) () in let obl_hook = Declare.Hook.make_g hook_eqs in let pm, _ = Declare.Obls.add_definition ~pm ~cinfo ~info ~reduce:(fun x -> x) ~tactic:tac ~obl_hook ~opaque:false ~uctx:(Evd.evar_universe_context evd) [||] in pm let define_principles ~pm flags rec_type progs = let env = Global.env () in let evd = ref (Evd.from_env env) in let () = if flags.polymorphic then let ustate = (snd (List.hd progs)).program_split_info.term_ustate in let () = evd := Evd.merge_universe_context !evd ustate in () else () in let pm, progs' = Principles.unfold_programs ~pm env evd flags rec_type progs in match progs' with | [p, Some (unfp, cpi'), cpi, eqi] -> let hook ~pm (p, unfp, cpi', eqi) unfold_eq_id = let cpi' = { cpi' with program_split_info = { cpi'.program_split_info with comp_obls = cpi'.program_split_info.comp_obls @ cpi.program_split_info.comp_obls } } in Principles.build_equations ~pm flags.with_ind env !evd ~alias:(make_alias (p.program_term, unfold_eq_id, p.program_splitting)) rec_type [p, unfp, cpi', eqi] in define_unfolding_eq ~pm env evd flags p unfp cpi cpi' eqi hook | splits -> let splits = List.map (fun (p, unfp, cpi, eqi) -> let unfp' = match unfp with | Some (unfp, unfpi) -> Some unfp | None -> None in (p, unfp', cpi, eqi)) splits in build_equations ~pm flags.with_ind env !evd rec_type splits let define_by_eqs ~pm ~poly ~program_mode ~tactic ~open_proof opts eqs nt = let with_eqns, with_ind = let try_bool_opt opt default = try List.assoc opt opts with Not_found -> default in let with_eqns = try_bool_opt OEquations !Equations_common.equations_derive_equations in if with_eqns then with_eqns, try_bool_opt OInd !Equations_common.equations_derive_eliminator else false, false in let env = Global.env () in let flags = { polymorphic = poly; with_eqns; with_ind; allow_aliases = false; tactic; open_proof } in let evm, udecl = match eqs with | (((loc, i), udecl, _, _, _, _), _) :: _ -> Constrintern.interp_univ_decl_opt env udecl | _ -> assert false in let evd = ref evm in let programs = List.map (fun (((loc,i),udecl,rec_annot,l,t,by),clauses as ieqs) -> let is_rec = is_recursive i (eqs, nt) in interp_arity env evd ~poly ~is_rec ~with_evars:open_proof nt ieqs) eqs in let rec_type = compute_rec_type [] programs in let () = print_program_info env !evd programs in let env = Global.env () in (* To find the comp constant *) let data, fixdecls, fixprots = compute_fixdecls_data env evd programs in let fixdecls = nf_rel_context_evar !evd fixdecls in let intenv = { rec_type; flags; fixdecls; intenv = data; notations = nt; program_mode } in let programs = coverings env evd intenv programs (List.map snd eqs) in let env = 
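(* By this point the arities of the block have been interpreted ([interp_arity]), the recursion structure computed ([compute_rec_type]) and the fixpoint prototypes built ([compute_fixdecls_data]); [coverings] above elaborates the clauses of each program into its covering. *)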
Global.env () in (* coverings has the side effect of defining comp_proj constants for now *) let fix_proto_ref = Globnames.destConstRef (Lazy.force coq_fix_proto) in (* let _kind = (Decl_kinds.Global Decl_kinds.ImportDefaultBehavior, poly, Decl_kinds.Definition) in *) let baseid = let p = List.hd programs in Id.to_string p.program_info.program_id in (* Necessary for the definition of [i] *) let () = let trs = { TransparentState.full with TransparentState.tr_cst = Cpred.complement (Cpred.singleton fix_proto_ref) } in Hints.create_hint_db false baseid trs true in let progs = Array.make (List.length eqs) None in let nt = List.map Metasyntax.prepare_where_notation nt in let hook ~pm i p info = let () = inline_helpers info in let f_cst = match info.term_id with GlobRef.ConstRef c -> c | _ -> assert false in let () = evd := Evd.from_ctx info.term_ustate in let compiled_info = { program_cst = f_cst; program_split_info = info } in progs.(i) <- Some (p, compiled_info); if CArray.for_all (fun x -> not (Option.is_empty x)) progs then (let progs = Array.map_to_list (fun x -> Option.get x) progs in let is_relevant (rel, _) = not @@ ERelevance.is_irrelevant !evd rel in let relevant = List.for_all is_relevant fixprots in let rec_info = compute_rec_type [] (List.map (fun (x, y) -> x.program_info) progs) in List.iter (Metasyntax.add_notation_interpretation ~local:false (Global.env ())) nt; if (flags.with_eqns || flags.with_ind) && relevant then define_principles ~pm flags rec_info progs else pm) else pm in let hook ~pm i p info = (), hook ~pm i p info in define_programs ~pm env evd udecl rec_type fixdecls flags programs hook let interp_tactic = function | Some qid -> let open Ltac_plugin in let kn = try Tacenv.locate_tactic qid with Not_found -> CErrors.user_err Pp.(str"Tactic " ++ pr_qualid qid ++ str" not found") in let genarg = Tacenv.interp_ltac kn in let tacval = Tacinterp.Value.of_closure (Tacinterp.default_ist ()) genarg in Tacinterp.Value.apply tacval [] | None -> !Declare.Obls.default_tactic let equations ~pm ~poly ~program_mode ?tactic opts eqs nt = List.iter (fun (((loc, i), _udecl, nested, l, t, by),eqs) -> Dumpglob.dump_definition CAst.(make ?loc i) false "def") eqs; let tactic = interp_tactic tactic in let pm, pstate = define_by_eqs ~pm ~poly ~program_mode ~tactic ~open_proof:false opts eqs nt in match pstate with | None -> pm | Some _ -> CErrors.anomaly Pp.(str"Equation.equations leaving a proof open") let equations_interactive ~pm ~poly ~program_mode ?tactic opts eqs nt = List.iter (fun (((loc, i), _udecl, nested, l, t, by),eqs) -> Dumpglob.dump_definition CAst.(make ?loc i) false "def") eqs; let tactic = interp_tactic tactic in let pm, lemma = define_by_eqs ~pm ~poly ~program_mode ~tactic ~open_proof:true opts eqs nt in match lemma with | None -> CErrors.anomaly Pp.(str"Equation.equations_interactive not opening a proof") | Some p -> pm, p let solve_equations_goal destruct_tac tac = Proofview.Goal.enter begin fun gl -> let concl = pf_concl gl in let intros, move, concl = let rec intros goal move = match Constr.kind goal with | Prod ({binder_name=Name id}, _, t) -> let id = fresh_id_in_env Id.Set.empty id (pf_env gl) in let tac, move, goal = intros (subst1 (Constr.mkVar id) t) (Some id) in tclTHEN intro tac, move, goal | LetIn ({binder_name=Name id}, c, _, t) -> if String.equal (Id.to_string id) "target" then tclIDTAC, move, goal else let id = fresh_id_in_env Id.Set.empty id (pf_env gl) in let tac, move, goal = intros (subst1 c t) (Some id) in tclTHEN intro tac, move, goal | _ -> tclIDTAC, 
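(* The [intros] recursion above introduces the products and let-ins of the goal, stopping at the let-binding named [target]; the code below then reads off the [target] and [branches] let-bindings of the goal. *)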
move, goal in intros (to_constr (project gl) concl) None in let move_tac = match move with | None -> fun _ -> tclIDTAC | Some id' -> fun id -> move_hyp id (Logic.MoveBefore id') in let targetn, branchesn, targ, brs, b = match kind (project gl) (of_constr concl) with | LetIn ({binder_name=Name target}, targ, _, b) -> (match kind (project gl) b with | LetIn ({binder_name=Name branches}, brs, _, b) -> target, branches, int_of_coq_nat (to_constr (project gl) targ), int_of_coq_nat (to_constr (project gl) brs), b | _ -> error "Unnexpected goal") | _ -> error "Unnexpected goal" in let branches, b = let rec aux n c = if n == 0 then [], c else match kind (project gl) c with | LetIn ({binder_name=Name id}, br, brt, b) -> let rest, b = aux (pred n) b in (id, br, brt) :: rest, b | _ -> error "Unnexpected goal" in aux brs b in let ids = targetn :: branchesn :: List.map pi1 branches in let cleantac = intros_using_then ids clear in let dotac = tclDO (succ targ) intro in let letintac (id, br, brt) = tclTHEN (letin_tac None (Name id) br (Some brt) nowhere) (tclTHEN (move_tac id) tac) in let subtacs = tclTHENS destruct_tac (List.map letintac branches) in tclTHENLIST [intros; cleantac ; dotac ; subtacs] end let dependencies env sigma c ctx = let init = global_vars_set env c in let deps = fold_named_context_reverse (fun variables decl -> let n = get_id decl in let dvars = global_vars_set_of_decl env sigma decl in if Id.Set.mem n variables then Id.Set.union dvars variables else variables) ~init:init ctx in (init, Id.Set.diff deps init) Coq-Equations-1.3.1-8.20/src/equations.mli000066400000000000000000000032271463127417400201100ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) open Environ open Names open Equations_common open Splitting val define_by_eqs : pm:Declare.OblState.t -> poly:bool -> program_mode:bool -> tactic:unit Proofview.tactic -> open_proof:bool -> Syntax.equation_options -> Syntax.pre_equations -> Vernacexpr.notation_declaration list -> Declare.OblState.t * Declare.Proof.t option val define_principles : pm:Declare.OblState.t -> flags -> Syntax.rec_type -> (program * compiled_program_info) list -> Declare.OblState.t val equations : pm:Declare.OblState.t -> poly:bool -> program_mode:bool -> ?tactic:Libnames.qualid -> Syntax.equation_options -> Syntax.pre_equations -> Vernacexpr.notation_declaration list -> Declare.OblState.t val equations_interactive : pm:Declare.OblState.t -> poly:bool -> program_mode:bool -> ?tactic:Libnames.qualid -> Syntax.equation_options -> Syntax.pre_equations -> Vernacexpr.notation_declaration list -> Declare.OblState.t * Declare.Proof.t val solve_equations_goal : unit Proofview.tactic -> unit Proofview.tactic -> unit Proofview.tactic val dependencies : env -> Evd.evar_map -> Constr.t -> named_context -> Id.Set.t * Id.Set.t Coq-Equations-1.3.1-8.20/src/equations_common.ml000066400000000000000000001140311463127417400213030ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU 
Lesser General Public License Version 2.1 *) (**********************************************************************) open Util open Names open Constr open Termops open Environ open Reductionops open Pp open Locus open Context open Evarutil open List open Libnames open Tacmach open Tactics open Tacticals open Ltac_plugin open Tacexpr let ($) f g = fun x -> f (g x) let (&&&) f g (x, y) = (f x, g y) let id x = x let to_peuniverses (x, u) = (x, EConstr.EInstance.make u) let from_peuniverses sigma (x, u) = (x, EConstr.EInstance.kind sigma u) (* Options. *) let simplify_withUIP = ref false let equations_with_funext = ref true let equations_transparent = ref false let equations_derive_equations = ref true let equations_derive_eliminator = ref true let depr_with_k = Deprecation.make ~since:"equations v1.2" () let () = Goptions.declare_bool_option { Goptions.optdepr = Some depr_with_k; Goptions.optstage = Interp; Goptions.optkey = ["Equations"; "WithK"]; Goptions.optread = (fun () -> false); Goptions.optwrite = (fun b -> if b then CErrors.user_err (str"DEPRECATED. Use flag [Equations With UIP] and introduce \ an axiom [forall A, Equations.Classes.UIP A] \ as a type class instance using [Existing Instance] instead.") else simplify_withUIP := b) } let _ = Goptions.declare_bool_option { Goptions.optdepr = Some depr_with_k; Goptions.optstage = Interp; Goptions.optkey = ["Equations"; "WithKDec"]; Goptions.optread = (fun () -> !simplify_withUIP); Goptions.optwrite = (fun b -> simplify_withUIP := b) } let _ = Goptions.declare_bool_option { Goptions.optdepr = None; Goptions.optstage = Interp; Goptions.optkey = ["Equations"; "With"; "UIP"]; Goptions.optread = (fun () -> !simplify_withUIP); Goptions.optwrite = (fun b -> simplify_withUIP := b) } let _ = Goptions.declare_bool_option { Goptions.optdepr = None; Goptions.optstage = Interp; Goptions.optkey = ["Equations"; "Transparent"]; Goptions.optread = (fun () -> !equations_transparent); Goptions.optwrite = (fun b -> equations_transparent := b) } let _ = Goptions.declare_bool_option { Goptions.optdepr = None; Goptions.optstage = Interp; Goptions.optkey = ["Equations"; "With"; "Funext"]; Goptions.optread = (fun () -> !equations_with_funext); Goptions.optwrite = (fun b -> equations_with_funext := b) } let _ = Goptions.declare_bool_option { Goptions.optdepr = None; Goptions.optstage = Interp; Goptions.optkey = ["Equations"; "Derive"; "Equations"]; Goptions.optread = (fun () -> !equations_derive_equations); Goptions.optwrite = (fun b -> equations_derive_equations := b) } let _ = Goptions.declare_bool_option { Goptions.optdepr = None; Goptions.optstage = Interp; Goptions.optkey = ["Equations"; "Derive"; "Eliminator"]; Goptions.optread = (fun () -> !equations_derive_eliminator); Goptions.optwrite = (fun b -> equations_derive_eliminator := b) } (* Debugging infrastructure. 
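Controlled by the [Equations Debug] flag declared just below (i.e. [Set Equations Debug] at the vernacular level); [equations_debug] only forces its message thunk when the flag is set.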
*) let debug = ref false let _ = Goptions.declare_bool_option { Goptions.optdepr = None; Goptions.optstage = Interp; Goptions.optkey = ["Equations"; "Debug"]; Goptions.optread = (fun () -> !debug); Goptions.optwrite = (fun b -> debug := b) } let equations_debug s = if !debug then Feedback.msg_debug (s ()) let pp x = Pp.pp_with !Topfmt.std_ft x let ppenv_sigma f = fun x -> let env = Global.env () in pp (f env (Evd.from_env env) x) type flags = { polymorphic : bool; open_proof : bool; with_eqns : bool; with_ind : bool; allow_aliases : bool; tactic : unit Proofview.tactic } let check_term env evd c t = ignore(Typing.check env evd c t) let check_type env evd t = ignore(Typing.sort_of env evd t) let typecheck_rel_context env evd ctx = let open Context.Rel.Declaration in try let _ = List.fold_right (fun rel env -> check_type env evd (get_type rel); Option.iter (fun c -> check_term env evd c (get_type rel)) (get_value rel); EConstr.push_rel rel env) ctx env in () with e -> Printf.eprintf "Exception while typechecking context %s : %s\n" (Pp.string_of_ppcmds (Termops.Internal.print_rel_context (EConstr.push_rel_context ctx env) evd)) (Printexc.to_string e); raise e let new_untyped_evar () = let (sigma, ev) = new_pure_evar empty_named_context_val Evd.empty (EConstr.of_constr mkProp) in ev let proper_tails l = snd (List.fold_right (fun _ (t,ts) -> List.tl t, ts @ [t]) l (l, [])) let list_find_map_i f = let rec try_find_f n = function | [] -> None | h::t -> match f n h with | Some _ as res -> res | None -> try_find_f (n+1) t in try_find_f let array_remove_last a = Array.sub a 0 (Array.length a - 1) let array_chop_last a = Array.chop (Array.length a - 1) a let rev_assoc eq k = let rec loop = function | [] -> raise Not_found | (v,k')::_ when eq k k' -> v | _ :: l -> loop l in loop let array_filter_map f a = let l' = Array.fold_right (fun c acc -> Option.cata (fun r -> r :: acc) acc (f c)) a [] in Array.of_list l' let new_global sigma gr = try Evd.fresh_global (Global.env ()) sigma gr with e -> CErrors.anomaly Pp.(str"new_global raised an error on:" ++ Printer.pr_global gr) let e_new_global evdref gr = let sigma, gr = new_global !evdref gr in evdref := sigma; gr type lazy_ref = Names.GlobRef.t Lazy.t let equations_lib_ref s = Coqlib.lib_ref ("equations." 
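(* References are resolved through the [Register] mechanism, under the ["equations."] prefix. For instance, the sigma type registered as [equations.sigma.type] is obtained lazily below via [find_global "sigma.type"] and instantiated with [get_efresh]. *)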
^ s) let find_global s = lazy (equations_lib_ref s) let find_constant s evd = e_new_global evd (equations_lib_ref s) let global_reference id = match Smartlocate.global_of_extended_global (Nametab.locate_extended (qualid_of_ident id)) with | Some x -> x | None -> CErrors.anomaly Pp.(str"global_reference called on non existing " ++ Names.Id.print id) let e_type_of env evd t = let evm, t = Typing.type_of ~refresh:false env !evd t in evd := evm; t let collapse_term_qualities uctx c = let nf_evar _ = None in let nf_qvar q = match UState.nf_qvar uctx q with | QConstant _ as q -> q | QVar q -> (* hack *) QConstant QType in let nf_univ _ = None in UnivSubst.nf_evars_and_universes_opt_subst nf_evar nf_qvar nf_univ c let make_definition ?opaque ?(poly=false) evm ?types b = let env = Global.env () in let evm = match types with | None -> fst (Typing.type_of env evm b) | Some t -> let evm = fst (Typing.type_of env evm t) in Typing.check env evm b t in let evm = Evd.minimize_universes evm in let evm0 = evm in let to_constr c = collapse_term_qualities (Evd.evar_universe_context evm) (EConstr.to_constr evm c) in let body = to_constr b in let typ = Option.map to_constr types in let used = Vars.universes_of_constr body in let used' = match typ with | None -> Univ.Level.Set.empty | Some typ -> Vars.universes_of_constr typ in let used = Univ.Level.Set.union used used' in let evm = Evd.restrict_universe_context evm used in let univs = Evd.univ_entry ~poly evm in evm0, evm, Declare.definition_entry ~univs ?types:typ body let declare_constant id body ty ~poly ~kind evd = let evm0, evm, ce = make_definition ~opaque:false ~poly evd ?types:ty body in let cst = Declare.declare_constant ~name:id (Declare.DefinitionEntry ce) ~kind in Flags.if_verbose Feedback.msg_info (str((Id.to_string id) ^ " is defined")); if poly then let cstr = EConstr.(mkConstU (cst, EInstance.make (UVars.UContext.instance (Evd.to_universe_context evm)))) in cst, (evm0, cstr) else cst, (evm0, EConstr.UnsafeMonomorphic.mkConst cst) let make_definition ?opaque ?(poly=false) evm ?types b = let evm', _, t = make_definition ?opaque ~poly evm ?types b in evm', t let declare_instance id ~poly evm ctx cl args = let open Typeclasses in let open EConstr in let c, t = instance_constructor cl args in let term = it_mkLambda_or_LetIn (Option.get c) ctx in let typ = EConstr.it_mkProd_or_LetIn t ctx in let cst, ecst = declare_constant id term (Some typ) ~poly evm ~kind:Decls.(IsDefinition Instance) in let () = Classes.declare_instance (Global.env ()) evm (Some Hints.empty_hint_info) Hints.SuperGlobal (GlobRef.ConstRef cst) in cst, ecst let coq_zero = (find_global "nat.zero") let coq_succ = (find_global "nat.succ") let coq_nat = (find_global "nat.type") let rec coq_nat_of_int sigma = function | 0 -> Evd.fresh_global (Global.env ()) sigma (Lazy.force coq_zero) | n -> let sigma, succ = Evd.fresh_global (Global.env ()) sigma (Lazy.force coq_succ) in let sigma, n' = coq_nat_of_int sigma (pred n) in sigma, EConstr.mkApp (succ, [| n' |]) let rec int_of_coq_nat c = match Constr.kind c with | App (f, [| arg |]) -> succ (int_of_coq_nat arg) | _ -> 0 let fresh_id_in_env avoid id env = Namegen.next_ident_away_in_goal (Global.env ()) id (Id.Set.union avoid (Id.Set.of_list (ids_of_named_context (named_context env)))) let coq_fix_proto = (find_global "fixproto") let compute_sort_family l = let env = Global.env () in let evd = Evd.from_env env in let evd, c = Evd.fresh_global env evd (Lazy.force l) in let _, s = Reduction.dest_arity env (EConstr.to_constr 
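(* [compute_sort_family] determines whether a registered constant (e.g. the equality type) lives in [Prop], [SProp] or [Type] by destructing the arity of its type; the result is cached below as [logic_sort]. *)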
~abort_on_undefined_evars:false evd (Retyping.get_type_of env evd c)) in Sorts.family s let logic_eq_type = (find_global "equality.type") let logic_eq_refl = (find_global "equality.refl") let logic_eq_case = (find_global "equality.case") let logic_eq_elim = (find_global "equality.elim") let logic_sort = lazy (compute_sort_family logic_eq_type) let logic_bot = (find_global "bottom.type") let logic_bot_case = (find_global "bottom.case") let logic_bot_elim = (find_global "bottom.elim") let logic_top = (find_global "top.type") let logic_top_intro = (find_global "top.intro") let logic_top_elim = (find_global "top.elim") let logic_conj = (find_global "conj.type") let logic_conj_intro = (find_global "conj.intro") let logic_unit = (find_global "unit.type") let logic_unit_intro = (find_global "unit.intro") let logic_product = (find_global "product.type") let logic_pair = (find_global "product.intro") let logic_wellfounded_class = (find_global "wellfounded.class") let logic_wellfounded = (find_global "wellfounded.type") let logic_relation = (find_global "relation.type") let logic_transitive_closure = (find_global "relation.transitive_closure") let logic_tele_type = (find_global "tele.type") let logic_tele_tip = (find_global "tele.tip") let logic_tele_ext = (find_global "tele.ext") let logic_tele_interp = (find_global "tele.interp") let logic_tele_measure = (find_global "tele.measure") let logic_tele_fix = (find_global "tele.fix") let logic_tele_fix_functional_type = (find_global "tele.fix_functional_type") let logic_tele_fix_unfold = (find_global "tele.fix_unfold") let logic_tele_MR = (find_global "tele.MR") let logic_tele_type_app = (find_global "tele.type_app") let logic_tele_forall_type_app = (find_global "tele.forall_type_app") let logic_tele_forall_uncurry = (find_global "tele.forall_uncurry") let logic_tele_forall = (find_global "tele.forall") let logic_tele_forall_pack = (find_global "tele.forall_pack") let logic_tele_forall_unpack = (find_global "tele.forall_unpack") let logic_eqdec_class = (find_global "eqdec.class") let logic_eqdec_dec_eq = (find_global "eqdec.dec_eq") let logic_uip_class = (find_global "uip.class") let logic_uip_uip = (find_global "uip.uip") let logic_signature_class = find_global "signature.class" let logic_signature_sig = find_global "signature.signature" let logic_signature_pack = find_global "signature.pack" let get_fresh sigma r = new_global sigma (Lazy.force r) let get_efresh r evd = e_new_global evd (Lazy.force r) let is_lglobal env sigma gr c = EConstr.isRefX env sigma (Lazy.force gr) c open EConstr let fresh_sort_in_family evd s = let evars, sort = Evd.fresh_sort_in_family !evd s in evd := evars; mkSort sort let fresh_logic_sort evd = fresh_sort_in_family evd (Lazy.force logic_sort) let mkapp env evdref t args = let evd, c = fresh_global env !evdref (Lazy.force t) in let _ = evdref := evd in mkApp (c, args) let refresh_universes_strict env evd t = let evd', t' = Evarsolve.refresh_universes ~onlyalg:true (Some true) env !evd t in evd := evd'; t' let mkEq env evd t x y = mkapp env evd logic_eq_type [| refresh_universes_strict env evd t; x; y |] let mkRefl env evd ?inst t x = match inst with | Some inst -> EConstr.mkApp (EConstr.mkRef (Lazy.force logic_eq_refl, inst), [| refresh_universes_strict env evd t; x |]) | None -> mkapp env evd logic_eq_refl [| refresh_universes_strict env evd t; x |] let dummy_loc = None type 'a located = 'a Loc.located let tac_of_string tac args = try Tacinterp.interp (CAst.(make @@ TacArg(TacCall(make (Libnames.qualid_of_string tac, 
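(* Looks up an Ltac tactic by its qualified name and applies it to [args]; e.g. [tac_of_string "Equations.CoreTactics.pi" []] builds the [pi_tac] tactic used further down. *)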
args))))) with Not_found -> CErrors.anomaly Pp.(str"Cannot find tactic " ++ str tac) let get_class sigma c = let x = Typeclasses.class_of_constr (Global.env()) sigma c in fst (snd (Option.get x)) type esigma = Evd.evar_map ref type 'a peuniverses = 'a * EConstr.EInstance.t let functional_induction_class evd = let evdref = ref evd in let cl = find_constant "funind.class" evdref in !evdref, get_class !evdref cl let functional_elimination_class evd = let evdref = ref evd in let cl = find_constant "funelim.class" evdref in !evdref, get_class !evdref cl let dependent_elimination_class evd = get_class !evd (find_constant "depelim.class" evd) let coq_noconfusion_class = (find_global "noconfusion.class") let coq_nocycle_class = (find_global "nocycle.class") let coq_bang = (find_global "internal.bang") let coq_inacc = (find_global "internal.inaccessible_pattern") let coq_block = (find_global "internal.block") let coq_hide = (find_global "internal.hide_pattern") let coq_hidebody = (find_global "internal.hidebody") let coq_add_pattern = (find_global "internal.add_pattern") let coq_end_of_section_id = Id.of_string "eos" let coq_the_end_of_the_section = (find_global "internal.the_end_of_the_section") let coq_end_of_section = (find_global "internal.end_of_section") let coq_ImpossibleCall evd = find_constant "impossiblecall.class" evd let unfold_add_pattern = lazy (Tactics.unfold_in_concl [(Locus.AllOccurrences, Evaluable.EvalConstRef (Globnames.destConstRef (Lazy.force coq_add_pattern)))]) let subterm_relation_base = "subterm_relation" let coq_sigma = (find_global "sigma.type") let coq_sigmaI = (find_global "sigma.intro") let init_projection gr = let cst = Globnames.destConstRef gr in let p = Option.get @@ Structures.PrimitiveProjections.find_opt cst in Projection.make p false let coq_pr1 = lazy (init_projection (Lazy.force (find_global "sigma.pr1"))) let coq_pr2 = lazy (init_projection (Lazy.force (find_global "sigma.pr2"))) (* Misc tactics *) let rec head_of_constr sigma t = let t = strip_outer_cast sigma t in match kind sigma t with | Prod (_,_,c2) -> head_of_constr sigma c2 | LetIn (_,_,_,c2) -> head_of_constr sigma c2 | App (f,args) -> head_of_constr sigma f | _ -> t let nowhere = { onhyps = Some []; concl_occs = NoOccurrences } (* Lifting a [rel_context] by [n]. 
*) let of_tuple (x, b, t) = match b with | Some def -> Context.Rel.Declaration.LocalDef (x,def,t) | None -> Context.Rel.Declaration.LocalAssum (x, t) let lift_rel_contextn k n sign = let open Context.Rel in let open Declaration in let rec liftrec k = function | rel::sign -> let (na,c,t) = to_tuple rel in of_tuple (na,Option.map (Vars.liftn n k) c, Vars.liftn n k t)::(liftrec (k-1) sign) | [] -> [] in liftrec (Context.Rel.length sign + k) sign let lift_rel_context n sign = lift_rel_contextn 0 n sign let lift_list l = List.map (Vars.lift 1) l (* let compute_params cst = *) (* let body = constant_value (Global.env ()) cst in *) (* let init, n, c = *) (* let ctx, body = *) (* match Constr.kind body with *) (* | Lambda _ -> decompose_lam_assum c *) (* | _ -> [], c *) (* in *) (* (interval 0 (List.length ctx), *) (* List.length ctx, body) *) (* in *) (* let params_of_args pars n args = *) (* Array.fold_left *) (* (fun (pars, acc) x -> *) (* match pars with *) (* | [] -> (pars, acc) *) (* | par :: pars -> *) (* if isRel x then *) (* if n + par = destRel x then *) (* (pars, par :: acc) *) (* else (pars, acc) *) (* else (pars, acc)) *) (* (pars, []) args *) (* in *) (* let rec aux pars n c = *) (* match Constr.kind c with *) (* | App (f, args) -> *) (* if f = mkConst cst then *) (* let _, pars' = params_of_args pars n args in *) (* pars' *) (* else pars *) (* | _ -> pars *) (* in aux init n c *) let is_transparent_constant csts ps c = match Structures.PrimitiveProjections.find_opt c with | None -> Cset.mem c csts | Some p -> PRset.mem p ps let unfold_head env sigma (ids, csts, ps) c = let rec aux c = match kind sigma c with | Var id when Id.Set.mem id ids -> (match Environ.named_body id env with | Some b -> true, of_constr b | None -> false, c) | Const (cst,u) when is_transparent_constant csts ps cst -> true, of_constr (Environ.constant_value_in env (cst, EInstance.kind sigma u)) | App (f, args) -> (match aux f with | true, f' -> true, Reductionops.whd_betaiota env Evd.empty (mkApp (f', args)) | false, _ -> let done_, args' = Array.fold_left_i (fun i (done_, acc) arg -> if done_ then done_, arg :: acc else match aux arg with | true, arg' -> true, arg' :: acc | false, arg' -> false, arg :: acc) (false, []) args in if done_ then true, mkApp (f, Array.of_list (List.rev args')) else false, c) | _ -> let done_ = ref false in let c' = EConstr.map sigma (fun c -> if !done_ then c else let x, c' = aux c in done_ := x; c') c in !done_, c' in aux c open CErrors let unfold_head env sigma db t = let st = List.fold_left (fun (i,c,p) dbname -> let db = try Hints.searchtable_map dbname with Not_found -> user_err (str "Unknown database " ++ str dbname) in let (ids, csts, ps) = Hints.Hint_db.unfolds db in (Id.Set.union ids i, Cset.union csts c, PRset.union ps p)) (Id.Set.empty, Cset.empty, PRset.empty) db in unfold_head env sigma st t let autounfold_heads db db' cl = Proofview.Goal.enter begin fun gl -> let env = Proofview.Goal.env gl in let sigma = Proofview.Goal.sigma gl in let eq = (match cl with Some (id, _) -> pf_get_hyp_typ id gl | None -> pf_concl gl) in let did, c' = match kind (project gl) eq with | App (f, [| ty ; x ; y |]) when EConstr.isRefX env sigma (Lazy.force logic_eq_type) f -> let did, x' = unfold_head (pf_env gl) (project gl) db x in let did', y' = unfold_head (pf_env gl) (project gl) db' y in did && did', EConstr.mkApp (f, [| ty ; x' ; y' |]) | _ -> false, eq in if did then match cl with | Some hyp -> change_in_hyp ~check:true None (make_change_arg c') hyp | None -> convert_concl ~cast:false 
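(* [autounfold_heads db db' cl] : when the conclusion (or the hypothesis [cl]) is an equality [t = t'], unfold the head constant of [t] via the hint databases [db] and the head of [t'] via [db'], then replace the goal or hypothesis; otherwise fall through to the failing [tclFAIL] branch below. *)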
~check:false c' DEFAULTcast else tclFAIL (str "Nothing to unfold") end type hintdb_name = string let rec db_of_constr c = match Constr.kind c with | Const (c,_) -> Label.to_string (Constant.label c) | App (c,al) -> db_of_constr c | _ -> assert false let dbs_of_constrs = List.map db_of_constr (** Bindings to Coq *) let tacvar_arg h = let ipat = Genarg.in_gen (Genarg.rawwit Tacarg.wit_intro_pattern) (CAst.make @@ Tactypes.IntroNaming (Namegen.IntroIdentifier h)) in TacGeneric (None, ipat) let rec_wf_tac h n h' rel = CAst.(make @@ TacArg(TacCall(make (qualid_of_string "Equations.Subterm.rec_wf_eqns_rel", [tacvar_arg h'; ConstrMayEval (Genredexpr.ConstrTerm n); ConstrMayEval (Genredexpr.ConstrTerm h); ConstrMayEval (Genredexpr.ConstrTerm rel)])))) let solve_rec_tac () = tac_of_string "Equations.Equations.solve_rec" [] let pi_tac () = tac_of_string "Equations.CoreTactics.pi" [] let set_eos_tac () = tac_of_string "Equations.CoreTactics.set_eos" [] (* Thos are forward references in Init, that get redefined later *) let noconf_tac () = tac_of_string "Equations.Init.solve_noconf" [] let noconf_hom_tac () = tac_of_string "Equations.Init.solve_noconf_hom" [] let eqdec_tac () = tac_of_string "Equations.Init.solve_eqdec" [] let simpl_equations_tac () = tac_of_string "Equations.Init.simpl_equations" [] let solve_subterm_tac () = tac_of_string "Equations.Init.solve_subterm" [] let specialize_mutfix_tac () = tac_of_string "Equations.Init.specialize_mutfix" [] let unfold_recursor_tac () = tac_of_string "Equations.Init.unfold_recursor" [] let unfold_recursor_ext_tac () = tac_of_string "Equations.Init.unfold_recursor_ext" [] open Libnames let reference_of_global c = Nametab.shortest_qualid_of_global Names.Id.Set.empty c let tacident_arg h = Reference (qualid_of_ident h) let find_depelim_module () = let gr = Coqlib.lib_ref "equations.depelim.module" in match gr with | GlobRef.ConstRef c -> Names.Constant.modpath c | _ -> CErrors.anomaly (str"equations.depelim.module is not defined") let depelim_module = Lazy.from_fun find_depelim_module let find_depelim_prefix () = let modpath = Lazy.force depelim_module in let mp = ModPath.to_string modpath in mp let depelim_prefix = Lazy.from_fun find_depelim_prefix let depelim_tactic s = Lazy.force depelim_prefix ^ "." 
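(* The dependent-elimination tactics below are resolved relative to the module registered as [equations.depelim.module], i.e. the [DepElim] module of whichever logic variant is loaded, so the same OCaml code can presumably drive, e.g., both the Prop and the HoTT developments. *)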
^ s let depelim_tac h = tac_of_string "Equations.Init.depelim" [tacident_arg h] let do_empty_tac h = tac_of_string (depelim_tactic "do_empty") [tacident_arg h] let depelim_nosimpl_tac h = tac_of_string (depelim_tactic "depelim_nosimpl") [tacident_arg h] let simpl_dep_elim_tac () = tac_of_string (depelim_tactic "simpl_dep_elim") [] let depind_tac h = tac_of_string (depelim_tactic "depind") [tacident_arg h] let equations_tac () = tac_of_string (depelim_tactic "equations") [] let find_empty_tac () = tac_of_string (depelim_tactic "find_empty") [] let call_tac_on_ref tac c = let var = Names.Id.of_string "x" in let tac = Locus.ArgArg (dummy_loc, tac) in let val_reference = Geninterp.val_tag (Genarg.topwit Stdarg.wit_constr) in (* This is a hack to avoid generating useless universes *) let c = Constr.mkRef (c, UVars.Instance.empty) in let c = Geninterp.Val.inject val_reference (EConstr.of_constr c) in let ist = Geninterp.{ lfun = Names.Id.Map.add var c Names.Id.Map.empty; extra = Geninterp.TacStore.empty; poly = false } in let var = Reference (Locus.ArgVar CAst.(make var)) in let tac = CAst.(make @@ TacArg (TacCall (make (tac, [var])))) in ist, tac let solve_equation () = Names.KerName.make (Lazy.force depelim_module) (Names.Label.make "solve_equation") let solve_equation_tac (c : Names.GlobRef.t) = let ist, tac = call_tac_on_ref (solve_equation ()) c in Tacinterp.eval_tactic_ist ist tac let impossible_call_tac c = let tac = Tacintern.glob_tactic (CAst.(make @@ TacArg (TacCall(make (Libnames.qualid_of_string (depelim_tactic "impossible_call"), [Reference (reference_of_global c)]))))) in let val_tac = Genarg.glbwit Tacarg.wit_tactic in Genarg.in_gen val_tac tac (* let impossible_call_tac c = *) (* let ist, tac = call_tac_on_ref impossible_call c in *) (* let val_tac = Genarg.glbwit Tacarg.wit_tactic in *) (* let c = Genarg.in_gen val_tac tac in *) (* c *) open EConstr.Vars let mkProd_or_subst decl c = let open Context.Rel.Declaration in match get_value decl with | None -> mkProd (get_annot decl, get_type decl, c) | Some b -> subst1 b c let mkProd_or_clear sigma decl c = if not (dependent sigma (mkRel 1) c) then subst1 mkProp c else mkProd_or_LetIn decl c let it_mkProd_or_clear sigma ty ctx = fold_left (fun c d -> mkProd_or_clear sigma d c) ty ctx let mkLambda_or_subst decl c = let open Context.Rel.Declaration in match get_value decl with | None -> mkLambda (get_annot decl, get_type decl, c) | Some b -> subst1 b c let mkLambda_or_subst_or_clear sigma decl c = let open Context.Rel.Declaration in let (na,body,t) = to_tuple decl in match body with | None when dependent sigma (mkRel 1) c -> mkLambda (na, t, c) | None -> subst1 mkProp c | Some b -> subst1 b c let mkProd_or_subst_or_clear sigma decl c = let open Context.Rel.Declaration in let (na,body,t) = to_tuple decl in match body with | None when dependent sigma (mkRel 1) c -> mkProd (na, t, c) | None -> subst1 mkProp c | Some b -> subst1 b c let it_mkProd_or_subst env sigma ty ctx = nf_beta env sigma (List.fold_left (fun c d -> whd_betalet env sigma (mkProd_or_LetIn d c)) ty ctx) let it_mkProd_or_clean env sigma ty ctx = let open Context.Rel.Declaration in nf_beta env sigma (List.fold_left (fun c d -> whd_betalet env sigma (if (get_name d) == Anonymous then subst1 mkProp c else mkProd_or_LetIn d c)) ty ctx) let it_mkLambda_or_subst env ty ctx = whd_betalet env Evd.empty (List.fold_left (fun c d -> mkLambda_or_LetIn d c) ty ctx) let mkLambda_or_clear_LetIn sigma decl c = let open Context.Rel.Declaration in let (na,body,t) = to_tuple decl in match 
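(* keep a plain lambda when there is no body; if the let-bound value does not occur in [c], substitute it away rather than produce a let-in *)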
body with | None -> mkLambda (na, t, c) | Some b -> if noccurn sigma 1 c then subst1 b c else mkLetIn (na, b, t, c) let it_mkLambda_or_clear_LetIn sigma ty ctx = List.fold_left (fun c d -> mkLambda_or_clear_LetIn sigma d c) ty ctx let it_mkLambda_or_subst_or_clear sigma ty ctx = (List.fold_left (fun c d -> mkLambda_or_subst_or_clear sigma d c) ty ctx) let it_mkProd_or_subst_or_clear sigma ty ctx = (List.fold_left (fun c d -> mkProd_or_subst_or_clear sigma d c) ty ctx) let lift_constrs n cs = List.map (lift n) cs let ids_of_constr sigma ?(all=false) vars c = let rec aux vars c = match kind sigma c with | Var id -> Id.Set.add id vars | App (f, args) -> (match kind sigma f with | Construct ((ind,_),_) | Ind (ind, _) -> let (mib,mip) = Global.lookup_inductive ind in Array.fold_left_from (if all then 0 else mib.Declarations.mind_nparams) aux vars args | _ -> fold sigma aux vars c) | _ -> fold sigma aux vars c in aux vars c let decompose_indapp sigma f args = match kind sigma f with | Construct ((ind,_),_) | Ind (ind,_) -> let (mib,mip) = Global.lookup_inductive ind in let first = mib.Declarations.mind_nparams_rec in let pars, args = Array.chop first args in mkApp (f, pars), args | _ -> f, args let e_conv env evdref t t' = match Reductionops.infer_conv env !evdref ~pb:Conversion.CONV t t' with | Some sigma -> (evdref := sigma; true) | None -> false let deps_of_var sigma id env = Environ.fold_named_context (fun _ decl (acc : Id.Set.t) -> let n, b, t = Context.Named.Declaration.to_tuple decl in if Option.cata (fun x -> occur_var env sigma id (of_constr x)) false b || occur_var env sigma id (of_constr t) then Id.Set.add n.binder_name acc else acc) env ~init:Id.Set.empty let idset_of_list = List.fold_left (fun s x -> Id.Set.add x s) Id.Set.empty let pr_smart_global f = Pputils.pr_or_by_notation pr_qualid f let string_of_smart_global = function | {CAst.v=Constrexpr.AN ref} -> string_of_qualid ref | {CAst.v=Constrexpr.ByNotation (s, _)} -> s let ident_of_smart_global x = Id.of_string (string_of_smart_global x) let move_after_deps id c = let open Context.Named.Declaration in let enter gl = let sigma = Proofview.Goal.sigma gl in let hyps = Proofview.Goal.hyps gl in let deps = collect_vars sigma c in let iddeps = collect_vars sigma (Tacmach.pf_get_hyp_typ id gl) in let deps = Id.Set.diff deps iddeps in let find decl = Id.Set.mem (get_id decl) deps in let first = match snd (List.split_when find (List.rev hyps)) with | a :: _ -> get_id a | [] -> user_err Pp.(str"Found no hypothesis on which " ++ Id.print id ++ str" depends") in Tactics.move_hyp id (Logic.MoveAfter first) in Proofview.Goal.enter enter let assert_replacing id c ty = tclTHENFIRST (assert_before_replacing id ty) (exact_no_check c) let observe s (tac : unit Proofview.tactic) = let open Proofview.Notations in let open Proofview in if not !debug then tac else Goal.enter (fun gl -> let env = Goal.env gl in let sigma = Goal.sigma gl in Feedback.msg_debug (str"Applying " ++ str s ++ str " on " ++ Printer.pr_named_context_of env sigma ++ str "=================" ++ Printer.pr_econstr_env env sigma (Goal.concl gl)); (Proofview.tclORELSE (Proofview.tclTHEN tac (Proofview.numgoals >>= fun gls -> if gls = 0 then (Feedback.msg_debug (str s ++ str " succeeded"); Proofview.tclUNIT ()) else Proofview.Goal.enter begin fun gl -> let () = Feedback.msg_debug (str "Subgoal: " ++ Printer.Debug.pr_goal gl) in Proofview.tclUNIT () end)) (fun iexn -> Feedback.msg_debug (str"Failed with: " ++ (match fst iexn with | Tacticals.FailError (n,expl) -> (str" Fail error " ++ 
int n ++ str " for " ++ str s ++ spc () ++ Lazy.force expl ++ str " on " ++ Printer.pr_econstr_env env sigma (Goal.concl gl)) | Pretype_errors.PretypeError (env, sigma, e) -> (str " Pretype error: " ++ Himsg.explain_pretype_error env sigma e) | _ -> CErrors.iprint iexn)); Proofview.tclZERO ~info:(snd iexn) (fst iexn)))) (** Compat definitions *) type rel_context = EConstr.rel_context type rel_declaration = EConstr.rel_declaration type named_declaration = EConstr.named_declaration type named_context = EConstr.named_context let extended_rel_vect n ctx = Context.Rel.instance mkRel n ctx let extended_rel_list n ctx = Context.Rel.instance_list mkRel n ctx let to_tuple = Context.Rel.Declaration.to_tuple let to_named_tuple = Context.Named.Declaration.to_tuple let of_named_tuple = Context.Named.Declaration.of_tuple let to_context c = List.map of_tuple c let get_type = Context.Rel.Declaration.get_type let get_value = Context.Rel.Declaration.get_value let get_name = Context.Rel.Declaration.get_name let get_annot = Context.Rel.Declaration.get_annot let get_named_type = Context.Named.Declaration.get_type let get_named_value = Context.Named.Declaration.get_value let make_assum n t = Context.Rel.Declaration.LocalAssum (n, t) let make_def n b t = match b with | None -> Context.Rel.Declaration.LocalAssum (n, t) | Some b -> Context.Rel.Declaration.LocalDef (n, b, t) let make_named_def n b t = match b with | None -> Context.Named.Declaration.LocalAssum (n, t) | Some b -> Context.Named.Declaration.LocalDef (n, b, t) let lookup_rel = Context.Rel.lookup let named_of_rel_context ?(keeplets = false) default l = let acc, args, _, ctx = List.fold_right (fun decl (subst, args, ids, ctx) -> let decl = Context.Rel.Declaration.map_constr (substl subst) decl in let id = match get_name decl with Anonymous -> default () | Name id -> id in let d = Named.Declaration.of_rel_decl (fun _ -> id) decl in let args = if keeplets ||Context.Rel.Declaration.is_local_assum decl then mkVar id :: args else args in (mkVar id :: subst, args, id :: ids, d :: ctx)) l ([], [], [], []) in acc, rev args, ctx let rel_of_named_context sigma ctx = List.fold_right (fun decl (ctx',subst) -> let (n, b, t) = to_named_tuple decl in let decl = make_def (map_annot (fun n -> Name n) n) (Option.map (subst_vars sigma subst) b) (subst_vars sigma subst t) in (decl :: ctx', n.binder_name :: subst)) ctx ([],[]) let empty_hint_info = Hints.empty_hint_info (* Substitute a list of constrs [cstrs] in rel_context [ctx] for variable [k] and above. 
*) open Context.Rel.Declaration let map_decl f x = match x with | LocalAssum (na,x) -> LocalAssum (na, f x) | LocalDef (na,b,t) -> LocalDef (na, f b, f t) let subst_rel_context k cstrs ctx = let (_, ctx') = fold_right (fun decl (k, ctx') -> (succ k, map_decl (substnl cstrs k) decl :: ctx')) ctx (k, []) in ctx' (* A telescope is a reversed rel_context *) let subst_telescope cstr ctx = let (_, ctx') = fold_left (fun (k, ctx') decl -> (succ k, (map_decl (substnl [cstr] k) decl) :: ctx')) (0, []) ctx in rev ctx' (* Substitute rel [n] by [c] in [ctx] Precondition: [c] is typable in [ctx] using variables above [n] *) let subst_in_ctx (n : int) (c : constr) (ctx : EConstr.rel_context) : EConstr.rel_context = let rec aux k after = function | [] -> [] | decl :: before -> if k == n then (subst_rel_context 0 [lift (-k) c] (List.rev after)) @ before else aux (succ k) (decl :: after) before in aux 1 [] ctx let set_in_ctx (n : int) (c : constr) (ctx : EConstr.rel_context) : EConstr.rel_context = let rec aux k after = function | [] -> [] | decl :: before -> if k == n then (rev after) @ LocalDef (get_annot decl, lift (-k) c, get_type decl) :: before else aux (succ k) (decl :: after) before in aux 1 [] ctx let get_id decl = Context.Named.Declaration.get_id decl let fold_named_context_reverse = Context.Named.fold_inside let map_rel_context = Context.Rel.map let map_rel_declaration = Context.Rel.Declaration.map_constr let map_named_declaration = Context.Named.Declaration.map_constr let map_named_context = Context.Named.map let lookup_named = Context.Named.lookup let subst_in_named_ctx sigma (n : Id.t) (c : constr) (ctx : EConstr.named_context) : EConstr.named_context = let rec aux after = function | [] -> [] | decl :: before -> let name = get_id decl in if Id.equal name n then (rev after) @ before else aux (map_named_declaration (replace_vars sigma [n,c]) decl :: after) before in aux [] ctx let pp cmds = Feedback.msg_info cmds let user_err_loc (loc, pp) = CErrors.user_err ?loc pp let error s = CErrors.user_err (str s) let errorlabstrm msg = CErrors.user_err msg let is_anomaly = CErrors.is_anomaly let print_error e = CErrors.print e let nf_betadeltaiota = nf_all let anomaly ?label pp = CErrors.anomaly ?label pp let new_evar env evm ?src ty = Evarutil.new_evar env evm ?src ty let new_type_evar env evm ?src rigid = Evarutil.new_type_evar env evm rigid ?src let to_evar_map x = x let of_evar_map x = x let evar_absorb_arguments = Evardefine.evar_absorb_arguments let hintdb_set_transparency cst b db = let locality = if Global.sections_are_opened () then Hints.Local else Hints.SuperGlobal in Hints.add_hints ~locality [db] (Hints.HintsTransparencyEntry (Hints.HintsReferences [Evaluable.EvalConstRef cst], b)) (* Call the really unsafe is_global test, we use this on evar-open terms too *) let is_global = EConstr.isRefX let constr_of_global_univ sigma u = of_constr (Constr.mkRef (from_peuniverses sigma u)) let rel_vect n m = Array.map of_constr (rel_vect n m) let applistc c a = applist (c, a) let instance_constructor sigma tc args = Typeclasses.instance_constructor tc args let decompose_appvect sigma t = match kind sigma t with | App (f, v) -> (f, v) | _ -> (t, [||]) let dest_ind_family fam = let ind, fam = Inductiveops.dest_ind_family fam in ind, fam (* XXX: EConstr-versions fo these functions really needed XXX *) let to_constr = to_constr ~abort_on_undefined_evars:false let prod_appvect sigma p args = of_constr (Term.prod_appvect (to_constr sigma p) (Array.map (to_constr sigma) args)) let beta_appvect sigma p args = 
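(* Same [Constr] round-trip as [prod_appvect] just above. *)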
of_constr (Reduction.beta_appvect (to_constr sigma p) (Array.map (to_constr sigma) args)) let find_rectype env sigma ty = let Inductiveops.IndType (ind, args) = Inductiveops.find_rectype env sigma ty in ind, args let splay_prod_n_assum env sigma n = let rec prodec_rec env n l c = if n = 0 then (l, c) else let t = whd_allnolet env sigma c in match EConstr.kind sigma t with | Prod (x,t,c) -> prodec_rec (push_rel (LocalAssum (x,t)) env) (pred n) (Context.Rel.add (LocalAssum (x,t)) l) c | LetIn (x,b,t,c) -> prodec_rec (push_rel (LocalDef (x,b,t)) env) (pred n) (Context.Rel.add (LocalDef (x,b,t)) l) c | Cast (c,_,_) -> prodec_rec env n l c | _ -> let t' = whd_all env sigma t in if EConstr.eq_constr sigma t t' then l,t else prodec_rec env n l t' in prodec_rec env n Context.Rel.empty type identifier = Names.Id.t let evd_comb0 f evd = let evm, r = f !evd in evd := evm; r let evd_comb1 f evd x = let evm, r = f !evd x in evd := evm; r (* Universe related functions *) let nonalgebraic_universe_level_of_universe env sigma u = match ESorts.kind sigma u with | Sorts.Set | Sorts.Prop | Sorts.SProp -> sigma, Univ.Level.set, u | Sorts.Type u0 | Sorts.QSort (_, u0) -> match Univ.Universe.level u0 with | Some l -> (match Evd.universe_rigidity sigma l with | Evd.UnivFlexible true -> Evd.make_nonalgebraic_variable sigma l, l, ESorts.make @@ Sorts.sort_of_univ @@ Univ.Universe.make l | _ -> sigma, l, u) | None -> let sigma, l = Evd.new_univ_level_variable Evd.univ_flexible sigma in let ul = ESorts.make @@ Sorts.sort_of_univ @@ Univ.Universe.make l in let sigma = Evd.set_leq_sort env sigma u ul in sigma, l, ul let instance_of env sigma ?argu goalu = let sigma, goall, goalu = nonalgebraic_universe_level_of_universe env sigma goalu in let inst = match argu with | Some equ -> let equ = EConstr.EInstance.kind sigma equ in let quals, equarray = UVars.Instance.to_array equ in EConstr.EInstance.make (UVars.Instance.of_array (quals, Array.append equarray [| goall |])) | None -> EConstr.EInstance.make (UVars.Instance.of_array ([||], [| goall |])) in sigma, inst, goalu Coq-Equations-1.3.1-8.20/src/equations_common.mli000066400000000000000000000400721463127417400214570ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) open EConstr open Environ open Names open Ltac_plugin type 'a peuniverses = 'a * EConstr.EInstance.t (* Options *) val simplify_withUIP : bool ref val equations_with_funext : bool ref val equations_transparent : bool ref val equations_derive_equations : bool ref val equations_derive_eliminator : bool ref val debug : bool ref val equations_debug : (unit -> Pp.t) -> unit val ppenv_sigma : (Environ.env -> Evd.evar_map -> 'a -> Pp.t) -> 'a -> unit (* Common flags *) type flags = { polymorphic : bool; open_proof : bool; with_eqns : bool; with_ind : bool; allow_aliases : bool; tactic : unit Proofview.tactic } (* Point-free composition *) val ( $ ) : ('a -> 'b) -> ('c -> 'a) -> 'c -> 'b val ( &&& ) : ('a -> 'b) -> ('c -> 'd) -> 'a * 'c -> 'b * 'd val id : 'a -> 'a val array_remove_last : 'a array -> 'a array val array_chop_last : 'a array -> 'a array * 'a array val rev_assoc : ('a -> 'b -> bool) -> 'a -> ('c * 'b) list -> 'c val array_filter_map : ('a -> 'b 
option) -> 'a array -> 'b array (* All the tails of [x1 ... xn] : [[xn]; [xn-1; xn] ...[x2 .. xn]] *) val proper_tails : 'a list -> 'a list list (* Stop at the first Some *) val list_find_map_i : (int -> 'a -> 'b option) -> int -> 'a list -> 'b option type esigma = Evd.evar_map ref val head_of_constr : Evd.evar_map -> constr -> constr val nowhere : 'a Locus.clause_expr val dummy_loc : Loc.t option type 'a located = 'a Loc.located (** Fresh names *) val fresh_id_in_env : Names.Id.Set.t -> Names.Id.t -> Environ.env -> Names.Id.t (** Refer to a tactic *) val tac_of_string : string -> Tacexpr.r_dispatch Tacexpr.gen_tactic_arg list -> unit Proofview.tactic type rel_context = EConstr.rel_context type rel_declaration = EConstr.rel_declaration type named_declaration = EConstr.named_declaration type named_context = EConstr.named_context (** Context lifting *) val lift_rel_contextn : int -> int -> rel_context -> rel_context val lift_rel_context : int -> rel_context -> rel_context val lift_list : constr list -> constr list val lift_constrs : int -> constr list -> constr list (** Evars *) val new_untyped_evar : unit -> Evar.t (** Checking *) val check_term : Environ.env -> Evd.evar_map -> constr -> types -> unit val check_type : Environ.env -> Evd.evar_map -> types -> unit val typecheck_rel_context : Environ.env -> Evd.evar_map -> rel_context -> unit val e_conv : env -> esigma -> constr -> constr -> bool val e_type_of : env -> esigma -> constr -> types (** Term manipulation *) val mkProd_or_subst : rel_declaration -> types -> types val mkProd_or_clear : Evd.evar_map -> rel_declaration -> constr -> constr val it_mkProd_or_clear : Evd.evar_map -> constr -> rel_declaration list -> constr val mkLambda_or_subst : rel_declaration -> constr -> constr val mkLambda_or_subst_or_clear : Evd.evar_map -> rel_declaration -> constr -> constr val mkProd_or_subst_or_clear : Evd.evar_map -> rel_declaration -> constr -> types val it_mkProd_or_subst : Environ.env -> Evd.evar_map -> types -> rel_declaration list -> constr val it_mkProd_or_clean : Environ.env -> Evd.evar_map -> constr -> rel_context -> constr val it_mkLambda_or_subst : Environ.env -> constr -> rel_declaration list -> constr val it_mkLambda_or_subst_or_clear : Evd.evar_map -> constr -> rel_context -> constr val it_mkProd_or_subst_or_clear : Evd.evar_map -> constr -> rel_context -> constr val it_mkLambda_or_clear_LetIn : Evd.evar_map -> constr -> rel_context -> constr val ids_of_constr : Evd.evar_map -> ?all:bool -> Id.Set.t -> constr -> Id.Set.t val deps_of_var : Evd.evar_map -> Id.t -> env -> Id.Set.t val idset_of_list : Id.t list -> Id.Set.t val decompose_indapp : Evd.evar_map -> constr -> constr array -> constr * constr array val refresh_universes_strict : Environ.env -> esigma -> types -> types val new_global : Evd.evar_map -> Names.GlobRef.t -> Evd.evar_map * constr val e_new_global : esigma -> Names.GlobRef.t -> constr (** {6 Linking to Coq} *) val global_reference : Id.t -> Names.GlobRef.t val get_class : Evd.evar_map -> constr -> Typeclasses.typeclass * EConstr.EInstance.t val make_definition : ?opaque:'a -> ?poly:bool -> Evd.evar_map -> ?types:constr -> constr -> Evd.evar_map * Declare.proof_entry (** Declares a constant relative to an evar_map. It returns a constant and, in addition, an evar_map and econstr corresponding to it. - If the constant is polymorphic, it returns the minimized universes and a well-formed instance of the constant in that evar_map. - If it is not polymorphic, it returns a fresh evar map from the updated global environment. 
This allows easy construction of tactics that generate multiple related constants, even in the polymorphic case. *) val declare_constant : Id.t -> constr -> constr option -> poly:bool -> kind:Decls.logical_kind -> Evd.evar_map -> Constant.t * (Evd.evar_map * EConstr.t) val declare_instance : Names.Id.t -> poly:bool -> Evd.evar_map -> rel_context -> Typeclasses.typeclass peuniverses -> constr list -> Constant.t * (Evd.evar_map * EConstr.t) (** Standard datatypes *) type lazy_ref = Names.GlobRef.t Lazy.t val equations_lib_ref : string -> Names.GlobRef.t val find_global : string -> lazy_ref val logic_sort : Sorts.family lazy_t val logic_eq_type : lazy_ref val logic_eq_refl : lazy_ref val logic_eq_case : lazy_ref val logic_eq_elim : lazy_ref (** In Prop, True is top, bot is False, conjunction is and *) val logic_top : lazy_ref val logic_top_intro : lazy_ref val logic_top_elim : lazy_ref val logic_bot : lazy_ref val logic_bot_case : lazy_ref val logic_bot_elim : lazy_ref val logic_conj : lazy_ref val logic_conj_intro : lazy_ref val logic_unit : lazy_ref val logic_unit_intro : lazy_ref val logic_product : lazy_ref val logic_pair : lazy_ref val logic_relation : lazy_ref val logic_wellfounded : lazy_ref val logic_wellfounded_class : lazy_ref val logic_transitive_closure : lazy_ref val logic_eqdec_class : lazy_ref val logic_eqdec_dec_eq : lazy_ref val logic_uip_class : lazy_ref val logic_uip_uip : lazy_ref val logic_signature_class : lazy_ref val logic_signature_sig : lazy_ref val logic_signature_pack : lazy_ref val get_fresh : Evd.evar_map -> lazy_ref -> Evd.evar_map * constr val get_efresh : lazy_ref -> esigma -> constr val is_lglobal : Environ.env -> Evd.evar_map -> lazy_ref -> EConstr.constr -> bool val coq_sigma : lazy_ref val coq_sigmaI : lazy_ref val coq_pr1 : Names.Projection.t lazy_t val coq_pr2 : Names.Projection.t lazy_t val logic_tele_type : lazy_ref val logic_tele_tip : lazy_ref val logic_tele_ext : lazy_ref val logic_tele_interp : lazy_ref val logic_tele_measure : lazy_ref val logic_tele_fix : lazy_ref val logic_tele_fix_functional_type : lazy_ref val logic_tele_fix_unfold : lazy_ref val logic_tele_MR : lazy_ref (** Constants used in the telescopic fixpoint, to be unfolded agressively *) val logic_tele_type_app : lazy_ref val logic_tele_forall_type_app : lazy_ref val logic_tele_forall_uncurry : lazy_ref val logic_tele_forall : lazy_ref val logic_tele_forall_pack : lazy_ref val logic_tele_forall_unpack : lazy_ref val coq_zero : lazy_ref val coq_succ : lazy_ref val coq_nat : lazy_ref val coq_nat_of_int : Evd.evar_map -> int -> Evd.evar_map * EConstr.t val int_of_coq_nat : Constr.t -> int val coq_fix_proto : lazy_ref val fresh_sort_in_family : esigma -> Sorts.family -> constr val fresh_logic_sort : esigma -> constr val mkapp : Environ.env -> esigma -> lazy_ref -> constr array -> constr val mkEq : Environ.env -> esigma -> types -> constr -> constr -> constr val mkRefl : Environ.env -> esigma -> ?inst:EConstr.EInstance.t -> types -> constr -> constr (** Bindings to theories/ files *) val subterm_relation_base : string val functional_induction_class : Evd.evar_map -> Evd.evar_map * Typeclasses.typeclass peuniverses val functional_elimination_class : Evd.evar_map -> Evd.evar_map * Typeclasses.typeclass peuniverses val dependent_elimination_class : esigma -> Typeclasses.typeclass peuniverses val coq_noconfusion_class : Names.GlobRef.t lazy_t val coq_nocycle_class : Names.GlobRef.t lazy_t val coq_bang : Names.GlobRef.t Lazy.t val coq_inacc : Names.GlobRef.t Lazy.t val coq_block : 
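(* Illustrative sketch (an assumption about typical usage, not part of the interface): the [declare_constant] function documented above would be called as

     let _cst, (sigma', term) =
       declare_constant (Id.of_string "f_aux") body (Some ty)
         ~poly:false ~kind:Decls.(IsDefinition Definition) sigma
     in ...

   where [body] and [ty] stand for hypothetical, already elaborated terms; [term] then denotes the new constant and is usable in [sigma']. *)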
Names.GlobRef.t Lazy.t val coq_hide : Names.GlobRef.t Lazy.t val coq_hidebody : Names.GlobRef.t Lazy.t val coq_add_pattern : Names.GlobRef.t Lazy.t val coq_end_of_section_id : Names.Id.t val coq_the_end_of_the_section : Names.GlobRef.t Lazy.t val coq_end_of_section : Names.GlobRef.t Lazy.t val coq_ImpossibleCall : esigma -> constr val unfold_add_pattern : unit Proofview.tactic lazy_t val observe : string -> unit Proofview.tactic -> unit Proofview.tactic val unfold_recursor_tac : unit -> unit Proofview.tactic val unfold_recursor_ext_tac : unit -> unit Proofview.tactic val equations_tac : unit -> unit Proofview.tactic val set_eos_tac : unit -> unit Proofview.tactic val solve_rec_tac : unit -> unit Proofview.tactic val find_empty_tac : unit -> unit Proofview.tactic val solve_subterm_tac : unit -> unit Proofview.tactic val pi_tac : unit -> unit Proofview.tactic val noconf_tac : unit -> unit Proofview.tactic val noconf_hom_tac : unit -> unit Proofview.tactic val eqdec_tac : unit -> unit Proofview.tactic val simpl_equations_tac : unit -> unit Proofview.tactic val solve_equation_tac : Names.GlobRef.t -> unit Proofview.tactic val impossible_call_tac : Names.GlobRef.t -> Genarg.glevel Genarg.generic_argument val depelim_tac : Names.Id.t -> unit Proofview.tactic val do_empty_tac : Names.Id.t -> unit Proofview.tactic val depelim_nosimpl_tac : Names.Id.t -> unit Proofview.tactic val simpl_dep_elim_tac : unit -> unit Proofview.tactic val depind_tac : Names.Id.t -> unit Proofview.tactic val rec_wf_tac : Genredexpr.r_trm -> Genredexpr.r_trm -> Names.Id.t -> Genredexpr.r_trm -> Tacexpr.r_dispatch Tacexpr.gen_tactic_expr (** [autounfold_heads db db' cl] When the conclusion (or the hypothesis [cl], if given) has the shape [@eq _ t t'], unfolds the head constant of [t] using the constants declared unfoldable (with [Hint Unfold]) in the databases [db] and the head constant of [t'] using the databases [db'], and replaces the goal or hypothesis accordingly; fails if nothing can be unfolded.
*) val autounfold_heads : Hints.hint_db_name list -> Hints.hint_db_name list -> Locus.hyp_location option -> unit Proofview.tactic val specialize_mutfix_tac : unit -> unit Proofview.tactic type hintdb_name = string val db_of_constr : Constr.t -> hintdb_name val dbs_of_constrs : Constr.t list -> hintdb_name list val pr_smart_global : Libnames.qualid Constrexpr.or_by_notation -> Pp.t val string_of_smart_global : Libnames.qualid Constrexpr.or_by_notation -> string val ident_of_smart_global : Libnames.qualid Constrexpr.or_by_notation -> Id.t val move_after_deps : Names.Id.t -> constr -> unit Proofview.tactic val assert_replacing : Names.Id.t -> constr -> types -> unit Proofview.tactic val extended_rel_vect : int -> rel_context -> constr array val extended_rel_list : int -> rel_context -> constr list val to_tuple : rel_declaration -> Names.Name.t binder_annot * constr option * constr val to_named_tuple : named_declaration -> Names.Id.t binder_annot * constr option * constr val of_tuple : Names.Name.t binder_annot * constr option * constr -> rel_declaration val of_named_tuple : Names.Id.t binder_annot * constr option * constr -> named_declaration val get_type : rel_declaration -> constr val get_name : rel_declaration -> Names.Name.t val get_annot : rel_declaration -> Names.Name.t binder_annot val get_value : rel_declaration -> constr option val make_assum : Names.Name.t binder_annot -> constr -> rel_declaration val make_def : Names.Name.t binder_annot -> constr option -> constr -> rel_declaration val make_named_def : Names.Id.t binder_annot -> constr option -> constr -> named_declaration val to_context : (Names.Name.t binder_annot * constr option * constr) list -> rel_context val named_of_rel_context : ?keeplets:bool -> (unit -> Names.Id.t) -> rel_context -> EConstr.t list * constr list * named_context val rel_of_named_context : Evd.evar_map -> named_context -> rel_context * Names.Id.t list val subst_rel_context : int -> EConstr.t list -> rel_context -> rel_context val get_id : named_declaration -> Names.Id.t val get_named_type : named_declaration -> constr val get_named_value : named_declaration -> constr option val lookup_rel : int -> rel_context -> rel_declaration val fold_named_context_reverse : ('a -> named_declaration -> 'a) -> init:'a -> named_context -> 'a val map_rel_context : (constr -> constr) -> rel_context -> rel_context val map_rel_declaration : (constr -> constr) -> rel_declaration -> rel_declaration val map_named_declaration : (constr -> constr) -> named_declaration -> named_declaration val map_named_context : (constr -> constr) -> named_context -> named_context val lookup_named : Id.t -> named_context -> named_declaration val to_evar_map : Evd.evar_map -> Evd.evar_map val of_evar_map : Evd.evar_map -> Evd.evar_map val pp : Pp.t -> unit val user_err_loc : (Loc.t option * Pp.t) -> 'a val error : string -> 'a val errorlabstrm : Pp.t -> 'a val is_anomaly : exn -> bool val print_error : exn -> Pp.t val anomaly : ?label:string -> Pp.t -> 'a val nf_betadeltaiota : Reductionops.reduction_function val subst_telescope : constr -> rel_context -> rel_context val subst_in_ctx : int -> constr -> rel_context -> rel_context val set_in_ctx : int -> constr -> rel_context -> rel_context val subst_in_named_ctx : Evd.evar_map -> Names.Id.t -> constr -> named_context -> named_context val new_evar : Environ.env -> Evd.evar_map -> ?src:Evar_kinds.t Loc.located -> types -> Evd.evar_map * constr val new_type_evar : Environ.env -> Evd.evar_map -> ?src:Evar_kinds.t Loc.located -> Evd.rigid -> Evd.evar_map 
* (constr * ESorts.t) val empty_hint_info : 'a Typeclasses.hint_info_gen val evar_absorb_arguments : Environ.env -> Evd.evar_map -> existential -> constr list -> Evd.evar_map * existential val hintdb_set_transparency : Constant.t -> bool -> Hints.hint_db_name -> unit (** To add to the API *) val to_peuniverses : 'a UVars.puniverses -> 'a peuniverses val from_peuniverses : Evd.evar_map -> 'a peuniverses -> 'a UVars.puniverses val is_global : Environ.env -> Evd.evar_map -> Names.GlobRef.t -> constr -> bool val constr_of_global_univ : Evd.evar_map -> Names.GlobRef.t peuniverses -> constr val rel_vect : int -> int -> constr array val applistc : constr -> constr list -> constr val instance_constructor : Evd.evar_map -> Typeclasses.typeclass peuniverses -> constr list -> constr option * types val decompose_appvect : Evd.evar_map -> constr -> constr * constr array val dest_ind_family : Inductiveops.inductive_family -> inductive peuniverses * constr list val prod_appvect : Evd.evar_map -> constr -> constr array -> constr val beta_appvect : Evd.evar_map -> constr -> constr array -> constr val find_rectype : Environ.env -> Evd.evar_map -> types -> Inductiveops.inductive_family * constr list type identifier = Names.Id.t val evd_comb1 : (Evd.evar_map -> 'a -> Evd.evar_map * 'b) -> Evd.evar_map ref -> 'a -> 'b val evd_comb0 : (Evd.evar_map -> Evd.evar_map * 'b) -> Evd.evar_map ref -> 'b val splay_prod_n_assum : env -> Evd.evar_map -> int -> types -> rel_context * types (* Universes *) val nonalgebraic_universe_level_of_universe : Environ.env -> Evd.evar_map -> ESorts.t -> Evd.evar_map * Univ.Level.t * ESorts.t val instance_of : Environ.env -> Evd.evar_map -> ?argu:EConstr.EInstance.t -> ESorts.t -> Evd.evar_map * EConstr.EInstance.t * ESorts.t val collapse_term_qualities : UState.t -> Constr.t -> Constr.t (* Hack to prevent sending terms with unbound qualities to the kernel *) Coq-Equations-1.3.1-8.20/src/equations_plugin.mllib000066400000000000000000000002751463127417400220040ustar00rootroot00000000000000Equations_common Ederive Syntax Context_map Sigma_types Simplify Splitting Covering Subterm Eqdec Depelim Principles_proofs Principles Equations Noconf_hom Noconf Extra_tactics G_equations Coq-Equations-1.3.1-8.20/src/extra_tactics.ml000066400000000000000000000050441463127417400205630ustar00rootroot00000000000000open Locusops open Constr open Names open Tactics open Equations_common open EConstr let decompose_app h h' c = Proofview.Goal.enter begin fun gl -> let f, args = EConstr.decompose_app (Proofview.Goal.sigma gl) c in let fty = Tacmach.pf_hnf_type_of gl f in let flam = mkLambda (EConstr.nameR (Id.of_string "f"), fty, mkApp (mkRel 1, args)) in (Proofview.tclTHEN (letin_tac None (Name h) f None allHyps) (letin_tac None (Name h') flam None allHyps)) end let autounfold_ref gr = let db = match gr with | GlobRef.ConstRef c -> Names.Label.to_string (Names.Constant.label c) | _ -> assert false in Eauto.autounfold ["core";db] Locusops.onConcl open Proofview.Goal open Proofview.Notations (** [refine_ho c] Matches a lemma [c] of type [∀ ctx, ty] with a conclusion of the form [∀ ctx, ?P args] using second-order matching on the problem [ctx |- ?P args = ty] and then refines the goal with [c]. 
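    Informal implementation sketch (describing the code that follows): the inner [aux] walks the products of the goal and of the type of [c] in parallel, unifying their domains with [Evarconv.unify_delay]; once the conclusion of the goal is an application of an undefined evar, that evar first absorbs its arguments and is then instantiated through [Evarconv.second_order_matching], after which the goal is closed by refining with [c].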
*) let refine_ho c = enter begin fun gl -> let env = env gl in let sigma = sigma gl in let concl = concl gl in let ty = Tacmach.pf_apply Retyping.get_type_of gl c in let ts = TransparentState.full in let flags = Evarconv.default_flags_of ts in let evd = ref (to_evar_map sigma) in let rec aux env concl ty = match kind sigma concl, kind sigma ty with | Prod (na, b, t), Prod (na', b', t') -> (match Evarconv.unify_delay ~flags env !evd b b' with | exception Evarconv.UnableToUnify _ -> error "Products do not match" | evm -> evd := evm; aux (push_rel (of_tuple (na,None,b)) env) t t') (* | _, LetIn (na, b, _, t') -> *) (* aux env t (subst1 b t') *) | _, App (ev, args) when isEvar sigma ev -> let (evk, subst as ev) = destEvar sigma ev in let sigma = !evd in let sigma,ev = evar_absorb_arguments env sigma ev (Array.to_list args) in let evargs = Evd.expand_existential sigma ev in let argtest = Evarconv.default_occurrence_test ~allowed_evars:Evarsolve.AllowedEvars.all ts in let argoccs = List.map (fun _ -> Evarconv.Unspecified Evd.Abstraction.Abstract) evargs in let sigma, b = Evarconv.second_order_matching flags env sigma ev (argtest,argoccs) concl in if not b then error "Second-order matching failed" else Proofview.Unsafe.tclEVARS sigma <*> Refine.refine ~typecheck:false (fun sigma -> (sigma, c)) | _, _ -> error "Couldn't find a second-order pattern to match" in aux env concl ty end Coq-Equations-1.3.1-8.20/src/extra_tactics.mli000066400000000000000000000017161463127417400207360ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (** Additional general purpose tactics *) val decompose_app : Names.Id.t -> Names.Id.t -> EConstr.t -> unit Proofview.tactic val autounfold_ref : Names.GlobRef.t -> unit Proofview.tactic (** [refine_ho c] Matches a lemma [c] of type [∀ ctx, ty] with a conclusion of the form [∀ ctx, ?P args] using second-order matching on the problem [ctx |- ?P args = ty] and then refines the goal with [c]. 
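    Hypothetical example (names are purely illustrative, not taken from this code base): given a goal [forall (A : Type) (x : A), ?P A x] and a lemma [c : forall (A : Type) (x : A), x = x], second-order matching instantiates [?P] with [fun A x => x = x] and the goal is then discharged by refining with [c].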
*) val refine_ho : EConstr.t -> unit Proofview.tactic Coq-Equations-1.3.1-8.20/src/g_equations.mlg000066400000000000000000000441151463127417400204150ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (*i camlp4deps: "grammar/grammar.cma" i*) DECLARE PLUGIN "coq-equations.plugin" { open Attributes open Constr open Names open Pp open Constrexpr open Stdarg open Equations_common open EConstr open Ltac_plugin } TACTIC EXTEND decompose_app | [ "decompose_app" ident(h) ident(h') constr(c) ] -> { Extra_tactics.decompose_app h h' c } END TACTIC EXTEND autounfold_ref | [ "autounfold_ref" reference(myref) ] -> { Extra_tactics.autounfold_ref myref } END (* Sigma *) { open Proofview.Goal } TACTIC EXTEND get_signature_pack | [ "get_signature_pack" hyp(id) ident(id') ] -> { Sigma_types.Tactics.get_signature_pack id id' } END TACTIC EXTEND pattern_sigma | [ "pattern" "sigma" hyp(id) ] -> { Sigma_types.Tactics.pattern_sigma id } END TACTIC EXTEND curry | [ "curry" hyp(id) ] -> { Sigma_types.Tactics.curry_hyp id } | ["curry"] -> { Sigma_types.Tactics.curry } END TACTIC EXTEND curry_hyps | [ "uncurry_hyps" ident(id) ] -> { Sigma_types.uncurry_hyps id } END TACTIC EXTEND uncurry_call | [ "uncurry_call" constr(c) constr(c') ident(id) ident(id') ] -> { Sigma_types.Tactics.uncurry_call c c' id id' } END (* Depelim *) TACTIC EXTEND dependent_pattern | ["dependent" "pattern" constr(c) ] -> { Depelim.dependent_pattern c } END TACTIC EXTEND dependent_pattern_from | ["dependent" "pattern" "from" constr(c) ] -> { Depelim.dependent_pattern ~pattern_term:false c } END TACTIC EXTEND pattern_call | [ "pattern_call" constr(c) ] -> { Depelim.pattern_call c } END TACTIC EXTEND needs_generalization | [ "needs_generalization" hyp(id) ] -> { let open Tacticals in Proofview.Goal.enter (fun gl -> if Depelim.needs_generalization gl id then tclIDTAC else tclFAIL (str"No generalization needed")) } END (* Equations *) { open Tacarg } TACTIC EXTEND solve_equations | [ "solve_equations" tactic(destruct) tactic(tac) ] -> { Equations.solve_equations_goal (Tacinterp.tactic_of_value ist destruct) (Tacinterp.tactic_of_value ist tac) } END TACTIC EXTEND simp | [ "simp" ne_preident_list(l) clause(c) ] -> { Principles_proofs.simp_eqns_in c l } | [ "simpc" constr_list(l) clause(c) ] -> { Principles_proofs.simp_eqns_in c (dbs_of_constrs (List.map EConstr.Unsafe.to_constr l)) } END { open Syntax open Pcoq.Prim } ARGUMENT EXTEND equation_user_option PRINTED BY { pr_r_equation_user_option } | [ "noind" ] -> { OInd, false } | [ "ind" ] -> { OInd, true } | [ "eqns" ] -> { OEquations, true } | [ "noeqns" ] -> { OEquations, false } END ARGUMENT EXTEND equation_options PRINTED BY { pr_equation_options } | [ "(" ne_equation_user_option_list(l) ")" ] -> { l } | [ ] -> { [] } END { let pr_lident _ _ _ (loc, id) = Id.print id } ARGUMENT EXTEND lident PRINTED BY { pr_lident } | [ ident(i) ] -> { (Some loc, i) } END { module Vernac = Pvernac.Vernac_ type binders_argtype = Constrexpr.local_binder_expr list Genarg.uniform_genarg_type let pr_raw_binders2 _env _sigma _ _ _ l = mt () let pr_glob_binders2 _env _sigma _ _ _ l = mt () let pr_binders2 _env _sigma _ _ _ l = mt () (* let wit_binders_let2 : 
binders_let2_argtype = *) (* Genarg.create_arg "binders_let2" *) let wit_binders2 : binders_argtype = Genarg.create_arg "binders2" let binders2 : local_binder_expr list Pcoq.Entry.t = Pcoq.create_generic_entry2 "binders2" (Genarg.rawwit wit_binders2) let binders2_val = Geninterp.register_val0 wit_binders2 None let _ = Pptactic.declare_extra_genarg_pprule wit_binders2 pr_raw_binders2 pr_glob_binders2 pr_binders2 type deppat_equations_argtype = Syntax.pre_equation list Genarg.uniform_genarg_type let wit_deppat_equations : deppat_equations_argtype = Genarg.create_arg "deppat_equations" let deppat_equations_val = Geninterp.register_val0 wit_deppat_equations None let pr_raw_deppat_equations _env _sigma _ _ _ l = mt () let pr_glob_deppat_equations _env _sigma _ _ _ l = mt () let pr_deppat_equations _env _sigma _ _ _ l = mt () let deppat_equations : Syntax.pre_equation list Pcoq.Entry.t = Pcoq.create_generic_entry2 "deppat_equations" (Genarg.rawwit wit_deppat_equations) let _ = Pptactic.declare_extra_genarg_pprule wit_deppat_equations pr_raw_deppat_equations pr_glob_deppat_equations pr_deppat_equations type deppat_elim_argtype = Constrexpr.constr_expr list Genarg.uniform_genarg_type let wit_deppat_elim : deppat_elim_argtype = Genarg.create_arg "deppat_elim" let deppat_elim_val = Geninterp.register_val0 wit_deppat_elim None let pr_raw_deppat_elim _env _sigma _ _ _ l = mt () let pr_glob_deppat_elim _env _sigma _ _ _ l = mt () let pr_deppat_elim _env _sigma _ _ _ l = mt () let deppat_elim : Constrexpr.constr_expr list Pcoq.Entry.t = Pcoq.create_generic_entry2 "deppat_elim" (Genarg.rawwit wit_deppat_elim) let _ = Pptactic.declare_extra_genarg_pprule wit_deppat_elim pr_raw_deppat_elim pr_glob_deppat_elim pr_deppat_elim type equations_argtype = (pre_equations * Vernacexpr.notation_declaration list) Genarg.uniform_genarg_type let wit_equations : equations_argtype = Genarg.create_arg "equations" let val_equations = Geninterp.register_val0 wit_equations None let pr_raw_equations _env _sigma _ _ _ l = mt () let pr_glob_equations _env _sigma _ _ _ l = mt () let pr_equations _env _sigma _ _ _ l = mt () let equations : (pre_equations * Vernacexpr.notation_declaration list) Pcoq.Entry.t = Pcoq.create_generic_entry2 "equations" (Genarg.rawwit wit_equations) let _ = Pptactic.declare_extra_genarg_pprule wit_equations pr_raw_equations pr_glob_equations pr_equations (* preidents that are not interpreted focused *) let interp_my_preident ist s = s let make0 ?dyn name = let wit = Genarg.make0 name in let () = Geninterp.register_val0 wit dyn in wit let wit_my_preident : string Genarg.uniform_genarg_type = make0 ~dyn:(Geninterp.val_tag (Genarg.topwit wit_string)) "my_preident" let def_intern ist x = (ist, x) let def_subst _ x = x let def_interp ist x = Ftactic.return x let register_interp0 wit f = let interp ist v = Ftactic.bind (f ist v) (fun v -> Ftactic.return (Geninterp.Val.inject (Geninterp.val_tag (Genarg.topwit wit)) v)) in Geninterp.register_interp0 wit interp let declare_uniform t = Genintern.register_intern0 t def_intern; Gensubst.register_subst0 t def_subst; register_interp0 t def_interp let () = declare_uniform wit_my_preident let my_preident : string Pcoq.Entry.t = Pcoq.create_generic_entry2 "my_preident" (Genarg.rawwit wit_my_preident) open Util open Pcoq open Constr open Syntax let () = Pcoq.set_keyword_state (CLexer.add_keyword (Pcoq.get_keyword_state()) "λ") let check_eqns_ident = let open Pcoq.Lookahead in to_entry "check_eqns_ident" begin lk_kws ["|"; ";"] end } GRAMMAR EXTEND Gram GLOBAL: term 
pattern deppat_equations deppat_elim binders2 equations lident my_preident; my_preident: [ [ id = IDENT -> { id } ] ] ; binders2 : [ [ b = binders -> { b } ] ] ; equations_list: [ [ e = equation ; check_eqns_ident ; sep = [ ";" -> { () } | -> { () } ]; eqns = equations_list -> { e :: eqns } | e = equation -> { [e] } | -> { [] } ] ]; deppat_equations: [ [ l = equations_list -> { l } ] ] ; deppat_elim: [ [ "["; l = LIST0 lconstr SEP "|"; "]" -> { l } ] ] ; term: LEVEL "10" [ [ "λ"; "{" ; c = LIST0 equation SEP ";"; "}" -> { CAst.make ~loc @@ CGenarg (Genarg.in_gen (Genarg.rawwit Syntax.wit_equations_list) c) } ] ] ; identloc : [ [ id = ident -> { (Some loc, id) } ] ] ; patterns: [ [ pat = lconstr ; sep = [ "|" -> { () } | "," -> { () } ]; pats = patterns -> { pat :: pats } | pat = lconstr -> { [pat] } | -> { [] } ]]; equation: [ [ "|"; pats = patterns; r = rhs -> { Pre_equation (RefinePats pats, r) } | pat = pat; r = rhs -> { Pre_equation (SignPats pat, r) } ] ] ; pat: [ [ p = lconstr -> { p } ] ] ; refine: [ [ cs = LIST1 Constr.lconstr SEP "," -> { cs } ] ] ; wf_annot: [ [ "by"; IDENT "wf"; c = constr; rel = OPT constr -> { Some (WellFounded (c, rel)) } | "by"; IDENT "struct"; id = OPT identloc -> { Some (Structural id) } | -> { None } ] ] ; proto: [ [ id = lident; d = OPT univ_decl; l = binders2; ":"; t = Constr.lconstr; reca = wf_annot; ":="; eqs = sub_equations -> { (fun r -> ((id, d, r, l, Some t, reca), eqs)) } ] ] ; where_rhs: [ [ ntn = ne_lstring; ":="; c = constr; modl = G_vernac.syntax_modifiers; scopt = OPT [ ":"; sc = IDENT -> { sc } ] -> { Inr { Vernacexpr.ntn_decl_string = ntn; ntn_decl_interp = c; ntn_decl_modifiers = modl; ntn_decl_scope = scopt } } | p = proto -> { Inl (p (Some Syntax.Nested)) } ] ] ; where_clause: [ [ "where"; w = where_rhs -> { w } | "with"; p = proto -> { Inl (p (Some Syntax.Mutual)) } | p = proto -> { Inl (p None) } ] ] ; wheres: [ [ l = LIST0 where_clause -> { let rec aux = function | Inl w :: l -> let ws, ns = aux l in w :: ws, ns | Inr n :: l -> let ws, ns = aux l in ws, n :: ns | [] -> ([], []) in aux l } ] ] ; local_where_rhs: [ [ ntn = ne_lstring; ":="; c = constr; modl = G_vernac.syntax_modifiers; scopt = OPT [ ":"; sc = IDENT -> { sc } ] -> { Inr { Vernacexpr.ntn_decl_string = ntn; ntn_decl_interp = c; ntn_decl_modifiers = modl; ntn_decl_scope = scopt } } | p = proto -> { Inl (p (Some Syntax.Mutual)) } ] ] ; local_where: [ [ "where"; w = local_where_rhs -> { w } ] ] ; local_wheres: [ [ l = LIST0 local_where -> { let rec aux = function | Inl w :: l -> let ws, ns = aux l in w :: ws, ns | Inr n :: l -> let ws, ns = aux l in ws, n :: ns | [] -> ([], []) in aux l } ] ] ; rhs: [ [ ":=!"; id = identloc -> { Some (Empty id) } | [":=" -> { () } |"=>" -> { () } ]; c = Constr.lconstr; w = local_wheres -> { Some (Program (ConstrExpr c, w)) } | ["with" -> { () } ]; refs = refine; [":=" -> { () } |"=>" -> { () } ]; e = sub_equations -> { Some (Refine (refs, e)) } | -> { None } ] ] ; sub_equations: [ [ "{"; l = deppat_equations; "}" -> { l } | l = deppat_equations -> { l } ] ] ; equations: [ [ p = proto; l = wheres -> { let ws, nts = l in ((p None :: ws), nts) } ] ] ; END { let classify_equations x = Vernacextend.(VtStartProof (Doesn'tGuaranteeOpacity,[])) let interp_attrs (elim, eqns) = let interp_opt k = function | None -> [] | Some b -> [(k, b)] in List.append (interp_opt OInd elim) (interp_opt OEquations eqns) } VERNAC COMMAND EXTEND Define_equations_refine CLASSIFIED BY { classify_equations } | #[ poly = polymorphic; program_mode = program; atts = 
derive_flags; tactic = equations_tactic ] ![program_interactive] [ "Equations" "?" equation_options(opt) equations(eqns) ] -> { Equations.equations_interactive ~poly ~program_mode ?tactic (List.append opt (interp_attrs atts)) (fst eqns) (snd eqns) } END VERNAC COMMAND EXTEND Define_equations CLASSIFIED AS SIDEFF STATE program | #[ poly = polymorphic; program_mode = program; atts = derive_flags; tactic = equations_tactic ] [ "Equations" equation_options(opt) equations(eqns) ] -> { Equations.equations ~poly ~program_mode ?tactic (List.append opt (interp_attrs atts)) (fst eqns) (snd eqns) } END (* Dependent elimination using Equations. *) { type raw_elim_patterns = constr_expr list type glob_elim_patterns = Genintern.glob_constr_and_expr list type elim_patterns = user_pats let interp_elim_pattern env sigma avoid s = Syntax.pattern_of_glob_constr env sigma avoid Anonymous (* Should be id *) (fst s) let interp_elim_patterns ist env sigma s = let avoid = Names.Id.Map.domain ist.Geninterp.lfun in List.map (fun x -> snd (interp_elim_pattern env sigma avoid x)) s let glob_elim_patterns ist s = List.map (Tacintern.intern_constr ist) s let subst_elim_patterns s str = str let pr_elim_patterns _ _ _ (s : elim_patterns) = let env = Global.env() in let sigma = Evd.from_env env in Syntax.pr_user_pats env sigma s let pr_raw_elim_patterns env sigma prc prlc _ (s : raw_elim_patterns) = Pp.prlist_with_sep (fun _ -> str "|") (prc env sigma) s let pr_glob_elim_patterns env sigma prc prlc _ (s : glob_elim_patterns) = Pp.prlist_with_sep (fun _ -> str "|") (fun x -> prc env sigma x) s type elim_patterns_argtype = (raw_elim_patterns, glob_elim_patterns, elim_patterns) Genarg.genarg_type (* let interp_elim_patterns ist gl l = * match l with * | ArgArg x -> x * | ArgVar ({ CAst.v = id } as locid) -> * (try int_list_of_VList (Id.Map.find id ist.lfun) * with Not_found | CannotCoerceTo _ -> [interp_int ist locid]) * * let interp_elim_patterns ist gl l = * Tacmach.project gl , interp_occs ist gl l * * let wit_g_elim_patterns : elim_patterns_argtype = * Genarg.create_arg "g_elim_patterns" * * let val_g_elim_patterns = * Geninterp.register_val0 wit_g_elim_patterns None * * (\* let pr_raw_g_elim_patterns _ _ _ = Simplify.pr_elim_patterns * * let pr_glob_g_elim_patterns _ _ _ = Simplify.pr_elim_patterns * * let pr_g_elim_patterns _ _ _ = Simplify.pr_elim_patterns *\) * * let g_elim_patterns : raw_elim_patterns Pcoq.Entry.t = * Pcoq.create_generic_entry2 "g_elim_patterns" * (Genarg.rawwit wit_g_elim_patterns) * * let _ = Pptactic.declare_extra_genarg_pprule wit_g_elim_patterns * pr_raw_elim_patterns pr_glob_elim_patterns pr_elim_patterns *) } ARGUMENT EXTEND elim_patterns PRINTED BY { pr_elim_patterns } INTERPRETED BY { interp_elim_patterns } GLOBALIZED BY { glob_elim_patterns } SUBSTITUTED BY { subst_elim_patterns } RAW_PRINTED BY { pr_raw_elim_patterns env sigma } GLOB_PRINTED BY { pr_glob_elim_patterns env sigma } | [ deppat_elim(l) ] -> { l } END TACTIC EXTEND dependent_elimination | [ "dependent" "elimination" ident(id) ] -> { Depelim.dependent_elim_tac (None, id) } | [ "dependent" "elimination" ident(id) "as" elim_patterns(l) ] -> { Depelim.dependent_elim_tac ~patterns:l (None, id) (* FIXME *) } END (* Subterm *) TACTIC EXTEND is_secvar | [ "is_secvar" constr(x) ] -> { enter (fun gl -> match kind (Proofview.Goal.sigma gl) x with | Var id when Termops.is_section_variable (Global.env ()) id -> Proofview.tclUNIT () | _ -> Tacticals.tclFAIL (str "Not a section variable or hypothesis")) } END TACTIC EXTEND refine_ho | [ 
"refine_ho" open_constr(c) ] -> { Extra_tactics.refine_ho c } END TACTIC EXTEND eqns_specialize_eqs | [ "eqns_specialize_eqs" ident(i) ] -> { Depelim.specialize_eqs i } | [ "eqns_specialize_eqs_block" ident(i) int_opt(n) ] -> { Depelim.specialize_eqs ~with_block:(match n with None -> 1 | Some n -> n) i } END TACTIC EXTEND move_after_deps | [ "move_after_deps" ident(i) constr(c) ] -> { Equations_common.move_after_deps i c } END (** Deriving *) VERNAC COMMAND EXTEND Derive CLASSIFIED AS SIDEFF STATE program | #[ poly = polymorphic ] [ "Derive" ne_ident_list(ds) "for" global_list(c) ] -> { Ederive.derive ~poly (List.map Id.to_string ds) (List.map (fun x -> x.CAst.loc, Smartlocate.global_with_alias x) c) } | #[ poly = polymorphic ] [ "Equations" "Derive" ne_ident_list(ds) "for" global_list(c) ] -> { Ederive.derive ~poly (List.map Id.to_string ds) (List.map (fun x -> x.CAst.loc, Smartlocate.global_with_alias x) c) } END (* Simplify *) { type simplification_rules_argtype = Simplify.simplification_rules Genarg.uniform_genarg_type let wit_g_simplification_rules : simplification_rules_argtype = Genarg.create_arg "g_simplification_rules" let val_g_simplification_rules = Geninterp.register_val0 wit_g_simplification_rules None let pr_raw_g_simplification_rules _env _sigma _ _ _ = Simplify.pr_simplification_rules let pr_glob_g_simplification_rules _env _sigma _ _ _ = Simplify.pr_simplification_rules let pr_g_simplification_rules _env _sigma _ _ _ = Simplify.pr_simplification_rules let g_simplification_rules : Simplify.simplification_rules Pcoq.Entry.t = Pcoq.create_generic_entry2 "g_simplification_rules" (Genarg.rawwit wit_g_simplification_rules) let _ = Pptactic.declare_extra_genarg_pprule wit_g_simplification_rules pr_raw_g_simplification_rules pr_glob_g_simplification_rules pr_g_simplification_rules } GRAMMAR EXTEND Gram GLOBAL: g_simplification_rules; g_simplification_rules: [ [ l = LIST1 simplification_rule_located -> { l } ] ] ; simplification_rule_located: [ [ r = simplification_rule -> { (Some loc, r) } ] ] ; simplification_rule: [ [ step = simplification_step -> { Simplify.Step step } | "?" -> { Simplify.Infer_one } | "<->" -> { Simplify.Infer_direction } | "*" -> { Simplify.Infer_many } ] ]; simplification_step : [ [ "-" -> { Simplify.Deletion false } | "-"; "!" -> { Simplify.Deletion true } | "<>" -> { Simplify.NoCycle } | "$" -> { Simplify.NoConfusion [] } | "$"; "{"; rules = g_simplification_rules; "}" -> { Simplify.NoConfusion rules } | dir = direction -> { Simplify.Solution dir } ] ]; direction: [ [ "->" -> { Simplify.Left } | "<-" -> { Simplify.Right } ] ]; END { (* We need these alias due to the limitations of parsing macros. 
*) type simplification_rules = Simplify.simplification_rules let pr_simplification_rules _ _ _ = Simplify.pr_simplification_rules } ARGUMENT EXTEND simplification_rules PRINTED BY { pr_simplification_rules } | [ g_simplification_rules(l) ] -> { l } END TACTIC EXTEND simplify | [ "simplify" simplification_rules(l) ] -> { Simplify.simplify_tac l } | [ "simplify" ] -> { Simplify.simplify_tac [] } END TACTIC EXTEND mutual_fix | [ "mfix" my_preident_list(li) int_list(l) ] -> { Principles_proofs.mutual_fix li l } END Coq-Equations-1.3.1-8.20/src/noconf.ml000066400000000000000000000152731463127417400172150ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) open Util open Names open Nameops open Constr open Context open Declarations open Inductiveops open Reductionops module CVars = Vars open Equations_common open EConstr open Vars let mkcase env sigma c ty constrs = let cty = Retyping.get_type_of env sigma c in let IndType (indf, _) as indty = Inductiveops.find_rectype env sigma cty in let mind, ind, origparams = match dest_ind_family indf with | (((mu, n),_ as i), pars) -> mu, i, pars in let mindb, oneind = Global.lookup_inductive (fst ind) in let ctx = oneind.mind_arity_ctxt in let ui = EConstr.EInstance.kind sigma (snd ind) in let ctx = CVars.subst_instance_context ui ctx in let _len = List.length ctx in let params = mindb.mind_nparams in let ci = make_case_info env (fst ind) RegularStyle in let brs = Array.map2_i (fun i id (ctx, cty) -> let cty = Term.it_mkProd_or_LetIn cty ctx in let cty = CVars.subst_instance_constr ui cty in let (args, arity) = decompose_prod_decls sigma (of_constr cty) in let realargs, pars = List.chop (List.length args - params) args in let args = substl (List.rev origparams) (it_mkProd_or_LetIn arity realargs) in let args, arity = decompose_prod_decls sigma args in let res = constrs ind i id params args arity in it_mkLambda_or_LetIn res args) oneind.mind_consnames oneind.mind_nf_lc in make_case_or_project env sigma indty ci (ty, ERelevance.relevant) c brs let mk_eq env env' evd args args' = let _, _, make = Sigma_types.telescope env evd args in let _, _, make' = Sigma_types.telescope env' evd args' in let make = lift (List.length args + 1) make in let env = push_rel_context args' env' in let ty = Retyping.get_type_of env !evd make in mkEq env evd ty make make' let derive_no_confusion ~pm env sigma0 ~poly (ind,u as indu) = let evd = ref sigma0 in let mindb, oneind = Global.lookup_inductive ind in let _, inds = Reductionops.dest_arity env sigma0 (Inductiveops.type_of_inductive env indu) in let ctx = List.map of_rel_decl oneind.mind_arity_ctxt in let ctx = subst_instance_context u ctx in let ctx = smash_rel_context ctx in let len = List.length ctx in let params = mindb.mind_nparams in let args = oneind.mind_nrealargs in let argsvect = rel_vect 0 len in let paramsvect, rest = Array.chop params argsvect in let argr, argty, x, ctx, argsctx = if Array.length rest = 0 then ERelevance.make oneind.mind_relevance, mkApp (mkIndU indu, argsvect), mkRel 1, ctx, [] else let evm, pred, pars, indty, valsig, ctx, lenargs, idx = Sigma_types.build_sig_of_ind env !evd indu in let () = evd := evm in let evm, sigma = Evd.fresh_global 
(Global.env ()) !evd (Lazy.force coq_sigma) in let () = evd := evm in let _, pred' = Term.decompose_lambda_n (List.length pars) (EConstr.to_constr !evd pred) in let indty = mkApp (sigma, [|idx; of_constr pred'|]) in (* sigma is not sort poly (at least for now) *) ERelevance.relevant, nf_betaiotazeta env !evd indty, mkProj (Lazy.force coq_pr2, ERelevance.relevant, mkRel 1), pars, (List.firstn lenargs ctx) in let tru = get_efresh logic_top evd in let fls = get_efresh logic_bot evd in let xid = Id.of_string "x" and yid = Id.of_string "y" in let xdecl = of_tuple (make_annot (Name xid) argr, None, argty) in let binders = xdecl :: ctx in let ydecl = of_tuple (make_annot (Name yid) argr, None, lift 1 argty) in let fullbinders = ydecl :: binders in let s = Lazy.force logic_sort in let s = match s with | Sorts.InSProp -> mkSProp | Sorts.InProp -> mkProp | Sorts.InSet -> mkSet | Sorts.InType | Sorts.InQSort -> (* In that case the noConfusion principle lives at the level of the type. *) let sort = EConstr.mkSort inds in let sigma, s = Evarsolve.refresh_universes ~status:Evd.univ_flexible ~onlyalg:true (Some false) env !evd sort in evd := sigma; s in let arity = it_mkProd_or_LetIn s fullbinders in let env = push_rel_context binders env in let paramsvect = Context.Rel.instance mkRel 0 ctx in let pack_ind_with_parlift n = lift n argty in let ind_with_parlift n = mkApp (mkIndU indu, Array.append (Array.map (lift n) paramsvect) rest) in let lenindices = List.length argsctx in let pred = let elim = (* In pars ; x |- fun args (x : ind pars args) => forall y, Prop *) let app = pack_ind_with_parlift (args + 2) in it_mkLambda_or_LetIn (mkProd_or_LetIn (of_tuple (make_annot Anonymous argr, None, app)) s) (of_tuple (nameR xid, None, ind_with_parlift (lenindices + 1)) :: lift_rel_context 1 argsctx) in mkcase env !evd x elim (fun ind i id nparams args arity -> let ydecl = (nameR yid, None, pack_ind_with_parlift (List.length args + 1)) in let env' = push_rel_context (of_tuple ydecl :: args) env in let argsctx = lift_rel_context (List.length args + 2) argsctx in let elimdecl = (nameR yid, None, ind_with_parlift (List.length args + lenindices + 2)) in mkLambda_or_LetIn (of_tuple ydecl) (mkcase env' !evd x (it_mkLambda_or_LetIn s (of_tuple elimdecl :: argsctx)) (fun _ i' id' nparams args' arity' -> if i = i' then if List.length args = 0 then tru else mk_eq env env' evd args args' else fls))) in let app = it_mkLambda_or_LetIn pred binders in let _, ce = make_definition ~poly !evd ~types:arity app in let indid = Nametab.basename_of_global (GlobRef.IndRef ind) in let id = add_prefix "NoConfusion_" indid in let cstNoConf = Declare.declare_constant ~name:id (Declare.DefinitionEntry ce) ~kind:Decls.(IsDefinition Definition) in let env = Global.env () in let sigma = Evd.from_env env in let sigma, indu = Evd.fresh_global ~rigid:Evd.univ_rigid (* Universe levels of the inductive family should not be tampered with. 
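     Passing [Evd.univ_rigid] above makes the fresh instance rigid, so that later unification and minimization cannot identify or lower these levels. *)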
*) env sigma (GlobRef.IndRef ind) in let indu = destInd sigma indu in Noconf_hom.derive_noConfusion_package ~pm env sigma ~poly indu indid ~prefix:"" ~tactic:(noconf_tac ()) cstNoConf let () = Ederive.(register_derive { derive_name = "NoConfusion"; derive_fn = make_derive_ind derive_no_confusion }) Coq-Equations-1.3.1-8.20/src/noconf.mli000066400000000000000000000015431463127417400173610ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) open Environ open Names open EConstr val mkcase : env -> Evd.evar_map -> constr -> constr -> ((MutInd.t * int) * EInstance.t -> int -> Id.t -> int -> rel_context -> types -> constr) -> constr val derive_no_confusion : pm:Declare.OblState.t -> env -> Evd.evar_map -> poly:bool -> Names.inductive * EInstance.t -> Declare.OblState.t Coq-Equations-1.3.1-8.20/src/noconf_hom.ml000066400000000000000000000304611463127417400200540ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) open Util open Names open Nameops open Constr open Context open Declarations open Equations_common open EConstr open Vars let name_context env sigma ctx = let avoid, ctx = List.fold_right (fun decl (avoid, acc) -> let (n, b, t) = to_tuple decl in match n.binder_name with | Name id -> let id' = Namegen.next_ident_away id avoid in let avoid = Id.Set.add id' avoid in (avoid, make_def (nameR id') b t :: acc) | Anonymous -> let id' = Namegen.id_of_name_using_hdchar (push_rel_context acc env) sigma t Anonymous in let avoid = Id.Set.add id' avoid in (avoid, make_def (nameR id') b t :: acc)) ctx (Id.Set.empty, []) in ctx let occur_rigidly sigma i concl = let rec aux concl = match kind sigma concl with | App (f, cl) -> if isConstruct sigma f then Array.exists aux cl else false | Rel k -> Int.equal k i | _ -> false in let hd, args = decompose_appvect sigma concl in Array.exists aux args (* On [xn :: ... x1] returns [forcedn :: .. 
:: forced1] *) let get_forced_positions sigma args concl = let is_forced i acc _ = if occur_rigidly sigma i concl then true :: acc else false :: acc in List.rev (List.fold_left_i is_forced 1 [] args) let derive_noConfusion_package ~pm env sigma ~poly (ind,u as indu) indid ~prefix ~tactic cstNoConf = let mindb, oneind = Global.lookup_inductive ind in let ctx = List.map of_rel_decl oneind.mind_arity_ctxt in let ctx = subst_instance_context (snd indu) ctx in let ctx = smash_rel_context ctx in let len = if prefix = "" then mindb.mind_nparams else List.length ctx in let argsvect = rel_vect 0 len in let noid = add_prefix "noConfusion" (add_prefix prefix (add_prefix "_" indid)) and packid = add_prefix "NoConfusion" (add_prefix prefix (add_prefix "Package_" indid)) in let tc = Typeclasses.class_info_exn env sigma (Lazy.force coq_noconfusion_class) in let sigma, noconf = Evd.fresh_global ~rigid:Evd.univ_rigid env sigma (GlobRef.ConstRef cstNoConf) in let sigma, noconfcl = new_global sigma tc.Typeclasses.cl_impl in let inst, u = destInd sigma noconfcl in let noconfterm = mkApp (noconf, argsvect) in let ctx, argty = let ty = Retyping.get_type_of env sigma noconf in let ctx, ty = EConstr.decompose_prod_n_decls sigma len ty in match kind sigma ty with | Prod (_, b, _) -> ctx, b | _ -> assert false in let b, ty = Equations_common.instance_constructor sigma (tc,u) [argty; noconfterm] in let env = push_rel_context ctx (Global.env ()) in let rec term sigma c ty = match kind sigma ty with | Prod (na, t, ty) -> let sigma, arg = Evarutil.new_evar env sigma t in term sigma (mkApp (c, [|arg|])) (subst1 arg ty) | _ -> sigma, c, ty in let cty = Retyping.get_type_of env sigma (Option.get b) in let sigma, term, ty = term sigma (Option.get b) cty in let term = it_mkLambda_or_LetIn term ctx in let ty = it_mkProd_or_LetIn ty ctx in let sigma, _ = Typing.type_of env sigma term in let hook { Declare.Hook.S.dref; _ } = Classes.declare_instance (Global.env ()) sigma (Some empty_hint_info) Hints.SuperGlobal dref in let hook = Declare.Hook.make hook in let scope = Locality.(Global ImportDefaultBehavior) in let kind = Decls.(IsDefinition Definition) in let oblinfo, _, term, ty = RetrieveObl.retrieve_obligations env noid sigma 0 term ty in let cinfo = Declare.CInfo.make ~name:packid ~typ:ty () in let info = Declare.Info.make ~hook ~poly ~scope ~kind () in let pm, _ = Declare.Obls.add_definition ~pm ~cinfo ~info ~opaque:false ~body:term ~tactic ~uctx:(Evd.evar_universe_context sigma) oblinfo in pm let derive_no_confusion_hom ~pm env sigma0 ~poly (ind,u as indu) = let mindb, oneind = Global.lookup_inductive ind in let _, inds = Reductionops.dest_arity env sigma0 (Inductiveops.type_of_inductive env indu) in let ctx = List.map of_rel_decl oneind.mind_arity_ctxt in let ctx = subst_instance_context (snd indu) ctx in let ctx = smash_rel_context ctx in let len = List.length ctx in let params = mindb.mind_nparams in let args = oneind.mind_nrealargs in let argsvect = rel_vect 0 len in let paramsvect, rest = Array.chop params argsvect in let argty, x, ctx, argsctx = mkApp (mkIndU indu, argsvect), mkRel 1, ctx, [] in let sigma, tru = get_fresh sigma0 logic_top in let sigma, fls = get_fresh sigma logic_bot in let ctx = name_context env sigma ctx in let xid = Id.of_string "x" and yid = Id.of_string "y" in let xdecl = of_tuple (nameR xid, None, argty) in let binders = xdecl :: ctx in let ydecl = of_tuple (nameR yid, None, lift 1 argty) in let fullbinders = ydecl :: binders in let sigma, s = match Lazy.force logic_sort with | Sorts.InType | 
Sorts.InSet | Sorts.InQSort -> (* In that case noConfusion lives at the level of the inductive family *) let sort = EConstr.mkSort inds in let is_level = match ESorts.kind sigma0 inds with | Sorts.Prop | Sorts.SProp | Sorts.Set -> true | Sorts.Type u | Sorts.QSort (_, u) -> Univ.Universe.is_level u in if is_level then sigma, sort else Evarsolve.refresh_universes ~status:Evd.univ_flexible ~onlyalg:true (Some false) env sigma sort | s -> let sigma, s = Evd.fresh_sort_in_family sigma s in sigma, mkSort s in let _arity = it_mkProd_or_LetIn s fullbinders in (* let env = push_rel_context binders env in *) let paramsvect = Context.Rel.instance mkRel 0 ctx in let _pack_ind_with_parlift n = lift n argty in let _ind_with_parlift n = mkApp (mkIndU indu, Array.append (Array.map (lift n) paramsvect) rest) in let _lenindices = List.length argsctx in let ctxmap = Context_map.id_subst fullbinders in let constructors = Inductiveops.arities_of_constructors env indu in let sigma, sigT = get_fresh sigma coq_sigma in let sigma, sigI = get_fresh sigma coq_sigmaI in let sigma, eqT = get_fresh sigma logic_eq_type in let parampats = List.rev_map (fun decl -> DAst.make Syntax.(PUVar (Name.get_id (get_name decl), Generated))) ctx in let mk_clause i ty = let paramsctx, concl = decompose_prod_n_decls sigma params ty in let _, ctxpars = List.chop args ctx in let ctxvars = List.map (fun decl -> mkVar (Name.get_id (get_name decl))) ctxpars in let args, concl = decompose_prod_decls sigma (Vars.substnl ctxvars 0 concl) in let forced = get_forced_positions sigma args concl in let loc = None in let fn (avoid, acc) decl forced = let id = match Context.Rel.Declaration.get_name decl with | Name na -> na | Anonymous -> Id.of_string "wildcard" in let name = Namegen.next_ident_away (add_suffix id "0") avoid in let avoid = Id.Set.add name avoid in let name' = Namegen.next_ident_away (add_suffix id "1") avoid in let avoid = Id.Set.add name' avoid in let acc = if forced then let acc' = List.fold_left_i (fun i acc (na,na',decl) -> (na, na', Vars.substnl [mkVar name'] i decl) :: acc) 0 [] acc in List.rev acc' else ((name, name', get_type decl) :: acc) in (avoid, acc), Syntax.(PUVar (name, User), PUVar (name', User)) in let (avoid, eqs), user_pats = List.fold_left2_map fn (Id.Set.empty, []) args forced in let patl, patr = List.split user_pats in let cstr ps = Syntax.PUCstr ((ind, succ i), params, List.rev_map (fun p -> DAst.make p) ps) in let lhs = parampats @ [DAst.make (cstr patl); DAst.make (cstr patr)] in let rhs = match List.rev eqs with | [] -> tru | (name, name', ty) :: eqs -> let ty, lhs, rhs = let get_type (restty, restl, restr) (na, na', ty) = let codom = mkLambda (nameR na, ty, restty) in mkApp (sigT, [| ty; codom |]), mkApp (sigI, [| ty; codom; mkVar na; subst1 (mkVar na) restl |]), mkApp (sigI, [| ty; codom; mkVar na'; subst1 (mkVar na') restr |]) in List.fold_left get_type (ty, mkVar name, mkVar name') eqs in mkApp (eqT, [| ty; lhs; rhs |]) in Syntax.Pre_clause (loc, lhs, Some (Syntax.Program (Syntax.Constr rhs, ([], [])))) in let clauses = Array.to_list (Array.mapi mk_clause constructors) in let hole x = Syntax.(PUVar (Id.of_string x, User)) in let catch_all = let lhs = parampats @ [DAst.make (hole "x"); DAst.make (hole "y")] in let rhs = Syntax.Program (Syntax.Constr fls, ([], [])) in Syntax.Pre_clause (None, lhs, Some rhs) in let clauses = clauses @ [catch_all] in let indid = Nametab.basename_of_global (GlobRef.IndRef ind) in let id = add_prefix "NoConfusionHom_" indid in let program_orig_type = it_mkProd_or_LetIn s 
fullbinders in let program_sort = Retyping.get_sort_of env sigma program_orig_type in let sigma, program_sort = Evarsolve.refresh_universes ~status:Evd.univ_flexible ~onlyalg:true (Some false) env sigma (mkSort program_sort) in let program_sort = EConstr.ESorts.kind sigma (EConstr.destSort sigma program_sort) in let evd = ref sigma in let data = Covering.{ program_mode = false; rec_type = [None]; flags = { polymorphic = poly; open_proof = false; with_eqns = false; with_ind = false; allow_aliases = true; (* We let the compiler unify arguments that are forced equal *) tactic = !Declare.Obls.default_tactic }; fixdecls = []; intenv = Constrintern.empty_internalization_env; notations = [] } in let p = Syntax.{program_loc = None; program_id = id; program_impls = []; program_implicits = []; program_rec = None; program_orig_type; program_sort; program_sign = fullbinders; program_arity = s} in let splitting = Covering.covering ~check_unused:false (* The catch-all clause might not be needed *) env evd p data clauses [] ctxmap [] s in let hook ~pm _ p terminfo = (* let _proginfo = * Syntax.{ program_loc = None; program_id = id; * program_orig_type; program_sort; * program_sign = fullbinders; * program_arity = s; * program_rec = None; * program_impls = []; * program_implicits = []} * in *) let program_cst = match terminfo.Splitting.term_id with GlobRef.ConstRef c -> c | _ -> assert false in (* let _compiled_info = Splitting.{ program_cst; program_split = p.program_splitting; * program_split_info = terminfo } in * let _flags = { polymorphic; open_proof = false; with_eqns = true; with_ind = true } in * let _fixprots = [s] in *) (* let () = Equations.define_principles flags None fixprots [proginfo, compiled_info] in *) (* The principles are now shown, let's prove this forms an equivalence *) Global.set_strategy (Conv_oracle.EvalConstRef program_cst) Conv_oracle.transparent; let env = Global.env () in let sigma = Evd.from_env env in let sigma, indu = Evd.fresh_global ~rigid:Evd.univ_rigid (* Universe levels of the inductive family should not be tampered with. 
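     Here as well, [Evd.univ_rigid] keeps the fresh universe instance of the inductive fixed rather than letting subsequent unification refine it. *)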
*) env sigma (GlobRef.IndRef ind) in let indu = destInd sigma indu in (), derive_noConfusion_package ~pm env sigma ~poly indu indid ~prefix:"Hom" ~tactic:(noconf_hom_tac ()) program_cst in let prog = Splitting.make_single_program env evd data.Covering.flags p ctxmap splitting None in Splitting.define_programs ~pm env evd UState.default_univ_decl [None] [] data.Covering.flags [prog] hook let () = let derive_no_confusion_hom ~pm env sigma ~poly v = derive_no_confusion_hom ~pm env sigma ~poly v |> fst in Ederive.(register_derive { derive_name = "NoConfusionHom"; derive_fn = make_derive_ind derive_no_confusion_hom }) Coq-Equations-1.3.1-8.20/src/noconf_hom.mli000066400000000000000000000017231463127417400202240ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) open Environ open EConstr val derive_noConfusion_package : pm:Declare.OblState.t -> Environ.env -> Evd.evar_map -> poly:bool -> Names.inductive * EConstr.EInstance.t -> Names.Id.t -> prefix:string -> tactic:unit Proofview.tactic -> Names.Constant.t -> Declare.OblState.t val derive_no_confusion_hom : pm:Declare.OblState.t -> env -> Evd.evar_map -> poly:bool -> Names.inductive * EInstance.t -> Declare.OblState.t * Declare.Proof.t option Coq-Equations-1.3.1-8.20/src/principles.ml000066400000000000000000002344371463127417400201100ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (** Principles derived from equation definitions. 
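    Informally, this file builds the material backing those principles: it registers the generated rewrite rules and opacity hints, abstracts recursive calls to state the induction hypotheses, and computes the type of the functional elimination principle (see e.g. [abstract_rec_calls] and [compute_elim_type] below). *)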
*) open Names open Context open Equations_common open Syntax open Context_map open Splitting open Principles_proofs type statement = EConstr.constr * EConstr.types option type statements = statement list type recursive = bool type node_kind = | Regular | Refine | Where | Nested of recursive let kind_of_prog p = match p.Syntax.program_rec with | Some (Structural (NestedOn _)) -> Nested true | Some (Structural NestedNonRec) -> Nested false | _ -> Regular let regular_or_nested = function | Regular | Nested _ -> true | _ -> false let regular_or_nested_rec = function | Regular -> true | Nested r -> true | _ -> false let nested = function Nested _ -> true | _ -> false let pi1 (x,_,_) = x let pi2 (_,y,_) = y (** Objects to keep information about equations *) let cache_rew_rule (base, gr, l2r) = let gr, ((qvars, univs), csts) = UnivGen.fresh_global_instance (Global.env()) gr in let () = if not (Sorts.QVar.Set.is_empty qvars) then CErrors.user_err Pp.(str "Sort polymorphic autorewrite not supported.") in let gru = gr, (univs, csts) in Autorewrite.add_rew_rules ~locality:Hints.(if Global.sections_are_opened() then Local else SuperGlobal) base [CAst.make (gru, true, None)] let inRewRule = let open Libobject in (* Object just to provide discharge *) declare_object { (default_object "EQUATIONS_REWRITE_RULE") with cache_function = (fun obj -> cache_rew_rule obj); discharge_function = (fun x -> Some x); classify_function = (fun _ -> Dispose) } let add_rew_rule ~l2r ~base ref = Lib.add_leaf (inRewRule (base,ref,l2r)) let cache_opacity cst = Global.set_strategy (Conv_oracle.EvalConstRef cst) Conv_oracle.Opaque let subst_opacity (subst, cst) = let gr' = Mod_subst.subst_constant subst cst in gr' let inOpacity = let open Libobject in let obj = (* We allow discharging rewrite rules *) superglobal_object "EQUATIONS_OPACITY" ~cache:cache_opacity ~subst:(Some subst_opacity) ~discharge:(fun x -> Some x) in declare_object @@ obj let match_arguments sigma l l' = let rec aux i = if i < Array.length l' then if i < Array.length l then if EConstr.eq_constr sigma l.(i) l'.(i) then i :: aux (succ i) else aux (succ i) else aux (succ i) else [i] in aux 0 let filter_arguments f l = let rec aux i f l = match f, l with | n :: f', a :: l' -> if i < n then aux (succ i) f l' else if i = n then a :: aux (succ i) f' l' else assert false | _, _ -> l in aux 0 f l module CMap = Map.Make(Constr) let clean_rec_calls sigma (hyps, c) = let open Context.Rel.Declaration in (* Remove duplicate induction hypotheses under contexts *) let under_context, hyps = CMap.partition (fun ty n -> Constr.isProd ty || Constr.isLetIn ty) hyps in let hyps = CMap.fold (fun ty n hyps -> let ctx, concl = Term.decompose_prod_decls ty in let len = List.length ctx in if Vars.noccur_between 1 len concl then if CMap.mem (Constr.lift (-len) concl) hyps then hyps else CMap.add ty n hyps else CMap.add ty n hyps) under_context hyps in (* Sort by occurrence *) let elems = List.sort (fun x y -> Int.compare (snd x) (snd y)) (CMap.bindings hyps) in let (size, ctx) = List.fold_left (fun (n, acc) (ty, _) -> (succ n, LocalAssum (EConstr.nameR (Id.of_string "Hind"), EConstr.Vars.lift n (EConstr.of_constr ty)) :: acc)) (0, []) elems in (ctx, size, EConstr.Vars.lift size (EConstr.of_constr c)) let head c = fst (Constr.decompose_app c) let rec is_applied_to_structarg f is_rec lenargs = match is_rec with | Some (Guarded ids) :: rest -> begin try let kind = CList.find_map_exn (fun (f', k) -> if Id.equal f f' then Some k else None) ids in match kind with | MutualOn (Some (idx,_)) | 
NestedOn (Some (idx,_)) -> Some (lenargs > idx) | MutualOn None | NestedOn None | NestedNonRec -> Some true with Not_found -> is_applied_to_structarg f rest lenargs end | _ :: rest -> is_applied_to_structarg f rest lenargs | [] -> None let is_user_obl sigma user_obls f = match EConstr.kind sigma f with | Constr.Const (c, u) -> Id.Set.mem (Label.to_id (Constant.label c)) user_obls | _ -> false let cmap_map f c = CMap.fold (fun ty n hyps -> CMap.add (f ty) n hyps) c CMap.empty let cmap_union g h = CMap.merge (fun ty n m -> match n, m with | Some n, Some m -> Some (min n m) | Some _, None -> n | None, Some _ -> m | None, None -> None) g h let cmap_add ty n h = cmap_union (CMap.singleton ty n) h let subst_telescope cstr ctx = let (_, ctx') = List.fold_left (fun (k, ctx') decl -> (succ k, (Context.Rel.Declaration.map_constr (Vars.substnl [cstr] k) decl) :: ctx')) (0, []) ctx in List.rev ctx' let substitute_args args ctx = let open Context.Rel.Declaration in let rec aux ctx args = match args, ctx with | a :: args, LocalAssum _ :: ctx -> aux (subst_telescope a ctx) args | _ :: _, LocalDef (na, b, t) :: ctx -> aux (subst_telescope b ctx) args | [], ctx -> List.rev ctx | _, [] -> assert false in aux (List.rev ctx) args let drop_last_n n l = let l = List.rev l in let l = CList.skipn n l in List.rev l let find_rec_call is_rec sigma protos f args = let fm (fhead,(f',filter), alias, idx, sign, arity) = if Constr.equal (EConstr.Unsafe.to_constr fhead) f then let f' = fst (Constr.destConst f) in match is_applied_to_structarg (Names.Label.to_id (Names.Constant.label f')) is_rec (List.length args) with | Some true | None -> let signlen = List.length sign in let indargs = filter_arguments filter args in let sign, args = if signlen <= List.length indargs then (* Exact or extra application *) let indargs, rest = CList.chop signlen indargs in let fargs = drop_last_n (List.length rest) args in [], (fargs, indargs, rest) else (* Partial application *) let sign = List.map EConstr.Unsafe.to_rel_decl sign in let sign = substitute_args indargs sign in let signlen = List.length sign in let indargs = List.map (Constr.lift signlen) indargs @ Context.Rel.instance_list Constr.mkRel 0 sign in let fargs = List.map (Constr.lift signlen) args @ Context.Rel.instance_list Constr.mkRel 0 sign in sign, (fargs, indargs, []) in Some (idx, arity, filter, sign, args) | Some false -> None else match alias with | Some (f',argsf) -> let signlen = List.length sign in let f', args' = EConstr.decompose_app sigma f' in let f' = EConstr.Unsafe.to_constr f' in if Constr.equal (head f') f then let sign, args = if signlen <= List.length args then (* Exact or extra application *) let indargs, rest = CList.chop signlen args in let fargs = drop_last_n (List.length rest) args in [], (fargs, indargs, rest) else (* Partial application *) let sign = List.map EConstr.Unsafe.to_rel_decl sign in let sign = substitute_args args sign in let signlen = List.length sign in let indargs = List.map (Constr.lift signlen) args @ Context.Rel.instance_list Constr.mkRel 0 sign in let fargs = List.map (Constr.lift signlen) args @ Context.Rel.instance_list Constr.mkRel 0 sign in sign, (fargs, indargs, []) in Some (idx, arity, argsf, sign, args) else None | None -> None in CList.find_map fm protos let filter_arg i filter = let rec aux f = match f with | i' :: _ when i < i' -> true | i' :: _ when i = i' -> false | i' :: is -> aux is | [] -> false in aux filter let abstract_rec_calls sigma user_obls ?(do_subst=true) is_rec len protos c = let proto_fs = List.map (fun 
(_,(f,args), _, _, _, _) -> f) protos in let occ = ref 0 in let rec aux n env hyps c = let open Constr in match kind c with | Lambda (na,t,b) -> let hyps',b' = aux (succ n) ((na,None,t) :: env) CMap.empty b in let hyps' = cmap_map (fun ty -> mkProd (na, t, ty)) hyps' in cmap_union hyps hyps', c (* | Cast (_, _, f) when is_comp f -> aux n f *) | LetIn (na,b,t,body) -> let hyps',b' = aux n env hyps b in let hyps'',body' = aux (succ n) ((na,Some b,t) :: env) CMap.empty body in cmap_union hyps' (cmap_map (fun ty -> Constr.mkLetIn (na,b,t,ty)) hyps''), c | Prod (na, d, c) when Vars.noccurn 1 c -> let hyps',d' = aux n env hyps d in let hyps'',c' = aux n env hyps' (Vars.subst1 mkProp c) in hyps'', mkProd (na, d', lift 1 c') | Case (ci, u, pms, p, iv, c, brs) -> let (ci, p, iv, c, brs) = Inductive.expand_case (Global.env ()) (ci, u, pms, p, iv, c, brs) in let hyps', c' = aux n env hyps c in let hyps' = Array.fold_left (fun hyps br -> fst (aux n env hyps br)) hyps' brs in let case' = mkCase (Inductive.contract_case (Global.env ()) (ci, p, iv, c', brs)) in hyps', EConstr.Unsafe.to_constr (EConstr.Vars.substnl proto_fs (succ len) (EConstr.of_constr case')) | Proj (p, r, c) -> let hyps', c' = aux n env hyps c in hyps', mkProj (p, r, c') | _ -> let f', args = decompose_app c in if not (is_user_obl sigma user_obls (EConstr.of_constr f')) then (match find_rec_call is_rec sigma protos f' (Array.to_list args) with | Some (i, arity, filter, sign, (fargs', indargs', rest)) -> let hyps = CArray.fold_left_i (fun i hyps arg -> if filter_arg i filter then hyps else let hyps', arg' = aux n env hyps arg in hyps') hyps args in let fargs' = Constr.mkApp (f', Array.of_list fargs') in let result = Term.it_mkLambda_or_LetIn fargs' sign in let hyp = Term.it_mkProd_or_LetIn (Constr.mkApp (mkApp (mkRel (i + 1 + len + n + List.length sign), Array.of_list indargs'), [| Term.applistc (lift (List.length sign) result) (Context.Rel.instance_list mkRel 0 sign) |])) sign in let hyps = cmap_add hyp !occ hyps in let () = incr occ in let c' = Term.applist (result, rest) in hyps, c' | None -> let hyps = Array.fold_left (fun hyps arg -> let hyps', arg' = aux n env hyps arg in hyps') hyps args in hyps, mkApp (f', args)) else let c' = if do_subst then (EConstr.Unsafe.to_constr (EConstr.Vars.substnl proto_fs (len + n) (EConstr.of_constr c))) else c in hyps, c' in clean_rec_calls sigma (aux 0 [] CMap.empty (EConstr.Unsafe.to_constr c)) open EConstr let subst_app sigma f fn c = let rec aux n c = match kind sigma c with | Constr.App (f', args) when eq_constr sigma f f' -> let args' = Array.map (map_with_binders sigma succ aux n) args in fn n f' args' | Constr.Var _ when eq_constr sigma f c -> fn n c [||] | _ -> map_with_binders sigma succ aux n c in aux 0 c let subst_comp_proj sigma f proj c = subst_app sigma proj (fun n x args -> mkApp (f, if Array.length args > 0 then Array.sub args 0 (Array.length args - 1) else args)) c (* Substitute occurrences of [proj] by [f] in the splitting. 
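   Concretely, [subst_comp_proj] above rewrites every application of [proj] to arguments [a1 .. an] into [f] applied to [a1 .. a(n-1)], i.e. the last argument is dropped.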
*) let subst_comp_proj_split sigma f proj s = map_split (subst_comp_proj sigma f proj) s let is_ind_assum env sigma ind b = let _, concl = decompose_prod_decls sigma b in let t, _ = decompose_app sigma concl in if isInd sigma t then let (ind', _), _ = destInd sigma t in Environ.QMutInd.equal env ind' ind else false let clear_ind_assums env sigma ind ctx = let rec clear_assums c = match kind sigma c with | Constr.Prod (na, b, c) -> if is_ind_assum env sigma ind b then (assert(not (Termops.dependent sigma (mkRel 1) c)); clear_assums (Vars.subst1 mkProp c)) else mkProd (na, b, clear_assums c) | Constr.LetIn (na, b, t, c) -> mkLetIn (na, b, t, clear_assums c) | _ -> c in map_rel_context clear_assums ctx let type_of_rel k ctx = Vars.lift k (get_type (List.nth ctx (pred k))) open Vars let compute_elim_type env evd user_obls is_rec protos k leninds ind_stmts all_stmts sign app elimty = let ctx, arity = decompose_prod_decls !evd elimty in let lenrealinds = List.length (List.filter (fun (_, (_,_,_,_,_,_,_,(kind,_)),_) -> regular_or_nested_rec kind) ind_stmts) in let newctx = if lenrealinds == 1 then CList.skipn (List.length sign + 2) ctx else ctx in (* Assumes non-dep mutual eliminator of the graph *) let newarity = if lenrealinds == 1 then it_mkProd_or_LetIn (Vars.substl [mkProp; app] arity) sign else let clean_one a sign fn = let ctx, concl = decompose_prod_decls !evd a in let newctx = CList.skipn 2 ctx in let newconcl = Vars.substl [mkProp; mkApp (fn, extended_rel_vect 0 sign)] concl in it_mkProd_or_LetIn newconcl newctx in let rec aux arity ind_stmts = match kind !evd arity, ind_stmts with | _, (i, ((fn, _), _, _, sign, ar, _, _, ((Where | Refine), cut)), _) :: stmts -> aux arity stmts | Constr.App (conj, [| arity; rest |]), (i, ((fn, _), _, _, sign, ar, _, _, (refine, cut)), _) :: stmts -> mkApp (conj, [| clean_one arity sign fn ; aux rest stmts |]) | _, (i, ((fn, _), _, _, sign, ar, _, _, _), _) :: stmts -> aux (clean_one arity sign fn) stmts | _, [] -> arity in aux arity ind_stmts in let newctx' = clear_ind_assums env !evd k newctx in if leninds == 1 then List.length newctx', it_mkProd_or_LetIn newarity newctx' else let sort = fresh_sort_in_family evd Sorts.InType in let methods, preds = CList.chop (List.length newctx - leninds) newctx' in let ppred, preds = CList.sep_last preds in let newpredfn i d (idx, (f', alias, path, sign, arity, pats, args, (refine, cut)), _) = if refine != Refine then d else let (n, b, t) = to_tuple d in let signlen = List.length sign in let ctx = of_tuple (anonR, None, arity) :: sign in let app = let argsinfo = match args with | Some (c, (arg, _argnolets)) -> let idx = signlen - arg + 1 in (* lift 1, over return value *) let ty = Vars.lift (idx (* 1 for return value *)) (get_type (List.nth sign (pred (pred idx)))) in Some (idx, ty, lift 1 c, mkRel idx) | None -> None in let transport = get_efresh logic_eq_case evd in let transport ty x y eq c cty = mkApp (transport, [| ty; mkLambda (nameR (Id.of_string "abs"), ty, Termops.replace_term !evd (Vars.lift 1 x) (mkRel 1) (Vars.lift 1 cty)); x; y; eq; (* equality *) c |]) in let lenargs, pargs, subst = match argsinfo with | None -> 0, List.map (lift 1) pats, [] | Some (i, ty, c, rel) -> let lenargs = 1 in let pargs, subst = List.fold_right (fun t (pargs, subst) -> let rel = lift lenargs rel in let default () = (* for equalities + return value *) let t' = lift (lenargs+1) (t) in let t' = Termops.replace_term !evd (lift (lenargs) c) rel t' in (t' :: pargs, subst) in match EConstr.kind !evd t with | Constr.Rel k -> let tty = 
lift (lenargs+1) (type_of_rel k sign) in if Termops.dependent !evd rel tty then let tr = if isRel !evd c then lift (lenargs+1) t else transport (lift lenargs ty) rel (lift lenargs c) (mkRel 1) (lift (lenargs+1) (t)) tty in let t' = if isRel !evd c then lift (lenargs+3) (t) else transport (lift (lenargs+2) ty) (lift 2 rel) (mkRel 2) (mkRel 1) (lift (lenargs+3) (t)) (lift 2 tty) in (tr :: pargs, (k, t') :: subst) else default () | _ -> default ()) pats ([], []) in lenargs, pargs, subst in let result = match argsinfo with | None -> mkRel 1 | Some (i, ty, c, rel) -> (* Lift over equality *) let arity = lift 1 arity in (* equations_debug Pp.(fun () -> str"Testing dependency of " ++ Printer.pr_econstr_env env !evd arity ++ * str" in " ++ int i); *) let replace_term t tr acc = (* equations_debug Pp.(fun () -> str"Replacing term " ++ Printer.pr_econstr_env env !evd t ++ * str" by " ++ Printer.pr_econstr_env env !evd tr ++ str " in " ++ * Printer.pr_econstr_env env !evd acc); *) Termops.replace_term !evd t tr acc in if Termops.dependent !evd (mkRel i) arity then let acc = mkRel 2 in (* Under refine equality, reference to inner result of refine *) let pred = lift 3 arity in (* Under result binding, refine equality, and transport by it: abstracted endpoint and eq *) (* equations_debug Pp.(fun () -> str"Refined constrs " ++ * prlist_with_sep spc (fun (rel, t) -> * Printer.pr_econstr_env env !evd (mkRel rel) ++ str " -> " ++ * Printer.pr_econstr_env env !evd t) subst); *) (* The predicate is dependent on the refined variable. *) let eqty = mkEq env evd (lift 2 ty) (mkRel 1) (lift 2 rel) in let pred' = let absterm = replace_term (mkRel (i + 3)) (mkRel 2) pred in (* equations_debug Pp.(fun () -> str"Abstracted term " ++ Printer.pr_econstr_env env !evd absterm); *) List.fold_left (fun acc (t, tr) -> replace_term (mkRel (t + 4)) tr acc) absterm subst in let app = if noccurn !evd 1 pred' then let transport = get_efresh logic_eq_case evd in mkApp (transport, [| lift lenargs ty; mkLambda (nameR (Id.of_string "refine"), lift lenargs ty, subst1 mkProp pred'); lift lenargs rel; lift lenargs c; mkRel 1 (* equality *); acc |]) else let transportd = get_efresh logic_eq_elim evd in mkApp (transportd, [| lift lenargs ty; lift lenargs rel; mkLambda (nameR (Id.of_string "refine"), lift lenargs ty, mkLambda (nameR (Id.of_string "refine_eq"), eqty, pred')); acc; (lift lenargs c); mkRel 1 (* equality *) |]) in app else mkRel 2 in let ppath = (* The preceding P *) match path with | _ :: path -> (let res = list_find_map_i (fun i' (_, (_, _, path', _, _, _, _, _), _) -> if eq_path path' path then Some (idx + 1 - i') else None) 1 ind_stmts in match res with None -> assert false | Some i -> i) | _ -> assert false in let papp = applistc (lift (succ signlen + lenargs) (mkRel ppath)) pargs in let papp = applistc papp [result] in let refeqs = Option.map (fun (i, ty, c, rel) -> mkEq env evd ty c rel) argsinfo in let app c = match refeqs with | Some eqty -> mkProd (nameR (Id.of_string "Heq"), eqty, c) | None -> c in let indhyps = match args with | Some (c, _) -> let hyps, hypslen, c' = abstract_rec_calls !evd user_obls ~do_subst:false is_rec signlen protos (Reductionops.nf_beta env !evd (lift 1 c)) in let lifthyps = lift_rel_contextn (signlen + 2) (- (pred i)) hyps in lifthyps | None -> [] in it_mkLambda_or_LetIn (app (it_mkProd_or_clean env !evd (lift (List.length indhyps) papp) (lift_rel_context lenargs indhyps))) ctx in let ty = it_mkProd_or_LetIn sort ctx in of_tuple (n, Some app, ty) in let newpreds = CList.map2_i newpredfn 1 
preds (List.rev (List.tl ind_stmts)) in let skipped, methods' = (* Skip the indirection methods due to refinements, as they are trivially provable *) let rec aux stmts meths n meths' = match stmts, meths with | (Refine, _, _, _) :: stmts, decl :: decls -> aux stmts (Equations_common.subst_telescope mkProp decls) (succ n) meths' | (_, _, _, None) :: stmts, decls -> (* Empty node, no constructor *) aux stmts decls n meths' | (_, _, _, _) :: stmts, decl :: decls -> aux stmts decls n (decl :: meths') | [], [] -> n, meths' | [], decls -> n, List.rev decls @ meths' | (_, _, _, Some _) :: stmts, [] -> anomaly Pp.(str"More statemsnts than declarations while computing eliminator") in aux all_stmts (List.rev methods) 0 [] in let ctx = methods' @ newpreds @ [ppred] in let elimty = it_mkProd_or_LetIn (lift (-skipped) newarity) ctx in let undefpreds = List.length (List.filter (fun decl -> Option.is_empty (get_value decl)) newpreds) in let nargs = List.length methods' + undefpreds + 1 in nargs, elimty let replace_vars_context sigma inst ctx = List.fold_right (fun decl (k, acc) -> let decl' = map_rel_declaration (substn_vars sigma k inst) decl in (succ k, decl' :: acc)) ctx (1, []) let pr_where env sigma ctx ({where_type} as w) = let open Pp in let envc = Environ.push_rel_context ctx env in Printer.pr_econstr_env envc sigma (where_term w) ++ fnl () ++ str"where " ++ Names.Id.print (where_id w) ++ str" : " ++ Printer.pr_econstr_env envc sigma where_type ++ str" := " ++ fnl () ++ Context_map.pr_context_map env sigma w.where_program.program_prob ++ fnl () ++ pr_splitting env sigma w.where_program.program_splitting let where_instance w = List.map (fun w -> where_term w) w let arguments sigma c = snd (decompose_app sigma c) let unfold_constr sigma c = Tactics.unfold_in_concl [(Locus.OnlyOccurrences [1], Evaluable.EvalConstRef (fst (destConst sigma c)))] let extend_prob_ctx delta map = { src_ctx = delta @ map.src_ctx; map_inst = Context_map.lift_pats (List.length delta) map.map_inst; tgt_ctx = map.tgt_ctx } let map_proto evd recarg f ty = match recarg with | Some recarg -> let lctx, ty' = decompose_prod_decls evd ty in (* Feedback.msg_debug Pp.(str"map_proto: " ++ Printer.pr_econstr_env (Global.env()) evd ty ++ str" recarg = " ++ int recarg); *) let app = let args = Termops.rel_list 0 (List.length lctx) in let before, after = if recarg == -1 then CList.drop_last args, [] else let bf, after = CList.chop recarg args in bf, List.tl after in applistc (lift (List.length lctx) f) (before @ after) in it_mkLambda_or_LetIn app lctx | None -> f type rec_subst = (Names.Id.t * (int option * EConstr.constr)) list let cut_problem evd s ctx = (* From Γ, x := t, D |- id_subst (G, x, D) : G, x : _, D to oΓ, D[t] |- id_subst (G, D) | G[ ps, prec, ps' : Δ, rec, Δ', and s : prec -> Γ |- t : rec build Γ |- ps, ps' : Δ, Δ'[prec/rec] *) let rec fn s ctxm = match s with | [] -> ctxm | (id, (recarg, term)) :: s -> try let { src_ctx = ctxl; map_inst = pats; tgt_ctx = ctxr } = ctxm in let rel, _, ty = Termops.lookup_rel_id id ctxr in let fK = map_proto evd recarg term (lift rel ty) in let ctxr' = subst_in_ctx rel fK ctxr in let left, right = CList.chop (pred rel) pats in let right' = List.tl right in let s' = List.map (fun (id, (recarg, t)) -> id, (recarg, substnl [fK] rel t)) s in fn s' { src_ctx = ctxl; map_inst = List.append left right'; tgt_ctx = ctxr' } with Not_found -> fn s ctxm in fn s (id_subst ctx) let subst_rec env evd cutprob s lhs = let ctx = lhs.src_ctx in let subst = List.fold_left (fun lhs' (id, (recarg, b)) -> let ctx = 
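(* Roughly: substitute each recursive prototype of [s] for its context
   variable in [lhs] (the prototype term is first wrapped by [map_proto],
   which drops the well-founded recursion argument from the application when
   there is one), then compose the resulting substitution with the cut
   problem [cutprob]. *)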
lhs'.src_ctx in try let rel, _, ty = Termops.lookup_rel_id id ctx in (* Feedback.msg_debug Pp.(str"lhs': " ++ pr_context_map env evd lhs'); *) let fK = map_proto evd recarg (mapping_constr evd lhs b) (lift rel ty) in (* Feedback.msg_debug Pp.(str"fk: " ++ Printer.pr_econstr_env (push_rel_context ctx env) evd fK); *) let substf = single_subst env evd rel (PInac fK) ctx in (* ctx[n := f] |- _ : ctx *) let substctx = compose_subst env ~sigma:evd substf lhs' in (* Feedback.msg_debug Pp.(str"substituted context: " ++ pr_context_map env evd substctx); *) substctx with Not_found (* lookup *) -> lhs') (id_subst ctx) s in let csubst = compose_subst env ~sigma:evd (compose_subst env ~sigma:evd subst lhs) cutprob in subst, csubst let subst_protos s gr = let open Context.Rel.Declaration in let modified = ref false in let env = Global.env () in let sigma = Evd.from_env env in let sigma, cst = EConstr.fresh_global env sigma gr in let ty = Retyping.get_type_of env sigma cst in let rec aux env sigma args ty = match kind sigma ty with | Constr.Prod (na, b, ty) -> begin try match na.binder_name with | Name id -> let cst = List.find (fun s -> CString.is_prefix (Id.to_string (Label.to_id (Constant.label s))) (Id.to_string id)) s in let ctx, concl = decompose_prod_assum sigma b in let lctx = List.tl ctx in let sigma, cstref = EConstr.fresh_global env sigma (GlobRef.ConstRef cst) in let appl = it_mkLambda_or_LetIn (mkApp (cstref, extended_rel_vect 1 lctx)) ctx in equations_debug Pp.(fun () -> str"Replacing variable with " ++ Printer.pr_econstr_env env sigma appl); modified := true; aux env sigma (appl :: args) (subst1 appl ty) | Anonymous -> raise Not_found with Not_found -> let sigma, term = aux (push_rel (LocalAssum (na, b)) env) sigma (mkRel 1 :: List.map (lift 1) args) ty in sigma, mkLambda (na, b, term) end | Constr.LetIn (na, b, t, ty) -> let sigma, term = aux (push_rel (LocalDef (na, b, t)) env) sigma (List.map (lift 1) args) ty in sigma, mkLetIn (na, b, t, term) | _ -> let term = mkApp (cst, CArray.rev_of_list args) in sigma, term in let sigma, term = aux env sigma [] ty in if !modified then (* let ty = Reductionops.nf_beta env sigma ty in *) (equations_debug Pp.(fun () -> str"Fixed hint " ++ Printer.pr_econstr_env env sigma term); let sigma, _ = Typing.type_of env sigma term in let sigma = Evd.minimize_universes sigma in Hints.hint_constr (Evarutil.nf_evar sigma term, Some (Evd.sort_context_set sigma))) else Hints.hint_globref gr [@@ocaml.warning "-3"] let declare_wf_obligations s info = let make_resolve gr = equations_debug Pp.(fun () -> str"Declaring wf obligation " ++ Printer.pr_global gr); (Hints.empty_hint_info, true, subst_protos s gr) in let dbname = Principles_proofs.wf_obligations_base info in let locality = if Global.sections_are_opened () then Hints.Local else Hints.SuperGlobal in Hints.create_hint_db false dbname TransparentState.full false; List.iter (fun obl -> let hint = make_resolve (GlobRef.ConstRef obl) in try Hints.add_hints ~locality [dbname] (Hints.HintsResolveEntry [hint]) with CErrors.UserError msg (* Cannot be used as a hint *) -> Feedback.msg_warning msg) info.comp_obls let map_fix_subst evd ctxmap s = List.map (fun (id, (recarg, f)) -> (id, (recarg, mapping_constr evd ctxmap f))) s (* Not necessary? 
If p.id is part of the substitution but isn't in the context we ignore it *) let _program_fixdecls p fixdecls = match p.Syntax.program_rec with | Some (Structural NestedNonRec) -> (* Actually the definition is not self-recursive *) List.filter (fun decl -> let na = Context.Rel.Declaration.get_name decl in let id = Nameops.Name.get_id na in not (Id.equal id p.program_id)) fixdecls | _ -> fixdecls let push_mapping_context env sigma decl (map, cut) = let open Context.Rel.Declaration in let decl' = map_rel_declaration (mapping_constr sigma cut) decl in let declassum = LocalAssum (get_annot decl, get_type decl) in { src_ctx = decl :: map.src_ctx; map_inst = PRel 1 :: List.map (lift_pat 1) map.map_inst; tgt_ctx = decl' :: map.tgt_ctx }, lift_subst env sigma cut [declassum] (** Assumes the declaration already live in \Gamma to produce \Gamma, decls |- ps : \Delta, decls *) let push_decls_map env evd (ctx : context_map) cut (g : rel_context) = let map, _ = List.fold_right (fun decl acc -> push_mapping_context env evd decl acc) g (ctx, cut) in check_ctx_map env evd map let _prsubst env evd s = Pp.(prlist_with_sep spc (fun (id, (recarg, f)) -> str (Id.to_string id) ++ str" -> " ++ Printer.pr_econstr_env env !evd f) s) let subst_rec_programs env evd ps = let where_map = ref PathMap.empty in let evd = ref evd in let cut_problem s ctx' = cut_problem !evd s ctx' in let subst_rec cutprob s lhs = subst_rec env !evd cutprob s lhs in let rec subst_programs path s ctxlen progs oterms = let fixsubst = let fn p oterm = match p.program_info.program_rec with | Some r -> let recarg = match r with | Structural _ -> None | WellFounded _ -> Some (Context.Rel.nhyps p.program_info.program_sign - ctxlen) in let oterm = lift (List.length p.program_prob.src_ctx - ctxlen) oterm in Some (p.program_info.program_id, (recarg, oterm)) | None -> None in let fixdecls = List.map2 fn progs oterms in List.rev fixdecls in let fixsubst = CList.map_filter (fun x -> x) fixsubst in (* The previous prototypes must be lifted w.r.t. the new variables bound in the where. 
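Concretely, each previously known prototype body is lifted by the number of
prototypes newly introduced in [fixsubst] before the two substitutions are
appended.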
*) let lifts = List.map (fun (id, (recarg, b)) -> (id, (recarg, lift (List.length fixsubst) b))) s in let s' = fixsubst @ lifts in (* Feedback.msg_debug Pp.(str"In subst_programs, pr_substs" ++ prsubst env evd s'); *) let one_program p oterm = let rec_prob, rec_arity = match p.program_rec with | Some { rec_prob; rec_arity } -> rec_prob, rec_arity | None -> p.program_prob, p.program_info.program_arity in let prog_info = p.program_info in let cutprob_sign = cut_problem s prog_info.program_sign in (* Feedback.msg_debug Pp.(str"In subst_programs: " ++ pr_context env !evd prog_info.program_sign); * Feedback.msg_debug Pp.(str"In subst_programs: cutprob_sign " ++ pr_context_map env !evd cutprob_sign); *) let cutprob_subst, _ = subst_rec cutprob_sign s (id_subst prog_info.program_sign) in (* Feedback.msg_debug Pp.(str"In subst_programs: subst_rec failed " ++ pr_context env !evd prog_info.program_sign); *) let program_info' = { prog_info with program_rec = None; program_sign = cutprob_subst.src_ctx; program_arity = mapping_constr !evd cutprob_subst prog_info.program_arity } in let program' = { p with program_info = program_info' } in let path' = p.program_info.program_id :: path in (* Feedback.msg_debug Pp.(str"In subst_programs, cut_problem s'" ++ pr_context env !evd (pi1 rec_prob)); *) let rec_cutprob = cut_problem s' rec_prob.src_ctx in let splitting' = aux rec_cutprob s' program' oterm path' p.program_splitting in let term', ty' = term_of_tree env evd (ESorts.make prog_info.program_sort) splitting' in { program_rec = None; program_info = program_info'; program_prob = id_subst cutprob_sign.tgt_ctx; program_term = term'; program_splitting = splitting' } in List.map2 one_program progs oterms and aux cutprob s p f path = function | Compute (lhs, where, ty, c) -> let ctx = lhs.src_ctx in let subst, lhs' = subst_rec cutprob s lhs in let lhss = map_fix_subst !evd lhs s in let progctx = (extend_prob_ctx (where_context where) lhs) in let substprog, _ = subst_rec cutprob s progctx in let islogical = List.exists (fun (id, (recarg, f)) -> Option.has_some recarg) s in let subst_where ({where_program; where_path; where_orig; where_program_args; where_type} as w) (subst_wheres, wheres) = (* subst_wheres lives in lhs', i.e. has prototypes substituted already *) let wcontext = where_context subst_wheres in let cutprob' = cut_problem s subst.tgt_ctx in (* Feedback.msg_debug Pp.(str"where_context in subst rec : " ++ pr_context env !evd wcontext); * Feedback.msg_debug Pp.(str"lifting subst : " ++ pr_context_map env !evd subst); * Feedback.msg_debug Pp.(str"cutprob : " ++ pr_context_map env !evd cutprob'); *) let wsubst0 = push_decls_map env !evd subst cutprob' wcontext in (* Feedback.msg_debug Pp.(str"new substitution in subst rec : " ++ pr_context_map env !evd wsubst0); *) let ctxlen = List.length wcontext + List.length ctx in let wp = where_program in let where_type = mapping_constr !evd wsubst0 where_type in (* The substituted prototypes must be lifted w.r.t. the new variables bound in this where and preceding ones. 
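Here the lift amount is the number of variables the where program adds on top
of the enclosing clause context [ctx], i.e. the length of its problem context
minus the length of [ctx].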
*) let s = List.map (fun (id, (recarg, b)) -> (id, (recarg, lift ((* List.length subst_wheres + *) List.length wp.program_prob.src_ctx - List.length ctx) b))) lhss in let wp' = match subst_programs path s ctxlen [wp] [where_term w] with | [wp'] -> wp' | _ -> assert false in let wp', args' = if islogical || (match wp.program_rec with Some { rec_node = WfRec _ } -> true | _ -> false) then let id = Nameops.add_suffix (path_id where_path) "_unfold_eq" in let id = Namegen.next_global_ident_away id Id.Set.empty in let where_program_term = mapping_constr !evd wsubst0 wp.program_term in let where_program_args = List.map (mapping_constr !evd wsubst0) where_program_args in where_map := PathMap.add where_path (applistc where_program_term where_program_args (* substituted *), id, wp'.program_splitting) !where_map; let where_program_args = extended_rel_list 0 lhs'.src_ctx in wp', where_program_args else let where_program_term = mapping_constr !evd wsubst0 wp.program_term in let where_program_args = List.map (mapping_constr !evd wsubst0) where_program_args in (* where_map := PathMap.add where_path * (applistc where_program_term where_program_args (\* substituted *\), Id.of_string ""(\*FIXNE*\), wp'.program_splitting) * !where_map; *) (* let where_program_args = extended_rel_list 0 (pi1 lhs') in *) { wp' with program_term = where_program_term }, where_program_args in let subst_where = {where_program = wp'; where_program_orig = wp.program_info; where_program_args = args'; where_path; where_orig; where_context_length = List.length lhs'.src_ctx; where_type } in (subst_where :: subst_wheres, w :: wheres) in let where', _ = List.fold_right subst_where where ([], []) in let c' = mapping_rhs !evd substprog c in let c' = map_rhs (Reductionops.nf_beta env !evd) (fun i -> i) c' in Compute (lhs', where', mapping_constr !evd substprog ty, c') | Split (lhs, n, ty, cs) -> let subst, lhs' = subst_rec cutprob s lhs in let n' = destRel !evd (mapping_constr !evd subst (mkRel n)) in Split (lhs', n', mapping_constr !evd subst ty, Array.map (Option.map (aux cutprob s p f path)) cs) | Mapping (lhs, c) -> let subst, lhs' = subst_rec cutprob s lhs in Mapping (lhs', aux cutprob s p f path c) | Refined (lhs, info, sp) -> let (id, c, cty), ty, arg, oterm, args, revctx, newprob, newty = info.refined_obj, info.refined_rettyp, info.refined_arg, info.refined_term, info.refined_args, info.refined_revctx, info.refined_newprob, info.refined_newty in (* Feedback.msg_debug Pp.(str"Before map to newprob " ++ prsubst env evd s); *) let lhss = map_fix_subst !evd lhs s in (* Feedback.msg_debug Pp.(str"lhs subst " ++ prsubst env evd lhss); *) let newprobs = map_fix_subst !evd info.refined_newprob_to_lhs lhss in (* Feedback.msg_debug Pp.(str"newprob subst: " ++ prsubst env evd newprobs); Feedback.msg_debug Pp.(str"Newprob to lhs: " ++ pr_context_map env !evd info.refined_newprob_to_lhs); Feedback.msg_debug Pp.(str"Newprob : " ++ pr_context_map env !evd newprob); *) let cutnewprob = cut_problem newprobs newprob.tgt_ctx in (* Feedback.msg_debug Pp.(str"cutnewprob : " ++ pr_context_map env !evd cutnewprob); *) let subst', newprob' = subst_rec cutnewprob newprobs newprob in (* Feedback.msg_debug Pp.(str"subst' : " ++ pr_context_map env !evd subst'); *) let subst, lhs' = subst_rec cutprob s lhs in (* Feedback.msg_debug Pp.(str"subst = " ++ pr_context_map env !evd subst); *) let _, revctx' = subst_rec (cut_problem s revctx.tgt_ctx) lhss revctx in let _, newprob_to_prob' = subst_rec (cut_problem lhss info.refined_newprob_to_lhs.tgt_ctx) lhss 
info.refined_newprob_to_lhs in let islogical = List.exists (fun (id, (recarg, f)) -> Option.has_some recarg) s in let path' = info.refined_path in let s' = aux cutnewprob newprobs p f path' sp in let count_lets len = let open Context.Rel.Declaration in let ctx' = newprob'.src_ctx in let rec aux ctx len = if len = 0 then 0 else match ctx with | LocalAssum _ :: ctx -> succ (aux ctx (pred len)) | LocalDef _ :: ctx -> succ (aux ctx len) | [] -> 0 in aux (List.rev ctx') len in let refarg = ref (0,0) in let refhead = if islogical then let term', _ = term_of_tree env evd (ESorts.make p.program_info.program_sort) s' in term' else mapping_constr !evd subst oterm in let args', filter = CList.fold_left_i (fun i (acc, filter) c -> if i == snd arg then (let len = List.length acc in refarg := (count_lets len, len)); if isRel !evd c then let d = List.nth lhs.src_ctx (pred (destRel !evd c)) in if List.mem_assoc (Nameops.Name.get_id (get_name d)) s then acc, filter else mapping_constr !evd subst c :: acc, i :: filter else mapping_constr !evd subst c :: acc, i :: filter) 0 ([], []) args in let args' = List.rev_map (Reductionops.nf_beta env !evd) args' in equations_debug Pp.(fun () -> str"Chopped args: " ++ prlist_with_sep spc (Printer.pr_econstr_env (push_rel_context subst.src_ctx env) !evd) args'); let term', args', arg', filter = if islogical then refhead, args', !refarg, None else (let sigma, refargs = constrs_of_pats ~inacc_and_hide:false env !evd subst'.map_inst in let refterm = it_mkLambda_or_LetIn (applistc refhead (List.rev refargs)) subst'.src_ctx in equations_debug Pp.(fun () -> str"refterm: " ++ (Printer.pr_econstr_env env !evd refterm)); let refarg = (fst arg - List.length s, snd arg - List.length s) in (refterm, args', refarg, Some (List.rev filter))) in let c' = Reductionops.nf_beta env !evd (mapping_constr !evd subst c) in let info = { refined_obj = (id, c', mapping_constr !evd subst cty); refined_rettyp = mapping_constr !evd subst ty; refined_arg = arg'; refined_path = path'; refined_term = term'; refined_filter = filter; refined_args = args'; refined_revctx = revctx'; refined_newprob = newprob'; refined_newprob_to_lhs = newprob_to_prob'; refined_newty = mapping_constr !evd subst' newty } in Refined (lhs', info, s') in let programs' = subst_programs [] [] 0 ps (List.map (fun p -> p.program_term) ps) in !where_map, programs' let unfold_programs ~pm env evd flags rec_type progs = let where_map, progs' = subst_rec_programs env !evd (List.map fst progs) in equations_debug (fun () -> Pp.str"subst_rec finished"); if PathMap.is_empty where_map && not (has_logical rec_type) then let one_program (p, prog) p' = let norecprob = Context_map.id_subst (program_sign p) in let eqninfo = Principles_proofs.{ equations_id = p.program_info.program_id; equations_where_map = where_map; equations_f = p.program_term; equations_prob = norecprob } in let p = { p with program_splitting = p'.program_splitting } in p, None, prog, eqninfo in pm, List.map2 one_program progs progs' else let one_program pm (p, prog) unfoldp = let pi = p.program_info in let i = pi.program_id in let sign = pi.program_sign in let arity = pi.program_arity in let prob = Context_map.id_subst sign in (* let () = Feedback.msg_debug (str"defining unfolding" ++ spc () ++ pr_splitting env split) in *) (* We first define the unfolding and show the fixpoint equation. 
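The auxiliary program is named <id>_unfold; it is defined immediately via
[define_program_immediate] and its constant is recorded in the compiled
program info so that the later correctness statements can refer to it.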
*) let unfoldi = Nameops.add_suffix i "_unfold" in let unfpi = { pi with program_id = unfoldi; program_sign = sign; program_arity = arity } in let unfoldp = make_single_program env evd flags unfpi prob unfoldp.program_splitting None in let (unfoldp, term_info), pm, _pstate = define_program_immediate ~pm env evd UState.default_univ_decl [None] [] flags ~unfold:true unfoldp in let eqninfo = Principles_proofs.{ equations_id = i; equations_where_map = where_map; equations_f = unfoldp.program_term; equations_prob = prob } in let cst, _ = destConst !evd unfoldp.program_term in let cpi' = { program_cst = cst; program_split_info = term_info } in pm, (p, Some (unfoldp, cpi'), prog, eqninfo) in CList.fold_left2_map one_program pm progs progs' let subst_app sigma f fn c = let rec aux n c = match kind sigma c with | Constr.App (f', args) when eq_constr sigma f f' -> let args' = Array.map (map_with_binders sigma succ aux n) args in fn n f' args' | Constr.Var _ when eq_constr sigma f c -> fn n c [||] | _ -> map_with_binders sigma succ aux n c in aux 0 c let substitute_alias evd ((f, fargs), term) c = subst_app evd f (fun n f args -> if n = 0 then let args' = filter_arguments fargs (Array.to_list args) in applist (term, args') else mkApp (f, args)) c let substitute_aliases evd fsubst c = List.fold_right (substitute_alias evd) fsubst c type alias = ((EConstr.t * int list) * Names.Id.t * Splitting.splitting) let make_alias (f, id, s) = ((f, []), id, s) let smash_rel_context sigma ctx = let open Context.Rel.Declaration in List.fold_right (fun decl (subst, pats, ctx') -> match get_value decl with | Some b -> let b' = substl subst b in (b' :: subst, List.map (lift_pat 1) pats, ctx') | None -> (mkRel 1 :: List.map (lift 1) subst, PRel 1 :: List.map (lift_pat 1) pats, map_constr (Vars.substl subst) decl :: ctx')) ctx ([], [], []) let _remove_let_pats sigma subst patsubst pats = let remove_let pat pats = match pat with | PRel n -> let pat = List.nth patsubst (pred n) in (match pat with | PInac _ -> pats | p -> p :: pats) | _ -> specialize sigma patsubst pat :: pats in List.fold_right remove_let pats [] let smash_ctx_map env sigma m = let r = m.tgt_ctx in let subst, patsubst, r' = smash_rel_context sigma r in let smashr' = { src_ctx = r; map_inst = patsubst; tgt_ctx = r' } in compose_subst env ~sigma m smashr', subst let pattern_instance ctxmap = List.rev_map pat_constr (filter_def_pats ctxmap) type computation = Computation of Equations_common.rel_context * EConstr.t * alias option * EConstr.constr list * EConstr.t * EConstr.t * (node_kind * bool) * Splitting.splitting_rhs * ((EConstr.t * int list) * alias option * Splitting.path * Equations_common.rel_context * EConstr.t * EConstr.constr list * (EConstr.constr * (int * int)) option * computation list) list option let computations env evd alias refine p eqninfo : computation list = let { equations_prob = prob; equations_where_map = wheremap; equations_f = f } = eqninfo in let rec program_computations env prob f alias fsubst refine p : computation list = computations env prob f alias fsubst (fst refine, false) p.program_splitting and computations env prob f alias fsubst refine = function | Compute (lhs, where, ty, c) -> let where_comp w (wheres, where_comps) = (* Where term is in lhs + wheres *) let lhsterm = substl wheres (where_term w) in let term, args = decompose_app_list evd lhsterm in let alias, fsubst = try let (f, id, s) = PathMap.find w.where_path wheremap in let f, fargs = decompose_appvect evd f in let args = match_arguments evd (arguments evd 
(where_term w)) fargs in let fsubst = ((f, args), term) :: fsubst in (* Feedback.msg_debug Pp.(str"Substituting " ++ Printer.pr_econstr_env env evd f ++ spc () ++ prlist_with_sep spc int args ++ str" by " ++ Printer.pr_econstr_env env evd term); *) Some ((f, args), id, s), fsubst with Not_found -> None, fsubst in let term_ty = Retyping.get_type_of env evd term in let subterm, filter = let rec aux ty i args' = match kind evd ty, args' with | Constr.Prod (na, b, ty), a :: args' -> if EConstr.isRel evd a then (* A variable from the context that was not substituted by a recursive prototype, we keep it *) let term', len = aux ty (succ i) args' in mkLambda (na, b, term'), i :: len else (* The argument was substituted, we keep that substitution *) let term', len = aux (subst1 a ty) (succ i) args' in term', len | _, [] -> lhsterm, [] | _, _ :: _ -> assert false in aux term_ty 0 args in let wsmash, smashsubst = smash_ctx_map env evd (id_subst w.where_program.program_info.program_sign) in let comps = program_computations env wsmash subterm None fsubst (Regular,false) w.where_program in let arity = w.where_program.program_info.program_arity in let termf = if not (PathMap.is_empty wheremap) then subterm, [0] else subterm, filter in let where_comp = (termf, alias, w.where_orig, wsmash.src_ctx, (* substl smashsubst *) arity, pattern_instance wsmash, None (* no refinement *), comps) in (lhsterm :: wheres, where_comp :: where_comps) in let inst, wheres = List.fold_right where_comp where ([],[]) in let ctx = compose_subst env ~sigma:evd lhs prob in if !Equations_common.debug then Feedback.msg_debug Pp.(str"where_instance: " ++ prlist_with_sep spc (Printer.pr_econstr_env env evd) inst); let envlhs = (push_rel_context lhs.src_ctx env) in let envwhere = push_rel_context (where_context where) envlhs in let fn c = (* Feedback.msg_debug Pp.(str"substituting in c= " ++ Printer.pr_econstr_env envwhere evd c); *) let c' = Reductionops.nf_beta env evd (substl inst c) in (* Feedback.msg_debug Pp.(str"after where instance substitution: " ++ Printer.pr_econstr_env envlhs evd c'); *) substitute_aliases evd fsubst c' in let c' = map_rhs (fun c -> fn c) (fun x -> x) c in if !Equations_common.debug then (Feedback.msg_debug Pp.(str"substituting in: " ++ pr_splitting_rhs ~verbose:true envlhs envwhere evd lhs c ty); Feedback.msg_debug Pp.(str"substituted: " ++ pr_splitting_rhs ~verbose:true envlhs envlhs evd lhs c' ty)); let patsconstrs = pattern_instance ctx in let ty = substl inst ty in [Computation (ctx.src_ctx, f, alias, patsconstrs, ty, f, (Where, snd refine), c', Some wheres)] | Split (_, _, _, cs) -> Array.fold_left (fun acc c -> match c with | None -> acc | Some c -> acc @ computations env prob f alias fsubst refine c) [] cs | Mapping (lhs, c) -> let _newprob = compose_subst env ~sigma:evd prob lhs in computations env prob f alias fsubst refine c | Refined (lhs, info, cs) -> let (id, c, t) = info.refined_obj in let s = compose_subst env ~sigma:evd lhs prob in let patsconstrs = pattern_instance s in let refineds = compose_subst env ~sigma:evd info.refined_newprob_to_lhs s in let refinedpats = pattern_instance refineds in let progterm = applistc info.refined_term info.refined_args in let filter = Option.default [Array.length (arguments evd info.refined_term)] info.refined_filter in (* Feedback.msg_debug Pp.(str"At refine node: " ++ pr_context_map env evd info.refined_newprob); Feedback.msg_debug Pp.(str"At refine node: " ++ pr_context_map env evd info.refined_revctx); Feedback.msg_debug Pp.(str"At refine node, refined 
term: " ++ Printer.pr_econstr_env (push_rel_context (pi1 lhs) env) evd info.refined_term); Feedback.msg_debug Pp.(str"At refine node, program term: " ++ Printer.pr_econstr_env (push_rel_context (pi1 lhs) env) evd progterm); *) [Computation (lhs.src_ctx, f, alias, patsconstrs, info.refined_rettyp, f, (Refine, true), RProgram progterm, Some [(info.refined_term, filter), None, info.refined_path, info.refined_newprob.src_ctx, info.refined_newty, refinedpats, Some (mapping_constr evd info.refined_newprob_to_lhs c, info.refined_arg), computations env info.refined_newprob (lift 1 info.refined_term) None fsubst (Regular, true) cs])] in program_computations env prob f alias [] refine p let constr_of_global_univ gr u = let open Names.GlobRef in match gr with | ConstRef c -> mkConstU (c, u) | IndRef i -> mkIndU (i, u) | ConstructRef c -> mkConstructU (c, u) | VarRef id -> mkVar id let declare_funelim ~pm info env evd is_rec protos progs ind_stmts all_stmts sign app subst inds kn comb sort indgr ectx = let id = Id.of_string info.base_id in let leninds = List.length inds in let elim = comb in let elimc, elimty = let elimty, uctx = Typeops.type_of_global_in_context (Global.env ()) elim in let () = evd := Evd.from_env (Global.env ()) in if is_polymorphic info then (* We merge the contexts of the term and eliminator in which ind_stmts and all_stmts are derived, universe unification will take care of unifying the eliminator's fresh instance with the universes of the constant and the functional induction lemma. *) let () = evd := Evd.merge_universe_context !evd info.term_ustate in let () = evd := Evd.merge_universe_context !evd ectx in let sigma, elimc = Evd.fresh_global (Global.env ()) !evd elim in let elimty = Retyping.get_type_of env sigma elimc in let () = evd := sigma in elimc, elimty else (* If not polymorphic, we just use the global environment's universes for f and elim *) (let elimc = constr_of_global_univ elim EInstance.empty in elimc, of_constr elimty) in let nargs, newty = compute_elim_type env evd info.user_obls is_rec protos kn leninds ind_stmts all_stmts sign app elimty in let hookelim { Declare.Hook.S.dref; _ } = let env = Global.env () in let evd = Evd.from_env env in let f_gr = Nametab.locate (Libnames.qualid_of_ident id) in let evd, f = new_global evd f_gr in let evd, elimcgr = new_global evd dref in let evd, cl = functional_elimination_class evd in let evd, args_of_elim = coq_nat_of_int evd nargs in let args = [Retyping.get_type_of env evd f; f; Retyping.get_type_of env evd elimcgr; args_of_elim; elimcgr] in let instid = Nameops.add_prefix "FunctionalElimination_" id in let poly = is_polymorphic info in ignore(Equations_common.declare_instance instid ~poly evd [] cl args) in let tactic = ind_elim_tac elimc leninds (List.length progs) info indgr in let _ = try equations_debug Pp.(fun () -> str"Type-checking elimination principle: " ++ fnl () ++ Printer.pr_econstr_env env !evd newty); ignore (e_type_of (Global.env ()) evd newty); equations_debug (fun () -> Pp.str"Functional elimination principle type-checked"); with Type_errors.TypeError (env, tyerr) -> CErrors.user_err Pp.(str"Error while typechecking elimination principle type: " ++ Himsg.explain_pretype_error env !evd (Pretype_errors.TypingError (Pretype_errors.of_type_error tyerr))) in let newty = collapse_term_qualities (Evd.evar_universe_context !evd) (EConstr.to_constr !evd newty) in let cinfo = Declare.CInfo.make ~name:(Nameops.add_suffix id "_elim") ~typ:newty () in let info = Declare.Info.make ~poly:info.poly ~scope:info.scope 
~hook:(Declare.Hook.make hookelim) ~kind:(Decls.IsDefinition info.decl_kind) () in let pm, _ = Declare.Obls.add_definition ~pm ~cinfo ~info ~tactic ~opaque:false ~uctx:(Evd.evar_universe_context !evd) [||] in pm let mkConj evd sort x y = let prod = get_efresh logic_product evd in mkApp (prod, [| x; y |]) let declare_funind ~pm info alias env evd is_rec protos progs ind_stmts all_stmts sign inds kn comb sort f split = let poly = is_polymorphic info.term_info in let id = Id.of_string info.term_info.base_id in let indid = Nameops.add_suffix id "_graph_correct" in (* Record nested statements which can be repeated during the proof *) let nested_statements = ref [] in let statement = let stmt (i, ((f,_), alias, path, sign, ar, _, _, (nodek, cut)), _) = if not (regular_or_nested nodek) then None else let f, split, unfsplit = match alias with | Some ((f,_), _, recsplit) -> f, recsplit, Some split | None -> f, split, None in let args = extended_rel_list 0 sign in let app = applist (f, args) in let ind = Nameops.add_suffix (path_id path)(* Id.of_string info.term_info.base_id) *) ("_graph" (* ^ if i == 0 then "" else "_" ^ string_of_int i *)) in let indt = e_new_global evd (global_reference ind) in let ty = it_mkProd_or_subst env !evd (applist (indt, args @ [app])) sign in let (prog, _, _, _) = List.find (fun (p, _, _, _) -> Id.equal p.program_info.program_id (path_id path)) progs in if nested nodek then nested_statements := (path_id path, ty, prog) :: !nested_statements; Some ty in match ind_stmts with | [] -> assert false | [hd] -> Option.get (stmt hd) | hd :: tl -> let l, last = let rec aux l = let last, l = CList.sep_last l in match stmt last with | None -> aux l | Some t -> t, l in aux ind_stmts in List.fold_right (fun x acc -> match stmt x with | Some t -> mkConj evd sort t acc | None -> acc) last l in let args = Termops.rel_list 0 (List.length sign) in let f = match alias with | Some ((f, _), _, _) -> f | None -> f in let app = applist (f, args) in let hookind { Declare.Hook.S.uctx; scope; dref; _ } pm = let env = Global.env () in (* refresh *) let locality = if Global.sections_are_opened () then Hints.Local else Hints.SuperGlobal in Hints.add_hints ~locality [info.term_info.base_id] (Hints.HintsImmediateEntry [Hints.hint_globref dref]); let pm = try declare_funelim ~pm info.term_info env evd is_rec protos progs ind_stmts all_stmts sign app scope inds kn comb sort dref uctx with | Type_errors.TypeError (env, tyerr) -> CErrors.user_err Pp.(str"Functional elimination principle could not be proved automatically: " ++ Himsg.explain_pretype_error env !evd (Pretype_errors.TypingError (Pretype_errors.of_type_error tyerr))) | Pretype_errors.PretypeError (env, sigma, tyerr) -> CErrors.user_err Pp.(str"Functional elimination principle could not be proved automatically: " ++ Himsg.explain_pretype_error env sigma tyerr) | e -> Feedback.msg_warning Pp.(str "Functional elimination principle could not be proved automatically: " ++ fnl () ++ CErrors.print e); pm in let evd = Evd.from_env env in let f_gr = Nametab.locate (Libnames.qualid_of_ident id) in let evd, f = new_global evd f_gr in let evd, indcgr = new_global evd dref in let evd, cl = functional_induction_class evd in let args = [Retyping.get_type_of env evd f; f; Retyping.get_type_of env evd indcgr; indcgr] in let instid = Nameops.add_prefix "FunctionalInduction_" id in ignore(Equations_common.declare_instance instid ~poly evd [] cl args); (* If desired the definitions should be made transparent again. 
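This is governed by the Equations Transparent flag
(!Equations_common.equations_transparent): when it is unset the constants are
instead made opaque for the conversion oracle, and the opacity is registered
with [inOpacity] so that this information is also discharged at section
closing.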
*) begin if !Equations_common.equations_transparent then (Global.set_strategy (Conv_oracle.EvalConstRef (fst (destConst evd f))) Conv_oracle.transparent; match alias with | None -> () | Some ((f, _), _, _) -> Global.set_strategy (Conv_oracle.EvalConstRef (fst (destConst evd f))) Conv_oracle.transparent) else ((* Otherwise we turn them opaque and let that information be discharged as well *) Lib.add_leaf (inOpacity (fst (destConst evd f))); match alias with | None -> () | Some ((f, _), _, _) -> Lib.add_leaf (inOpacity (fst (destConst evd f)))) end; pm in let evm, stmtt = Typing.type_of (Global.env ()) !evd statement in let () = evd := evm in let to_constr c = collapse_term_qualities (Evd.evar_universe_context !evd) (EConstr.to_constr !evd c) in let stmt = to_constr statement and f = to_constr f in let uctx = Evd.evar_universe_context (if poly then !evd else Evd.from_env (Global.env ())) in let launch_ind ~pm tactic = let pm, res = let cinfo = Declare.CInfo.make ~name:indid ~typ:stmt () in let info = Declare.Info.make ~poly ~kind:(Decls.IsDefinition info.term_info.decl_kind) () in let obl_hook = Declare.Hook.make_g hookind in Declare.Obls.add_definition ~pm ~cinfo ~info ~obl_hook ~opaque:false ~tactic:(Tacticals.tclTRY tactic) ~uctx [||] in match res with | Declare.Obls.Defined gr -> () | Declare.Obls.Remain _ -> Feedback.msg_warning Pp.(str "Functional induction principle could not be proved automatically, it \ is left as an obligation.") | Declare.Obls.Dependent -> (* Only 1 obligation *) assert false in let tac = (ind_fun_tac is_rec f info id !nested_statements progs) in try launch_ind ~pm tac with Type_errors.TypeError (env, tyerr) -> CErrors.user_err Pp.(str"Functional induction principle could not be proved automatically: " ++ Himsg.explain_pretype_error env !evd (Pretype_errors.TypingError (Pretype_errors.of_type_error tyerr))) | e when CErrors.noncritical e -> Feedback.msg_warning Pp.(str "Functional induction principle could not be proved automatically: " ++ fnl () ++ CErrors.print e); launch_ind ~pm (Proofview.tclUNIT ()) let max_sort s1 s2 = let open Sorts in match s1, s2 with | (SProp, SProp) | (Prop, Prop) | (Set, Set) -> s1 | (SProp, (Prop | Set | Type _ as s)) | ((Prop | Set | Type _) as s, SProp) -> s | (Prop, (Set | Type _ as s)) | ((Set | Type _) as s, Prop) -> s | (Set, Type u) | (Type u, Set) -> Sorts.sort_of_univ (Univ.Universe.sup Univ.Universe.type0 u) | (Type u, Type v) -> Sorts.sort_of_univ (Univ.Universe.sup u v) | (QSort _, _) | (_, QSort _) -> assert false let level_of_context env evd ctx acc = let _, lev = List.fold_right (fun decl (env, lev) -> let s = Retyping.get_sort_of env evd (get_type decl) in let s = ESorts.kind evd s in (push_rel decl env, max_sort s lev)) ctx (env,acc) in lev let all_computations env evd alias progs = let comps = let fn p unfp = let p = Option.default p unfp in computations env evd alias (kind_of_prog p.program_info,false) p in List.map (fun (p, unfp, prog, eqninfo) -> p, eqninfo, fn p unfp eqninfo) progs in let rec flatten_comp (Computation (ctx, fl, flalias, pats, ty, f, refine, c, rest)) = let rest = match rest with | None -> [] | Some l -> CList.map_append (fun (f, alias, path, ctx, ty, pats, newargs, rest) -> let nextlevel, rest = flatten_comps rest in ((f, alias, path, ctx, ty, pats, newargs, refine), nextlevel) :: rest) l in (ctx, fl, flalias, pats, ty, f, refine, c), rest and flatten_comps r = List.fold_right (fun cmp (acc, rest) -> let stmt, rest' = flatten_comp cmp in (stmt :: acc, rest' @ rest)) r ([], []) in let 
flatten_top_comps (p, eqninfo, one_comps) acc = let (top, rest) = flatten_comps one_comps in let pi = p.program_info in let topcomp = (((eqninfo.equations_f,[]), alias, [pi.program_id], pi.program_sign, pi.program_arity, List.rev_map pat_constr eqninfo.equations_prob.map_inst, None, (kind_of_prog pi,false)), top) in topcomp :: (rest @ acc) in List.fold_right flatten_top_comps comps [] let unfold_fix = let open Proofview in Proofview.Goal.enter (fun gl -> let sigma = Goal.sigma gl in match kind sigma (Goal.concl gl) with | Constr.App (eq, [| _; lhs; _ |]) -> (match kind sigma lhs with | Constr.App (fn, args) -> (match kind sigma fn with | Constr.Fix ((indexes, p), decls) -> let fixarg = args.(indexes.(p)) in (match kind sigma fixarg with | Constr.Var id -> depelim_tac id | _ -> tclUNIT ()) | _ -> tclUNIT ()) | _ -> tclUNIT ()) | _ -> tclUNIT ()) let build_equations ~pm with_ind env evd ?(alias:alias option) rec_info progs = let () = if !Equations_common.debug then let open Pp in let msg = Feedback.msg_debug in msg (str"Definining principles of: " ++ prlist_with_sep fnl (fun (p, unfp, prog, eqninfo) -> pr_splitting ~verbose:true env evd p.program_splitting ++ fnl () ++ (match unfp with | Some unf -> str "and " ++ pr_splitting env evd unf.program_splitting | None -> mt ())) progs) in let env = Global.env () in let p, unfp, prog, eqninfo = List.hd progs in let user_obls = List.fold_left (fun acc (p, unfp, prog, eqninfo) -> Id.Set.union prog.program_split_info.user_obls acc) Id.Set.empty progs in let { equations_id = id; equations_where_map = wheremap; equations_f = f } = eqninfo in let info = prog.program_split_info in let sign = program_sign p in let cst = prog.program_cst in let ocst, _ = destConst evd p.program_term in let comps = all_computations env evd alias progs in let protos = List.map fst comps in let lenprotos = List.length protos in let subst_obls = CList.map_filter (fun ((f',filter), alias, _, sign, _, _, _, _) -> match alias with | Some ((f, filter'), id, _) -> equations_debug Pp.(fun () -> str"Prototype " ++ Printer.pr_econstr_env env evd f' ++ str"alias: " ++ Printer.pr_econstr_env env evd f); Some (fst (destConst evd (fst (decompose_app evd f)))) | None -> equations_debug Pp.(fun () -> str"Prototype " ++ Printer.pr_econstr_env env evd f' ++ str" no alias "); None) protos in let () = List.iter (fun (_, _, prog, _) -> declare_wf_obligations subst_obls prog.program_split_info) progs in let protos = CList.map_i (fun i ((f',filterf'), alias, path, sign, arity, pats, args, (refine, cut)) -> let f' = Termops.strip_outer_cast evd f' in let f'hd = let ctx, t = decompose_lambda_decls evd f' in fst (decompose_app evd t) in let alias = match alias with | None -> None | Some (f, _, _) -> Some f in (f'hd, (f',filterf'), alias, lenprotos - i, sign, to_constr evd arity)) 1 protos in let evd = ref evd in let poly = is_polymorphic info in let statement i filter (ctx, fl, flalias, pats, ty, f', (refine, cut), c) = let hd, unf = match flalias with | Some ((f', _), unf, _) -> let tac = Proofview.tclBIND (Tacticals.pf_constr_of_global (Nametab.locate (Libnames.qualid_of_ident unf))) Equality.rewriteLR in f', tac | None -> fl, if eq_constr !evd fl f then Tacticals.tclORELSE Tactics.reflexivity (Tacticals.tclTHEN (unfold_constr !evd f) unfold_fix) else Tacticals.tclIDTAC in let comp = applistc hd pats in let body = let nf_beta = Reductionops.nf_beta (push_rel_context ctx env) !evd in let b = match c with | RProgram c -> mkEq env evd ty (nf_beta comp) (nf_beta c) | REmpty (i, _) -> mkApp 
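(* For a regular clause the statement is a propositional equality between the
   function applied to the clause patterns and its right-hand side; for an
   absurd clause it is an [ImpossibleCall] statement instead. *)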
(coq_ImpossibleCall evd, [| ty; nf_beta comp |]) in let body = it_mkProd_or_LetIn b ctx in if !Equations_common.debug then Feedback.msg_debug Pp.(str"Typing equation " ++ Printer.pr_econstr_env env !evd body); let _ = Equations_common.evd_comb1 (Typing.type_of env) evd body in body in let cstr = match c with | RProgram c -> let len = List.length ctx in let hyps, hypslen, c' = abstract_rec_calls !evd user_obls rec_info len protos (Reductionops.nf_beta env !evd c) in let head = let f = mkRel (len + (lenprotos - i) + hypslen) in if cut then f else let fn, args = decompose_app_list !evd (Termops.strip_outer_cast !evd fl) in applistc f (filter_arguments filter (lift_constrs hypslen args)) in let ty = it_mkProd_or_clear !evd (it_mkProd_or_clean env !evd (applistc head (lift_constrs hypslen pats @ [c'])) hyps) ctx in if !Equations_common.debug then Feedback.msg_debug Pp.(str"Typing constructor " ++ Printer.pr_econstr_env env !evd ty); Some ty | REmpty (i, _) -> None in (refine, unf, body, cstr) in let statements i ((f', alias, path, sign, arity, pats, args, refine as fs), c) = let fs, filter = match alias with | Some (f', unf, split) -> (f', None, path, sign, arity, pats, args, refine), snd f' | None -> fs, snd f' in fs, List.map (statement i filter) c in let stmts = CList.map_i statements 0 comps in let ind_stmts = CList.map_i (fun i (f, c) -> i, f, CList.map_i (fun j x -> j, x) 1 c) 0 stmts in let all_stmts = List.concat (List.map (fun (f, c) -> c) stmts) in let fnind_map = ref PathMap.empty in let declare_one_ind (inds, univs, sorts) (i, (f, alias, path, sign, arity, pats, refs, refine), stmts) = let indid = Nameops.add_suffix (path_id path) "_graph" (* (if i == 0 then "_ind" else ("_ind_" ^ string_of_int i)) *) in let indapp = List.rev_map (fun x -> Constr.mkVar (Nameops.Name.get_id (get_name x))) sign in let () = fnind_map := PathMap.add path (indid,indapp) !fnind_map in let constructors = CList.map_filter (fun (_, (_, _, _, n)) -> Option.map (to_constr !evd) n) stmts in let consnames = CList.map_filter (fun (i, (r, _, _, n)) -> Option.map (fun _ -> let suff = (if r != Refine then "_equation_" else "_refinement_") ^ string_of_int i in Nameops.add_suffix indid suff) n) stmts in let merge_universes_of_constr c = Univ.Level.Set.union (snd (EConstr.universes_of_constr !evd c)) in let univs = Univ.Level.Set.union (snd (universes_of_constr !evd arity)) univs in let univs = Context.Rel.(fold_outside (Declaration.fold_constr merge_universes_of_constr) sign ~init:univs) in let univs = List.fold_left (fun univs c -> Univ.Level.Set.union (snd (universes_of_constr !evd (EConstr.of_constr c))) univs) univs constructors in let ind_sort = match Retyping.get_sort_family_of env !evd (it_mkProd_or_LetIn arity sign) with | Sorts.InProp -> (* If the program is producing a proof, then we cannot hope to have its graph in Type in general (it might be case-splitting on non-strict propositions). 
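In that case the graph is placed in Prop as well; otherwise its sort is the
maximum of the sorts of the signature and of the arity, as computed by
[level_of_context].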
*) Sorts.prop | _ -> let ctx = (of_tuple (anonR, None, arity) :: sign) in let signlev = level_of_context env !evd ctx sorts in signlev in let entry = Entries.{ mind_entry_typename = indid; mind_entry_arity = to_constr !evd (it_mkProd_or_LetIn (mkProd (anonR, arity, mkSort (ESorts.make ind_sort))) sign); mind_entry_consnames = consnames; mind_entry_lc = constructors; } in ((entry, sign, arity) :: inds, univs, max_sort ind_sort sorts) in let declare_ind () = let inds, univs, sort = List.fold_left declare_one_ind ([], Univ.Level.Set.empty, Sorts.prop) ind_stmts in let sigma = Evd.restrict_universe_context !evd univs in let sigma = Evd.minimize_universes sigma in (* FIXME: try to implement a sane handling of universe state threading *) let to_constr sigma c = collapse_term_qualities (Evd.evar_universe_context sigma) (EConstr.to_constr sigma c) in let inds = List.rev_map (fun (entry, sign, arity) -> Entries.{ entry with mind_entry_lc = List.map (to_constr sigma) (List.map of_constr entry.mind_entry_lc); mind_entry_arity = to_constr sigma (it_mkProd_or_LetIn (mkProd (anonR, arity, mkSort (ESorts.make sort))) sign) }) inds in let univs, ubinders = Evd.univ_entry ~poly sigma in let uctx = match univs with | UState.Monomorphic_entry ctx -> let () = Global.push_context_set ~strict:true ctx in Entries.Monomorphic_ind_entry | UState.Polymorphic_entry uctx -> Entries.Polymorphic_ind_entry uctx in let inductive = Entries.{ mind_entry_record = None; mind_entry_universes = uctx; mind_entry_private = None; mind_entry_finite = Declarations.Finite; mind_entry_params = []; (* (identifier * local_entry) list; *) mind_entry_inds = inds; mind_entry_variance = None; } in let () = Goptions.set_bool_option_value_gen ~locality:Goptions.OptLocal ["Elimination";"Schemes"] false in let kn = DeclareInd.declare_mutual_inductive_with_eliminations inductive (univs, ubinders) [] in let () = Goptions.set_bool_option_value_gen ~locality:Goptions.OptLocal ["Elimination";"Schemes"] true in let sort = Inductiveops.top_allowed_sort (Global.env()) (kn,0) in let sort_suff = Indrec.elimination_suffix sort in let kn, comb = match inds with | [ind] -> let scheme = Nameops.add_suffix ind.Entries.mind_entry_typename sort_suff in let mutual = (CList.map_i (fun i ind -> let id = CAst.make @@ scheme in (id, false, (kn, i), sort)) 0 inds) in Indschemes.do_mutual_induction_scheme (Global.env()) ~force_mutual:true mutual; kn, Smartlocate.global_with_alias (Libnames.qualid_of_ident scheme) | _ -> let mutual = (CList.map_i (fun i ind -> let suff = "_mut" in let id = CAst.make @@ Nameops.add_suffix ind.Entries.mind_entry_typename suff in (id, false, (kn, i), sort)) 0 inds) in Indschemes.do_mutual_induction_scheme (Global.env()) ~force_mutual:true mutual; let scheme = Nameops.add_suffix (Id.of_string info.base_id) ("_graph" ^ sort_suff) in let mutual = List.map2 (fun (i, _, _, _) (_, (_, _, _, _, _, _, _, (kind, cut)), _) -> i, regular_or_nested_rec kind) mutual ind_stmts in let () = Indschemes.do_combined_scheme CAst.(make scheme) (CList.map_filter (fun ({CAst.loc;v=id}, b) -> if b then Some (Nametab.locate_constant (Libnames.qualid_of_ident ?loc id)) else None) mutual) in kn, Smartlocate.global_with_alias (Libnames.qualid_of_ident scheme) in let locality = if Global.sections_are_opened () then Hints.Local else Hints.SuperGlobal in let () = List.iteri (fun i ind -> let constrs = CList.map_i (fun j _ -> Hints.empty_hint_info, true, Hints.hint_globref (GlobRef.ConstructRef ((kn,i),j))) 1 ind.Entries.mind_entry_lc in Hints.add_hints ~locality 
[info.base_id] (Hints.HintsResolveEntry constrs)) inds in let info = { term_info = info; pathmap = !fnind_map; wheremap } in declare_funind ~pm info alias (Global.env ()) evd rec_info protos progs ind_stmts all_stmts sign inds kn comb sort f p.program_splitting in let () = evd := Evd.minimize_universes !evd in let () = if not poly then (* Declare the universe context necessary to typecheck the following definitions once and for all. *) (Global.push_context_set ~strict:true (Evd.universe_context_set !evd); evd := Evd.from_env (Global.env ())) else () in let eqns = CArray.map_of_list (fun (_, _, stmts) -> Array.make (List.length stmts) false) ind_stmts in let proof pm (j, (_, alias, path, sign, arity, pats, refs, refine), stmts) = let id = path_id path in let proof (pm : Declare.OblState.t) (i, (r, unf, c, n)) = let ideq = Nameops.add_suffix id ("_equation_" ^ string_of_int i) in let hook { Declare.Hook.S.dref; _ } pm = if n != None then add_rew_rule ~l2r:true ~base:info.base_id dref else (Classes.declare_instance (Global.env()) !evd None Hints.Local dref (* Hints.add_hints ~local:false [info.base_id] *) (* (Hints.HintsExternEntry *) (* (Vernacexpr.{hint_priority = Some 0; hint_pattern = None}, *) (* impossible_call_tac (GlobRef.ConstRef cst))) *)); eqns.(j).(pred i) <- true; if CArray.for_all (CArray.for_all (fun x -> x)) eqns then ( let locality = if Global.sections_are_opened () then Hints.Local else Hints.SuperGlobal in (* From now on, we don't need the reduction behavior of the constant anymore *) Hints.(add_hints ~locality [info.base_id] (HintsTransparencyEntry (HintsReferences [Evaluable.EvalConstRef ocst], false))); Classes.set_typeclass_transparency ~locality [Evaluable.EvalConstRef cst] false; (match alias with | Some ((f, _), _, _) -> let cst' = fst (destConst !evd f) in Hints.(add_hints ~locality [info.base_id] (HintsTransparencyEntry (HintsReferences [Evaluable.EvalConstRef cst'], false))); Global.set_strategy (Conv_oracle.EvalConstRef cst') Conv_oracle.Opaque | None -> ()); Global.set_strategy (Conv_oracle.EvalConstRef cst) Conv_oracle.Opaque; if with_ind then (declare_ind (); pm) else pm) else pm in let tac = let open Tacticals in tclTHENLIST [Tactics.intros; unf; (solve_equation_tac (GlobRef.ConstRef cst)); (if PathMap.is_empty wheremap then Tacticals.tclIDTAC else tclTRY (autorewrites (info.base_id ^ "_where"))); Tactics.reflexivity] in let () = (* Refresh at each equation, accumulating known constraints. 
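In the monomorphic case the evar map is simply reset from the global
environment, whose universe context was extended once and for all before
starting the equation proofs.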
*) if not poly then evd := Evd.from_env (Global.env ()) else () in (* FIXME: try to implement a sane handling of universe state threading *) let c = collapse_term_qualities (Evd.evar_universe_context !evd) (to_constr !evd c) in let cinfo = Declare.CInfo.make ~name:ideq ~typ:c () in let info = Declare.Info.make ~kind:(Decls.IsDefinition info.decl_kind) ~poly () in let obl_hook = Declare.Hook.make_g hook in let pm, _ = Declare.Obls.add_definition ~pm ~cinfo ~info ~obl_hook ~opaque:false ~tactic:tac ~uctx:(Evd.evar_universe_context !evd) [||] in pm in List.fold_left proof pm stmts in List.fold_left proof pm ind_stmts Coq-Equations-1.3.1-8.20/src/principles.mli000066400000000000000000000120201463127417400202370ustar00rootroot00000000000000(** Generation of equations and inductive graph *) open EConstr type statement = constr * types option type statements = statement list type recursive = bool type node_kind = | Regular | Refine | Where | Nested of recursive val pi1 : 'a * 'b * 'c -> 'a val pi2 : 'a * 'b * 'c -> 'b val match_arguments : Evd.evar_map -> constr array -> constr array -> int list val filter_arguments : int list -> 'a list -> 'a list val is_applied_to_structarg : Names.Id.t -> Syntax.rec_type -> int -> bool option val smash_ctx_map : Environ.env -> Evd.evar_map -> Context_map.context_map -> Context_map.context_map * EConstr.t list val subst_protos: Names.Constant.t list -> Names.GlobRef.t -> Hints.hint_term val find_rec_call : Syntax.rec_type -> Evd.evar_map -> (constr * (constr * int list) * (constr * int list) option * int * EConstr.rel_context * Constr.t) list -> Constr.constr -> Constr.constr list -> (int * Constr.t * int list * Constr.rel_context * (Constr.constr list * Constr.constr list * Constr.constr list)) option val abstract_rec_calls : Evd.evar_map -> Names.Id.Set.t -> ?do_subst:bool -> Syntax.rec_type -> int -> (constr * (constr * int list) * (constr * int list) option * int * EConstr.rel_context * Constr.t) list -> constr -> rel_context * int * constr val subst_app :Evd.evar_map -> constr -> (int -> constr -> constr array -> constr) -> constr -> constr val subst_comp_proj : Evd.evar_map -> constr -> constr -> constr -> constr val subst_comp_proj_split : Evd.evar_map -> constr -> constr -> Splitting.splitting -> Splitting.splitting val clear_ind_assums : Environ.env -> Evd.evar_map -> Names.MutInd.t -> Equations_common.rel_context -> Equations_common.rel_context val compute_elim_type : Environ.env -> Equations_common.esigma -> Names.Id.Set.t -> Syntax.rec_type -> (constr * (constr * int list) * (constr * int list) option * int * EConstr.rel_context * Constr.t) list -> Names.MutInd.t -> int -> (int * ((EConstr.constr * int list) * ((EConstr.constr * int list) * Names.Id.t * Splitting.splitting) option * Splitting.path * EConstr.rel_context * EConstr.types * EConstr.constr list * (EConstr.constr * (int * int)) option * (node_kind * bool)) * (int * (bool * unit Proofview.tactic * EConstr.t * EConstr.constr option)) list) list -> (node_kind * 'e * 'f * 'g option) list -> rel_context -> constr -> types -> int * types val replace_vars_context : Evd.evar_map -> Names.Id.t list -> Equations_common.rel_declaration list -> int * Equations_common.rel_declaration list val pr_where : Environ.env -> Evd.evar_map -> Constr.rel_context -> Splitting.where_clause -> Pp.t val where_instance : Splitting.where_clause list -> constr list val arguments : Evd.evar_map -> constr -> constr array val unfold_constr : Evd.evar_map -> constr -> unit Proofview.tactic (** Unfolding lemma tactic *) 
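(** Substitution of recursive prototypes: each program identifier is mapped to
    the optional index of its well-founded recursion argument (None for
    structural recursion) together with the term to substitute for its
    prototype. For illustration only, a hypothetical entry
    [(f_id, (Some 1, f_proto))] would record that the program [f_id] uses
    well-founded recursion (with recursion-argument index 1) and that
    [f_proto] replaces its prototype. *)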
type rec_subst = (Names.Id.t * (int option * EConstr.constr)) list val cut_problem : Evd.evar_map -> rec_subst -> Equations_common.rel_declaration list -> Context_map.context_map val map_proto : Evd.evar_map -> int option -> EConstr.t -> EConstr.t -> EConstr.t val subst_rec : Environ.env -> Evd.evar_map -> Context_map.context_map -> rec_subst -> Context_map.context_map -> Context_map.context_map * Context_map.context_map val subst_rec_programs : Environ.env -> Evd.evar_map -> Splitting.program list -> (EConstr.constr * Names.Id.t * Splitting.splitting) Splitting.PathMap.t * Splitting.program list val unfold_programs : pm:Declare.OblState.t -> Environ.env -> Evd.evar_map ref -> Equations_common.flags -> Syntax.rec_type -> (Splitting.program * Splitting.compiled_program_info) list -> Declare.OblState.t * (Splitting.program * (Splitting.program * Splitting.compiled_program_info) option * Splitting.compiled_program_info * Principles_proofs.equations_info) list type alias val build_equations : pm:Declare.OblState.t -> bool -> Environ.env -> Evd.evar_map -> ?alias:alias -> Syntax.rec_type -> (Splitting.program * Splitting.program option * Splitting.compiled_program_info * Principles_proofs.equations_info) list -> Declare.OblState.t val all_computations : Environ.env -> Evd.evar_map -> ((EConstr.constr * int list) * Names.Id.t * Splitting.splitting) option -> (Splitting.program * Splitting.program option * 'b * Principles_proofs.equations_info) list -> (((EConstr.t * int list) * alias option * Splitting.path * EConstr.rel_context * EConstr.t * EConstr.constr list * (EConstr.constr * (int * int)) option * (node_kind * bool)) * (Equations_common.rel_context * EConstr.t * alias option * EConstr.constr list * EConstr.t * EConstr.t * (node_kind * bool) * Splitting.splitting_rhs) list) list val make_alias : (EConstr.t * Names.Id.t * Splitting.splitting) -> alias val add_rew_rule : l2r:bool -> base:string -> Names.GlobRef.t -> unit Coq-Equations-1.3.1-8.20/src/principles_proofs.ml000066400000000000000000001526761463127417400215040ustar00rootroot00000000000000open Util open Names open Nameops open Context open Constr open Globnames open Pp open List open Libnames open Tactics open Tacticals open Tacmach open Proofview.Notations open EConstr open Equations_common open Printer open Ppconstr open Syntax open Context_map open Splitting open Covering open Vars open Cc_plugin.Cctac type where_map = (constr * Names.Id.t * splitting) PathMap.t type equations_info = { equations_id : Names.Id.t; equations_where_map : where_map; equations_f : EConstr.t; equations_prob : Context_map.context_map } type ind_info = { term_info : term_info; pathmap : (Names.Id.t * Constr.t list) PathMap.t; (* path -> inductive name *) wheremap : where_map } let find_helper_info env sigma info f = try List.find (fun (cst, arg') -> try Environ.QConstant.equal env cst (fst (destConst sigma f)) with DestKO -> false) info.helpers_info with Not_found -> anomaly (str"Helper not found while proving induction lemma.") let simp_transparent_state () = Hints.Hint_db.transparent_state (Hints.searchtable_map "simp") let simpl_star = tclTHEN simpl_in_concl (onAllHyps (fun id -> simpl_in_hyp (id, Locus.InHyp))) let eauto_with_rec ?depth ?(strategy=Class_tactics.Dfs) l = Class_tactics.typeclasses_eauto ~depth ~st:(simp_transparent_state ()) ~strategy (l@["subterm_relation"; "rec_decision"]) let wf_obligations_base info = info.base_id ^ "_wf_obligations" let simp_eqns l = tclREPEAT (tclTHENLIST [Autorewrite.autorewrite tclIDTAC l; tclTRY (eauto_with_rec 
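(* Equation simplification: repeatedly autorewrite with the given bases and
   try to close the remaining side conditions with typeclass eauto on the
   simp bases together with the subterm_relation and rec_decision
   databases. *)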
("simp" :: l))]) let simp_eqns_in clause l = tclREPEAT (tclTHENLIST [Autorewrite.auto_multi_rewrite l clause; tclTRY (eauto_with_rec ("simp" :: l))]) let autorewrites b = tclREPEAT (Autorewrite.autorewrite tclIDTAC [b]) exception RewriteSucceeded of EConstr.t let _rewrite_try_change tac = Proofview.Goal.enter (fun gl -> let concl = Proofview.Goal.concl gl in Proofview.tclORELSE (Proofview.tclTHEN tac (Proofview.Goal.enter (fun gl -> let env = Proofview.Goal.env gl in let sigma = Proofview.Goal.sigma gl in let concl' = Proofview.Goal.concl gl in match Reductionops.infer_conv ~pb:Conversion.CONV env sigma concl concl' with | Some _ -> Proofview.tclZERO (RewriteSucceeded concl') | None -> Proofview.tclUNIT ()))) (function | (RewriteSucceeded concl', _) -> convert_concl ~cast:true ~check:false concl' DEFAULTcast | (exn, info) -> Proofview.tclZERO ~info exn)) let autorewrite_one b = let rew_rules = Autorewrite.find_rewrites b in let rec aux rules = match rules with | [] -> tclFAIL (str"Couldn't rewrite") | r :: rules -> let global, _univs = Constr.destRef (snd @@ Autorewrite.RewRule.rew_lemma r) in let tac = Proofview.tclBIND (pf_constr_of_global global) (if (Autorewrite.RewRule.rew_l2r r) then Equality.rewriteLR else Equality.rewriteRL) in Proofview.tclOR tac (fun e -> if !debug then Feedback.msg_debug (str"failed"); aux rules) in aux rew_rules let revert_last = Proofview.Goal.enter (fun gl -> let hyp = pf_last_hyp gl in Generalize.revert [get_id hyp]) (** fix generalization *) let rec mk_holes env sigma = function | [] -> (sigma, []) | arg :: rem -> let (sigma, arg) = Evarutil.new_evar env sigma arg in let (sigma, rem) = mk_holes env sigma rem in (sigma, arg :: rem) let rec check_mutind env sigma k cl = match EConstr.kind sigma (Termops.strip_outer_cast sigma cl) with | Prod (na, c1, b) -> if Int.equal k 1 then try let ((sp, _), u), _ = Inductiveops.find_inductive env sigma c1 in (sp, u) with Not_found -> error "Cannot do a fixpoint on a non inductive type." 
else check_mutind (push_rel (Context.Rel.Declaration.LocalAssum (na, c1)) env) sigma (pred k) b | LetIn (na, c1, t, b) -> check_mutind (push_rel (Context.Rel.Declaration.LocalDef (na, c1, t)) env) sigma k b | _ -> CErrors.user_err (str"Not enough products in " ++ Printer.pr_econstr_env env sigma cl) open Context.Named.Declaration (* Refine as a fixpoint *) let mutual_fix li l = let open Proofview in let mfix env sigma gls = let gls = List.map Proofview.drop_state gls in let infos = List.map (fun ev -> Evd.find_undefined sigma ev) gls in let types = List.map (fun evi -> Evd.evar_relevance evi, Evd.evar_concl evi) infos in let env = let ctxs = List.map (fun evi -> EConstr.Unsafe.to_named_context @@ Evd.evar_context evi) infos in let fst, rest = List.sep_last ctxs in if List.for_all (fun y -> Context.Named.equal Sorts.relevance_equal Constr.equal fst y) rest then Environ.push_named_context fst env else env in let li = match li with | [] -> List.mapi (fun i ev -> match Evd.evar_ident ev sigma with | Some id -> id | None -> Id.of_string ("fix_" ^ string_of_int i)) gls | l -> List.map Id.of_string l in let () = let lenid = List.length li in let lenidxs = List.length l in let lengoals = List.length types in if not (Int.equal lenid lenidxs && Int.equal lenid lengoals) then CErrors.user_err (str "Cannot apply mutual fixpoint, invalid arguments: " ++ int lenid ++ (str (String.plural lenid " name")) ++ str " " ++ int lenidxs ++ str (if lenidxs == 1 then " index" else " indices") ++ str" and " ++ int lengoals ++ str(String.plural lengoals " subgoal")) in let all = CList.map3 (fun id n ar -> (id,n,ar)) li l types in let (_, n, (_, ar)) = List.hd all in let (sp, u) = check_mutind env sigma n ar in let rec mk_sign sign = function | [] -> sign | (f, n, (r, ar)) :: oth -> let (sp', u') = check_mutind env sigma n ar in if not (Environ.QMutInd.equal env sp sp') then error "Fixpoints should be on the same mutual inductive declaration."; if try ignore (Context.Named.lookup f sign); true with Not_found -> false then CErrors.user_err (str "Name " ++ pr_id f ++ str " already used in the environment"); mk_sign (LocalAssum (make_annot f (ERelevance.kind sigma r), EConstr.to_constr sigma ar) :: sign) oth in let sign = mk_sign (Environ.named_context env) all in let idx = Array.map_of_list pred l in let nas = Array.map_of_list nameR li in let body = ref (fun i -> assert false) in let one_body = Refine.refine ~typecheck:false (fun sigma -> let nenv = Environ.reset_with_named_context (Environ.val_of_named_context sign) env in let types = List.map snd types in let (sigma, evs) = mk_holes nenv sigma types in let evs = Array.map_of_list (Vars.subst_vars sigma (List.rev li)) evs in let types = Array.of_list types in let decl = (nas,types,evs) in let () = body := (fun i -> mkFix ((idx,i),decl)) in sigma, !body 0) in let other_body i = Refine.refine ~typecheck:false (fun sigma -> sigma, !body (succ i)) in tclDISPATCH (one_body :: List.init (Array.length idx - 1) other_body) in tclENV >>= fun env -> tclEVARMAP >>= fun sigma -> Unsafe.tclGETGOALS >>= mfix env sigma let check_guard gls env sigma = let gl = Proofview.drop_state (List.hd gls) in try let EvarInfo evi = Evd.find sigma gl in match Evd.evar_body evi with | Evd.Evar_defined b -> Inductiveops.control_only_guard (Evd.evar_env env evi) sigma b; true | Evd.Evar_empty -> true with Type_errors.TypeError _ -> false let find_helper_arg env sigma info f args = let (cst, arg) = find_helper_info env sigma info f in cst, snd arg, args.(snd arg) let find_splitting_var sigma pats 
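(* [find_splitting_var sigma pats var constrs] recovers the named hypothesis on
   which the case split was performed: it matches the lhs patterns [pats]
   against the arguments [constrs] of the induction predicate in the goal and
   returns the goal variable aligned with pattern variable [var]. *)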
var constrs = let rec find_pat_var p c = match p, decompose_app_list sigma c with | PRel i, (c, l) when i = var -> Some (destVar sigma c) | PCstr (c, ps), (f,l) -> aux ps l | _, _ -> None and aux pats constrs = assert(List.length pats = List.length constrs); List.fold_left2 (fun acc p c -> match acc with None -> find_pat_var p c | _ -> acc) None pats constrs in Option.get (aux (rev pats) constrs) let rec intros_reducing () = Proofview.Goal.enter (fun gl -> let concl = Proofview.Goal.concl gl in match kind (Proofview.Goal.sigma gl) concl with | LetIn (_, _, _, _) -> tclTHEN hnf_in_concl (intros_reducing ()) | Prod (_, _, _) -> tclTHEN intro (intros_reducing ()) | _ -> tclIDTAC) let cstrtac = tclTHENLIST [any_constructor false None] let destSplit = function | Split (_, _, _, splits) -> Some splits | _ -> None let destRefined = function | Refined (_, _, s) -> Some s | _ -> None let destWheres = function | Compute (ctx, wheres, _, _) -> Some (ctx, wheres) | _ -> None let map_opt_split f s = match s with | None -> None | Some s -> f s let solve_ind_rec_tac info = observe "solve_ind_rec_tac" (eauto_with_rec ~depth:20 ~strategy:Class_tactics.Bfs [info.base_id; wf_obligations_base info]) let change_in_app f args idx arg = let args' = Array.copy args in args'.(idx) <- arg; mkApp (f, args') let hyps_after sigma env pos args = let open Context.Named.Declaration in List.fold_left (fun acc d -> Id.Set.add (get_id d) acc) Id.Set.empty env let simpl_of csts = let opacify () = List.iter (fun (cst,_) -> Global.set_strategy (Conv_oracle.EvalConstRef cst) Conv_oracle.Opaque) csts and transp () = List.iter (fun (cst, level) -> Global.set_strategy (Conv_oracle.EvalConstRef cst) level) csts in opacify, transp let gather_subst env sigma ty args len = let rec aux ty args n = if n = 0 then [] else match kind sigma ty, args with | Prod (_, _, ty), a :: args -> a :: aux (subst1 a ty) args (pred n) | LetIn (_, b, _, ty), args -> b :: aux (subst1 b ty) args (pred n) | _ -> assert false in aux ty (Array.to_list args) len let annot_of_rec r = match r.struct_rec_arg with | MutualOn (Some (i, _)) -> Some (i + 1) | MutualOn None -> assert false | NestedOn (Some (i, _)) -> Some (i + 1) | NestedOn None -> Some 1 | _ -> None let tclTHEN_i tac k = tac <*> Proofview.numgoals >>= fun n -> Proofview.tclDISPATCH (CList.init n (fun i -> k (i + 1))) let local_tclTHEN_i = tclTHEN_i let aux_ind_fun info chop nested unfp unfids p = let rec solve_nested () = let open Proofview.Goal in Proofview.Goal.enter (fun gl -> let sigma = sigma gl in let concl = concl gl in let nested_goal = match kind sigma concl with | App (ind, args) -> let last = Array.last args in let hd, args = decompose_app sigma last in (try let fn, args = destConst sigma hd in let fnid = Label.to_id (Constant.label fn) in Some (CList.find (fun (p, _, _) -> Id.equal p fnid) nested) with DestKO | Not_found -> None) | _ -> None in match nested_goal with | Some (p, ty, prog) -> let fixtac = match prog.program_rec with | Some { rec_node = StructRec sr; rec_args } -> tclTHENLIST [fix prog.program_info.program_id (Option.default 1 (annot_of_rec sr)); tclDO rec_args intro] | _ -> Proofview.tclUNIT () in let program_tac = tclTHEN fixtac (aux chop None [] prog.program_splitting) in let ty = EConstr.of_constr @@ collapse_term_qualities (Evd.evar_universe_context sigma) (EConstr.to_constr sigma ty) in tclTHEN (assert_by (Name (program_id prog)) ty program_tac) (observe "solving nested premises of compute rule" (solve_ind_rec_tac info.term_info)) | None -> Proofview.tclUNIT ()) and 
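(* [aux_program] proves the induction subgoal for one program: it first
   installs the fixpoint or well-founded recursion structure prescribed by
   [program_rec], if any, then follows the splitting tree with [aux],
   mirroring an optional unfolded version [unfp] of the program. *)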
aux_program lctx chop unfp unfids porig p = let unfs = Option.map (fun s -> s.program_splitting) unfp in match p.program_rec with | None -> let is_rec, fixtac = match porig with | Some { Syntax.program_rec = Some (Structural ann) } -> (let idx = match ann with | NestedOn None -> Some 0 | NestedNonRec -> None | MutualOn None -> assert false | NestedOn (Some (idx, _)) | MutualOn (Some (idx, _)) -> Some idx in match idx with | None -> false, intros | Some idx -> let recid = add_suffix p.program_info.program_id "_rec" in (* The recursive argument is local to the where, shift it by the length of the enclosing context *) let newidx = match unfs with None -> idx | Some _ -> idx in true, observe "struct fix norec" (tclTHENLIST [(* unftac false; *) fix recid (succ newidx); intros (* unftac true *)])) | _ -> false, intros in tclTHEN fixtac (aux (fst chop, if is_rec then succ (snd chop) else snd chop) unfs unfids p.program_splitting) | Some t -> let cs = p.program_splitting in let ctx = t.rec_lets in let refine = let open Proofview.Goal in enter (fun gl -> let env = Proofview.Goal.env gl in let sigma = ref (Proofview.Goal.sigma gl) in match t with | { rec_node = WfRec r } -> let inctx, concl = decompose_prod_n_decls !sigma t.rec_args (concl gl) in Refine.refine ~typecheck:false (fun sigma -> let evd = ref sigma in let sort = Retyping.get_sort_of env sigma concl in let hd, args = decompose_app sigma concl in let subst = gather_subst env sigma (Retyping.get_type_of env sigma hd) args (List.length ctx) in let arity, arg, rel = let arg = substl (List.rev subst) r.wf_rec_arg in let term = (applistc arg (extended_rel_list 0 inctx)) in (* Feedback.msg_debug (str"Typing:" ++ Printer.pr_econstr_env (push_rel_context inctx env) sigma term ++ * str " in context " ++ pr_context env sigma inctx ++ str "subst " ++ * prlist_with_sep (fun () -> str " ") (Printer.pr_econstr_env env sigma) subst * ); *) let envsign = push_rel_context inctx env in let sigma, arity = Typing.type_of envsign sigma term in let ty = Reductionops.nf_all envsign sigma arity in let arity = if noccur_between sigma 1 (length inctx) ty then lift (- length inctx) ty else assert false in arity, arg, r.wf_rec_rel in let _functional_type, functional_type, fix = Covering.wf_fix_constr env evd inctx concl (ESorts.kind !evd sort) arity arg rel in (* TODO solve WellFounded evar *) let sigma, evar = new_evar env !evd functional_type in (sigma, mkApp (fix, [| evar |]))) | { rec_node = StructRec r } -> let fixtac = let idx = match r.struct_rec_arg with | NestedOn None -> Some 0 | NestedNonRec -> None | MutualOn None -> assert false | NestedOn (Some (idx, _)) | MutualOn (Some (idx, _)) -> Some idx in match idx with | None -> intros | Some idx -> let recid = add_suffix p.program_info.program_id "_rec" in (* The recursive argument is local to the where, shift it by the length of the enclosing context *) equations_debug (fun () -> str"Fixpoint on " ++ int idx ++ str " rec args " ++ int t.rec_args ++ str " lctx " ++ int (List.length lctx)); let newidx = match unfs with None -> idx | Some _ -> idx in observe "struct fix" (tclTHENLIST [(* unftac false; *) fix recid (succ newidx); intros (* unftac true *)]) in fixtac) in tclTHENLIST [intros; tclDO t.rec_args revert_last; observe "wf_fix" (tclTHEN refine (tclTHEN intros (aux chop unfs unfids cs)))] and aux chop unfs unfids = function | Split (lhs, var, _, splits) -> let splits = List.map_filter (fun x -> x) (Array.to_list splits) in let unfs_splits = let unfs = map_opt_split destSplit unfs in match unfs with | None 
-> None | Some f -> Some (List.map_filter (fun x -> x) (Array.to_list f)) in (observe "split" (tclTHEN_i (Proofview.Goal.enter (fun gl -> match kind (project gl) (pf_concl gl) with | App (ind, args) -> let pats' = List.drop_last (Array.to_list args) in let pats' = if fst chop < 0 then pats' else snd (List.chop (fst chop) pats') in let pats, var = match unfs with | Some (Split (ctx, var, _, _)) -> filter_def_pats ctx, var | _ -> filter (fun x -> not (hidden x)) (filter_def_pats lhs), var in let id = find_splitting_var (project gl) pats var pats' in Depelim.dependent_elim_tac (None, id) | _ -> tclFAIL (str"Unexpected goal in functional induction proof"))) (fun i -> Proofview.Goal.enter (fun gl -> let split = nth splits (pred i) in let unfsplit = Option.map (fun s -> nth s (pred i)) unfs_splits in aux chop unfsplit unfids split)))) | Refined (lhs, refinfo, s) -> let unfs = map_opt_split destRefined unfs in let id = pi1 refinfo.refined_obj in let elimtac gl = let open Proofview.Goal in let sigma = sigma gl in match kind sigma (concl gl) with | App (ind, args) -> let before, last_arg = CArray.chop (Array.length args - 1) args in let f, fargs = destApp sigma last_arg.(0) in let _, pos, elim = find_helper_arg (env gl) sigma info.term_info f fargs in let id = pf_get_new_id id gl in let hyps = Id.Set.elements (hyps_after sigma (hyps gl) (pos + 1 - snd chop) before) in let occs = Some (List.map (fun h -> (Locus.AllOccurrences, h), Locus.InHyp) hyps) in let occs = Locus.{ onhyps = occs; concl_occs = NoOccurrences } in let newconcl = let fnapp = change_in_app f fargs pos (mkVar id) in let indapp = change_in_app ind before (pos - snd chop) (mkVar id) in mkApp (indapp, [| fnapp |]) in tclTHENLIST [observe "letin" (letin_pat_tac true None (Name id) (Some sigma, elim) occs); observe "convert concl" (convert_concl ~cast:false ~check:false newconcl DEFAULTcast); observe "clear body" (clear_body [id]); aux chop unfs unfids s] | _ -> tclFAIL (str"Unexpected refinement goal in functional induction proof") in (observe "refine" (tclTHENLIST [ intros; tclTHENLAST (tclTHEN (tclTRY (autorewrite_one info.term_info.base_id)) cstrtac) (tclSOLVE [Proofview.Goal.enter elimtac]); (solve_ind_rec_tac info.term_info)])) | Compute (lhs, wheres, _, c) -> let lctx = lhs.src_ctx in let unfctx, unfswheres = let unfs = map_opt_split destWheres unfs in match unfs with | None -> [], List.map (fun _ -> None) wheres | Some (unfctx, wheres) -> unfctx.src_ctx, List.map (fun w -> Some w) wheres in let wheretac = if not (List.is_empty wheres) then let wheretac env evd s unfs (acc, subst) = let wp = s.where_program in let revert, ctx, where_term, fstchop, unfids = match unfs with | None -> let term = where_term s in let sign = wp.program_info.program_sign in let ctxlen = List.length sign - List.length subst in let before, after = List.chop ctxlen sign in let newwhere = substl subst term in let ctx = subst_rel_context 0 subst before in if !Equations_common.debug then Feedback.msg_debug (str" where " ++ str"term: " ++ pr_econstr_env env evd (where_term s) ++ str " subst " ++ prlist_with_sep spc (Printer.pr_econstr_env env evd) subst ++ str " final term " ++ pr_econstr_env env evd newwhere ++ str "context " ++ pr_context env evd sign); 0, ctx, newwhere, fst chop (* + List.length ctx *), unfids | Some w -> let assoc, unf, split = try PathMap.find w.where_path info.wheremap with Not_found -> assert false in if !Equations_common.debug then Feedback.msg_debug (str"Unfolded where " ++ str"term: " ++ pr_econstr_env env evd (where_term w) ++ str" 
type: " ++ pr_econstr_env env evd w.where_type ++ str" assoc " ++ pr_econstr_env env evd assoc); let unfwp = w.where_program in let ctxlen = List.length unfwp.program_info.program_sign - List.length unfctx in let before, after = List.chop ctxlen unfwp.program_info.program_sign in let subst = if not (List.length subst >= List.length after) then anomaly (str"Mismatch between hypotheses in named context and program") else subst in let newwhere = substl subst (where_term w) in let ctx = subst_rel_context 0 subst before in if !Equations_common.debug then Feedback.msg_debug (str"Unfolded where substitution: " ++ prlist_with_sep spc (Printer.pr_econstr_env env evd) subst ++ str"New where term" ++ Printer.pr_econstr_env env evd newwhere ++ str" context map " ++ pr_context env Evd.empty ctx); 0, ctx, newwhere, -1 (* + List.length ctx *), unf :: unfids in let chop = fstchop, snd chop in let wheretac = observe "one where" (tclTHENLIST [ tclDO revert revert_last; observe "moving section id" (tclTRY (move_hyp coq_end_of_section_id Logic.MoveLast)); (aux_program lctx chop (Option.map (fun s -> s.where_program) unfs) unfids (Some s.where_program_orig) s.where_program)]) in let wherepath, _args = try PathMap.find s.where_path info.pathmap with Not_found -> error "Couldn't find associated args of where" in if !debug then (let env = Global.env () in Feedback.msg_debug (str"Found path " ++ str (Id.to_string wherepath) ++ str" where: " ++ pr_id (where_id s) ++ str"term: " ++ Printer.pr_econstr_env env Evd.empty where_term ++ str" context map " ++ pr_context_map env Evd.empty (id_subst ctx))); let ind = Nametab.locate (qualid_of_ident wherepath) in let ty ind = let hd, args = decompose_app_list Evd.empty where_term in let indargs = List.filter (fun x -> isVar Evd.empty x) args in let rels = extended_rel_list 0 ctx in let indargs = List.append indargs rels in let app = applistc ind (List.append indargs [applistc where_term rels]) in let ty = it_mkProd_or_LetIn app ctx in let ty = EConstr.of_constr @@ collapse_term_qualities UState.empty (EConstr.Unsafe.to_constr ty) in ty in let tac = tclTHEN acc (Proofview.tclBIND (pf_constr_of_global ind) (fun ind -> if !debug then (let env = Global.env () in Feedback.msg_debug (str"Type of induction principle for " ++ str (Id.to_string (where_id s)) ++ str": " ++ Printer.pr_econstr_env env Evd.empty (ty ind))); assert_by (Name (where_id s)) (ty ind) wheretac)) in (tac, where_term :: subst) in let () = assert (List.length wheres = List.length unfswheres) in let tac = Proofview.Goal.enter (fun gl -> let env = Proofview.Goal.env gl in let sigma = Proofview.Goal.sigma gl in let subst = let concl = Proofview.Goal.concl gl in let hd, args = decompose_app_list sigma concl in let args = drop_last args in let rec collect_vars acc c = let hd, args = decompose_app sigma c in match kind sigma hd with | Var id -> if not (List.mem id acc) then id :: acc else acc | Construct _ -> Array.fold_left collect_vars acc args | _ -> acc in let args_vars = List.fold_left collect_vars [] args in let args_vars = List.filter (fun id -> not (Termops.is_section_variable (Global.env ()) id)) args_vars in List.map mkVar args_vars in let tac, _ = List.fold_right2 (wheretac env sigma) wheres unfswheres (tclIDTAC, subst) in tac) in tclTHENLIST [tac; tclTRY (autorewrite_one info.term_info.base_id)] else tclIDTAC in (match c with | REmpty _ -> observe "compute empty" (tclTHENLIST [intros_reducing (); wheretac; find_empty_tac ()]) | RProgram _ -> observe "compute " (tclTHENLIST [intros_reducing (); tclTRY 
(autorewrite_one info.term_info.base_id); observe "wheretac" wheretac; observe "applying compute rule" cstrtac; (* Each of the recursive calls result in an assumption. If it is a rec call in a where clause to itself we need to explicitely rewrite with the unfolding lemma (as the where clause induction hypothesis is about the unfolding whereas the term itself might mentions the original function. *) tclTHEN Tactics.intros (tclMAP (fun i -> (tclTRY (Proofview.tclBIND (pf_constr_of_global (Equations_common.global_reference i)) Equality.rewriteLR))) unfids); tclORELSE (tclCOMPLETE (observe "solving premises of compute rule" (solve_ind_rec_tac info.term_info))) (observe "solving nested recursive call" (solve_nested ()))])) | Mapping (_, s) -> aux chop unfs unfids s in aux_program [] chop unfp unfids None p let pr_subgoals sigma goals = let open Pp in let pr g = str "[" ++ Printer.Debug.pr_goal g ++ str "]" in str "[" ++ prlist_with_sep fnl pr goals ++ str "]" let observe_tac s tac = let open Proofview in if not !debug then tac else tclENV >>= fun env -> tclEVARMAP >>= fun sigma -> Proofview.Goal.goals >>= fun gls -> Proofview.Monad.List.map (fun gl -> gl) gls >>= fun gls -> Feedback.msg_debug (str"Applying " ++ str s ++ str " on " ++ pr_subgoals sigma gls); Proofview.tclORELSE (Proofview.tclTHEN tac (Proofview.numgoals >>= fun gls -> if gls = 0 then (Feedback.msg_debug (str s ++ str "succeeded"); Proofview.tclUNIT ()) else Proofview.Goal.enter begin fun gl -> let () = Feedback.msg_debug (str "Subgoal: " ++ Printer.Debug.pr_goal gl) in Proofview.tclUNIT () end)) (fun iexn -> Feedback.msg_debug (str"Failed with: " ++ (match fst iexn with | Tacticals.FailError (n,expl) -> (str" Fail error " ++ int n ++ str " for " ++ str s ++ spc () ++ Lazy.force expl ++ str " on " ++ pr_subgoals sigma gls) | _ -> CErrors.iprint iexn)); Proofview.tclUNIT ()) exception NotGuarded let check_guard tac = let open Proofview in Unsafe.tclGETGOALS >>= (fun gls -> tac >>= (fun () -> tclENV >>= fun env -> tclEVARMAP >>= (fun sigma -> if check_guard gls env sigma then tclUNIT () else tclZERO NotGuarded))) let ind_fun_tac is_rec f info fid nestedinfo progs = let open Proofview in match is_rec with | Some (Guarded l) :: context -> let mutual, nested = List.partition (function (_, MutualOn _) -> true | _ -> false) l in let mutannots = List.map (function (_, MutualOn (Some (ann, _))) -> ann + 1 | _ -> -1) mutual in let mutprogs, nestedprogs = List.partition (fun (p,_,_,e) -> match p.program_info.Syntax.program_rec with | Some (Structural (MutualOn _)) -> true | _ -> false) progs in let eauto = Class_tactics.typeclasses_eauto ["funelim"; info.term_info.base_id] in let rec splits l = match l with | [] | _ :: [] -> tclUNIT () | _ :: l -> Tactics.split Tactypes.NoBindings <*> tclDISPATCH [tclUNIT (); splits l] in let prove_progs progs = intros <*> tclDISPATCH (List.map (fun (p,_unfp,cpi,e) -> (* observe_tac "proving one mutual " *) let proginfo = { info with term_info = { info.term_info with helpers_info = info.term_info.helpers_info @ cpi.program_split_info.helpers_info } } in (aux_ind_fun proginfo (0, List.length l) nestedinfo None [] { p with program_rec = None })) progs) in let prove_nested = tclDISPATCH (List.map (function (id,NestedOn (Some (ann,_))) -> fix id (ann + 1) | (id,NestedOn None) -> fix id 1 | _ -> tclUNIT ()) nested) <*> prove_progs nestedprogs in let try_induction () = match mutannots with | [n] -> (* Try using regular induction instead *) let _ = if !Equations_common.debug then Feedback.msg_debug (str "Proof of 
mutual induction principle is not guarded, trying induction") in let splits = match progs with | [(p, _, _, e)] -> (match p.program_splitting with | Split (_, _, _, splits) -> Some (p, CList.map_filter (fun x -> x) (Array.to_list splits)) | _ -> None) | _ -> None in (match splits with | Some (p, s) -> observe "induction" (tclDISPATCH [tclDO n intro <*> observe "induction on last var" (onLastDecl (fun decl -> Equations_common.depind_tac (Context.Named.Declaration.get_id decl) <*> intros <*> specialize_mutfix_tac () <*> tclDISPATCH (List.map (fun split -> aux_ind_fun info (0, 1) nestedinfo None [] { p with program_rec = None; program_splitting = split }) s)))]) | None -> tclZERO NotGuarded) | _ -> tclZERO NotGuarded in let mutfix = let tac = mutual_fix [] mutannots <*> specialize_mutfix_tac () <*> prove_progs mutprogs in tclORELSE (if List.length nested > 0 then tac else check_guard tac) (fun (e, einfo) -> match e with | NotGuarded -> tclORELSE (check_guard (try_induction ())) (fun (e, einfo) -> match e with | NotGuarded -> Feedback.msg_info (str "Proof of mutual induction principle is not guarded " ++ str"and cannot be proven by induction"); tclIDTAC | _ -> tclZERO ~info:einfo e) | _ -> tclZERO ~info:einfo e) in let mutlen = List.length mutprogs in let tac gl = let mutprops, nestedprops = let rec aux concl i = match kind (Goal.sigma gl) concl with | App (conj, [| a; b |]) -> if i == 1 then a, Some b else let muts, nested = aux b (pred i) in mkApp (conj, [| a ; muts |]), nested | _ -> if i == 1 then concl, None else assert false in aux (Goal.concl gl) mutlen in set_eos_tac () <*> (match nestedprops with | Some p -> assert_before Anonymous (mkProd (anonR, mutprops, p)) <*> tclDISPATCH [observe_tac "assert mut -> nest first subgoal " (* observe_tac *) (* "proving mut -> nested" *) (intro <*> observe_tac "splitting nested" (splits nestedprogs) <*> prove_nested); tclUNIT ()] | None -> tclUNIT ()) <*> assert_before Anonymous mutprops <*> tclDISPATCH [observe_tac "mutfix" (splits mutprogs <*> tclFOCUS 1 (List.length mutual) mutfix); tclUNIT ()] <*> (* On the rest of the goals, do the nested proofs *) observe_tac "after mut -> nested and mut provable" (eauto ~depth:None) in Proofview.Goal.enter (fun gl -> tac gl) | _ -> let helpercsts = List.map (fun (cst, i) -> cst) info.term_info.helpers_info in let opacify, transp = simpl_of (List.map (fun x -> x, Conv_oracle.Expand) (fst (Constr.destConst f) :: helpercsts)) in let p, unfp = match progs with | [p, unfp, cpi, ei] -> p, unfp | _ -> assert false in opacify (); let tac = Proofview.tclBIND (tclCOMPLETE (tclTHENLIST [set_eos_tac (); intros; aux_ind_fun info (0, 0) nestedinfo unfp [] p])) (fun r -> transp (); Proofview.tclUNIT r) in tclORELSE (check_guard tac) (fun (e, einfo) -> match e with | NotGuarded -> Feedback.msg_info (str "Proof of mutual induction principle is not guarded " ++ str" and cannot be proven by induction. 
Consider switching to well-founded recursion."); tclUNIT () | _ -> tclZERO ~info:einfo e) let ind_fun_tac is_rec f info fid nested progs = Proofview.tclORELSE (ind_fun_tac is_rec f info fid nested progs) (fun e -> match fst e with | Pretype_errors.PretypeError (env, evd, err) -> Feedback.msg_warning (Himsg.explain_pretype_error env evd err); Exninfo.iraise e | _ -> Exninfo.iraise e) let is_primitive env evd ctx var = let decl = List.nth ctx var in let indf, _ = find_rectype env evd (Context.Rel.Declaration.get_type decl) in let (ind,_), _ = dest_ind_family indf in let mspec = Inductive.lookup_mind_specif env ind in Inductive.is_primitive_record mspec let myreplace_by a1 a2 tac = Proofview.Goal.enter (fun gl -> let env = Proofview.Goal.env gl in let sigma = Proofview.Goal.sigma gl in if eq_constr sigma a1 a2 then Proofview.tclUNIT () else let ty = Retyping.get_type_of env sigma a1 in let sigma, eq = get_fresh sigma logic_eq_type in let eqty = mkApp (eq, [| ty; a1; a2 |]) in let sigma, _ = Typing.type_of env sigma eqty in let na = pf_get_new_id (Id.of_string "Heq") gl in Proofview.Unsafe.tclEVARS sigma <*> Tactics.assert_by (Name na) eqty tac <*> Equality.rewriteLR (mkVar na) <*> Tactics.clear [na]) let headcst sigma f = let f, _ = decompose_app sigma f in if isConst sigma f then fst (destConst sigma f) else assert false (* FIXME: stop messing with the global environment *) let wrap tac before after = Proofview.tclUNIT () >>= fun () -> let () = before () in Proofview.Unsafe.tclSETENV (Global.env ()) >>= fun () -> tac >>= fun () -> let () = after () in Proofview.Unsafe.tclSETENV (Global.env ()) let solve_rec_eq simpltac subst = Proofview.Goal.enter begin fun gl -> let env = Proofview.Goal.env gl in match kind (project gl) (pf_concl gl) with | App (eq, [| ty; x; y |]) -> let sigma = project gl in let xf, _ = decompose_app sigma x and yf, _ = decompose_app sigma y in (try let f_cst, funf_cst = List.find (fun (f_cst, funf_cst) -> is_global env sigma (GlobRef.ConstRef f_cst) xf && is_global env sigma (GlobRef.ConstRef funf_cst) yf) subst in let unfolds = unfold_in_concl [((Locus.OnlyOccurrences [1]), Evaluable.EvalConstRef f_cst); ((Locus.OnlyOccurrences [1]), Evaluable.EvalConstRef funf_cst)] in tclTHENLIST [unfolds; simpltac; pi_tac ()] with Not_found -> tclORELSE reflexivity (congruence_tac 10 [])) | _ -> reflexivity end type unfold_subst = (Constant.t * Constant.t) list * Splitting.program * Splitting.program type unfold_trace = | UnfSplit of unfold_trace list | UnfRefined of refined_node * unfold_trace | UnfComputeProgram of (Splitting.program * Splitting.program * EConstr.t * Id.t) list * EConstr.rel_context | UnfComputeEmpty of Id.t type reckind = | RecWfPlain of unfold_trace | RecWfWithFunext | RecStruct of (int * int) option let compute_unfold_trace env sigma where_map split unfold_split = let rec aux split unfold_split = match split, unfold_split with | Split (_, _, _, splits), Split (lhs, var, _, unfsplits) -> let ctx = lhs.src_ctx in if is_primitive env sigma ctx (pred var) then aux (Option.get (Array.hd splits)) (Option.get (Array.hd unfsplits)) else let splits = List.map_filter (fun x -> x) (Array.to_list splits) in let unfsplits = List.map_filter (fun x -> x) (Array.to_list unfsplits) in let trace = List.map2 (fun split unfsplit -> aux split unfsplit) splits unfsplits in UnfSplit trace | _, Mapping (lhs, s) -> aux split s | Refined (_, _, s), Refined (lhs, refinfo, unfs) -> UnfRefined (refinfo, aux s unfs) | Compute (_, wheres, _, RProgram _), Compute (lhs, unfwheres, _, RProgram 
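(* both splittings end in a defining computation: each where clause of the
   original program is paired with its unfolded counterpart through
   [where_map] below *)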
_) -> let () = assert (List.length wheres = List.length unfwheres) in let map w unfw = let assoc, id, _ = try PathMap.find unfw.where_path where_map with Not_found -> assert false in let wp = w.where_program in let unfwp = unfw.where_program in (wp, unfwp, assoc, id) in let data = List.map2 map wheres unfwheres in UnfComputeProgram (data, lhs.src_ctx) | Compute (_, _, _, _), Compute (lhs, _, _, REmpty (id, sp)) -> let d = nth lhs.src_ctx (pred id) in let id = Name.get_id (get_name d) in UnfComputeEmpty id | _, _ -> assert false in aux split unfold_split let get_program_reckind env sigma where_map p = match p.program_rec with | None -> None | Some r -> let k = match r with | { rec_node = WfRec _ } -> if !Equations_common.equations_with_funext then RecWfWithFunext else let trace = compute_unfold_trace env sigma where_map p.program_splitting p.program_splitting in RecWfPlain trace | { rec_node = StructRec sr } -> match annot_of_rec sr with | Some annot -> RecStruct (Some (r.rec_args, annot)) | None -> RecStruct None in Some k let extract_subprogram_trace env sigma where_map trace = let rec aux subst accu trace = match trace with | UnfSplit traces -> List.fold_left (fun accu trace -> aux subst accu trace) accu traces | UnfRefined (_, trace) -> aux subst accu trace | UnfComputeEmpty _ -> accu | UnfComputeProgram (data, lctx) -> let fold accu (wp, unfwp, assoc, id) = let kind = get_program_reckind env sigma where_map wp in let nsubst, etrace = match kind with | None -> subst, None | Some kind -> let f_cst = headcst sigma wp.program_term in let funf_cst = headcst sigma unfwp.program_term in let etrace = match kind with | RecWfPlain etrace -> Some etrace | RecWfWithFunext | RecStruct _ -> None in (f_cst, funf_cst) :: subst, etrace in let ntrace = compute_unfold_trace env sigma where_map wp.program_splitting unfwp.program_splitting in let accu = match etrace with None -> accu | Some etrace -> aux nsubst accu etrace in let accu = aux nsubst accu ntrace in let evd = ref sigma in let ty = let ctx = unfwp.program_info.program_sign in let len = List.length ctx - List.length lctx in let newctx, oldctx = List.chop len ctx in let lhs = mkApp (lift len assoc, extended_rel_vect 0 newctx) in let rhs = mkApp (unfwp.program_term, extended_rel_vect 0 ctx) in let eq = mkEq env evd unfwp.program_info.program_arity lhs rhs in it_mkProd_or_LetIn eq ctx in let uctx = Evd.evar_universe_context !evd in (id, (subst, wp, unfwp), uctx, ty) :: accu in List.fold_left fold accu data in List.rev (aux [] [] trace) let extract_subprograms env sigma where_map p unfp = let trace = compute_unfold_trace env sigma where_map p.program_splitting unfp.program_splitting in extract_subprogram_trace env sigma where_map trace let prove_unfolding info where_map f_cst funf_cst subst base unfold_base trace = let depelim h = Depelim.dependent_elim_tac (None, h) (* depelim_tac h *) in let helpercsts = List.map (fun (cst, i) -> cst) info.helpers_info in let opacify, transp = simpl_of ((destConstRef (Lazy.force coq_hidebody), Conv_oracle.transparent) :: List.map (fun x -> x, Conv_oracle.Expand) (f_cst :: funf_cst :: helpercsts)) in let opacified tac = wrap tac opacify transp in let transparent tac = wrap tac transp opacify in let simpltac = opacified (simpl_equations_tac ()) in let unfolds base base' = tclTHEN (autounfold_heads [base] [base'] None) (Tactics.reduct_in_concl ~cast:false ~check:false ((Reductionops.clos_norm_flags RedFlags.betazeta), DEFAULTcast)) in let solve_rec_eq subst = solve_rec_eq simpltac subst in let solve_eq subst = 
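(* an equation goal is closed either by reflexivity, with the function and its
   helpers kept transparent, or, when it relates a recursive call to the
   unfolded function, by [solve_rec_eq] *)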
observe "solve_eq" (tclORELSE (transparent reflexivity) (solve_rec_eq subst)) in let abstract tac = (* Abstract.tclABSTRACT None *) tac in let rec aux trace = match trace with | UnfSplit traces -> observe "split" (Proofview.Goal.enter (fun gl -> match kind (project gl) (pf_concl gl) with | App (eq, [| ty; x; y |]) -> let sigma = project gl in let f, pats' = decompose_app sigma y in let c, unfolds = let _, _, _, _, _, c, _ = destCase sigma f in c, tclIDTAC in let id = destVar sigma (fst (decompose_app sigma c)) in let k i = nth traces (pred i) in abstract (local_tclTHEN_i (depelim id) (fun i -> (tclTHENLIST [unfolds; simpltac; aux (k i)]))) | _ -> tclFAIL (str"Unexpected unfolding goal"))) | UnfRefined (refinfo, trace) -> let id = pi1 refinfo.refined_obj in let rec reftac () = Proofview.Goal.enter begin fun gl -> match kind (project gl) (pf_concl gl) with | App (f, [| ty; term1; term2 |]) -> let sigma = project gl in let cst, _ = destConst sigma (fst (decompose_app sigma refinfo.refined_term)) in let f1, arg1 = destApp sigma term1 and f2, arg2 = destApp sigma term2 in let _, posa1, a1 = find_helper_arg (pf_env gl) sigma info f1 arg1 and ev2, posa2, a2 = find_helper_arg (pf_env gl) sigma info f2 arg2 in let id = pf_get_new_id id gl in if Environ.QConstant.equal (pf_env gl) ev2 cst then tclTHENLIST [myreplace_by a1 a2 (tclTHENLIST [solve_eq subst]); observe "refine after replace" (letin_tac None (Name id) a2 None Locusops.allHypsAndConcl); clear_body [id]; observe "unfoldings" (unfolds base unfold_base); aux trace] else tclTHENLIST [unfolds base unfold_base; simpltac; reftac ()] | _ -> tclFAIL (str"Unexpected unfolding lemma goal") end in let reftac = observe "refined" (reftac ()) in abstract (tclTHENLIST [intros; simpltac; reftac]) | UnfComputeProgram (data, lctx) -> let wheretac acc (wp, unfwp, assoc, id) = match Nametab.locate_constant (Libnames.qualid_of_ident id) with | cst -> Proofview.Goal.enter (fun gl -> let env = pf_env gl in let evd = ref (project gl) in (* let () = Feedback.msg_debug (str"Unfold where assoc: " ++ * Printer.pr_econstr_env env !evd assoc) in * let () = Feedback.msg_debug (str"Unfold where problem: " ++ * pr_context_map env !evd wp.program_prob) in * let () = Feedback.msg_debug (str"Unfold where problem: " ++ * pr_context_map env !evd unfwp.program_prob) in *) (* let _ = Typing.type_of env !evd ty in *) let cst = Equations_common.evd_comb1 (Evd.fresh_constant_instance env) evd cst in let cst = EConstr.of_constr (Constr.mkConstU cst) in let ty = Retyping.get_type_of env !evd cst in let tac = assert_by (Name id) ty (tclTHEN (keep []) (Tactics.exact_check cst)) in tclTHENLIST [Proofview.Unsafe.tclEVARS !evd; tac; Equality.rewriteLR (mkVar id); acc]) | exception Not_found -> tclFAIL (Pp.str "Missing subproof " ++ Id.print id) in let wheretacs = List.fold_left wheretac tclIDTAC data in observe "compute" (tclTHENLIST [intros; wheretacs; observe "compute rhs" (tclTRY (unfolds base unfold_base)); simpltac; solve_eq subst]) | UnfComputeEmpty id -> abstract (depelim id) in aux trace let prove_unfolding_lemma_aux info where_map my_simpl subst p unfp = Proofview.Goal.enter begin fun gl -> let env = Proofview.Goal.env gl in let sigma = Proofview.Goal.sigma gl in let f_cst = headcst sigma p.program_term and funf_cst = headcst sigma unfp.program_term in let unfolds = tclTHENLIST [unfold_in_concl [Locus.OnlyOccurrences [1], Evaluable.EvalConstRef f_cst; (Locus.OnlyOccurrences [1], Evaluable.EvalConstRef funf_cst)]; my_simpl] in let set_opaque () = Global.set_strategy 
(Conv_oracle.EvalConstRef f_cst) Conv_oracle.Opaque; Global.set_strategy (Conv_oracle.EvalConstRef funf_cst) Conv_oracle.Opaque; in let kind = get_program_reckind env sigma where_map p in let subst, fixtac, extgl = match kind with | None -> subst, unfolds, None | Some kind -> let fixtac, extgl = match kind with | RecWfPlain etrace -> tclTHENLIST [unfolds; unfold_recursor_tac ()], Some etrace | RecWfWithFunext -> tclTHENLIST [unfolds; unfold_recursor_ext_tac ()], None | RecStruct annot -> match annot with | Some (rec_args, annot) -> tclTHENLIST [tclDO rec_args revert_last; observe "mutfix" (mutual_fix [] [annot]); tclDO rec_args intro; unfolds], None | None -> Proofview.tclUNIT (), None in ((f_cst, funf_cst) :: subst), fixtac, extgl in (* let self wp unfwp = aux_program subst wp unfwp in *) let trace = compute_unfold_trace env sigma where_map p.program_splitting unfp.program_splitting in tclTHENLIST [observe "program before unfold" intros; begin match extgl with | Some etrace -> (tclTHENFIRST (observe "program fixpoint" fixtac) (tclORELSE (tclSOLVE [Proofview.Goal.enter (fun gl -> set_opaque (); observe "extensionality proof" ((prove_unfolding info where_map f_cst funf_cst subst info.base_id info.base_id etrace)))]) (tclFAIL (Pp.str "Could not prove extensionality automatically")))) | None -> observe "program fixpoint" fixtac end; (Proofview.Goal.enter (fun gl -> set_opaque (); (observe "program" ((prove_unfolding info where_map f_cst funf_cst subst info.base_id (info.base_id ^ "_unfold") trace)))))] end let prove_unfolding_lemma info where_map f_cst funf_cst p unfp = Proofview.Goal.enter begin fun gl -> let helpercsts = List.map (fun (cst, i) -> cst) info.helpers_info in let opacify, transp = simpl_of ((destConstRef (Lazy.force coq_hidebody), Conv_oracle.transparent) :: List.map (fun x -> x, Conv_oracle.Expand) (f_cst :: funf_cst :: helpercsts)) in let opacified tac = wrap tac opacify transp in let my_simpl = opacified simpl_in_concl in Proofview.tclORELSE ( tclTHENLIST [set_eos_tac (); intros; prove_unfolding_lemma_aux info where_map my_simpl [f_cst, funf_cst] p unfp] >>= fun () -> let () = transp () in Proofview.tclUNIT ()) (fun (e, info) -> let () = transp () in Proofview.tclZERO ~info e) end let prove_unfolding_sublemma info where_map f_cst funf_cst (subst, p, unfp) = let helpercsts = List.map (fun (cst, i) -> cst) info.helpers_info in let opacify, transp = simpl_of ((destConstRef (Lazy.force coq_hidebody), Conv_oracle.transparent) :: List.map (fun x -> x, Conv_oracle.Expand) (f_cst :: funf_cst :: helpercsts)) in let opacified tac = wrap tac opacify transp in let my_simpl = opacified simpl_in_concl in prove_unfolding_lemma_aux info where_map my_simpl subst p unfp let prove_unfolding_lemma info where_map f_cst funf_cst p unfp = Proofview.Goal.enter begin fun gl -> let () = if !Equations_common.debug then let open Pp in let msg = Feedback.msg_debug in let env = pf_env gl in let evd = project gl in msg (str"Proving unfolding lemma of: "); msg (pr_splitting ~verbose:true env evd p.program_splitting); msg (fnl () ++ str"and of: " ++ fnl ()); msg (pr_splitting ~verbose:true env evd unfp.program_splitting) in prove_unfolding_lemma info where_map f_cst funf_cst p unfp end (* let rec mk_app_holes env sigma = function *) (* | [] -> (sigma, []) *) (* | decl :: rem -> *) (* let (sigma, arg) = Evarutil.new_evar env sigma (Context.Rel.Declaration.get_type decl) in *) (* let (sigma, rem) = mk_app_holes env sigma (subst_rel_context 0 [arg] rem) in *) (* (sigma, arg :: rem) *) let ind_elim_tac indid 
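(* [ind_elim_tac] applies the derived elimination principle [indid]:
   [applyind] below walks through [inds] leading products or let-ins to
   collect the motives, applies the eliminator to them, and discharges the
   method premises with the "funelim" and [base_id] hint databases; [mutinds]
   determines whether an extra packaging step for mutual functions is needed. *)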
inds mutinds info ind_fun = let open Proofview in let eauto = Class_tactics.typeclasses_eauto ["funelim"; info.base_id] in let prove_methods c = Proofview.Goal.enter (fun gl -> let sigma, _ = Typing.type_of (Goal.env gl) (Goal.sigma gl) c in observe "prove_methods" ( tclTHENLIST [Proofview.Unsafe.tclEVARS sigma; observe "apply eliminator" (Tactics.apply c); Tactics.simpl_in_concl; observe "solve methods" (eauto ~depth:None)])) in let rec applyind leninds args = Proofview.Goal.enter (fun gl -> let env = Goal.env gl in let sigma = Goal.sigma gl in match leninds, kind sigma (Goal.concl gl) with | 0, _ -> let app = applistc indid (List.rev args) in let sigma, ty = Typing.type_of env sigma app in if mutinds == 1 then tclTHENLIST [Proofview.Unsafe.tclEVARS sigma; Tactics.simpl_in_concl; Tactics.intros; prove_methods (Reductionops.nf_beta env sigma app)] else let ctx, concl = decompose_prod_decls sigma ty in Proofview.Unsafe.tclEVARS sigma <*> Tactics.simpl_in_concl <*> Tactics.intros <*> Tactics.cut concl <*> tclDISPATCH [tclONCE (Tactics.intro <*> (pf_constr_of_global ind_fun >>= Tactics.pose_proof Anonymous <*> eauto ~depth:None)); tclONCE (Tactics.apply app <*> Tactics.simpl_in_concl <*> eauto ~depth:None)] | _, LetIn (_, b, _, t') -> tclTHENLIST [Tactics.convert_concl ~cast:false ~check:false (subst1 b t') DEFAULTcast; applyind (pred leninds) (b :: args)] | _, Prod (_, _, t') -> tclTHENLIST [Tactics.intro; onLastHypId (fun id -> applyind (pred leninds) (mkVar id :: args))] | _, _ -> assert false) in try observe "applyind" (applyind inds []) with e -> tclFAIL (Pp.str"exception") Coq-Equations-1.3.1-8.20/src/principles_proofs.mli000066400000000000000000000056361463127417400216460ustar00rootroot00000000000000open EConstr type where_map = (constr * Names.Id.t * Splitting.splitting) Splitting.PathMap.t type equations_info = { equations_id : Names.Id.t; equations_where_map : where_map; equations_f : EConstr.t; equations_prob : Context_map.context_map } type ind_info = { term_info : Splitting.term_info; pathmap : (Names.Id.t * Constr.t list) Splitting.PathMap.t; (* path -> inductive name + parameters (de Bruijn) *) wheremap : where_map; } val find_helper_info : Environ.env -> Evd.evar_map -> Splitting.term_info -> EConstr.t -> Names.Constant.t * (int * int) val simpl_star : unit Proofview.tactic val eauto_with_rec : ?depth:Int.t -> ?strategy:Class_tactics.search_strategy -> Hints.hint_db_name list -> unit Proofview.tactic val wf_obligations_base : Splitting.term_info -> string val simp_eqns : Hints.hint_db_name list -> unit Proofview.tactic val simp_eqns_in : Locus.clause -> Hints.hint_db_name list -> unit Proofview.tactic val autorewrites : string -> unit Proofview.tactic val autorewrite_one : string -> unit Proofview.tactic (** The multigoal fix tactic *) val mutual_fix : string list -> int list -> unit Proofview.tactic val find_helper_arg : Environ.env -> Evd.evar_map -> Splitting.term_info -> EConstr.t -> 'a array -> Names.Constant.t * int * 'a val find_splitting_var : Evd.evar_map -> Context_map.pat list -> int -> constr list -> Names.Id.t val intros_reducing : unit -> unit Proofview.tactic val cstrtac : unit Proofview.tactic val destSplit : Splitting.splitting -> Splitting.splitting option array option val destRefined : Splitting.splitting -> Splitting.splitting option val destWheres : Splitting.splitting -> (Context_map.context_map * Splitting.where_clause list) option val map_opt_split : ('a -> 'b option) -> 'a option -> 'b option val solve_ind_rec_tac : Splitting.term_info -> unit 
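(* solves the premises corresponding to recursive calls, searching with the
   program's [base_id] and well-founded obligation hint databases *)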
Proofview.tactic val aux_ind_fun : ind_info -> int * int -> (Names.Id.t * EConstr.types * Splitting.program) list -> Splitting.program option -> Names.Id.t list -> Splitting.program -> unit Proofview.tactic val ind_fun_tac : Syntax.rec_type -> Constr.t -> ind_info -> Names.Id.t -> (Names.Id.t * EConstr.types * Splitting.program) list -> (Splitting.program * Splitting.program option * Splitting.compiled_program_info * 'b) list -> unit Proofview.tactic type unfold_subst val extract_subprograms : Environ.env -> Evd.evar_map -> where_map -> Splitting.program -> Splitting.program -> (Names.Id.t * unfold_subst * UState.t * EConstr.t) list val prove_unfolding_lemma : Splitting.term_info -> where_map -> Names.Constant.t -> Names.Constant.t -> Splitting.program -> Splitting.program -> unit Proofview.tactic val prove_unfolding_sublemma : Splitting.term_info -> where_map -> Names.Constant.t -> Names.Constant.t -> unfold_subst -> unit Proofview.tactic val ind_elim_tac : constr -> int -> int -> Splitting.term_info -> Names.GlobRef.t -> unit Proofview.tactic Coq-Equations-1.3.1-8.20/src/sigma_types.ml000066400000000000000000001145051463127417400202550ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) open Util open Names open Nameops open Constr open Context open Termops open Declarations open Inductiveops open Reductionops open Pp open Evarutil open List open Globnames open Tactics open EConstr open Equations_common let mkConstructG c u = mkConstructU (destConstructRef (Lazy.force c), u) let mkIndG c u = mkIndU (destIndRef (Lazy.force c), u) let mkAppG env evd gr args = let c = e_new_global evd gr in evd_comb1 (fun sigma c -> Typing.checked_appvect env sigma c args) evd c let applistG env evd gr args = mkAppG env evd gr (Array.of_list args) let mkSig env evd (n, c, t) = let args = [| c; mkLambda (n, c, t) |] in mkAppG env evd (Lazy.force coq_sigma) args let constrs_of_coq_sigma env evd t alias = let rec aux env proj c ty = match kind !evd c with | App (f, args) when is_global env !evd (Lazy.force coq_sigmaI) f && Array.length args = 4 -> let ty = Retyping.get_type_of env !evd args.(1) in (match kind !evd ty with | Prod (n, b, t) -> (* sigma is not sort poly (at least for now) *) let p1 = mkProj (Lazy.force coq_pr1, ERelevance.relevant, proj) in let p2 = mkProj (Lazy.force coq_pr2, ERelevance.relevant, proj) in (n, args.(2), p1, args.(0)) :: aux (push_rel (of_tuple (n, None, b)) env) p2 args.(3) t | _ -> raise (Invalid_argument "constrs_of_coq_sigma")) | _ -> [(anonR, c, proj, ty)] in aux env alias t (Retyping.get_type_of env !evd t) let decompose_coq_sigma env sigma t = let s = Lazy.force coq_sigma in match kind sigma t with | App (f, args) when is_global env sigma s f && Array.length args = 2 -> let ind, u = destInd sigma f in Some (u, args.(0), args.(1)) | _ -> None let decompose_indapp sigma f args = match kind sigma f with | Construct ((ind,_),_) | Ind (ind,_) -> let (mib,mip) = Global.lookup_inductive ind in let first = mib.Declarations.mind_nparams_rec in let pars, args = Array.chop first args in mkApp (f, pars), args | _ -> f, args (* let sigT_info = lazy (make_case_info (Global.env ()) (Globnames.destIndRef (Lazy.force sigT).typ) 
LetStyle) *) let telescope_intro env sigma len tele = let rec aux n ty = let ty = Reductionops.whd_all env sigma ty in match kind sigma ty with | App (sigma_ty, [| a; p |]) -> let sigma_ty, u = destInd sigma sigma_ty in let p = match kind sigma p with | Lambda (na, t, b) -> mkLambda (na, t, Reductionops.whd_all (push_rel (Context.Rel.Declaration.LocalAssum (na, t)) env) sigma b) | _ -> p in let sigmaI = mkApp (mkRef (Lazy.force coq_sigmaI, u), [| a; p; mkRel n; aux (pred n) (beta_applist sigma (p, [mkRel n])) |]) in sigmaI | _ -> mkRel n in aux len tele let telescope_of_context env sigma ctx = let sigma, teleinterp = new_global sigma (Lazy.force logic_tele_interp) in let _, u = destConst sigma teleinterp in let rec aux = function | [] -> raise (Invalid_argument "Cannot make telescope out of empty context") | [decl] -> mkApp (mkRef (Lazy.force logic_tele_tip, u), [|get_type decl|]) | d :: tl -> let ty = get_type d in mkApp (mkRef (Lazy.force logic_tele_ext, u), [| ty; mkLambda (get_annot d, ty, aux tl) |]) in let tele = aux (List.rev ctx) in let tele_interp = mkApp (teleinterp, [| tele |]) in (* Infer universe constraints *) let sigma, _ = Typing.type_of env sigma tele_interp in sigma, tele, tele_interp (* Given a context ⊢ Γ, returns - ⊢ Tel(Γ) : Type (iterated Σ-type built out of Γ) - p : Tel(Γ) ⊢ Ctx(Γ) (a context of let-bindings made of each projection in order) - Γ ⊢ Intro(Γ) : Tel(Γ) (iterated constructors) *) let telescope env evd = function | [] -> assert false | [d] -> let (n, _, t) = to_tuple d in t, [of_tuple (n, Some (mkRel 1), Vars.lift 1 t)], mkRel 1 | d :: tl -> let (n, _, t) = to_tuple d in let len = succ (List.length tl) in let ts = Retyping.get_sort_of (push_rel_context tl env) !evd t in let ts = ESorts.kind !evd ts in let ty, tys = let rec aux (ty, tyuniv, tys) ds = match ds with | [] -> (ty, tys) | d :: ds -> let (n, b, t) = to_tuple d in let pred = mkLambda (n, t, ty) in let env = push_rel_context ds env in let sigty = mkAppG env evd (Lazy.force coq_sigma) [|t; pred|] in let _, u = destInd !evd (fst (destApp !evd sigty)) in let _, ua = UVars.Instance.to_array (EInstance.kind !evd u) in let l = Sorts.sort_of_univ @@ Univ.Universe.make ua.(0) in (* Ensure that the universe of the sigma is only >= those of t and pred *) let open UnivProblem in let enforce_leq env sigma t cstr = let ts = Retyping.get_sort_of env sigma t in let ts = ESorts.kind sigma ts in UnivProblem.Set.add (ULe (ts, l)) cstr in let cstrs = enforce_leq env !evd t (UnivProblem.Set.add (ULe (tyuniv, l)) UnivProblem.Set.empty) in let () = evd := Evd.add_universe_constraints !evd cstrs in aux (sigty, l, (u, pred) :: tys) ds in aux (t, ts, []) tl in let constr, _ = List.fold_right (fun (u, pred) (intro, k) -> let pred = Vars.lift k pred in let (n, dom, codom) = destLambda !evd pred in let intro = mkApp (constr_of_global_univ !evd (Lazy.force coq_sigmaI, u), [| dom; pred; mkRel k; intro|]) in (intro, succ k)) tys (mkRel 1, 2) in let (last, _, subst) = List.fold_right2 (fun pred d (prev, k, subst) -> let (n, b, t) = to_tuple d in (* sigma is not sort poly (at least for now) *) let proj1 = mkProj (Lazy.force coq_pr1, ERelevance.relevant, prev) in let proj2 = mkProj (Lazy.force coq_pr2, ERelevance.relevant, prev) in (Vars.lift 1 proj2, succ k, of_tuple (n, Some proj1, Vars.liftn 1 k t) :: subst)) (List.rev tys) tl (mkRel 1, 1, []) in ty, (of_tuple (n, Some last, Vars.liftn 1 len t) :: subst), constr let sigmaize ?(liftty=0) env0 evd pars f = let env = push_rel_context pars env0 in let ty = Retyping.get_type_of env 
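(* Illustration of [telescope] above (hypothetical example, not from the
   original source): for Γ = (n : nat) (v : vector A n) it returns
   Tel(Γ) = sigma nat (fun n => vector A n), the let-bound context
   (n := p.(pr1)) (v := p.(pr2)) valid under p : Tel(Γ), and the introduction
   term sigmaI _ _ n v.  [sigmaize] uses it to pack the arguments of [f] into
   a single index. *)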
!evd f in let ctx, concl = whd_decompose_prod_decls env !evd ty in let ctx = EConstr.Vars.smash_rel_context ctx in let argtyp, letbinders, make = telescope env evd ctx in (* Everyting is in env, move to index :: letbinders :: env *) let lenb = List.length letbinders in let pred = mkLambda (nameR (Id.of_string "index"), argtyp, it_mkProd_or_LetIn (mkApp (Vars.lift (succ lenb) f, rel_vect 0 lenb)) letbinders) in let tyargs = [| argtyp; pred |] in let tysig = mkAppG env evd (Lazy.force coq_sigma) tyargs in let indexproj = Lazy.force coq_pr1 in let valproj = Lazy.force coq_pr2 in let indices = (List.rev_map (fun l -> Vars.substl (tl l) (hd l)) (Equations_common.proper_tails (List.map (fun d -> Option.get (pi2 (to_tuple d))) letbinders))) in let valsig = let argtyp = Vars.lift (succ lenb) argtyp in let pred = mkLambda (nameR (Id.of_string "index"), argtyp, it_mkProd_or_LetIn (mkApp (Vars.lift (2 * succ lenb) f, rel_vect 0 lenb)) (Equations_common.lift_rel_contextn 1 (succ lenb) letbinders)) in let (_, u) = destInd !evd (fst @@ destApp !evd tysig) in mkApp (mkRef (Lazy.force coq_sigmaI, u), [|argtyp; pred; Vars.lift 1 make; mkRel 1|]) in let pred = it_mkLambda_or_LetIn pred pars in let _ = e_type_of env0 evd pred in let () = evd := Evd.minimize_universes !evd in (argtyp, pred, pars, indices, indexproj, valproj, valsig, tysig) let ind_name ind = Nametab.basename_of_global (GlobRef.IndRef ind) let signature_class evd = let evd, c = get_fresh evd logic_signature_class in evd, fst (snd (Option.get (Typeclasses.class_of_constr (Global.env()) evd c))) let build_sig_of_ind env sigma (ind,u as indu) = let (mib, oib as _mind) = Inductive.lookup_mind_specif env ind in let ctx = inductive_alldecls env indu in let ctx = EConstr.Vars.smash_rel_context ctx in let lenpars = mib.mind_nparams_rec in let lenargs = List.length ctx - lenpars in if lenargs = 0 then user_err_loc (None, str"No signature to derive for non-dependent inductive types"); let args, pars = List.chop lenargs ctx in let parapp = mkApp (mkIndU indu, extended_rel_vect 0 pars) in let fullapp = mkApp (mkIndU indu, extended_rel_vect 0 ctx) in let evd = ref sigma in let idx, pred, pars, _, _, _, valsig, _ = sigmaize env evd pars parapp in let sigma = !evd in sigma, pred, pars, fullapp, valsig, ctx, lenargs, idx let nf_econstr sigma c = Evarutil.nf_evar sigma c let declare_sig_of_ind env sigma ~poly (ind,u) = let sigma, pred, pars, fullapp, valsig, ctx, lenargs, idx = build_sig_of_ind env sigma (ind, u) in let indid = ind_name ind in let simpl = Tacred.simpl env sigma in let sigma = Evd.minimize_universes sigma in let fullapp = nf_econstr sigma fullapp in let idx = nf_econstr sigma idx in let _, (sigma, indsig) = let indsigid = add_suffix indid "_sig" in declare_constant indsigid pred None ~poly sigma ~kind:Decls.(IsDefinition Definition) in let pack_id = add_suffix indid "_sig_pack" in let _, (sigma, pack_fn) = let vbinder = of_tuple (nameR (add_suffix indid "_var"), None, fullapp) in let term = it_mkLambda_or_LetIn valsig (vbinder :: ctx) in (* let rettype = mkApp (mkConst indsig, extended_rel_vect (succ lenargs) pars) in *) declare_constant pack_id (simpl term) None (* (Some (it_mkProd_or_LetIn rettype (vbinder :: ctx))) *) ~poly sigma ~kind:Decls.(IsDefinition Definition) in let sigma = if not poly then Evd.from_env (Global.env ()) else sigma in let sigma, c = signature_class sigma in let signature_id = add_suffix indid "_Signature" in let inst = declare_instance signature_id ~poly sigma ctx c [fullapp; Vars.lift lenargs idx; mkApp (indsig, 
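(* e.g., for an inductive [vector] this derive generates [vector_sig], the
   packing function [vector_sig_pack] and the [vector_Signature] instance
   being declared here (names follow the suffixes used above) *)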
extended_rel_vect lenargs pars); mkApp (pack_fn, extended_rel_vect 0 ctx)] in Extraction_plugin.Table.extraction_inline true [Libnames.qualid_of_ident pack_id]; Extraction_plugin.Table.extraction_inline true [Libnames.qualid_of_ident signature_id]; inst let () = let fn ~pm env sigma ~poly c = let _ = declare_sig_of_ind env sigma ~poly c in pm in Ederive.(register_derive { derive_name = "Signature"; derive_fn = make_derive_ind fn }) let get_signature env sigma0 ty = try let sigma, (idx, _) = new_type_evar env sigma0 Evd.univ_flexible ~src:(dummy_loc, Evar_kinds.InternalHole) in let sigma, (signaturety, _) = new_type_evar env sigma Evd.univ_flexible ~src:(dummy_loc, Evar_kinds.InternalHole) in let sigma, signature = new_evar env sigma (mkProd (anonR, idx, Vars.lift 1 signaturety)) in let sigma, cl = get_fresh sigma logic_signature_class in let inst = mkApp (cl, [| ty; idx; signature |]) in let sigma, tc = Typeclasses.resolve_one_typeclass env sigma inst in (* let _, u = destConst sigma (fst (destApp sigma inst)) in *) (* let ssig = mkApp (mkConstG logic_signature_sig u, [| ty; idx; tc |]) in *) let ssig = signature in (* let spack = mkApp (mkConstG logic_signature_pack u, [| ty; idx; tc |]) in *) let spack = Reductionops.whd_all env sigma tc in (sigma, nf_evar sigma ssig, nf_evar sigma spack) with Not_found -> let pind, args = Inductive.find_rectype env (to_constr sigma0 ty) in let sigma, pred, pars, _, valsig, ctx, _, _ = build_sig_of_ind env sigma0 (to_peuniverses pind) in Feedback.msg_warning (str "Automatically inlined signature for type " ++ Printer.pr_pinductive env sigma pind ++ str ". Use [Derive Signature for " ++ Printer.pr_pinductive env sigma pind ++ str ".] to avoid this."); let indsig = pred in let vbinder = of_tuple (anonR, None, ty) in let pack_fn = it_mkLambda_or_LetIn valsig (vbinder :: ctx) in let args = List.map of_constr args in let pack_fn = beta_applist sigma (pack_fn, args) in (sigma, nf_evar sigma (mkApp (indsig, Array.of_list args)), nf_evar sigma pack_fn) (* let generalize_sigma env sigma c packid = *) (* let ty = Retyping.get_type_of env sigma c in *) (* let value, typ = mk_pack env sigma ty in *) (* let valsig = value c in *) (* let setvar = letin_tac None (Name packid) valsig (Some typ) nowhere in *) (* let geneq = generalize [mkCast (mkRefl typ (mkVar packid), *) (* DEFAULTcast, mkEq typ (mkVar packid) valsig)] in *) (* let clear = clear_body [packid] in *) (* let movetop = move_hyp true packid (Tacexpr.MoveToEnd false) in *) (* tclTHENLIST [setvar; geneq; clear; movetop] *) let pattern_sigma ~assoc_right c hyp env sigma = let open Tacticals in let evd = ref sigma in let terms = constrs_of_coq_sigma env evd c (mkVar hyp) in let terms = if assoc_right then terms else match terms with | (x, t, p, rest) :: term :: _ -> constrs_of_coq_sigma env evd t p @ terms | _ -> terms in let pat x = Patternops.pattern_of_constr env !evd x in let terms = if assoc_right then terms else match terms with | (x, t, p, rest) :: _ :: _ -> terms @ constrs_of_coq_sigma env evd t p | _ -> terms in let projs = List.map (fun (x, t, p, rest) -> (pat t, make_change_arg p)) terms in let projabs = tclTHENLIST ((if assoc_right then rev_map else List.map) (fun (t, p) -> (change ~check:true (Some t) p Locusops.onConcl)) projs) in tclTHEN (Proofview.Unsafe.tclEVARS !evd) projabs let curry_left_hyp env sigma c t = let aux c t na u ty pred concl = let (n, idx, dom) = destLambda sigma pred in let newctx = [of_tuple (na, None, dom); of_tuple (n, None, idx)] in let tuple = mkApp (mkConstructG 
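(* the dependent pair expected by the original hypothesis, rebuilt from the
   two binders just introduced in [newctx] *)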
coq_sigmaI u, [| Vars.lift 2 ty; Vars.lift 2 pred; mkRel 2; mkRel 1 |]) in let term = it_mkLambda_or_LetIn (mkApp (Vars.lift 2 c, [| tuple |])) newctx in let typ = it_mkProd_or_LetIn (Vars.subst1 tuple (Vars.liftn 2 2 concl)) newctx in (term, typ) in let rec curry_index c t = match kind sigma t with | Prod (na, dom, concl) -> (match decompose_coq_sigma env sigma dom with | None -> (c, t) | Some (u, ty, pred) -> let term, typ = aux c t na u ty pred concl in match kind sigma typ with | Prod (na', dom', concl') -> let body' = pi3 (destLambda sigma term) in let c, t = curry_index body' concl' in mkLambda (na', dom', c), mkProd (na', dom', t) | _ -> (term, typ)) | _ -> (c, t) in let curry c t = match kind sigma t with | Prod (na, dom, concl) -> (match decompose_coq_sigma env sigma dom with | None -> None | Some (inst, ty, pred) -> let term, typ = aux c t na inst ty pred concl in let c, t = curry_index term typ in Some (c, t)) | _ -> None in curry c t let curry env sigma na c = let rec make_arg na t = match decompose_coq_sigma env sigma t with | None -> if is_global env sigma (Lazy.force logic_unit) t then let _, u = destInd sigma t in [], constr_of_global_univ sigma (Lazy.force logic_unit_intro, u) else [of_tuple (na,None,t)], mkRel 1 | Some (u, ty, pred) -> let na, _, codom = if isLambda sigma pred then destLambda sigma pred else (anonR, ty, mkApp (pred, [|mkRel 1|])) in let ctx, rest = make_arg na codom in let len = List.length ctx in let tuple = mkApp (mkConstructG coq_sigmaI u, [| Vars.lift (len + 1) ty; Vars.lift (len + 1) pred; mkRel (len + 1); rest |]) in ctx @ [of_tuple (na, None, ty)], tuple in make_arg na c let uncurry_hyps name = let open Proofview in let open Proofview.Notations in Proofview.Goal.enter (fun gl -> let hyps = Goal.hyps gl in let env = Goal.env gl in let sigma = Goal.sigma gl in let hyps, _ = List.split_when (fun d -> is_global env sigma (Lazy.force coq_end_of_section) (get_named_type d) || is_section_variable (Global.env ()) (get_id d)) hyps in let ondecl (sigma, acc, ty) d = let (dna, _, dty) = to_named_tuple d in let sigma, sigmaI = new_global sigma (Lazy.force coq_sigmaI) in let _, u = destConstruct sigma sigmaI in let types = [| dty; mkNamedLambda sigma dna dty ty |] in let app = mkApp (sigmaI, Array.append types [| mkVar dna.binder_name; acc |]) in (sigma, app, mkApp (mkIndG coq_sigma u, types)) in let sigma, unit = get_fresh sigma logic_unit_intro in let sigma, unittype = get_fresh sigma logic_unit in let sigma, term, ty = fold_named_context_reverse ondecl ~init:(sigma, unit, unittype) hyps in let sigma, _ = Typing.type_of env sigma term in Proofview.Unsafe.tclEVARS sigma <*> Tactics.letin_tac None (Name name) term (Some ty) nowhere ) let uncurry_call env sigma fn c = let hd', args' = decompose_app sigma fn in let hd, args = decompose_app sigma c in let params, args = Array.chop (Array.length args') args in let hd = mkApp (hd, params) in let ty = Retyping.get_type_of env sigma hd in let ctx, _ = Reductionops.whd_decompose_prod_decls env sigma ty in let ctx = let open Context.Rel.Declaration in let rec aux env ctx args = match ctx, args with | LocalAssum (na, t) as decl :: decls, arg :: args -> if isSort sigma t then let ty = Retyping.get_type_of env sigma arg in LocalAssum (na, ty) :: aux env decls args else decl :: aux env decls args | LocalDef _ as decl :: decls, args -> decl :: aux env decls args | [], _ :: _ -> assert false | ctx, [] -> [] in List.rev (aux env (List.rev ctx) (Array.to_list args)) in let evdref = ref sigma in if CList.is_empty ctx then 
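(* no non-parameter arguments left to pack into a telescope *)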
user_err_loc (None, Pp.str"No arguments to uncurry"); (* let ctx = (Anonymous, None, concl) :: ctx in *) let sigty, sigctx, constr = telescope env evdref ctx in let app = Vars.substl (Array.rev_to_list args) constr in let fnapp = mkApp (hd, rel_vect 0 (List.length sigctx)) in let fnapp = it_mkLambda_or_subst env fnapp sigctx in let projsid = nameR (Id.of_string "projs") in let fnapp_ty = Retyping.get_type_of (push_rel_context [Context.Rel.Declaration.LocalAssum (projsid, sigty)] env) !evdref fnapp in (* TODO: build (packargs, fn packargs.projs) = (args, c) equality *) let sigma, sigmaI = get_fresh !evdref coq_sigmaI in let packed = mkApp (sigmaI, [| sigty; mkLambda (projsid, sigty, fnapp_ty); mkRel 1; fnapp |]) in let sigma, _ = Typing.type_of (push_rel_context [Context.Rel.Declaration.LocalAssum (projsid, sigty)] env) sigma packed in sigma, app, packed, sigty (* Produce parts of a case on a variable, while introducing cuts and * equalities when necessary. * This function requires a full rel_context, a rel to eliminate, and a goal. * It returns: * - a context [ctx']; * - a return type valid under [ctx']; * - the type of the branches of the case; * - a context_map for each branch; * - a number of cuts; * - a reverse substitution from [ctx] to [ctx']; * - a list of terms in [ctx] to apply to the case once it is built; * - a boolean about the need for simplification or not. *) let smart_case (env : Environ.env) (evd : Evd.evar_map ref) (ctx : rel_context) (rel : int) (goal : EConstr.types) : rel_context * EConstr.types * (EConstr.types * int * Context_map.context_map) array * int * Context_map.context_map * EConstr.constr list * bool = let after, rel_decl, before = Context_map.split_context (pred rel) ctx in let rel_ty = Context.Rel.Declaration.get_type rel_decl in let rel_ty = Vars.lift rel rel_ty in let rel_t = EConstr.mkRel rel in (* Fetch some information about the type of the variable being eliminated. *) let pind, args = Inductiveops.find_inductive env !evd rel_ty in let mib, oib = Global.lookup_inductive (fst pind) in let params, indices = List.chop mib.mind_nparams args in (* The variable itself will be treated for all purpose as one of its indices. *) let indices = indices @ [rel_t] in let indfam = Inductiveops.make_ind_family (pind, params) in let arity_ctx = Inductiveops.make_arity_signature env !evd true indfam in let rev_arity_ctx = List.rev arity_ctx in (* Firstly, we need to analyze each index to decide if we should introduce * an equality for it or not. *) (* For each index of the type, we _omit_ it if and only if * 1) It is a variable. * 2) It did not appear before. * 3) Its type does not depend on something that was not omitted before. *) (* ===== FORWARD PASS ===== *) let rec compute_omitted prev_indices indices prev_ctx ctx omitted candidate nb = match indices, ctx with | [], [] -> omitted, nb, prev_indices, candidate | idx :: indices, decl :: ctx -> let omit, cand = (* Variable. *) if not (isRel !evd idx) then None, None (* Linearity. *) else if List.exists (Termops.dependent !evd idx) params then None, None else if List.exists (Termops.dependent !evd idx) prev_indices then None, None (* Dependency. 
*) else let rel = EConstr.destRel !evd idx in let decl_ty = Context.Rel.Declaration.get_type decl in let deps = Termops.free_rels !evd decl_ty in let omit = Int.Set.fold (fun x b -> b && try Option.has_some (List.nth omitted (x-1)) with Failure _ | Invalid_argument _ -> true) deps true in (if omit then Some rel else None), Some rel in compute_omitted (idx :: prev_indices) indices (decl :: prev_ctx) ctx (omit :: omitted) (cand :: candidate) (if Option.has_some omit then nb else succ nb) | _, _ -> assert false in (* [rev_indices] also include the variable being eliminated at its head. *) let omitted, nb, rev_indices, candidate = compute_omitted [] indices [] rev_arity_ctx [] [] 0 in (* Now we do a pass backwards to check if we can omit more things. *) (* More precisely, for any variable in rev_indices, we can omit it if * nothing in the remaining context that was not omitted depends on it. *) (* TODO The algorithm is very inefficient for now. *) (* ===== BACKWARD PASS ===== *) let rec compute_omitted_bis rev_omitted omitted candidate rev_indices nb = match omitted, candidate, rev_indices with | [], [], [] -> rev_omitted, nb | Some rel :: omitted, _ :: candidate, idx :: rev_indices -> compute_omitted_bis (Some rel :: rev_omitted) omitted candidate rev_indices nb | _ :: omitted, None :: candidate, idx :: rev_indices -> compute_omitted_bis (None :: rev_omitted) omitted candidate rev_indices nb | None :: omitted, Some rel :: candidate, idx :: rev_indices -> (* We know that [idx] is [Rel rel] and a candidate for omission. *) (* TODO Very inefficient... *) let new_decl = Context.Rel.Declaration.LocalAssum (anonR, goal) in let after = new_decl :: CList.firstn (pred rel) ctx in let omit = CList.for_all_i (fun i decl -> let decl_ty = Context.Rel.Declaration.get_type decl in (* No dependency. *) not (Termops.dependent !evd (mkRel (rel - i)) decl_ty) || (* Already omitted. *) List.mem (Some i) rev_omitted) 0 after in if omit then compute_omitted_bis (Some rel :: rev_omitted) omitted candidate rev_indices (pred nb) else compute_omitted_bis (None :: rev_omitted) omitted candidate rev_indices nb | _, _, _ -> assert false in let rev_omitted, nb = compute_omitted_bis [] omitted candidate rev_indices nb in let omitted = List.rev rev_omitted in (* At this point, we have [omitted] which is a list of either [Some rel] when * the corresponding index is omitted, [None] otherwise, and [nb] is the number * of [None] in this list. *) (* Now we consider the context [arity_ctx @ ctx], which is the context of * the return type. We will build a context substitution from this context * to a new one with shape [cuts @ arity_ctx @ ctx'] where [ctx'] is some * sub-context of [ctx], [cuts] is a number of declarations for which we * need to introduce cuts, and [arity_ctx] has been left untouched. 
*) (* ===== STRENGTHENING ===== *) let subst = Context_map.id_subst (arity_ctx @ ctx) in let rev_subst = Context_map.id_subst (arity_ctx @ ctx) in let subst, rev_subst = List.fold_left ( fun (subst, rev_subst) -> function | None -> subst, rev_subst | Some rel -> let ctx = subst.Context_map.src_ctx in let fresh_rel = Context_map.mapping_constr !evd subst (EConstr.mkRel 1) in let target_rel = EConstr.mkRel (rel + oib.mind_nrealargs + 1) in let target_rel = Context_map.mapping_constr !evd subst target_rel in let target_rel = EConstr.destRel !evd target_rel in let lsubst, lrev_subst = Context_map.new_strengthen env !evd ctx target_rel fresh_rel in let res1 = Context_map.compose_subst env ~sigma:!evd lsubst subst in let res2 = Context_map.compose_subst env ~sigma:!evd rev_subst lrev_subst in res1, res2 ) (subst, rev_subst) omitted in let nb_cuts_omit = pred (EConstr.destRel !evd (Context_map.mapping_constr !evd subst (EConstr.mkRel 1))) in (* [ctx'] is the context under which we will build the case in a first step. *) (* This is [ctx] where everything omitted and cut is removed. *) let ctx' = List.skipn (nb_cuts_omit + oib.mind_nrealargs + 1) (rev_subst.Context_map.tgt_ctx) in let rev_subst' = List.skipn (nb_cuts_omit + oib.mind_nrealargs + 1) (rev_subst.Context_map.map_inst) in let rev_subst' = Context_map.lift_pats (-(oib.mind_nrealargs+1)) rev_subst' in let rev_subst_without_cuts = Context_map.mk_ctx_map env !evd ctx rev_subst' ctx' in (* Now we will work under a context with [ctx'] as a prefix, so we will be * able to go back to [ctx] easily. *) (* ===== SUBSTITUTION ===== *) let subst = CList.fold_right_i ( fun i omit subst -> match omit with | None -> subst | Some rel -> let ctx = subst.Context_map.src_ctx in let pats = subst.Context_map.map_inst in let orig = oib.mind_nrealargs + 1 - i in let fresh_rel = Context_map.specialize !evd pats (Context_map.PRel orig) in let target_rel = EConstr.mkRel (rel + oib.mind_nrealargs + 1) in let target_rel = Context_map.mapping_constr !evd subst target_rel in let target_rel = EConstr.destRel !evd target_rel in (* We know that this call will fall in the simple case * of [single_subst], because we already strengthened everything. *) (* TODO Related to [compute_omitted_bis], we cannot actually substitute * the terms that were omitted simply due to the fact that nothing * depends on them, as it would be an ill-typed substitution. *) let lsubst = Context_map.single_subst ~unsafe:true env !evd target_rel fresh_rel ctx in Context_map.compose_subst ~unsafe:true env ~sigma:!evd lsubst subst ) 0 omitted subst in let nb_cuts = pred (EConstr.destRel !evd (Context_map.mapping_constr !evd subst (EConstr.mkRel 1))) in (* Also useful: a substitution from [ctx] to the context with cuts. *) let subst_to_cuts = let lift_subst = Context_map.mk_ctx_map env !evd (arity_ctx @ ctx) (Context_map.lift_pats (oib.mind_nrealargs + 1) (Context_map.id_pats ctx)) ctx in Context_map.compose_subst ~unsafe:true env ~sigma:!evd subst lift_subst in (* Finally, we can work on producing a return type. *) let goal = Context_map.mapping_constr !evd subst_to_cuts goal in (* ===== CUTS ===== *) let cuts_ctx, remaining = List.chop nb_cuts (subst.Context_map.src_ctx) in let fresh_ctx = List.firstn (oib.mind_nrealargs + 1) remaining in let revert_cut x = let rec revert_cut i = function | [] -> failwith "Could not revert a cut, please report." 
| Context_map.PRel y :: _ when Int.equal x y -> (match nth cuts_ctx (pred x) with | Context.Rel.Declaration.LocalAssum _ -> Some (EConstr.mkRel i) | Context.Rel.Declaration.LocalDef _ -> None) | _ :: l -> revert_cut (succ i) l in revert_cut (- oib.mind_nrealargs) (subst.Context_map.map_inst) in let rev_cut_vars = CList.map revert_cut (CList.init nb_cuts (fun i -> succ i)) in let cut_vars = List.rev rev_cut_vars in let cut_vars = CList.map_filter (fun x -> x) cut_vars in (* ===== EQUALITY OF TELESCOPES ===== *) let goal, to_apply, simpl = if Int.equal nb 0 then goal, [], false else let arity_ctx' = Context_map.specialize_rel_context !evd (subst_to_cuts.Context_map.map_inst) arity_ctx in let rev_indices' = List.map (Context_map.mapping_constr !evd subst_to_cuts) rev_indices in let _, rev_sigctx, tele_lhs, tele_rhs = CList.fold_left3 ( fun (k, rev_sigctx, tele_lhs, tele_rhs) decl idx -> function | Some _ -> (* Don't add a binder, but substitute *) let fresh = EConstr.mkRel (nb_cuts + oib.mind_nrealargs + 1) in let rev_sigctx = Equations_common.subst_telescope fresh rev_sigctx in succ k, rev_sigctx, tele_lhs, tele_rhs | None -> (* Add a binder to the telescope. *) let rhs = EConstr.mkRel k in succ k, decl :: rev_sigctx, idx :: tele_lhs, rhs :: tele_rhs ) (succ nb_cuts, [], [], []) arity_ctx' rev_indices' omitted in let sigctx = List.rev rev_sigctx in let sigty, _, sigconstr = telescope (push_rel_context (subst.Context_map.src_ctx) env) evd sigctx in (* Build a goal with an equality of telescopes at the front. *) let left_sig = Vars.substl (List.rev tele_lhs) sigconstr in let right_sig = Vars.substl (List.rev tele_rhs) sigconstr in (* TODO Swap left_sig and right_sig... *) let eq = Equations_common.mkEq env evd sigty right_sig left_sig in let _, equ = EConstr.destInd !evd (fst (destApp !evd eq)) in let goal = Vars.lift 1 goal in let goal = EConstr.mkProd (anonR, eq, goal) in (* Build a reflexivity proof to apply to the case. *) let tr_out t = let t = it_mkLambda_or_LetIn t cuts_ctx in let t = it_mkLambda_or_LetIn t fresh_ctx in let t = Context_map.mapping_constr !evd rev_subst_without_cuts t in Reductionops.beta_applist !evd (t, indices @ cut_vars) in goal, [Equations_common.mkRefl ~inst:equ env evd (tr_out sigty) (tr_out left_sig)], true in (* ===== RESOURCES FOR EACH BRANCH ===== *) let params = List.map (Context_map.mapping_constr !evd subst_to_cuts) params in (* If something is wrong here, it means that one of the parameters was * omitted or cut, which should be wrong... *) let params = List.map (Vars.lift (-(nb_cuts + oib.mind_nrealargs + 1))) params in let goal = EConstr.it_mkProd_or_LetIn goal cuts_ctx in let goal = it_mkLambda_or_LetIn goal fresh_ctx in let params = List.map (to_constr ~abort_on_undefined_evars:false !evd) params in let goal' = to_constr ~abort_on_undefined_evars:false !evd goal in let branches_ty = Inductive.build_branches_type (from_peuniverses !evd pind) (mib, oib) params goal' in (* Refresh the inductive family. 
*) let indfam = Inductiveops.make_ind_family (pind, List.map EConstr.of_constr params) in let branches_info = Inductiveops.get_constructors env indfam in let full_subst = let pats = Context_map.id_pats ctx in let pats = Context_map.lift_pats (oib.mind_nrealargs + 1) pats in let ctx' = arity_ctx @ ctx in Context_map.mk_ctx_map env !evd ctx' pats ctx in let full_subst = Context_map.compose_subst ~unsafe:true env ~sigma:!evd subst full_subst in let pats_ctx' = Context_map.id_pats ctx' in let pats_cuts = Context_map.id_pats cuts_ctx in let branches_subst = Array.map (fun summary -> (* This summary is under context [ctx']. *) let indices = summary.Inductiveops.cs_concl_realargs in let params = Array.of_list summary.Inductiveops.cs_params in let term = EConstr.mkConstructU (summary.Inductiveops.cs_cstr) in let term = EConstr.mkApp (term, params) in let term = Vars.lift (summary.Inductiveops.cs_nargs) term in let term = EConstr.mkApp (term, Context.Rel.instance EConstr.mkRel 0 summary.Inductiveops.cs_args) in (* Indices are typed under [args @ ctx'] *) let indices = (Array.to_list indices) @ [term] in let args = summary.Inductiveops.cs_args in (* Substitute the indices in [cuts_ctx]. *) let rev_indices = List.rev indices in let pats_indices = List.map (Context_map.pat_of_constr env !evd) rev_indices in let pats_ctx' = Context_map.lift_pats summary.Inductiveops.cs_nargs pats_ctx' in let pats = pats_indices @ pats_ctx' in let cuts_ctx = Context_map.specialize_rel_context !evd pats cuts_ctx in let pats = Context_map.lift_pats nb_cuts pats in let pats = pats_cuts @ pats in let csubst = Context_map.mk_ctx_map env !evd (cuts_ctx @ args @ ctx') pats (subst.Context_map.src_ctx) in Context_map.compose_subst ~unsafe:true env ~sigma:!evd csubst full_subst ) branches_info in let branches_nb = Array.map (fun summary -> summary.Inductiveops.cs_nargs) branches_info in let branches_res = Array.map3 (fun x y z -> (of_constr x, y, z)) branches_ty branches_nb branches_subst in (* ===== RESULT ===== *) let to_apply = cut_vars @ to_apply in (* We have everything we need: * - a context [ctx']; * - a return type [goal] valid under [ctx']; * - the type of the branches of the case; * - the number of arguments of each constructor; * - a context_map for each branch; * - a number of cuts [nb_cuts]; * - a reverse substitution [rev_subst_without_cuts] from [ctx] to [ctx']; * - some terms in [ctx] to apply to the case once it is built. 
*) (ctx', goal, branches_res, nb_cuts, rev_subst_without_cuts, to_apply, simpl) let curry_hyp env sigma hyp t = let curry t = match kind sigma t with | Prod (na, dom, concl) -> let ctx, arg = curry env sigma na dom in let term = mkApp (mkVar hyp, [| arg |]) in let ty = Reductionops.nf_betaiota env sigma (Vars.subst1 arg concl) in Some (it_mkLambda_or_LetIn term ctx, it_mkProd_or_LetIn ty ctx) | _ -> None in curry t open RedFlags let red_curry () = let redpr pr = fPROJ (Projection.repr (Lazy.force pr)) in let fold accu r = red_add accu r in let reds = mkflags [fDELTA; fBETA; fMATCH] in let reds = List.fold_left fold reds [redpr coq_pr1; redpr coq_pr2] in Reductionops.clos_norm_flags reds let curry_concl env sigma na dom codom = let ctx, arg = curry env sigma na dom in let newconcl = let body = it_mkLambda_or_LetIn (Vars.subst1 arg codom) ctx in let inst = extended_rel_vect 0 ctx in red_curry () env sigma (it_mkProd_or_LetIn (mkApp (body, inst)) ctx) in let proj last decl (terms, acc) = if last then (acc :: terms, acc) else (* sigma is not sort poly (at least for now) *) let term = mkProj (Lazy.force coq_pr1, ERelevance.relevant, acc) in let acc = mkProj (Lazy.force coq_pr2, ERelevance.relevant, acc) in (term :: terms, acc) in let terms, acc = match ctx with | hd :: (_ :: _ as tl) -> proj true hd (List.fold_right (proj false) tl ([], mkRel 1)) | hd :: tl -> ([mkRel 1], mkRel 1) | [] -> ([mkRel 1], mkRel 1) in let sigma, ev = new_evar env sigma newconcl in let term = mkLambda (na, dom, mkApp (ev, CArray.rev_of_list terms)) in sigma, term module Tactics =struct open Proofview.Notations open Proofview.Goal open Tacmach open Tacticals let curry_hyp id = Proofview.Goal.enter begin fun gl -> let sigma = Proofview.Goal.sigma gl in let decl = pf_get_hyp id gl in let (na, body, ty) = to_named_tuple decl in match curry_hyp (pf_env gl) (project gl) id ty with | Some (prf, typ) -> (match body with | Some b -> let newprf = Vars.replace_vars sigma [(id,b)] prf in tclTHEN (clear [id]) (Tactics.letin_tac None (Name id) newprf (Some typ) nowhere) | None -> assert_replacing id prf typ) | None -> tclFAIL (str"No currying to do in " ++ Id.print id) end let curry = Proofview.Goal.enter begin fun gl -> let env = env gl in let sigma = sigma gl in let concl = concl gl in match kind sigma concl with | Prod (na, dom, codom) -> Refine.refine ~typecheck:true (fun sigma -> curry_concl env sigma na dom codom) | _ -> Tacticals.tclFAIL (str"Goal cannot be curried") end let uncurry_call c c' id id' = enter begin fun gl -> let env = env gl in let sigma = sigma gl in let sigma, term, fterm, ty = uncurry_call env sigma c c' in let sigma, _ = Typing.type_of env sigma term in Proofview.Unsafe.tclEVARS sigma <*> Tactics.letin_tac None (Name id) term (Some ty) nowhere <*> Tactics.letin_tac None (Name id') (Vars.subst1 (mkVar id) fterm) None nowhere end let get_signature_pack id id' = enter begin fun gl -> let env = Proofview.Goal.env gl in let sigma = Proofview.Goal.sigma gl in try let sigma', sigsig, sigpack = get_signature env sigma (Tacmach.pf_get_hyp_typ id gl) in Proofview.Unsafe.tclEVARS sigma' <*> letin_tac None (Name id') (beta_applist sigma (sigpack, [mkVar id])) None nowhere with Not_found -> Tacticals.tclFAIL (str"No Signature instance found") end let pattern_sigma id = enter begin fun gl -> let env = Proofview.Goal.env gl in let sigma = Proofview.Goal.sigma gl in let decl = Tacmach.pf_get_hyp id gl in let term = Option.get (get_named_value decl) in pattern_sigma ~assoc_right:true term id env sigma end end 
Coq-Equations-1.3.1-8.20/src/sigma_types.mli000066400000000000000000000067661463127417400204370ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) open EConstr val mkAppG : Environ.env -> Evd.evar_map ref -> Names.GlobRef.t -> constr array -> constr val applistG : Environ.env -> Evd.evar_map ref -> Names.GlobRef.t -> constr list -> constr val mkSig : Environ.env -> Evd.evar_map ref -> Names.Name.t binder_annot * types * constr -> constr val constrs_of_coq_sigma : Environ.env -> Evd.evar_map ref -> constr -> constr -> (Names.Name.t binder_annot * constr * constr * constr) list val decompose_coq_sigma : Environ.env -> Evd.evar_map -> constr -> (EInstance.t * constr * constr) option val decompose_indapp : Evd.evar_map -> constr -> constr array -> constr * constr array val telescope_intro : Environ.env -> Evd.evar_map -> int -> constr (* interpreted telescope *) -> constr (* introduction *) val telescope_of_context : Environ.env -> Evd.evar_map -> rel_context -> Evd.evar_map * constr (* telescope *) * constr (* interp tele *) val telescope : Environ.env -> Evd.evar_map ref -> rel_context -> constr * rel_context * constr val sigmaize : ?liftty:int -> Environ.env -> Evd.evar_map ref -> rel_context -> constr -> constr * constr * rel_context * constr list * Names.Projection.t * Names.Projection.t * constr * constr val ind_name : Names.inductive -> Names.Id.t val declare_sig_of_ind : Environ.env -> Evd.evar_map -> poly:bool -> Names.inductive * EConstr.EInstance.t -> Names.Constant.t * (Evd.evar_map * EConstr.t) val get_signature : Environ.env -> Evd.evar_map -> constr -> Evd.evar_map * constr * constr val pattern_sigma : assoc_right:bool -> constr -> Names.Id.t -> Environ.env -> Evd.evar_map -> unit Proofview.tactic (* Unused for now *) val curry_left_hyp : Environ.env -> Evd.evar_map -> constr -> types -> (constr * types) option val build_sig_of_ind : Environ.env -> Evd.evar_map -> Names.inductive Equations_common.peuniverses -> Evd.evar_map * constr * rel_context * constr * constr * rel_context * int * constr (** Pack all hypotheses into a new one using sigmas *) val uncurry_hyps : Names.Id.t -> unit Proofview.tactic (** Curry a term starting with a quantification on a sigma type, associated to the right. 
*) val curry : Environ.env -> Evd.evar_map -> Names.Name.t binder_annot -> constr -> rel_context * constr val uncurry_call : Environ.env -> Evd.evar_map -> constr -> constr -> Evd.evar_map * constr * constr * types val smart_case : Environ.env -> Evd.evar_map ref -> rel_context -> int -> types -> rel_context * types * (types * int * Context_map.context_map) array * int * Context_map.context_map * constr list * bool module Tactics : sig val curry_hyp : Names.Id.t -> unit Proofview.tactic val curry : unit Proofview.tactic val uncurry_call : constr -> constr -> Names.Id.t -> Names.Id.t -> unit Proofview.tactic val pattern_sigma : Names.Id.t -> unit Proofview.tactic val get_signature_pack : Names.Id.t -> Names.Id.t -> unit Proofview.tactic end Coq-Equations-1.3.1-8.20/src/simplify.ml000066400000000000000000001624751463127417400175760ustar00rootroot00000000000000open Pp open EConstr open Equations_common (* ========== Coq references ========= *) (* This section should change a lot when we approach an actual solution. *) module type SIGMAREFS = sig val sigma : Names.inductive Lazy.t val sigmaI : Names.constructor Lazy.t end module type EQREFS = sig (* Equality type. *) val eq : Names.inductive Lazy.t val eq_refl : Names.constructor Lazy.t (* Decidable equality typeclass. *) val uip : Names.Constant.t Lazy.t (* Logic types. *) val zero : Names.inductive Lazy.t val one : Names.inductive Lazy.t val one_val : Names.constructor Lazy.t val one_ind_dep : Names.Constant.t Lazy.t val zero_ind : Names.Constant.t Lazy.t val zero_ind_dep : Names.Constant.t Lazy.t (* NoConfusion. *) val noConfusion : Names.inductive Lazy.t val apply_noConfusion : Names.Constant.t Lazy.t (* NoCycle *) val noCycle : Names.inductive Lazy.t val apply_noCycle_left : Names.Constant.t Lazy.t val apply_noCycle_right : Names.Constant.t Lazy.t val simplify_ind_pack : Names.Constant.t Lazy.t val simplify_ind_pack_inv : Names.Constant.t Lazy.t val opaque_ind_pack_eq_inv : Names.Constant.t Lazy.t (* Simplification of dependent pairs. *) val simpl_sigma : Names.Constant.t Lazy.t val simpl_sigma_dep : Names.Constant.t Lazy.t val simpl_sigma_nondep_dep : Names.Constant.t Lazy.t val simpl_sigma_dep_dep : Names.Constant.t Lazy.t val pack_sigma_eq : Names.Constant.t Lazy.t (* Deletion using UIP. *) val simpl_uip : Names.Constant.t Lazy.t (* Solution lemmas. *) val solution_left : Names.Constant.t Lazy.t val solution_left_dep : Names.Constant.t Lazy.t val solution_right : Names.Constant.t Lazy.t val solution_right_dep : Names.Constant.t Lazy.t end module RefsHelper = struct let init_gr s = Lazy.force s let init_inductive s = lazy (Globnames.destIndRef (init_gr s)) let init_constructor s = lazy (Globnames.destConstructRef (init_gr s)) let init_constant s = lazy (Globnames.destConstRef (init_gr s)) end (* This should be parametrizable by the user. *) module EqRefs : EQREFS = struct include RefsHelper open Equations_common let eq = init_inductive logic_eq_type let eq_refl = init_constructor logic_eq_refl let uip = init_constant logic_uip_class let zero = init_inductive logic_bot let one = init_inductive logic_top let one_val = init_constructor logic_top_intro let one_ind_dep = init_constant logic_top_elim let zero_ind = init_constant logic_bot_case let zero_ind_dep = init_constant logic_bot_elim let noConfusion = init_inductive coq_noconfusion_class let noCycle = init_inductive coq_nocycle_class let init_depelim s = init_constant (find_global ("depelim." 
^ s)) let apply_noConfusion = init_depelim "apply_noConfusion" let apply_noCycle_left = init_depelim "apply_noCycle_left" let apply_noCycle_right = init_depelim "apply_noCycle_right" let simplify_ind_pack = init_depelim "simplify_ind_pack" let simplify_ind_pack_inv = init_depelim "simplify_ind_pack_inv" let opaque_ind_pack_eq_inv = init_depelim "opaque_ind_pack_eq_inv" let simpl_sigma = init_depelim "simpl_sigma" let simpl_sigma_dep = init_depelim "simpl_sigma_dep" let simpl_sigma_nondep_dep = init_depelim "simpl_sigma_nondep_dep" let simpl_sigma_dep_dep = init_depelim "simpl_sigma_dep_dep" let pack_sigma_eq = init_depelim "pack_sigma_eq" let simpl_uip = init_depelim "simpl_uip" let solution_left = init_depelim "solution_left" let solution_left_dep = init_depelim "solution_left_dep" let solution_right = init_depelim "solution_right" let solution_right_dep = init_depelim "solution_right_dep" end (* This should not. *) module SigmaRefs : SIGMAREFS = struct include RefsHelper let sigma = init_inductive Equations_common.coq_sigma let sigmaI = init_constructor Equations_common.coq_sigmaI end (* From the references, we can build terms. *) type constr_gen = Evd.evar_map ref -> EConstr.constr type constr_univ_gen = EConstr.EInstance.t -> EConstr.constr module type BUILDER = sig val sigma : constr_gen val sigmaI : constr_gen val eq : constr_gen val equ : constr_univ_gen val eq_refl : constr_univ_gen val uip : constr_gen val zero : constr_gen val one : constr_gen val one_val : constr_gen val one_ind_dep : constr_univ_gen val zero_ind : constr_univ_gen val zero_ind_dep : constr_univ_gen val noConfusion : constr_gen val apply_noConfusion : constr_univ_gen val noCycle : constr_gen val apply_noCycle_left : constr_gen val apply_noCycle_right : constr_gen val simplify_ind_pack : constr_gen val simplify_ind_pack_inv : constr_gen val simpl_sigma : constr_gen val simpl_sigma_dep : constr_gen val simpl_sigma_nondep_dep : constr_gen val simpl_sigma_dep_dep : constr_gen val simpl_uip : constr_gen val solution_left : constr_univ_gen val solution_left_dep : constr_univ_gen val solution_right : constr_univ_gen val solution_right_dep : constr_univ_gen end module BuilderHelper = struct let gen_from_inductive ind = fun evd -> let glob = Names.GlobRef.IndRef (Lazy.force ind) in Equations_common.e_new_global evd glob let gen_from_inductive_univ ind u = let glob = Names.GlobRef.IndRef (Lazy.force ind) in EConstr.mkRef (glob, u) let gen_from_constant cst = fun evd -> let glob = Names.GlobRef.ConstRef (Lazy.force cst) in Equations_common.e_new_global evd glob let gen_from_constant_univ cst u = let glob = Names.GlobRef.ConstRef (Lazy.force cst) in EConstr.mkRef (glob, u) let gen_from_constructor constr = fun evd -> let glob = Names.GlobRef.ConstructRef (Lazy.force constr) in Equations_common.e_new_global evd glob let gen_from_constructor_univ constr = fun u -> let glob = Names.GlobRef.ConstructRef (Lazy.force constr) in EConstr.mkRef (glob, u) end module BuilderGen (SigmaRefs : SIGMAREFS) (EqRefs : EQREFS) : BUILDER = struct include BuilderHelper let sigma = gen_from_inductive SigmaRefs.sigma let sigmaI = gen_from_constructor SigmaRefs.sigmaI let eq = gen_from_inductive EqRefs.eq let equ u = gen_from_inductive_univ EqRefs.eq u let eq_refl = gen_from_constructor_univ EqRefs.eq_refl let uip = gen_from_constant EqRefs.uip let zero = gen_from_inductive EqRefs.zero let one = gen_from_inductive EqRefs.one let one_val = gen_from_constructor EqRefs.one_val let one_ind_dep = gen_from_constant_univ EqRefs.one_ind_dep let zero_ind = 
gen_from_constant_univ EqRefs.zero_ind let zero_ind_dep = gen_from_constant_univ EqRefs.zero_ind_dep let noConfusion = gen_from_inductive EqRefs.noConfusion let apply_noConfusion = gen_from_constant_univ EqRefs.apply_noConfusion let noCycle = gen_from_inductive EqRefs.noCycle let apply_noCycle_left = gen_from_constant EqRefs.apply_noCycle_left let apply_noCycle_right = gen_from_constant EqRefs.apply_noCycle_right let simplify_ind_pack = gen_from_constant EqRefs.simplify_ind_pack let simplify_ind_pack_inv = gen_from_constant EqRefs.simplify_ind_pack_inv let simpl_sigma = gen_from_constant EqRefs.simpl_sigma let simpl_sigma_dep = gen_from_constant EqRefs.simpl_sigma_dep let simpl_sigma_nondep_dep = gen_from_constant EqRefs.simpl_sigma_nondep_dep let simpl_sigma_dep_dep = gen_from_constant EqRefs.simpl_sigma_dep_dep let simpl_uip = gen_from_constant EqRefs.simpl_uip let solution_left = gen_from_constant_univ EqRefs.solution_left let solution_left_dep = gen_from_constant_univ EqRefs.solution_left_dep let solution_right = gen_from_constant_univ EqRefs.solution_right let solution_right_dep = gen_from_constant_univ EqRefs.solution_right_dep end module Builder : BUILDER = BuilderGen(SigmaRefs)(EqRefs) (* ========== Simplification ========== *) (* Some types to define what is a simplification. *) type direction = Left | Right type simplification_step = Deletion of bool (* Force the use of K? *) | Solution of direction | NoConfusion of simplification_rules | NoConfusionOut (* Step for the inversion of [simplify_ind_pack]. *) | NoCycle (* TODO: NoCycle should probably take a direction too. *) | ElimTrue | ElimFalse and simplification_rule = Step of simplification_step | Infer_one | Infer_direction | Infer_many and simplification_rules = (Loc.t option * simplification_rule) list type goal = rel_context * EConstr.types * ESorts.t type open_term = (goal * EConstr.existential) option * EConstr.constr exception CannotSimplify of Pp.t let check_context ~where ?name env evd ctx = let rec check env sigma ctx = match ctx with | [] -> env, sigma | decl :: ctx -> let env, sigma = check env sigma ctx in let open Context.Rel.Declaration in let sigma = match decl with | LocalAssum (na, t) -> let sigma, _ = Typing.sort_of env sigma t in sigma | LocalDef (na, c, t) -> let sigma, _ = Typing.sort_of env sigma t in Typing.check env sigma c t in push_rel decl env, sigma in let _env, sigma = try check env evd ctx with Type_errors.TypeError (env, tyerr) -> anomaly Pp.(str where ++ spc () ++ str "Equations build an ill-typed context: " ++ Printer.pr_rel_context env evd (EConstr.Unsafe.to_rel_context ctx) ++ Himsg.explain_pretype_error env evd (Pretype_errors.TypingError (Pretype_errors.of_type_error tyerr))) | Pretype_errors.PretypeError (env, evd, tyerr) -> anomaly Pp.(str where ++ spc () ++ str "Equations build an ill-typed context: " ++ Printer.pr_rel_context env evd (EConstr.Unsafe.to_rel_context ctx) ++ Himsg.explain_pretype_error env evd tyerr) in let check = Evd.check_constraints evd (snd @@ Evd.universe_context_set sigma) in if not check then anomaly Pp.(str where ++ spc () ++ str "Equations missing constraints in " ++ str (Option.default "(anonymous)" name)) (* Full type-checking + check that constraints are present *) let check_typed ~where ?name env evd c = let sigma, _ = try Typing.type_of env evd c with Type_errors.TypeError (env, tyerr) -> anomaly Pp.(str where ++ spc () ++ str "Equations build an ill-typed term: " ++ Printer.pr_econstr_env env evd c ++ Himsg.explain_pretype_error env evd 
(Pretype_errors.TypingError (Pretype_errors.of_type_error tyerr))) | Pretype_errors.PretypeError (env, evd, tyerr) -> anomaly Pp.(str where ++ spc () ++ str "Equations build an ill-typed term: " ++ Printer.pr_econstr_env env evd c ++ Himsg.explain_pretype_error env evd tyerr) in let check = Evd.check_constraints evd (snd @@ Evd.universe_context_set sigma) in if not check then anomaly Pp.(str where ++ spc () ++ str "Equations missing constraints in " ++ str (Option.default "(anonymous)" name)) let check_goal ~where ?name env sigma (ctx, ty, s) = let () = check_context ~where:(Printf.sprintf "[%s context]" where) ?name env sigma ctx in let env = push_rel_context ctx env in let () = check_typed ~where:(Printf.sprintf "[%s sort]" where) ?name env sigma (mkSort s) in check_typed ~where:(Printf.sprintf "[%s type]" where) ?name env sigma ty module SimpFun : sig type t val make : ?name:string -> (Environ.env -> Evd.evar_map ref -> goal -> open_term * bool * Context_map.context_map) -> t (** Invariant for [glopt, continue, c = f env evd (ctx, ty, u)] Assumes (in current !evd) - env, ctx ⊢ ty : Type@{u} Then (at return time in !evd) - env, ctx ⊢ c : ty - if glopt = Some (ctx', ty', u') then env, ctx' ⊢ ty' : Type@{u'} *) val apply : t -> Environ.env -> Evd.evar_map ref -> goal -> open_term * bool * Context_map.context_map end = struct type t = string option * (Environ.env -> Evd.evar_map ref -> goal -> open_term * bool * Context_map.context_map) let make ?name f = (name, f) let apply (name, f) = if !Equations_common.debug then fun env evd gl -> let () = check_goal ~where:"precondition" ?name env !evd gl in let ((ngl, c), continue, map) = f env evd gl in let () = check_goal ~where:"result" ?name env !evd gl in let () = match ngl with | None -> () | Some (gl, _) -> check_goal ~where:"subgoal" ?name env !evd gl in ((ngl, c), continue, map) else f end type simplification_fun = SimpFun.t let apply_simplification_fun = SimpFun.apply (* Auxiliary functions. *) (* Build a term with an evar out of [constr -> constr] function. Contrarily to the function below, it does not perform type checking in the ambient goal. 
*) let build_term_core (env : Environ.env) (evd : Evd.evar_map ref) (ngl : goal) (f : EConstr.constr -> EConstr.constr) : open_term = let tev = let (ctx', ty', u') = ngl in let env = push_rel_context ctx' env in Equations_common.evd_comb1 (Evarutil.new_evar env) evd ty' in let c = f tev in let ev = EConstr.destEvar !evd tev in Some (ngl, ev), c let checked_applist env evd hd args = evd_comb0 (fun sigma -> Typing.checked_applist env sigma hd args) evd let checked_appvect env evd hd args = evd_comb0 (fun sigma -> Typing.checked_appvect env sigma hd args) evd let build_app_infer_concl (env : Environ.env) (evd : Evd.evar_map ref) ((ctx, _, u) : goal) (f : Names.GlobRef.t) ?(inst:EInstance.t option) (args : EConstr.constr option list) = let tf, ty = match inst with | Some u -> let tf = EConstr.mkRef (f, u) in let auctx = Environ.universes_of_global env f in let univs = UVars.AbstractContext.instantiate (EConstr.EInstance.kind !evd u) auctx in let sigma = Evd.add_constraints !evd univs in let ty = Retyping.get_type_of env sigma tf in evd := sigma; tf, ty | None -> match f with | Names.GlobRef.VarRef var -> assert false | Names.GlobRef.ConstRef cst -> let pcst = Equations_common.evd_comb1 (Evd.fresh_constant_instance env) evd cst in let tf = Constr.mkConstU pcst in let ty = Typeops.type_of_constant_in env pcst in of_constr tf, of_constr ty | Names.GlobRef.IndRef ind -> let pind = Equations_common.evd_comb1 (Evd.fresh_inductive_instance env) evd ind in let tf = Constr.mkIndU pind in let ty = Inductiveops.type_of_inductive env (to_peuniverses pind) in of_constr tf, ty | Names.GlobRef.ConstructRef cstr -> let pcstr = Equations_common.evd_comb1 (Evd.fresh_constructor_instance env) evd cstr in let tf = Constr.mkConstructU pcstr in let ty = Inductiveops.type_of_constructor env (Util.on_snd EInstance.make pcstr) in of_constr tf, ty in let env = push_rel_context ctx env in let prefix, suffix = CList.map_until (fun o -> o) args in let hd = checked_applist env evd tf prefix in let rec aux ty args = match kind !evd ty, args with | Constr.Prod (_, t, c), hd :: tl -> aux (Vars.subst1 hd c) tl | Constr.Prod (_, t, _), [] -> t | Constr.LetIn (_, b, _, c), args -> aux (Vars.subst1 b c) args | Constr.Cast (c, _, _), args -> aux c args | _, _ -> failwith "Unexpected mismatch." in let ty' = aux ty prefix in let ty' = Reductionops.whd_beta env !evd ty' in (* let u = Retyping.get_sort_of env !evd ty' in *) let cont = fun c -> let suffix = CList.map (Option.default c) suffix in checked_applist env evd hd suffix in cont, ty', u (** Same as above but assumes that the arguments are well-typed in [ctx]. This only checks that the application is correct. *) let build_app (env : Environ.env) (evd : Evd.evar_map ref) ((ctx, ty, u) : goal) (f : Names.GlobRef.t) ?(inst:EInstance.t option) (args : EConstr.constr option list) : open_term = let cont, ty', u' = build_app_infer_concl env evd (ctx, ty, u) f ?inst args in build_term_core env evd (ctx, ty', u') cont let transparent_state env = Conv_oracle.get_transp_state (Environ.oracle env) let unif_flags env = let flags = transparent_state env in Evarconv.default_flags_of flags let is_conv (env : Environ.env) (sigma : Evd.evar_map) (ctx : rel_context) (t1 : EConstr.t) (t2 : EConstr.t) : bool = let env = push_rel_context ctx env in match Reductionops.infer_conv ~ts:(transparent_state env) env sigma t1 t2 with | Some _ -> true | None -> false (* Build an open term by substituting the second term for the hole in the * first term. 
*) let compose_term (env : Environ.env) (evd : Evd.evar_map ref) ((h1, c1) : open_term) ((h2, c2) : open_term) : open_term = match h1 with | Some ((ctx1, _, u1), (ev1, _)) -> let EvarInfo ev1_info = Evd.find !evd ev1 in let ev1_ctx = Evd.evar_context ev1_info in (* Keep only the context corresponding to [ctx1]. *) let named_ctx1 = CList.firstn (List.length ctx1) ev1_ctx in (* Now keep only the names and make terms out of them. *) let subst_ctx1 = List.map (fun decl -> let id = Context.Named.Declaration.get_id decl in EConstr.mkVar id) named_ctx1 in (* Finally, substitute the rels in [c2] to get a valid term for [ev1]. *) let c2 = Vars.substl subst_ctx1 c2 in evd := Evarsolve.check_evar_instance Evarconv.(conv_fun evar_conv_x) (unif_flags env) env !evd ev1 c2; evd := Evd.define ev1 c2 !evd; h2, c1 | None -> assert false let compose_res (env : Environ.env) (evd : Evd.evar_map ref) ((t1, b1, s1) : open_term * bool * Context_map.context_map) ((t2, b2, s2) : open_term * bool * Context_map.context_map) : open_term * bool * Context_map.context_map = let t = compose_term env evd t1 t2 in let s = Context_map.compose_subst env ~sigma:!evd s2 s1 in t, b2, s let safe_fun (f : simplification_fun) : simplification_fun = SimpFun.make ~name:"safe_fun" begin fun (env : Environ.env) (evd : Evd.evar_map ref) ((ctx, ty, u) : goal) -> let (_, c), _, _ as res = SimpFun.apply f env evd (ctx, ty, u) in let env = push_rel_context ctx env in evd := Typing.check env !evd c ty; res end (* Applies [g] to the goal, then [f]. *) let compose_fun (f : simplification_fun) (g : simplification_fun) : simplification_fun = SimpFun.make ~name:"compose_fun" begin fun (env : Environ.env) (evd : Evd.evar_map ref) (gl : goal) -> let (h1, _), continue, _ as res1 = SimpFun.apply g env evd gl in if continue then match h1 with | Some (gl', _) -> let res2 = SimpFun.apply f env evd gl' in compose_res env evd res1 res2 | None -> res1 else res1 end let is_block env evd (ctx, ty, glu) = let ty = Reductionops.whd_betaiota env !evd ty in try let _na, b, _ty, b' = destLetIn !evd ty in Equations_common.is_global env !evd (Lazy.force Equations_common.coq_block) b with Constr.DestKO -> false let guard_block (f : simplification_fun) : simplification_fun = SimpFun.make ~name:"guard_block" begin fun (env : Environ.env) (evd : Evd.evar_map ref) ((ctx, _, _) as gl : goal) -> if is_block env evd gl then build_term_core env evd gl (fun c -> c), false, Context_map.id_subst ctx else SimpFun.apply f env evd gl end let identity : simplification_fun = SimpFun.make ~name:"identity" begin fun (env : Environ.env) (evd : Evd.evar_map ref) ((ctx, ty, u as gl) : goal) -> build_term_core env evd gl (fun c -> c), true, Context_map.id_subst ctx end let while_fun (f : simplification_fun) : simplification_fun = SimpFun.make ~name:"while_fun" begin fun (env : Environ.env) (evd : Evd.evar_map ref) (gl : goal) -> let rec aux env evd gl = match SimpFun.apply f env evd gl with | (Some (gl', _), _), true, _ as res1 -> begin try let evd' = ref !evd in let res2 = aux env evd' gl' in let res = compose_res env evd' res1 res2 in evd := !evd'; res with CannotSimplify _ -> res1 end | (_, _), _, _ as res1 -> res1 in try aux env evd gl with CannotSimplify _ -> SimpFun.apply identity env evd gl end (* Check if a type is a given inductive. *) let check_inductive env sigma (ind : Names.inductive) : EConstr.types -> bool = Equations_common.is_global env sigma (Names.GlobRef.IndRef ind) (* Check if a term is a given constructor. 
*) let check_construct env sigma (constr : Names.constructor) : EConstr.constr -> bool = Equations_common.is_global env sigma (Names.GlobRef.ConstructRef constr) (* Check if a term is a given constant. *) let check_constant env sigma (cst : Names.Constant.t) : EConstr.constr -> bool = Equations_common.is_global env sigma (Names.GlobRef.ConstRef cst) (* Deconstruct the goal if it's a product. Otherwise, raise CannotSimplify. *) let check_prod sigma (ty : EConstr.types) : Names.Name.t binder_annot * EConstr.types * EConstr.types = let name, ty1, ty2 = try destProd sigma ty with Constr.DestKO -> raise (CannotSimplify (str "The goal is not a product.")) in name, ty1, ty2 (* Check that the given type is an equality, and some * additional constraints if specified. Raise CannotSimplify if it's not * the case. Otherwise, return its arguments *) let check_equality env sigma ctx ?(equal_terms : bool = false) ?(var_left : bool = false) ?(var_right : bool = false) (ty : EConstr.types) : EConstr.EInstance.t * EConstr.types * EConstr.constr * EConstr.constr = let f, args = Equations_common.decompose_appvect sigma ty in if not (check_inductive env sigma (Lazy.force EqRefs.eq) f) then raise (CannotSimplify (str "The first hypothesis in the goal is not an equality.")); let _, u = EConstr.destInd sigma f in let tA = args.(0) in let tx, ty = args.(1), args.(2) in if equal_terms && not (is_conv env sigma ctx tx ty) then raise (CannotSimplify (str "The first hypothesis in the goal is not an equality \ between identical terms.")); if var_left && not (EConstr.isRel sigma tx) then raise (CannotSimplify (str "The left-hand side of the first hypothesis in the goal is \ not a variable.")); if var_right && not (EConstr.isRel sigma ty) then raise (CannotSimplify (str "The right-hand side of the first hypothesis in the goal is \ not a variable.")); u, tA, tx, ty let decompose_sigma env sigma (t : EConstr.constr) : (EInstance.t * EConstr.types * EConstr.constr * EConstr.constr * EConstr.constr) option = let f, args = Equations_common.decompose_appvect sigma t in if check_construct env sigma (Lazy.force SigmaRefs.sigmaI) f then let _, u = EConstr.destConstruct sigma f in Some (u, args.(0), args.(1), args.(2), args.(3)) else None (** All simplifications are wrapped in with_retry so that their preconditions can be satisfied up-to head-normalization of the goal's head type. *) let hnf_goal ~(reduce_equality:bool) = fun (env : Environ.env) (evd : Evd.evar_map ref) ((ctx, ty, u) : goal) -> let glenv = push_rel_context ctx env in let reduce c = Tacred.hnf_constr glenv !evd c in (* Head-reduce the goal *) let ty = reduce ty in (* We try to reduce further when the goal is a product. *) let ty = try let name, ty1, ty2 = destProd !evd ty in let ty1 = reduce ty1 in let ty = mkProd (name, ty1, ty2) in (* If the head is an equality, reduce it. 
*) if reduce_equality then try let equ, tA, t1, t2 = check_equality env !evd ctx ty1 in let t1 = reduce t1 in let t2 = reduce t2 in let env = push_rel_context ctx env in let ty1 = checked_appvect env evd (Builder.equ equ) [| tA; t1; t2 |] in EConstr.mkProd (name, ty1, ty2) with CannotSimplify _ -> ty else ty with Constr.DestKO -> ty in (ctx, ty, u) let hnf ~reduce_equality : simplification_fun = SimpFun.make ~name:"hnf" begin fun (env : Environ.env) (evd : Evd.evar_map ref) ((ctx, ty, u as gl) : goal) -> let gl' = hnf_goal ~reduce_equality env evd gl in build_term_core env evd gl' (fun c -> c), true, Context_map.id_subst ctx end let with_retry (f : simplification_fun) : simplification_fun = SimpFun.make ~name:"with_retry" begin fun (env : Environ.env) (evd : Evd.evar_map ref) ((ctx, ty, u) : goal) -> try (* Be careful with the [evar_map] management. *) let evd' = ref !evd in let res = SimpFun.apply f env evd' (ctx, ty, u) in evd := !evd'; res with CannotSimplify _ -> (*msg_info (str "Reduce!");*) SimpFun.apply f env evd (hnf_goal ~reduce_equality:true env evd (ctx, ty, u)) end (* Simplification functions to handle each step. *) (* Any of these can throw a CannotSimplify exception which explains why the * rule cannot apply. *) (* This function is not accessible by the user for now. It is used to project * (if needed) the first component of an equality between sigmas. It will not * do anything if it fails. *) let remove_one_sigma ?(only_nondep=false) () : simplification_fun = SimpFun.make ~name:"remove_one_sigma" begin fun (env : Environ.env) (evd : Evd.evar_map ref) ((ctx, ty, glu) : goal) -> let name, ty1, ty2 = check_prod !evd ty in let equ, _, t1, t2 = check_equality env !evd ctx ty1 in let sigu, f, args = match decompose_sigma env !evd t1, decompose_sigma env !evd t2 with | Some (sigu, tA, tB, tt, tp), Some (_, _, _, tu, tq) -> (* Determine the degree of dependency. *) if Vars.noccurn !evd 1 ty2 then begin (* No dependency in the goal, but maybe there is a dependency in the pair itself. *) try let name', _, tBbody = destLambda !evd tB in if Vars.noccurn !evd 1 tBbody then (* No dependency whatsoever. *) let tsimpl_sigma = Names.GlobRef.ConstRef (Lazy.force EqRefs.simpl_sigma) in let tP = Termops.pop tBbody in let tB = Termops.pop ty2 in let args = [Some tA; Some tP; Some tB; Some tt; Some tu; Some tp; Some tq; None] in sigu, tsimpl_sigma, args else raise Constr.DestKO with | Constr.DestKO -> if only_nondep then raise (CannotSimplify (str"Cannot simplify dependent pair")) else (* Dependency in the pair, but not in the goal. *) let tsimpl_sigma = Names.GlobRef.ConstRef (Lazy.force EqRefs.simpl_sigma_dep) in let tP = tB in let tB = Termops.pop ty2 in let args = [Some tA; Some tP; Some tB; Some tt; Some tu; Some tp; Some tq; None] in sigu, tsimpl_sigma, args end else begin try let name', _, tBbody = destLambda !evd tB in if Vars.noccurn !evd 1 tBbody then (* Dependency in the goal, but not in the pair. 
*) let tsimpl_sigma = Names.GlobRef.ConstRef (Lazy.force EqRefs.simpl_sigma_nondep_dep) in let tP = Termops.pop tBbody in let tB = EConstr.mkLambda (name, ty1, ty2) in let args = [Some tA; Some tP; Some tt; Some tu; Some tp; Some tq; Some tB; None] in sigu, tsimpl_sigma, args else raise Constr.DestKO with | Constr.DestKO -> (* Full dependency *) if only_nondep then raise (CannotSimplify (str"Cannot simplify dependent pair")) else let tsimpl_sigma = Names.GlobRef.ConstRef (Lazy.force EqRefs.simpl_sigma_dep_dep) in let tP = tB in let tB = EConstr.mkLambda (name, ty1, ty2) in let args = [Some tA; Some tP; Some tt; Some tu; Some tp; Some tq; Some tB; None] in sigu, tsimpl_sigma, args end | _, _ -> raise (CannotSimplify (str "If you see this, please report.")) in let sigma, inst, glu = Equations_common.instance_of env !evd ~argu:sigu glu in evd := sigma; build_app env evd (ctx, ty, glu) f ~inst args, true, Context_map.id_subst ctx end let remove_sigma = while_fun (with_retry (remove_one_sigma ())) let deletion ~(force:bool) : simplification_fun = SimpFun.make ~name:"deletion" begin fun (env : Environ.env) (evd : Evd.evar_map ref) ((ctx, ty, glu) : goal) -> let name, ty1, ty2 = check_prod !evd ty in let u, tA, tx, _ = check_equality env !evd ctx ~equal_terms:true ty1 in let subst = Context_map.id_subst ctx in if Vars.noccurn !evd 1 ty2 then (* The goal does not depend on the equality, we can just eliminate it. *) build_term_core env evd (ctx, Termops.pop ty2, glu) (fun c -> EConstr.mkLambda (name, ty1, Vars.lift 1 c)), true, subst else let tB = EConstr.mkLambda (name, ty1, ty2) in try if not !Equations_common.simplify_withUIP then raise Not_found else (* We will try to find an instance of UIP for the type [A]. *) let tsimpl_uip = Names.GlobRef.ConstRef (Lazy.force EqRefs.simpl_uip) in let uip_ty = EConstr.mkApp (Builder.uip evd, [| tA |]) in let tuip = let env = push_rel_context ctx env in Equations_common.evd_comb1 (Typeclasses.resolve_one_typeclass env) evd uip_ty in let args = [Some tA; Some tuip; Some tx; Some tB; None] in build_app env evd (ctx, ty, glu) tsimpl_uip args, true, subst with Not_found -> let env = push_rel_context ctx env in raise (CannotSimplify (str "[deletion] Cannot simplify without UIP on type " ++ Printer.pr_econstr_env env !evd tA ++ (if not !Equations_common.simplify_withUIP then str " and the 'Equations With UIP' flag is off" else mt()))) end let deletion ~(force:bool) = with_retry (deletion ~force) let solution ~(dir:direction) : simplification_fun = SimpFun.make ~name:"solution" begin fun (env : Environ.env) (evd : Evd.evar_map ref) ((ctx, ty, glu) : goal) -> let var_left = match dir with Left -> true | Right -> false in let var_right = not var_left in let name, ty1, ty2 = check_prod !evd ty in let equ, tA, tx, tz = check_equality env !evd ctx ~var_left ~var_right ty1 in let trel, term = if var_left then tx, tz else tz, tx in let rel = EConstr.destRel !evd trel in let () = if Int.Set.mem rel (Context_map.dependencies_of_term ~with_red:true env !evd ctx term rel) then raise (CannotSimplify (str "[solution] The variable appears on both sides of the equality.")) in (* let () = equations_debug (fun () -> str "solution on " ++ Printer.pr_econstr_env (push_rel_context ctx env) !evd ty) in *) let subst, rev_subst = Context_map.new_strengthen env !evd ctx rel term in let ctx' = subst.Context_map.src_ctx in let trel' = Context_map.mapping_constr !evd subst trel in let rel' = EConstr.destRel !evd trel' in let term' = Context_map.mapping_constr !evd subst term in let tA' = 
Context_map.mapping_constr !evd subst tA in let ty1' = Context_map.mapping_constr !evd subst ty1 in (* We will have to generalize everything after [x'] in the new * context. *) let after', decl', before' = Context_map.split_context (pred rel') ctx' in let name' = Context.Rel.Declaration.get_annot decl' in (* let after, _, before = Context_map.split_context rel ctx in*) (* Select the correct solution lemma. *) let nondep = Vars.noccurn !evd 1 ty2 in let uinst, glu' = (* If the equality is not polymorphic, the lemmas will be monomorphic as well *) if EConstr.EInstance.is_empty equ then equ, glu else let sigma, equ, glu = Equations_common.instance_of env !evd ~argu:equ glu in evd := sigma; equ, glu in let tsolution = begin match var_left, nondep with | false, false -> Builder.solution_right_dep | false, true -> Builder.solution_right | true, false -> Builder.solution_left_dep | true, true -> Builder.solution_left end uinst in let tB', body = let body = Context_map.mapping_constr !evd subst ty in (* Right now, [body] has an equality at the head that we want * to move, in some sense. *) let _, _, body = EConstr.destProd !evd body in if nondep then let body = Termops.pop body in let body' = EConstr.it_mkProd_or_LetIn body after' in (* [body] is a term in the context [decl' :: before'], * whereas [tA'] lives in [ctx']. *) EConstr.mkLambda (name', Vars.lift (-rel') tA', body'), body else (* We make some room for the equality. *) let body = Vars.liftn 1 (succ rel') body in let body = Vars.subst1 (EConstr.mkRel rel') body in let after' = Equations_common.lift_rel_context 1 after' in let body' = EConstr.it_mkProd_or_LetIn body after' in let body' = EConstr.mkLambda (name, Vars.lift (1-rel') ty1', body') in EConstr.mkLambda (name', Vars.lift (-rel') tA', body'), body in (* [tB'] is a term in the context [before']. We want it in [ctx']. *) let tB' = Vars.lift rel' tB' in let targs' = Equations_common.extended_rel_vect 1 after' in (* [ctx''] is just [ctx'] where we replaced the substituted variable. *) let ctx'' = Equations_common.subst_in_ctx rel' term' ctx' in let after'', _ = CList.chop (pred rel') ctx'' in let ty'' = if nondep then Vars.substnl [Vars.lift (-rel') term'] (pred rel') body else let teq_refl = EConstr.mkApp (Builder.eq_refl equ, [| tA'; term' |]) in Vars.substnl [Vars.lift (-rel') teq_refl; Vars.lift (-rel') term'] (pred rel') body in let lsubst = Context_map.single_subst env !evd rel' (Context_map.pat_of_constr env !evd term') ctx' in let subst = Context_map.compose_subst env ~sigma:!evd lsubst subst in let f = fun c -> (* [c] is a term in the context [ctx'']. *) let c = EConstr.it_mkLambda_or_LetIn c after'' in (* [c] is a term in the context [before']. *) let c = Vars.lift rel' c in (* [c] is a term in the context [ctx']. *) let env = push_rel_context ctx' env in let c = if nondep then checked_appvect env evd tsolution [| tA'; tB'; term'; c; trel' |] else checked_appvect env evd tsolution [| tA'; term'; tB'; c; trel' |] in (* We make some room for the application of the equality... *) let env = push_rel (LocalAssum (name, ty1')) env in let c = Vars.lift 1 c in let c = checked_appvect env evd c [| EConstr.mkRel 1 |] in (* [targs'] are arguments in the context [eq_decl :: ctx']. *) let c = checked_appvect env evd c targs' in (* [ty1'] is the type of the equality in [ctx']. *) let c = EConstr.mkLambda (name, ty1', c) in (* And now we recover a term in the context [ctx]. 
*) Context_map.mapping_constr !evd rev_subst c in build_term_core env evd (ctx'', ty'', glu') f, true, subst end let whd_all env sigma t = let ts = transparent_state env in let flags = RedFlags.red_add_transparent RedFlags.all ts in Reductionops.clos_whd_flags flags env sigma t let nf_all env sigma t = let ts = transparent_state env in let flags = RedFlags.red_add_transparent RedFlags.all ts in Reductionops.clos_norm_flags flags env sigma t let pre_solution ~(dir:direction) : simplification_fun = SimpFun.make ~name:"pre_solution" begin fun (env : Environ.env) (evd : Evd.evar_map ref) ((ctx, ty, glu) : goal) -> let var_left = match dir with Left -> true | Right -> false in let var_right = not var_left in let name, ty1, ty2 = check_prod !evd ty in let _equ, tA, tx, tz = check_equality env !evd ctx ~var_left ~var_right ty1 in let trel, term = if var_left then tx, tz else tz, tx in let rel = EConstr.destRel !evd trel in let () = try let decl = lookup_rel rel ctx in if Context.Rel.Declaration.is_local_assum decl then () else raise (CannotSimplify (str "[solution] cannot apply to a let-bound variable")) with Not_found -> assert false in (* Check dependencies in both tA and term *) if not (Int.Set.mem rel (Context_map.dependencies_of_term ~with_red:false env !evd ctx (mkApp (tA, [| term |])) rel)) then SimpFun.apply identity env evd (ctx, ty, glu) else let tA = nf_all (push_rel_context ctx env) !evd tA in let term = whd_all (push_rel_context ctx env) !evd term in if Int.Set.mem rel (Context_map.dependencies_of_term ~with_red:false env !evd ctx (mkApp (tA, [|term|])) rel) then raise (CannotSimplify (str "[solution] cannot remove dependency in the variable ")) else let f c = c in let eqf, _ = Equations_common.decompose_appvect !evd ty1 in let ty1 = let env = push_rel_context ctx env in if var_left then checked_appvect env evd eqf [| tA; trel; term |] else checked_appvect env evd eqf [| tA; term; trel |] in let ty' = mkProd (name, ty1, ty2) in build_term_core env evd (ctx, ty', glu) f, true, Context_map.id_subst ctx end let pre_solution ~(dir:direction) = with_retry (pre_solution ~dir) let is_construct_sigma_2 env sigma f = let term = match decompose_sigma env sigma f with | Some (_, _, _, _, t) -> t | None -> f in let head, _ = EConstr.decompose_app sigma term in EConstr.isConstruct sigma head (* Auxiliary steps for noConfusion. *) let maybe_pack : simplification_fun = SimpFun.make ~name:"maybe_pack" begin fun (env : Environ.env) (evd : Evd.evar_map ref) ((ctx, ty, glu) : goal) -> let name, ty1, ty2 = check_prod !evd ty in let equ, tA, t1, t2 = check_equality env !evd ctx ty1 in if not (is_construct_sigma_2 env !evd t1 && is_construct_sigma_2 env !evd t2) then raise (CannotSimplify (str "This is not an equality between constructors.")); let indty = try Inductiveops.find_rectype (push_rel_context ctx env) !evd tA with Not_found -> raise (CannotSimplify (str "This is not an equality between constructors.")); in let has_noconf () = let noconf_ty = EConstr.mkApp (Builder.noConfusion evd, [| tA |]) in let env = push_rel_context ctx env in try let _noconf = Equations_common.evd_comb1 (Typeclasses.resolve_one_typeclass env) evd noconf_ty in true with Not_found -> false in let indfam, args = Inductiveops.dest_ind_type indty in if CList.is_empty args then SimpFun.apply identity env evd (ctx, ty, glu) else if has_noconf () then SimpFun.apply identity env evd (ctx, ty, glu) else begin (* We need to apply [simplify_ind_pack]. 
*) let ind, params = Equations_common.dest_ind_family indfam in let evd', tBfull, _, _, valsig, _, _, tA' = Sigma_types.build_sig_of_ind env !evd ind in evd := evd'; let tA' = Vars.substl (List.rev params) tA' in (* We will try to find an instance of UIP for the type [A]. *) let eqdec_ty = EConstr.mkApp (Builder.uip evd, [| tA' |]) in let tdec = let env = push_rel_context ctx env in try Equations_common.evd_comb1 (Typeclasses.resolve_one_typeclass env) evd eqdec_ty with Not_found -> raise (CannotSimplify (str "[noConfusion] Cannot simplify without UIP on type " ++ Printer.pr_econstr_env env !evd tA' ++ str " or NoConfusion for family " ++ Printer.pr_inductive env (fst ind))) in if not !Equations_common.simplify_withUIP then (let env = push_rel_context ctx env in raise (CannotSimplify (str "[noConfusion] Trying to use a non-definitional noConfusion rule on " ++ Printer.pr_econstr_env env !evd tA ++ str ", which does not have a [NoConfusionHom] instance." ++ spc () ++ str "Either [Derive NoConfusionHom for " ++ Printer.pr_inductive env (fst ind) ++ str "], or [Derive NoConfusion for " ++ Printer.pr_inductive env (fst ind) ++ str "] if it requires uniqueness of identity proofs and" ++ str " enable [Equations With UIP] to allow this"))); let tx = let _, _, _, tx, _ = Option.get (decompose_sigma env !evd valsig) in Vars.substl (CList.rev args) (Termops.pop tx) in let tsimplify_ind_pack = Names.GlobRef.ConstRef (Lazy.force EqRefs.simplify_ind_pack) in let tB = Reductionops.beta_applist !evd (tBfull, params) in (* FIXME Inserted this to simplify tB when it has the shape: {index & let H := index in foo H} Is this correct? *) let tB = let env = push_rel_context ctx env in Tacred.simpl env !evd tB in let tG = EConstr.mkLambda (name, ty1, ty2) in let args = [Some tA'; Some tdec; Some tB; Some tx; Some t1; Some t2; Some tG; None] in (* Playing a bit with the fire, [t1], [t2] and [tG] are clearly well-typed, but it's not clear for the others. It seems to work in practice. 
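* The trailing [None] is the argument that [build_app] leaves as a hole; its inferred type becomes the goal for the remaining simplification steps.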
*) build_app env evd (ctx, ty, glu) tsimplify_ind_pack args, true, Context_map.id_subst ctx end end let maybe_pack = with_retry maybe_pack let apply_noconf : simplification_fun = SimpFun.make ~name:"apply_noconf" begin fun (env : Environ.env) (evd : Evd.evar_map ref) ((ctx, ty, glu) : goal) -> let name, ty1, ty2 = check_prod !evd ty in let equ, tA, t1, t2 = check_equality env !evd ctx ty1 in if not (is_construct_sigma_2 env !evd t1 && is_construct_sigma_2 env !evd t2) then raise (CannotSimplify (str "This is not an equality between constructors.")); let noconf_ty = EConstr.mkApp (Builder.noConfusion evd, [| tA |]) in let tnoconf = let env = push_rel_context ctx env in try Equations_common.evd_comb1 (Typeclasses.resolve_one_typeclass env) evd noconf_ty with Not_found -> raise (CannotSimplify (str "[noConfusion] Cannot find an instance of NoConfusion for type " ++ Printer.pr_econstr_env env !evd tA)) in let tapply_noconf = Names.GlobRef.ConstRef (Lazy.force EqRefs.apply_noConfusion) in let tB = EConstr.mkLambda (name, ty1, ty2) in let args = [Some tA; Some tnoconf; Some t1; Some t2; Some tB; None] in let inst, glu' = (* If the equality is not polymorphic, the lemmas will be monomorphic as well *) if EConstr.EInstance.is_empty equ then None, glu else let sigma, equ, glu = Equations_common.instance_of env !evd ~argu:equ glu in evd := sigma; Some equ, glu in build_app env evd (ctx, ty, glu') tapply_noconf ?inst args, true, Context_map.id_subst ctx end let noConfusion = compose_fun (compose_fun (hnf ~reduce_equality:false) apply_noconf) maybe_pack let simplify_ind_pack_inv : simplification_fun = SimpFun.make ~name:"simplify_ind_pack" begin fun (env : Environ.env) (evd : Evd.evar_map ref) ((ctx, ty, glu) : goal) -> (* FIXME Can't ignore _all_ errors? *) try let reduce c = let env = push_rel_context ctx env in Tacred.hnf_constr env !evd c (* FIXME? due to refolding this could be ill-typed *) in let try_decompose ty = let f, args = Equations_common.decompose_appvect !evd ty in if not (check_constant env !evd (Lazy.force EqRefs.opaque_ind_pack_eq_inv) f) || not (Int.equal 8 (Array.length args)) then raise (CannotSimplify (str "Expected a full application of [opaque_ind_pack_eq_inv]. Maybe\ you did not solve completely a NoConfusion step?")); f, args in let (f, args), ty = try try_decompose ty, ty with CannotSimplify _ -> let ty = reduce ty in try_decompose ty, ty in let tA = args.(0) in let teqdec = args.(1) in let tB = args.(2) in let tx = args.(3) in let tp = args.(4) in let tG = args.(6) in let teq = args.(7) in (* Check that [teq] is [eq_refl]. 
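* The inversion lemma only applies to a proof by reflexivity, which is what a fully solved NoConfusion step should have produced here.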
*) let head, _ = decompose_app !evd teq in if not (is_global env !evd (Names.GlobRef.ConstructRef (Lazy.force EqRefs.eq_refl)) head) then raise (CannotSimplify (str "[opaque_ind_pack_eq_inv] Anomaly: should be applied to a reflexivity proof.")); let tsimplify_ind_pack_inv = Names.GlobRef.ConstRef (Lazy.force EqRefs.simplify_ind_pack_inv) in let args = [Some tA; Some teqdec; Some tB; Some tx; Some tp; Some tG; None] in build_app env evd (ctx, ty, glu) tsimplify_ind_pack_inv args, true, Context_map.id_subst ctx with CannotSimplify _ -> SimpFun.apply identity env evd (ctx, ty, glu) end let noCycle : simplification_fun = SimpFun.make ~name:"noCycle" begin fun (env : Environ.env) (evd : Evd.evar_map ref) ((ctx, ty, glu) : goal) -> let name, ty1, ty2 = check_prod !evd ty in let equ, tA, t1, t2 = check_equality env !evd ctx ty1 in let isct1 = is_construct_sigma_2 env !evd t1 in let isct2 = is_construct_sigma_2 env !evd t2 in if not (isct1 || isct2) then raise (CannotSimplify (str "This is not an equality between constructors.")); let nocycle_ty = EConstr.mkApp (Builder.noCycle evd, [| tA |]) in let tnocycle = let env = push_rel_context ctx env in try Equations_common.evd_comb1 (Typeclasses.resolve_one_typeclass env) evd nocycle_ty with Not_found -> raise (CannotSimplify (str "[noConfusion] Cannot find an instance of NoCycle for type " ++ Printer.pr_econstr_env env !evd tA)) in let tapply_nocycle = if isct1 then Names.GlobRef.ConstRef (Lazy.force EqRefs.apply_noCycle_right) else Names.GlobRef.ConstRef (Lazy.force EqRefs.apply_noCycle_left) in let tB = EConstr.mkLambda (name, ty1, ty2) in let args = [Some tA; Some tnocycle; Some t1; Some t2; Some tB; None] in let inst, glu' = (* If the equality is not polymorphic, the lemmas will be monomorphic as well *) if EConstr.EInstance.is_empty equ then equ, glu else let sigma, equ, glu = Equations_common.instance_of env !evd ~argu:equ glu in evd := sigma; equ, glu in let cont, nocycle, glu' = build_app_infer_concl env evd (ctx, ty, glu) tapply_nocycle ~inst args in let subst = Context_map.id_subst ctx in try let env = push_rel_context ctx env in let prf = Equations_common.evd_comb1 (Typeclasses.resolve_one_typeclass env) evd nocycle in (None, cont prf), true, subst with Not_found -> (* We inform the user of what is missing *) raise (CannotSimplify (str "[noCycle] Cannot infer a proof of " ++ Printer.pr_econstr_env (push_rel_context ctx env) !evd nocycle)) end let noCycle = with_retry noCycle let elim_true : simplification_fun = SimpFun.make ~name:"elim_true" begin fun (env : Environ.env) (evd : Evd.evar_map ref) ((ctx, ty, glu) : goal) -> let name, ty1, ty2 = check_prod !evd ty in if not (check_inductive env !evd (Lazy.force EqRefs.one) ty1) then raise (CannotSimplify (str "[elim_true] The first hypothesis is not the unit type.")); let subst = Context_map.id_subst ctx in (* Check if the goal is dependent or not. *) if Vars.noccurn !evd 1 ty2 then (* Not dependent, we can just eliminate True. *) build_term_core env evd (ctx, Termops.pop ty2, glu) (fun c -> EConstr.mkLambda (name, ty1, Vars.lift 1 c)), true, subst else (* Apply the dependent induction principle for True. 
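* The goal is abstracted as the predicate [tB] and discharged with [one_ind_dep]; a universe instance is generated only when that lemma is polymorphic.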
*) let tB = EConstr.mkLambda (name, ty1, ty2) in let tone_ind = Names.GlobRef.ConstRef (Lazy.force EqRefs.one_ind_dep) in let inst, glu' = (* If the equality is not polymorphic, the lemmas will be monomorphic as well *) if not (Global.is_polymorphic tone_ind) then EConstr.EInstance.empty, glu else let sigma, equ, glu = Equations_common.instance_of env !evd glu in evd := sigma; equ, glu in let args = [Some tB; None] in build_app env evd (ctx, ty, glu') tone_ind ~inst args, true, subst end let elim_true = with_retry elim_true let elim_false : simplification_fun = SimpFun.make ~name:"elim_false" begin fun (env : Environ.env) (evd : Evd.evar_map ref) ((ctx, ty, glu) : goal) -> let name, ty1, ty2 = check_prod !evd ty in if not (check_inductive env !evd (Lazy.force EqRefs.zero) ty1) then raise (CannotSimplify (str "[elim_true] The first hypothesis is not the empty type.")); let subst = Context_map.id_subst ctx in let tB, tzero_ind = (* Check if the goal is dependent or not. *) if Vars.noccurn !evd 1 ty2 then let tB = Termops.pop ty2 in let tzero_ind = Names.GlobRef.ConstRef (Lazy.force EqRefs.zero_ind) in tB, tzero_ind else let tB = EConstr.mkLambda (name, ty1, ty2) in let tzero_ind = Names.GlobRef.ConstRef (Lazy.force EqRefs.zero_ind_dep) in tB, tzero_ind in let inst, glu' = (* If the equality is not polymorphic, the lemmas will be monomorphic as well *) if not (Global.is_polymorphic tzero_ind) then EConstr.EInstance.empty, glu else let sigma, equ, glu = Equations_common.instance_of env !evd glu in evd := sigma; equ, glu in let c = checked_appvect (push_rel_context ctx env) evd (EConstr.mkRef (tzero_ind, inst)) [| tB |] in (None, c), true, subst end let elim_false = with_retry elim_false (* Inference mechanism. *) let infer_step ?(loc:Loc.t option) ~(isSol:bool) (env : Environ.env) (evd : Evd.evar_map ref) ((ctx, ty, glu) : goal) : simplification_step = (* The goal does not have to be a product, but if it's not, it has to be * an application of [opaque_ind_pack_eq_inv]. *) let f, _ = Equations_common.decompose_appvect !evd ty in if check_constant env !evd (Lazy.force EqRefs.opaque_ind_pack_eq_inv) f then NoConfusionOut else begin let name, ty1, ty2 = check_prod !evd ty in (* First things first, maybe the head of the goal is False or True. *) if check_inductive env !evd (Lazy.force EqRefs.zero) ty1 then ElimFalse else if check_inductive env !evd (Lazy.force EqRefs.one) ty1 then ElimTrue else (* We need to destruct the equality at the head to analyze it. *) let equ, tA, tu, tv = check_equality env !evd ctx ty1 in (* FIXME What is the correct way to do it? *) let choose u v = if u < v then Left else Right in (* If the user wants a solution, we need to respect his wishes. 
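* When both sides are variables we let [choose] pick a direction, otherwise we solve for whichever side is a variable; the occurrence check performed in the general case below is skipped.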
*) if isSol then if EConstr.isRel !evd tu && EConstr.isRel !evd tv then Solution (choose (EConstr.destRel !evd tu) (EConstr.destRel !evd tv)) else if EConstr.isRel !evd tu then Solution Left else if EConstr.isRel !evd tv then Solution Right else raise (CannotSimplify (str "Neither side of the equality is a variable.")) else begin let check_occur trel term = let rel = EConstr.destRel !evd trel in not (Int.Set.mem rel (Context_map.dependencies_of_term ~with_red:true env !evd ctx term rel)) in if EConstr.isRel !evd tu && EConstr.isRel !evd tv && check_occur tu tv then Solution (choose (EConstr.destRel !evd tu) (EConstr.destRel !evd tv)) else if EConstr.isRel !evd tu && check_occur tu tv then Solution Left else if EConstr.isRel !evd tv && check_occur tv tu then Solution Right else let check_ind t = let f, _ = EConstr.decompose_app !evd t in try ignore (Inductive.find_rectype env (to_constr ~abort_on_undefined_evars:false !evd f)); true with Not_found -> false in let check_construct t = let env = push_rel_context ctx env in let t = Tacred.hnf_constr env !evd t in let f, _ = EConstr.decompose_app !evd t in EConstr.isConstruct !evd f in if check_ind tA && check_construct tu && check_construct tv then NoConfusion [loc, Infer_many] else if is_conv env !evd ctx tu tv then Deletion false (* Never force K. *) else (* Check if [u] occurs in [t] under only constructors. *) (* For now we don't care about the type of these constructors. *) (* Note that we also don't need to care about binders, since we can only go through constructors and nothing else. *) let check_occur t u = let eq t = eq_constr !evd t u in let rec aux t = if eq t then raise Termops.Occur; let f, args = EConstr.decompose_app !evd t in if EConstr.isConstruct !evd f then Array.iter aux args in try aux t; false with Termops.Occur -> true in if check_occur tu tv || check_occur tv tu then NoCycle else raise (CannotSimplify (str "Could not infer a simplification step.")) end end let or_fun (f : simplification_fun) (g : simplification_fun) : simplification_fun = SimpFun.make ~name:"or_fun" begin fun (env : Environ.env) (evd : Evd.evar_map ref) (gl : goal) -> let evd0 = !evd in try SimpFun.apply f env evd gl with CannotSimplify _ -> evd := evd0; SimpFun.apply g env evd gl end let or_fun_e1 (f : simplification_fun) (g : simplification_fun) : simplification_fun = SimpFun.make ~name:"or_fun_e1" begin fun (env : Environ.env) (evd : Evd.evar_map ref) (gl : goal) -> let evd0 = !evd in try SimpFun.apply f env evd gl with CannotSimplify e -> evd := evd0; try SimpFun.apply g env evd gl with CannotSimplify _ -> evd := evd0; raise (CannotSimplify e) end let _expand_many rule env evd ((ctx, ty, glu) : goal) : simplification_rules = (* FIXME: maybe it's too brutal/expensive? 
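* We reduce the head equality and count how deeply its type is nested in sigma types, returning one extra copy of [rule] for every layer so that the whole telescope gets simplified.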
*) let ty = whd_all env !evd ty in let _, ty, _ = check_prod !evd ty in try let ty = whd_all env !evd ty in let equ, ty, _, _ = check_equality env !evd ctx ty in let rec aux ty acc = let ty = Reductionops.whd_betaiotazeta env !evd ty in let f, args = Equations_common.decompose_appvect !evd ty in if check_inductive env !evd (Lazy.force SigmaRefs.sigma) f then let next_ty = Reductionops.beta_applist !evd (args.(1), [EConstr.mkRel 1]) in aux next_ty (rule :: acc) else acc in aux ty [rule] with CannotSimplify _ -> [rule] let check_block_notprod : simplification_fun = SimpFun.make ~name:"check_block_notprod" begin fun (env : Environ.env) (evd : Evd.evar_map ref) ((ctx, ty, glu as gl) : goal) -> try let _ = destLetIn !evd ty in SimpFun.apply identity env evd gl with Constr.DestKO -> try let env = push_rel_context ctx env in let ty = whd_all env !evd ty in let _na, _ty, _ty' = EConstr.destProd !evd ty in raise (CannotSimplify (str"a product")) with Constr.DestKO -> SimpFun.apply identity env evd gl end let rec apply_noConfusions () = SimpFun.make ~name:"apply_noConfusions" begin fun env evd goal -> SimpFun.apply (guard_block (or_fun noConfusion (compose_fun (apply_noConfusions ()) (remove_one_sigma ())))) env evd goal end (* Execution machinery. *) let rec execute_step : simplification_step -> simplification_fun = function | Deletion force -> deletion ~force | Solution dir -> compose_fun (solution ~dir:dir) (pre_solution ~dir:dir) | NoConfusion rules -> compose_fun simplify_ind_pack_inv (compose_fun (simplify rules) noConfusion) | NoConfusionOut -> simplify_ind_pack_inv | NoCycle -> noCycle | ElimTrue -> elim_true | ElimFalse -> elim_false and simplify_one ((loc, rule) : Loc.t option * simplification_rule) : simplification_fun = let handle_error f = SimpFun.make ~name:"handle_error" begin fun env evd gl -> try SimpFun.apply f env evd gl with CannotSimplify err -> Equations_common.user_err_loc (loc, err) end in let wrap get_step = let f = SimpFun.make ~name:"wrap" begin fun env evd gl -> let step = get_step env evd gl in SimpFun.apply (execute_step step) env evd gl end in let fr = compose_fun f remove_sigma in with_retry (or_fun f fr) in let wrap_handle get_step = let f = wrap get_step in handle_error f in match rule with | Infer_many -> let rec aux () = SimpFun.make ~name:"aux" begin fun env evd gl -> let first = guard_block (or_fun (apply_noConfusions ()) (or_fun_e1 ((wrap (infer_step ?loc ~isSol:false))) (remove_one_sigma ~only_nondep:true ()))) in SimpFun.apply (compose_fun (or_fun check_block_notprod (aux ())) first) env evd gl end in handle_error (aux ()) | Step step -> wrap_handle (fun _ _ _ -> step) | Infer_one -> handle_error (or_fun (apply_noConfusions ()) (or_fun_e1 (wrap (infer_step ?loc ~isSol:false)) (remove_one_sigma ~only_nondep:true ()))) | Infer_direction -> wrap_handle (infer_step ?loc ~isSol:true) and simplify (rules : simplification_rules) : simplification_fun = let funs = List.rev_map simplify_one rules in match funs with | [] -> identity | hd :: tl -> List.fold_left compose_fun hd tl let univ_of_goal env sigma ty = let s = Retyping.get_sort_of env sigma ty in s let simplify_tac (rules : simplification_rules) : unit Proofview.tactic = Proofview.Goal.enter (fun gl -> let env = Proofview.Goal.env gl in let sigma = Proofview.Goal.sigma gl in let concl = Proofview.Goal.concl gl in let glu = univ_of_goal env sigma concl in let hyps = Proofview.Goal.hyps gl in let env = Environ.reset_context env in (* Keep aside the section variables. 
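* Hypotheses are split into local ones and section ones: the section variables are pushed back into the reset environment as a named context, while the local hypotheses are turned into a rel context just below.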
*) let loc_hyps, sec_hyps = CList.split_when (fun decl -> let id = Context.Named.Declaration.get_id decl in Termops.is_section_variable (Global.env ()) id) hyps in let env = push_named_context sec_hyps env in (* We want to work in a [rel_context], not a [named_context]. *) let ctx, subst = Equations_common.rel_of_named_context sigma loc_hyps in let _, rev_subst, _ = let err () = assert false in Equations_common.named_of_rel_context ~keeplets:true err ctx in (* We also need to convert the goal for it to be well-typed in * the [rel_context]. *) let ty = Vars.subst_vars sigma subst concl in (* [ty'] is the expected type of the hole in the term, under the * context [ctx']. *) Refine.refine ~typecheck:true (fun evars -> let evd = ref evars in let (_, c), _, _ = SimpFun.apply (simplify rules) env evd (ctx, ty, glu) in let c = Vars.substl (List.rev rev_subst) c in (!evd, c))) (* Printing functions. *) let pr_simplification_step : simplification_step -> Pp.t = function | Deletion false -> str "-" | Deletion true -> str "-!" | Solution (Left) -> str "->" | Solution (Right) -> str "<-" | NoConfusion rules -> str "$" | NoConfusionOut -> str "NoConfusionOut" | NoCycle -> str "NoCycle" | ElimTrue -> str "ElimTrue" | ElimFalse -> str "ElimFalse" let pr_simplification_rule ((_, rule) : Loc.t option * simplification_rule) : Pp.t = match rule with | Infer_one -> str "?" | Infer_direction -> str "<->" | Infer_many -> str "*" | Step step -> pr_simplification_step step let pr_simplification_rules : simplification_rules -> Pp.t = prlist_with_sep spc pr_simplification_rule Coq-Equations-1.3.1-8.20/src/simplify.mli000066400000000000000000000051701463127417400177330ustar00rootroot00000000000000(* Some types to define what is a simplification. *) type direction = Left | Right type simplification_step = Deletion of bool (* Force the use of K? *) | Solution of direction | NoConfusion of simplification_rules | NoConfusionOut | NoCycle | ElimTrue | ElimFalse and simplification_rule = Step of simplification_step | Infer_one | Infer_direction | Infer_many and simplification_rules = (Loc.t option * simplification_rule) list type goal = EConstr.rel_context * EConstr.types * EConstr.ESorts.t (* The [goal] corresponds to the context and type of an evar representing a * hole in the term. *) type open_term = (goal * EConstr.existential) option * EConstr.constr exception CannotSimplify of Pp.t (* TODO Move the context_map inside the open_term... *) type simplification_fun val apply_simplification_fun : simplification_fun -> Environ.env -> Evd.evar_map ref -> goal -> open_term * bool * Context_map.context_map (* Auxiliary functions. *) (* TODO Remove completely and merge into covering.ml (* Return a substitution and its inverse. *) (* For more flexibility, [rels] is a set of indices which are to be * moved before the variable. By default, this is everything already before * the variable. *) val strengthen : Environ.env -> Evd.evar_map -> Context.rel_context -> int -> ?rels:Int.Set.t -> EConstr.constr -> Covering.context_map * Covering.context_map *) val compose_term : Environ.env -> Evd.evar_map ref -> open_term -> open_term -> open_term val safe_fun : simplification_fun -> simplification_fun val compose_fun : simplification_fun -> simplification_fun -> simplification_fun (* Simplification functions to handle each step. *) (* Any of these can throw a CannotSimplify exception which explains why the * rule cannot apply. *) (* It is assumed that the head of the goal should be a simple equality that * the function has to simplify. 
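* Each of these functions returns an open term with at most one remaining hole, together with the context map relating the simplified problem to the original goal.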
*) (* For instance, a goal such as [(p; x) = (q; y) -> P] has to be changed * to [forall (e : p = q), eq_rect ... x e = y -> P] beforehand. *) val deletion : force:bool -> simplification_fun val solution : dir:direction -> simplification_fun val noConfusion : simplification_fun val noCycle : simplification_fun val elim_true : simplification_fun val elim_false : simplification_fun val identity : simplification_fun val execute_step : simplification_step -> simplification_fun val infer_step : ?loc:Loc.t -> isSol:bool -> Environ.env -> Evd.evar_map ref -> goal -> simplification_step val simplify : simplification_rules -> simplification_fun val simplify_tac : simplification_rules -> unit Proofview.tactic val pr_simplification_rules : simplification_rules -> Pp.t Coq-Equations-1.3.1-8.20/src/splitting.ml000066400000000000000000001550211463127417400177440ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) open Util open Names open Nameops open Constr open Context open Inductiveops open Reductionops open Pp open List open Evarutil open Evar_kinds open Equations_common open Syntax open EConstr open EConstr.Vars open Context_map (** Splitting trees *) type path_component = Id.t type path = path_component list let path_id ?(unfold=false) path = match List.rev path with | hd :: tl -> List.fold_left (fun id suffix -> add_suffix (add_suffix id "_") (Id.to_string suffix)) (if unfold then add_suffix hd "_unfold" else hd) tl | _ -> assert false module PathOT = struct type t = path let path_component_compare id id' = Id.compare id id' let rec compare p p' = match p, p' with | ev :: p, ev' :: p' -> let c = path_component_compare ev ev' in if c == 0 then compare p p' else c | _ :: _, [] -> -1 | [], _ :: _ -> 1 | [], [] -> 0 end module PathMap = struct include Map.Make (PathOT) end type wf_rec = { wf_rec_term : constr; wf_rec_functional : constr option; wf_rec_arg : constr; wf_rec_rel : constr } type struct_rec = { struct_rec_arg : Syntax.rec_annot; struct_rec_protos : int; } type rec_node = | WfRec of wf_rec | StructRec of struct_rec type rec_info = { rec_prob : context_map; rec_lets : rel_context; rec_sign : rel_context; rec_arity : constr; rec_args : int; (* number of arguments of the recursive function *) rec_node : rec_node } type splitting = | Compute of context_map * where_clause list * types * splitting_rhs | Split of context_map * int * types * splitting option array | Mapping of context_map * splitting (* Mapping Γ |- p : Γ' and splitting Γ' |- p : Δ *) | Refined of context_map * refined_node * splitting and where_clause = { where_program : program; where_program_orig : program_info; where_program_args : constr list; (* In original context, de Bruijn only *) where_path : path; where_orig : path; where_context_length : int; (* Length of enclosing context, including fixpoint prototype if any *) where_type : types } and refined_node = { refined_obj : identifier * constr * types; refined_rettyp : types; refined_arg : int * int; (* Index counting lets or not *) refined_path : path; refined_term : EConstr.t; refined_filter : int list option; refined_args : constr list; refined_revctx : context_map; refined_newprob : context_map; 
refined_newprob_to_lhs : context_map; refined_newty : types } and program = { program_info : program_info; program_prob : context_map; program_rec : rec_info option; program_splitting : splitting; program_term : constr } and splitting_rhs = | RProgram of constr | REmpty of int * splitting option array let where_term w = applist (w.where_program.program_term, w.where_program_args) let context_map_of_splitting : splitting -> context_map = function | Compute (subst, _, _, _) -> subst | Split (subst, _, _, _) -> subst | Mapping (subst, _) -> subst | Refined (subst, _, _) -> subst let pr_path evd = prlist_with_sep (fun () -> str":") Id.print let path_component_eq id id' = Id.equal id id' let eq_path path path' = let rec aux path path' = match path, path' with | [], [] -> true | hd :: tl, hd' :: tl' -> path_component_eq hd hd' && aux tl tl' | _, _ -> false in aux path path' let program_id p = p.program_info.program_id let program_loc p = p.program_info.program_loc let program_type p = program_type p.program_info let program_sign p = p.program_info.program_sign let program_impls p = p.program_info.program_impls let program_rec p = p.program_info.program_rec let program_arity p = p.program_info.program_arity let where_id w = w.where_program.program_info.program_id let where_context wheres = List.map (fun ({where_program; where_type } as w) -> make_def (nameR (where_id w)) (Some (where_term w)) where_type) wheres let where_program_type w = program_type w.where_program let pplhs env sigma lhs = pr_pats env sigma (lhs.Context_map.map_inst) let pr_splitting_rhs ?(verbose=false) env' env'' sigma lhs rhs ty = let verbose pp = if verbose then pp else mt () in match rhs with | RProgram c -> pplhs env' sigma lhs ++ str" := " ++ Printer.pr_econstr_env env'' sigma c ++ (verbose (str " : " ++ Printer.pr_econstr_env env'' sigma ty)) | REmpty (i, _) -> pplhs env' sigma lhs ++ str" :=! " ++ pr_rel_name env'' i let pr_program_info env sigma p = let open Pp in Names.Id.print p.program_id ++ str " : " ++ Printer.pr_econstr_env env sigma (Syntax.program_type p) ++ str " : " ++ Printer.pr_econstr_env env sigma (mkSort (ESorts.make p.program_sort)) ++ str " ( " ++ (match p.program_rec with | Some (Structural ann) -> (match ann with | MutualOn (Some (i,_)) -> str "mutually recursive on " ++ int i | MutualOn None -> str "mutually recursive on ? " | NestedOn (Some (i,_)) -> str "nested on " ++ int i | NestedOn None -> str "nested on ? 
" | NestedNonRec -> str "nested but not directly recursive") | Some (WellFounded (c, r, info)) -> str "wellfounded" | None -> str "not recursive") ++ str")" let pr_splitting env sigma ?(verbose=false) split = let verb pp = if verbose then pp else mt () in let rec aux = function | Compute (lhs, wheres, ty, c) -> let env' = push_rel_context (lhs.Context_map.src_ctx) env in let ppwhere w = hov 2 (str"where " ++ Id.print (where_id w) ++ str " : " ++ (try Printer.pr_econstr_env env' sigma w.where_type ++ hov 1 (str "(program type: " ++ Printer.pr_econstr_env env sigma (where_program_type w) ++ str ") ") ++ pr_program_info env sigma w.where_program.program_info ++ str "(path: " ++ Id.print (path_id w.where_path) ++ str")" ++ spc () ++ str "(where_term: " ++ Printer.pr_econstr_env env sigma (where_term w) ++ str ")" ++ str "(arity: " ++ Printer.pr_econstr_env env sigma w.where_program.program_info.program_arity ++ str ")" ++ str" (where context length : " ++ int w.where_context_length ++ str ")" ++ str " := " ++ Pp.fnl () ++ aux w.where_program.program_splitting with e -> str "*raised an exception")) in let ppwheres = prlist_with_sep Pp.fnl ppwhere wheres in let env'' = push_rel_context (where_context wheres) env' in (pr_splitting_rhs ~verbose env' env'' sigma lhs c ty ++ Pp.fnl () ++ ppwheres ++ verb (hov 2 (fnl () ++ str "(in context: " ++ spc () ++ pr_context_map env sigma lhs ++ str")" ++ fnl ()))) | Split (lhs, var, ty, cs) -> let env' = push_rel_context (lhs.Context_map.src_ctx) env in (pplhs env' sigma lhs ++ str " split: " ++ pr_rel_name env' var ++ Pp.fnl () ++ verb (hov 2 (str" : " ++ Printer.pr_econstr_env env' sigma ty ++ spc () ++ str " (in context " ++ spc () ++ pr_context_map env sigma lhs ++ str ")" ++ fnl ())) ++ (Array.fold_left (fun acc so -> acc ++ hov 2 (match so with | None -> str "*impossible case*" ++ Pp.fnl () | Some s -> aux s)) (mt ()) cs)) | Mapping (ctx, s) -> hov 2 (str"Mapping " ++ pr_context_map env sigma ctx ++ Pp.fnl () ++ aux s) | Refined (lhs, info, s) -> let (id, c, cty), ty, arg, path, scargs, revctx, newprob, newty = info.refined_obj, info.refined_rettyp, info.refined_arg, info.refined_path, info.refined_args, info.refined_revctx, info.refined_newprob, info.refined_newty in let env' = push_rel_context (lhs.Context_map.src_ctx) env in hov 0 (pplhs env' sigma lhs ++ str " with " ++ Id.print id ++ str" := " ++ Printer.pr_econstr_env env' sigma (mapping_constr sigma revctx c) ++ Pp.fnl () ++ verb (hov 2 (str " : " ++ Printer.pr_econstr_env env' sigma cty ++ str" " ++ Printer.pr_econstr_env env' sigma ty ++ str" " ++ hov 2 (str "in" ++ pr_context_map env sigma lhs) ++ spc () ++ hov 2 (str "refine term (in lhs): " ++ Printer.pr_econstr_env env' sigma info.refined_term) ++ hov 2 (str "refine args: " ++ prlist_with_sep spc (Printer.pr_econstr_env env' sigma) info.refined_args) ++ hov 2 (str "New problem: " ++ pr_context_map env sigma newprob) ++ hov 2 (str "For type: " ++ Printer.pr_econstr_env (push_rel_context (newprob.Context_map.src_ctx) env) sigma newty) ++ hov 2 (str"Eliminating:" ++ pr_rel_name (push_rel_context (newprob.Context_map.src_ctx) env) (snd arg) ++ spc ()) ++ hov 2 (str "Revctx is: " ++ pr_context_map env sigma revctx) ++ hov 2 (str "New problem to problem substitution is: " ++ pr_context_map env sigma info.refined_newprob_to_lhs ++ Pp.fnl ()))) ++ hov 0 (aux s)) in try aux split with e -> str"Error pretty-printing splitting" let pr_program env evd p = pr_program_info env evd p.program_info ++ fnl () ++ pr_splitting env evd p.program_splitting let 
pr_programs env evd p = prlist_with_sep fnl (pr_program env evd) p let pp s = pp_with !Topfmt.deep_ft s let ppsplit s = let env = Global.env () in let sigma = Evd.from_env env in pp (pr_splitting env sigma s) let map_wf_rec f r = { wf_rec_term = f r.wf_rec_term; wf_rec_functional = Option.map f r.wf_rec_functional; wf_rec_arg = f r.wf_rec_arg; wf_rec_rel = f r.wf_rec_rel } let map_struct_rec f r = { struct_rec_arg = r.struct_rec_arg; struct_rec_protos = r.struct_rec_protos} let map_rec_node f = function | StructRec s -> StructRec (map_struct_rec f s) | WfRec s -> WfRec (map_wf_rec f s) let map_rec_info f r = { rec_prob = map_ctx_map f r.rec_prob; rec_lets = map_rel_context f r.rec_lets; rec_sign = map_rel_context f r.rec_sign; rec_arity = f r.rec_arity; rec_args = r.rec_args; rec_node = map_rec_node f r.rec_node } let rec map_program f p = { program_info = map_program_info f p.program_info; program_prob = map_ctx_map f p.program_prob; program_splitting = map_split f p.program_splitting; program_rec = Option.map (map_rec_info f) p.program_rec; program_term = f p.program_term } and map_where f w = { w with where_program_orig = map_program_info f w.where_program_orig; where_program = map_program f w.where_program; where_program_args = List.map f w.where_program_args; where_type = f w.where_type } and map_split f split = let rec aux = function | Compute (lhs, where, ty, RProgram c) -> let where' = List.map (fun w -> map_where f w) where in let lhs' = map_ctx_map f lhs in Compute (lhs', where', f ty, RProgram (f c)) | Split (lhs, y, z, cs) -> let lhs' = map_ctx_map f lhs in Split (lhs', y, f z, Array.map (Option.map aux) cs) | Mapping (lhs, s) -> let lhs' = map_ctx_map f lhs in Mapping (lhs', aux s) | Refined (lhs, info, s) -> let lhs' = map_ctx_map f lhs in let (id, c, cty) = info.refined_obj in let scargs = info.refined_args in Refined (lhs', { info with refined_obj = (id, f c, f cty); refined_term = f info.refined_term; refined_args = List.map f scargs; refined_rettyp = f info.refined_rettyp; refined_revctx = map_ctx_map f info.refined_revctx; refined_newprob = map_ctx_map f info.refined_newprob; refined_newprob_to_lhs = map_ctx_map f info.refined_newprob_to_lhs; refined_newty = f info.refined_newty}, aux s) | Compute (lhs, where, ty, (REmpty _ as em)) -> let lhs' = map_ctx_map f lhs in Compute (lhs', where, f ty, em) in aux split let is_nested p = match p.Syntax.program_rec with | Some (Structural (NestedOn _)) -> true | Some (Structural NestedNonRec) -> true | _ -> false let compute_possible_guardness_evidences sigma n fixbody fixtype = match n with | Some i -> [i] | None -> (* If recursive argument was not given by user, we try all args. An earlier approach was to look only for inductive arguments, but doing it properly involves delta-reduction, and it finally doesn't seem to worth the effort (except for huge mutual fixpoints ?) 
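* So, when the recursive argument is not given, we simply return every argument position of the fixpoint type and let the guard checker select a valid one later, in [define_mutual_nested].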
*) let m = Termops.nb_prod sigma fixtype in let ctx = fst (decompose_prod_n_decls sigma m fixtype) in List.map_i (fun i _ -> i) 0 ctx let define_mutual_nested env evd get_prog progs = let mutual = List.filter (fun (p, prog) -> not (is_nested p)) progs in (* In the mutually recursive case, only the functionals have been defined, we build the block and its projections now *) let structargs = Array.map_of_list (fun (p,_) -> match p.Syntax.program_rec with | Some (Structural (MutualOn (Some (lid,_)))) -> Some lid | _ -> None) mutual in let mutualapp, nestedbodies = let nested = List.length progs - List.length mutual in let one_nested before p prog afterctx idx = let signlen = List.length p.program_sign in let fixbody = Vars.lift 1 (* lift over itself *) (mkApp (get_prog prog, rel_vect (signlen + (nested - 1)) (List.length mutual))) in let after = (nested - 1) - before in let fixb = (Array.make 1 idx, 0) in let fixna = Array.make 1 (make_annot (Name p.program_id) (Retyping.relevance_of_sort (ESorts.make p.program_sort))) in let fixty = Array.make 1 (Syntax.program_type p) in (* Apply to itself *) let beforeargs = Termops.rel_list (signlen + 1) before in let fixref = mkRel (signlen + 1) in let (afterargs, afterctx) = let rec aux (acc, ctx) n afterctx = if Int.equal n after then acc, ctx else match afterctx with | ty :: tl -> let term = applist (mkRel (signlen + nested), acc) in let decl = Context.Rel.Declaration.LocalDef (nameR (Id.of_string "H"), term, ty) in aux (List.map (Vars.lift 1) acc @ [mkRel 1], decl :: ctx) (succ n) tl | [] -> assert false in aux (beforeargs @ [fixref], []) 0 afterctx in let fixbody = applist (Vars.lift after fixbody, afterargs) in (* Apply to its arguments *) let fixbody = mkApp (fixbody, extended_rel_vect after p.program_sign) in let fixbody = it_mkLambda_or_LetIn fixbody afterctx in let fixbody = it_mkLambda_or_LetIn fixbody p.program_sign in it_mkLambda_or_LetIn (mkFix (fixb, (fixna, fixty, Array.make 1 fixbody))) (List.init (nested - 1) (fun _ -> (Context.Rel.Declaration.LocalAssum (anonR, mkProp)))) in let rec fixsubst i k acc l = match l with | (p', prog') :: rest -> (match p'.Syntax.program_rec with | Some (Structural (NestedOn idx)) -> let idx = match idx with | Some (idx, _) -> idx | None -> pred (List.length p'.program_sign) in let rest_tys = List.map (fun (p,_) -> Syntax.program_type p) rest in let term = one_nested k p' prog' rest_tys idx in fixsubst i (succ k) ((true, term) :: acc) rest | Some (Structural NestedNonRec) -> (* Non immediately recursive nested def *) let term = mkApp (get_prog prog', rel_vect 0 (List.length mutual)) in fixsubst i (succ k) ((true, term) :: acc) rest | _ -> fixsubst (pred i) k ((false, mkRel i) :: acc) rest) | [] -> List.rev acc in (* aux1 ... 
auxn *) let nested = fixsubst (List.length mutual) 0 [] progs in let nested, mutual = List.partition (fun (x, y) -> x) nested in let gns = List.fold_right (fun (_, g) acc -> applist (g, acc) :: acc) nested [] in let nested = List.fold_left (fun acc g -> applist (g, List.rev acc) :: acc) [] gns in let nested = List.rev_map (Reductionops.nf_beta env !evd) nested in List.map snd mutual, nested in let decl = let blockfn (p, prog) = let na = nameR p.program_id in let ty = Syntax.program_type p in let sign = p.program_sign in let body = beta_appvect !evd (get_prog prog) (Array.append (Array.of_list mutualapp) (Array.of_list nestedbodies)) in let body = beta_appvect !evd (Vars.lift (List.length sign) body) (extended_rel_vect 0 sign) in let body = it_mkLambda_or_LetIn body (lift_rel_context 1 sign) in na, ty, body in let blockl = List.map blockfn mutual in let names, tys, bodies = List.split3 blockl in Array.of_list names, Array.of_list tys, Array.of_list bodies in let nested, mutual = List.partition (fun (p,prog) -> is_nested p) progs in let indexes = let names, tys, bodies = decl in let possible_indexes = Array.map3 (compute_possible_guardness_evidences !evd) structargs bodies tys in Pretyping.esearch_fix_guard env !evd (Array.to_list possible_indexes) (names, tys, bodies) in let declare_fix_fns i (p,prog) = let newidx = indexes.(i) in let p = { p with Syntax.program_rec = Some (Structural (MutualOn (Some (newidx, None)))) } in let fix = mkFix ((indexes, i), decl) in (p, prog, fix) in let fixes = List.mapi declare_fix_fns mutual in let declare_nested (p,prog) body = (p, prog, body) in let nested = List.map2 declare_nested nested nestedbodies in fixes, nested let check_typed ~where ?name env evd c = let sigma, _ = try Typing.type_of env evd c with Type_errors.TypeError (env, tyerr) -> anomaly Pp.(str where ++ spc () ++ str "Equations build an ill-typed term: " ++ Printer.pr_econstr_env env evd c ++ Himsg.explain_pretype_error env evd (Pretype_errors.TypingError (Pretype_errors.of_type_error tyerr))) | Pretype_errors.PretypeError (env, evd, tyerr) -> anomaly Pp.(str where ++ spc () ++ str "Equations build an ill-typed term: " ++ Printer.pr_econstr_env env evd c ++ Himsg.explain_pretype_error env evd tyerr) in let check = Evd.check_constraints evd (snd @@ Evd.universe_context_set sigma) in if not check then anomaly Pp.(str where ++ spc () ++ str "Equations missing constraints in " ++ str (Option.default "(anonymous)" name)) let term_of_tree env0 isevar sort tree = let rec aux env evm sort = function | Compute (subst, where, ty, RProgram rhs) -> let ctx = subst.Context_map.src_ctx in let compile_where ({where_program; where_type} as w) (env, evm, ctx) = let evm, c', ty' = evm, where_term w, where_type in (env, evm, (make_def (nameR (where_id w)) (Some c') ty' :: ctx)) in let env, evm, ctx = List.fold_right compile_where where (env, evm,ctx) in let body = it_mkLambda_or_LetIn rhs ctx and typ = it_mkProd_or_subst env evm ty ctx in evm, body, typ | Compute (lhs, where, ty, REmpty (split, sp)) -> assert (List.is_empty where); let ctx = lhs.Context_map.src_ctx in let evm, bot = new_global evm (Lazy.force logic_bot) in let evm, prf, _ = aux env evm sort (Split (lhs, split, bot, sp)) in let evm, case = new_global evm (Lazy.force logic_bot_case) in let term = mkApp (case, [| ty; beta_appvect evm prf (extended_rel_vect 0 ctx) |]) in let term = it_mkLambda_or_LetIn term ctx in let ty = it_mkProd_or_subst env evm ty ctx in evm, term, ty | Mapping (subst, s) -> let ctx = subst.Context_map.src_ctx in let evm, 
term, ty = aux env evm sort s in let args = Array.rev_of_list (snd (constrs_of_pats ~inacc_and_hide:false env evm subst.Context_map.map_inst)) in let term = it_mkLambda_or_LetIn (whd_beta env evm (mkApp (term, args))) ctx in let ty = it_mkProd_or_subst env evm (prod_appvect evm ty args) ctx in evm, term, ty | Refined (subst, info, rest) -> let ctx = subst.Context_map.src_ctx in let (id, _, _), ty, rarg, path, f, args, newprob, newty = info.refined_obj, info.refined_rettyp, info.refined_arg, info.refined_path, info.refined_term, info.refined_args, info.refined_newprob, info.refined_newty in let evm, sterm, sty = aux env evm sort rest in let term = applist (f, args) in let term = it_mkLambda_or_LetIn term ctx in let ty = it_mkProd_or_subst env evm ty ctx in evm, term, ty | Split (subst, rel, ty, sp) -> let ctx = subst.Context_map.src_ctx in (* Produce parts of a case that will be relevant. *) let evm, block = Equations_common.(get_fresh evm coq_block) in let blockty = mkLetIn (anonR, block, Retyping.get_type_of env evm block, lift 1 ty) in let evd = ref evm in let elim_relevance = Retyping.relevance_of_type (push_rel_context ctx env) evm ty in let ctx', case_ty, branches_res, nb_cuts, rev_subst, to_apply, simpl = Sigma_types.smart_case env evd ctx rel blockty in (* The next step is to use [simplify]. *) let simpl_step = if simpl then Simplify.simplify [None, Simplify.Infer_many] else Simplify.identity in let branches = Array.map2 (fun (ty, nb, csubst) next -> (* We get the context from the constructor arity. *) let new_ctx, ty = EConstr.decompose_prod_n_decls !isevar nb ty in let new_ctx = Namegen.name_context env !isevar new_ctx in let envnew = push_rel_context (new_ctx @ ctx') env in (* Remove the cuts and append them to the context. *) let cut_ctx, ty = Equations_common.splay_prod_n_assum envnew !isevar nb_cuts ty in let ty = if simpl then Tacred.hnf_constr (push_rel_context cut_ctx envnew) !evd ty else ty in (* TODO This context should be the same as (pi1 csubst). We could * either optimize (but names in [csubst] are worse) or just insert * a sanity-check. *) if !Equations_common.debug then begin let open Feedback in let ctx = cut_ctx @ new_ctx @ ctx' in msg_debug(str"Simplifying term:"); msg_debug(let env = push_rel_context ctx env in Printer.pr_econstr_env env !evd ty); msg_debug(str"... in context:"); msg_debug(pr_context env !evd ctx); msg_debug(str"... named context:"); msg_debug(Printer.pr_named_context env !evd (EConstr.Unsafe.to_named_context (named_context env))); end; let _ = let env = push_rel_context (cut_ctx @ new_ctx @ ctx') env in evd_comb0 (fun sigma -> Typing.type_of env sigma ty) evd in let ((hole, c), _, lsubst) = Simplify.apply_simplification_fun simpl_step env evd (cut_ctx @ new_ctx @ ctx', ty, sort) in if !debug then begin let open Feedback in msg_debug (str"Finished simplifying"); msg_debug(let ctx = cut_ctx @ new_ctx @ ctx' in let env = push_rel_context ctx env in Printer.pr_econstr_env env !evd c); end; let subst = compose_subst ~unsafe:true env ~sigma:!evd csubst subst in let subst = compose_subst ~unsafe:true env ~sigma:!evd lsubst subst in (* Now we build a term to put in the match branch. *) let c = match hole, next with (* Dead code: we should have found a proof of False. *) | None, None -> c (* Normal case: build recursively a subterm. *) | Some ((next_ctx, _, glu), ev), Some s -> let evm, next_term, next_ty = aux env !evd glu s in (* Now we need to instantiate [ev] with the term [next_term]. *) (* We might need to permutate some rels. 
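* The context map of the subtree may order the branch variables differently from the current problem, so [make_permutation] computes the renaming between the two before the evar is instantiated.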
*) let next_subst = context_map_of_splitting s in let perm_subst = Context_map.make_permutation ~env evm subst next_subst in (* [next_term] starts with lambdas, so we apply it to its context. *) let args = Equations_common.extended_rel_vect 0 (perm_subst.Context_map.tgt_ctx) in let next_term = beta_appvect !evd next_term args in let next_term = Context_map.mapping_constr evm perm_subst next_term in (* We know the term is a correct instantiation of the evar, we * just need to apply it to the correct variables. *) let ev_info = Evd.find_undefined evm (fst ev) in let ev_ctx = Evd.evar_context ev_info in (* [next_term] is typed under [env, next_ctx] while the evar * is typed under [ev_ctx] *) let ev_ctx_constrs = List.map (fun decl -> let id = Context.Named.Declaration.get_id decl in EConstr.mkVar id) ev_ctx in let rels, named = List.chop (List.length next_ctx) ev_ctx_constrs in let vars_subst = List.map2 (fun decl c -> let id = Context.Named.Declaration.get_id decl in id, c) (Environ.named_context env) named in let term = Vars.replace_vars !evd vars_subst next_term in let term = Vars.substl rels term in (* let _ = * let env = Evd.evar_env ev_info in * Typing.type_of env evm term * in *) evd := Evd.define (fst ev) term evm; c (* This should not happen... *) | _ -> failwith "Should not fail here, please report." in EConstr.it_mkLambda_or_LetIn c (cut_ctx @ new_ctx) ) branches_res sp in (* Get back to the original context. *) let case_ty = mapping_constr !evd rev_subst case_ty in let branches = Array.map (mapping_constr !evd rev_subst) branches in (* Remove the block lets *) let rec clean_block c = match kind !evd c with | LetIn (_, b, _, b') when Equations_common.is_global env !evd (Lazy.force coq_block) b -> clean_block (subst1 b b') | _ -> EConstr.map !evd clean_block c in let case_ty = clean_block case_ty in let branches = Array.map clean_block branches in (* Fetch the type of the variable that we want to eliminate. *) let after, decl, before = split_context (pred rel) ctx in let rel_ty = Context.Rel.Declaration.get_type decl in let rel_ty = Vars.lift rel rel_ty in let rel_t = EConstr.mkRel rel in let pind, args = find_inductive env !evd rel_ty in (* Build the case. 
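* The scrutinee is the eliminated variable [rel_t]; [make_case_or_project] produces a match or primitive projections depending on the inductive, and the cut terms collected by [smart_case] are applied to the resulting case.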
*) let case_info = Inductiveops.make_case_info env (fst pind) Constr.RegularStyle in let indty = Inductiveops.find_rectype env !evd (mkApp (mkIndU pind, Array.of_list args)) in let case = Inductiveops.make_case_or_project env !evd indty case_info (case_ty, elim_relevance) rel_t branches in let term = EConstr.mkApp (case, Array.of_list to_apply) in let term = EConstr.it_mkLambda_or_LetIn term ctx in let typ = it_mkProd_or_subst env evm ty ctx in let () = if !Equations_common.debug then check_typed ~where:"splitting" env !evd term in let term = Evarutil.nf_evar !evd term in !evd, term, typ in let evm, term, typ = aux env0 !isevar sort tree in isevar := evm; term, typ let define_mutual_nested_csts flags env evd get_prog progs = let mutual, nested = define_mutual_nested env evd (fun prog -> get_prog evd prog) progs in let mutual = List.map (fun (p, prog, fix) -> let ty = p.Syntax.program_orig_type in let kn, (evm, term) = declare_constant p.program_id fix (Some ty) ~poly:flags.polymorphic !evd ~kind:Decls.(IsDefinition Fixpoint) in evd := evm; Impargs.declare_manual_implicits false (GlobRef.ConstRef kn) p.program_impls; (p, prog, term)) mutual in let args = List.rev_map (fun (p', _, term) -> term) mutual in let nested = List.map (fun (p, prog, fix) -> let ty = p.Syntax.program_orig_type in let body = Vars.substl args fix in let kn, (evm, e) = declare_constant p.program_id body (Some ty) ~poly:flags.polymorphic !evd ~kind:Decls.(IsDefinition Fixpoint) in evd := evm; Impargs.declare_manual_implicits false (GlobRef.ConstRef kn) p.program_impls; (p, prog, e)) nested in mutual, nested type program_shape = | Single of program_info * context_map * rec_info option * splitting * constr | Mutual of program_info * context_map * rec_info * splitting * rel_context * constr let make_program env evd p prob s rec_info = match rec_info with | Some r -> let sort = ESorts.make p.program_sort in let lhs = id_subst r.rec_lets in (match r.rec_node with | WfRec wfr -> let fn = match wfr.wf_rec_functional with | None -> let term, ty = term_of_tree env evd sort s in term | Some t -> t in let term = beta_appvect !evd wfr.wf_rec_term [| beta_appvect !evd fn (extended_rel_vect 0 (lhs.Context_map.src_ctx)) |] in Single (p, prob, rec_info, s, it_mkLambda_or_LetIn term (lhs.Context_map.src_ctx)) | StructRec sr -> let term, ty = term_of_tree env evd sort s in let args = extended_rel_vect 0 r.rec_lets in let term = beta_appvect !evd term args in let before, after = CList.chop r.rec_args r.rec_sign in let fixdecls, after = CList.chop sr.struct_rec_protos after in let subst = List.append (List.map (fun _ -> mkProp) fixdecls) (List.rev (Array.to_list args)) in let program_sign = subst_rel_context 0 subst before in let program_arity = substnl subst r.rec_args r.rec_arity in let p' = { p with program_sign; program_arity } in let p' = match p.program_rec with | Some (Structural ann) -> let ann' = match ann with | NestedOn None -> (match s with | Split (ctx, var, _, _) -> NestedOn (Some ((List.length (ctx.Context_map.src_ctx)) - var - sr.struct_rec_protos, None)) | _ -> ann) | _ -> ann in { p' with program_rec = Some (Structural ann') } | _ -> p' in Mutual (p', prob, r, s, after, (* lift 1 *)term)) | None -> Single (p, prob, rec_info, s, fst (term_of_tree env evd (ESorts.make p.program_sort) s)) let update_rec_info p rec_info = match p.Syntax.program_rec, rec_info.rec_node with | Some (Structural ra), StructRec sr -> {rec_info with rec_node = StructRec { sr with struct_rec_arg = ra }} | _, _ -> rec_info let make_programs env evd 
flags ?(define_constants=false) programs = let sterms = List.map (fun (p', prob, split, rec_info) -> make_program env evd p' prob split rec_info) programs in match sterms with [Single (p, prob, rec_info, s, term)] -> let term = nf_beta env !evd term in let term = if define_constants then let (cst, (evm, e)) = Equations_common.declare_constant p.program_id term (Some (p.Syntax.program_orig_type)) ~poly:flags.polymorphic !evd ~kind:Decls.(IsDefinition Definition) in evd := evm; let () = Impargs.declare_manual_implicits false (GlobRef.ConstRef cst) p.program_impls in let () = Declare.definition_message p.program_id in e else term in [{ program_info = p; program_prob = prob; program_rec = rec_info; program_splitting = s; program_term = term }] | _ -> let terms = List.map (function | Mutual (p, prob, r, s', after, term) -> (p, (prob, r, s', after, lift 1 term)) | Single (p, _, _, _, _) -> user_err_loc (p.program_loc, str "Cannot define " ++ Names.Id.print p.program_id ++ str " mutually with other programs ")) sterms in let mutual, nested = if define_constants then if List.length terms > 1 && not (List.exists (fun (p, _) -> is_nested p) terms) then let terms = List.map (fun (p, (prob, r, s', after, term)) -> let term = it_mkLambda_or_LetIn term after in let kn, (evm, e) = declare_constant (Nameops.add_suffix p.program_id "_functional") term None ~poly:flags.polymorphic !evd ~kind:Decls.(IsDefinition Fixpoint) in evd := evm; (p, (prob, r, s', after, e))) terms in define_mutual_nested_csts flags (Global.env ()) evd (fun evd (prob, r, s', after, term) -> (applist (term, extended_rel_list 0 after))) terms else define_mutual_nested_csts flags (Global.env ()) evd (fun evd (prob, r, s', after, term) -> (it_mkLambda_or_LetIn term after)) terms else let env = let (p, (prob, r, s, after, term)) = List.hd terms in push_rel_context after env in define_mutual_nested env evd (fun (_, _, _, _, x) -> x) terms in let make_prog (p, (prob, rec_info, s', after, _), b) = let term = it_mkLambda_or_LetIn b after in let term = nf_beta env !evd term in let rec_info = update_rec_info p rec_info in let p = { p with program_sign = p.program_sign @ after } in { program_info = p; program_prob = prob; program_rec = Some rec_info; program_splitting = s'; program_term = term } in let mutual = List.map make_prog mutual in let nested = List.map make_prog nested in mutual @ nested let make_single_program env evd flags p prob s rec_info = match make_programs env evd flags [p, prob, s, rec_info] with | [p] -> p | _ -> raise (Invalid_argument "make_single_program: more than one program") let change_lhs s subs = let open Context.Rel.Declaration in let l' = List.map (function LocalDef ({binder_name=Name id}, b, t) as decl -> (try let b' = List.assoc id s in LocalDef (make_annot (Name id) (get_relevance decl), b', t) with Not_found -> decl) | decl -> decl) subs.Context_map.src_ctx in { subs with Context_map.src_ctx = l' } let change_splitting s sp = let rec aux = function | Compute (lhs, where, ty, r) -> Compute (change_lhs s lhs, where, ty, r) | Split (lhs, rel, ty, sp) -> Split (change_lhs s lhs, rel, ty, Array.map (fun x -> Option.map aux x) sp) | Mapping (lhs, sp) -> Mapping (change_lhs s lhs, aux sp) | Refined (lhs, info, rest) -> Refined (change_lhs s lhs, info, aux rest) in aux sp let check_splitting env evd sp = let check_type ctx t = let _evm, _ty = Typing.type_of (push_rel_context ctx env) evd t in () in let check_term ctx t ty = let _evm = Typing.check (push_rel_context ctx env) evd t ty in () in let check_rhs ctx ty = 
function | RProgram c -> check_term ctx c ty | REmpty _ -> () in let rec aux = function | Compute (lhs, where, ty, r) -> let _ = check_ctx_map env evd lhs in let ctx = check_wheres lhs where in let _ = check_type ctx ty in let _ = check_rhs ctx ty r in () | Split (lhs, rel, ty, sp) -> let _ = check_ctx_map env evd lhs in let _r = lookup_rel rel (push_rel_context (lhs.Context_map.src_ctx) env) in let _ = check_type (lhs.Context_map.src_ctx) ty in Array.iter (Option.iter aux) sp | Mapping (lhs, sp) -> let _ = check_ctx_map env evd lhs in aux sp | Refined (lhs, info, rest) -> let _ = check_ctx_map env evd lhs in aux rest and check_wheres lhs wheres = let check_where ctx w = let () = check_program w.where_program in let () = check_type ctx w.where_type in let () = check_term ctx (applist (w.where_program.program_term, w.where_program_args)) w.where_type in let () = assert(w.where_context_length = List.length ctx) in let def = make_def (nameR (where_id w)) (Some (where_term w)) w.where_type in def :: ctx in let ctx = List.fold_left check_where (lhs.Context_map.src_ctx) wheres in ctx and check_program p = let ty = program_type p in let () = check_type [] ty in let _ = check_ctx_map env evd p.program_prob in let _ = match p.program_rec with | None -> [] | Some r -> let ty = it_mkLambda_or_LetIn r.rec_arity r.rec_sign in let () = check_type [] ty in match r.rec_node with | WfRec wf -> let () = check_type [] wf.wf_rec_term in [] | StructRec s -> [] in aux p.program_splitting in aux sp let define_one_program_constants flags env0 isevar udecl unfold p = let () = assert (not (Evd.has_undefined !isevar)) in let helpers = ref [] in let rec aux_program env evm p path = match p.program_rec with | Some ({ rec_node = WfRec r } as wfr) -> let evm, s' = aux env evm p.program_splitting in let isevar = ref evm in let env = Global.env () in let term, ty = term_of_tree env isevar (ESorts.make p.program_info.program_sort) s' in let (cst, (evm, e)) = Equations_common.declare_constant (path_id (Id.of_string "functional" :: path)) term (Some ty) ~poly:flags.polymorphic !isevar ~kind:Decls.(IsDefinition Definition) in let () = helpers := (cst, (0,0)) :: !helpers in let env = Global.env () in let evm = Evd.update_sigma_univs (Environ.universes env) evm in evm, { p with program_splitting = s'; program_rec = Some { wfr with rec_node = WfRec { r with wf_rec_functional = Some e } } } | _ -> let evm, s = aux env evm p.program_splitting in evm, { p with program_splitting = s } and aux env evm = function | Compute (lhs, where, ty, RProgram rhs) -> let define_where ({where_program; where_program_args; where_type; where_path} as w) (env, evm, s, ctx) = let p = where_program in let program_prob = change_lhs s p.program_prob in let program_splitting = change_splitting s p.program_splitting in let evm, p' = aux_program env evm { p with program_splitting } where_path in let env = Global.env () in let evm = Evd.update_sigma_univs (Environ.universes env) evm in let isevar = ref evm in let program' = make_single_program env isevar flags where_program.program_info program_prob p'.program_splitting p'.program_rec in let term' = program'.program_term in let (cst, (evm, e)) = Equations_common.declare_constant (path_id ~unfold where_path) term' None(* (Some (program_type where_program)) *) ~poly:flags.polymorphic !isevar ~kind:Decls.(IsDefinition Definition) in let () = helpers := (cst, (0,0)) :: !helpers in let env = Global.env () in let evm = Evd.update_sigma_univs (Environ.universes env) evm in let p' = { program' with program_term = e 
} in let w' = { w with where_program = p' } in (env, evm, (where_id w, where_term w') :: s, w' :: ctx) in let env, evm, _, where = List.fold_right define_where where (env, evm, [], []) in evm, Compute (lhs, where, ty, RProgram rhs) | Compute (lhs, where, ty, REmpty (split, sp)) -> evm, Compute (lhs, where, ty, REmpty (split, sp)) | Mapping (lhs, s) -> let evm, s' = aux env evm s in evm, Mapping (lhs, s') | Refined (lhs, info, rest) -> let ctx = lhs.Context_map.src_ctx in let evm', rest' = aux env evm rest in let isevar = ref evm' in let env = Global.env () in let sort = (Retyping.get_sort_of (push_rel_context ctx env) !isevar info.refined_rettyp) in let t, ty = term_of_tree env isevar sort rest' in let (cst, (evm, e)) = Equations_common.declare_constant (path_id ~unfold info.refined_path) t (Some ty) ~poly:flags.polymorphic !isevar ~kind:Decls.(IsDefinition Definition) in let () = helpers := (cst, info.refined_arg) :: !helpers in evm, Refined (lhs, { info with refined_term = e }, rest') | Split (lhs, rel, ty, sp) -> let evm, sp' = CArray.fold_left_map (fun evm s -> match s with | Some s -> let evm, s' = aux env evm s in evm, Some s' | None -> evm, None) evm sp in evm, Split (lhs, rel, ty, sp') in let evm, tree = aux_program env0 !isevar p [p.program_info.program_id] in isevar := evm; !helpers, tree let define_program_constants flags env evd udecl ?(unfold=false) programs = let helpers, programs = List.fold_left_map (fun helpers p -> let helpers', p = define_one_program_constants flags env evd udecl unfold p in helpers @ helpers', p) [] programs in let env = Global.env () in let programs = make_programs env evd flags ~define_constants:true (List.map (fun p -> (p.program_info, p.program_prob, p.program_splitting, p.program_rec)) programs) in helpers, programs let is_comp_obl sigma comp hole_kind = match comp with | None -> false | Some r -> match hole_kind, r with | ImplicitArg (GlobRef.ConstRef c, (n, _), _), (loc, id) -> is_rec_call (snd r) c | _ -> false type term_info = { term_id : Names.GlobRef.t; term_ustate : UState.t; base_id : string; poly : bool; scope : Locality.definition_scope; decl_kind : Decls.definition_object_kind; helpers_info : (Constant.t * (int * int)) list; comp_obls : Constant.t list; (** The recursive call proof obligations *) user_obls : Id.Set.t; (** The user obligations *) } type compiled_program_info = { program_cst : Constant.t; program_split_info : term_info } let is_polymorphic info = info.poly let warn_complete id = str "Equations definition " ++ Id.print id ++ str" is complete and requires no further proofs. " ++ str "Use the \"Equations\" command to define it." 
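(* This warning is registered below and emitted by [solve_equations_obligations] when an [Equations?] definition turns out to be complete, with no obligations left to prove. *)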
let equations_cat = CWarnings.create_category ~name:"equations" () let warn_complete = CWarnings.create ~name:"equations-open-proof-complete" ~category:equations_cat ~default:CWarnings.Enabled warn_complete let solve_equations_obligations ~pm flags recids loc i sigma hook = let scope = Locality.(Global ImportNeedQualified) in let kind = Decls.(IsDefinition Definition) in let evars = Evar.Map.bindings (Evd.undefined_map sigma) in let env = Global.env () in let types = List.map (fun (ev, evi) -> if !Equations_common.debug then Feedback.msg_debug (str"evar type" ++ Printer.pr_econstr_env env sigma (Evd.evar_concl evi)); let section_length = List.length (named_context env) in let evcontext = Evd.evar_context evi in let local_context, section_context = List.chop (List.length evcontext - section_length) evcontext in let type_ = EConstr.it_mkNamedProd_or_LetIn sigma (Evd.evar_concl evi) local_context in let type_ = nf_beta env sigma type_ in env, ev, evi, local_context, type_) evars in (* Make goals from a copy of the evars *) let tele = let rec aux types evm = match types with | [] -> Proofview.TNil evm | (evar_env, ev, evi, local_context, type_) :: tys -> let cont evm wit = let evm = Evd.define ev (applist (wit, Context.Named.instance_list mkVar local_context)) evm in aux tys evm in Proofview.TCons (evar_env, evm, nf_evar evm type_, cont) in aux types sigma in let do_intros = (* Force introductions to be able to shrink the bodies later on. *) List.map (fun (env, ev, evi, ctx, _) -> Tacticals.tclDO (List.length ctx) Tactics.intro) types in (* Feedback.msg_debug (str"Starting proof"); *) let info = Declare.Info.make ~kind ~scope ~poly:flags.polymorphic () in let lemma = Declare.Proof.start_equations ~name:i ~hook ~types ~info sigma tele in (* Should this use Lemmas.by *) let lemma = Declare.Proof.map lemma ~f:(fun p -> fst (Proof.solve Goal_select.SelectAll None (Proofview.tclDISPATCH do_intros) p)) in let lemma = Declare.Proof.map lemma ~f:(fun p -> fst (Proof.solve Goal_select.SelectAll None (Tacticals.tclTRY flags.tactic) p)) in let prf = Declare.Proof.get lemma in let pm, lemma = if Proof.is_done prf then if flags.open_proof then (warn_complete ?loc i; pm, Some lemma) else (let pm, _ = Declare.Proof.save ~pm ~proof:lemma ~opaque:Vernacexpr.Transparent ~idopt:None in pm, None) else if flags.open_proof then pm, Some lemma else user_err_loc (loc, str"Equations definition generated subgoals that " ++ str "could not be solved automatically. 
Use the \"Equations?\" command to" ++ str " refine them interactively.") in pm, lemma let gather_fresh_context sigma u octx = let (qvars, univs), _ = Evd.sort_context_set sigma in let qarr, uarr = UVars.Instance.to_array u in let qualities = Array.fold_left (fun ctx' l -> match l with | Sorts.Quality.QConstant _ -> assert false | QVar l -> if not (Sorts.QVar.Set.mem l qvars) then Sorts.QVar.Set.add l ctx' else ctx') Sorts.QVar.Set.empty qarr in let levels = Array.fold_left (fun ctx' l -> if not (Univ.Level.Set.mem l univs) then Univ.Level.Set.add l ctx' else ctx') Univ.Level.Set.empty uarr in (qualities, levels), (UVars.AbstractContext.instantiate u octx) let swap (x, y) = (y, x) let solve_equations_obligations_program ~pm flags recids loc i sigma hook = let poly = flags.polymorphic in let scope = Locality.(Global ImportNeedQualified) in let kind = Decls.(IsDefinition Definition) in let env = Global.env () in let sigma, term = get_fresh sigma (Equations_common.logic_top_intro) in let sigma, ty = get_fresh sigma (Equations_common.logic_top) in let sigma = Evd.minimize_universes sigma in let sigma = Evd.nf_univ_variables sigma in (* XXX is this useful after minimize? *) let sigma = Evarutil.nf_evar_map_undefined sigma in let oblsid = Nameops.add_suffix i "_obligations" in let oblsinfo, (evids, cmap), term, ty = RetrieveObl.retrieve_obligations env oblsid sigma 0 ~status:(Evar_kinds.Define false) term ty in let revids = List.map swap evids in let nc = Environ.named_context env in let nc_len = Context.Named.length nc in let oblsinfo = Array.map (fun (id, ty, src, status, deps, _tac) -> let ev = List.assoc_f Id.equal id revids in let evi = Evd.find_undefined sigma ev in let ctx = Evd.evar_filtered_context evi in let tac = Tacticals.tclTHEN (Tacticals.tclDO (List.length ctx - nc_len) Tactics.intro) flags.tactic in (id, ty, src, status, deps, Some tac)) oblsinfo in let hook { Declare.Hook.S.uctx; obls; _ } pm = (* let hook uctx evars locality gr = *) (* let l = * Array.map_to_list (fun (id, ty, loc, s, d, tac) -> Libnames.qualid_of_ident id) obls in * Extraction_plugin.Table.extraction_inline true l; *) (* Problem is restrict in defineObl.define_program gets rid of universes in the obligations now... 
*) let evd = ref sigma in let evc id = let t = EConstr.of_constr (List.assoc_f Id.equal id obls) in let hd, args = decompose_app !evd t in if EConstr.isRef !evd hd then (if !Equations_common.debug then Feedback.msg_debug (str"mapping obligation to " ++ Printer.pr_econstr_env env !evd t ++ Prettyp.print_about env !evd (CAst.make (Constrexpr.AN (Libnames.qualid_of_ident id))) None); let cst, i = EConstr.destConst !evd hd in let ctx = Environ.constant_context (Global.env ()) cst in let ctx = gather_fresh_context !evd (EConstr.EInstance.kind sigma i) ctx in evd := Evd.merge_sort_context_set Evd.univ_flexible !evd ctx; t) else t in let () = Evd.fold_undefined (fun ev evi () -> let args = Evd.evar_identity_subst evi in let evart = EConstr.mkEvar (ev, args) in let evc = cmap evc evart in evd := Evd.define ev (whd_beta env !evd (EConstr.of_constr evc)) !evd) sigma () in let sigma = !evd in let recobls = List.map (fun (id, c) -> (* Due to shrinking, we can get lambda abstractions first *) let _, body = decompose_lambda_decls sigma (EConstr.of_constr c) in let hd, _ = decompose_app sigma body in try fst (EConstr.destConst sigma hd) with Constr.DestKO -> assert false) obls in hook ~pm recobls sigma in let obl_hook = Declare.Hook.make_g hook in let reduce x = let flags = RedFlags.beta in to_constr sigma (clos_norm_flags flags (Global.env ()) sigma (of_constr x)) in let cinfo = Declare.CInfo.make ~name:oblsid ~typ:ty () in let info = Declare.Info.make ~poly ~scope ~kind () in let pm, _ = Declare.Obls.add_definition ~pm ~cinfo ~info ~obl_hook ~body:term ~uctx:(Evd.evar_universe_context sigma) ~reduce ~opaque:false oblsinfo in pm let simplify_evars evars t = let rec aux t = match Constr.kind (EConstr.Unsafe.to_constr t) with | App (f, args) -> (match Constr.kind f with | Evar ev -> let f' = nf_evar evars (EConstr.of_constr f) in beta_applist evars (f', Array.map_to_list EConstr.of_constr args) | _ -> EConstr.map evars aux t) | Evar ev -> nf_evar evars t | _ -> EConstr.map evars aux t in aux t let unfold_entry cst = Hints.HintsUnfoldEntry [Evaluable.EvalConstRef cst] let add_hint local i cst = let locality = if local || Global.sections_are_opened () then Hints.Local else Hints.SuperGlobal in Hints.add_hints ~locality [Id.to_string i] (unfold_entry cst) type 'a hook = | HookImmediate : (pm:Declare.OblState.t -> program -> term_info -> 'a * Declare.OblState.t) -> 'a hook | HookLater : (pm:Declare.OblState.t -> int -> program -> term_info -> unit * Declare.OblState.t) -> unit hook let rec_type_ids = CList.map_append (function Some (Guarded l) -> List.map fst l | Some (Logical (_, ids)) -> [snd ids] | None -> []) let define_programs (type a) ~pm env evd udecl is_recursive fixprots flags ?(unfold=false) programs : a hook -> a * Declare.OblState.t * Declare.Proof.t option = fun hook -> let call_hook ~pm recobls p helpers uctx scope gr (hook : pm:Declare.OblState.t -> program -> term_info -> a * Declare.OblState.t) : a * Declare.OblState.t = (* let l = * Array.map_to_list (fun (id, ty, loc, s, d, tac) -> Libnames.qualid_of_ident id) obls in * Extraction_plugin.Table.extraction_inline true l; *) let kind = Decls.Definition in let baseid = Id.to_string (program_id p) in let term_info = { term_id = gr; term_ustate = uctx; base_id = baseid; helpers_info = helpers; poly = flags.polymorphic; scope; decl_kind = kind; comp_obls = recobls; user_obls = Id.Set.empty } in hook ~pm p term_info in let all_hook ~pm hook recobls sigma = let sigma = Evd.minimize_universes sigma in let sigma = Evarutil.nf_evar_map_undefined sigma 
in let uentry = UState.check_univ_decl ~poly:flags.polymorphic (Evd.evar_universe_context sigma) udecl in let () = if !Equations_common.debug then Feedback.msg_debug (str"Defining programs, before simplify_evars " ++ pr_programs env sigma programs); in let programs = List.map (map_program (simplify_evars sigma)) programs in let () = if !Equations_common.debug then Feedback.msg_debug (str"Defining programs " ++ pr_programs env sigma programs); in let evd = ref sigma in let helpers, programs = define_program_constants flags env evd uentry ~unfold programs in let sigma = !evd in let programs = List.map (map_program (simplify_evars sigma)) programs in let programs = List.map (map_program (nf_evar sigma)) programs in let ustate = Evd.evar_universe_context sigma in let () = List.iter (fun (cst, _) -> add_hint true (program_id (List.hd programs)) cst) helpers in hook ~pm recobls helpers ustate Locality.(Global ImportDefaultBehavior) programs in let recids = rec_type_ids is_recursive in match hook with | HookImmediate f -> assert(not (Evd.has_undefined !evd)); let hook ~pm recobls helpers ustate kind programs = let p = List.hd programs in let cst, _ = (destConst !evd p.program_term) in call_hook ~pm recobls p helpers ustate Locality.(Global ImportDefaultBehavior) (GlobRef.ConstRef cst) f in let res, pm = all_hook ~pm hook [] !evd in res, pm, None | HookLater f -> let hook ~pm recobls helpers ustate kind programs = CList.fold_left_i (fun i pm p -> let cst, _ = (destConst !evd p.program_term) in call_hook ~pm recobls p helpers ustate Locality.(Global ImportDefaultBehavior) (GlobRef.ConstRef cst) (f i) |> snd) 0 pm programs in let hdprog = List.hd programs in let loc = program_loc hdprog in let id = program_id hdprog in if Evd.has_undefined !evd then if flags.open_proof then let pm, lemma = solve_equations_obligations ~pm flags recids loc id !evd (all_hook hook) in (), pm, lemma else let pm = solve_equations_obligations_program ~pm flags recids loc id !evd (all_hook hook) in (), pm, None else if flags.open_proof then begin warn_complete ?loc id; let pm, lemma = solve_equations_obligations ~pm flags recids loc id !evd (all_hook hook) in (), pm, lemma end else let pm = all_hook ~pm hook [] !evd in (), pm, None let define_program_immediate ~pm env evd udecl is_recursive fixprots flags ?(unfold=false) program = define_programs ~pm env evd udecl is_recursive fixprots flags ~unfold [program] (HookImmediate (fun ~pm x y -> (x, y), pm)) let define_programs ~pm env evd udecl is_recursive fixprots flags ?(unfold=false) programs hook = let _, lemma, pm = define_programs ~pm env evd udecl is_recursive fixprots flags ~unfold programs (HookLater hook) in lemma, pm let mapping_rhs sigma s = function | RProgram c -> RProgram (mapping_constr sigma s c) | REmpty (i, sp) -> try match nth (s.Context_map.map_inst) (pred i) with | PRel i' -> REmpty (i', sp) | _ -> assert false with Not_found -> assert false let map_rhs f g = function | RProgram c -> RProgram (f c) | REmpty (i, sp) -> REmpty (g i, sp) Coq-Equations-1.3.1-8.20/src/splitting.mli000066400000000000000000000152721463127417400201200ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) open Environ open 
Names open Syntax open EConstr open Equations_common open Context_map (** Programs and splitting trees *) (** Splitting trees *) type path_component = Id.t type path = path_component list val path_id : ?unfold:bool -> path -> Id.t module PathOT : sig type t = path val compare : t -> t -> int end module PathMap : CSig.MapS with type key = PathOT.t type wf_rec = { wf_rec_term : constr; wf_rec_functional : constr option; wf_rec_arg : constr; wf_rec_rel : constr } type struct_rec = { struct_rec_arg : Syntax.rec_annot; struct_rec_protos : int; } type rec_node = | WfRec of wf_rec | StructRec of struct_rec type rec_info = { rec_prob : context_map; rec_lets : rel_context; rec_sign : rel_context; rec_arity : constr; rec_args : int; rec_node : rec_node } type splitting = Compute of context_map * where_clause list * types * splitting_rhs | Split of context_map * int * types * splitting option array | Mapping of context_map * splitting | Refined of context_map * refined_node * splitting and where_clause = { where_program : program; where_program_orig : program_info; where_program_args : constr list; (* In original context, de Bruijn only *) where_path : path; where_orig : path; where_context_length : int; (* Length of enclosing context, including fixpoint prototype if any *) where_type : types } and refined_node = { refined_obj : identifier * constr * types; refined_rettyp : types; refined_arg : int * int; (* Index counting lets or not *) refined_path : path; refined_term : EConstr.t; refined_filter : int list option; refined_args : constr list; refined_revctx : context_map; refined_newprob : context_map; refined_newprob_to_lhs : context_map; refined_newty : types } and program = { program_info : program_info; program_prob : context_map; program_rec : rec_info option; program_splitting : splitting; program_term : constr } and splitting_rhs = RProgram of constr | REmpty of int * splitting option array val where_id : where_clause -> Id.t val where_term : where_clause -> constr val program_id : program -> Id.t val program_type : program -> EConstr.t val program_sign : program -> EConstr.rel_context val program_arity : program -> EConstr.t val program_impls : program -> Impargs.manual_implicits val program_rec : program -> program_rec_info option val pr_path : Evd.evar_map -> path -> Pp.t val eq_path : path -> path -> bool val pr_splitting_rhs : ?verbose:bool -> env -> env (* With wheres *) -> Evd.evar_map -> context_map -> splitting_rhs -> EConstr.t -> Pp.t val pr_splitting : env -> Evd.evar_map -> ?verbose:bool -> splitting -> Pp.t val ppsplit : splitting -> unit val where_context : where_clause list -> rel_context val pr_program_info : env -> Evd.evar_map -> program_info -> Pp.t val context_map_of_splitting : splitting -> context_map val check_splitting : env -> Evd.evar_map -> splitting -> unit (** Compilation to Coq terms *) val term_of_tree : env -> Evd.evar_map ref -> ESorts.t -> splitting -> constr * constr type program_shape = | Single of program_info * context_map * rec_info option * splitting * constr | Mutual of program_info * context_map * rec_info * splitting * rel_context * constr val make_program : env -> Evd.evar_map ref -> program_info -> context_map -> splitting -> rec_info option -> program_shape val make_programs : Environ.env -> Evd.evar_map ref -> flags -> ?define_constants:bool -> (Syntax.program_info * Context_map.context_map * splitting * rec_info option) list -> program list val make_single_program : env -> Evd.evar_map ref -> flags -> program_info -> context_map -> splitting -> 
rec_info option -> program val define_one_program_constants : flags -> env -> Evd.evar_map ref -> Entries.universes_entry -> bool -> program -> (Constant.t * (int * int)) list * program val define_program_constants : flags -> env -> Evd.evar_map ref -> Entries.universes_entry -> ?unfold:bool -> program list -> (Constant.t * (int * int)) list * program list (** Compilation from splitting tree to terms. *) val is_comp_obl : Evd.evar_map -> Id.t with_loc option -> Evar_kinds.t -> bool type term_info = { term_id : Names.GlobRef.t; term_ustate : UState.t; base_id : string; poly : bool; scope : Locality.definition_scope; decl_kind : Decls.definition_object_kind; helpers_info : (Constant.t * (int * int)) list; comp_obls : Constant.t list; (** The recursive call proof obligations *) user_obls : Id.Set.t; (** The user proof obligations *) } type compiled_program_info = { program_cst : Constant.t; program_split_info : term_info } val is_polymorphic : term_info -> bool val define_mutual_nested : Environ.env -> Evd.evar_map ref -> ('a -> EConstr.t) -> (program_info * 'a) list -> (program_info * 'a * EConstr.t) list * (program_info * 'a * EConstr.constr) list val define_mutual_nested_csts : Equations_common.flags -> Environ.env -> Evd.evar_map ref -> (Evd.evar_map ref -> 'a -> EConstr.t) -> (Syntax.program_info * 'a) list -> (Syntax.program_info * 'a * EConstr.t) list * (Syntax.program_info * 'a * EConstr.t) list val define_programs : pm:Declare.OblState.t -> Environ.env -> Evd.evar_map ref -> UState.universe_decl -> Syntax.rec_type -> EConstr.rel_context -> Equations_common.flags -> ?unfold:bool -> program list -> (pm:Declare.OblState.t -> int -> program -> term_info -> unit * Declare.OblState.t) -> Declare.OblState.t * Declare.Proof.t option val define_program_immediate : pm:Declare.OblState.t -> Environ.env -> Evd.evar_map ref -> UState.universe_decl -> Syntax.rec_type -> EConstr.rel_context -> Equations_common.flags -> ?unfold:bool -> program -> (program * term_info) * Declare.OblState.t * Declare.Proof.t option val mapping_rhs : Evd.evar_map -> context_map -> splitting_rhs -> splitting_rhs val map_rhs : (constr -> constr) -> (int -> int) -> splitting_rhs -> splitting_rhs val map_split : (constr -> constr) -> splitting -> splitting val simplify_evars : Evd.evar_map -> EConstr.t -> EConstr.t Coq-Equations-1.3.1-8.20/src/subterm.ml000066400000000000000000000462121463127417400174110ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) open Names open Nameops open Constr open Context open Termops open Declarations open Inductiveops open Util open Entries module CVars = Vars open EConstr open Vars open Equations_common open Sigma_types let refresh_universes t = t (* MS: FIXME *) let derive_subterm ~pm env sigma ~poly (ind, u as indu) = let () = if Ederive.check_derive "NoConfusion" (Names.GlobRef.IndRef ind) || Ederive.check_derive "NoConfusionHom" (Names.GlobRef.IndRef ind) then () else user_err_loc (None, Pp.(str "[Derive Subterm] requires a [NoConfusion] " ++ str"or a [NoConfusionHom] instance for type " ++ Printer.pr_inductive env ind ++ str " to be derived first.")) in let (mind, oneind as ms) = Global.lookup_inductive ind in let ctx = 
CVars.subst_instance_context (EInstance.kind sigma u) oneind.mind_arity_ctxt in let indsort = let indty = Inductive.type_of_inductive (ms, EInstance.kind sigma u) in (snd (Reduction.dest_arity env indty)) in if Sorts.is_prop indsort || Sorts.is_sprop indsort then user_err_loc (None, Pp.str("Cannot define a well-founded subterm relation on a propositional inductive type.")); let sort = match Lazy.force logic_sort with | Sorts.InSProp -> failwith "not implemented" | Sorts.InProp -> mkProp | Sorts.InSet -> mkSet | Sorts.InType | Sorts.InQSort -> EConstr.mkSort (ESorts.make indsort) in let len = List.length ctx in let params = mind.mind_nparams_rec in (* let ctx = map_rel_context refresh_universes ctx in FIXME *) let lenargs = len - params in let argbinders, parambinders = List.chop lenargs (List.map of_rel_decl ctx) in let indapp = mkApp (mkIndU indu, extended_rel_vect 0 parambinders) in let getargs t = snd (Array.chop params (snd (decompose_app sigma t))) in let inds = let branches = Array.mapi (fun i ty -> let args, concl = decompose_prod_decls sigma (of_constr ty) in let lenargs = List.length args in let lenargs' = lenargs - params in let args', params' = List.chop lenargs' args in let recargs = CList.map_filter_i (fun i decl -> let (n, _, t) = to_tuple decl in let ctx, ar = decompose_prod_decls sigma t in match kind sigma (fst (decompose_app sigma ar)) with | Ind (ind',_) when Environ.QInd.equal env ind' ind -> Some (ctx, i, mkRel (succ i), getargs (lift (succ i) ar)) | _ -> None) args' in let constr = mkApp (mkConstructUi (indu, succ i), extended_rel_vect 0 args) in let constrargs = getargs concl in let branches = List.map_i (fun j (ctx, i', r, rargs) -> let ctxlen = List.length ctx in let subargs = Array.of_list ((extended_rel_list (lenargs' + ctxlen) parambinders) @ Array.to_list rargs @ (Array.map_to_list (lift ctxlen) constrargs) @ [mkApp (lift ctxlen r, extended_rel_vect 0 ctx) ; lift ctxlen constr]) in let relapp = mkApp (mkRel (succ lenargs + ctxlen), subargs) in (i, j, it_mkProd_or_LetIn (it_mkProd_or_LetIn relapp (lift_rel_context (succ i') ctx)) args')) 1 recargs in branches) (Inductive.type_of_constructors (from_peuniverses sigma indu) ms) in branches in let branches = Array.fold_right (fun x acc -> x @ acc) inds [] in let _trans_branch = let liftargbinders = lift_rel_context lenargs argbinders in let liftargbinders' = lift_rel_context lenargs liftargbinders in let indr = ERelevance.make oneind.mind_relevance in let indna id = make_annot (Name (Id.of_string id)) indr in let indapp n = (mkApp (lift (3 * lenargs + n) indapp, extended_rel_vect (n + (2 - n) * lenargs) argbinders)) in let terms = [(indna "z", None, indapp 2); (indna "y", None, indapp 1); (indna "x", None, indapp 0)] in let binders = to_context terms @ liftargbinders' @ liftargbinders @ argbinders in let lenbinders = 3 * succ lenargs in let xy = (mkApp (mkRel (succ lenbinders + params), Array.of_list (extended_rel_list lenbinders parambinders @ extended_rel_list (2 * lenargs + 3) argbinders @ extended_rel_list (lenargs + 3) argbinders @ [mkRel 3; mkRel 2]))) and yz = (mkApp (mkRel (succ lenbinders + params), Array.of_list (extended_rel_list lenbinders parambinders @ extended_rel_list (lenargs + 3) argbinders @ extended_rel_list 3 argbinders @ [mkRel 2; mkRel 1]))) and xz = (mkApp (mkRel (succ lenbinders + params), Array.of_list (extended_rel_list lenbinders parambinders @ extended_rel_list (2 * lenargs + 3) argbinders @ extended_rel_list 3 argbinders @ [mkRel 3; mkRel 1]))) in (0, 0, it_mkProd_or_LetIn (mkProd 
(annotR Anonymous, xy, mkProd (annotR Anonymous, lift 1 yz, lift 2 xz))) binders) in let branches = (* trans_branch :: *)branches in let declare_one_ind i ind branches = let indid = Nametab.basename_of_global (Names.GlobRef.IndRef (fst ind)) in let subtermid = add_suffix indid "_direct_subterm" in let constructors = List.map (fun (i, j, constr) -> EConstr.to_constr sigma constr) branches in let consnames = List.map (fun (i, j, _) -> add_suffix subtermid ("_" ^ string_of_int i ^ "_" ^ string_of_int j)) branches in let lenargs = List.length argbinders in let liftedbinders = lift_rel_context lenargs argbinders in let binders = liftedbinders @ argbinders in let appparams = mkApp (mkIndU ind, extended_rel_vect (2 * lenargs) parambinders) in let arity = it_mkProd_or_LetIn (mkProd (annotR Anonymous, mkApp (appparams, extended_rel_vect lenargs argbinders), mkProd (annotR Anonymous, lift 1 (mkApp (appparams, extended_rel_vect 0 argbinders)), sort))) binders in { mind_entry_typename = subtermid; mind_entry_arity = EConstr.to_constr sigma arity; mind_entry_consnames = consnames; mind_entry_lc = constructors } in let univs, ubinders = Evd.univ_entry ~poly sigma in let uctx = match univs with | UState.Monomorphic_entry ctx -> let () = Global.push_context_set ~strict:true ctx in Entries.Monomorphic_ind_entry | UState.Polymorphic_entry uctx -> Entries.Polymorphic_ind_entry uctx in let declare_ind ~pm = let inds = [declare_one_ind 0 indu branches] in let inductive = { mind_entry_record = None; mind_entry_finite = Declarations.Finite; mind_entry_params = List.map (fun d -> to_rel_decl sigma (Context.Rel.Declaration.map_constr refresh_universes d)) parambinders; mind_entry_inds = inds; mind_entry_private = None; mind_entry_universes = uctx; mind_entry_variance = None; } in let k = DeclareInd.declare_mutual_inductive_with_eliminations inductive (univs, ubinders) [] in let () = let env = Global.env () in let sigma = Evd.from_env env in let sigma, ind = Evd.fresh_inductive_instance env sigma (k,0) in ignore (Sigma_types.declare_sig_of_ind env sigma ~poly (to_peuniverses ind)) in let subind = mkIndU ((k,0), u) in let constrhints = List.map_i (fun i entry -> List.map_i (fun j _ -> empty_hint_info, true, Hints.hint_globref (GlobRef.ConstructRef ((k,i),j))) 1 entry.mind_entry_lc) 0 inds in let locality = if Global.sections_are_opened () then Hints.Local else Hints.SuperGlobal in let () = Hints.add_hints ~locality [subterm_relation_base] (Hints.HintsResolveEntry (List.concat constrhints)) in (* Proof of Well-foundedness *) let relid = add_suffix (Nametab.basename_of_global (GlobRef.IndRef ind)) "_subterm" in let id = add_prefix "well_founded_" relid in (* Catch the new signature universe *) let env = Global.env () in let sigma = Evd.update_sigma_univs (Environ.universes env) sigma in let evm = ref sigma in let kl = get_efresh logic_wellfounded_class evm in let kl = get_class sigma kl in let parambinders, body, ty = let pars, ty, rel = if List.is_empty argbinders then (* Standard homogeneous well-founded relation *) parambinders, indapp, mkApp (subind, extended_rel_vect 0 parambinders) else (* Construct a family relation by packaging all indexes into a sigma type *) let _, _, pars, indices, indexproj, valproj, valsig, typesig = sigmaize env evm parambinders indapp in let env' = push_rel_context pars env in let subrel = let liftindices = List.map (liftn 2 2) indices in (* sigma is not sort poly (at least for now) *) let yindices = List.map (subst1 (mkProj (indexproj, ERelevance.relevant, mkRel 1))) liftindices in let 
xindices = List.map (subst1 (mkProj (indexproj, ERelevance.relevant, mkRel 2))) liftindices in let apprel = applistc subind (extended_rel_list 2 parambinders @ (xindices @ yindices @ [mkProj (valproj, ERelevance.relevant, mkRel 2); mkProj (valproj, ERelevance.relevant, mkRel 1)])) in mkLambda (nameR (Id.of_string "x"), typesig, mkLambda (nameR (Id.of_string "y"), lift 1 typesig, apprel)) in let typesig = Tacred.simpl env' !evm typesig in let subrel = Tacred.simpl env' !evm subrel in pars, typesig, subrel in let relation = let def = it_mkLambda_or_LetIn (mkApp (get_efresh logic_transitive_closure evm, [| ty; rel |])) pars in let ty = it_mkProd_or_LetIn (mkApp (get_efresh logic_relation evm, [| ty |])) parambinders in let kn, (evm', cst) = declare_constant relid def (Some ty) ~poly !evm ~kind:Decls.(IsDefinition Definition) in evm := evm'; (* Impargs.declare_manual_implicits false (ConstRef cst) ~enriching:false *) (* (list_map_i (fun i _ -> ExplByPos (i, None), (true, true, true)) 1 parambinders); *) Hints.add_hints ~locality [subterm_relation_base] (Hints.HintsUnfoldEntry [Evaluable.EvalConstRef kn]); mkApp (cst, extended_rel_vect 0 parambinders) in let env' = push_rel_context pars env in let evar = let evt = (mkApp (get_efresh logic_wellfounded evm, [| ty; relation |])) in evd_comb1 (Evarutil.new_evar env') evm evt in let b, t = instance_constructor !evm kl [ ty; relation; evar ] in (pars, b, t) in let ty = it_mkProd_or_LetIn ty parambinders in let body = it_mkLambda_or_LetIn (Option.get body) parambinders in let hook { Declare.Hook.S.dref; _ } = let cst = match dref with GlobRef.ConstRef kn -> kn | _ -> assert false in Classes.declare_instance (Global.env ()) !evm (Some empty_hint_info) Hints.SuperGlobal (GlobRef.ConstRef cst) in let _bodyty = e_type_of (Global.env ()) evm body in let _ty' = e_type_of (Global.env ()) evm ty in let evm = Evd.minimize_universes !evm in let obls, _, constr, typ = RetrieveObl.retrieve_obligations env id evm 0 body ty in let uctx = Evd.evar_universe_context evm in let cinfo = Declare.CInfo.make ~name:id ~typ () in let info = Declare.Info.make ~poly ~scope:Locality.(Global ImportDefaultBehavior) ~kind:Decls.(IsDefinition Instance) ~hook:(Declare.Hook.make hook) () in let pm, _ = Declare.Obls.add_definition ~pm ~cinfo ~info ~body:constr ~uctx ~opaque:false ~tactic:(solve_subterm_tac ()) obls in pm in declare_ind ~pm let () = Ederive.(register_derive { derive_name = "Subterm"; derive_fn = make_derive_ind derive_subterm }) let derive_below env sigma ~poly (ind,univ as indu) = let evd = ref sigma in let mind, oneind = Global.lookup_inductive ind in let ctx = oneind.mind_arity_ctxt in let params = mind.mind_nparams in let realargs = oneind.mind_nrealargs in let realdecls = oneind.mind_nrealdecls in let ctx = List.map of_rel_decl ctx in let allargsvect = extended_rel_vect 0 ctx in let indty = mkApp (mkIndU indu, allargsvect) in let indr = ERelevance.make oneind.mind_relevance in let ctx = of_tuple (make_annot (Name (Id.of_string "c")) indr, None, indty) :: ctx in let argbinders, parambinders = List.chop (succ realdecls) ctx in let u = evd_comb0 (Evd.new_sort_variable Evd.univ_rigid) evd in let ru = Retyping.relevance_of_sort u in let u = mkSort u in let arity = it_mkProd_or_LetIn u argbinders in let aritylam = lift (succ realdecls) (it_mkLambda_or_LetIn u argbinders) in let paramsvect = rel_vect (succ realdecls) params in let argsvect = extended_rel_vect 0 (CList.firstn (succ realdecls) ctx) in let pid = Id.of_string "P" in let pdecl = make_assum (make_annot (Name 
pid) ru) arity in let arity = lift 1 arity in let stepid = Id.of_string "step" in let recid = Id.of_string "rec" in let belowid = Id.of_string "below" in let paramspargs = Array.append (Array.append paramsvect [| mkVar pid |]) argsvect in let tyb = mkApp (mkVar belowid, paramspargs) in let arityb = lift 2 (it_mkProd_or_LetIn tyb argbinders) in let aritylamb = lift (succ realdecls) (it_mkLambda_or_LetIn tyb argbinders) in let termB, termb = let branches = Array.mapi (fun i (ctx, ty) -> let ty = Term.it_mkProd_or_LetIn ty ctx in let ty = of_constr ty in let ty = Vars.subst_instance_constr univ ty in let nargs = constructor_nrealargs env (ind, succ i) in let recarg = mkVar recid in let args, _ = decompose_prod_decls !evd ty in let args, _ = List.chop (List.length args - params) args in let ty' = replace_term !evd (mkApp (mkIndU (ind,univ), rel_vect (-params) params)) recarg ty in let args', _ = decompose_prod_decls !evd ty' in let args', _ = List.chop (List.length args' - params) args' in let arg_tys = fst (List.fold_left (fun (acc, n) decl -> let t = get_type decl in ((mkRel n, lift n t) :: acc, succ n)) ([], 1) args') in let fold_unit f args = let res = List.fold_left (fun acc x -> match acc with | Some (c, ty) -> Option.cata (fun x -> Some x) acc (f (fun (c', ty') -> mkApp (get_efresh logic_pair evd, [| ty' ; ty ; c' ; c |]), mkApp (get_efresh logic_product evd, [| ty' ; ty |])) x) | None -> f (fun x -> x) x) None args in Option.cata (fun x -> x) (get_efresh logic_unit_intro evd, get_efresh logic_unit evd) res in (* This wrapper checks if the argument is a recursive one, * and do the appropriate transformations if it is a product. *) let wrapper f = fun g (c, t) -> let prem, res = decompose_prod_decls !evd t in let t, args = decompose_app !evd res in if eq_constr !evd t recarg then let nprem = List.length prem in let elt = mkApp (lift nprem c, rel_vect 0 nprem) in let args = Array.append args [| elt |] in let res, ty = f args nprem in let res = it_mkLambda_or_LetIn res prem in let ty = it_mkProd_or_LetIn ty prem in Some (g (res, ty)) else None in let prod = get_efresh logic_product evd in let _, bodyB = fold_unit (wrapper (fun args _ -> let ty = mkApp (prod, [| mkApp (mkVar pid, args) ; mkApp (mkVar recid, args) |]) in mkRel 0, ty)) arg_tys in let bodyb, _ = fold_unit (wrapper (fun args nprem -> let reccall = mkApp (mkVar recid, args) in let belowargs = Array.append (rel_vect (nargs + nprem) params) (Array.append [| mkVar pid |] args) in let res = mkApp (get_efresh logic_pair evd, [| mkApp (mkVar pid, args) ; mkApp (mkVar belowid, belowargs) ; mkApp (mkApp (mkVar stepid, args), [| reccall |]); reccall |]) in let ty = mkApp (prod, [| mkApp (mkVar pid, args) ; mkApp (mkVar belowid, belowargs) |]) in res, ty)) arg_tys in (* The free variables correspond to the inductive parameters. 
*) let bodyB = lift (succ realdecls) (it_mkLambda_or_LetIn bodyB args) in let bodyb = lift (succ realdecls) (it_mkLambda_or_LetIn bodyb args) in (nargs, bodyB, bodyb)) oneind.mind_nf_lc in let caseB = mkCase (EConstr.contract_case env !evd (make_case_info env ind RegularStyle, (aritylam, ERelevance.relevant), NoInvert, mkRel 1, Array.map pi2 branches)) and caseb = mkCase (EConstr.contract_case env !evd (make_case_info env ind RegularStyle, (aritylamb, ERelevance.relevant), NoInvert, mkRel 1, Array.map pi3 branches)) in lift 2 (it_mkLambda_or_LetIn caseB argbinders), lift 3 (it_mkLambda_or_LetIn caseb argbinders) in let fixB = mkFix (([| realargs |], 0), ([| make_annot (Name recid) ru |], [| arity |], [| subst_vars !evd [recid; pid] termB |])) in let bodyB = it_mkLambda_or_LetIn fixB (pdecl :: parambinders) in let id = add_prefix "Below_" (Nametab.basename_of_global (GlobRef.IndRef ind)) in let _, (evd, belowB) = declare_constant id bodyB None ~poly !evd ~kind:Decls.(IsDefinition Definition) in let fixb = mkFix (([| realargs |], 0), ([| nameR recid |], [| arityb |], [| subst_vars evd [recid; stepid] termb |])) in let stepdecl = let stepty = mkProd (anonR, mkApp (belowB, paramspargs), mkApp (mkVar pid, Array.map (lift 1) argsvect)) in make_assum (nameR stepid) (lift 1 (it_mkProd_or_LetIn stepty argbinders)) in let bodyb = it_mkLambda_or_LetIn (subst_vars evd [pid] (mkLambda_or_LetIn stepdecl fixb)) (pdecl :: parambinders) in let bodyb = replace_vars evd [belowid, belowB] bodyb in let id = add_prefix "below_" (Nametab.basename_of_global (GlobRef.IndRef ind)) in let evd = if poly then evd else Evd.from_env (Global.env ()) in ignore(declare_constant id bodyb None ~poly evd ~kind:Decls.(IsDefinition Definition)) let () = let derive_below ~pm env sigma ~poly indu = let () = derive_below env sigma ~poly indu in pm in Ederive.(register_derive { derive_name = "Below"; derive_fn = make_derive_ind derive_below }) Coq-Equations-1.3.1-8.20/src/subterm.mli000066400000000000000000000014121463127417400175530ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) val derive_subterm : pm:Declare.OblState.t -> Environ.env -> Evd.evar_map -> poly:bool -> Names.inductive * EConstr.EInstance.t -> Declare.OblState.t val derive_below : Environ.env -> Evd.evar_map -> poly:bool -> Names.inductive * EConstr.EInstance.t -> unit Coq-Equations-1.3.1-8.20/src/syntax.ml000066400000000000000000000576331463127417400172670ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) open Printer open Ppconstr open Util open Names open Constr open Pp open Glob_term open List open Libnames open Constrexpr_ops open Constrexpr open Evar_kinds open Equations_common open Constrintern type 'a with_loc = Loc.t option * 'a type identifier = Names.Id.t type provenance = | User | Generated | Implicit (** User-level patterns *) type 
user_pat = PUVar of identifier * provenance | PUCstr of constructor * int * user_pats | PUInac of Glob_term.glob_constr | PUEmpty and user_pat_loc = (user_pat, [ `any ]) DAst.t and user_pats = user_pat_loc list (** AST *) type pat_expr = | PEApp of qualid Constrexpr.or_by_notation with_loc * pat_expr with_loc list | PEWildcard | PEInac of constr_expr type user_pat_expr = pat_expr with_loc type 'a input_pats = | SignPats of 'a | RefinePats of 'a list type rec_annotation = | Nested | Mutual type user_rec_annot = rec_annotation option type rec_arg = int * Id.t with_loc option type rec_annot = | MutualOn of rec_arg option | NestedOn of rec_arg option | NestedNonRec type program_body = | ConstrExpr of Constrexpr.constr_expr | GlobConstr of Glob_term.glob_constr | Constr of EConstr.constr (* We interpret a constr by substituting [Var names] of the lhs bound variables with the proper de Bruijn indices *) type lhs = user_pats and ('a,'b) rhs_aux = Program of program_body * 'a wheres | Empty of identifier with_loc | Refine of Constrexpr.constr_expr list * 'b list and ('a,'b) rhs = ('a, 'b) rhs_aux option (* Empty patterns allow empty r.h.s. *) and pre_prototype = identifier with_loc * Constrexpr.universe_decl_expr option * user_rec_annot * Constrexpr.local_binder_expr list * Constrexpr.constr_expr option * (Id.t with_loc option, Constrexpr.constr_expr * Constrexpr.constr_expr option) by_annot option and ('a, 'b) by_annot = | Structural of 'a | WellFounded of 'b and 'a where_clause = pre_prototype * 'a list and 'a wheres = 'a where_clause list * Vernacexpr.notation_declaration list type program = (signature * clause list) list and signature = identifier * rel_context * constr (* f : Π Δ. τ *) and clause = Clause of Loc.t option * lhs * (clause, clause) rhs (* lhs rhs *) type pre_equation = Pre_equation of Constrexpr.constr_expr input_pats * (pre_equation, pre_equation) rhs type pre_clause = Pre_clause of Loc.t option * lhs * (pre_equation, pre_clause) rhs type pre_equations = pre_equation where_clause list let pr_provenance ~with_gen id = function | User -> id | Generated -> str"_" ++ (if with_gen then id else mt ()) | Implicit -> str"{" ++ id ++ str"}" let rec pr_user_loc_pat ~with_gen env sigma ?loc pat = match pat with | PUVar (i, gen) -> pr_provenance ~with_gen (Id.print i) gen | PUCstr (c, i, f) -> let pc = pr_constructor env c in if not (List.is_empty f) then str "(" ++ pc ++ spc () ++ pr_user_pats env sigma f ++ str ")" else pc | PUInac c -> str "?(" ++ pr_glob_constr_env env sigma c ++ str ")" | PUEmpty -> str"!" and pr_user_pat ?(with_gen=false) env sigma = DAst.with_loc_val (pr_user_loc_pat ~with_gen env sigma) and pr_user_pats ?(with_gen=true) env sigma pats = prlist_with_sep spc (pr_user_pat ~with_gen env sigma) pats let pr_lhs = pr_user_pats ~with_gen:true let pplhs lhs = let env = Global.env () in let sigma = Evd.from_env env in pp (pr_lhs env sigma lhs) let pr_body env sigma = function | ConstrExpr rhs -> pr_constr_expr env sigma rhs | GlobConstr rhs -> pr_glob_constr_env env sigma rhs | Constr c -> str"" let rec pr_rhs_aux env sigma = function | Empty (loc, var) -> spc () ++ str ":=!" 
++ spc () ++ Id.print var | Program (rhs, where) -> spc () ++ str ":=" ++ spc () ++ pr_body env sigma rhs ++ spc () ++ pr_wheres env sigma where | Refine (rhs, s) -> spc () ++ str "with" ++ spc () ++ prlist_with_sep (fun () -> str",") (pr_constr_expr env sigma) rhs ++ spc () ++ str "=>" ++ spc () ++ hov 1 (str "{" ++ pr_clauses env sigma s ++ str "}") and pr_rhs env sigma = function | None -> mt () | Some rhs -> pr_rhs_aux env sigma rhs and pr_wheres env sigma (l, nts) = if List.is_empty l then mt() else str"where" ++ spc () ++ prlist_with_sep fnl (pr_where env sigma) l and pr_where env sigma (sign, eqns) = pr_proto env sigma sign ++ str "{" ++ pr_clauses env sigma eqns ++ str "}" and pr_proto env sigma ((_,id), _, _, l, t, ann) = Id.print id ++ pr_binders env sigma l ++ pr_opt (fun t -> str" : " ++ pr_constr_expr env sigma t) t ++ (match ann with None -> mt () | Some (WellFounded (t, rel)) -> str"by wf " ++ pr_constr_expr env sigma t ++ pr_opt (pr_constr_expr env sigma) rel | Some (Structural id) -> str"by struct " ++ pr_opt (fun x -> pr_id (snd x)) id) and pr_clause env sigma (Clause (loc, lhs, rhs)) = pr_lhs env sigma lhs ++ pr_rhs env sigma rhs and pr_clauses env sigma = prlist_with_sep fnl (pr_clause env sigma) let pr_user_lhs env sigma lhs = match lhs with | SignPats x -> pr_constr_expr env sigma x | RefinePats l -> prlist_with_sep (fun () -> str "|") (pr_constr_expr env sigma) l let rec pr_user_rhs_aux env sigma = function | Empty (loc, var) -> spc () ++ str ":=!" ++ spc () ++ Id.print var | Program (rhs, where) -> spc () ++ str ":=" ++ spc () ++ pr_body env sigma rhs ++ spc () ++ pr_prewheres env sigma where | Refine (rhs, s) -> spc () ++ str "with" ++ spc () ++ prlist_with_sep (fun () -> str ",") (pr_constr_expr env sigma) rhs ++ spc () ++ str "=>" ++ spc () ++ hov 1 (str "{" ++ pr_user_clauses env sigma s ++ str "}") and pr_prerhs_aux env sigma = function | Empty (loc, var) -> spc () ++ str ":=!" 
++ spc () ++ Id.print var | Program (rhs, where) -> spc () ++ str ":=" ++ spc () ++ pr_body env sigma rhs ++ spc () ++ pr_prewheres env sigma where | Refine (rhs, s) -> spc () ++ str "with" ++ spc () ++ prlist_with_sep (fun () -> str ",") (pr_constr_expr env sigma) rhs ++ spc () ++ str "=>" ++ spc () ++ hov 1 (str "{" ++ pr_preclauses env sigma s ++ str "}") and pr_user_rhs env sigma = pr_opt (pr_user_rhs_aux env sigma) and pr_prerhs env sigma = pr_opt (pr_prerhs_aux env sigma) and pr_user_clause env sigma (Pre_equation (lhs, rhs)) = pr_user_lhs env sigma lhs ++ pr_user_rhs env sigma rhs and pr_user_clauses env sigma = prlist_with_sep fnl (pr_user_clause env sigma) and pr_prewheres env sigma (l, nts) = if List.is_empty l then mt() else str"where" ++ spc () ++ prlist_with_sep fnl (pr_prewhere env sigma) l and pr_prewhere env sigma (sign, eqns) = pr_proto env sigma sign ++ str "{" ++ pr_user_clauses env sigma eqns ++ str "}" and pr_preclause env sigma (Pre_clause (loc, lhs, rhs)) = pr_lhs env sigma lhs ++ pr_prerhs env sigma rhs and pr_preclauses env sigma = prlist_with_sep fnl (pr_preclause env sigma) let ppclause clause = let env = Global.env () in let sigma = Evd.from_env env in pp(pr_clause env sigma clause) let wit_equations_list : pre_equation list Genarg.uniform_genarg_type = Genarg.create_arg "equations_list" let next_ident_away s ids = let n' = Namegen.next_ident_away s !ids in ids := Id.Set.add n' !ids; n' type equation_option = OInd | OEquations | OTactic of Libnames.qualid type equation_user_option = equation_option * bool type equation_options = equation_user_option list let pr_r_equation_user_option _prc _prlc _prt l = mt () let pr_equation_options _prc _prlc _prt l = mt () (* Attributes *) let derive_flags = let open Attributes in let open Notations in Attributes.qualify_attribute "derive" (bool_attribute ~name:"eliminator" ++ bool_attribute ~name:"equations") let equations_attributes attrs = let open Attributes in let add_bool key value l = match value with | Some b -> (key, b) :: l | None -> l in let (eliminator, equations) = parse derive_flags attrs in add_bool OInd eliminator (add_bool OEquations equations []) let tactic_parser : qualid Attributes.key_parser = fun ?loc orig args -> let open Attributes in assert_once ?loc ~name:"tactic" orig; match args with | VernacFlagLeaf (FlagString str) -> qualid_of_string str | VernacFlagLeaf (FlagIdent str) -> qualid_of_string str | _ -> CErrors.user_err ?loc (Pp.str "Ill formed \"tactic\" attribute") let equations_tactic = Attributes.attribute_of_list ["tactic",tactic_parser] type rec_type_item = | Guarded of (Id.t * rec_annot) list (* for mutual rec *) | Logical of int * Id.t with_loc (* for nested wf rec: number of arguments before the side-condition *) type rec_type = rec_type_item option list let is_structural = function Some (Guarded _) :: _ -> true | _ -> false let has_logical rec_type = List.exists (function Some (Logical _) -> true | _ -> false) rec_type let is_rec_call id c = let id' = Label.to_id (Constant.label c) in Id.equal id id' let free_vars_of_constr_expr fid c = let rec aux bdvars l = function | { CAst.v = CRef (qid, _) } when qualid_is_ident qid -> let id = qualid_basename qid in if Id.List.mem id bdvars then l else if Option.cata (Id.equal id) false fid then l else (try match Nametab.locate_extended (Libnames.qualid_of_ident id) with | Globnames.TrueGlobal gr -> if not (Globnames.isConstructRef gr) then Id.Set.add id l else l | Globnames.Abbrev _ -> l with Not_found -> Id.Set.add id l) | { CAst.v = CNotation 
(_,(InConstrEntry, "?( _ )"), _) } -> l | c -> fold_constr_expr_with_binders (fun a l -> a::l) aux bdvars l c in aux [] Id.Set.empty c let ids_of_pats id pats = fold_left (fun ids p -> Id.Set.union ids (free_vars_of_constr_expr id p)) Id.Set.empty pats type wf_rec_info = Constrexpr.constr_expr * Constrexpr.constr_expr option * Id.t with_loc type program_rec_info = (rec_annot, wf_rec_info) by_annot type program_info = { program_loc : Loc.t option; program_id : Id.t; program_orig_type : EConstr.t; (* The original type *) program_sort : Sorts.t; (* The sort of this type *) program_sign : EConstr.rel_context; program_arity : EConstr.t; program_rec : program_rec_info option; program_impls : Impargs.manual_implicits; program_implicits : Impargs.implicit_status list; } let map_universe f u = let u' = f (EConstr.mkSort (EConstr.ESorts.make u)) in match Constr.kind (EConstr.Unsafe.to_constr u') with | Sort s -> s | _ -> assert false let map_program_info f p = { p with program_orig_type = f p.program_orig_type; program_sort = map_universe f p.program_sort; program_sign = map_rel_context f p.program_sign; program_arity = f p.program_arity } let _chole c loc = (* let tac = Genarg.in_gen (Genarg.rawwit Constrarg.wit_tactic) (solve_rec_tac_expr ()) in *) let kn = Lib.make_kn c in let cst = Names.Constant.make kn kn in CAst.make ~loc (CHole (Some (GImplicitArg (GlobRef.ConstRef cst, (0,None), false)))), None let _check_linearity env sigma opats = let rec aux ids pats = List.fold_left (fun ids pat -> DAst.with_loc_val (fun ?loc pat -> match pat with | PUVar (n, _) -> if Id.Set.mem n ids then CErrors.user_err ?loc (str "Non-linear occurrence of variable in patterns: " ++ pr_user_pats env sigma opats) else Id.Set.add n ids | PUInac _ -> ids | PUEmpty -> ids | PUCstr (_, _, pats) -> aux ids pats) pat) ids pats in ignore (aux Id.Set.empty opats) let is_implicit_arg = function | GImplicitArg _ -> true | _ -> false let pattern_of_glob_constr env sigma avoid patname gc = let avoid = ref avoid in let rec constructor ?loc c l = let ind, _ = c in let nparams = Inductiveops.inductive_nparams env ind in let nargs = Inductiveops.constructor_nrealargs env c in let l = if List.length l < nargs then user_err_loc (loc, str "Constructor " ++ Printer.pr_global (GlobRef.ConstructRef c) ++ str" is applied to too few arguments") else if List.length l = nparams + nargs then List.skipn nparams l else if List.length l = nargs then l else user_err_loc (loc, str "Constructor is applied to too many arguments"); in Dumpglob.add_glob ?loc (GlobRef.ConstructRef c); PUCstr (c, nparams, List.map (DAst.map_with_loc (aux Anonymous)) l) and aux patname ?loc = function | GVar id -> PUVar (id, User) | GHole k -> (match patname with | Name id when is_implicit_arg k -> PUVar (id, Implicit) | _ -> let id = Id.of_string "wildcard" in let n = next_ident_away id avoid in PUVar (n, Generated)) | GRef (GlobRef.ConstructRef cstr,_) -> constructor ?loc cstr [] | GRef (GlobRef.ConstRef _ as c, _) when Environ.QGlobRef.equal env c (Lazy.force coq_bang) -> PUEmpty | GApp (c, l) -> begin match DAst.get c with | GRef (GlobRef.ConstructRef cstr,_) -> constructor ?loc cstr l | GRef (GlobRef.ConstRef _ as c, _) when Environ.QGlobRef.equal env c (Lazy.force coq_inacc) -> let inacc = List.hd (List.tl l) in PUInac inacc | _ -> user_err_loc (loc, str "Cannot interpret " ++ pr_glob_constr_env env sigma c ++ str " as a constructor") end (* | GLetIn (Name id as na',b,None,e) when is_gvar id e && na = Anonymous -> * (\* A canonical encoding of aliases *\) * DAst.get 
(cases_pattern_of_glob_constr na' b) *) | _ -> user_err_loc (loc, str ("Cannot interpret globalized term as a pattern")) in let gc =DAst.map_with_loc (aux patname) gc in !avoid, gc let program_type p = EConstr.it_mkProd_or_LetIn p.program_arity p.program_sign let interp_pat env sigma notations ~avoid p pat = let vars = (Id.Set.elements avoid) (* (ids_of_pats [p])) *) in (* let () = Feedback.msg_debug (str"Variables " ++ prlist_with_sep spc pr_id vars) in *) let tys = List.map (fun _ -> EConstr.mkProp) vars in let rlv = List.map (fun _ -> EConstr.ERelevance.relevant) vars in let impls = List.map (fun _ -> []) vars in (* let () = Feedback.msg_debug (str"Internalizing " ++ pr_constr_expr p) in *) let ienv = try compute_internalization_env env sigma Variable vars tys impls with Not_found -> anomaly (str"Building internalization environment") in let notations = List.map Metasyntax.prepare_where_notation notations in let vars, rlv, tys, impls, ienv = match p with | Some (p, _) -> let ty = program_type p in let r = Retyping.relevance_of_type env sigma ty in let ienv = try compute_internalization_env env sigma ~impls:ienv Recursive [p.program_id] [ty] [p.program_impls] with Not_found -> anomaly (str"Building internalization environment") in (p.program_id :: vars, r :: rlv, ty :: tys, p.program_impls :: impls, ienv) | None -> (vars, rlv, tys, impls, ienv) in let nctx = List.map3 (fun id r ty -> Context.Named.Declaration.LocalAssum (Context.make_annot id (EConstr.Unsafe.to_relevance r), EConstr.Unsafe.to_constr ty)) vars rlv tys in let env = Environ.push_named_context nctx env in let gc = Metasyntax.with_syntax_protection (fun () -> List.iter (Metasyntax.set_notation_for_interpretation (Environ.push_named_context nctx env) ienv) notations; try Constrintern.intern_gen Pretyping.WithoutTypeConstraint ~impls:ienv env sigma pat with Not_found -> anomaly (str"Internalizing pattern")) () in try match p with | Some ({ program_id = id }, patnames) -> DAst.with_loc_val (fun ?loc g -> let fn, args = match g with | GApp (fn, args) -> fn, args | _ -> DAst.make ?loc g, [] in DAst.with_loc_val (fun ?loc gh -> match gh with | GVar fid when Id.equal fid id -> let rec aux avoid args patnames = match args, patnames with | a :: args, patname :: patnames -> let avoid, pat = pattern_of_glob_constr env sigma avoid patname a in let avoid, pats = aux avoid args patnames in avoid, pat :: pats | a :: args, [] -> let avoid, pat = pattern_of_glob_constr env sigma avoid Anonymous a in let avoid, pats = aux avoid args [] in avoid, pat :: pats | [], _ -> avoid, [] in aux avoid args patnames | _ -> user_err_loc (loc, str "Expecting a pattern for " ++ Id.print id)) fn) gc | None -> let avoid, pat = pattern_of_glob_constr env sigma avoid Anonymous gc in avoid, [pat] with Not_found -> anomaly (str"While translating pattern to glob constr") (* let rename_away_from ids pats = let rec aux ?loc pat = match pat with | PUVar (id, true) when Id.Set.mem id !ids -> let id' = next_ident_away id ids in PUVar (id', true) | PUVar (id, _) -> pat | PUCstr (c, n, args) -> PUCstr (c, n, List.map (DAst.map_with_loc aux) args) | PUInac c -> pat | PUEmpty -> pat in List.map (DAst.map_with_loc aux) pats *) let interleave_implicits impls pats = let rec aux impls pats = match impls, pats with | Some id :: impls, pats -> DAst.make (PUVar (id, Implicit)) :: aux impls pats | None :: impls, pat :: pats -> pat :: aux impls pats | None :: _, [] -> [] | [], pats -> pats in aux impls pats let interp_eqn env sigma notations p ~avoid eqn = let whereid = ref 
(Nameops.add_suffix p.program_id "_abs_where") in let patnames = List.rev_map (fun decl -> Context.Rel.Declaration.get_name decl) p.program_sign in let impls = List.map (fun a -> if Impargs.is_status_implicit a then Some (Impargs.name_of_implicit a) else None) p.program_implicits in let interp_pat notations avoid = interp_pat env sigma notations ~avoid in let rec aux notations avoid curpats (Pre_equation (pat, rhs)) = let loc, avoid, pats = match pat with | SignPats pat -> let avoid = Id.Set.union avoid (ids_of_pats (Some p.program_id) [pat]) in let loc = Constrexpr_ops.constr_loc pat in let avoid, pats = interp_pat notations avoid (Some (p, patnames)) pat in loc, avoid, pats | RefinePats pats -> let patids = ids_of_pats None pats in (* let curpats = rename_away_from patids curpats in *) let avoid = Id.Set.union avoid patids in let loc = Constrexpr_ops.constr_loc (List.hd pats) in let avoid, pats = List.fold_left_map (fun avoid -> interp_pat notations avoid None) avoid pats in let pats = List.map (fun x -> List.hd x) pats in let pats = (* At the toplevel only, interleave using the implicit status of the function arguments *) if curpats = [] then interleave_implicits impls pats else curpats @ pats in loc, avoid, pats in Pre_clause (loc, pats, Option.map (interp_rhs notations avoid pats) rhs) and aux2 notations avoid (Pre_equation (pat, rhs)) = Pre_equation (pat, Option.map (interp_rhs' notations avoid) rhs) and interp_rhs' notations avoid = function | Refine (c, eqs) -> let avoid = Id.Set.union avoid (ids_of_pats None c) in let interp c = let wheres, c = CAst.with_loc_val (interp_constr_expr notations avoid) c in if not (List.is_empty wheres) then user_err_loc (Constrexpr_ops.constr_loc c, str"Pattern-matching lambdas not allowed in refine"); c in Refine (List.map interp c, map (aux2 notations avoid) eqs) | Program (c, (w, nts)) -> let notations = nts @ notations in let w = interp_wheres avoid w notations in let w', c = match c with | ConstrExpr c -> let wheres, c = CAst.with_loc_val (interp_constr_expr notations avoid) c in wheres, ConstrExpr c | GlobConstr c -> [], GlobConstr c | Constr c -> [], Constr c in Program (c, (List.append w' w, nts)) | Empty i -> Empty i and interp_rhs notations avoid curpats = function | Refine (c, eqs) -> let avoid = Id.Set.union avoid (ids_of_pats None c) in let interp c = let wheres, c = CAst.with_loc_val (interp_constr_expr notations avoid) c in if not (List.is_empty wheres) then user_err_loc (Constrexpr_ops.constr_loc c, str"Pattern-matching lambdas not allowed in refine"); c in Refine (List.map interp c, map (aux notations avoid curpats) eqs) | Program (c, (w, nts)) -> let w = interp_wheres avoid w (nts @ notations) in let w', c = match c with | ConstrExpr c -> let wheres, c = CAst.with_loc_val (interp_constr_expr notations avoid) c in wheres, ConstrExpr c | GlobConstr c -> [], GlobConstr c | Constr c -> [], Constr c in Program (c, (List.append w' w, nts)) | Empty i -> Empty i and interp_wheres avoid w notations = let interp_where (((loc,id),decl,nested,b,t,reca) as p,eqns) = Dumpglob.dump_reference ?loc "<>" (Id.to_string id) "def"; p, map (aux2 notations avoid) eqns in List.map interp_where w and interp_constr_expr notations (avoid : Id.Set.t) ?loc c = let wheres = ref [] in let rec aux' avoid ?loc c = match c with (* | CApp ((None, { CAst.v = CRef (qid', ie) }), args) * when qualid_is_ident qid' && Id.equal (qualid_basename qid') p.program_id -> * let id' = qualid_basename qid' in * (match p.program_rec with * | None | Some (Structural _) -> CAst.make 
~loc c * | Some (WellFounded (_, _, r)) -> * let args = * List.map (fun (c, expl) -> CAst.with_loc_val (aux' ids) c, expl) args in * let c = CApp ((None, CAst.(make ~loc (CRef (qid', ie)))), args) in * let arg = CAst.make ~loc (CApp ((None, CAst.make ~loc c), [chole id' loc])) in * arg) *) | CGenarg eqns when Genarg.has_type eqns (Genarg.rawwit wit_equations_list) -> let eqns = Genarg.out_gen (Genarg.rawwit wit_equations_list) eqns in let id = !whereid in let () = whereid := Nameops.increment_subscript id in let avoid = Id.Set.add id avoid in let eqns = List.map (aux2 notations avoid) eqns in let () = wheres := (((loc, id), None, None, [], None, None), eqns) :: !wheres; in Constrexpr_ops.mkIdentC id | _ -> map_constr_expr_with_binders Id.Set.add (fun avoid -> CAst.with_loc_val (aux' avoid)) avoid (CAst.make ?loc c) in let c' = aux' avoid ?loc c in !wheres, c' in aux notations avoid [] eqn let is_recursive i : pre_equation wheres -> bool = fun eqs -> let rec occur_eqn (Pre_equation (_, rhs)) = match rhs with | Some (Program (c,w)) -> (match c with | ConstrExpr c -> occur_var_constr_expr i c || occurs w | GlobConstr c -> occurs w | Constr _ -> occurs w) | Some (Refine (c, eqs)) -> List.exists (occur_var_constr_expr i) c || occur_eqns eqs | _ -> false and occur_eqns eqs = List.exists occur_eqn eqs and occurs_notations nts = List.exists (fun nt -> occur_var_constr_expr i nt.Vernacexpr.ntn_decl_interp) nts and occurs eqs = List.exists (fun (_,eqs) -> occur_eqns eqs) (fst eqs) || occurs_notations (snd eqs) in occurs eqs Coq-Equations-1.3.1-8.20/src/syntax.mli000066400000000000000000000134621463127417400174300ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) open Constr open Environ open Evd open Names open Equations_common type 'a with_loc = Loc.t option * 'a (** User-level patterns *) type provenance = | User | Generated | Implicit type rec_annotation = | Nested | Mutual type user_rec_annot = rec_annotation option type identifier = Names.Id.t type user_pat = PUVar of identifier * provenance | PUCstr of constructor * int * user_pats | PUInac of Glob_term.glob_constr | PUEmpty and user_pat_loc = (user_pat, [ `any ]) DAst.t and user_pats = user_pat_loc list (** Raw syntax *) type pat_expr = PEApp of Libnames.qualid Constrexpr.or_by_notation with_loc * pat_expr with_loc list | PEWildcard | PEInac of Constrexpr.constr_expr type user_pat_expr = pat_expr with_loc type 'a input_pats = SignPats of 'a | RefinePats of 'a list (** Globalized syntax *) type rec_arg = int * Id.t with_loc option type rec_annot = | MutualOn of rec_arg option | NestedOn of rec_arg option | NestedNonRec type program_body = | ConstrExpr of Constrexpr.constr_expr | GlobConstr of Glob_term.glob_constr | Constr of EConstr.constr (* We interpret a constr by substituting [Var names] of the lhs bound variables with the proper de Bruijn indices *) type lhs = user_pats (* p1 ... 
pn *) and ('a,'b) rhs_aux = Program of program_body * 'a wheres | Empty of identifier with_loc | Refine of Constrexpr.constr_expr list * 'b list and ('a,'b) rhs = ('a, 'b) rhs_aux option and pre_prototype = identifier with_loc * Constrexpr.universe_decl_expr option * user_rec_annot * Constrexpr.local_binder_expr list * Constrexpr.constr_expr option * (Id.t with_loc option, Constrexpr.constr_expr * Constrexpr.constr_expr option) by_annot option and ('a, 'b) by_annot = | Structural of 'a | WellFounded of 'b and 'a where_clause = pre_prototype * 'a list and 'a wheres = 'a where_clause list * Vernacexpr.notation_declaration list type program = (signature * clause list) list and signature = identifier * rel_context * constr (* f : Π Δ. τ *) and clause = Clause of Loc.t option * lhs * (clause, clause) rhs (* lhs rhs *) type pre_equation = Pre_equation of Constrexpr.constr_expr input_pats * (pre_equation, pre_equation) rhs type pre_clause = Pre_clause of Loc.t option * lhs * (pre_equation, pre_clause) rhs type pre_equations = pre_equation where_clause list (* val pr_user_pat : env -> user_pat -> Pp.t *) val pr_provenance : with_gen:bool -> Pp.t -> provenance -> Pp.t val pr_user_pats : ?with_gen:bool -> env -> evar_map -> user_pats -> Pp.t val pr_lhs : env -> evar_map -> user_pats -> Pp.t val pplhs : user_pats -> unit val pr_rhs : env -> evar_map -> (clause,clause) rhs -> Pp.t val pr_clause : env -> evar_map -> clause -> Pp.t val pr_clauses : env -> evar_map -> clause list -> Pp.t val pr_preclause : env -> evar_map -> pre_clause -> Pp.t val pr_preclauses : env -> evar_map -> pre_clause list -> Pp.t val pr_user_clause : env -> evar_map -> pre_equation -> Pp.t val ppclause : clause -> unit type rec_type_item = | Guarded of (Id.t * rec_annot) list (* for mutual rec *) | Logical of int * Id.t with_loc (* for nested wf rec: number of arguments before the side-condition, name *) type rec_type = rec_type_item option list val is_structural : rec_type -> bool val has_logical : rec_type -> bool val is_rec_call : Id.t -> Constant.t -> bool val next_ident_away : Id.t -> Id.Set.t ref -> Id.t type equation_option = | OInd | OEquations | OTactic of Libnames.qualid type equation_user_option = equation_option * bool val pr_r_equation_user_option : 'a -> 'b -> 'c -> 'd -> Pp.t type equation_options = equation_user_option list val pr_equation_options : 'a -> 'b -> 'c -> 'd -> Pp.t type wf_rec_info = Constrexpr.constr_expr * Constrexpr.constr_expr option * Id.t with_loc type program_rec_info = (rec_annot, wf_rec_info) by_annot type program_info = { program_loc : Loc.t option; program_id : Id.t; program_orig_type : EConstr.t; (* The original type *) program_sort : Sorts.t; (* The sort of this type *) program_sign : EConstr.rel_context; program_arity : EConstr.t; program_rec : program_rec_info option; program_impls : Impargs.manual_implicits; program_implicits : Impargs.implicit_status list; } val program_type : program_info -> EConstr.t val map_program_info : (EConstr.t -> EConstr.t) -> program_info -> program_info val ids_of_pats : Names.Id.t option -> Constrexpr.constr_expr list -> Id.Set.t val pattern_of_glob_constr : Environ.env -> evar_map -> Names.Id.Set.t -> Names.Name.t -> Glob_term.glob_constr -> Names.Id.Set.t * (user_pat, [ `any] ) DAst.t val interp_pat : Environ.env -> Evd.evar_map -> Vernacexpr.notation_declaration list -> avoid:Id.Set.t -> (program_info * Names.Name.t list) option -> Constrexpr.constr_expr -> Id.Set.t * user_pats val interp_eqn : env -> Evd.evar_map -> Vernacexpr.notation_declaration 
list -> program_info -> avoid:Id.Set.t -> pre_equation -> pre_clause val wit_equations_list : pre_equation list Genarg.uniform_genarg_type val is_recursive : Names.Id.t -> pre_equation wheres -> bool val equations_attributes : Attributes.vernac_flags -> equation_user_option list val derive_flags : (bool option * bool option) Attributes.attribute val equations_tactic : Libnames.qualid option Attributes.attribute Coq-Equations-1.3.1-8.20/test-suite/000077500000000000000000000000001463127417400167105ustar00rootroot00000000000000Coq-Equations-1.3.1-8.20/test-suite/Basics.v000066400000000000000000000503571463127417400203150ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Require Import Program Bvector List Relations. Require Import Equations.Prop.Equations. Require Import Utf8. Set Keyed Unification. Equations neg (b : bool) : bool := neg true := false ; neg false := true. Lemma neg_inv : forall b, neg (neg b) = b. Proof. intros b. funelim (neg b); auto. Qed. Inductive le : nat -> nat -> Set := | le_0 n : le 0 (S n) | le_S n m : le n m -> le (S n) (S m). Derive Signature for le. Equations congS {x y : nat} (p : x = y) : S x = S y := congS eq_refl := eq_refl. (* Equations antisym {x y : nat} (p : le x y) (q : le y x) : x = y := *) (* antisym (le_S n m p) (le_S ?(m) ?(n) q) := congS (antisym p q). *) Module TestF. Equations? f (n : nat) : nat := f 0 := 42 ; f (S m) with f m := { f (S m) IH := _ }. Proof. refine IH. Defined. End TestF. #[local] Instance eqsig {A} (x : A) : Signature (x = x) A (fun a => x = a) := sigmaI _ x. Module WithUIP. Set Equations With UIP. Polymorphic Axiom uip : forall A, UIP A. Local Existing Instance uip. Equations K {A} (x : A) (P : x = x -> Type) (p : P eq_refl) (H : x = x) : P H := K x P p eq_refl := p. End WithUIP. (* Test Equations WithUIP. should be off, setting is local to the module *) Equations eq_sym {A} (x y : A) (H : x = y) : y = x := eq_sym x _ eq_refl := eq_refl. Equations eq_trans {A} (x y z : A) (p : x = y) (q : y = z) : x = z := eq_trans x _ _ eq_refl eq_refl := eq_refl. Declare Scope vect_scope. Notation " x |:| y " := (@Vector.cons _ x _ y) (at level 20, right associativity) : vect_scope. Notation " x |: n :| y " := (@Vector.cons _ x n y) (at level 20, right associativity) : vect_scope. (* Notation " [[ x .. y ]] " := (Vector.cons x .. (Vector.cons y Vector.nil) ..) : vect_scope. *) Notation "[]v" := (@Vector.nil _) (at level 0) : vect_scope. Section FilterDef. Context {A} (p : A -> bool). Equations filter (l : list A) : list A := filter List.nil := List.nil ; filter (List.cons a l) with p a => { | true := a :: filter l ; | false := filter l }. End FilterDef. (* Equations filter {A} (l : list A) (p : A -> bool) : list A := *) (* filter A List.nil p := List.nil ; *) (* filter A (List.cons a l) p <= p a => { *) (* | true := a :: filter l p ; *) (* | false := filter l p }. *) Inductive incl {A} : relation (list A) := stop : incl nil nil | keep {x : A} {xs ys : list A} : incl xs ys -> incl (x :: xs) (x :: ys) | skip {x : A} {xs ys : list A} : incl xs ys -> incl (xs) (x :: ys). Global Transparent filter. 
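(* An illustrative lemma in the style of [neg_inv] above: filtering with a
   constantly-[true] predicate is the identity. It is expected to follow from
   [filter]'s generated functional elimination principle. *)
Lemma filter_true {A} (l : list A) : filter (fun _ : A => true) l = l.
Proof. funelim (filter (fun _ : A => true) l); cbn in *; congruence. Qed.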
Equations sublist {A} (p : A -> bool) (xs : list A) : incl (filter p xs) xs := sublist p nil := stop ; sublist p (cons x xs) with p x := { | true := keep (sublist p xs) ; | false := skip (sublist p xs) }. (* Print Assumptions sublist. *) Notation vector := Vector.t. (* Derive Subterm for nat. *) Derive Signature for vector. Derive NoConfusion NoConfusionHom for vector. Derive Subterm for vector. Require Import Arith Wf_nat. #[local] Instance wf_nat : WellFounded lt := lt_wf. #[local] Hint Resolve PeanoNat.Nat.lt_succ_diag_r : lt. Ltac solve_rec ::= simpl in * ; cbv zeta ; intros ; try typeclasses eauto with subterm_relation simp lt. Unset Implicit Arguments. Equations testn (n : nat) : nat by wf n lt := testn 0 := 0 ; testn (S n) with testn n => { | 0 := S 0 ; | (S n') := S n' }. Require Import Vectors.Vector. Arguments Vector.nil {A}. Arguments Vector.cons {A} _ {n}. Local Open Scope vect_scope. Reserved Notation "x ++v y" (at level 60). Equations vapp' {A} {n m} (v : vector A n) (w : vector A m) : vector A (n + m) := { []v ++v w := w ; (Vector.cons a v) ++v w := Vector.cons a (v ++v w) } where "x ++v y" := (vapp' x y). (* Section vapp_def. *) (* Context {A : Type}. *) (* Equations vapp' {n m} (v : vector A n) (w : vector A m) : vector A (n + m) := *) (* vapp' []v w := w ; *) (* vapp' (Vector.cons a n v) w := Vector.cons a (vapp' v w). *) (* End vapp_def. *) (* Print Assumptions vapp'. *) #[local] Instance vector_eqdec {A n} `(EqDec A) : EqDec (vector A n). Proof. intros. intros x. induction x. left. now depelim y. intro y; depelim y. destruct (Classes.eq_dec h h0); subst. destruct (IHx y). subst. left; reflexivity. right. intro. noconf H0. contradiction. right. intro. noconf H0. contradiction. Defined. (* Print Assumptions well_founded_vector_direct_subterm. *) (** A closed proof of well-foundedness relying on the decidability of [A]. *) Definition vector_subterm A := t_subterm A. (* Instance well_founded_vector_direct_subterm' : *) (* forall A : Type, EqDec A -> WellFounded (vector_subterm A) | 0. *) (* Proof. intros. *) (* apply Transitive_Closure.wf_clos_trans. *) (* intro. simp_sigmas. induction a. *) (* constructor; intros. *) (* simp_sigmas. simpl in *. *) (* depelim H. *) (* constructor; intros. *) (* simp_sigmas. depelim H. *) (* assumption. *) (* Defined. *) (* Print Assumptions well_founded_vector_direct_subterm'. *) #[local] Instance eqdep_prod A B `(EqDec A) `(EqDec B) : EqDec (prod A B). Proof. intros. intros x y. decide equality. Defined. #[local] Hint Unfold vector_subterm : subterm_relation. (* Typeclasses Opaque vector_subterm. *) Import Vector. (* Section unzip_dec_def. *) (* Context {A B} `{EqDec A} `{EqDec B}. *) (* Equations unzip_dec {n} (v : vector (A * B) n) : vector A n * vector B n := *) (* unzip_dec n v by wf v (@vector_subterm (A * B)) := *) (* unzip_dec ?(O) nil := ([]v, []v) ; *) (* unzip_dec ?(S n) (cons (pair x y) n v) with unzip_dec v := { *) (* | pair xs ys := (cons x xs, cons y ys) }. *) (* End unzip_dec_def. *) Section foo. Context {A B : Type}. Equations unzipv {n} (v : vector (A * B) n) : vector A n * vector B n by wf (signature_pack v) (@vector_subterm (A * B)) := unzipv []v := ([]v, []v) ; unzipv ((x, y) |:| v) with unzipv v := { | pair xs ys := (cons x xs, cons y ys) }. End foo. #[local] Typeclasses Transparent vector_subterm. 
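(* A quick illustrative check that the first defining equation of [vapp']
   is available to [simp]. *)
Goal forall v : vector nat 2, vapp' []v v = v.
Proof. intros v. now simp vapp'. Qed.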
(** Due to the packing of all arguments, can only be done in sections right now so that A and B are treated as parameters (better computational behavior anyway) *) (* Equations unzip {A B} {n} (v : vector (A * B) n) : vector A n * vector B n := *) (* unzip v by wf (signature_pack v) (@vector_subterm (A * B)) := *) (* unzip nil := (nil, nil) ; *) (* unzip (cons (pair x y) n v) <= unzip v => { *) (* | (pair xs ys) := (cons x xs, cons y ys) }. *) (* Print Assumptions unzip. *) (* Print Assumptions unzip_dec. *) (* Ltac generalize_by_eqs v ::= generalize_eqs v. Equations unzip_n {A B} {n} (v : vector (A * B) n) : vector A n * vector B n := unzip_n A B O Vnil := (Vnil, Vnil) ; unzip_n A B (S n) (cons (pair x y) n v) with unzip_n v := { | pair xs ys := (cons x xs, cons y ys) }. *) (* Definition nos_with_comp (n : nat) := nat. *) (* Lemma nos_with (n : nat) : nos_with_comp n. *) (* rec_wf_eqns nos n. *) Equations nos_with (n : nat) : nat by wf n := nos_with O := O ; nos_with (S m) with nos_with m := { | O := S O ; | S n' := O }. Equations equal (n m : nat) : { n = m } + { n <> m } := equal O O := in_left ; equal (S n) (S m) with equal n m => { equal (S n) (S ?(n)) (left eq_refl) := left eq_refl ; equal (S n) (S m) (right p) := in_right } ; equal x y := in_right. Import List. Equations app_with {A} (l l' : list A) : list A := app_with nil l := l ; app_with (cons a v) l with app_with v l => { | vl := cons a vl }. (* Print Assumptions app_with. *) (* About app_with_elim. *) (* Print app_with_ind. *) (* Print app_with_ind_ind. *) (* Scheme app_with_elim := Minimality for app_with_ind Sort Prop *) (* with app_with_help_elim := Minimality for app_with_ind_1 Sort Prop. *) (* About app_with_elim. *) Equations plus' (n m : nat) : nat := plus' O n := n ; plus' (S n) m := S (plus' n m). (* Ltac generalize_by_eqs id ::= generalize_eqs id. *) (* Ltac generalize_by_eqs_vars id ::= generalize_eqs_vars id. *) Equations head A (default : A) (l : list A) : A := head A default nil := default ; head A default (cons a v) := a. Equations tail {A} (l : list A) : list A := tail nil := nil ; tail (cons a v) := v. (* Eval compute in @tail. *) (* Eval compute in (tail (cons 1 nil)). *) Equations app' {A} (l l' : list A) : (list A) := app' nil l := l ; app' (cons a v) l := cons a (app' v l). Global Transparent app'. Notation " x +++ y " := (@app' _ x y%list) (at level 60, right associativity). Equations rev_acc {A} (l : list A) (acc : list A) : list A := rev_acc nil acc := acc; rev_acc (cons a v) acc := rev_acc v (a :: acc). Equations rev {A} (l : list A) : list A := rev nil := nil; rev (cons a v) := rev v +++ (cons a nil). Notation " [] " := List.nil. Lemma app'_nil : forall {A} (l : list A), l +++ [] = l. Proof. intros. funelim (app' l []); cbn; auto. now rewrite H. Qed. Lemma app'_assoc : forall {A} (l l' l'' : list A), (l +++ l') +++ l'' = app' l (app' l' l''). Proof. intros. revert l''. funelim (l +++ l'); intros; simp app'; trivial. now rewrite H. Qed. Lemma rev_rev_acc : forall {A} (l : list A), rev_acc l [] = rev l. Proof. intros. replace (rev l) with (rev l +++ []) by apply app'_nil. generalize (@nil A). funelim (rev l). reflexivity. intros l'. simp rev_acc. rewrite H. rewrite app'_assoc. reflexivity. Qed. #[local] Hint Rewrite @rev_rev_acc : rev_acc. Lemma app'_funind : forall {A} (l l' l'' : list A), (l +++ l') +++ l'' = app' l (app' l' l''). Proof. intros. funelim (l +++ l'); simp app'; trivial. rewrite H. reflexivity. Qed. #[local] Hint Rewrite @app'_nil @app'_assoc : app'. 
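(* A closed-instance sanity check of [app'], discharged by its generated
   rewrite rules. *)
Example test_app' : (1 :: 2 :: []) +++ (3 :: []) = (1 :: 2 :: 3 :: []).
Proof. now simp app'. Qed.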
Lemma rev_app' : forall {A} (l l' : list A), rev (l +++ l') = rev l' +++ rev l. Proof. intros. funelim (l +++ l'); simp rev app'; trivial. now (rewrite H, <- app'_assoc). Qed. Equations zip' {A} (f : A -> A -> A) (l l' : list A) : list A := zip' f nil nil := nil ; zip' f (cons a v) (cons b w) := cons (f a b) (zip' f v w) ; zip' f x y := nil. Equations zip'' {A} (f : A -> A -> A) (l l' : list A) (def : list A) : list A := zip'' f nil nil def := nil ; zip'' f (cons a v) (cons b w) def := cons (f a b) (zip'' f v w def) ; zip'' f nil (cons b w) def := def ; zip'' f (cons a v) nil def := def. Import Vector. Equations vector_append_one {A n} (v : vector A n) (a : A) : vector A (S n) := vector_append_one nil a := cons a nil; vector_append_one (cons a' v) a := cons a' (vector_append_one v a). Equations vrev {A n} (v : vector A n) : vector A n := vrev nil := nil; vrev (cons a v) := vector_append_one (vrev v) a. Definition cast_vector {A n m} (v : vector A n) (H : n = m) : vector A m. intros; subst; assumption. Defined. Equations vrev_acc {A n m} (v : vector A n) (w : vector A m) : vector A (n + m) := vrev_acc nil w := w; vrev_acc (cons a v) w := cast_vector (vrev_acc v (cons a w)) _. Record vect {A} := mkVect { vect_len : nat; vect_vector : vector A vect_len }. Coercion mkVect : vector >-> vect. Derive NoConfusion for vect. Inductive Split {X : Type}{m n : nat} : vector X (m + n) -> Type := append : ∀ (xs : vector X m)(ys : vector X n), Split (vapp' xs ys). Arguments Split [ X ]. (* Eval compute in @app'. *) (* About nil. About vector. *) Equations split {X : Type} {m n} (xs : vector X (m + n)) : Split m n xs by wf m := split (m:=O) xs := append nil xs ; split (m:=(S m)) (n:=n) (cons x xs) with split xs => { | append xs' ys' := append (cons x xs') ys' }. Lemma split_vapp' : ∀ (X : Type) m n (v : vector X m) (w : vector X n), let 'append v' w' := split (vapp' v w) in v = v' /\ w = w'. Proof. intros. funelim (vapp' v w). destruct split. depelim xs; intuition. simp split in *. destruct split. simpl. intuition congruence. Qed. Transparent vapp'. Definition eta_vector {A} (P : forall n, vector A n -> Type) : forall n v, match v with | nil => P 0 nil | cons a v => P _ (cons a v) end = P n v. Proof. now destruct v. Defined. Import Sigma_Notations. Axiom cheat : forall {A}, A. Lemma split' {X : Type} : forall {m n} (xs : vector X (Peano.plus m n)), Split m n xs. Proof. fix IH 3. intros m n xs. eassert ?[ty]. refine (match xs as xs' in @t _ k return (match xs' as xs'' in vector _ n' return Type with | nil => ((0, nil) = (Peano.plus m n, xs)) -> Split m n xs | @cons _ x' n' xs'' => (S n', cons x' xs'') = (Peano.plus m n, xs) -> Split m n xs end) with | nil => _ | cons x xs => _ end). (* FIXME: simplify not agressive enough to find whd *) simpl. (* apply cheat. apply cheat. *) destruct m as [|m']. + simpl. simplify *. simpl. apply (append nil nil). + simpl. simplify *. + destruct m as [|m']; simpl. simplify *. simpl. apply (append nil (x |: _ :| xs)). simplify *. simpl. specialize (IH _ _ xs). destruct IH. refine (append (cons x xs) ys). + rewrite (eta_vector (fun nv v => (nv, v) = (Peano.plus m n, xs) -> Split m n xs)) in X0. apply (X0 eq_refl). Defined. (* Eval cbv delta[split' eq_rect noConfusion NoConfusion.NoConfusionPackage_nat NoConfusion.noConfusion_nat_obligation_1 ] beta zeta iota in split'. *) Extraction Inline Logic.transport. (* Extraction split'. Extraction split. *) (* Eval compute in @zip''. *) Require Import Bvector. 
Equations split_struct {X : Type} {m n} (xs : vector X (m + n)) : Split m n xs := split_struct (m:=0) xs := append nil xs ; split_struct (m:=(S m)) (cons x xs) with split_struct xs => { split_struct (m:=(S m)) (cons x xs) (append xs' ys') := append (cons x xs') ys' }. (* Extraction split_struct. *) Lemma split_struct_vapp : ∀ (X : Type) m n (v : vector X m) (w : vector X n), let 'append v' w' := split_struct (vapp' v w) in v = v' /\ w = w'. Proof. intros. funelim (vapp' v w); cbn; simp split_struct in *. auto. destruct (split_struct (m:=0) w). depelim xs; intuition. cbn in *. destruct (split_struct (vapp' v ys)); simpl. intuition congruence. Qed. Equations vhead {A n} (v : vector A (S n)) : A := vhead (cons a v) := a. Equations vmap' {A B} (f : A -> B) {n} (v : vector A n) : vector B n := vmap' f nil := nil ; vmap' f (cons a v) := cons (f a) (vmap' f v). #[local] Hint Resolve PeanoNat.Nat.lt_succ_diag_r : subterm_relation. Equations vmap {A B} (f : A -> B) {n} (v : vector A n) : vector B n by wf n := vmap f nil := nil ; vmap f (cons a v) := cons (f a) (vmap f v). Transparent vmap. Transparent vmap'. (* Eval compute in (vmap' id (@Vnil nat)). *) (* Eval compute in (vmap' id (@cons nat 2 _ Vnil)). *) (* Eval compute in @vmap'. *) Section Image. Context {S T : Type}. Variable f : S -> T. Inductive Imf : T -> Type := imf (s : S) : Imf (f s). Equations inv (t : T) (im : Imf t) : S := inv ?(f s) (imf s) := s. End Image. Section Univ. Inductive univ : Set := | ubool | unat | uarrow (from:univ) (to:univ). Equations interp (u : univ) : Set := interp ubool := bool; interp unat := nat; interp (uarrow from to) := interp from -> interp to. (* Eval compute in interp. *) Transparent interp. Definition interp' := Eval compute in @interp. Equations foo (u : univ) (el : interp' u) : interp' u := foo ubool true := false ; foo ubool false := true ; foo unat t := t ; foo (uarrow from to) f := id ∘ f. Transparent foo. (* Eval lazy beta delta [ foo foo_obligation_1 foo_obligation_2 ] iota zeta in foo. *) End Univ. (* Eval compute in (foo ubool false). *) (* Eval compute in (foo (uarrow ubool ubool) negb). *) (* Eval compute in (foo (uarrow ubool ubool) id). *) Inductive foobar : Set := bar | baz. Equations bla (f : foobar) : bool := bla bar := true ; bla baz := false. (* Eval simpl in bla. *) Lemma eq_trans_eq A x : @eq_trans A x x x eq_refl eq_refl = eq_refl. Proof. reflexivity. Qed. Equations vlast {A} {n} (v : vector A (S n)) : A by struct v := vlast (cons a (n:=O) nil) := a ; vlast (cons a (n:=S n) v) := vlast v. Transparent vlast. Definition testvlast : 4 = (vlast (cons 2 (cons 5 (cons 4 nil)))) := eq_refl. Fixpoint vapp {A n m} (v : vector A n) (w : vector A m) : vector A (n + m) := match v with | nil => w | cons a v' => cons a (vapp v' w) end. (* Eval compute in (split (vapp Vnil (cons 2 Vnil))). *) (* Eval compute in (split (vapp (cons 3 Vnil) (cons 2 Vnil))). *) (* Recursive Extraction split. *) (* Transparent split. *) (* Eval cbv beta iota zeta delta [ split split_obligation_1 split_obligation_2 ] in @split. *) Equations mult (n m : nat) : nat := mult O m := 0 ; mult (S n) m := mult n m + m. Transparent mult. (* Equations mult' (n m acc : nat) : nat := *) (* mult' O m acc := acc ; mult' (S n) m acc := mult' n m (n + acc). *) Inductive Parity : nat -> Set := | even : forall n, Parity (mult 2 n) | odd : forall n, Parity (S (mult 2 n)). (* Eval compute in (fun n => mult 2 (S n)). *) Definition cast {A B : Type} (a : A) (p : A = B) : B. intros. subst. exact a. Defined. 
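(* Illustrative closed-instance check: [vmap'] and [vhead] compute through
   their rewrite rules. *)
Goal vhead (vmap' S (1 |:| 2 |:| []v)) = 2.
Proof. now simp vmap' vhead. Qed.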
Equations parity (n : nat) : Parity n := parity O := even 0 ; parity (S n) with parity n => { parity (S ?(mult 2 k)) (even k) := odd k ; parity (S ?(S (mult 2 k))) (odd k) := cast (even (S k)) _ }. Equations half (n : nat) : nat := half n with parity n => { half ?(S (mult 2 k)) (odd k) := k ; half ?(mult 2 k) (even k) := k }. Equations vtail {A n} (v : vector A (S n)) : vector A n := vtail (cons a v') := v'. (** Well-founded recursion: note that it's polymorphic recursion in a sense: the type of vectors change at each recursive call. It does not follow a canonical elimination principle in this nested case. *) Equations diag {A n} (v : vector (vector A n) n) : vector A n by wf n lt := diag nil := nil ; diag (cons (cons a v) v') := cons a (diag (vmap vtail v')). (** The computational content is the right one here: only the vector is matched relevantly, not its indices, which could hence disappear. *) (* Extraction diag. *) (** It can be done structurally as well but we're matching on the index now. *) Equations diag_struct {A n} (v : vector (vector A n) n) : vector A n := diag_struct (n:=O) nil := nil ; diag_struct (n:=(S _)) (cons (cons a v) v') := cons a (diag_struct (vmap vtail v')). Definition mat A n m := vector (vector A m) n. Equations vmake {A} (n : nat) (a : A) : vector A n := vmake O a := nil ; vmake (S n) a := cons a (vmake n a). Equations vfold_right {A : nat -> Type} {B} (f : ∀ n, B -> A n -> A (S n)) (e : A 0) {n} (v : vector B n) : A n := vfold_right f e nil := e ; vfold_right f e (cons a v) := f _ a (vfold_right f e v). Equations vzip {A B C n} (f : A -> B -> C) (v : vector A n) (w : vector B n) : vector C n := vzip f nil _ := nil ; vzip f (cons a v) (cons a' v') := cons (f a a') (vzip f v v'). Definition transpose {A m n} : mat A m n -> mat A n m := vfold_right (A:=λ m, mat A n m) (λ m', vzip (λ a, cons a)) (vmake n nil). (* Lemma vfold_right_e {A : Type} {B} {n} (f : ∀ n', B' -> vector (vector A 0) n' -> vector (vector A 0) (S n')) *) (* (e : vector (vector A 0) n) v : vfold_right f (vmake n Vnil) v = *) (* Typeclasses eauto :=. *) Require Import fin. Generalizable All Variables. Opaque vmap. Opaque vtail. Opaque nth. Require Vectors.Vector. Arguments Vector.nil {A}. Arguments Vector.cons {A} _ {n}. Notation vnil := Vector.nil. Notation vcons := Vector.cons. Equations nth {A} {n} (v : Vector.t A n) (f : fin n) : A := nth (vcons a v) fz := a ; nth (vcons a v) (fs f) := nth v f. Lemma nth_vmap {A B n} (v : vector A n) (fn : A -> B) (f : fin n) : nth (vmap fn v) f = fn (nth v f). Proof. revert B fn. funelim (nth v f); intros; now simp nth vmap. Qed. Lemma nth_vtail `(v : vector A (S n)) (f : fin n) : nth (vtail v) f = nth v (fs f). Proof. funelim (vtail v); intros; now simp nth. Qed. #[local] Hint Rewrite @nth_vmap @nth_vtail : nth. Lemma diag_nth `(v : vector (vector A n) n) (f : fin n) : nth (diag v) f = nth (nth v f) f. Proof. revert f. funelim (diag v); intros f. depelim f. depelim f; simp nth; trivial. rewrite H. now simp nth. Qed. Equations assoc (x y z : nat) : x + y + z = x + (y + z) := assoc 0 y z := eq_refl; assoc (S x) y z with assoc x y z, x + (y + z) => { assoc (S x) y z eq_refl _ := eq_refl }. Coq-Equations-1.3.1-8.20/test-suite/BasicsDec.v000066400000000000000000000027771463127417400207340ustar00rootroot00000000000000Require Import Equations.Prop.Equations Bvector. Inductive bar1 (A : Type) : A -> Prop := . Inductive bar2 (A : Type) : (A -> A) -> Prop := . Inductive bar3 (A B : Type) : A -> Prop := . Inductive bar4 (A B : Type) : B -> Prop := . 
Derive Signature for bar1 bar2 bar3 bar4. Derive Signature for eq. Goal forall (U V : Type), Some U = Some V -> U = V. Proof. intros. depelim H. reflexivity. Qed. Notation vector := Vector.t. Derive Signature NoConfusionHom for Vector.t. Unset Printing All. Inductive foo (A : Type) : forall H H0 : nat, vector A H -> vector A H0 -> Prop :=. Derive Signature for foo. #[export] Instance vector_eqdec {A n} `(EqDec A) : EqDec (vector A n). Proof. intros. intros x. induction x. left. now depelim y. intro y; depelim y. destruct (eq_dec h h0) as [eq|neq]; subst. destruct (IHx y) as [eqy|neqy]. subst. left; reflexivity. right. intro H0. apply neqy. injection H0. revert H0. repeat simplify ?. simpl. reflexivity. right. intro H0. apply neq. now noconf H0. Defined. Derive Subterm for vector. Print Assumptions well_founded_t_subterm. (** A closed proof of well-foundedness relying on the decidability of [A]. *) Lemma well_founded_vector_direct_subterm' : forall A : Type, EqDec A -> WellFounded (t_subterm A). Proof. intros. apply Transitive_Closure.wf_clos_trans. intro. simp_sigmas. induction a0; constructor; intros; simp_sigmas. simpl in *. depelim H. depelim H. apply IHa0. Defined. Print Assumptions well_founded_vector_direct_subterm'. Coq-Equations-1.3.1-8.20/test-suite/BasicsHoTT.v000066400000000000000000000466641463127417400210620ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) From Equations Require Import CoreTactics. Require Import Equations.HoTT.All Equations.HoTT.WellFounded. Require Import Coq.Unicode.Utf8. Require HoTT.Basics.Overture. Require Import HoTT.Types.Bool HoTT.Spaces.Nat HoTT.Spaces.List.Core. Local Open Scope nat_scope. Set Equations Transparent. Equations neg (b : Bool) : Bool := neg true := false ; neg false := true. Lemma neg_inv : forall b, neg (neg b) = b. Proof. intros b. funelim (neg b); auto. Qed. Inductive le : nat -> nat -> Set := | le_0 n : le 0 (S n) | le_S n m : le n m -> le (S n) (S m). Derive Signature for le. Equations congS {x y : nat} (p : x = y) : S x = S y := congS 1 := 1. (* Equations antisym {x y : nat} (p : le x y) (q : le y x) : x = y := *) (* antisym (le_S n m p) (le_S ?(m) ?(n) q) := congS (antisym p q). *) Module TestF. Equations? f (n : nat) : nat := f 0 := 42 ; f (S m) with f m := { f (S m) IH := _ }. Proof. refine IH. Defined. End TestF. #[local] Instance eqsig {A} (x : A) : Signature (x = x) A (fun a => x = a) := sigmaI _ x. Module WithUIP. Set Equations With UIP. Polymorphic Axiom uip : forall A, UIP A. Local Existing Instance uip. Equations K {A} (x : A) (P : x = x -> Type) (p : P idpath) (H : x = x) : P H := K x P p idpath := p. End WithUIP. (* Test Equations WithUIP. should be off, setting is local to the module *) Equations eq_sym {A} (x y : A) (H : x = y) : y = x := eq_sym x _ idpath := idpath. Equations eq_trans {A} (x y z : A) (p : x = y) (q : y = z) : x = z := eq_trans x _ _ idpath idpath := idpath. Section FilterDef. Context {A} (p : A -> Bool). Equations filter (l : list A) : list A := filter nil := nil ; filter (cons a l) with p a => { | true := a :: filter l ; | false := filter l }. End FilterDef. 
Inductive incl {A} : Relation (list A) := stop : incl nil nil | keep {x : A} {xs ys : list A} : incl xs ys -> incl (x :: xs)%list (x :: ys)%list | skip {x : A} {xs ys : list A} : incl xs ys -> incl (xs) (x :: ys)%list. Global Transparent filter. Equations sublist {A} (p : A -> Bool) (xs : list A) : incl (filter p xs) xs := sublist p nil := stop ; sublist p (cons x xs) with p x := { | true := keep (sublist p xs) ; | false := skip (sublist p xs) }. (* Print Assumptions sublist. *) Declare Scope vect_scope. Inductive vector@{i | Set <= i} (A : Type@{i}) : nat -> Type@{i} := | nil : vector A 0 | cons {n} : A -> vector A n -> vector A (S n). Arguments vector A%type_scope n%nat_scope. Arguments nil {A}. Arguments cons {A%type_scope} {n%nat_scope} a v%vect_scope. Notation " x |:| y " := (@cons _ _ x y) (at level 20, right associativity) : vect_scope. Notation " x |: n :| y " := (@cons _ n x y) (at level 20, right associativity) : vect_scope. Notation "[]v" := (@nil _) (at level 0) : vect_scope. (* Derive Subterm for nat. *) Derive Signature NoConfusion for vector. (*Show Obligation Tactic.*) Derive Subterm for vector. Axiom F : Funext. #[local] Existing Instance F. #[local] Existing Instance lt_wf. Equations testn (n : nat) : nat by wf n WellFoundedInstances.lt := testn 0 := 0 ; testn (S n) with testn n => { | 0 := S 0 ; | (S n') := S n' }. Local Open Scope vect_scope. Reserved Notation "x ++v y" (at level 60). Require Import HoTT.Classes.implementations.peano_naturals. (* Require Import HoTT.Classes.interfaces.canonical_names. *) Equations vapp {A} {n m} (v : vector A n) (w : vector A m) : vector A (n + m)%nat := { []v ++v w := w ; (cons a v) ++v w := cons a (v ++v w) } where "x ++v y" := (vapp x y). (* Print Assumptions vapp. *) (* Ltac Equations.Init.solve_noconf_hom ::= idtac. *) Set Universe Minimization ToSet. Derive NoConfusionHom for vector. Unset Universe Minimization ToSet. Require Import Equations.HoTT.Tactics. #[local] Instance vector_eqdec@{i +|+} {A : Type@{i}} {n} `(EqDec@{i} A) : EqDec (vector A n). Proof. intros. intros x. intros y. induction x. - left. now depelim y. - depelim y. pose proof (Classes.eq_dec a a0). dependent elimination X as [inl idpath|inr Ha]. -- specialize (IHx v). dependent elimination IHx as [inl idpath|inr neq]. --- left; reflexivity. --- right. simplify *. now apply neq. -- right; simplify *. now apply Ha. Defined. Section foo. Context {A B : Type}. Equations unzipv {n} (v : vector (A * B) n) : vector A n * vector B n by wf (signature_pack v) (@vector_subterm (A * B)) := unzipv []v := ([]v, []v) ; unzipv ((x, y) |:| v) with unzipv v := { | pair xs ys := (cons x xs, cons y ys) }. End foo. Global Typeclasses Transparent vector_subterm. Equations nos_with (n : nat) : nat by wf n := nos_with O := O ; nos_with (S m) with nos_with m := { | O := S O ; | S n' := O }. Set Universe Minimization ToSet. Equations equal (n m : nat) : (n = m) + (n <> m) := equal O O := inl idpath ; equal (S n) (S m) with equal n m => { equal (S n) (S ?(n)) (inl idpath) := inl idpath ; equal (S n) (S m) (inr p) := inr (λ{ | idpath => p idpath }) } ; equal x y := inr _. Notation "[]" := (@List.Core.nil _) (at level 0) : list_scope. Local Open Scope list_scope. Equations app_with {A} (l l' : list A) : list A := app_with [] l := l ; app_with (a :: v) l with app_with v l => { | vl := a :: vl }. Equations plus' (n m : nat) : nat := plus' O n := n ; plus' (S n) m := S (plus' n m). Equations head A (default : A) (l : list A) : A := head A default [] := default ; head A default (a :: v) := a. 
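(* With [Equations Transparent] set above, [head] computes on closed lists. *)
Example head_example : head nat 0 (1 :: 2 :: []) = 1.
Proof. reflexivity. Qed.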
Equations tail {A} (l : list A) : list A := tail [] := [] ; tail (a :: v) := v. Equations app' {A} (l l' : list A) : (list A) := app' [] l := l ; app' (a :: v) l := a :: (app' v l). Global Transparent app'. Notation " x +++ y " := (@app' _ x y%list) (at level 60, right associativity). Equations rev_acc {A} (l : list A) (acc : list A) : list A := rev_acc [] acc := acc; rev_acc (a :: v) acc := rev_acc v (a :: acc). Equations rev {A} (l : list A) : list A := rev [] := []; rev (a :: v) := rev v +++ (a :: []). Lemma app'_nil : forall {A : Type} (l : list A), l +++ [] = l. Proof. intros. funelim (app' l []); auto. now rewrite X. Qed. Lemma app'_assoc : forall {A} (l l' l'' : list A), (l +++ l') +++ l'' = app' l (app' l' l''). Proof. intros. revert l''. funelim (l +++ l'); intros; simp app'; trivial. now rewrite X. Qed. Tactic Notation "myreplace" constr(c) "with" constr(d) "by" tactic(tac) := let H := fresh in assert (H : c = d) by try tac; [rewrite H; clear H]. Lemma rev_rev_acc : forall {A} (l : list A), rev_acc l [] = rev l. Proof. intros. myreplace (rev l) with (rev l +++ []) by (symmetry; apply app'_nil). generalize (@List.Core.nil A). funelim (rev l). - intros. reflexivity. - intros l'. autorewrite with rev_acc. rewrite X. rewrite app'_assoc. reflexivity. Qed. #[local] Hint Rewrite @rev_rev_acc : rev_acc. Lemma app'_funind : forall {A} (l l' l'' : list A), (l +++ l') +++ l'' = app' l (app' l' l''). Proof. intros. funelim (l +++ l'); simp app'; trivial. rewrite X; auto. Qed. #[local] Hint Rewrite @app'_nil @app'_assoc : app'. Lemma rev_app' : forall {A} (l l' : list A), rev (l +++ l') = rev l' +++ rev l. Proof. intros. funelim (l +++ l'); simp rev app'; trivial. now (rewrite X, <- app'_assoc). Qed. Equations zip' {A} (f : A -> A -> A) (l l' : list A) : list A := zip' f [] [] := [] ; zip' f (a :: v) (b :: w) := f a b :: zip' f v w ; zip' f x y := []. Equations zip'' {A} (f : A -> A -> A) (l l' : list A) (def : list A) : list A := zip'' f [] [] def := [] ; zip'' f (a :: v) (b :: w) def := f a b :: zip'' f v w def ; zip'' f [] (b :: w) def := def ; zip'' f (a :: v) [] def := def. Equations vector_append_one {A n} (v : vector A n) (a : A) : vector A (S n) := vector_append_one nil a := cons a nil; vector_append_one (cons a' v) a := cons a' (vector_append_one v a). Equations vrev {A n} (v : vector A n) : vector A n := vrev nil := nil; vrev (cons a v) := vector_append_one (vrev v) a. Definition cast_vector {A n m} (v : vector A n) (H : n = m) : vector A m. intros. destruct H; assumption. Defined. Require HoTT.Classes.interfaces.naturals HoTT.Classes.interfaces.abstract_algebra HoTT.Classes.tactics.ring_quote HoTT.Classes.tactics.ring_tac. Equations? vrev_acc {A n m} (v : vector A n) (w : vector A m) : vector A (n + m) := vrev_acc nil w := w; vrev_acc (cons (n:=n) a v) w := cast_vector (vrev_acc v (cons a w)) _. Proof. clear. induction n. - simpl. constructor. - simpl. cbn. now rewrite IHn. Defined. Set Primitive Projections. Record vect {A} := mkVect { vect_len : nat; vect_vector : vector A vect_len }. Coercion mkVect : vector >-> vect. Derive NoConfusion for vect. Inductive Split {X : Type}{m n : nat} : vector X (m + n) -> Type := append : ∀ (xs : vector X m)(ys : vector X n), Split (vapp xs ys). Arguments Split [ X ]. (* Eval compute in @app'. *) (* About nil. About vector. *) (* Set Equations Debug. *) Set Private Polymorphic Universes. 
Equations split {X : Type} {m n : nat} (xs : vector X (add m n)) : Split m n xs by wf m := split (m:=0) xs := append nil xs; split (m:=m .+1) (cons x xs) with split xs => { | append xs' ys' := append (cons x xs') ys' }. (* Minimization could do a bit better here *) Example test_split' := @split@{_ _ _ _}. (* Definition split_lightu@{u0 u1 u2 | u0 < u1, u1 < u2} := @split@{u0 u1 u1 u2 u1}.*) (* 2 universes: Set < i (type of splitset) < j (universe of the type) *) Equations splitSet {X : Set} {m n : nat} (xs : vector X (add m n)) : Split m n xs by wf m := splitSet (m:=0) xs := append nil xs; splitSet (m:=m .+1) (cons x xs) with splitSet xs => { | append xs' ys' := append (cons x xs') ys' }. Definition test_splitSet := @splitSet@{_ _}. Section SplitSetParam. Context {X : Set}. Obligation Tactic := idtac. (* Here, just 1 universe for the universe of Set. *) Equations splitSetParam {m n : nat} (xs : vector X (add m n)) : Split m n xs by wf m := splitSetParam (m:=0) xs := append nil xs; splitSetParam (m:=m .+1) (cons x xs) with splitSetParam xs => { | append xs' ys' := append (cons x xs') ys' }. Next Obligation. solve_rec. Qed. End SplitSetParam. Definition test_splitSetParam := @splitSetParam@{_}. Notation "( x , .. , y , z )" := (@sigmaI _ _ x .. (@sigmaI _ _ y z) ..) (right associativity, at level 0, format "( x , .. , y , z )") : equations_scope. Global Set Default Goal Selector "1". Axiom cheat : forall {A}, A. Notation plus := add. Definition eta_vector@{i j k} {A : Type@{i}} (P : forall n : nat, vector@{i} A n -> Type@{j}) : forall (n : nat) (v : vector@{i} A n), paths@{k} (match v return Type@{j} with | nil => P 0 nil | cons n a v => P _ (cons a v) end) (P n v). Proof. now destruct v. Defined. Lemma split' {X : Type} {m n} (xs : vector X (add m n)) : Split m n xs. Proof. eassert ?[ty]. revert m n xs. fix IH 3. intros m n xs. refine (match xs as xs' in @vector _ k return (match xs' as xs'' in vector _ n' return Type with | nil => ((0, nil) = (plus m n, xs)) -> Split m n xs | @cons _ n' x' xs'' => (S n', cons x' xs'') = (plus m n, xs) -> Split m n xs end) with | nil => _ | cons n x xs => _ end). destruct m as [|m']. + simpl. simplify *. apply (append nil nil). + simplify *. + destruct m as [|m']; simpl. simplify *. simpl. apply (append nil (x |: n :| xs)). simplify *. simpl. specialize (IH m' n0 xs). rewrite (eta_vector (fun nv (v : vector X nv) => (nv, v) = (plus m' n0, xs) -> Split m' n0 xs)) in IH. specialize (IH idpath). destruct IH. refine (append (cons x xs) ys). + rewrite (eta_vector (fun nv v => (nv, v) = (plus m n, xs) -> Split m n xs)) in X0. apply (X0 idpath). Defined. Extraction Inline apply_noConfusion Empty_ind. Register sigma as core.sig.type. (* Extraction split'. *) Lemma split_vapp : ∀ (X : Type) m n (v : vector X m) (w : vector X n), let 'append v' w' := split (vapp v w) in v = v' /\ w = w'. Proof. intros. funelim (vapp v w); simp split; trivial; auto. destruct split; simp split. dependent elimination X as [pair idpath idpath]. split; constructor. Qed. (* Eval compute in @zip''. *) Equations split_struct {X : Type} {m n} (xs : vector X (plus m n)) : Split m n xs := split_struct (m:=0) xs := append nil xs ; split_struct (m:=(S m)) (cons x xs) with split_struct xs => { split_struct (m:=(S m)) (cons x xs) (append xs' ys') := append (cons x xs') ys' }. Transparent split_struct. Lemma split_struct_vapp : ∀ (X : Type) m n (v : vector X m) (w : vector X n), let 'append v' w' := split_struct (vapp v w) in v = v' /\ w = w'. Proof. intros. 
funelim (vapp v w); simp split_struct in *; try easy. destruct (split_struct (v ++v w)); simpl. dependent elimination X as [pair idpath idpath]; easy. Qed. Equations vhead {A n} (v : vector A (S n)) : A := vhead (cons a v) := a. Equations vmap' {A B} (f : A -> B) {n} (v : vector A n) : vector B n := vmap' f nil := nil ; vmap' f (cons a v) := cons (f a) (vmap' f v). #[local] Hint Resolve lt_n_Sn : subterm_relation. Equations vmap {A B} (f : A -> B) {n} (v : vector A n) : vector B n by wf n := vmap f nil := nil ; vmap f (cons a v) := cons (f a) (vmap f v). Transparent vmap. Transparent vmap'. (* Eval compute in (vmap' id (@Vnil nat)). *) (* Eval compute in (vmap' id (@cons nat 2 _ Vnil)). *) (* Eval compute in @vmap'. *) Section Image. Context {S T : Type}. Variable f : S -> T. Inductive Imf : T -> Type := imf (s : S) : Imf (f s). Equations inv (t : T) (im : Imf t) : S := inv ?(f s) (imf s) := s. End Image. Section Univ. Inductive univ : Set := | ubool | unat | uarrow (from:univ) (to:univ). Equations interp (u : univ) : Set := interp ubool := Bool; interp unat := nat; interp (uarrow from to) := interp from -> interp to. (* Eval compute in interp. *) Transparent interp. Definition interp' := Eval compute in @interp. Equations foo (u : univ) (el : interp' u) : interp' u := foo ubool true := false ; foo ubool false := true ; foo unat t := t ; foo (uarrow from to) f := f. Transparent foo. (* Eval lazy beta delta [ foo foo_obligation_1 foo_obligation_2 ] iota zeta in foo. *) End Univ. (* Eval compute in (foo ubool false). *) (* Eval compute in (foo (uarrow ubool ubool) negb). *) (* Eval compute in (foo (uarrow ubool ubool) id). *) Inductive foobar : Set := bar | baz. Equations bla (f : foobar) : Bool := bla bar := true ; bla baz := false. (* Eval simpl in bla. *) Lemma eq_trans_eq A x : @eq_trans A x x x idpath idpath = idpath. Proof. reflexivity. Qed. Section vlast. Context {A : Type}. Equations vlast {n} (v : vector A (S n)) : A by wf (signature_pack v) (@vector_subterm A) := vlast (cons a (n:=O) nil) := a ; vlast (cons a (n:=S n) v) := vlast v. End vlast. (* Transparent vlast. *) (* Definition testvlast : 4 = (vlast (cons 2 (cons 5 (cons 4 nil)))) := idpath. *) (* Fixpoint vapp {A n m} (v : vector A n) (w : vector A m) : vector A (n + m) := *) (* match v with *) (* | nil => w *) (* | cons a v' => cons a (vapp v' w) *) (* end. *) (* Eval compute in (split (vapp Vnil (cons 2 Vnil))). *) (* Eval compute in (split (vapp (cons 3 Vnil) (cons 2 Vnil))). *) (* Recursive Extraction split. *) (* Transparent split. *) (* Eval cbv beta iota zeta delta [ split split_obligation_1 split_obligation_2 ] in @split. *) Equations mult (n m : nat) : nat := mult O m := 0 ; mult (S n) m := mult n m + m. Transparent mult. (* Equations mult' (n m acc : nat) : nat := *) (* mult' O m acc := acc ; mult' (S n) m acc := mult' n m (n + acc). *) Inductive Parity : nat -> Set := | even : forall n, Parity (mult 2 n) | odd : forall n, Parity (S (mult 2 n)). (* Eval compute in (fun n => mult 2 (S n)). *) Definition cast {A B : Type} (a : A) (p : A = B) : B. intros. destruct p. exact a. Defined. Equations parity (n : nat) : Parity n := parity O := even 0 ; parity (S n) with parity n => { parity (S ?(mult 2 k)) (even k) := odd k ; parity (S ?(S (mult 2 k))) (odd k) := cast (even (S k)) _ }. Next Obligation. cbn. apply cheat. Defined. Equations half (n : nat) : nat := half n with parity n => { half ?(S (mult 2 k)) (odd k) := k ; half ?(mult 2 k) (even k) := k }. 
Equations vtail {A n} (v : vector A (S n)) : vector A n := vtail (cons a v') := v'. (** Well-founded recursion: note that it's polymorphic recursion in a sense: the type of vectors change at each recursive call. It does not follow a canonical elimination principle in this nested case. *) Equations diag {A n} (v : vector (vector A n) n) : vector A n by wf n := diag nil := nil ; diag (cons (cons a v) v') := cons a (diag (vmap vtail v')). (** The computational content is the right one here: only the vector is matched relevantly, not its indices, which could hence disappear. *) (* Extraction diag. *) (** It can be done structurally as well but we're matching on the index now. *) Equations diag_struct {A n} (v : vector (vector A n) n) : vector A n := diag_struct (n:=O) nil := nil ; diag_struct (n:=(S _)) (cons (cons a v) v') := cons a (diag_struct (vmap vtail v')). Definition mat A n m := vector (vector A m) n. Equations vmake {A} (n : nat) (a : A) : vector A n := vmake O a := nil ; vmake (S n) a := cons a (vmake n a). Equations vfold_right {A : nat -> Type} {B} (f : ∀ n, B -> A n -> A (S n)) (e : A 0) {n} (v : vector B n) : A n := vfold_right f e nil := e ; vfold_right f e (cons a v) := f _ a (vfold_right f e v). Equations vzip {A B C n} (f : A -> B -> C) (v : vector A n) (w : vector B n) : vector C n := vzip f nil _ := nil ; vzip f (cons a v) (cons a' v') := cons (f a a') (vzip f v v'). Definition transpose {A m n} : mat A m n -> mat A n m := vfold_right (A:=λ m, mat A n m) (λ m', vzip (λ a, cons a)) (vmake n nil). (* Lemma vfold_right_e {A : Type} {B} {n} (f : ∀ n', B' -> vector (vector A 0) n' -> vector (vector A 0) (S n')) *) (* (e : vector (vector A 0) n) v : vfold_right f (vmake n Vnil) v = *) (* Typeclasses eauto :=. *) Inductive fin : nat -> Set := | fz : forall {n}, fin (S n) | fs : forall {n}, fin n -> fin (S n). Derive Signature NoConfusion NoConfusionHom for fin. Generalizable All Variables. Opaque vmap. Opaque vtail. Equations nth {A : Type} {n} (v : vector A n) (f : fin n) : A := nth (cons a v) fz := a ; nth (cons a v) (fs f) := nth v f. Lemma nth_vmap {A B n} (v : vector A n) (fn : A -> B) (f : fin n) : nth (vmap fn v) f = fn (nth v f). Proof. revert B fn. funelim (nth v f); intros; now simp nth vmap. Qed. Lemma nth_vtail `(v : vector A (S n)) (f : fin n) : nth (vtail v) f = nth v (fs f). Proof. funelim (vtail v). intros; (* FIXME universe bug with [now simp nth] *) now autorewrite with nth. Qed. #[local] Hint Rewrite @nth_vmap @nth_vtail : nth. (* Lemma diag_nth `(v : vector (vector A n) n) (f : fin n) : nth (diag v) f = nth (nth v f) f. *) (* Proof. revert f. funelim (diag v); intros f. *) (* depelim f. *) (* depelim f; simp nth; trivial. *) (* rewrite H. now simp nth. *) (* Qed. *) Infix "+" := plus. Equations assoc (x y z : nat) : x + y + z = x + (y + z) := assoc 0 y z := idpath; assoc (S x) y z with assoc x y z, x + (y + z) => { assoc (S x) y z idpath _ := idpath }. (*About assoc_elim.*) Coq-Equations-1.3.1-8.20/test-suite/Below.v000066400000000000000000000104201463127417400201440ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (** Instances of [Below] for the standard datatypes. 
To be used by [equations] when it needs to do recursion on a type. *) Require Import Bvector. Require Import Vectors.Vector. Require Import Equations.Init Equations.CoreTactics Equations.Prop.DepElim Equations.Prop.Tactics Equations.Prop.Constants. Require Import Equations.Prop.FunctionalInduction. (** The [BelowPackage] class provides the definition of a [Below] predicate for some datatype, allowing to talk about course-of-value recursion on it. *) Class BelowPackage (A : Type) := { Below : A -> Type ; below : forall (a : A), Below a }. (** The [Recursor] class defines a recursor on a type *) Class Recursor (A : Type) := { rec_type : forall x : A, Type ; rec : forall x : A, rec_type x }. (** Support simplification of unification constraints appearing in the goal and the hypothesis. *) #[local] Hint Extern 0 (_ = _) => reflexivity : rec_decision. Ltac simpl_let := match goal with [ H : let _ := ?t in _ |- _ ] => match t with | fixproto => fail 1 | _ => cbv zeta in H end end. #[local] Hint Extern 40 => progress (cbv beta in * || simpl_let) : rec_decision. (** Use it as well as the [equations] simplifications. *) Ltac unfold_equations ::= repeat progress autounfold with equations Below. Ltac unfold_equations_in H ::= repeat progress autounfold with equations Below in H. Ltac destruct_conj := match goal with [ H : (_ * _)%type |- _ ] => destruct H end. (** Simplify [Below] hyps for proof search. *) #[local] Hint Extern 2 => progress (autorewrite with Below in * ; destruct_conj ; simplify_IH_hyps) : rec_decision. (** When solving goals with recursive prototypes in them, we allow an application only if it keeps the proof guarded (FIXME, guarded doesn't work). *) Ltac apply_fix_proto := match goal with | [ f : let _ := fixproto in _ |- _ ] => apply f end. #[local] Hint Extern 100 => apply_fix_proto : rec_decision. (** We now derive standard Below instances. *) Derive Below for nat. Definition rec_nat (P : nat -> Type) n (step : forall n, Below_nat P n -> P n) : P n := step n (below_nat P step n). #[export] Instance nat_Recursor : Recursor nat := { rec_type := fun n => forall P step, P n ; rec := fun n P step => rec_nat P n step }. Notation vector := Vector.t. Import Vector. Arguments nil {A}. Arguments cons {A} _ {n}. Open Scope equations_scope. Import EquationsNotations. Equations Below_vector A (P : forall n, vector A n -> Type) n (v : vector A n) : Type by struct v := Below_vector A P ?(0) [] := unit ; Below_vector A P _ (a :: v) := ((P _ v) * Below_vector A P _ v)%type. #[local] Hint Rewrite Below_vector_equation_2 : rec_decision. Ltac rec_fast v recname := intro_block v ; move v at top ; generalize_by_eqs_vars v ; (intros until v || revert_until v) ; let recv := eval simpl in (rec v) in (eapply recv || (dependent pattern v ; refine (recv _ _))) ; clear -recname ; intros until 1 ; on_last_hyp ltac:(fun x => rename x into recname) ; simpl in * ; simplify_dep_elim ; intros ; unblock_goal ; intros ; (try move recname at bottom) ; add_pattern (hide_pattern recname). 
(* Ltac rec_debug v recname := intro_block v ; move v at top ; *) (* generalize_by_eqs_vars v ; (intros until v || revert_until v) ; *) (* let recv := eval simpl in (rec v) in show_goal ; *) (* (eapply recv || (dependent pattern v ; refine (recv _ _))) ; show_hyps ; idtac "before clear"; *) (* clear_except recname ; *) (* intros until 1 ; on_last_hyp ltac:(fun x => rename x into recname) ; *) (* idtac "after clear"; *) (* show_hyps ; show_goal ; simpl in * ; simplify_dep_elim ; intros ; unblock_goal ; intros ; *) (* add_pattern (hide_pattern recname). *) Ltac rec recname v := rec_fast v recname. Coq-Equations-1.3.1-8.20/test-suite/DataStruct.v000066400000000000000000000257761463127417400211760ustar00rootroot00000000000000(* http://adam.chlipala.net/cpdt/html/DataStruct.html *) Require Import Arith List. Require Import Program Equations.Prop.Equations. Set Implicit Arguments. Set Keyed Unification. Section ilist. Variable A : Set. Inductive ilist : nat -> Set := | Nil : ilist O | Cons : forall n, A -> ilist n -> ilist (S n). Inductive fin : nat -> Set := | First : forall n, fin (S n) | Next : forall n, fin n -> fin (S n). Arguments First {n}. Derive Signature for ilist fin. Equations get {n} (ls : ilist n) (i : fin n) : A := get (Cons x _) First := x; get (Cons _ t) (Next j) := get t j. End ilist. Arguments Nil {A}. Arguments First {n}. Section ilist_map. Variables A B : Set. Variable f : A -> B. Equations imap {n} (ls : ilist A n) : ilist B n := imap Nil := Nil; imap (Cons x t) := Cons (f x) (imap t). Theorem get_imap : forall n (i : fin n) (ls : ilist A n), get (imap ls) i = f (get ls i). Proof. intros. funelim (imap ls). - depelim i. - depelim i. + repeat rewrite get_equation_2. reflexivity. + repeat rewrite get_equation_3. apply H. Qed. End ilist_map. #[universes(template)] Inductive member (A : Type) (elm : A) : list A -> Type := | HFirst : forall ls, member elm (elm :: ls) | HNext : forall x ls, member elm ls -> member elm (x :: ls). Section hlist. Variable A : Type. Variable B : A -> Type. Inductive hlist : list A -> Type := | HNil : hlist nil | HCons : forall (x : A) (ls : list A), B x -> hlist ls -> hlist (x :: ls). Variable elm : A. Derive Signature NoConfusion for member. Equations hget {ls} (mls : hlist ls) (i : member elm ls) : B elm := hget (HCons x _) (HFirst _ _) := x; hget (HCons _ t) (HNext _ j) := hget t j. End hlist. Arguments HNil {A B}. Arguments HCons {A B x ls} _ _. Arguments HFirst {A elm ls}. Arguments HNext {A elm x ls} _. Definition someTypes : list Set := nat :: bool :: nil. Example someValues : hlist (fun T : Set => T) someTypes := HCons 5 (HCons true HNil). Goal hget someValues HFirst = 5. Proof. reflexivity. Qed. Goal hget someValues (HNext HFirst) = true. Proof. now simp hget. Qed. Inductive type : Set := | Unit : type | Arrow : type -> type -> type. Inductive exp : list type -> type -> Set := | Const : forall ts, exp ts Unit | Var : forall ts t, member t ts -> exp ts t | App : forall ts dom ran, exp ts (Arrow dom ran) -> exp ts dom -> exp ts ran | Abs : forall ts dom ran, exp (dom :: ts) ran -> exp ts (Arrow dom ran). Arguments Const {ts}. Equations typeDenote (t : type): Set := typeDenote Unit := unit; typeDenote (Arrow t1 t2) := typeDenote t1 -> typeDenote t2. Equations expDenote {ts t} (e : exp ts t) (mls : hlist typeDenote ts) : typeDenote t := expDenote Const _ := tt; expDenote (Var mem) s := hget s mem; expDenote (App e1 e2) s with expDenote e1 s => { | e' := e' (expDenote e2 s) }; expDenote (Abs e) s := fun x => expDenote e (HCons x s). 
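(* An illustrative use of [expDenote] on a closed term that simply looks up a
   variable in the environment. *)
Goal expDenote (Var (t:=Unit) HFirst) (HCons tt HNil) = tt.
Proof. now simp expDenote hget. Qed.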
Equations filist (A : Set) (n : nat) : Set := filist A 0 := unit; filist A (S n) := (A * filist A n)%type. Global Transparent filist. Equations ffin (n : nat) : Set := ffin 0 := Empty_set; ffin (S n) := option (ffin n). Global Transparent ffin. Equations fget {A n} (ls : filist A n) (i : ffin n) : A := fget (n:=(S n)) (pair x _) None := x; fget (n:=(S n)) (pair _ ls) (Some i) := fget ls i. Section filist_map. Variables A B : Set. Variable f : A -> B. Equations fimap {n} (ls : filist A n) : filist B n := fimap (n:=0) tt := tt; fimap (n:=(S n)) (pair x ls) := pair (f x) (fimap ls). Theorem fget_fimap : forall n (i : ffin n) (ls : filist A n), fget (fimap ls) i = f (fget ls i). Proof. intros. funelim (fimap ls); depelim i; now simp fget. Qed. End filist_map. Section fhlist. Variable A : Type. Variable B : A -> Type. Equations fhlist (ls : list A) : Type := fhlist nil := unit; fhlist (cons x ls) := (B x * fhlist ls)%type. Transparent fhlist. Variable elm : A. Equations fmember (ls : list A) : Type := fmember nil := Empty_set; fmember (cons x ls) := ((x = elm) + fmember ls)%type. Transparent fmember. Equations fhget (ls : list A) (mls : fhlist ls) (i : fmember ls) : B elm := fhget nil _ !; fhget _ (pair x _) (inl eq_refl) := x; fhget (cons _ ls) (pair _ l) (inr i) := fhget ls l i. End fhlist. Arguments fhget {A B elm ls} _ _. (* Section tree. Variable A : Set. Inductive tree : Set := | Leaf : A -> tree | Node : forall n, ilist tree n -> tree. End tree. Section ifoldr. Variables A B : Set. Variable f : A -> B -> B. Variable i : B. Equations ifoldr n (ls : ilist A n) : B := ifoldr _ Nil := i; ifoldr _ (Cons x ls) := f x (ifoldr ls). End ifoldr. Equations sum (t : tree nat) : nat := sum (Leaf n) := n; sum (Node _ ls) := ifoldr (fun t n => sum t + n) 0 ls. (* Fixpoint inc (t : tree nat) : tree nat := match t with | Leaf n => Leaf (S n) | Node _ ls => Node (imap inc ls) end. *) Equations(nocomp) inc (t : tree nat) : tree nat := inc (Leaf n) := Leaf (S n); inc (Node _ ls) := Node (imap inc ls). Goal inc (Leaf 0) = Leaf 1. Proof. simp inc. Qed. Theorem sum_inc : forall t, sum (inc t) >= sum t. Proof. intros. funelim (inc t); simp sum. - apply Le.le_n_Sn. - unfold inc_obligation_1. Abort. *) Section tree. Variable A : Set. Inductive tree : Set := | Leaf : A -> tree | Node : forall n, (ffin n -> tree) -> tree. End tree. Arguments Node {A n} _. Section rifoldr. Variables A B : Set. Variable f : A -> B -> B. Variable i : B. Equations rifoldr (n : nat) (get : ffin n -> A) : B := rifoldr 0 _ := i; rifoldr (S n) get := f (get None) (rifoldr n (fun i => get (Some i))). End rifoldr. Arguments rifoldr {A B} _ _ {n} _. Equations sum (t : tree nat) : nat := sum (Leaf n) := n; sum (Node f) := rifoldr plus 0 (fun i => sum (f i)). Equations inc (t : tree nat) : tree nat := inc (Leaf n) := Leaf (S n); inc (Node f) := Node (fun i => inc (f i)). Import Sigma_Notations. Transparent rifoldr. Lemma sum_inc' : forall n (f1 f2 : ffin n -> nat), (forall i, f1 i >= f2 i) -> rifoldr plus 0 f1 >= rifoldr plus 0 f2. Proof. intros. funelim (rifoldr plus 0 f1). - constructor. - apply Nat.add_le_mono. + apply H0. + apply H. intros. apply H0. Qed. Theorem sum_inc : forall t, sum (inc t) >= sum t. Proof. intros t. funelim (inc t); simp sum. auto. apply sum_inc'. intros; auto. Qed. Inductive type' : Type := Nat | Bool. Derive NoConfusion for type'. 
Inductive exp' : type' -> Set := | NConst : nat -> exp' Nat | Plus : exp' Nat -> exp' Nat -> exp' Nat | Eq : exp' Nat -> exp' Nat -> exp' Bool | BConst : bool -> exp' Bool | Cond : forall n t, (ffin n -> exp' Bool) -> (ffin n -> exp' t) -> exp' t -> exp' t. Derive Signature NoConfusion NoConfusionHom for exp'. Equations type'Denote (t : type') : Set := type'Denote Nat := nat; type'Denote Bool := bool. Section cond. Variable A : Set. Variable default : A. Equations cond (n : nat) (tests : ffin n -> bool) (bodies : ffin n -> A) : A := cond 0 _ _ := default; cond (S n) tests bodies with tests None => { | true := bodies None; | false := cond n (fun i => tests (Some i)) (fun i => bodies (Some i)) }. End cond. Arguments cond {A} _ {n} _ _. Equations exp'Denote t (e : exp' t) : type'Denote t := exp'Denote (NConst n) := n; exp'Denote (Plus e1 e2) := (exp'Denote e1) + (exp'Denote e2); exp'Denote (Eq e1 e2) (*<= eq_nat_dec*) := Nat.eqb (exp'Denote e1) (exp'Denote e2) (*=> { | true := true; | false := false }*); exp'Denote (BConst b) := b; exp'Denote (Cond _ tests bodies default) := cond (exp'Denote default) (fun i => exp'Denote (tests i)) (fun i => exp'Denote (bodies i)). Definition someExp' : exp' Nat := Cond 1 (fun _ => BConst true) (fun _ => Plus (NConst 1) (NConst 2)) (NConst 0). Goal exp'Denote someExp' = 3. Proof. now simp exp'Denote. Qed. Goal exp'Denote (Eq someExp' (NConst 3)) = true. Proof. now simp exp'Denote. Qed. Section cfoldCond. (* A weakness? of Equations: we cannot refine section variables: here in the inner clause we would need to refine [t] to Nat or Bool. *) Variable t : type'. Variable default : exp' t. Fail Equations cfoldCond (n : nat) (tests : ffin n -> exp' Bool) (bodies : ffin n -> exp' t) : exp' t := cfoldCond 0 _ _ := default; cfoldCond (S n) tests bodies with tests None => { | BConst true := bodies None; | BConst false := cfoldCond n (fun i => tests (Some i)) (fun i => bodies (Some i)); | Eq e1 e2 with cfoldCond n (fun i => tests (Some i)) (fun i => bodies (Some i)) => { | Cond n' tests' bodies' default' := Cond (S n') (fun i => match i with | None => tests None | Some i => tests' i end) (fun i => match i with | None => bodies None | Some i => bodies' i end) default'; | e := Cond 1 (fun _ => tests None) (fun _ => bodies None) e }; | _ := default }. End cfoldCond. Equations cfoldCond (t : type') (default : exp' t) {n : nat} (tests : ffin n -> exp' Bool) (bodies : ffin n -> exp' t) : exp' t := cfoldCond default (n:=0) _ _ := default; cfoldCond default (n:=(S n)) tests bodies with tests None => { | BConst true := bodies None; | BConst false := cfoldCond default (fun i => tests (Some i)) (fun i => bodies (Some i)); | _ with cfoldCond default (fun i => tests (Some i)) (fun i => bodies (Some i)) => { | Cond n' tests' bodies' default' := Cond (S n') (fun i => match i with | None => tests None | Some i => tests' i end) (fun i => match i with | None => bodies None | Some i => bodies' i end) default'; | e := Cond 1 (fun _ => tests None) (fun _ => bodies None) e }}. 
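(* [cfoldCond] constant-folds an n-ary conditional: a test that is literally
   [BConst true] selects its body, a literal [BConst false] test is skipped,
   and any remaining (dynamic) test is re-packed into a [Cond] node, merged
   with a nested [Cond] produced by the recursive call when possible. [cfold]
   below calls it with already-folded tests, bodies and default. *)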
Fixpoint cfold t (e : exp' t) : exp' t := match e with | NConst n => NConst n | Plus e1 e2 => let e1' := cfold e1 in let e2' := cfold e2 in match e1', e2' return exp' Nat with | NConst n1, NConst n2 => NConst (n1 + n2) | _, _ => Plus e1' e2' end | Eq e1 e2 => let e1' := cfold e1 in let e2' := cfold e2 in match e1', e2' return exp' Bool with | NConst n1, NConst n2 => BConst (if eq_nat_dec n1 n2 then true else false) | _, _ => Eq e1' e2' end | BConst b => BConst b | Cond n tests bodies default => cfoldCond (cfold default) (fun idx => cfold (tests idx)) (fun idx => cfold (bodies idx)) end. Lemma cfoldCond_correct : forall t (default : exp' t) n (tests : ffin n -> exp' Bool) (bodies : ffin n -> exp' t), exp'Denote (cfoldCond default tests bodies) = exp'Denote (Cond n tests bodies default). Proof. unshelve refine_ho (cfoldCond_elim _ _ _ _ _ _ _ _ _ _ _ _ _ _); simpl; intros. all:simpl; simp exp'Denote cond; rewrite ?H, ?Heq, ?Heq0; try rewrite ?Heq in Hind; simp exp'Denote cond; now repeat (match goal with | [ |- context[cond_clause_2 _ _ ?E _] ] => destruct E; simp cond end). Qed. Coq-Equations-1.3.1-8.20/test-suite/LogicType.v000066400000000000000000000063151463127417400210030ustar00rootroot00000000000000Set Warnings "-notation-overridden". Set Universe Polymorphism. (** Switch to an equality in Type *) Require Import Equations.Type.All. Derive Signature for Id. Definition nathset := _ : HSet nat. (* Equations test_k (x : nat) (r : x = x) : r = r := *) (* test_k x id_refl := id_refl. *) Equations foo (A : Type) (x : A) : A := foo A x := x. Inductive fin : nat -> Set := | fz : forall {n}, fin (S n) | fs : forall {n}, fin n -> fin (S n). Derive Signature for fin. Derive NoConfusion for nat. Equations finp {n} (f : fin (S n)) : unit + fin n := finp fz := inl tt; finp (fs f) := inr f. Unset Universe Minimization ToSet. Inductive vector@{i} (A : Type@{i}) : nat -> Type@{i} := | nil : vector A 0 | cons {n : nat} : A -> vector A n -> vector A (S n). Arguments vector A%type_scope n%nat_scope. Arguments nil {A}. Arguments cons {A%type_scope} {n%nat_scope} a v. Derive Signature for vector. Require Import Equations.CoreTactics Equations.Type.Tactics. Require Import Equations.Type.Tactics. Require Import Equations.Type.FunctionalInduction. Set Universe Minimization ToSet. Derive NoConfusionHom for vector. Unset Universe Minimization ToSet. #[export] Instance vector_eqdec@{i +|+} {A : Type@{i}} {n} `(EqDec@{i} A) : EqDec (vector A n). Proof. intros. intros x. intros y. induction x. - left. now depelim y. - depelim y. pose proof (Classes.eq_dec a a0). dependent elimination X as [inl id_refl|inr Ha]. -- specialize (IHx v). dependent elimination IHx as [inl id_refl|inr H']. --- left; reflexivity. --- right. simplify *. now apply H'. -- right; simplify *. now apply Ha. Defined. Record vect {A} := mkVect { vect_len : nat; vect_vector : vector A vect_len }. Coercion mkVect : vector >-> vect. Derive NoConfusion for vect. Reserved Notation "x ++v y" (at level 60). Equations vapp {A} {n m} (v : vector A n) (w : vector A m) : vector A (n + m)%nat := { nil ++v w := w ; (cons a v) ++v w := cons a (v ++v w) } where "x ++v y" := (vapp x y). Inductive Split {X : Type}{m n : nat} : vector X (m + n) -> Type := append : forall (xs : vector X m)(ys : vector X n), Split (vapp xs ys). Arguments Split [ X ]. (* Eval compute in @app'. *) (* About nil. About vector. *) (* Set Typeclasses Debug Verbosity 2. *) (* Set Typeclasses Filtered Unification. 
*) #[local] Hint Extern 0 (WellFounded _) => refine WellFoundedInstances.lt_wf : typeclass_instances. Equations split {X : Type} {m n : nat} (xs : vector X (Peano.plus m n)) : Split m n xs by wf m := split (m:=0) xs := append nil xs; split (m:=S m) (cons x xs) with split xs => { | append xs' ys' := append (cons x xs') ys' }. Derive Subterm for vector. #[local] Hint Unfold vector_subterm : subterm_relation. Import Sigma_Notations. Section foo. Context {A B : Type}. Equations unzipv {n} (v : vector (A * B) n) : vector A n * vector B n by wf (signature_pack v) (@vector_subterm (A * B)) := unzipv nil := (nil, nil) ; unzipv (cons (x, y) v) with unzipv v := { | (xs, ys) := (cons x xs, cons y ys) }. End foo. Section vlast. Context {A : Type}. Equations vlast {n} (v : vector A (S n)) : A by wf (signature_pack v) (@vector_subterm A) := vlast (cons (n:=O) a nil) := a ; vlast (cons (n:=S n') a v) := vlast v. End vlast. Coq-Equations-1.3.1-8.20/test-suite/Noconfinv.v000066400000000000000000000003151463127417400210350ustar00rootroot00000000000000From Equations Require Import Equations. Equations noConfusion_nat_inv (x y : nat) (P : NoConfusion x y) : x = y := noConfusion_nat_inv 0 0 _ := eq_refl; noConfusion_nat_inv (S n) (S m) p := f_equal S p. Coq-Equations-1.3.1-8.20/test-suite/Noconflet.v000066400000000000000000000012001463127417400210170ustar00rootroot00000000000000Require Import Equations. Derive NoConfusion for vector. Inductive foo (A : Type) : nat -> bool -> Set := | fool n : foo A n true | foor : foo A 0 false. Derive NoConfusion for foo. Inductive foolet (A : Type) (B := A) : nat -> bool -> Set := | fooletl n : foolet A n true | fooletr : foolet A 0 false. Derive NoConfusion for foolet. (** elim vs destruct bug on foolet *) (* Next Obligation. *) (* intros. destruct_sigma b. elim b. *) (* destruct b. solve_ *) (* destruct pr1. simpl. simpl in *. *) (* (* Bug in Coq: destruct pr2 *) *) (* elim pr2. simpl. intros; auto. *) (* simpl. auto. *) (* Defined. *) Coq-Equations-1.3.1-8.20/test-suite/_CoqProject000066400000000000000000000030571463127417400210500ustar00rootroot00000000000000-I "../src" -R . 
TestSuite -R "../theories" Equations INSTALLDEFAULTROOT = 1 attributes.v fin.v Basics.v notations.v BasicsDec.v rec.v f91.v Noconfinv.v zoe.v gcd.v DataStruct.v daaa.v nestedobls.v rose.v scope.v depelim.v issues/issue7.v issues/issue8.v issues/issue24.v issues/issue25.v issues/issue63.v issues/issue66.v issues/issue70.v issues/issue74.v issues/issue75.v issues/issue77.v issues/issue79.v issues/issue81.v issues/issue82.v issues/issue83.v issues/issue84.v issues/issue85.v issues/issue91.v issues/issue95_1.v issues/issue95_2.v issues/issue96.v issues/issue98.v issues/issue100.v issues/issue104.v issues/issue105.v issues/issue106.v issues/issue107.v issues/issue107_2.v issues/issue112.v issues/issue113.v issues/issue114.v issues/issue117.v issues/issue120.v issues/issue124.v issues/issue129.v issues/issue143.v issues/issue172.v issues/issue190.v issues/issue191.v issues/issue193.v issues/issue228.v issues/issue246.v issues/issue249.v issues/issue258.v issues/issue286.v issues/issue297.v issues/issue306.v issues/issue328.v issues/issue329.v issues/issue338.v issues/issue346.v issues/issue349.v issues/issue353.v issues/issue354.v issues/issue372.v issues/issue321.v issues/issue399.v issues/issue499.v issues/issue500.v issues/coq_pr_16995.v eqdec_error.v noconf_simplify.v le.v LogicType.v innacs.v mfix.v mutrec.v nestedrec.v nestedrec2.v nestfixnorec.v agda_1408.v agda_1775.v tabareau_vec.v yves.v local_where.v cfold.v nestedwhererec.v Below.v nocycle.v wfnocycle.v divmod.v fle_trans.v interval.v letred.v nocycle_def.v nolam.v ack.v funelim_ack.vCoq-Equations-1.3.1-8.20/test-suite/ack.v000066400000000000000000000061311463127417400176360ustar00rootroot00000000000000(* https://lawrencecpaulson.github.io/2022/08/31/Ackermann-not-PR-I.html *) Require Import Arith Lia. From Equations Require Import Equations. Equations ack (p : nat * nat) : nat by wf p (lexprod _ _ lt lt) := ack (0, n) := S n; ack (S m, 0) := ack (m, 1); ack (S m, S n) := ack (m, ack (S m, n)). Import Nat Peano. Lemma lt_ack2 i j: j < ack(i,j). Proof. funelim (ack (i, j)). - constructor. - eapply lt_trans. 2: exact H. constructor. - exact (le_lt_trans _ _ _ H H0). Qed. Lemma ack_lt_ack_S2 i j: ack(i, j) < ack (i, S j). Proof. induction i, j; simp ack; apply lt_ack2. Qed. Lemma ack_lt_mono2 i j k: j < k -> ack(i,j) < ack(i,k). Proof. intro H. induction H. - apply ack_lt_ack_S2. - eapply lt_trans. exact IHle. apply ack_lt_ack_S2. Qed. Lemma lt_mono_imp_le_mono f (LTM: forall n m, n < m -> f n < f m): forall n m, n <= m -> f n <= f m. Proof. intros n m H. induction H. - constructor. - eapply le_trans. exact IHle. apply lt_le_incl, LTM, lt_succ_diag_r. Qed. Lemma ack_le_mono2 k i j: j <= k -> ack(i,j) <= ack(i,k). Proof. apply (lt_mono_imp_le_mono (fun n => ack(i,n))). apply ack_lt_mono2. Qed. Lemma ack2_le_ack1 i j: ack (i, S j) <= ack (S i, j). Proof. induction j; simp ack. - constructor. - apply ack_le_mono2. eapply le_trans. exact (lt_ack2 i (S j)). exact IHj. Qed. Lemma S_less_ack_S1 i j: S j < ack(S i, j). Proof. induction j; simp ack. - apply lt_ack2. - eapply lt_le_trans. apply lt_ack2. exact (ack_le_mono2 _ _ _ IHj). Qed. Lemma ack_lt_ack_S1 i j: ack(i,j) < ack(S i, j). Proof. induction j; simp ack; apply ack_lt_mono2. - exact lt_0_1. - apply S_less_ack_S1. Qed. Lemma lt_ack1 i j: i < ack(i,j). Proof. induction i; simp ack. - apply lt_0_succ. - eapply le_lt_trans. exact IHi. apply ack_lt_ack_S1. Qed. Lemma ack_1 j: ack(1,j) = j + 2. Proof. induction j; simp ack. - constructor. - now rewrite IHj. Qed. 
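(* Added sanity check (a sketch, not part of the original file): instantiating
   [ack_1] at a concrete argument; the remaining arithmetic closes by
   computation. *)
Goal ack (1, 5) = 7.
Proof. rewrite ack_1. reflexivity. Qed.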
Lemma ack_2 j: ack(2,j) = 2 * j + 3. Proof. induction j; simp ack. - trivial. - rewrite IHj, ack_1. lia. Qed. Lemma ack_lt_mono1 k i j: i < j -> ack(i, k) < ack(j, k). Proof. intro H. induction H. - apply ack_lt_ack_S1. - eapply lt_trans. apply IHle. apply ack_lt_ack_S1. Qed. Lemma ack_le_mono1 k i j: i <= j -> ack(i, k) <= ack(j, k). Proof. apply (lt_mono_imp_le_mono (fun n => ack(n, k))). apply ack_lt_mono1. Qed. Lemma ack_nest_bound i1 i2 j: ack(i1, ack(i2,j)) < ack(2 + i1 + i2, j). Proof. assert (ack (i1, ack (i2, j)) < ack(i1 + i2, ack(S (i1 + i2), j))). { eapply Nat.le_lt_trans. apply ack_le_mono1. 2: apply ack_lt_mono2. - apply le_add_r. - apply ack_lt_mono1. auto with arith. } eapply Nat.lt_le_trans. apply H. rewrite <- ack_equation_3. apply ack2_le_ack1. Qed. Lemma ack_add_bound i1 i2 j: ack(i1,j) + ack(i2,j) < ack (4 + i1 + i2, j). Proof. apply (lt_trans _ (ack(2, ack(i1 + i2, j))) _). - rewrite ack_2. pose (H1 := ack_le_mono1 j i1 (i1 + i2)). pose (H2 := ack_le_mono1 j i2 (i1 + i2)). lia. - apply ack_nest_bound. Qed. Lemma ack_add_bound2 i j k (H: i < ack(k,j)): i + j < ack (4 + k, j). Proof. replace (4 + k) with (4 + k + 0) by apply add_0_r. eapply lt_trans. 2: apply (ack_add_bound k 0 j). rewrite ack_equation_1. apply add_lt_mono. - exact H. - apply lt_succ_diag_r. Qed. Coq-Equations-1.3.1-8.20/test-suite/agda_1408.v000066400000000000000000000006561463127417400204560ustar00rootroot00000000000000From Equations Require Import Equations. Axiom I : Set. Axiom i1 i2 : I. Inductive D : I -> Set := | d1 : D i1 | d2 : D i2. Derive Signature NoConfusion for D. (** This would require general K or deciding i1 = i2. *) Fail Derive NoConfusionHom for D. Inductive P : forall {i}, D i -> Set := p1 : P d1 | p2 : P d2. Derive Signature for P. Derive NoConfusionHom for P. Equations Foo (p : P d1) : Set := Foo p1 := nat. Coq-Equations-1.3.1-8.20/test-suite/agda_1775.v000066400000000000000000000036661463127417400204710ustar00rootroot00000000000000From Equations Require Import Equations. Require Import Utf8. (* Set Universe Polymorphism. *) (** Can we define NoConfusion in SProp (squashing equalities of arguments)? Would not allow to show equivalence to (x = y) for non-strict sets. *) Import Sigma_Notations. Open Scope equations_scope. Inductive fin : nat -> Set := | fin0 n : fin (S n) | finS n : fin n -> fin (S n). Derive Signature NoConfusion for fin. Arguments fin0 {_}. Arguments finS {_} _. (* Derive NoConfusion for ℕ. *) Derive NoConfusionHom for fin. Inductive Vec (A : Set) : nat -> Set := nil : Vec A O | cons : forall {n} (x : A) (xs : Vec A n), Vec A (S n). Derive Signature NoConfusion NoConfusionHom for Vec. Arguments nil {_}. Arguments cons {_} _ _. Reserved Notation " x [ f ]= y " (at level 0, no associativity, f at next level, y at next level). Inductive at_eq {A : Set} : forall{n : nat}, Vec A n -> fin n -> A -> Set := | here : ∀ {n} {x} {xs : Vec A n}, at_eq (cons _ x xs) fin0 x | there : ∀ {n} {i : fin n} {x y} {xs : Vec A n} (H : xs [ i ]= x), (cons _ y xs) [ (finS i) ]= x where " x [ n ]= y " := (at_eq x n y). Derive Signature for at_eq. Definition Subset := Vec bool. Reserved Notation " x ∈ S " (at level 4). Definition inS {n} (f : fin n) (s : Subset n) := s [ f ]= true. Notation "x ∈ S" := (inS x S). Equations drop_there {s n x} {p : Subset n} (H : (finS x) ∈ (cons _ s p)) : x ∈ p := drop_there (there l) := l. Inductive Dec (P : Set) : Set := | yes ( p : P) : Dec P | no ( p : P -> False) : Dec P. Arguments yes {_} _. Arguments no {_} _. Equations? 
isin {n} (x : fin n) (p : Subset n) : Dec (x ∈ p) := isin fin0 (cons true p) := yes here; isin fin0 (cons false p) := no _; isin (finS f) (cons s p) with isin f p := | yes H := yes (there H); | no H := no (fun H' => _). Proof. depelim H. depelim H'. apply (H H'). Defined. Transparent isin. Print Assumptions isin.Coq-Equations-1.3.1-8.20/test-suite/agda_1775_full.v000066400000000000000000000161061463127417400215040ustar00rootroot00000000000000From Equations Require Import Equations DepElimDec HSets. (* Set Universe Polymorphism. *) (** Can we define NoConfusion in SProp (squashing equalities of arguments)? Would not allow to show equivalence to (x = y) for non-strict sets. *) Unset Equations WithK. Import Sigma_Notations. Open Scope equations_scope. Definition Sect {A B : Type} (s : A -> B) (r : B -> A) := forall x : A, r (s x) = x. Class IsEquiv {A B : Type} (f : A -> B) := BuildIsEquiv { equiv_inv : B -> A ; eisretr : Sect equiv_inv f; eissect : Sect f equiv_inv; eisadj : forall x : A, eisretr (f x) = f_equal f (eissect x) }. Arguments eisretr {A B}%type_scope {f%function_scope} {_} _. Arguments eissect {A B}%type_scope {f%function_scope} {_} _. Arguments eisadj {A B}%type_scope {f%function_scope} {_} _. Arguments IsEquiv {A B}%type_scope f%function_scope. Polymorphic Record Equiv (A B : Type) := { equiv :> A -> B ; is_equiv :> IsEquiv equiv }. Arguments equiv {A B} e. Polymorphic Instance Equiv_IsEquiv {A B} (e : Equiv A B) : IsEquiv (equiv e). Proof. apply is_equiv. Defined. Definition inv_equiv {A B} (E: Equiv A B) : B -> A := equiv_inv (IsEquiv:=is_equiv _ _ E). Polymorphic Definition equiv_inv_equiv {A B} {E: Equiv A B} (x : A) : inv_equiv _ (equiv E x) = x := eissect x. Definition inv_equiv_equiv {A B} {E: Equiv A B} (x : B) : equiv E (inv_equiv _ x) = x := eisretr x. Definition equiv_adj {A B} {E: Equiv A B} (x : A) : inv_equiv_equiv (equiv E x) = f_equal (equiv E) (equiv_inv_equiv x) := eisadj x. Notation " 'rew' H 'in' c " := (@eq_rect _ _ _ c _ H) (at level 20). Require Import Utf8. Notation " X <~> Y " := (Equiv X Y) (at level 90, no associativity, Y at next level). Lemma apply_equiv_dom {A B} (P : A -> Type) (e : Equiv A B) : (forall x : B, P (inv_equiv e x)) -> forall x : A, P x. Proof. intros. specialize (X (equiv e x)). rewrite equiv_inv_equiv in X. exact X. Defined. Inductive fin : nat -> Set := | fin0 n : fin (S n) | finS n : fin n -> fin (S n). Derive Signature for fin. Arguments fin0 {_}. Arguments finS {_} _. (* Derive NoConfusion for ℕ. *) Derive NoConfusion for fin. Equations noConf_fin {n} (v v' : fin n) : Prop := noConf_fin fin0 fin0 := True; noConf_fin (finS f) (finS f') := f = f'; noConf_fin _ _ := False. Transparent noConf_fin. Print Assumptions noConf_fin_elim. Definition noConf_fin_eq {n} (v v' : fin n) : v = v' -> noConf_fin v v'. Proof. intros ->. destruct v'; constructor. Defined. Definition noConf_fin_eq_inv {n} (v v' : fin n) : noConf_fin v v' -> v = v'. Proof. funelim (noConf_fin v v'); try simplify *; constructor. Defined. Lemma noConf_fin_eq_eq_inv {n} (v v' : fin n) (e : v = v') : noConf_fin_eq_inv _ _ (noConf_fin_eq _ _ e) = e. Proof. destruct e. destruct v; reflexivity. Defined. Lemma noConf_fin_refl {n} (v : fin n) : noConf_fin v v. Proof. destruct v; reflexivity. Defined. Lemma noConf_fin_eq_inv_eq_refl {n} (v : fin n) : noConf_fin_eq _ _ (noConf_fin_eq_inv v v (noConf_fin_refl v)) = (noConf_fin_refl v). Proof. destruct v; reflexivity. Defined. 
Lemma noConf_fin_eq_inv_eq {n} (v v' : fin n) (e : noConf_fin v v') : noConf_fin_eq _ _ (noConf_fin_eq_inv _ _ e) = e. Proof. destruct v; revert e; depelim v'; simplify *; reflexivity. Defined. Lemma noConf_fin_hom_equiv : forall n, NoConfusionPackage (fin n). Proof. unshelve econstructor. refine noConf_fin. apply noConf_fin_eq. apply noConf_fin_eq_inv. apply noConf_fin_eq_eq_inv. Defined. Existing Instances noConf_fin_hom_equiv. Definition noConf_fin_equiv {n} (v v' : fin n) : Equiv (v = v') (noConf_fin v v'). Proof. refine {| equiv := noConf_fin_eq v v' |}. unshelve refine {| equiv_inv := noConf_fin_eq_inv v v' |}. red. intros. apply noConf_fin_eq_inv_eq. red; intros. apply noConf_fin_eq_eq_inv. simplify *. destruct v'; reflexivity. Defined. Inductive Vec (A : Set) : nat -> Set := nil : Vec A O | cons : forall {n} (x : A) (xs : Vec A n), Vec A (S n). Derive Signature for Vec. Arguments nil {_}. Arguments cons {_} _ _. Derive NoConfusion for Vec. Equations noConfVec {A n} (v v' : Vec A n) : Prop := noConfVec nil nil := True; noConfVec (cons _ x xs) (cons _ x' xs') := {| pr1 := x; pr2 := xs |} = {| pr1 := x'; pr2 := xs' |}. Transparent noConfVec. Print Assumptions noConfVec_elim. Definition noConfVec_eq {A n} (v v' : Vec A n) : v = v' -> noConfVec v v'. Proof. intros ->. destruct v'; constructor. Defined. Definition noConfVec_eq_inv {A n} (v v' : Vec A n) : noConfVec v v' -> v = v'. Proof. funelim (noConfVec v v'); try simplify *; constructor. Defined. Lemma noConfVec_eq_eq_inv {A n} (v v' : Vec A n) (e : v = v') : noConfVec_eq_inv _ _ (noConfVec_eq _ _ e) = e. Proof. destruct e. destruct v; reflexivity. Defined. Lemma noConfVec_refl {A n} (v : Vec A n) : noConfVec v v. Proof. destruct v; reflexivity. Defined. Lemma noConfVec_eq_inv_eq_refl {A n} (v : Vec A n) : noConfVec_eq _ _ (noConfVec_eq_inv v v (noConfVec_refl v)) = (noConfVec_refl v). Proof. destruct v; reflexivity. Defined. Lemma noConfVec_eq_inv_eq {A n} (v v' : Vec A n) (e : noConfVec v v') : noConfVec_eq _ _ (noConfVec_eq_inv _ _ e) = e. Proof. destruct v; revert e; depelim v'; simplify *; reflexivity. Defined. Definition noConf_vec_equiv {A n} (v v' : Vec A n) : Equiv (v = v') (noConfVec v v'). Proof. refine {| equiv := noConfVec_eq v v' |}. unshelve refine {| equiv_inv := noConfVec_eq_inv v v' |}. red. intros. apply noConfVec_eq_inv_eq. red; intros. apply noConfVec_eq_eq_inv. simplify *. destruct v'; reflexivity. Defined. Lemma noConfVec_hom_equiv : forall {A : Set} n, NoConfusionPackage (Vec A n). Proof. unshelve econstructor. refine noConfVec. apply noConfVec_eq. apply noConfVec_eq_inv. apply noConfVec_eq_eq_inv. Defined. Existing Instances noConfVec_hom_equiv. Reserved Notation " x [ f ]= y " (at level 0, no associativity, f at next level, y at next level). Inductive at_eq {A : Set} : forall{n : nat}, Vec A n -> fin n -> A -> Set := | here : ∀ {n} {x} {xs : Vec A n}, at_eq (cons _ x xs) fin0 x | there : ∀ {n} {i : fin n} {x y} {xs : Vec A n} (H : xs [ i ]= x), (cons _ y xs) [ (finS i) ]= x where " x [ n ]= y " := (at_eq x n y). Definition Subset := Vec bool. Reserved Notation " x ∈ S " (at level 4). Definition inS {n} (f : fin n) (s : Subset n) := s [ f ]= true. Notation "x ∈ S" := (inS x S). Equations drop_there {s n x} {p : Subset n} (H : (finS x) ∈ (cons _ s p)) : x ∈ p := drop_there (there p) := p. Inductive Dec (P : Set) : Set := | yes ( p : P) : Dec P | no ( p : P -> False) : Dec P. Arguments yes {_} _. Arguments no {_} _. 
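(* Added example (a sketch, not part of the original file): [Dec] packages
   either a proof or a refutation of a [Set]-valued statement; the trivial
   instance below just records a proof of [unit]. *)
Check (yes tt : Dec unit).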
Equations isin {n} (x : fin n) (p : Subset n) : Dec (x ∈ p) := isin fin0 (cons true p) := yes here; isin fin0 (cons false p) := no _; isin (finS f) (cons s p) with isin f p := | yes H := yes (there H); | no H := no (fun H' => _). Next Obligation. depelim H. Defined. Next Obligation. depelim H'. apply (H H'). Defined. Transparent isin. Next Obligation. induction x. depelim p. depelim x. constructor. constructor. depelim p. simp isin. constructor. apply IHx. destruct (isin x p). constructor. constructor. Defined. Coq-Equations-1.3.1-8.20/test-suite/attributes.v000066400000000000000000000017161463127417400212720ustar00rootroot00000000000000From Equations Require Import Equations. Ltac solvetac := match goal with |- ?T => exact 0 end. Module WithProgram. #[tactic=idtac] Equations foo (x : nat) : nat := | 0 => 0 | S n => S _. Next Obligation. exact 0. Qed. #[tactic=solvetac] Equations foo' (x : nat) : nat := | 0 => 0 | S n => S _. Definition test := foo'. End WithProgram. Module WithProofMode. #[tactic=idtac] Equations? foo (x : nat) : nat := | 0 => _ | S n => S _. Proof. exact 0. abstract exact 1. Defined. #[tactic=solvetac] Equations foo' (x : nat) : nat := | 0 => 0 | S n => S _. End WithProofMode. Module QualifiedTactic. (* Program_simpl solves goals in nat *) #[tactic="Program.Tactics.program_simpl"] Equations foo (x : nat) : nat := | x := _. (* equations_simpl doesn't *) #[tactic="Equations.CoreTactics.equations_simplify"] Equations bar (x : nat) : nat := | x := _. Next Obligation. exact 0. Qed. End QualifiedTactic. Coq-Equations-1.3.1-8.20/test-suite/cfold.v000066400000000000000000000065451463127417400202000ustar00rootroot00000000000000(* same example in Coq using the Equations plugin. Proof size is now constant instead of quadratic., but program is as straightforward as in Agda. *) Require Import Arith. From Equations Require Import Equations. Require Import Coq.Bool.Bool. Set Keyed Unification. Set Implicit Arguments. Set Asymmetric Patterns. Inductive type : Set := | Nat : type | Bool : type | Prod : type -> type -> type. Inductive exp : type -> Set := | NConst : nat -> exp Nat | Plus : exp Nat -> exp Nat -> exp Nat | Eq : exp Nat -> exp Nat -> exp Bool | BConst : bool -> exp Bool | And : exp Bool -> exp Bool -> exp Bool | If : forall t, exp Bool -> exp t -> exp t -> exp t | Pair : forall t1 t2, exp t1 -> exp t2 -> exp (Prod t1 t2) | Fst : forall t1 t2, exp (Prod t1 t2) -> exp t1 | Snd : forall t1 t2, exp (Prod t1 t2) -> exp t2. Fixpoint typeDenote (t : type) : Set := match t with | Nat => nat | Bool => bool | Prod t1 t2 => typeDenote t1 * typeDenote t2 end%type. Fixpoint expDenote t (e : exp t) : typeDenote t := match e with | NConst n => n | Plus e1 e2 => expDenote e1 + expDenote e2 | Eq e1 e2 => if eq_nat_dec (expDenote e1) (expDenote e2) then true else false | BConst b => b | And e1 e2 => expDenote e1 && expDenote e2 | If _ e' e1 e2 => if expDenote e' then expDenote e1 else expDenote e2 | Pair _ _ e1 e2 => (expDenote e1, expDenote e2) | Fst _ _ e' => fst (expDenote e') | Snd _ _ e' => snd (expDenote e') end. Derive NoConfusion EqDec for type. Derive Signature NoConfusion for exp. Equations smartplus(e1 : exp Nat)(e2: exp Nat) : exp Nat := smartplus (NConst n1) (NConst n2) := NConst (n1 + n2) ; smartplus e1 e2 := Plus e1 e2. Equations smarteq(e1 : exp Nat)(e2: exp Nat) : exp Bool := smarteq (NConst n1) (NConst n2) := BConst (if (eq_nat_dec n1 n2) then true else false) ; smarteq e1 e2 := Eq e1 e2. 
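(* Added sanity check (a sketch, not part of the original file): the smart
   constructors fold closed constants; we unfold through the generated [simp]
   rules since the definitions are not declared transparent in this file. *)
Goal smartplus (NConst 1) (NConst 2) = NConst 3.
Proof. now simp smartplus. Qed.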
Equations smartand(e1 : exp Bool)(e2: exp Bool) : exp Bool := smartand (BConst n1) (BConst n2) := BConst (n1 && n2) ; smartand e1 e2 := And e1 e2. Equations smartif {t} (e1 : exp Bool)(e2: exp t)(e3: exp t) : exp t := smartif (BConst n1) e2 e3 := if n1 then e2 else e3 ; smartif e1 e2 e3 := If e1 e2 e3. Equations smartfst {t1 t2} (e: exp (Prod t1 t2)) : exp t1 := smartfst (Pair e1 e2) := e1; smartfst e := Fst e. Equations smartsnd {t1 t2} (e: exp (Prod t1 t2)) : exp t2 := smartsnd (Pair e1 e2) := e2; smartsnd e := Snd e. Equations cfold {t} (e: exp t) : exp t := cfold (NConst n) := NConst n ; cfold (Plus e1 e2) := smartplus (cfold e1) (cfold e2); cfold (Eq e1 e2) := smarteq (cfold e1) (cfold e2); cfold (BConst x) := BConst x; cfold (And e1 e2) := smartand (cfold e1) (cfold e2); cfold (If e1 e2 e3) := smartif (cfold e1) (cfold e2) (cfold e3); cfold (Pair e1 e2) := Pair (cfold e1) (cfold e2); cfold (Fst e) := smartfst (cfold e); cfold (Snd e) := smartsnd (cfold e). Lemma cfoldcorrect: forall t (e: exp t), expDenote e = expDenote (cfold e). intros; funelim (cfold e); simpl; intros; try reflexivity; repeat match goal with [ H : expDenote ?e = _ |- context[expDenote ?e]] => rewrite H end; try (match goal with [ |- _ = expDenote ?f] => funelim f end); simpl; repeat match goal with [ H : _ = cfold ?e |- _] => rewrite <- H; simpl end; try reflexivity; repeat match goal with [ |- context[if ?e then _ else _]] => destruct e end; try reflexivity. Qed. Print Assumptions cfoldcorrect. Coq-Equations-1.3.1-8.20/test-suite/daaa.v000066400000000000000000000005151463127417400177660ustar00rootroot00000000000000Require Import Equations. Inductive A : Set := a : A | d : A -> A -> A -> A. Equations action (x : A) : A := action (d a a a) := (d a a a); action (d x y z) := a; action a := a. Equations lem (x : A) (p : x <> d a a a) : action x = a := lem (d a a a) p := False_rect _ (p eq_refl); lem (d x y z) p := eq_refl; lem a p := eq_refl.Coq-Equations-1.3.1-8.20/test-suite/depelim.v000066400000000000000000000006361463127417400205230ustar00rootroot00000000000000From Equations Require Import Equations. Goal forall x : nat, let y := S x in let z := S y in x = x. Proof. intros. dependent elimination x. exact eq_refl. exact eq_refl. Defined. Inductive ind : nat -> Set := cstr : ind 0. Goal forall (x : nat) (i : ind x), let y := S x in let z := S y in x = x. Proof. intros. set(foo := 0). move foo after x. dependent elimination i. exact eq_refl. Defined.Coq-Equations-1.3.1-8.20/test-suite/divmod.v000066400000000000000000000012521463127417400203610ustar00rootroot00000000000000From Coq Require Import Arith Lia. From Equations Require Import Equations. Equations? Div (x y : nat) : nat by wf x lt := { Div x 0 := 0 ; Div x y with le_lt_dec y x := { | left _ := 1 + Div (x - y) y ; | right _ := 0 } } . Proof. lia. Qed. Equations? Mod (x y : nat) : nat by wf x lt := { Mod x 0 := 0 ; Mod x y with le_lt_dec y x := { | left _ := Mod (x - y) y ; | right _ := x } } . Proof. lia. Qed. Fact Div_Mod x y : x = S y * Div x (S y) + Mod x (S y). Proof. induction x as [x IH] using lt_wf_ind. simp Div Mod. Opaque mult. destruct (le_lt_dec _ _) as [H|H]; cbn. - assert (x - S y < x) as IH1%IH by lia. lia. - lia. Qed.Coq-Equations-1.3.1-8.20/test-suite/eqdec_error.v000066400000000000000000000003471463127417400213750ustar00rootroot00000000000000Require Import Coq.Numbers.BinNums. From Equations Require Import Equations. Fail Derive EqDec for positive. Derive NoConfusion EqDec for positive. #[export] Instance positive_eqdec' : EqDec positive. 
Proof. apply _. Defined. Coq-Equations-1.3.1-8.20/test-suite/f91.v000066400000000000000000000023031463127417400174740ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Require Import Program. Require Import Equations Bvector List. Require Import Relations. Require Import Lia. Require Import Arith Wf_nat. Set Program Mode. Arguments minus : simpl never. Ltac destruct_lt_dec := match goal with [ H : le_lt_dec _ _ = _ |- _ ] => destruct H end. Equations? f91 (n : nat) : { m : nat | if le_lt_dec n 100 then m = 91 else m = n - 10 } by wf (101 - n) lt := f91 n with le_lt_dec n 100 := { | left H := f91 (f91 (n + 11)) ; | right H := (n - 10) }. Proof. all:hnf. 2-3:destruct f91; simpl. 3:destruct f91; simpl. 1:lia. all:repeat destruct le_lt_dec; lia. (* FIXME slow because big proof terms are generated until the unfolding equation is proven *) Defined. Coq-Equations-1.3.1-8.20/test-suite/fin.v000066400000000000000000000270001463127417400176520ustar00rootroot00000000000000Require Import Program Equations.Prop.EqDecInstances Equations.Prop.Equations. Import Sigma_Notations. Local Open Scope equations_scope. Set Equations Transparent. Set Keyed Unification. Inductive fin : nat -> Set := | fz : forall {n}, fin (S n) | fs : forall {n}, fin n -> fin (S n). Derive Signature NoConfusion NoConfusionHom for fin. Inductive ilist (A : Set) : nat -> Set := | Nil : ilist A 0 | Cons : forall {n}, A -> ilist A n -> ilist A (S n). Arguments Nil {A}. Arguments Cons {A n} _ _. Derive Signature NoConfusion for ilist. Equations fin_to_nat {n : nat} (i : fin n) : nat := fin_to_nat fz := 0; fin_to_nat (fs j) := S (fin_to_nat j). Derive Signature for le. Scheme le_dep := Induction for le Sort Prop. Set Equations With UIP. #[export] Instance le_uip m n : UIP (m <= n). Proof. intros x. induction x using le_dep; simplify_dep_elim; reflexivity. Defined. Derive Subterm for nat. Lemma le_subterm m n : m <= n -> (m = n \/ nat_subterm m n). Proof. induction 1. - left; reflexivity. - intuition subst. right; constructor. constructor. right. eapply Subterm.clos_trans_stepr; eauto. constructor. Qed. Lemma well_founded_antisym {A} {R : A -> A -> Prop}{wfR : WellFounded R} : forall x y : A, R x y -> R y x -> False. Proof. intros x y Rxy Ryx. red in wfR. induction (wfR y) as [y accy IHy] in x, Rxy, Ryx. specialize (IHy _ Rxy). apply (IHy _ Ryx Rxy). Qed. Lemma le_Sn_n n : S n <= n -> False. Proof. intros. apply le_subterm in H as [Heq|Hlt]. depelim Heq. apply well_founded_antisym in Hlt. auto. repeat constructor. Qed. Lemma le_hprop m n : forall e e' : m <= n, e = e'. Proof. induction e using le_dep. intros e'. DepElim.depelim e'. constructor. exfalso; now apply le_Sn_n in e'. intros. depelim e'. exfalso; clear IHe; now apply le_Sn_n in e. now rewrite (IHe e'). Qed. Lemma fin_lt_n : forall (n : nat) (i : fin n), fin_to_nat i < n. Proof. intros. funelim (fin_to_nat i). - apply -> PeanoNat.Nat.succ_le_mono; apply PeanoNat.Nat.le_0_l. - apply -> PeanoNat.Nat.succ_lt_mono; assumption. Defined. Equations? nat_to_fin {n : nat} (m : nat) (p : m < n) : fin n := nat_to_fin (n:=(S n)) 0 _ := fz; nat_to_fin (n:=(S n)) (S m) p := fs (nat_to_fin m _). Proof. 
apply PeanoNat.Nat.succ_lt_mono; assumption. Defined. Set Program Mode. Equations? fin_to_nat_bound {n : nat} (i : fin n) : {m : nat | m < n} := fin_to_nat_bound fz := 0; fin_to_nat_bound (fs j) := let (m, p) := fin_to_nat_bound j in (S m). Proof. - apply -> PeanoNat.Nat.succ_le_mono; apply PeanoNat.Nat.le_0_l. - apply -> PeanoNat.Nat.succ_lt_mono; assumption. Defined. Arguments exist {A} {P} _ _. Equations? nat_bound_to_fin (n : nat) (m : {m : nat | m < n}) : fin n := nat_bound_to_fin 0 (exist _ !); nat_bound_to_fin (S n') (exist 0 _) := fz; nat_bound_to_fin (S n') (exist (S m) p) := fs (nat_bound_to_fin _ m). Proof. auto with arith || inversion p. Defined. Lemma fin__nat : forall (n : nat) (m : nat) (p : m < n), fin_to_nat (nat_to_fin m p) = m. Proof. intros. funelim (nat_to_fin m p); simp fin_to_nat. reflexivity. simpl. now rewrite H. Qed. Lemma nat__fin : forall (n : nat) (i : fin n), nat_to_fin (fin_to_nat i) (fin_lt_n n i) = i. Proof. intros. funelim (fin_to_nat i); simpl. reflexivity. f_equal. rewrite <- H at 4. f_equal. apply le_hprop. Qed. Equations iget {A : Set} {n : nat} (l : ilist A n) (i : fin n) : A := iget (Cons x t) fz := x; iget (Cons _ t) (fs j) := iget t j. Equations isnoc {A : Set} {n : nat} (l : ilist A n) (x : A) : ilist A (S n) := isnoc Nil x := Cons x Nil; isnoc (Cons y t) x := Cons y (isnoc t x). Lemma append_get : forall (A : Set) (n : nat) (l : ilist A n) (x : A), iget (isnoc l x) (nat_to_fin n (PeanoNat.Nat.lt_succ_diag_r n)) = x. Proof. induction n ; intros. - depelim l. now simp isnoc nat_to_fin iget. - depelim l. simp isnoc nat_to_fin iget. unfold nat_to_fin_obligation_1. etransitivity; [|apply (IHn l)]; do 2 f_equal; apply le_hprop. Qed. Equations convert_ilist {A : Set} {n m : nat} (p : n = m) (l : ilist A n) : ilist A m := convert_ilist p Nil with p => { | eq_refl := Nil }; convert_ilist p (Cons a l) with p => { | eq_refl := Cons a (convert_ilist eq_refl l) }. Transparent convert_ilist. Lemma convert_ilist_refl {A} (n : nat) (l : ilist A n) : convert_ilist eq_refl l = l. Proof. induction l. reflexivity. simpl. now rewrite IHl. Defined. Lemma convert_ilist_trans : forall {A : Set} {n m o : nat} (p : n = m) (r : m = o) (l : ilist A n), convert_ilist r (convert_ilist p l) = convert_ilist (eq_trans p r) l. Proof. intros. simplify_eqs. now rewrite !convert_ilist_refl. Qed. #[export] Hint Rewrite @convert_ilist_refl @convert_ilist_trans : convert_ilist. Import PeanoNat.Nat. Equations irev_aux {A : Set} {i j : nat} (l : ilist A i) (acc : ilist A j) : ilist A (i + j) := irev_aux Nil acc := acc; irev_aux (Cons (n:=n) x l) acc with eq_sym (add_succ_comm n j), (S n + j) := { | refl_equal | ?(n + S j) := irev_aux l (Cons x acc) }. #[local] Obligation Tactic := idtac. Equations? irev {A : Set} {n : nat} (l : ilist A n) : ilist A n := irev l := irev_aux l Nil. (* FIXME bug with 3 refines *) (* { | rec with eq_sym (add_0_r n) := *) (* { | Heq := _ } }. *) apply add_0_r. Defined. Ltac match_refl := match goal with | [ |- context[ match ?P with _ => _ end ] ] => rewrite UIP_refl with (p := P) end. Example rev_ex : forall (A : Set) (x y : A), irev (Cons x (Cons y Nil)) = Cons y (Cons x Nil). Proof. intros. unfold irev. simp irev_aux. compute. now repeat (match_refl; compute; simp irev_aux). Qed. Equations iapp {A : Set} {n m : nat} (l1 : ilist A n) (l2 : ilist A m) : ilist A (n + m) := iapp Nil l := l; iapp (Cons x t) l := Cons x (iapp t l). Lemma iapp_eq {A : Set} (l1 l1' l2 l2' : Σ n, ilist A n) : l1 = l1' -> l2 = l2' -> (_, iapp l1.2 l2.2) = (_, iapp l1'.2 l2'.2). 
Proof. now simplify *. Defined. Lemma iapp_cons : forall (A : Set) (i j : nat) (l1 : ilist A i) (l2 : ilist A j) (x : A), iapp (Cons x l1) l2 = Cons x (iapp l1 l2). Proof. now simp iapp. Qed. Notation "p # t" := (eq_rect _ _ t _ p) (right associativity, at level 65) : equations_scope. Lemma rev_aux_app_hetero : forall (A : Set) (i j1 j2 : nat) (l : ilist A i) (acc1 : ilist A j1) (acc2 : ilist A j2), (_, irev_aux l (iapp acc1 acc2)) = (_, iapp (irev_aux l acc1) acc2). Proof. intros. funelim (irev_aux l acc1). - simpl. now simp irev_aux iapp. - simp irev_aux. destruct (eq_sym (add_succ_comm n (j + j2))). simpl. specialize (H _ acc2). rewrite H. clear H. refine (iapp_eq (n + S j, _) (S n + j, _) (_, _) (_, _) _ _); try constructor. clear Heq0. unshelve refine (DepElim.pack_sigma_eq _ _). exact (eq_sym Heq). reflexivity. Defined. Equations hetero_veq {A} {n m : nat} (v : ilist A n) (w : ilist A m) : Type := hetero_veq v w := Σ (e : n = m), e # v = w. Notation "x ~=~ y" := (hetero_veq x y) (at level 90). Notation "p # e" := (Logic.transport _ p e). Section hetero_veq. Context {A : Set}. Context {n m : nat}. Lemma hetero_veq_refl (v : ilist A n) : hetero_veq v v. Proof. red. exists refl_equal. constructor. Defined. Lemma hetero_veq_sym (v : ilist A n) (w : ilist A m) : hetero_veq v w -> hetero_veq w v. Proof. red. intros [eq Heq]. destruct eq. destruct Heq. exists refl_equal. constructor. Defined. Lemma hetero_veq_trans {k} (v : ilist A n) (w : ilist A m) (x : ilist A k) : hetero_veq v w -> hetero_veq w x -> hetero_veq v x. Proof. red. intros [eq Heq]. destruct eq. destruct Heq. intros [eq Heq]. destruct eq, Heq. exists refl_equal. constructor. Defined. Set Equations With UIP. Lemma iapp_hetero_cong {n' m'} (v : ilist A n) (v' : ilist A n') (w : ilist A m) (w' : ilist A m') : hetero_veq v v' -> hetero_veq w w' -> hetero_veq (iapp v w) (iapp v' w'). Proof. red. intros [eq Heq]. depelim eq. destruct Heq. intros [eq Heq]. depelim eq. destruct Heq. exists refl_equal. constructor. Defined. End hetero_veq. #[export] Hint Resolve hetero_veq_refl hetero_veq_sym hetero_veq_trans : hetero_veq. Unset Program Mode. Lemma hetero_veq_transport_right {A} {n m} (v : ilist A n) (w : ilist A m) (eq : m = n) : hetero_veq v w -> hetero_veq v (eq # w). Proof. destruct eq. simpl. trivial. Qed. Lemma hetero_veq_transport_right' {A} {n m} (v : ilist A n) (eq : n = m) : hetero_veq v (eq # v). Proof. destruct eq. simpl. apply hetero_veq_refl. Qed. Lemma hetero_veq_transport_left {A} {n m} (v : ilist A n) (w : ilist A m) (eq : n = m) : hetero_veq v w -> hetero_veq (eq # v) w. Proof. destruct eq. simpl. trivial. Qed. #[export] Hint Resolve hetero_veq_transport_left hetero_veq_transport_right : hetero_veq. Lemma rev_aux_app_hetero_eq : forall (A : Set) (i j1 j2 : nat) (l : ilist A i) (acc1 : ilist A j1) (acc2 : ilist A j2), (irev_aux l (iapp acc1 acc2)) ~=~ iapp (irev_aux l acc1) acc2. Proof. intros. funelim (irev_aux l acc1). - simpl. apply hetero_veq_refl. - simp irev_aux. destruct (eq_sym (add_succ_comm n (j + j2))). simpl. specialize (X _ acc2). simp iapp in X. eapply hetero_veq_trans. eapply X. apply (iapp_hetero_cong (n:=n + S j) (n' := S n + j)); auto with hetero_veq. apply hetero_veq_transport_right'. Qed. Equations irev' {A : Set} {n : nat} (l : ilist A n) : ilist A n := irev' Nil := Nil; irev' (Cons x t) := isnoc (irev' t) x. Lemma isnoc_irev A n a (l : ilist A n) : isnoc (irev l) a = irev (Cons a l). Proof. (* Exercise ! *) Admitted. Lemma rev__rev' : forall (A : Set) (i : nat) (l : ilist A i), irev l = irev' l. 
Proof. intros. funelim (irev' l). unfold irev. simplify_eqs. simp irev_aux. unfold irev. Admitted. Equations rev_range (n : nat) : ilist nat n := rev_range 0 := Nil; rev_range (S n) := Cons n (rev_range n). Equations(noind) negb (b : bool) : bool := negb true := false; negb false := true. Inductive fle : forall {n}, fin n -> fin n -> Set := | flez : forall {n j}, @fle (S n) fz j | fles : forall {n i j}, fle i j -> @fle (S n) (fs i) (fs j). Derive Signature for fle. Equations fin0_empty (i : fin 0) : False := { }. Transparent NoConfusionHom_fin. Equations fle_trans {n : nat} {i j k : fin n} (p : fle i j) (q : fle j k) : fle i k := fle_trans flez _ := flez; fle_trans (fles p') (fles q') := fles (fle_trans p' q'). #[export] Hint Unfold NoConfusion.noConfusion_nat_obligation_1 : equations. Derive DependentElimination EqDec for fin. Derive Signature for Logic.eq. Print Assumptions fin_eqdec. Derive NoConfusion NoConfusionHom EqDec Subterm for fle. Print Assumptions fle_eqdec. #[local] Obligation Tactic := program_simpl; try typeclasses eauto 10 with rec_decision subterm_relation. Equations fle_trans' {n : nat} {i j : fin n} (p : fle i j) {k} (q : fle j k) : fle i k by wf (Signature.signature_pack p) (@fle_subterm) := fle_trans' flez _ := flez; fle_trans' (fles p') (fles q') := fles (fle_trans' p' q'). Print Assumptions fle_trans'. (* Extraction fle_trans'. *) Equations lookup {A n} (f : fin n) (v : ilist A n) : A := lookup fz (Cons x xs) := x; lookup (fs f) (Cons _ xs) := lookup f xs. Inductive vforall {A : Set}(P : A -> Type) : forall {n}, ilist A n -> Type := | VFNil : vforall P Nil | VFCons : forall {n} x (xs : ilist A n), P x -> vforall P xs -> vforall P (Cons x xs). Derive Signature for vforall. Equations vforall_lookup {n} {A : Set} {P : A -> Type} {xs : ilist A n} (idx : fin n) : vforall P xs -> P (lookup idx xs) := vforall_lookup fz (VFCons _ pf _) := pf ; vforall_lookup (fs ix) (VFCons _ _ ps) := vforall_lookup ix ps. Coq-Equations-1.3.1-8.20/test-suite/fle_trans.v000066400000000000000000000045011463127417400210540ustar00rootroot00000000000000(** printing NoConfusion %\coqdocind{NoConfusion}% *) (** printing elimination %\coqdoctac{elimination}% *) (** printing Derive %\coqdockw{Derive}% *) (* begin hide *) From Equations Require Import Equations. Set Implicit Arguments. Inductive fin : nat -> Set := | fz : forall n, fin (S n) | fs : forall n, fin n -> fin (S n). Arguments fz {_}. (* end hide *) (** We define $\le$ by $\cstr{fz} \le i$ and $i \le j "->" \cstr{fs}\ i \le \cstr{fs}\ j$. *) Inductive fle : forall n, fin n -> fin n -> Set := | flez : forall n (j : fin (S n)), fle fz j | fles : forall n (i j : fin (S n)), fle i j -> fle (fs i) (fs j). (** We will need a NoConfusion principle for [fin], which we derive automatically, see section %\ref{sec:derive-noconf}%. *) Derive Signature NoConfusionHom for fin. (** We could prove the transitivity of the relation [fle] by defining a recursive function with %\Equations%, but here we will instead define a [Fixpoint] and use our [dependent elimination] tactic: *) Fixpoint fle_trans {n : nat} {i j k : fin n} (p : fle i j) (q : fle j k) : fle i k. (** We use the [dependent elimination] tactic to eliminate [p], providing a pattern for each case. We could also let %\Equations% generate names for the bound variables. *) dependent elimination p as [flez _ | @fles n i j p] ; [ apply flez | ]. (** We know that [q] has type [fle (fs _) k]. 
Therefore, it cannot be [flez] and we must only provide one pattern for the single relevant branch, using: *) dependent elimination q as [fles q]. (** The end of the proof is straightforward. *) (* begin hide *) apply fles. apply fle_trans with (1 := p) (2 := q). Qed. (* end hide *) (** We can check that this definition does not make use of any axiom, contrary to what we would obtain by using [dependent destruction] from Coq's standard library. *) (* begin hide *) Print Assumptions fle_trans. (** %\texttt{Closed under the global context}% *) (* end hide *) Require Import Program. (* begin hide *) Fixpoint fle_trans' {n : nat} {i j k : fin n} (p : fle i j) (q : fle j k) : fle i k. Proof. dependent destruction p. - apply flez. - dependent destruction q. apply fles. apply fle_trans with (1 := p) (2 := q). Qed. Print Assumptions fle_trans'. (* Axioms: JMeq_eq : forall (A : Type) (x y : A), x ~= y -> x = y *) (* end hide *) Coq-Equations-1.3.1-8.20/test-suite/funelim_ack.v000066400000000000000000000007621463127417400213610ustar00rootroot00000000000000 From Equations Require Import CoreTactics. From Equations.Prop Require Import DepElim. From Equations Require Import Equations. Set Equations Transparent. Equations ack (m n : nat) : nat by wf (m, n) (Equations.Prop.Subterm.lexprod _ _ lt lt) := ack 0 n := S n; ack (S m) 0 := ack m 1; ack (S m) (S n) := ack m (ack (S m) n). Definition ack_incr {m n} : ack m n < ack m (n+1). Proof. (* Was looping due to trying to reduce in the equality hypothesis *) funelim (ack m n) eqack. Admitted.Coq-Equations-1.3.1-8.20/test-suite/gcd.v000066400000000000000000000026551463127417400176440ustar00rootroot00000000000000From Equations Require Import Equations. Require Import Relations. Require Import Arith Lia. Set Keyed Unification. Ltac subst_lets := repeat match goal with | id := _ |- _ => subst id end. #[local] Hint Extern 5 => simpl; subst_lets; lia : rec_decision. #[local] Obligation Tactic := Equations.CoreTactics.equations_simpl; try typeclasses eauto with rec_decision. Equations gcd (x y : nat) : nat by wf (x + y) lt := gcd 0 x := x ; gcd x 0 := x ; gcd x y with gt_eq_gt_dec x y := { | inleft (left ygtx) := gcd x (y - x) ; | inleft (right refl) := x ; | inright xgty := gcd (x - y) y }. Transparent gcd. Eval compute in gcd 18 84. Require Import ExtrOcamlBasic. Extraction Inline pr1 pr2. Extraction gcd. (* Extraction gcd_unfold. *) Lemma gcd_same x : gcd x x = x. Proof. funelim (gcd x x); now (try (exfalso; lia)). Qed. Lemma gcd_spec0 a : gcd a 0 = a. Proof. funelim (gcd a 0); reflexivity. Qed. #[local] Hint Rewrite gcd_spec0 : gcd. Lemma mod_minus a b : b <> 0 -> b < a -> (a - b) mod b = a mod b. Proof. intros. replace a with ((a - b) + b) at 2 by lia. rewrite <- Nat.Div0.add_mod_idemp_r; auto. rewrite Nat.Div0.mod_same; auto. Qed. Lemma gcd_spec1 a b: 0 <> b -> gcd a b = gcd (Nat.modulo a b) b. Proof. funelim (gcd a b); intros. - now rewrite Nat.Div0.mod_0_l. - reflexivity. - now rewrite (Nat.mod_small (S n) (S n0)). - now rewrite refl, Nat.Div0.mod_same. - now rewrite H, mod_minus. Qed. Coq-Equations-1.3.1-8.20/test-suite/innacs.v000066400000000000000000000025321463127417400203540ustar00rootroot00000000000000Require Import Equations. Require Import Vector. Require Import fin. Notation vector := Vector.t. Arguments Vector.nil {A}. Arguments Vector.cons {A} _ {n}. Notation vnil := Vector.nil. Notation vcons := Vector.cons. Fail Equations nth {A n} (v : vector A n) (f : fin n) : A := nth (cons x v) ?(fz) := x; nth (cons _ v) (fs f) := nth v f. 
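(* Added note (a sketch, not part of the original file): the definition above
   is rejected because [?(fz)] claims the pattern is forced by typing, while
   nothing determines the second argument to be [fz] here; it is a genuine
   case split, as in the accepted definitions below that use a plain [fz]
   pattern. *)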
(* Intern is correct *) Fail Equations nth {A n} (v : vector A n) (f : fin n) : A := nth (cons x v) ?(fzblxba) := x; nth (cons _ v) (fs f) := nth v f. (* Typing of innaccessibles is correct *) Fail Equations nth {A n} (v : vector A n) (f : fin n) : A := nth (cons x v) ?(fs x) := x; nth (cons _ v) (fs f) := nth v f. (* Innaccessibles match only inaccessibles *) Fail Equations nth {A n} (v : vector A n) (f : fin n) : A := nth (cons x v) ?(_) := x; nth (cons _ v) (fs f) := nth v f. (** Correct inaccessible computation *) Equations nth {A n} (v : vector A n) (f : fin n) : A := nth (cons x v) fz := x; nth (cons _ (n:=?(n)) v) (@fs n f) := nth v f. (** Correct innaccessible computation: variables do not need to be innaccessible, they are just determined by typing and do not determine the computational behavior. They imply no conversion or splitting when evaluating nth'. *) Equations nth' {A n} (v : vector A n) (f : fin n) : A := nth' (cons x v) fz := x; nth' (cons _ (n:=n) v) (@fs n f) := nth' v f. Coq-Equations-1.3.1-8.20/test-suite/interval.v000066400000000000000000000020771463127417400207310ustar00rootroot00000000000000From Equations Require Import Equations. Require Import List Arith Lia. Import ListNotations. Set Keyed Unification. Set Equations Transparent. Equations? interval x y : list nat by wf (y - x) lt := interval x y with lt_dec x y := { | left ltxy => x :: interval (S x) y; | right nltxy => [] }. Proof. lia. Defined. Lemma interval_empty x : interval x x = []. Proof. funelim (interval x x). clear Heq; now apply Nat.lt_irrefl in ltxy. reflexivity. Qed. Lemma interval_large x y : ~ x < y -> interval x y = []. Proof. funelim (interval x y); clear Heq; intros; now try lia. Qed. Lemma interval_trans x y z : x < y < z -> interval x y ++ interval y z = interval x z. Proof. revert z; funelim (interval x y); intros z H'; clear Heq. - simpl. destruct (lt_dec (S x) y); simpl. rewrite H; try lia. rewrite (interval_equation_1 x z). destruct lt_dec; simpl; trivial. elim n. lia. assert (y = S x) as -> by lia. rewrite interval_empty. simpl. rewrite (interval_equation_1 x z). destruct (lt_dec x z); trivial. elim n0; lia. - elim nltxy; lia. Qed. 
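(* Added sanity check (a sketch, not part of the original file): a direct
   instance of [interval_large] on a reversed interval. *)
Goal interval 5 2 = [].
Proof. apply interval_large. lia. Qed.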
Coq-Equations-1.3.1-8.20/test-suite/issues/000077500000000000000000000000001463127417400202235ustar00rootroot00000000000000Coq-Equations-1.3.1-8.20/test-suite/issues/coq_pr_16995.v000066400000000000000000000116601463127417400224560ustar00rootroot00000000000000(* -*- mode: coq; coq-prog-args: ("-emacs" "-q" "-w" "-deprecated-native-compiler-option" "-R" "/github/workspace/builds/coq/coq-failing/_build_ci/equations/test-suite" "TestSuite" "-R" "/github/workspace/builds/coq/coq-failing/_build_ci/equations/theories" "Equations" "-Q" "/github/workspace/cwd" "Top" "-Q" "/github/workspace/builds/coq/coq-failing/_install_ci/lib/coq/user-contrib/Ltac2" "Ltac2" "-I" "/github/workspace/builds/coq/coq-failing/_build_ci/equations/src" "-top" "TestSuite.LogicType" "-native-compiler" "no") -*- *) (* File reduced by coq-bug-minimizer from original input, then from 119 lines to 7 lines, then from 20 lines to 75 lines, then from 80 lines to 9 lines, then from 22 lines to 165 lines, then from 170 lines to 83 lines, then from 96 lines to 207 lines, then from 212 lines to 106 lines, then from 119 lines to 931 lines, then from 931 lines to 106 lines, then from 111 lines to 106 lines *) (* coqc version 8.18+alpha compiled with OCaml 4.09.0 coqtop version runner-jztamce-project-6138686-concurrent-0:/builds/coq/coq/_build/default,(HEAD detached at b5df8e0) (b5df8e0be892b66f6f48f588ceb3130c7e853be2) Expected coqc runtime on this file: 0.314 sec *) Require Equations.Type.DepElim. Register Equations.Init.sigma as equations.sigma.type. Register Equations.Init.sigmaI as equations.sigma.intro. Register Equations.Init.pr1 as equations.sigma.pr1. Register Equations.Init.pr2 as equations.sigma.pr2. Register Logic.Id as equations.equality.type. Register Logic.id_refl as equations.equality.refl. Register Logic.Empty as equations.bottom.type. Register Logic.Empty_case as equations.bottom.case. Register Logic.Empty_rect as equations.bottom.elim. Register Coq.Init.Datatypes.unit as equations.top.type. Register Equations.Type.Logic.unit_rect as equations.top.elim. Register DepElim.solution_left as equations.depelim.solution_left. Register DepElim.solution_right_dep as equations.depelim.solution_right_dep. Register Classes.NoConfusionPackage as equations.noconfusion.class. Register Classes.apply_noConfusion as equations.depelim.apply_noConfusion. Register DepElim.simplification_sigma1_dep as equations.depelim.simpl_sigma_dep. Register DepElim.opaque_ind_pack_inv as equations.depelim.opaque_ind_pack_eq_inv. Import Equations.CoreTactics. Set Warnings "-notation-overridden". Import Equations.Type.Logic. Import Equations.Type.DepElim. Ltac solve_noconf_prf := intros; on_last_hyp ltac:(fun id => destruct id) ; on_last_hyp ltac:(fun id => destruct_sigma id; destruct id) ; constructor. Ltac solve_noconf_inv_eq a b := destruct_sigma a; destruct_sigma b; destruct a ; depelim b; simpl in * |-; on_last_hyp ltac:(fun id => hnf in id; destruct_tele_eq id || destruct id); solve [constructor]. Ltac solve_noconf_inv := intros; match goal with |- ?R ?a ?b => destruct_sigma a; destruct_sigma b; destruct a ; depelim b; simpl in * |-; on_last_hyp ltac:(fun id => hnf in id; destruct_tele_eq id || destruct id); solve [constructor] | |- @Id _ (?f ?a ?b _) _ => solve_noconf_inv_eq a b end. Ltac solve_noconf_inv_equiv := intros; on_last_hyp ltac:(fun id => destruct id) ; on_last_hyp ltac:(fun id => destruct_sigma id; destruct id) ; simpl; constructor. 
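(* Added comment (a sketch, not part of the original file): the next tactic
   dispatches on where an [Id] equation occurs (in a hypothesis, in the goal,
   or in both) and defers to the corresponding helper tactic defined above. *)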
Ltac solve_noconf := simpl; intros; match goal with [ H : @Id _ _ _ |- @Id _ _ _ ] => try solve_noconf_inv_equiv | [ H : @Id _ _ _ |- _ ] => try solve_noconf_prf | [ |- @Id _ _ _ ] => try solve_noconf_inv end. Ltac solve_noconf_hom_inv_eq a b := destruct_sigma a; destruct_sigma b; destruct a ; depelim b; simpl in * |-; on_last_hyp ltac:(fun id => hnf in id; destruct_tele_eq id || depelim id); solve [constructor || simpl_equations; constructor]. Ltac solve_noconf_hom_inv := intros; match goal with | |- @Id _ (?f ?a ?b _) _ => solve_noconf_hom_inv_eq a b | |- ?R ?a ?b => destruct_sigma a; destruct_sigma b; destruct a ; depelim b; simpl in * |-; on_last_hyp ltac:(fun id => hnf in id; destruct_tele_eq id || depelim id); solve [constructor || simpl_equations; constructor] end. Ltac solve_noconf_hom_inv_equiv := intros; on_last_hyp ltac:(fun id => destruct id) ; on_last_hyp ltac:(fun id => destruct_sigma id; depelim id) ; simpl; simpl_equations; constructor. Ltac solve_noconf_hom := simpl; intros; match goal with [ H : @Id _ _ _ |- @Id _ _ _ ] => try solve_noconf_hom_inv_equiv | [ H : @Id _ _ _ |- _ ] => try solve_noconf_prf | [ |- @Id _ _ _ ] => try solve_noconf_hom_inv end. Ltac Equations.Init.solve_noconf ::= solve_noconf. Ltac Equations.Init.solve_noconf_hom ::= solve_noconf_hom. Derive NoConfusion for unit bool nat option sum Datatypes.prod list sigT sig. Inductive vector@{i} (A : Type@{i}) : nat -> Type@{i} := | nil : vector A 0 | cons {n : nat} : A -> vector A n -> vector A (S n). Derive Signature NoConfusionHom for vector.Coq-Equations-1.3.1-8.20/test-suite/issues/issue100.v000066400000000000000000000025301463127417400217630ustar00rootroot00000000000000From Coq.Lists Require Import List. From Equations Require Import Equations. (* This type is from VST: * https://github.com/PrincetonUniversity/VST/blob/v2.1/floyd/compact_prod_sum.v#L13 *) Fixpoint compact_sum (T: list Type): Type := match T with | nil => unit | t :: nil => t | t :: T0 => (t + compact_sum T0)%type end. (* The rest is a nonsensical, just to give a minimalistic reproducible example *) Inductive Foo := | Sum : list Foo -> Foo. Equations foo_type (t : Foo) : Type := foo_type (Sum u) := compact_sum (List.map foo_type u). (* Moving val into the return type, rather than having it as an argument might be * unnecessary if https://github.com/mattam82/Coq-Equations/issues/73 was fixed *) Fail Equations do_foo (f : Foo) : forall (val : foo_type f), nat := { do_foo (Sum u) := fun val => do_foo_sum u val } where do_foo_sum (fs : list Foo) : forall val : compact_sum (List.map foo_type fs), nat by struct fs := { do_foo_sum nil := fun val => 0; (* Attempting to work around https://github.com/mattam82/Coq-Equations/issues/78 *) do_foo_sum (cons hd tl) with (fun val => do_foo_sum tl val) => { do_foo_sum (cons var nil) := fun val => do_foo var val; do_foo_sum (cons hd tl) do_foo_tl := fun val => match val with | inl v => do_foo hd v | inr vs => do_foo_tl vs end }}.Coq-Equations-1.3.1-8.20/test-suite/issues/issue104.v000066400000000000000000000026361463127417400217760ustar00rootroot00000000000000From Equations Require Import Equations. Require Import List. (* This type is from VST: * https://github.com/PrincetonUniversity/VST/blob/v2.1/floyd/compact_prod_sum.v#L13 *) Fixpoint compact_sum (T: list Type): Type := match T with | nil => unit | t :: nil => t | t :: T0 => (t + compact_sum T0)%type end. (* The rest is a nonsensical, just to give a minimalistic reproducible example *) Inductive foo := | Sum : (foo * list foo) -> foo. 
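(* Added example (a sketch, not part of the original issue test): [compact_sum]
   of a two-element list of types is just a binary sum, so [inl] injects into
   it. *)
Check (inl 0 : compact_sum (cons nat (cons bool nil))).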
Equations foo_type (t : foo) : Type := foo_type (Sum u) := (foo_type (fst u)) * (compact_sum (List.map foo_type (snd u))). (* Moving val into the return type, rather than having it as an argument might be unnecessary if https://github.com/mattam82/Coq-Equations/issues/73 was fixed *) Equations do_foo (f : foo) : forall (val : foo_type f), nat := { do_foo (Sum s) := fun val => (do_foo (fst s) (fst val)) + (do_sum (snd s) (snd val)) } where do_sum (fs : list foo) : forall val : compact_sum (List.map foo_type fs), nat by struct fs := { do_sum nil := fun val => 0; (* Attempting to work around https://github.com/mattam82/Coq-Equations/issues/78 *) do_sum (cons hd tl) with (fun val => do_sum tl val) => { do_sum (cons var nil) _ := fun val => do_foo var val; do_sum (cons hd tl) do_foo_tl := fun val => match val with | inl vh => do_foo hd vh | inr vstl => do_foo_tl vstl end }}.Coq-Equations-1.3.1-8.20/test-suite/issues/issue105.v000066400000000000000000000023561463127417400217760ustar00rootroot00000000000000From Equations Require Import Equations. Require Import List. (* This type is from VST: * https://github.com/PrincetonUniversity/VST/blob/v2.1/floyd/compact_prod_sum.v#L13 *) Fixpoint compact_sum (T: list Type): Type := match T with | nil => unit | t :: nil => t | t :: T0 => (t + compact_sum T0)%type end. (* The rest is a nonsensical, just to give a reproducible example *) Inductive foo := | Sum : (foo * list foo) -> foo. Equations foo_type (t : foo) : Type := foo_type (Sum u) := (foo_type (fst u)) * (compact_sum (List.map foo_type (snd u))). Equations do_foo (f : foo) : forall (val : foo_type f), nat by struct f := { do_foo (Sum (pair s1 s2)) := fun val => (do_foo s1 (fst val)) + (do_sum s1 (fst val) s2 (snd val)) } where do_sum (f : foo) (otherval : foo_type f) (fs : list foo) : forall val : compact_sum (List.map foo_type fs), nat by struct fs := { do_sum _ _ nil := fun val => 0; do_sum f otherval (cons hd tl) with (fun val => do_sum f otherval tl val) => { do_sum _ _ (cons var nil) _ := fun val => do_foo var val; do_sum _ _ (cons hd tl) do_foo_tl := fun val => match val with | inl vh => do_foo hd vh | inr vstl => do_foo_tl vstl end }}.Coq-Equations-1.3.1-8.20/test-suite/issues/issue106.v000066400000000000000000000031211463127417400217660ustar00rootroot00000000000000From Coq.Lists Require Import List. From Equations Require Import Equations. (* This type is from VST: https://github.com/PrincetonUniversity/VST/blob/v2.1/floyd/compact_prod_sum.v#L6 *) Fixpoint compact_prod (T: list Type): Type := match T with | nil => unit | t :: nil => t | t :: T0 => (t * compact_prod T0)%type end. (* The rest is a nonsensical, just to give a minimalistic reproducible example *) Inductive Foo := | Prod : list Foo -> Foo. Equations foo_type (t : Foo) : Type := foo_type (Prod fs) := compact_prod (List.map foo_type fs). 
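(* Added example (a sketch, not part of the original issue test):
   [compact_prod] of a two-element list of types is just a binary product. *)
Check ((0, true) : compact_prod (cons nat (cons bool nil))).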
(* Moving val into the return type, rather than having it as an argument might be unnecessary if https://github.com/mattam82/Coq-Equations/issues/73 was fixed *) Equations lookup (t:Foo) : forall (val: foo_type t) (what: list nat), option nat := { lookup (Prod ss) := fun val what => match what with nil=> None | cons hd tail => lookup_prod ss val hd tail end} where lookup_prod (ss: list Foo) (val : compact_prod (map foo_type ss)) (what_hd: nat) (what_tl: list nat) : option nat by struct ss := { lookup_prod nil _ _ _ := None; lookup_prod (cons shd stl) _ _ what_tl with (fun val what_hd => lookup_prod stl val what_hd what_tl) => { lookup_prod (cons shd nil) val 0 what_tl _ := lookup shd val what_tl; lookup_prod (cons shd nil) _ _ _ _ := None; lookup_prod (cons shd _) val 0 what_tl _ := lookup shd (fst val) what_tl; lookup_prod (cons _ _) val (S what_hd) _ what_stl_fun := what_stl_fun (snd val) what_hd}}. Definition check := lookup_elim.Coq-Equations-1.3.1-8.20/test-suite/issues/issue107.v000066400000000000000000000026221463127417400217740ustar00rootroot00000000000000From Equations Require Import Equations. Require Import List. (* The rest is a nonsensical, just to give a minimalistic reproducible example *) Inductive Foo := | Prod : list Foo -> Foo. Fixpoint compact_prod (T: list Type): Type := match T with | nil => unit | t :: nil => t | t :: T0 => (t * compact_prod T0)%type end. Equations foo_type (t : Foo) : Type := foo_type (Prod fs) := compact_prod (List.map foo_type fs). (* Moving val into the return type, rather than having it as an argument might be unnecessary if https://github.com/mattam82/Coq-Equations/issues/73 was fixed *) Equations lookup (t:Foo) : forall (val: foo_type t) (what: list nat), option nat := { lookup (Prod ss) := fun val what => match what with nil=> None | cons hd tail => lookup_prod ss val hd tail end} where lookup_prod (ss: list Foo) (val : compact_prod (map foo_type ss)) (what_hd: nat) (what_tl: list nat) : option nat by struct ss := { lookup_prod nil _ _ _ := None; lookup_prod (cons shd stl) _ _ what_tl with (fun val what_hd => lookup_prod stl val what_hd what_tl) => { lookup_prod (cons shd nil) val 0 what_tl _ := lookup shd val what_tl; lookup_prod (cons shd nil) _ _ _ _ := None; lookup_prod (cons shd _) val 0 what_tl _ := lookup shd (fst val) what_tl; lookup_prod (cons _ _) val (S what_hd) _ what_stl_fun := what_stl_fun (snd val) what_hd}}. Coq-Equations-1.3.1-8.20/test-suite/issues/issue107_2.v000066400000000000000000000031021463127417400222070ustar00rootroot00000000000000From Equations Require Import Equations. Require Import List. (* The rest is a nonsensical, just to give a minimalistic reproducible example *) Inductive Foo := | Prod : list Foo -> Foo. Fixpoint compact_prod (T: list Type): Type := match T with | nil => unit | t :: nil => t | t :: T0 => (t * compact_prod T0)%type end. Equations foo_type (t : Foo) : Type := foo_type (Prod fs) := compact_prod (List.map foo_type fs). 
(* Moving val into the return type, rather than having it as an argument might be unnecessary if https://github.com/mattam82/Coq-Equations/issues/73 was fixed *) Equations lookup (t:Foo) (val: foo_type t) (what: list nat) : option nat by struct t := { lookup (Prod ss) val nil := None; lookup (Prod ss) val (cons hd tl) := lookup_prod ss val hd tl } (* match what with nil=> None | cons hd tail => lookup_prod ss val hd tail end} *) where lookup_prod (ss: list Foo) (val : compact_prod (map foo_type ss)) (what_hd: nat) (what_tl: list nat) : option nat by struct ss := { lookup_prod nil _ _ _ := None; lookup_prod (cons shd stl) _ _ what_tl with (fun val what_hd => lookup_prod stl val what_hd what_tl) => { lookup_prod (cons shd nil) val 0 what_tl _ := lookup shd val what_tl; lookup_prod (cons shd nil) _ _ _ _ := None; lookup_prod (cons shd _) val 0 what_tl _ := lookup shd (fst val) what_tl; lookup_prod (cons _ _) val (S what_hd) _ what_stl_fun := what_stl_fun (snd val) what_hd}}. Coq-Equations-1.3.1-8.20/test-suite/issues/issue112.v000066400000000000000000000021161463127417400217660ustar00rootroot00000000000000From Coq.Lists Require Import List. From Equations Require Import Equations. (* This type is from VST: https://github.com/PrincetonUniversity/VST/blob/v2.1/floyd/compact_prod_sum.v#L6 *) Fixpoint compact_prod (T: list Type): Type := match T with | nil => unit | t :: nil => t | t :: T0 => (t * compact_prod T0)%type end. (* The rest is a nonsensical, just to give a minimalistic reproducible example *) Inductive foo := | List : list foo -> foo. Fixpoint foo_type (f:foo) : Type := match f with | List fs => compact_prod (map foo_type fs) end. Equations do_foo (t : foo) : option (foo_type t) := { do_foo (List ss) := do_list ss} where do_list (ts:list foo) : option (compact_prod (map foo_type ts)) := { do_list nil := Some tt; (* Attempting to work around https://github.com/mattam82/Coq-Equations/issues/78 and 108*) do_list (cons t ts) with (fun val1 => match val1, do_list ts with Some val1, Some vals => Some (val1, vals) | _, _ => None end) => { do_list (cons t nil) _ := do_foo t; do_list (cons t ts) do_tl := do_tl (do_foo t)}}.Coq-Equations-1.3.1-8.20/test-suite/issues/issue113.v000066400000000000000000000011351463127417400217670ustar00rootroot00000000000000From Coq.Lists Require Import List. From Equations Require Import Equations. Inductive foo := | List : list foo -> foo. Equations do_foo1 (t: foo) : nat := { do_foo1 (List ss) := do_foo1_list ss } where do_foo1_list (ts:list foo) : nat := { do_foo1_list nil := 0; do_foo1_list (cons t ts) := do_foo1 t + do_foo1_list ts } where do_foo2 (t : foo) : nat := { do_foo2 (List nil) := 0; do_foo2 (List (cons t ts)) := do_foo2 t } where do_foo2_list (ts:list foo) : nat := { do_foo2_list nil := 0; do_foo2_list (cons t ts) := do_foo2_list ts }. Definition check := do_foo1_elim.Coq-Equations-1.3.1-8.20/test-suite/issues/issue114.v000066400000000000000000000016411463127417400217720ustar00rootroot00000000000000From Coq.Lists Require Import List. From Equations Require Import Equations. (* This type is from VST: https://github.com/PrincetonUniversity/VST/blob/v2.1/floyd/compact_prod_sum.v#L6 *) Fixpoint compact_prod (T: list Type): Type := match T with | nil => unit | t :: nil => t | t :: T0 => (t * compact_prod T0)%type end. (* The rest is a nonsensical, just to give a minimalistic reproducible example *) Inductive foo := | List : list foo -> foo. Fixpoint foo_type (f:foo) : Type := match f with | List fs => compact_prod (map foo_type fs) end. 
Equations num (f:foo) (val:foo_type f) : nat := { num (List nil) val := 0; num (List (cons hd tl)) val := sum hd (num hd) tl val } where sum (f:foo) (numf: (foo_type f -> nat)) (fs : list foo) (val: compact_prod (map foo_type (f::fs))) : nat := { sum f numf nil val := numf val; sum f numf (cons hd tl) val := numf (fst val) + sum hd (num hd) tl (snd val)}.Coq-Equations-1.3.1-8.20/test-suite/issues/issue116.v000066400000000000000000000013311463127417400217700ustar00rootroot00000000000000From Coq.Lists Require Import List. From Equations Require Import Equations. Inductive foo := | Sum : list foo -> foo. Equations do_foo1 (t: foo) : nat (* by struct t *) := { do_foo1 (Sum u) := do_foo1_list u } where do_foo2 (t : foo) : nat by struct t := { do_foo2 (Sum nil) := 0; do_foo2 (Sum (l :: tl)) := do_foo2 l } where do_foo1_list (ts:list foo) : nat by struct ts := { do_foo1_list nil := 0; do_foo1_list (cons t ts) := do_foo1 t + (do_foo1_list ts)} where do_foo2_list (ts:list foo) : nat := { do_foo2_list nil := 0; do_foo2_list (cons t ts) with (do_foo2_list ts) => { do_foo2_list (cons t _) _ := (do_foo1 t) + (do_foo2 t)}}. Definition check := do_foo1_elim.Coq-Equations-1.3.1-8.20/test-suite/issues/issue117.v000066400000000000000000000012651463127417400217770ustar00rootroot00000000000000From Coq.Lists Require Import List. From Equations Require Import Equations. Inductive foo := | Sum : list foo -> foo. Equations do_foo1 (t: foo) : nat := { do_foo1 (Sum u) := do_foo1_list u } where do_foo1_list (ts:list foo) : nat by struct ts := { do_foo1_list nil := 0; do_foo1_list (cons t ts) := do_foo1 t + do_foo1_list2 ts } where do_foo1_list2 (fs : list foo) : nat := { do_foo1_list2 _ := 0} where do_foo2 (t : foo) : nat := { do_foo2 _ := 0 } where do_foo2_list (ts:list foo) : nat := { do_foo2_list nil := 0; do_foo2_list (cons t ts) with (do_foo2_list ts) => { do_foo2_list (cons t _) _ := (do_foo1 t) + (do_foo2 t)}}. Coq-Equations-1.3.1-8.20/test-suite/issues/issue120.v000066400000000000000000000021171463127417400217660ustar00rootroot00000000000000Require Export Lia. From Equations Require Import Equations. Require Import Coq.Relations.Relation_Operators. Require Import Coq.Wellfounded.Lexicographic_Product. Inductive id : Type := Id : nat -> id. Inductive var : Type := | varF : id -> var | varB : id -> var. Inductive ty : Set := | TTop : ty | TAll : ty -> ty -> ty | TSel : var -> ty -> ty. Inductive tm : Set := | tvar : id -> tm. Fixpoint tsize_flat(T: ty) := match T with | TTop => 1 | TAll T1 T2 => S (tsize_flat T1 + tsize_flat T2) | TSel _ U => 1 + tsize_flat U end. Definition val_type_termRel := Program.Wf.MR (lexprod lt (fun _ => lt)) (fun p => let '(T, n) := p in (existT (fun _ => nat) n (tsize_flat T))). Ltac smaller_n := autounfold; apply left_lex; lia. #[export] Instance WF_val_type_termRel: WellFounded val_type_termRel. apply Wf.measure_wf; apply wf_lexprod; intro; apply Wf_nat.lt_wf. Qed. Equations? val_type (Tn: ty * nat) : Prop by wf Tn val_type_termRel := val_type (pair T (S n)) := val_type (pair T n); val_type (pair T O) := True. Proof. smaller_n. Defined. Fail Next Obligation. Coq-Equations-1.3.1-8.20/test-suite/issues/issue124.v000066400000000000000000000004051463127417400217700ustar00rootroot00000000000000From Equations Require Import Equations. Require Import List. Import ListNotations. Equations foo (f: option (nat -> nat)) (l: list nat) : list nat := foo None _ := []; foo (Some f) nil := nil; foo (Some f) (cons hd tl) := cons (f hd) (foo (Some f) tl). 
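(* Editor's note (illustration only, not part of the original test case): with a Some
   argument, foo is expected to behave like List.map, and with None it returns []. The
   computation below should reduce to [2; 3]. *)
Eval compute in foo (Some S) (1 :: 2 :: nil).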
Coq-Equations-1.3.1-8.20/test-suite/issues/issue129.v000066400000000000000000000010421463127417400217730ustar00rootroot00000000000000Require Import Equations.Prop.Equations. Fixpoint slow n : unit := match n with O => tt | S n => match slow n with tt => slow n end end. #[local] Obligation Tactic := idtac. Goal slow 1 = tt /\ True -> True. intros H. dependent elimination H. assumption. Qed. Goal slow 20 = tt /\ True -> True. intros H. dependent elimination H. assumption. Qed. Goal slow 300 = tt /\ True -> True. intros H. Timeout 1 dependent elimination H. assumption. Qed. Goal slow 5000 = tt /\ True -> True. intros H. Timeout 1 dependent elimination H. assumption. Qed.Coq-Equations-1.3.1-8.20/test-suite/issues/issue143.v000066400000000000000000000047561463127417400220060ustar00rootroot00000000000000From Equations Require Import Equations. Require Import Lia. Section Tests. Context {A : Type}. Inductive forest : Type := | emp : A -> forest | tree : list forest -> forest. Equations fweight (f : forest) : nat := { fweight (emp _) := 1; fweight (tree l) := 1 + lweight l } where lweight (l : list forest) : nat := { lweight nil := 1; lweight (cons f l') := fweight f + lweight l' }. Inductive tail_of {A} : list A -> list A -> Prop := | t_refl : forall l, tail_of l l | t_cons : forall x l1 l2, tail_of l1 l2 -> tail_of l1 (cons x l2). #[local] Hint Constructors tail_of : core. Derive Signature for tail_of. Lemma tail_of_decons : forall {A} {x : A} {l1 l2}, tail_of (cons x l1) l2 -> tail_of l1 l2. Proof. intros. remember (cons x l1) as l. revert Heql. revert l1. induction H; intros; subst; auto. Qed. Lemma prod_conj : forall (A B : Prop), A * B -> A /\ B. Proof. intuition. Defined. Lemma fweight_neq_0 : (forall f, fweight f <> 0) /\ forall l, lweight l <> 0. Proof. assert (fe:=fun_elim (f:=fweight)). apply prod_conj. apply (fe (fun l n => n <> 0) (fun l n => n <> 0)); intros; lia. Qed. Lemma lweight_neq_0 : forall l, lweight l <> 0. Proof. apply fweight_neq_0. Defined. Lemma tail_of_fweight : forall l1 l2, tail_of l1 l2 -> lweight l1 <= lweight l2. Proof. induction 1; simp lweight; try lia. simpl. pose proof (proj1 fweight_neq_0 x). lia. Qed. End Tests. Arguments forest A : clear implicits. #[local] Hint Constructors tail_of : core. Module FlattenNestedWf. Equations? flatten {A} (f : forest A) : list A by wf (fweight f) lt := flatten (emp _) := nil; flatten (tree l) := lflatten l (t_refl _) where lflatten (fs : list (forest A)) (t : tail_of fs l) : list A by struct fs := { lflatten nil _ := nil; lflatten (cons f l') H := flatten f ++ lflatten l' (tail_of_decons H) }. Proof. intros. simp fweight. clear flatten lflatten. depind H. simpl. lia. simpl. lia. Qed. End FlattenNestedWf. Module FlattenNestedStruct. Section foo. Variable A : Type. Equations flatten (f : forest A) : list A by struct f := flatten (emp _) := nil; flatten (tree l) := lflatten l (t_refl _) where lflatten (fs : list (forest A)) (t : tail_of fs l) : list A by struct fs := { lflatten nil _ := nil; lflatten (cons f l') H := flatten f ++ lflatten l' (tail_of_decons H) }. End foo. End FlattenNestedStruct. Coq-Equations-1.3.1-8.20/test-suite/issues/issue172.v000066400000000000000000000007301463127417400217740ustar00rootroot00000000000000Set Implicit Arguments. From Equations Require Export Equations. Require Export Equations.Prop.Subterm. (* Module KAxiom. *) (** By default we disallow the K axiom, but it can be set. *) Set Equations With UIP. (** In this case the following definition uses the [K] axiom just imported. 
*) Axiom uip : forall A, UIP A. #[export] Existing Instance uip. Equations K {A} (x : A) (P : x = x -> Type) (p : P eq_refl) (H : x = x) : P H := K P p eq_refl := p. Coq-Equations-1.3.1-8.20/test-suite/issues/issue176.v000066400000000000000000000003721463127417400220020ustar00rootroot00000000000000From Equations Require Import Equations. Equations foo (n : nat) : nat := foo x := let x := 0 in x. Goal forall x, foo x = let x := 0 in x. Proof. intros x. rewrite foo_equation_1. match goal with |- ?x = ?y => constr_eq x y end. Abort.Coq-Equations-1.3.1-8.20/test-suite/issues/issue190.v000066400000000000000000000006301463127417400217730ustar00rootroot00000000000000From Equations Require Import Equations. Equations foo (n : nat) : nat := { foo 0 with true, true, true := { | true | true | true := 1; | _ | _ | _ := 0 }; foo _ := 0 }. Equations foo' (n : nat) : nat := { foo' 0 with true, true, true, true := { | true | true | true | true := 1; | _ | _ | _ | _ := 0 }; foo' _ := 0 }. Coq-Equations-1.3.1-8.20/test-suite/issues/issue191.v000066400000000000000000000006031463127417400217740ustar00rootroot00000000000000From Equations Require Import Equations. (* Assuming UIP *) From Coq.Logic Require Import Eqdep. Section FreeMonad. Variable S : Type. Variable P : S -> Type. Inductive FreeF A : Type := | retFree : A -> FreeF A | opr : forall s, (P s -> FreeF A) -> FreeF A. Derive Signature for Relation_Operators.clos_trans. Derive NoConfusion Subterm for FreeF. End FreeMonad. Coq-Equations-1.3.1-8.20/test-suite/issues/issue193.v000066400000000000000000000021241463127417400217760ustar00rootroot00000000000000Require Import Program. From Equations Require Import Equations. #[local] Obligation Tactic := program_simpl. Equations h_type (P : Type) (H : P) : P := h_type P H => let H1 := H in _. (* h_type_obligations has type-checked, generating 1 obligation *) (* Solving obligations automatically... *) (* h_type_obligations_obligation_1 is defined *) (* No more obligations remaining *) (* h_type_obligations is defined *) (* h_type is defined *) (* h_type is defined *) (* The command has indeed failed with message: *) (* h_type_obligations_obligation_1 cannot be used as a hint. *) Equations h_prop (P : Prop) (H : P) : P := h_prop P H => _. (* h_prop_obligations has type-checked, generating 1 obligation *) (* Solving obligations automatically... *) (* h_prop_obligations_obligation_1 is defined *) (* No more obligations remaining *) (* h_prop_obligations is defined *) (* h_prop is defined *) (* h_prop is defined *) (* The command has indeed failed with message: *) (* h_prop_obligations_obligation_1 cannot be used as a hint. *) Example test0 := h_prop. Example test1 := h_prop_obligations_obligation_1.Coq-Equations-1.3.1-8.20/test-suite/issues/issue200.v000066400000000000000000000001441463127417400217630ustar00rootroot00000000000000From Equations Require Import Equations. Require Import Vector. Derive DependentElimination for t. Coq-Equations-1.3.1-8.20/test-suite/issues/issue212.v000066400000000000000000000037141463127417400217740ustar00rootroot00000000000000Require Import List. From Coq Require Import Lia ssreflect. Unset Strict Implicit. Unset Printing Implicit Defensive. Set Asymmetric Patterns. Import ListNotations. From Equations Require Import Equations. Section list_size. Context {A : Type} (f : A -> nat). Equations list_size (l : list A) : nat := { list_size nil := 0; list_size (cons x xs) := S (f x + list_size xs) }. End list_size. Section Map. 
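(* Editor's note (descriptive comment added): map_In below is a map over a list whose
   function also receives a proof that its argument occurs in the list; that membership
   proof is what the well-founded recursion in execute further down uses to justify its
   recursive calls on list elements. *)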
Equations map_In {A B : Type} (l : list A) (f : forall (x : A), In x l -> B) : list B := map_In nil _ := nil; map_In (cons x xs) f := cons (f x _) (map_In xs (fun x H => f x _)). End Map. Section Test. Variables (L U R : Type). Inductive Foo : Type := | C1 : L -> Foo | C3 : L -> list Foo -> Foo. Inductive Result : Type := | ULeaf : U -> Result | UNode : list Result -> Result. (* Derive NoConfusion for Result. *) Fixpoint rsize (r : Result) := match r with | ULeaf a => 0 | UNode l => S (list_size rsize l) end. Lemma In_result_rsize_leq r res : In r res -> rsize r < (list_size rsize res). Proof. funelim (list_size rsize res) => //=. case=> [-> | Hin]; [| have Hleq := (H r Hin)]; lia. Qed. Variables (Null : R) (resolve : U -> Result). Set Equations Debug. Equations? (noind) execute (initial_value : U) (foos : list Foo) : list R := { execute _ [] := []; execute initial_value (_ :: qs) := (complete_value (resolve initial_value)) ++ execute initial_value qs where complete_value (res : Result) : list R by wf (rsize res) := { complete_value (UNode res) := concat (map_In res (fun r Hin => complete_value r)); complete_value (ULeaf res) := [] } }. all: do ?[by have Hleq := (In_result_rsize_leq r res Hin); lia]. Qed. Print execute_unfold_clause_2_complete_value. End Test. Coq-Equations-1.3.1-8.20/test-suite/issues/issue228.v000066400000000000000000000040011463127417400217710ustar00rootroot00000000000000Require Import Unicode.Utf8. Require Import Program.Equality. Require Import Arith. Require Vectors.Vector. Import Vector.VectorNotations. From Equations Require Import Equations. Inductive MType : Type := | Base : Set → MType | Channel : SType → MType with SType : Type := | ø : SType | Send : MType → SType → SType | Receive : MType → SType → SType | Branch: ∀ {n}, Vector.t SType n → SType | Select : ∀ {n}, Vector.t SType n → SType . Notation "C[ s ]" := (Channel s). Notation "! m ; s" := (Send m s) (at level 90, right associativity). Notation "? m ; s" := (Receive m s) (at level 90, right associativity). Notation "▹ ss" := (Branch ss) (at level 90, right associativity). Notation "◃ ss" := (Select ss) (at level 90, right associativity). Inductive Duality : SType → SType → Prop := | Ends : Duality ø ø | MRight : ∀ {m c₁ c₂}, Duality c₁ c₂ → Duality (Send m c₁) (Receive m c₂) | MLeft : ∀ {m c₁ c₂}, Duality c₁ c₂ → Duality (Receive m c₁) (Send m c₂) . Section Processes. Variable ST : Type. Variable MT : Type → Type. Inductive Message : MType → Type := | V : ∀ {M : Set}, MT M → Message (Base M) | C : ∀ {S : SType}, ST → Message (Channel S) . Arguments V [M]. Arguments C [S]. Inductive Process : Type := | PSelect : ∀ {n : nat} {ss : Vector.t SType n} (i : Fin.t n) , Message C[Select ss] → (Message C[ss[@i]] → Process) → Process . End Processes. (**************************) (* NICETIES *) (**************************) Arguments V [ST MT M]. Arguments C [ST MT S]. Arguments PSelect [ST MT n ss]. (**************************) (* LINEARITY *) (**************************) Definition TMT : Type → Type := fun _ => unit. Derive NoConfusion for MType. Derive Signature NoConfusionHom for Message. (* t means "Has it been already found?" *) Equations find (t : bool) (p : Process bool TMT) : Prop by struct p := find t (PSelect i (C c) p) => find t (p (C false)) . Coq-Equations-1.3.1-8.20/test-suite/issues/issue24.v000066400000000000000000000003211463127417400217040ustar00rootroot00000000000000Require Import Equations.Prop.Equations. 
Equations Rtuple' (domain : list Type) : Type := Rtuple' nil := unit; Rtuple' (cons d nil) := d; Rtuple' (cons d (cons d' ds)) := prod (prod (Rtuple' ds) d') d. Coq-Equations-1.3.1-8.20/test-suite/issues/issue240.v000066400000000000000000000006461463127417400217760ustar00rootroot00000000000000Require Import Equations.Equations. Inductive T1: Type := | C { T: Type }: T1 . Parameter t1: T1. Record T2: Type := mkT2 { }. Equations f (t2: T2): T2 by wf 0 lt := f t2 := match t1 with | C => let x := fun (N': T2) => f N' in t2 end . Axiom admit : forall{A}, A. Next Obligation. Admitted. Print HintDb f_wf_obligations. Next Obligation. apply admit. Defined. Print f_elim. Admitted. Coq-Equations-1.3.1-8.20/test-suite/issues/issue246.v000066400000000000000000000004701463127417400217770ustar00rootroot00000000000000From Equations Require Import Equations. Inductive t (u : unit) := | strange : t tt -> t u. Derive NoConfusionHom for t. Fail Derive Subterm for t. Inductive t' (u : unit) : Set := | strange' : t' tt -> t' u. Derive NoConfusionHom for t'. Derive Subterm for t'. Definition test := well_founded_t'_subterm. Coq-Equations-1.3.1-8.20/test-suite/issues/issue249.v000066400000000000000000000005061463127417400220020ustar00rootroot00000000000000From Equations Require Import Equations. (* Removing this line makes Derive Subterm go through without troubles *) Set Universe Polymorphism. Inductive t : Type -> Type := | Build_t : forall A, t A -> t A. Derive Signature for t. Derive NoConfusionHom for t. Derive Subterm for t. Definition test := well_founded_t_subterm.Coq-Equations-1.3.1-8.20/test-suite/issues/issue25.v000066400000000000000000000003141463127417400217070ustar00rootroot00000000000000Require Import Equations.Prop.Equations. Equations Rtuple'' (domain : list Type) : Type := Rtuple'' nil => unit; Rtuple'' (cons d ds) with ds => { | nil => d ; | _ => prod (Rtuple'' ds) d }. Coq-Equations-1.3.1-8.20/test-suite/issues/issue252.v000066400000000000000000000011741463127417400217760ustar00rootroot00000000000000Require Import Equations.Equations. Inductive vec (A:Type) : nat -> Type := | nil : vec A 0 | cons : forall n, A -> vec A n -> vec A (S n). Arguments nil {A}. Arguments cons {A n}. Derive Signature for vec. Notation "x :: v" := (cons x v). Notation "[ ]" := nil. Notation "[ x ]" := (cons x nil). Notation "[ x ; y ; .. ; z ]" := (cons x (cons y .. (cons z nil) .. )). Equations head {A n} (v : vec A (S n)) : A := head (x :: _) := x. Equations tail {A n} (v : vec A (S n)) : vec A n := tail (_ :: v) := v. Equations vec_vec {A:Type} {n} (v : vec A n) : vec A n := vec_vec _ (n:=0) := []; vec_vec v := (head v) :: (tail v). Coq-Equations-1.3.1-8.20/test-suite/issues/issue258.v000066400000000000000000000013231463127417400220000ustar00rootroot00000000000000From Coq Require Import Psatz. From Equations Require Import Equations. Inductive Pre := Pi : Pre -> Pre. Fixpoint pre_size (p : Pre) : nat := match p with | Pi τ => 1 + pre_size τ end. Inductive InterpInput := | inp_ctx (Γp : nat) | inp_ty (Γp : nat) (Γ : nat) (σp : Pre). Definition measure_input (ii : InterpInput) : nat := match ii with | inp_ctx Γp => Γp | inp_ty Γp Γ σp => Γp + pre_size σp end. Equations? interp (ii : InterpInput) : nat by wf (measure_input ii) lt := interp (inp_ty Γp Γ (Pi τp)) := let piΓ := interp (inp_ctx Γp) in let τ := interp (inp_ty Γp piΓ τp) in 0; interp _ := 0. Proof. all: abstract lia. Defined. Definition test := interp_elim. 
Coq-Equations-1.3.1-8.20/test-suite/issues/issue272.v000066400000000000000000000024621463127417400220010ustar00rootroot00000000000000From Equations Require Import Equations. (* If it's the full definition from the start, works fine *) (* Equations even (n : nat) : bool := { *) (* even (S n) := odd n; *) (* even 0 := true } *) (* with odd (n : nat) : bool := { *) (* odd 0 := false ; *) (* odd (S n) := even n}. *) (* However, mutual definition fails when one of the function *) (* is independent from the others *) Equations even (n : nat) : bool := {even n := false } with odd (n : nat) : bool by struct n := { odd 0 := false ; odd (S n) := even n}. (* The variable even was not found in the current environment. *) (* This is bad because it prevents iterated refinement of code *) (* and it also prevents from mixing code with proof mode *) Fail Equations? even (n : nat) : bool := { even (S n) := odd n; even 0 := true } with odd (n : nat) : bool by struct n := { odd n := _}. (* The variable odd was not found in the current environment. *) (* Moreover the error gets even more mysterious when all *) (* the putative mutually defined functions are independent *) Module NoApparentRec. Equations even (n : nat) : bool by struct n := { even n := true } with odd (n : nat) : bool by struct n := { odd n := false}. Next Obligation. destruct n; auto. Defined. End NoApparentRec. (* Cannot define even mutually with other programs *) Coq-Equations-1.3.1-8.20/test-suite/issues/issue273.v000066400000000000000000000013031463127417400217730ustar00rootroot00000000000000From Equations Require Import Equations. Require Import List. Import ListNotations. Inductive T := C : list T -> T. Show Obligation Tactic. Equations f (p : bool) (e : T) : T by struct e := { f false e := e ; f p (C xs) := let xs := g true xs in C xs } where g (p : bool) (xs : list T) : list T by struct xs := { g false xs := xs ; g _ [] := []; g p (x :: xs) := let x := c true x in let xs := g true xs in x :: xs } where c (p : bool) (x : T) : T by struct x := { c false x := x ; c p x := let x := f true x in x }. Next Obligation. destruct x; auto. Defined. Next Obligation. destruct x; auto. Defined. Next Obligation. destruct xs; auto. Defined. About f_elim.Coq-Equations-1.3.1-8.20/test-suite/issues/issue286.v000066400000000000000000000003731463127417400220050ustar00rootroot00000000000000From Equations Require Import Equations. Axiom P : nat -> Type. Inductive foo : nat -> Type := | foo_let n (j := n) (k : P n) : nat -> let m := n + j + 2 in P m -> foo m. Equations bar (n : nat) (x : foo n) : nat := bar _ (foo_let x k n y) := n. Coq-Equations-1.3.1-8.20/test-suite/issues/issue297.v000066400000000000000000000002161463127417400220030ustar00rootroot00000000000000From Equations Require Import Equations. Goal forall P y, P y -> let n := y in n = 12 -> P n. intros P y py n. simplify ?. apply py. Qed. Coq-Equations-1.3.1-8.20/test-suite/issues/issue306.v000066400000000000000000000004571463127417400220010ustar00rootroot00000000000000Require Import Equations.Prop.Subterm Equations.Prop.DepElim. From Equations Require Import Equations. Unset Equations With Funext. Parameter size : forall {A}, list A -> nat. Equations test (s : list bool) : list bool by wf (size s) lt:= test pn with true => { | true := nil; | false := nil }.Coq-Equations-1.3.1-8.20/test-suite/issues/issue321.v000066400000000000000000000002141463127417400217650ustar00rootroot00000000000000From Equations Require Import Equations. Set Warnings "-equations-open-proof-complete". Equations? 
x : nat := x := 0. Proof. Qed. Check x. Coq-Equations-1.3.1-8.20/test-suite/issues/issue328.v000066400000000000000000000007731463127417400220060ustar00rootroot00000000000000From Coq Require Import Lia. Set Implicit Arguments. From Equations Require Import Equations. Parameter A : Type. Inductive nonEmpty (A : Type) : Type := | singleton : A -> nonEmpty A | consNE : A -> nonEmpty A -> nonEmpty A. Equations? fromList (l : list A) : length l > 0 -> nonEmpty A := { fromList nil H := _; fromList (cons x nil) _ := singleton x; fromList (cons x (cons y l)) _ := consNE x (fromList (cons y l) _) }. Proof. - exfalso. abstract lia. - abstract lia. Fail Defined. Abort.Coq-Equations-1.3.1-8.20/test-suite/issues/issue329.v000066400000000000000000000014771463127417400220110ustar00rootroot00000000000000Require Import List ssreflect. Require Import Equations.Prop.Equations. Import ListNotations. Equations ok_clause (e : nat -> option bool) (xs : list nat) : bool := ok_clause e xs => ok_clause' e xs where ok_clause' (p : nat -> option bool) (l : list nat) : bool by struct l := ok_clause' _ [] => true; ok_clause' p (x::l) with p x => { | Some _ => ok_clause' p l; | _ => false }. Lemma ok_clause_test p : ok_clause p [] = true. Proof. now simp ok_clause. Qed. Lemma ok_clause_test' p cl : ok_clause p cl = false -> exists y, In y cl /\ p y = None. Proof. apply (ok_clause_elim (fun e xs call => call = false -> exists y, In y xs /\ e y = None) (fun _ _ e xs call => call = false -> exists y, In y xs /\ e y = None)). all:try solve [cbn; try discriminate; firstorder eauto]. Qed. Coq-Equations-1.3.1-8.20/test-suite/issues/issue338.v000066400000000000000000000005561463127417400220060ustar00rootroot00000000000000(* success *) From Coq Require Import ssreflect. From Equations Require Import Equations. Import EquationsNotations. Open Scope equations_scope. Goal (forall n, n + 0 = n). intros. now rewrite -!plus_n_O. Qed. (* rewrite fails with: Error: Syntax error: '*' or [tactic:ssrrwargs] or [oriented_rewriter] expected after 'rewrite' (in [tactic:simple_tactic]). *) Coq-Equations-1.3.1-8.20/test-suite/issues/issue346.v000066400000000000000000000013061463127417400217770ustar00rootroot00000000000000From Coq Require Import Program. From Equations Require Import Equations. Definition tyty := Type -> Type. Inductive X : tyty := | K : X nat. Derive NoConfusion for X. Derive Signature for X. Derive NoConfusionHom for X. Next Obligation. now dependent induction a; dependent induction b. Defined. Next Obligation. dependent induction a; dependent induction b; simpl. now compute; rewrite JMeq_eq_refl; destruct e. Defined. Next Obligation. dependent induction b; simpl. now compute; rewrite JMeq_eq_refl. Defined. Derive Subterm for X. Derive EqDec for X. Next Obligation. now destruct x; dependent induction y; left. Defined. Check (NoConfusionPackage_X : NoConfusionPackage (sigma (fun x => X x))). Coq-Equations-1.3.1-8.20/test-suite/issues/issue349.v000066400000000000000000000203571463127417400220110ustar00rootroot00000000000000Set Asymmetric Patterns. Set Implicit Arguments. From Coq Require Import Lia Lists.List. Import ListNotations. From Equations Require Import Equations. Open Scope bool_scope. Inductive label : Set := | arrow | product | top. Inductive index : Set := | one | two. Scheme Equality for index. Definition raw_tree_type : Set := list index -> option label. Notation defined x := (exists l, x = Some l). Notation undefined x := (~ defined x). 
Definition is_tree_type (T : raw_tree_type) : Prop := defined (T []) /\ (forall π σ : list index, defined (T (π ++ σ)) -> defined (T π)) /\ (forall π : list index, T π = Some product \/ T π = Some arrow -> defined (T (π ++ [one])) /\ defined (T (π ++ [two]))) /\ (forall π : list index, T π = Some top -> undefined (T (π ++ [one])) /\ undefined (T (π ++ [two]))). Definition has_domain (X Y : Type) (f : X -> option Y) (𝒟 : list X) : Prop := forall x : X, defined (f x) <-> List.In x 𝒟. Definition 𝒯 : Set := { T : raw_tree_type | is_tree_type T }. Program Definition 𝒯__f : Type := { '(T, dom) : 𝒯 * list (list index) | has_domain T dom }. Local Definition inspect (X Y : Type) (f : X -> Y) (x : X) : {y | f x = y} := exist _ (f x) eq_refl. Inductive finite_tree_grammar : Type := | ftg_top : finite_tree_grammar | ftg_product : finite_tree_grammar -> finite_tree_grammar -> finite_tree_grammar | ftg_arrow : finite_tree_grammar -> finite_tree_grammar -> finite_tree_grammar. Fixpoint is_prefix (A : Type) (A_eq_dec : forall x y : A, {x = y} + {x <> y}) (xs ys : list A) : bool := match xs, ys with | [], _ => true | x :: xs, y :: ys => proj1_sig (Sumbool.bool_of_sumbool (A_eq_dec x y)) && is_prefix A_eq_dec xs ys | _, _ => false end. Program Definition finite_subtree_domain' (𝒟 : list (list index)) (π : list index) : list (list index) := map (skipn (length π)) (filter (is_prefix index_eq_dec π) 𝒟). Program Definition finite_subtree_domain (T : 𝒯__f) : list index -> list (list index) := finite_subtree_domain' (snd T). Definition subtree (T : raw_tree_type) (π : list index) (σ : list index) : option label := T (π ++ σ). Lemma skipn_length_after_prepended_is_noop : forall (A : Type) (l1 l2 : list A), skipn (length l1) (l1 ++ l2) = l2. Proof. induction l1; intuition. Qed. Lemma is_prefix_correct : forall (A : Type) (A_eq_dec : forall x y : A, {x = y} + {x <> y}) (xs ys : list A), is_prefix A_eq_dec xs ys = true <-> exists zs : list A, xs ++ zs = ys. Proof. intros. split; intro. - generalize dependent ys; induction xs; intros. + exists ys. intuition. + destruct ys. * inversion H. * apply andb_prop in H as []. fold is_prefix in H0. destruct (A_eq_dec a a0). -- apply IHxs in H0 as []. subst. exists x. cbn. subst. reflexivity. -- discriminate. - inversion H. subst. induction xs. + reflexivity. + cbn. destruct (A_eq_dec a a); intuition. apply IHxs. eauto. Qed. Lemma is_prefix_correct' : forall (A : Type) (A_eq_dec : forall x y, {x = y} + {x <> y}) (xs ys : list A), is_prefix A_eq_dec xs ys = true <-> xs ++ skipn (length xs) ys = ys. Proof. intros. split; intro; generalize dependent ys; induction xs; intros; destruct ys; intuition; try discriminate. - apply andb_prop in H as []. fold is_prefix in H0. cbn. rewrite IHxs. + f_equal. destruct (A_eq_dec a a0). * intuition. * discriminate. + intuition. - cbn. destruct (A_eq_dec a a0); inversion H; subst. + subst. apply IHxs. rewrite H2. apply H2. + contradiction. Qed. Program Lemma finite_subtree_has_domain : forall (T : 𝒯__f) (π : list index), defined ((fst T) π) -> has_domain (subtree (fst T) π) (finite_subtree_domain T π). Proof. intros (((? & ?) & ?) & ?) ?. split; intro. - replace x0 with (skipn (length π) (π ++ x0)) by apply skipn_length_after_prepended_is_noop. apply in_map. apply filter_In. split. + apply y. destruct (x π) eqn:?; inversion H; subst; intuition. + apply is_prefix_correct. eauto. - apply in_map_iff in H0 as (? & ? & ?). cbn in *. destruct (x π) eqn:?. + apply y. apply filter_In in H1 as []. apply is_prefix_correct' in H2. rewrite <- H0. rewrite H2. 
intuition. + inversion H. inversion H2. Qed. Program Lemma subtree_is_tree_type : forall (T : 𝒯) (π : list index), defined (T π) -> is_tree_type (subtree T π). Proof. intros (? & ? & ? & ? & ?) ? ?. unfold is_tree_type, undefined, not in *. repeat split; cbn in *; intros; try (apply a in H0 as []; rewrite app_assoc in *; intuition); try (apply a0 in H0 as []; rewrite app_assoc in *; intuition). - rewrite app_nil_r. intuition. - rewrite app_assoc in H0. intuition auto with *. Qed. Definition safe_finite_subtree (T : 𝒯__f) (π : list index) (H : defined ((proj1_sig (fst (proj1_sig T))) π)) : 𝒯__f := exist _ (pair (exist _ (subtree (proj1_sig (fst (proj1_sig T))) π) (subtree_is_tree_type (fst (proj1_sig T)) π H)) (finite_subtree_domain T π)) (finite_subtree_has_domain T π H). Lemma filter_length_le : forall (X : Type) (l : list X) (f : X -> bool), length (filter f l) <= length l. Proof. intros. induction l; intuition. cbn. destruct (f a) eqn:?; intuition. cbn. lia. Qed. Lemma filter_length_lt' : forall (X : Type) (f : X -> bool) (l : list X), (exists x : X, In x l /\ f x = false) -> length (filter f l) < length l. Proof. intros ? ? ? (? & ? & ?). induction l; intros. - inversion H. - cbn in *. destruct (f a) eqn:?. + inversion H. * subst. rewrite Heqb in H0. inversion H0. * cbn in *. apply PeanoNat.Nat.succ_lt_mono. intuition auto with *. + pose proof @filter_length_le _ l f. cbn. lia. Qed. Program Lemma finite_subtree_domain_length_lt : forall (T : 𝒯__f) (π : list index), π <> nil -> length (finite_subtree_domain T π) < length (snd T). Proof. unfold finite_subtree_domain, finite_subtree_domain'. intros (((? & ? & ? & ? & ?) & ?) & ?) ? ?. rewrite map_length. apply filter_length_lt'. induction π. - intuition. - exists []. split. + apply y. intuition. + intuition. Qed. Axiom todo : forall {A}, A. #[program] Equations reify_finite_tree (T : 𝒯__f) : finite_tree_grammar by wf (length (snd (proj1_sig T))) lt := reify_finite_tree T with inspect (fun T => (fst T) []) T := { | exist _ (Some top) _ => ftg_top; | exist _ (Some product) H__eq => ftg_product (reify_finite_tree (safe_finite_subtree T [one] _)) (reify_finite_tree (safe_finite_subtree T [two] _)); | exist _ (Some arrow) H__eq => ftg_arrow (reify_finite_tree (safe_finite_subtree T [one] _)) (reify_finite_tree (safe_finite_subtree T [two] _)); | exist _ None _ => ftg_top }. Obligation 1. clear reify_finite_tree. destruct T as (((? & ? & ? & ? & ?) & ?) & ?). assert (x [] = Some product \/ x [] = Some arrow) by intuition. apply a in H as []. apply H. Qed. Next Obligation. clear reify_finite_tree. apply finite_subtree_domain_length_lt. intro. inversion H. Qed. Next Obligation. clear reify_finite_tree. destruct T as (((? & ? & ? & ? & ?) & ?) & ?). assert (x [] = Some product \/ x [] = Some arrow) by intuition. apply a in H as []. apply H0. Qed. Next Obligation. clear reify_finite_tree. apply finite_subtree_domain_length_lt. intro. inversion H. Qed. Next Obligation. clear reify_finite_tree. destruct T as (((? & ? & ? & ? & ?) & ?) & ?). assert (x [] = Some product \/ x [] = Some arrow) by intuition. apply a in H as []. apply H. Qed. Next Obligation. clear reify_finite_tree. apply finite_subtree_domain_length_lt. intro. inversion H. Qed. Next Obligation. clear reify_finite_tree. destruct T as (((? & ? & ? & ? & ?) & ?) & ?). assert (x [] = Some product \/ x [] = Some arrow) by intuition. apply a in H as []. apply H0. Qed. Next Obligation. clear reify_finite_tree. apply finite_subtree_domain_length_lt. intro. inversion H. Qed. 
Lemma reify_graph_elim (T : 𝒯__f) : reify_finite_tree T = reify_finite_tree T. Proof. apply_funelim (reify_finite_tree T). Abort. Coq-Equations-1.3.1-8.20/test-suite/issues/issue353.v000066400000000000000000000016741463127417400220050ustar00rootroot00000000000000Require Import Coq.Arith.Arith. From Equations Require Import Equations. Fixpoint Ack (n m : nat) : nat := match n with | O => S m | S p => let fix Ackn (m : nat) := match m with | O => Ack p 1 | S q => Ack p (Ackn q) end in Ackn m end. Definition lex_nat (ab1 ab2 : nat * nat) : Prop := match ab1, ab2 with | (a1, b1), (a2, b2) => (a1 < a2) \/ ((a1 = a2) /\ (b1 < b2)) end. #[local] Hint Unfold lex_nat : rec_decision. Lemma lex_nat_wf : well_founded lex_nat. Admitted. #[export] Instance Lex_nat_wf : WellFounded lex_nat. apply lex_nat_wf. Defined. (** Does not generate the induction principle *) Module Alt. Equations Ack (p : nat * nat) : nat by wf p lex_nat := Ack (0, n) := S n ; Ack (S m, 0) := Ack (m, 1); Ack (S m, S n) := Ack (m, Ack (S m, n)). End Alt. Module Alt2. Equations Ack2 (p : nat * nat) : nat by wf p lex_nat := Ack2 (0, n) := S n ; Ack2 (S m, 0) := Ack2 (m, 1); Ack2 (S m, S n) := Ack2 (m, Ack2 (S m, n)). End Alt2. (* OK *) Coq-Equations-1.3.1-8.20/test-suite/issues/issue354.v000066400000000000000000000006231463127417400217770ustar00rootroot00000000000000(* From Coq Require Import Logic.StrictProp. *) From Equations Require Import Equations. Inductive Ssig {A : Type} (P : A -> SProp) := | Sexists (a : A) (b : P a) : Ssig P. Set Warnings "+bad-relevance". Equations Spr1 {A : Type} {P : A -> SProp} (s : Ssig P) : A := Spr1 (Sexists _ a b) := a. Equations Spr2 {A : Type} {P : A -> SProp} (s : Ssig P) : P (Spr1 s) := Spr2 (Sexists _ a b) := b. Coq-Equations-1.3.1-8.20/test-suite/issues/issue371.v000066400000000000000000000020541463127417400217760ustar00rootroot00000000000000From Coq Require Import Lia NArith.NArith Program.Basics. From Equations Require Export Equations. Obligation Tactic := idtac. Section Context. Import N. Local Open Scope N_scope. Context (f : N -> N) (mono : forall (a b : N) (l : a < b), f a < f b). Equations f_bogus (n : N) : positive := f_bogus n := let m := f (succ n) - f n in match m with | N0 => _ | Npos p => p end. Next Obligation. intros n m. apply xH. Qed. Equations f_inspect_fail (n : N) : positive := f_inspect_fail n := let m := f (succ n) - f n in match exist _ m (eq_refl m) with | exist _ N0 e => _ | exist _ (Npos p) e => p end. Next Obligation. intros n m _ _. Abort. Equations f_inspect (n : N) : positive := f_inspect n := let m := f (succ n) - f n in match exist _ m (eq_refl m) with | exist _ N0 e => @const _ (m = N0) _ e | exist _ (Npos p) e => @const _ (m = Npos p) p e end. Next Obligation. Abort. Next Obligation of f_inspect_obligations. intros n m _ _ e. pose proof mono n (succ n) as l. lia. Qed. End Context.Coq-Equations-1.3.1-8.20/test-suite/issues/issue372.v000066400000000000000000000015631463127417400220030ustar00rootroot00000000000000From Coq Require Import Lia NArith.NArith Program.Basics ssr.ssreflect. From Equations Require Export Equations. #[local] Obligation Tactic := idtac. Section Context. Import N. Local Open Scope N_scope. Context (f : N -> N) (mono : forall (a b : N) (l : a < b), f a < f b). Equations f_bogus (n : N) : positive := f_bogus n := let m := f (succ n) - f n in match m with | N0 => _ | Npos p => p end. Next Obligation. intros n m. apply xH. Qed. 
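(* Editor's note (descriptive comment added): f_inspect below uses the "inspect" idiom,
   matching on (exist _ m (eq_refl m)) so that an equation relating m to the matched
   constructor stays available in each branch; that equation is what allows the N0 branch
   to be discharged from the monotonicity hypothesis. *)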
Equations f_inspect (n : N) : positive := f_inspect n := let m := f (succ n) - f n in match exist _ m (eq_refl m) with | exist _ N0 e => @const _ (m = N0) _ e | exist _ (Npos p) e => @const _ (m = Npos p) p e end. Next Obligation. intros n m _ _ e. pose proof mono n (succ n) as l. lia. Qed. Lemma eq_f (n : N) : f_bogus n = f_inspect n. Proof. Fail funelim f_bogus. Abort. End Context. Coq-Equations-1.3.1-8.20/test-suite/issues/issue389.v000066400000000000000000000002421463127417400220040ustar00rootroot00000000000000Require Import Equations.HoTT.All. Set Universe Polymorphism. Inductive A : Type := | foo : A | bar : A -> A -> A. Derive NoConfusion for A. Derive EqDec for A. Coq-Equations-1.3.1-8.20/test-suite/issues/issue390.v000066400000000000000000000006441463127417400220020ustar00rootroot00000000000000From Coq Require Import Lia NArith.NArith Program.Tactics. From Equations Require Export Equations. Ltac hidebody H ::= idtac. Import Pos N. Print Pos.compare. Open Scope N_scope. Instance lt_well_founded : WellFounded lt := Acc_intro_generator 100 lt_wf_0. (* Ltac Init.hidebody H ::= idtac. *) Equations? f (a : N) : N by wf a lt := f N0 := 0; f (pos n) := succ (f (pred_N n)). Proof. lia. Qed. Compute f 42. Coq-Equations-1.3.1-8.20/test-suite/issues/issue393.v000066400000000000000000000015601463127417400220030ustar00rootroot00000000000000From Equations Require Import Equations. Require Import List. Inductive In {X} (x : X) : list X -> Type := | here {xs} : In x (x :: xs) | there {y xs} : In x xs -> In x (y :: xs). Derive Signature NoConfusion for In. Notation " x ∈ xs " := (In x xs) (at level 70, xs at level 10). Arguments here {X x xs}. Arguments there {X x y xs} _. Check (there here) : 26 ∈ (43 :: 26 :: 76 :: nil). (* Does coq-equations mistake variable names sometimes? *) Set Equations Debug. Equations repl {A} (R : list A) prev (idx : prev ∈ R) (next : A) : list A := repl (X :: XS) X here next := next :: XS; repl (X :: XS) prev (there idx') next := X :: (repl XS prev idx' next). Equations repl' {A} (R : list A) {prev} (idx : prev ∈ R) (next : A) : list A := repl' (X :: XS) here next := next :: XS; repl' (X :: XS) (there idx') next := X :: (repl' XS idx' next). Coq-Equations-1.3.1-8.20/test-suite/issues/issue399.v000066400000000000000000000004501463127417400220060ustar00rootroot00000000000000From Equations Require Import Equations. From Coq Require Import Bool. Inductive depInd : Type -> Type := | A : depInd bool | B : depInd unit. Derive Signature for depInd. Equations bar {a} (d : depInd (id a)) : a -> bool := | A => fun x => eqb x true | B => fun x => true. About bar_elim. Coq-Equations-1.3.1-8.20/test-suite/issues/issue43.v000066400000000000000000000010141463127417400217050ustar00rootroot00000000000000Require Import Program Bvector List. Require Import Relations. From Equations Require Import Equations DepElimDec. Equations silly {A} (l : list A) : list A := silly l by wf ((fix Ffix (x : list A) : nat := match x with | []%list => 0 | (_ :: x1)%list => S (Ffix x1) end) l) lt := silly nil := nil; silly (cons a l) := a :: silly l. Equations silly' {A} (l : list A) : list A := silly' l by wf (length l) lt := silly' nil := nil; silly' (cons a l) := a :: silly' l. Check silly'_elim. Coq-Equations-1.3.1-8.20/test-suite/issues/issue499.v000066400000000000000000000002311463127417400220040ustar00rootroot00000000000000From Equations Require Import Equations. Equations bla (n : nat) : nat -> nat by wf n lt := bla 0 := fun m => 0; bla (S n) := fun m => S (bla n m). 
Coq-Equations-1.3.1-8.20/test-suite/issues/issue500.v000066400000000000000000000006701463127417400217720ustar00rootroot00000000000000From Equations Require Import Equations. Section box. Universe u. Context {A : Type@{u}} `(EA : EqDec A). Inductive box : Type@{u} := | pack : A -> box. Derive NoConfusion for box. Derive EqDec for box. Inspect 5. End box. Definition test := @box_eqdec nat _. Section LIST. Variable (A:Type) (eq_dec : EqDec A). Inductive list : Type := | nil | cons : A -> list -> list. Derive NoConfusion EqDec for list. End LIST. Coq-Equations-1.3.1-8.20/test-suite/issues/issue61.v000066400000000000000000000004651463127417400217160ustar00rootroot00000000000000From Equations Require Import Equations. Fail Equations bug (x : nat) : nat := bug O := O; bug (S n) <= 2 => { | x <= 3 => { | x => x } }. Equations bug (x : nat) : nat := bug O := O; bug (S n) <= 2 => { | y <= 3 => { | x => x } }. Definition check := eq_refl : bug 1 = 3.Coq-Equations-1.3.1-8.20/test-suite/issues/issue63.v000066400000000000000000000054431463127417400217210ustar00rootroot00000000000000Require Import Coq.Vectors.Vector. Require Import Coq.PArith.PArith. Require Import Lia. Require Import Equations.Prop.Equations. Set Equations With UIP. Generalizable All Variables. Definition obj_idx : Type := positive. Definition arr_idx (n : nat) : Type := Fin.t n. Import VectorNotations. Definition obj_pair := (obj_idx * obj_idx)%type. Inductive Term {a} (tys : Vector.t (obj_idx * obj_idx) a) : obj_idx -> obj_idx -> Type := | Ident : forall dom, Term tys dom dom | Morph (f : arr_idx a) : Term tys (fst (tys[@f])) (snd (tys[@f])) | Comp (dom mid cod : obj_idx) (f : Term tys mid cod) (g : Term tys dom mid) : Term tys dom cod. Arguments Ident {a tys dom}. Arguments Morph {a tys} f. Arguments Comp {a tys dom mid cod} f g. Import Sigma_Notations. Require Import Wellfounded Relations. Derive NoConfusion for positive. Derive EqDec for positive. Derive Signature NoConfusion Subterm for Term. Fixpoint term_size {a : nat} {tys : Vector.t obj_pair a} {dom cod} (t : @Term a tys dom cod) : nat := match t with | Ident => 1%nat | Morph _ => 1%nat | Comp f g => 1%nat + term_size f + term_size g end. Set Program Mode. Show Obligation Tactic. Equations? comp_assoc_simpl_rec {a : nat} {tys dom cod} (t : @Term a tys dom cod) : {t' : @Term a tys dom cod | term_size t' <= term_size t} by wf (term_size t) lt := comp_assoc_simpl_rec (Comp f g) with comp_assoc_simpl_rec f => { | exist _ (Comp i j) Hle => Comp i (comp_assoc_simpl_rec (Comp j g)); | x => Comp x (comp_assoc_simpl_rec g) }; comp_assoc_simpl_rec x := x. Proof. 1-2,4:lia. all:(simpl; try Program.Tactics.destruct_call comp_assoc_simpl_rec; simpl in *; try lia). Time Defined. Definition comp_assoc_simpl {a} {tys : Vector.t obj_pair a} {dom cod} (t : Term tys dom cod) : Term tys dom cod := comp_assoc_simpl_rec t. Lemma comp_assoc_simpl_ident {a} {tys : Vector.t obj_pair a} {dom cod} (g : Term tys dom cod) : comp_assoc_simpl (Comp Ident g) = Comp Ident (comp_assoc_simpl g). Proof. unfold comp_assoc_simpl. Opaque comp_assoc_simpl_rec. autorewrite with comp_assoc_simpl_rec. simpl. reflexivity. Qed. Unset Program Mode. Open Scope program_scope. 
Lemma comp_assoc_simpl_comp {a} {tys : Vector.t obj_pair a} {dom mid cod} (f : Term tys mid cod) (g : Term tys dom mid) : comp_assoc_simpl (Comp f g) = match comp_assoc_simpl f in Term _ mid cod return Term tys dom mid -> Term tys dom cod with | Comp f f' => fun g => Comp f (comp_assoc_simpl (Comp f' g)) | x => fun g => Comp x (comp_assoc_simpl g) end g. Proof. unfold comp_assoc_simpl. simp comp_assoc_simpl_rec. revert dom g. Tactics.reverse. let felim := constr:(fun_elim (f := @comp_assoc_simpl_rec)) in unshelve refine_ho (felim _ _ _ _ _ _); simpl; intros; try reflexivity. Qed. Coq-Equations-1.3.1-8.20/test-suite/issues/issue66.v000066400000000000000000000004261463127417400217200ustar00rootroot00000000000000Require Equations.Prop.Equations. Definition f (l l': list nat) (H: 0 = 0): Prop. Admitted. Axiom cheat : forall {A}, A. Equations? id (l: list nat) (H: f l l _): { r: list nat | f l r _ } := id l H := exist _ nil _. Proof. apply cheat. Defined. Definition check := id_elim.Coq-Equations-1.3.1-8.20/test-suite/issues/issue7.v000066400000000000000000000103571463127417400216370ustar00rootroot00000000000000Set Warnings "-notation-overridden". Require Import ssreflect. Require Import Utf8 Program. Require Import Equations.Prop.Equations. Inductive TupleT : nat -> Type := | nilT : TupleT 0 | consT {n} A : (A -> TupleT n) -> TupleT (S n). Derive Signature NoConfusion NoConfusionHom for TupleT. Inductive Tuple : forall n, TupleT n -> Type := nil : Tuple _ nilT | cons {n A} (x : A) (F : A -> TupleT n) : Tuple _ (F x) -> Tuple _ (consT A F). Derive Signature NoConfusionHom for Tuple. Inductive TupleMap@{i j} : forall n, TupleT n -> TupleT n -> Type@{j} := tmNil : TupleMap _ nilT nilT | tmCons {n} {A B : Type@{i}} (F : A -> TupleT n) (G : B -> TupleT n) : (forall x, sigT (TupleMap _ (F x) ∘ G)) -> TupleMap _ (consT A F) (consT B G). Derive Signature NoConfusion for TupleMap. Derive NoConfusionHom for TupleMap. Equations TupleMap_noconf {n : nat} {x y : TupleT n} (f g : TupleMap n x y) : Prop := TupleMap_noconf tmNil tmNil := True; TupleMap_noconf (tmCons _ _ fn) (tmCons F G fn') := fn = fn'. Unset Printing Primitive Projection Parameters. #[export] Program Instance TupleMap_depelim n T U : DependentEliminationPackage (TupleMap n T U) := { elim_type := ∀ (P : forall n t t0, TupleMap n t t0 -> Type), P _ _ _ tmNil -> (∀ n A B (F : A -> TupleT n) (G : B -> TupleT n) (s : ∀ x, sigT (TupleMap _ (F x) ∘ G)) (r : ∀ x, P _ _ _ (projT2 (s x))), P _ _ _ (tmCons F G s)) -> ∀ (tm : TupleMap n T U), P _ _ _ tm }. Next Obligation. revert n T U tm. fix IH 4. intros. destruct tm; [ | apply X0 ]; auto. Defined. (* Doesn't know how to deal with the nested TupleMap *) (* Derive Subterm for TupleMap. *) Inductive TupleMap_direct_subterm : ∀ (n : nat) (H H0 : TupleT n) (n0 : nat) (H1 H2 : TupleT n0), TupleMap n H H0 → TupleMap n0 H1 H2 → Prop := | TupleMap_direct_subterm_1_1 : ∀ n {A B} (F : A -> TupleT n) (G : B -> TupleT n) (H : forall x, sigT (TupleMap _ (F x) ∘ G)) (x : A), TupleMap_direct_subterm _ _ (G (projT1 (H _))) _ _ _ (projT2 (H x)) (tmCons _ _ H). Derive Signature for TupleMap_direct_subterm. #[local] Hint Constructors TupleMap_direct_subterm : subterm_relation. Import Sigma_Notations. Definition TupleMap_subterm := Relation_Operators.clos_trans _ (λ x y : Σ index : Σ (n : nat) (_ : TupleT n), TupleT n, TupleMap (pr1 index) (pr1 (pr2 index)) (pr2 (pr2 index)), TupleMap_direct_subterm _ _ _ _ _ _ (pr2 x) (pr2 y)). #[local] Hint Unfold TupleMap_subterm : subterm_relation. 
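(* Editor's note (descriptive comment added): because Derive Subterm cannot handle the
   nested occurrence of TupleMap under sigT, the direct-subterm relation above is written
   by hand, and its well-foundedness is proved manually below using the custom eliminator
   registered in TupleMap_depelim. *)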
#[local] Program Instance WellFounded_TupleMap_subterm : WellFounded TupleMap_subterm. (* Solve All Obligations with solve_subterm. *) Ltac wf_subterm := intro; simp_sigmas; on_last_hyp depind; split; intros; simp_sigmas; on_last_hyp ltac:(fun H => red in H); [ exfalso | ]; on_last_hyp depind; intuition. Next Obligation. unfold TupleMap_subterm. apply Transitive_Closure.wf_clos_trans. red. intros ((n&H1&H2)&map). simpl in map. match goal with |- context [ Acc ?R _ ] => set(rel:=R) end. move rel at top. set (foo := elim (A:= TupleMap n H1 H2)). simpl in foo. induction map using foo. + constructor. intros ((n'&H1'&H2')&map'). simpl in *. move=> H. red in H. simpl in H. depelim H. + constructor. intros ((n'&H1'&H2')&map'). simpl in *. unfold rel; cbn. intros H; depelim H. apply r. Defined. Derive NoConfusion for Tuple. #[local] Hint Extern 100 => progress simpl : rec_decision. Time Equations myComp {n} {B C : TupleT n} (tm1 : TupleMap _ B C) {A : TupleT n} (tm2 : TupleMap _ A B) : TupleMap _ A C := myComp tmNil tmNil := tmNil; myComp (tmCons _ H g) (tmCons F G f) := tmCons _ _ (fun x => existT (fun y => TupleMap _ _ (_ y)) (projT1 (g (projT1 (f x)))) (myComp (projT2 (g (projT1 (f x)))) (projT2 (f x)))). Time Equations myComp_wf {n} {B C : TupleT n} (tm1 : TupleMap _ B C) {A : TupleT n} (tm2 : TupleMap _ A B) : TupleMap _ A C by wf (signature_pack tm1) TupleMap_subterm := myComp_wf tmNil tmNil := tmNil; myComp_wf (n:=_) (tmCons (B:=C) _ H g) (tmCons (n:=n) (A:=A) (B:=B) F G f) := tmCons _ _ (fun x => existT (fun y => TupleMap _ _ (_ y)) (projT1 (g (projT1 (f x)))) (myComp_wf (projT2 (g (projT1 (f x)))) (projT2 (f x)))). Print Assumptions myComp. (* Print Assumptions myComp_wf. *)Coq-Equations-1.3.1-8.20/test-suite/issues/issue70.v000066400000000000000000000036251463127417400217170ustar00rootroot00000000000000From Coq Require Import Fin. From Equations Require Import Equations. Set Equations Transparent. Derive Signature NoConfusion NoConfusionHom for t. Equations FL (n : nat) : Fin.t (S n) := FL 0 := F1; FL (S n) := FS (FL n). Equations FU {n : nat} (x : Fin.t n) : Fin.t (S n) := FU F1 := F1; FU (FS x) := FS (FU x). Equations invertFin {n : nat} (x : Fin.t n) : Fin.t n := invertFin F1 := FL _; invertFin (FS x) := FU (invertFin x). Equations invFULemma {n : nat} (x : Fin.t n) : invertFin (FU x) = FS (invertFin x) := invFULemma F1 := _; invFULemma (FS x) := (f_equal _ (invFULemma x)). Equations invFLLemma (n : nat) : invertFin (FL n) = F1 := invFLLemma 0 := eq_refl; invFLLemma (S n) := (f_equal _ (invFLLemma n)). #[local] Hint Rewrite @invFULemma invFLLemma : invertFin. Lemma invertFinInv' {n : nat} (x : Fin.t n) : invertFin (invertFin x) = x. Proof. funelim (invertFin x); simp invertFin; congruence. Qed. Equations invertFinInv {n : nat} (x : Fin.t n) : invertFin (invertFin x) = x := invertFinInv (n:=(S _)) F1 := (invFLLemma _); invertFinInv (n:=(S _)) (FS y) := (eq_trans (invFULemma (invertFin y)) (f_equal _ (invertFinInv y))). Definition invFinViewType {n : nat} (x : (Fin.t n)) : Type := { y : Fin.t n & x = invertFin y }. Definition invFinView {n : nat} (x : (Fin.t n)) : invFinViewType x := existT _ (invertFin x) (eq_sym (invertFinInv x)). Equations finFUOrFL {n : nat} (x : Fin.t (S n)) : { y : Fin.t n & x = FU y } + ( x = FL n ) := finFUOrFL (n:=0) F1 := (inr eq_refl); finFUOrFL (n:=(S _)) x with invFinView x => { | (existT _ F1 eq) := (inr eq); | (existT _ (FS _) eq) := (inl (existT _ (invertFin _) eq))}. 
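(* Editor's note (illustration only, not part of the original test case): invertFin
   reverses the position of an index, so the first element of Fin.t 3 should be sent to
   the last one; the computation below is expected to return FS (FS F1). *)
Eval compute in invertFin (F1 : t 3).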
Coq-Equations-1.3.1-8.20/test-suite/issues/issue71.v000066400000000000000000000006021463127417400217100ustar00rootroot00000000000000Require Equations.Equations. Axiom unsupported: False. Axiom ignore_termination: nat. Ltac t := intros; match goal with | |- (?T < ?T)%nat => unify T ignore_termination; apply False_ind; exact unsupported end. Obligation Tactic := t. Equations content (n: nat): nat := content n by wf ignore_termination lt := content n := content n. Definition check := content_elim. Coq-Equations-1.3.1-8.20/test-suite/issues/issue73.v000066400000000000000000000006111463127417400217120ustar00rootroot00000000000000From Coq.Lists Require Import List. From Equations Require Import Equations. Equations zip {A} {B} (l1 : list A) (l2 : list B) : list (A*B) := zip (cons h1 t1) (cons h2 t2) := (h1,h2) :: zip t1 t2; zip _ _ := nil. Equations zip2 {A} {B} (l1 : list A) (l2 : list B) : list (A*B) by struct l2 := zip2 (cons h1 t1) (cons h2 t2) := (h1,h2) :: zip2 t1 t2; zip2 _ _ := nil. Print zip. Print zip2.Coq-Equations-1.3.1-8.20/test-suite/issues/issue74.v000066400000000000000000000016311463127417400217160ustar00rootroot00000000000000From Coq.Lists Require Import List. From Equations Require Import Equations. Import ListNotations. Inductive foo: Set := | Foo1 : list foo -> foo | Foo2 : list foo -> foo. Equations f (x: foo) : nat := { f (Foo1 l) := aux1 l; f (Foo2 l) := aux2 l } where aux1 (l : list foo) : nat := { aux1 [] := 1; aux1 (cons hd tl) := f hd + aux1 tl + aux2 tl } where aux2 (l : list foo) : nat := { aux2 [] := 1; aux2 (cons hd tl) := f hd + aux1 tl + aux2 tl }. Definition check := f_elim. Module Three. Equations f (x: foo) : nat := { f (Foo1 l):= aux1 l; f (Foo2 l) := aux2 l } where aux1 (l : list foo) : nat := { aux1 [] := 1; aux1 (cons hd tl) := f hd + aux1 tl + aux2 tl } where aux2 (l : list foo) : nat := { aux2 [] := 1; aux2 (cons hd tl) := f hd + aux2 tl } where aux3 (l : list foo) : nat := { aux3 [] := 1; aux3 (cons hd tl) := f hd + aux3 tl }. Definition check := f_elim. End Three. Coq-Equations-1.3.1-8.20/test-suite/issues/issue75.v000066400000000000000000000017141463127417400217210ustar00rootroot00000000000000Require Import Equations.Prop.Equations. Set Program Mode. Definition ifthenelse b A (e1: true = b -> A) (e2: false = b -> A): A. Admitted. Definition List (T: Type): Type. Admitted. Definition isCons (T: Type) (src: List T): bool. Admitted. Definition Cons_type (T: Type): Type := {src: List T | (isCons T src = true)}. Definition head (T: Type) (src: Cons_type T): T. Admitted. Definition tail (T: Type) (src: Cons_type T): List T. Admitted. Definition f_type (T R: Type) (l: List T): Type := R -> (T -> R). Definition foldLeft_type (T R: Type) (l: List T): Type := R. Axiom cheat : forall {A}, A. Equations? foldLeft (T R: Type) (l: List T) (z: R) (f: f_type T R l): foldLeft_type T R l by wf 0 lt := foldLeft T R l z f := ifthenelse (isCons _ l) R (fun _ => foldLeft T R (tail T l) (f z (head T l)) f) (fun _ => z). Proof. apply cheat. Timeout 10 Defined. Definition check := foldLeft_unfold_eq. Definition check' := foldLeft_elim.Coq-Equations-1.3.1-8.20/test-suite/issues/issue77.v000066400000000000000000000013441463127417400217220ustar00rootroot00000000000000From Equations Require Import Equations. Require Vector. Notation vector := Vector.t. Arguments Vector.nil {A}. Arguments Vector.cons {A} _ {n}. Derive Signature for Vector.t. Fail Equations test {X n} (v : vector X (S n)) : vector X 0 := { test Vector.cons := Vector.nil }. 
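(* Editor's note (descriptive comment added): the Fail command above and test1/test3 below
   are rejected, presumably because their constructor patterns give Vector.cons the wrong
   number of explicit arguments (after the Arguments directive only the head element and
   the tail are explicit); test2 and test4 spell the pattern correctly and are accepted. *)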
Fail Equations test1 {X n} (v : vector X (S n)) : vector X n := { test1 (Vector.cons a) := a }. Equations test2 {X n} (v : vector X (S n)) : vector X n := { test2 (Vector.cons x a) := a }. Fail Equations test3 {X n} (v : vector X (S n)) : vector X n := { test3 (Vector.cons X n a) := a }. Equations test4 {X n} (v : vector X (S n)) : vector X n := { test4 (@Vector.cons _ x n a) := a }.Coq-Equations-1.3.1-8.20/test-suite/issues/issue79.v000066400000000000000000000020041463127417400217160ustar00rootroot00000000000000From Coq.Lists Require Import List. From Equations Require Import Equations. (* This type is from VST: https://github.com/PrincetonUniversity/VST/blob/v2.1/floyd/compact_prod_sum.v#L6 *) Fixpoint compact_prod (T: list Type): Type := match T with | nil => unit | t :: nil => t | t :: T0 => (t * compact_prod T0)%type end. (* The rest is a nonsensical, just to give a minimalistic reproducible example *) Inductive foo := | Nat : foo | List : foo. Definition foo_type (f:foo) : Type := match f with | Nat => nat | List => list unit end. Equations num (f:foo) (val:foo_type f) : nat := num Nat val := val; num List val := length val. Equations sum (fs : list foo) (val: compact_prod (map foo_type fs)) : nat by struct fs := { sum nil _ := 0; sum (cons f tl) val := sum_aux f tl val } where sum_aux (f:foo) (fs : list foo) (val: compact_prod (map foo_type (f::fs))) : nat by struct fs := { sum_aux f nil val := num f val; sum_aux f (cons hd tl) val := num f (fst val) + sum_aux hd tl (snd val)}. Coq-Equations-1.3.1-8.20/test-suite/issues/issue7_extr.v000066400000000000000000000005031463127417400226710ustar00rootroot00000000000000Require Import Equations.Equations Utf8. Add Rec LoadPath ".." as Top. Require Import issue7. Extraction Inline apply_noConfusion. Extraction Inline simplify_ind_pack. Extraction Inline simplify_ind_pack_inv. Extraction Inline eq_simplification_sigma1_dep_dep. Extraction Inline Equations.EqDec.K_dec. Extraction myComp.Coq-Equations-1.3.1-8.20/test-suite/issues/issue8.v000066400000000000000000000066201463127417400216360ustar00rootroot00000000000000Require Import Utf8 Program. Require Import Equations.Prop.Equations. Open Scope equations_scope. Set Warnings "-notation-overridden". Notation " '{' x : A & y } " := (@sigma A (fun x : A => y)%type) : type_scope. Inductive TupleT : nat -> Type := nilT : TupleT 0 | consT {n} A : (A -> TupleT n) -> TupleT (S n). Derive Signature NoConfusion NoConfusionHom for TupleT. Inductive Tuple : forall n, TupleT n -> Type := nil : Tuple _ nilT | cons {n A} (x : A) (F : A -> TupleT n) : Tuple _ (F x) -> Tuple _ (consT A F). Derive Signature NoConfusion NoConfusionHom for Tuple. Inductive TupleMap@{i j} : forall n, TupleT n -> TupleT n -> Type@{j} := tmNil : TupleMap _ nilT nilT | tmCons {n} {A B : Type@{i}} (F : A -> TupleT n) (G : B -> TupleT n) : (forall x, sigT (TupleMap _ (F x) ∘ G)) -> TupleMap _ (consT A F) (consT B G). Derive Signature for TupleMap. #[export] Program Instance TupleMap_depelim n T U : DependentEliminationPackage (TupleMap n T U) := { elim_type := ∀ (P : forall n t t0, TupleMap n t t0 -> Type), P _ _ _ tmNil -> (∀ n A B (F : A -> TupleT n) (G : B -> TupleT n) (s : ∀ x, sigT (TupleMap _ (F x) ∘ G)) (r : ∀ x, P _ _ _ (projT2 (s x))), P _ _ _ (tmCons F G s)) -> ∀ (tm : TupleMap n T U), P _ _ _ tm }. Next Obligation. revert n T U tm. fix IH 4. intros. Ltac destruct' x := destruct x. on_last_hyp destruct'; [ | apply X0 ]; auto. Defined. (* Doesn't know how to deal with the nested TupleMap Derive Subterm for TupleMap. 
*) Inductive TupleMap_direct_subterm : ∀ (n : nat) (H H0 : TupleT n) (n0 : nat) (H1 H2 : TupleT n0), TupleMap n H H0 → TupleMap n0 H1 H2 → Prop := TupleMap_direct_subterm_0_0 : ∀ (n : nat) (H H0 : TupleT n) (n0 : nat) (H1 H2 : TupleT n0) (n1 : nat) (H3 H4 : TupleT n1) (x : TupleMap n H H0) (y : TupleMap n0 H1 H2) (z : TupleMap n1 H3 H4), TupleMap_direct_subterm n H H0 n0 H1 H2 x y → TupleMap_direct_subterm n0 H1 H2 n1 H3 H4 y z → TupleMap_direct_subterm n H H0 n1 H3 H4 x z | TupleMap_direct_subterm_1_1 : ∀ n {A B} (F : A -> TupleT n) (G : B -> TupleT n) (H : forall x, sigT (TupleMap _ (F x) ∘ G)) (x : A), TupleMap_direct_subterm _ _ (G (projT1 (H _))) _ _ _ (projT2 (H x)) (tmCons _ _ H). Definition TupleMap_subterm := λ x y : {index : {n : nat & sigma (λ _ : TupleT n, TupleT n)} & TupleMap (pr1 index) (pr1 (pr2 index)) (pr2 (pr2 index))}, TupleMap_direct_subterm (pr1 (pr1 x)) (pr1 (pr2 (pr1 x))) (pr2 (pr2 (pr1 x))) (pr1 (pr1 y)) (pr1 (pr2 (pr1 y))) (pr2 (pr2 (pr1 y))) (pr2 x) (pr2 y). (* Program Instance WellFounded_TupleMap_subterm : WellFounded TupleMap_subterm. *) (* Solve All Obligations with wf_subterm. *) Equations myComp {n} {B C : TupleT n} (tm1 : TupleMap _ B C) {A : TupleT n} (tm2 : TupleMap _ A B) : TupleMap _ A C := myComp tmNil tmNil := tmNil; myComp (tmCons ?(G) H g) (tmCons F G f) := tmCons _ _ (fun x => existT (fun y => TupleMap _ _ (_ y)) (projT1 (g (projT1 (f x)))) (myComp (projT2 (g (projT1 (f x)))) (projT2 (f x)))).Coq-Equations-1.3.1-8.20/test-suite/issues/issue81.v000066400000000000000000000004001463127417400217050ustar00rootroot00000000000000Require Import Coq.Lists.List. Import ListNotations. From Equations Require Import Equations. Equations app {A} (l l' : list A) : list A := app [] l' := l' ; (* <- here notations work *) app (a :: l) l' := a :: (app l l').Coq-Equations-1.3.1-8.20/test-suite/issues/issue82.v000066400000000000000000000002351463127417400217140ustar00rootroot00000000000000From Coq Require Import List. From Equations Require Import Equations. Equations app {A} : list A -> list A -> list A := app xs ys := fold_right cons ys xs.Coq-Equations-1.3.1-8.20/test-suite/issues/issue83.v000066400000000000000000000004761463127417400217240ustar00rootroot00000000000000From Equations Require Import Equations. Section CookingKillsOpaquenessForReduction. Equations id {A} (x : A) : A := id x := x. (* id is basically transparent but considered opaque for reduction *) End CookingKillsOpaquenessForReduction. Definition foo : @id = fun A x => x. Proof. Fail unfold id. reflexivity. Qed. Coq-Equations-1.3.1-8.20/test-suite/issues/issue84.v000066400000000000000000000017761463127417400217310ustar00rootroot00000000000000From Coq.Lists Require Import List. From Equations Require Import Equations. (* This type is from VST: https://github.com/PrincetonUniversity/VST/blob/v2.1/floyd/compact_prod_sum.v#L6 *) Fixpoint compact_prod (T: list Type): Type := match T with | nil => unit | t :: nil => t | t :: T0 => (t * compact_prod T0)%type end. (* The rest is a nonsensical, just to give a minimalistic reproducible example *) Inductive foo := | Nat : foo | List : list foo -> foo. Fixpoint foo_type (f:foo) : Type := match f with | Nat => nat | List fs => compact_prod (map foo_type fs) end. 
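(* For instance, [foo_type (List (cons Nat (cons Nat nil)))] should unfold to [compact_prod (cons nat (cons nat nil))], i.e. [(nat * nat)%type]. As in the related issue files, [num] below takes [val] through its return type rather than as a plain argument, apparently so that the type of [val] can be refined by the pattern on [f]. *)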
Equations num (f:foo) : forall (val:foo_type f), nat := { num Nat := fun val => val; num (List nil) := fun val => 0; num (List (cons hd tl)) := fun val => sum hd (num hd) tl val } where sum (f:foo) (numf: (foo_type f -> nat)) (fs : list foo) (val: compact_prod (map foo_type (f::fs))) : nat by struct fs := { sum f numf nil val := numf val; sum f numf (cons hd tl) val := numf (fst val) + sum hd (num hd) tl (snd val)}.Coq-Equations-1.3.1-8.20/test-suite/issues/issue85.v000066400000000000000000000020151463127417400217150ustar00rootroot00000000000000From Coq.Lists Require Import List. From Equations Require Import Equations. (* This type is from VST: https://github.com/PrincetonUniversity/VST/blob/v2.1/floyd/compact_prod_sum.v#L6 *) Fixpoint compact_prod (T: list Type): Type := match T with | nil => unit | t :: nil => t | t :: T0 => (t * compact_prod T0)%type end. (* The rest is a nonsensical, just to give a minimalistic reproducible example *) Inductive foo := | Nat : foo | List : list foo -> foo. Fixpoint foo_type (f:foo) : Type := match f with | Nat => nat | List fs => compact_prod (map foo_type fs) end. Equations num (f:foo) : forall (val:foo_type f), nat := { num Nat := fun val => val; num (List nil) := fun val => 0; num (List (cons hd tl)) := fun val => sum hd (num hd) tl val } where sum (f:foo) (numf: (foo_type f -> nat)) (fs : list foo) (val: compact_prod (map foo_type (f::fs))) : nat by struct fs := { sum f numf nil val := numf val; sum f numf (cons hd tl) val := numf (fst val) + sum hd (num hd) tl (snd val)}.Coq-Equations-1.3.1-8.20/test-suite/issues/issue91.v000066400000000000000000000005321463127417400217140ustar00rootroot00000000000000From Coq Require Import Vector. From Equations Require Import Equations. Arguments Vector.nil {A}. Arguments Vector.cons {A} _ {n}. Equations silly_replicate {A} (n : nat) (x : A) : Vector.t A n := silly_replicate O _ := nil; silly_replicate (S n') x with (silly_replicate n' x) := { | nil => cons x nil; | cons h tl => cons x (cons h tl) }.Coq-Equations-1.3.1-8.20/test-suite/issues/issue93.v000066400000000000000000000023641463127417400217230ustar00rootroot00000000000000From Coq.Lists Require Import List. From Equations Require Import Equations. (* This type is from VST: https://github.com/PrincetonUniversity/VST/blob/v2.1/floyd/compact_prod_sum.v#L6 *) Fixpoint compact_prod (T: list Type): Type := match T with | nil => unit | t :: nil => t | t :: T0 => (t * compact_prod T0)%type end. (* The rest is a nonsensical, just to give a minimalistic reproducible example *) Inductive foo := | Nat : foo -> nat -> foo | List : list foo -> foo. Equations foo_type (ft:foo) : Type := foo_type (Nat f _) := foo_type f; foo_type (List fs) := compact_prod (List.map foo_type fs). Transparent foo_type. (* val was moved into the result type, rather than being an argument, to work around issues #73 and #85 *) Equations sum (fx:foo) : forall (val:foo_type fx), nat := { sum (Nat f _) := fun val => sum f val; sum (List ff) := fun val => sum_list ff val } where sum_list (fs : list foo) (vval: compact_prod (map foo_type fs)) : nat := { sum_list nil vval := 0; (* The "with clause" below is there to work around issue #78 *) sum_list (cons hd tl) val1 with fun val => sum_list tl val => { sum_list (cons hd nil) val1 _ := sum hd val1; sum_list (cons hd _) val1 sumtl := sum hd (fst val1) + sumtl (snd val1)}}. Coq-Equations-1.3.1-8.20/test-suite/issues/issue95_1.v000066400000000000000000000005131463127417400221370ustar00rootroot00000000000000Require Import Equations.Prop.Equations. 
Inductive type: Set := | T_bool: type | T_prod: type -> type -> type. Axiom wf : 0 < 0. Equations? transport (t: nat) (T: type): Prop by wf 0 lt := transport t T_bool := True; transport t (T_prod U V) := transport t U /\ transport t V. Proof. all: apply wf. Defined. Coq-Equations-1.3.1-8.20/test-suite/issues/issue95_2.v000066400000000000000000000004471463127417400221460ustar00rootroot00000000000000Require Import TestSuite.issues.issue95_1. Require Import Lia. Require Import Equations.Prop.Equations. Lemma l: forall T t, transport t T -> forall t', transport t' T. Proof. induction T; intuition auto; try lia. - Timeout 1 simp transport in *. intuition auto with *. Qed.Coq-Equations-1.3.1-8.20/test-suite/issues/issue96.v000066400000000000000000000005271463127417400217250ustar00rootroot00000000000000Require Import Equations.Prop.Equations. Inductive type: Set := | T_bool: type | T_prod: type -> type -> type. Axiom cheat : 0 < 0. Equations? transport (t: nat) (T: type): Prop by wf 0 lt := transport t T_bool := True; transport t (T_prod U V) := transport t U /\ transport t V. Proof. apply cheat. apply cheat. Defined. Coq-Equations-1.3.1-8.20/test-suite/issues/issue98.v000066400000000000000000000034421463127417400217260ustar00rootroot00000000000000From Coq.Lists Require Import List. From Equations Require Import Equations. (* This type is from VST: https://github.com/PrincetonUniversity/VST/blob/v2.1/floyd/compact_prod_sum.v#L6 *) Fixpoint compact_prod (T: list Type): Type := match T with | nil => unit | t :: nil => t | t :: T0 => (t * compact_prod T0)%type end. (* The rest is a nonsensical, just to give a minimalistic reproducible example *) Inductive Foo := | Foo1 : list Foo -> Foo | Foo2 : list Foo -> Foo. Equations foo_type (t : Foo) : Type := foo_type (Foo1 fs) := compact_prod (List.map foo_type fs); foo_type (Foo2 fs) := compact_prod (List.map foo_type fs). Transparent foo_type. (* Moving val nato the return type, rather than having it as an argument might be unnecessary if https://github.com/mattam82/Coq-Equations/issues/73 was fixed *) (* Set Equations Debug. *) Equations do_foo (t : Foo) : forall (val : foo_type t), nat := { do_foo (Foo1 fs) := fun val => do_foo1 fs val; do_foo (Foo2 fs) := fun val => do_foo2 fs val } where do_foo1 (fs:list Foo) : forall (val : compact_prod (map foo_type fs)), nat (* by struct fs *) := { do_foo1 nil := fun val => 0; (* do_foo1 (cons hd nil) := fun val => do_foo hd val; *) (* do_foo1 (cons hd tl) := fun val => 0 } *) (* Attempting to work around https://github.com/mattam82/Coq-Equations/issues/78 *) do_foo1 (cons hd tl) with (fun val => do_foo1 tl val) => { do_foo1 (cons hd nil) _ := fun val => do_foo hd val; do_foo1 (cons hd _) do_foo_tl := fun val => (do_foo hd (fst val)) + (do_foo_tl (snd val))}} where do_foo2 (fs : list Foo) : forall val : compact_prod (List.map foo_type fs), nat by struct fs := { do_foo2 nil := fun val => 0; do_foo2 (cons var nil) := fun val => do_foo var val; do_foo2 _ := fun val => 0 }. Coq-Equations-1.3.1-8.20/test-suite/issues/issue99.v000066400000000000000000000024731463127417400217320ustar00rootroot00000000000000From Coq.Lists Require Import List. From Equations Require Import Equations. (* This type is from VST: https://github.com/PrincetonUniversity/VST/blob/v2.1/floyd/compact_prod_sum.v#L6 *) Fixpoint compact_prod (T: list Type): Type := match T with | nil => unit | t :: nil => t | t :: T0 => (t * compact_prod T0)%type end. 
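(* A small illustrative check of how [compact_prod] computes on a concrete list of types; the auxiliary name [compact_prod_example] is arbitrary. *)
Definition compact_prod_example :
  compact_prod (cons nat (cons bool nil)) = prod nat bool := eq_refl.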
(* The rest is a nonsensical, just to give a minimalistic reproducible example *) Inductive Foo := | Foo1 : list Foo -> Foo | Foo2 : list Foo -> Foo. Equations foo_type (t : Foo) : Type := foo_type (Foo1 fs) := compact_prod (List.map foo_type fs); foo_type (Foo2 fs) := compact_prod (List.map foo_type fs). (* Moving val nato the return type, rather than having it as an argument might be unnecessary if https://github.com/mattam82/Coq-Equations/issues/73 was fixed *) Equations do_foo (t : Foo) : forall (val : foo_type t), nat := { do_foo (Foo1 fs) := fun val => do_foo1 fs val; do_foo (Foo2 fs) := fun val => do_foo2 fs val } where do_foo1 (fs:list Foo) : forall (val : compact_prod (map foo_type fs)), nat := { do_foo1 _ := fun val => 0} where do_foo2 (fs : list Foo) : forall val : compact_prod (List.map foo_type fs), nat by struct fs := { do_foo2 nil := fun val => 0; do_foo2 (cons var nil) := fun val => do_foo var val; do_foo2 _ := fun val => 0 }. Coq-Equations-1.3.1-8.20/test-suite/issues/issue_mutual.v000066400000000000000000000262001463127417400231310ustar00rootroot00000000000000From Equations Require Import Equations. From Coq Require Import Program Arith Compare_dec List. Import ListNotations. (* In environment eos := CoreTactics.the_end_of_the_section : CoreTactics.end_of_section H : forall z : exp, c_exp_tc_graph z (c_exp_tc z) c_exps : forall zs : list exp, c_exps_graph zs (c_exps zs) zs : list exp a : exp l : list exp c_exp_notc : forall z : exp, c_exp_notc_graph z (c_exp_notc z) z : exp l0 : list exp Recursive call to c_exps has principal argument equal to "l0" instead of "l". Recursive definition is: "fun zs : list exp => (fun zs0 : list exp => match zs0 as l return (let H := CoreTactics.block in let H0 := CoreTactics.block in c_exps_graph l (issue_mutual.c_exps l)) with | [] => let _H := CoreTactics.block in eq_rec_r (fun n : nat => c_exps_graph [] n) c_exps_graph_equation_1 c_exps_equation_1 | a :: l => (fun (e : exp) (zs1 : list exp) => let _H := CoreTactics.block in eq_rec_r (fun n : nat => c_exps_graph (e :: zs1) n) (c_exps_graph_equation_2 e zs1 (let fix c_exp_notc (z : exp) : c_exp_notc_graph z (c_exp_notc z) := (fun z0 : exp => match z0 as e0 return (let H := CoreTactics.block in let H0 := CoreTactics.block in c_exp_notc_graph e0 (issue_mutual.c_exp_notc e0)) with | Const n => (fun n0 : nat => let _H0 := CoreTactics.block in eq_rec_r (fun n1 : nat => c_exp_notc_graph (Const n0) n1) (c_exp_notc_graph_equation_1 n0) (c_exp_notc_equation_1 n0)) n | Var n => (fun n0 : nat => let _H0 := CoreTactics.block in eq_rec_r (fun n1 : nat => c_exp_notc_graph (Var n0) n1) (c_exp_notc_graph_equation_2 n0) (c_exp_notc_equation_2 n0)) n | Op l0 => (fun l1 : list exp => let _H0 := CoreTactics.block in eq_rec_r (fun n : nat => c_exp_notc_graph (Op l1) n) (c_exp_notc_graph_equation_3 l1 (c_exps l1)) (c_exp_notc_equation_3 l1)) l0 | If l0 e0 e1 => (fun (l1 : list exp) (z1 z2 : exp) => let _H0 := CoreTactics.block in eq_rec_r (fun n : nat => c_exp_notc_graph (If l1 z1 z2) n) (c_exp_notc_graph_equation_4 l1 z1 z2 (c_exps l1) (let r1 := issue_mutual.c_exps l1 in c_exp_notc z2) (let r1 := issue_mutual.c_exps l1 in let r2 := issue_mutual.c_exp_notc z2 in c_exp_notc z1)) (c_exp_notc_equation_4 l1 z1 z2)) l0 e0 e1 | Let n e0 e1 => (fun (n0 : nat) (z1 z2 : exp) => let _H0 := CoreTactics.block in eq_rec_r (fun n1 : nat => c_exp_notc_graph (Let n0 z1 z2) n1) (c_exp_notc_graph_equation_5 n0 z1 z2 (c_exp_notc z1) (let r1 := issue_mutual.c_exp_notc z1 in c_exp_notc z2)) (c_exp_notc_equation_5 n0 z1 z2)) n e0 
e1 | Call n l0 => (fun (n0 : nat) (l1 : list exp) => let _H0 := CoreTactics.block in eq_rec_r (fun n1 : nat => c_exp_notc_graph (Call n0 l1) n1) (c_exp_notc_graph_equation_6 n0 l1 (c_exps l1)) (c_exp_notc_equation_6 n0 l1)) n l0 end) z in c_exp_notc e) (let res1 := c_exp_notc e in c_exps zs1)) (c_exps_equation_2 e zs1)) a l end) zs". This will become an error in the future [solve_obligation_error,tactics]coqtop Functional induction principle could not be p olve Obligations tactic returned error: Recursive definition of c_exps is ill-formed. In environment eos := CoreTactics.the_end_of_the_section : CoreTactics.end_of_section H : forall z : exp, c_exp_tc_graph z (c_exp_tc z) c_exps : forall zs : list exp, c_exps_graph zs (c_exps zs) zs : list exp a : exp l : list exp c_exp_notc : forall z : exp, c_exp_notc_graph z (c_exp_notc z) z : exp l0 : list exp Recursive call to c_exps has principal argument equal to "l0" instead of "l". Recursive definition is: "fun zs : list exp => (fun zs0 : list exp => match zs0 as l return (let H := CoreTactics.block in let H0 := CoreTactics.block in c_exps_graph l (issue_mutual.c_exps l)) with | [] => let _H := CoreTactics.block in c_exps_graph_equation_1 | a :: l => (fun (e : exp) (zs1 : list exp) => let _H := CoreTactics.block in c_exps_graph_equation_2 e zs1 (let fix c_exp_notc (z : exp) : c_exp_notc_graph z (c_exp_notc z) := (fun z0 : exp => match z0 as e0 return (let H := CoreTactics.block in let H0 := CoreTactics.block in c_exp_notc_graph e0 (issue_mutual.c_exp_notc e0)) with | Const n => (fun n0 : nat => let _H0 := CoreTactics.block in c_exp_notc_graph_equation_1 n0) n | Var n => (fun n0 : nat => let _H0 := CoreTactics.block in c_exp_notc_graph_equation_2 n0) n | Op l0 => (fun l1 : list exp => let _H0 := CoreTactics.block in c_exp_notc_graph_equation_3 l1 (c_exps l1)) l0 | If l0 e0 e1 => (fun (l1 : list exp) (z1 z2 : exp) => let _H0 := CoreTactics.block in c_exp_notc_graph_equation_4 l1 z1 z2 (c_exps l1) (let r1 := issue_mutual.c_exps l1 in c_exp_notc z2) (let r1 := issue_mutual.c_exps l1 in let r2 := issue_mutual.c_exp_notc z2 in c_exp_notc z1)) l0 e0 e1 | Let n e0 e1 => (fun (n0 : nat) (z1 z2 : exp) => let _H0 := CoreTactics.block in c_exp_notc_graph_equation_5 n0 z1 z2 (c_exp_notc z1) (let r1 := issue_mutual.c_exp_notc z1 in c_exp_notc z2)) n e0 e1 | Call n l0 => (fun (n0 : nat) (l1 : list exp) => let _H0 := CoreTactics.block in c_exp_notc_graph_equation_6 n0 l1 (c_exps l1)) n l0 end) z in c_exp_notc e) (let res1 := c_exp_notc e in c_exps zs1)) a l end) zs". This will become an error in the future [solve_obligation_error,tactics]coqtop *) Set Equations Transparent. Inductive exp := | If : list exp -> exp -> exp. Equations c_exp_tc (z: exp) : nat by struct z := { c_exp_tc (If xs x) := let r1 := c_exps xs in let r2 := c_exp_tc x in r1 } where c_exp_notc (z: exp) : nat by struct z := { c_exp_notc (If xs x) := let r1 := c_exps xs in let r2 := c_exp_notc x in r1 } where c_exps (zs: list exp) : nat by struct zs := { c_exps [] := 0 ; c_exps (x :: xs) := let res2 := c_exps xs in let res1 := c_exp_notc x in res1 + res2 }. Transparent c_exp_tc c_exp_notc c_exps. Obligation Tactic := idtac. Equations? 
(noind) gc_exp_tc (z: exp) : c_exp_tc_graph z (c_exp_tc z) := { gc_exp_tc (If xs y) := let r1 := gc_exps xs in let r2 := gc_exp_tc y in _ } where gc_exp_notc (z: exp) : c_exp_notc_graph z (c_exp_notc z) by struct z := { gc_exp_notc (If xs y) := let r1 := c_exps xs in let r2 := gc_exp_notc y in _ } where gc_exps (zs: list exp) : c_exps_graph zs (c_exps zs) by struct zs := { gc_exps [] := _ ; gc_exps (x :: xs) := let res2 := gc_exps xs in let res1 := gc_exp_notc x in _ }. Proof. all:autorewrite with c_exp_tc; simpl; constructor; try assumption. apply gc_exps. Defined. (* zs0 as l return (let H := CoreTactics.block in let H0 := CoreTactics.block in c_exps_graph l (issue_mutual.c_exps l)) with | [] => let _H := CoreTactics.block in c_exps_graph_equation_1 | a :: l => (fun (e : exp) (zs1 : list exp) => let _H := CoreTactics.block in c_exps_graph_equation_2 e zs1 (c_exps zs1) (let res2 := issue_mutual.c_exps zs1 in let fix c_exp_notc (z : exp) : c_exp_notc_graph z (c_exp_notc z) := (fun z0 : exp => match z0 as e0 return (let H := CoreTactics.block in let H0 := CoreTactics.block in c_exp_notc_graph e0 (issue_mutual.c_exp_notc e0)) with | If l0 => (fun l1 : list exp => let _H0 := CoreTactics.block in c_exp_notc_graph_equation_1 l1 (c_exps l1)) l0 end) z in c_exp_notc e)) a l end) zs *) Fixpoint c_exps (zs : list exp) : c_exps_graph zs (issue_mutual.c_exps zs) := match zs as l return (c_exps_graph l (issue_mutual.c_exps l)) with | [] => c_exps_graph_equation_1 | e :: zs1 => c_exps_graph_equation_2 e zs1 (c_exps zs1) (let fix c_exp_notc (z : exp) : c_exp_notc_graph z (c_exp_notc z) := match z as e0 return (c_exp_notc_graph e0 (issue_mutual.c_exp_notc e0)) with | If l0 => c_exp_notc_graph_equation_1 l0 (c_exps l0) end in c_exp_notc e) end. Equations c_exp_tc (z: exp) : nat by struct z := { c_exp_tc (Let n x y) := let r1 := c_exp_notc x in let r2 := c_exp_tc y in r1 + r2 ; c_exp_tc (If xs y z) := let r1 := c_exps xs in let r2 := c_exp_tc z in let r3 := c_exp_tc y in r1 + r2 + r3 ; c_exp_tc (Call n xs) := c_exps xs ; c_exp_tc z := c_exp_notc z } where c_exp_notc (z: exp) : nat by struct z := { c_exp_notc (Let n x y) := let r1 := c_exp_notc x in let r2 := c_exp_notc y in r1 + r2 ; c_exp_notc (If xs y z) := let r1 := c_exps xs in let r2 := c_exp_notc z in let r3 := c_exp_notc y in r1 + r2 + r3 ; c_exp_notc (Call n xs) := c_exps xs ; c_exp_notc (Const n) := n ; c_exp_notc (Var n) := n ; c_exp_notc (Op xs) := c_exps xs } where c_exps (zs: list exp) : nat by struct zs := { c_exps [] := 0 ; c_exps (x :: xs) := let res2 := c_exps xs in let res1 := c_exp_notc x in res1 + res2 }. Coq-Equations-1.3.1-8.20/test-suite/le.v000066400000000000000000000024331463127417400175010ustar00rootroot00000000000000From Equations Require Import Equations. Inductive le : nat -> nat -> Prop := | le_0 n : le 0 n | le_S n m : le n m -> le (S n) (S m). Inductive le' : nat -> nat -> Prop := | le'_0 n : le' n n | le'_S n m : le' n m -> le' n (S m). Derive Signature for le le'. Scheme le_dep := Induction for le Sort Prop. Scheme le'_dep := Induction for le' Sort Prop. Lemma le_irrel n m (p q : le n m) : p = q. Proof. revert p. induction p using le_dep. depelim q. reflexivity. depelim q. apply f_equal, IHp. Defined. Lemma leprf_nocycle n : le (S n) n -> False. Proof. intros H. depind H. auto. Qed. Derive Subterm for nat. Set Equations With UIP. Lemma le_refl n : le n n. Proof. induction n; constructor; auto. Qed. Lemma le_n_Sm n m : le n m -> le n (S m). Proof. induction 1; constructor; auto. Qed. Lemma le'_le n m : le' n m -> le n m. 
Proof. induction 1. apply le_refl. now apply le_n_Sm. Qed. Lemma le'prf_nocycle n : le' (S n) n -> False. Proof. intros H%le'_le. now apply leprf_nocycle in H. Qed. Lemma le'_irrel n m (p q : le' n m) : p = q. Proof. revert q. induction p using le'_dep. intros q. depelim q. reflexivity. exfalso. now eapply le'prf_nocycle in q. intros q. depelim q. exfalso. clear IHp. now eapply le'prf_nocycle in p. f_equal. apply IHp. Defined. Coq-Equations-1.3.1-8.20/test-suite/letred.v000066400000000000000000000003721463127417400203600ustar00rootroot00000000000000From Equations Require Import Equations. Equations foo (n : nat) : nat := foo x := let x := 0 in x. Goal forall x, foo x = let x := 0 in x. Proof. intros x. rewrite foo_equation_1. match goal with |- ?x = ?y => constr_eq x y end. Abort.Coq-Equations-1.3.1-8.20/test-suite/local_where.v000066400000000000000000000045511463127417400213700ustar00rootroot00000000000000Require Import Program.Basics Program.Tactics. Require Import Equations.Prop.Equations. Equations foo (n : nat) : nat := foo n := n + k 0 where k (x : nat) : nat := { k 0 := 0 ; k (S n') := n }. Parameter f : nat -> (nat * nat). Equations foo' (n : nat) : nat := foo' 0 := 0; foo' (S n) := n + k (f 0) (0, 0) where k (x : nat * nat) (y : nat * nat) : nat := { k (x, y) (x', y') := x }. Parameter kont : ((nat * nat) -> nat) -> nat. Equations foo'' (n : nat) : nat := foo'' 0 := 0; foo'' (S n) := n + kont absw where absw : (nat * nat) -> nat := absw (x, y) := x. Equations foo3 (n : nat) : nat := foo3 0 := 0; foo3 (S n) := n + kont (λ{ | (x, y) := x }). Variant index : Set := i1 | i2. Derive NoConfusion for index. Inductive expr : index -> Set := | e1 : expr i1 | e2 {i} : expr i -> expr i2. Derive Signature NoConfusion NoConfusionHom for expr. Parameter kont' : forall x : index, (expr x -> nat) -> nat. Equations foo4 {i} (n : expr i) : nat := foo4 e1 := 0; foo4 (@e2 i e) := absw e where absw : forall {i}, expr i -> nat := absw e1 := 0; absw (e2 _) := 0. Equations foo5 {i} (n : expr i) : nat := foo5 e1 := 0; foo5 (@e2 i e) := absw e where absw : expr i -> nat := absw e1 := 0; absw (e2 e') := foo5 e'. Equations foo6 : nat := foo6 := 0. Equations foo7 : nat -> nat := foo7 x := if bla then 0 else 1 + bla' eq_refl where bla : bool := { bla := true } where bla' : bla = bla -> nat := { bla' H := 1 + if bla then 1 else 2 }. Equations foo8 : nat -> nat := { foo8 0 := 0; foo8 (S x) := if bla then 0 else 1 + bla' eq_refl + foo8 x where bla : bool := { bla := true } where bla' : bla = bla -> nat := { bla' H := 1 + if bla then 1 else 2 + foo8 x } where bla'' : nat := { bla'' := 3 + bla' eq_refl + baz 4 } } where baz : nat -> nat := baz 0 := 0; baz (S n) := baz n. Set Warnings "-notation-overridden". Notation "{ x : A & y }" := (@sigma A (fun x : A => y)%type) (x at level 99) : type_scope. Notation "{ x & y }" := (@sigma _ (fun x : _ => y)%type) (x at level 99) : type_scope. Notation "&( x , .. , y , z )" := (@sigmaI _ _ x .. (@sigmaI _ _ y z) ..) (right associativity, at level 4, format "&( x , .. , y , z )"). #[local] Obligation Tactic := idtac. Equations foo9 : { x: nat & x = 0 } := foo9 := &(lhs, rhs) where lhs : nat := { lhs := 0 } where rhs : lhs = 0 := { rhs := eq_refl }. Coq-Equations-1.3.1-8.20/test-suite/mfix.v000066400000000000000000000013021463127417400200360ustar00rootroot00000000000000Require Import Equations.Prop.Equations. Inductive term : Set := | Var (n : nat) | App (t : term) (l : term_list) with term_list : Set := | nilt : term_list | const : term -> term_list -> term_list. 
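(* [id_term] and [id_tlist] below compute the identity on this mutual syntax; the [Goal] at the end of the file seems to exercise the [mfix] tactic, building both graph proofs in one mutual fixpoint. *)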
Equations id_term (t : term) : term := { id_term (Var n) := Var n; id_term (App t l) := App (id_term t) (id_tlist l) } id_tlist (t : term_list) : term_list := { id_tlist nilt := nilt; id_tlist (const t tl) := const (id_term t) (id_tlist tl) }. Goal (forall t : term, id_term_graph t (id_term t)) * (forall t : term_list, id_tlist_graph t (id_tlist t)). split. Fail all:mfix f 1 1. all:mfix f1 f2 1 1. destruct t; constructor; simp id_term. destruct t; constructor; simp id_term. Defined. Coq-Equations-1.3.1-8.20/test-suite/mutrec.v000066400000000000000000000044671463127417400204110ustar00rootroot00000000000000Require Import Equations.Prop.Equations. Module two. Inductive term : Set := | Var (n : nat) | App (t : term) (l : term_list) with term_list : Set := | nilt : term_list | const : term -> term_list -> term_list. Equations id_term (t : term) : term := { id_term (Var n) := Var n; id_term (App t l) := App (id_term t) (id_tlist l) } with id_tlist (t : term_list) : term_list := { id_tlist nilt := nilt; id_tlist (const t tl) := const (id_term t) (id_tlist tl) }. Definition test := id_term_elim. End two. Module three. Inductive term : Set := | Var (n : nat) | App (t : term) (l : term_list) | App2 (t : term) (l : term_list') with term_list : Set := | nilt : term_list | const : term -> term_list -> term_list with term_list' : Set := | nilt' : term_list' | const' : term -> term_list' -> term_list'. Equations id_term (t : term) : term := { id_term (Var n) := Var n; id_term (App t l) := App (id_term t) (id_tlist l); id_term (App2 t l) := App (id_term t) (id_tlist' l) } with id_tlist (t : term_list) : term_list := { id_tlist nilt := nilt; id_tlist (const t tl) := const (id_term t) (id_tlist tl) } with id_tlist' (t : term_list') : term_list := { id_tlist' nilt' := nilt ; id_tlist' (const' t tl) := let t' := id_term t in let l' := id_tlist' tl in const t' l' }. Definition test := id_term_elim. End three. Module four. Inductive term : Set := | Var (n : nat) | App (t : term) (l : term_list) | App2 (t : term) (l : term_list') with term_list : Set := | nilt : term_list | const : term -> term_list -> term_list with term_list' : Set := | nilt' : term_list' | const' : term -> term_list' -> term_list'. Equations id_term (t : term) : term := { id_term (Var n) := Var n; id_term (App t l) := App (id_term t) (id_tlist l); id_term (App2 t l) := App (id_term t) (id_tlist' l) } with id_tlist (t : term_list) : term_list := { id_tlist nilt := nilt; id_tlist (const t tl) := const (id_term t) (id_tlist tl) } with id_tlist' (t : term_list') : term_list := { id_tlist' nilt' := nilt ; id_tlist' (const' t tl) := let t' := id_term t in let l' := id_tlist' tl in const t' l' } with bla (t : term) : term := { bla (Var n) := Var 0; bla (App t l) := App (bla t) (id_tlist l); bla (App2 t l) := App (bla t) (id_tlist' l) }. Definition test := id_term_elim. End four.Coq-Equations-1.3.1-8.20/test-suite/mutrec_enc.v000066400000000000000000000045071463127417400212310ustar00rootroot00000000000000Require Import Equations. Inductive foo : Set := | c0 : nat -> foo | c1 : bar -> bar -> foo (* | c3 : bar -> foo *) | c2 : foo -> foo with bar : Set := | d0 : nat -> bar | d1 : foo -> bar. 
(* Inductive foo_foo_subterm : foo -> foo -> Prop := *) (* | c1_sub0 b f g : *) (* foo_bar_subterm f b -> foo_foo_subterm f (c1 b g) *) (* | c1_sub1 b f g : foo_bar_subterm b g -> foo_foo_subterm b (c1 f g) *) (* | c2_sub x : foo_foo_subterm x (c2 x) *) (* with foo_bar_subterm : foo -> bar -> Prop := *) (* | d1_sub f : foo_bar_subterm f (d1 f) *) (* with bar_bar_subterm : bar -> bar -> Prop := *) (* | d1b b f : bar_foo_subterm b f -> bar_bar_subterm b (d1 f) *) (* with bar_foo_subterm : bar -> foo -> Prop := *) (* | c1b b q : bar_foo_subterm b (c1 b q) *) (* | c2b b q :bar_foo_subterm b (c1 q b). *) (* Scheme foo_foo_s := Induction for foo_foo_subterm Sort Prop *) (* with foo_bar_s := Induction for foo_bar_subterm Sort Prop *) (* with bar_bar_s := Induction for bar_bar_subterm Sort Prop *) (* with bar_foo_s := Induction for bar_foo_subterm Sort Prop. *) (* Combined Scheme foo_bar_subterm from foo_foo_s, foo_bar_s, bar_bar_s, bar_foo_s. *) Scheme foo_bar_mut := Induction for foo Sort Set with foo_bar_mut' := Induction for bar Sort Set. Equations fn (x : foo) : nat := fn (c0 x) := x; fn (c2 f) := fn f; (* fn (c3 b) := fnb b; *) fn (c1 b b') := fnb b + fnb b' where fnb (x : bar) : nat := fnb (d0 x) := x; fnb (d1 f) := fn f. Next Obligation. revert x. fix f 1. destruct x. constructor. Transparent fn. unfold fn. assert(forall x: bar, fn_ind_1 b b0 x (fn_obligation_1 fn b b0 x)). fix g 1. destruct x. simpl. constructor. constructor. apply f. constructor. apply H. apply H. simpl. constructor. apply f. Defined. Eval compute in fn. (* Combined Scheme foo_bar from foo_bar_mut, foo_bar_mut'. *) Section mutrec. Variables foo_bar_rec : foo -> nat. Variables foo_bar_rec' : bar -> nat. Equations foo_bar_fn (x : foo) : nat := foo_bar_fn (c0 x) := x; foo_bar_fn (c1 b b') := foo_bar_rec' b + foo_bar_rec' b'; foo_bar_fn (c2 f) := foo_bar_rec f. Equations foo_bar_fn' (x : bar) : nat := foo_bar_fn' (d0 x) := x; foo_bar_fn' (d1 b) := foo_bar_fn b. End mutrec. Definition foobarknot := fix F (x : foo) := foo_bar_fn F G x with G (x : bar) := foo_bar_fn' F G x for F. Coq-Equations-1.3.1-8.20/test-suite/nestedobls.v000066400000000000000000000033631463127417400212460ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Set Asymmetric Patterns. Set Implicit Arguments. Unset Strict Implicit. Require Import Arith. Require Import Lia. From Equations Require Import Equations. Require Import Wellfounded Relation_Definitions. Require Import Relation_Operators Lexicographic_Product Wf_nat. Unset Implicit Arguments. Require Import Program. Equations? test (n : nat) (pre : n >= 0 ) : { n' : nat | n' <= n } by wf n lt := test 0 p := exist _ 0 _; test (S n) p with test n _ => { | exist _ 0 _ := exist _ 0 _; | exist _ (S n') p' with test n' _ := { | exist _ k p'' := exist _ k _ } }. Proof. all:(auto with arith; lia). Defined. Module Bug. (* FIXME: shrink obligations so that they can apply during induction principle generation *) Equations? 
(noind) test' (n : { n : nat | n >= 0 }) : { n' : nat | n' <= `n } by wf (proj1_sig n) lt := test' (exist _ n p) with n := { | 0 := exist _ 0 _; | S n' with test' (exist _ n' _) => { | exist _ 0 _ := exist _ 0 _; | exist _ (S n'') p' with test' (exist _ n'' _) := { | exist _ k p'' := exist _ k _ } } }. Proof. all:(clear test'; unfold MR; simpl; auto with arith). lia. Defined. End Bug.Coq-Equations-1.3.1-8.20/test-suite/nestedrec.v000066400000000000000000000006531463127417400210570ustar00rootroot00000000000000Require Import Equations.Prop.Equations. Inductive term : Set := | Var (n : nat) | App (t : term) (l : list term). Equations id_term (t : term) : term := { id_term (Var n) := Var n; id_term (App t l) := App (id_term t) (id_tlist l) } where id_tlist (t : list term) : list term := { id_tlist nil := nil; id_tlist (cons t ts) := cons (id_term t) (id_tlist ts) }. Definition check := eq_refl : List.map id_term = id_tlist.Coq-Equations-1.3.1-8.20/test-suite/nestedrec2.v000066400000000000000000000020471463127417400211400ustar00rootroot00000000000000Require Import Equations.Prop.Equations. Require Import Arith. Require Import Compare_dec. Inductive term : Set := | Var (n : nat) | Lam (t : term) | App (t : term) (l : list term). Equations subst_var (k : nat) (u : term) (t : nat) : term := subst_var k u n with k ?= n => { | Eq => u; | Gt => Var n; | Lt => Var (pred n) }. Equations subst_term (k : nat) (u : term) (t : term) : term := { subst_term k u (Var n) => subst_var k u n; subst_term k u (Lam t) => Lam (subst_term (S k) u t); subst_term k u (App t l) => App (subst_term k u t) (subst_tlist k u l) } where subst_tlist (k : nat) (u : term) (t : list term) : list term := { subst_tlist k u nil := nil; subst_tlist k u (cons t ts) := cons (subst_term k u t) (subst_tlist k u ts) }. (* id_tlist t := List.map id_term t }. *) Lemma subst_subst k u t : subst_term k u t = subst_term k u t. Proof. revert k u t. refine (fst (subst_term_elim (fun k u t c => c = c) (fun k u l c => c = c) _ _ _ _ _)); trivial. Qed.Coq-Equations-1.3.1-8.20/test-suite/nestedwfrec.v000066400000000000000000000006371463127417400214160ustar00rootroot00000000000000From Equations Require Import Equations. Require Import Lia. Equations? f (n : nat) : nat by wf n lt := f 0 := 0; f (S k) := g k (le_n (S k)) where g (n' : nat) (H : n' < S k) : nat by wf n' lt := g 0 _ := 1; g (S n') H := f n' + g n' (PeanoNat.Nat.lt_le_incl (S n') (S k) H). Hint Extern 0 (_ < _) => lia : f. Proof. lia. Defined. Goal f 2 = 1. Proof. reflexivity. Defined. Definition f_e := f_elim.Coq-Equations-1.3.1-8.20/test-suite/nestedwhererec.v000066400000000000000000000025111463127417400221050ustar00rootroot00000000000000From Equations Require Import Equations. (* Bug with eqns *) (* Nested with equation not properly generated *) Module FullWf. Equations test2 (n : nat) (n' : nat) : unit by wf n lt := test2 0 _ := tt; test2 (S n) gs := aux gs where aux (n' : nat) : unit by wf n' lt := { aux 0 := tt; aux (S g) := with_new_candidate (Some g) where with_new_candidate : option nat -> unit := { | Some newg => tt; | None => aux g } }. End FullWf. Module FullStruct. Equations test2 (n : nat) (n' : nat) : unit by struct n := test2 0 _ := tt; test2 (S n) gs := aux gs where aux (n' : nat) : unit by struct n' := { aux 0 := tt; aux (S g) := with_new_candidate (Some g) where with_new_candidate : option nat -> unit := { | Some newg => tt; | None => aux g } }. End FullStruct. Module MixedWfStruct. 
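(* Presumably the same nested-where test once more, this time mixing well-founded recursion ([by wf n lt]) on the outer definition with structural recursion ([by struct n']) on the inner [aux]. *)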
Equations test2 (n : nat) (n' : nat) : unit by wf n lt := test2 0 _ := tt; test2 (S n) gs := aux gs where aux (n' : nat) : unit by struct n' := { aux 0 := tt; aux (S g) := with_new_candidate (Some g) where with_new_candidate : option nat -> unit := { | Some newg => tt; | None => aux g } }. End MixedWfStruct. Coq-Equations-1.3.1-8.20/test-suite/nestedwhererecwith.v000066400000000000000000000007151463127417400230050ustar00rootroot00000000000000From Equations Require Import Equations. Require Import List. Import ListNotations. Equations test {A} (n : nat) (gs : list A) : unit by wf n lt := test 0 _ := tt; test (S n) gs := aux gs [] where aux (l : list A) (acc : list A) : unit by wf (length l) lt := aux nil acc := tt; aux (g :: gs') acc with acc := { | nil => tt; | g' :: gs'' => aux gs' acc }. Coq-Equations-1.3.1-8.20/test-suite/nestfixnorec.v000066400000000000000000000210241463127417400216050ustar00rootroot00000000000000From Equations Require Import Equations. Set Keyed Unification. Require Import Coq.Lists.List. Import ListNotations. Inductive tree a := Node : a -> list (tree a) -> tree a. Equations elements {a} (t : tree a) : list a := { elements (Node x ts) => x :: list_elements ts } where list_elements {a} (l : list (tree a)) : list a := { list_elements nil := nil; list_elements (cons x l) := elements x ++ list_elements l }. Lemma list_elements_spec {A} (l : list (tree A)) : list_elements l = List.concat (List.map elements l). Proof. induction l; simp elements; trivial. rewrite IHl. simpl. auto. Qed. #[local] Hint Rewrite @list_elements_spec : elements. Class C a := { P : a -> bool }. (* Arguments P {_}. *) Definition list_P {a} (a_C : C a) : list a -> bool := existsb P. Definition list_C {a} (a_C : C a) : C (list a) := {| P := list_P a_C |}. (* Note that *) (* Eval cbn in fun a C => (P (list_C C)). *) (* evaluates to: fun a C => list_P C *) (* (* Works, using a local record *) *) (* Fixpoint tree_P1 {a} (a_C : C a) (t : tree a) : bool := *) (* let tree_C := Build_C _ (tree_P1 a_C) in *) (* let list_C' := Build_C _ (list_P tree_C) in *) (* match t with Node _ x ts => orb (P a_C x) (P list_C' ts) end. *) (* Definition tree_C {a} {a_C : C a} : C (tree a) := {| P := tree_P1 a_C |}. *) (* Works too, using list_P directly *) Fixpoint tree_P2 {a} (a_C : C a) (t : tree a) : bool := let tree_C := Build_C _ (tree_P2 a_C) in match t with Node _ x ts => orb (P x) (list_P tree_C ts) end. (* Does not work, using a globally defined record. Why not? *) (* Eval compute in (fun f => P (list_C f)). *) Fixpoint tree_P3 {a} (a_C : C a) (t : tree a) : bool := let tree_C := Build_C _ (tree_P3 a_C) in match t with Node _ x ts => orb (P x) (P (C:=list_C tree_C) ts) end. Module Works. Section tree_list. Context {a} (a_C : C a). Equations tree_P3 (t : tree a) : bool := { tree_P3 (Node x ts) => orb (P x) (list_P3 ts) } where list_P3 (l : list (tree a)) : bool := { list_P3 nil := false; list_P3 (cons l ls) := tree_P3 l || list_P3 ls }. Global Instance tree_C : C (tree a) := { P := tree_P3 }. Global Instance tree_list_C : C (list (tree a)) := { P := list_P3 }. End tree_list. Example check := (fun a (a_C : C a) => eq_refl : tree_list_C a_C = list_C (tree_C a_C)). End Works. Module Ideal. Section tree_list. Context {a} (a_C : C a). Equations tree_P3 (t : tree a) : bool := { tree_P3 (Node x ts) => orb (P x) (list_P3 ts) } where list_P3 (l : list (tree a)) : bool := { list_P3 l := existsb tree_P3 l }. Global Instance tree_C : C (tree a) := { P := tree_P3 }. 
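(* In this variant [list_P3] is just [existsb tree_P3], so the instance below should coincide definitionally with [list_C (tree_C a_C)]; the [check] example after the section appears to rely on exactly that. *)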
Global Instance tree_list_C : C (list (tree a)) := { P := list_P3 }. End tree_list. Example check := (fun a (a_C : C a) => eq_refl : tree_list_C a_C = list_C (tree_C a_C)). End Ideal. Set Equations Transparent. Equations myexistsb {A} (P : A -> bool) (l : list A) : bool := myexistsb P nil := false; myexistsb P (cons x xl) := orb (P x) (myexistsb P xl). Equations myexistsb_prop {A} (P : A -> Prop) (l : list A) : Prop := myexistsb_prop P nil := False; myexistsb_prop P (cons x xl) := P x \/ (myexistsb_prop P xl). Inductive reflect (P : Prop) : bool -> Prop := | reflectT : P -> reflect P true | reflectnT : ~ P -> reflect P false. Inductive myexists_graph {A} (p : A -> bool) (P : A -> Prop) (R : forall x, reflect (P x) (p x)) : forall (l : list A), bool -> Prop := myexists_graph_1 : myexists_graph p P R [] false | myexists_graph_2 t l b : P t -> myexists_graph p P R l b -> myexists_graph p P R [] (p t || existsb p l). Lemma myexistsb_prop_exits {A} (P : A -> Prop) (l : list A) : myexistsb_prop P l <-> Exists P l. Proof. funelim (myexistsb_prop P l) eqm. - split; intuition auto. inversion H. - clear eqm. inversion H. cbn; rewrite H. split; intuition auto. inversion H2. intuition. subst. intuition auto. Qed. Section RoseTreeInd. Context {A : Type}. Context {P : tree A -> Type}. Context {P0 : list (tree A) -> Type}. Context (f : forall (a : A) (t : list (tree A)), P0 t -> P (Node _ a t)). Context (fnil : P0 nil). Context (fcons : forall a t, P a -> P0 t -> P0 (cons a t)). Equations tree_elim (t : tree A) : P t := { tree_elim (Node x ts) := f x ts (list_tree_elim ts) } where list_tree_elim (l : list (tree A)) : P0 l := { list_tree_elim nil := fnil; list_tree_elim (cons a t) := fcons a t (tree_elim a) (list_tree_elim t) }. End RoseTreeInd. Require Import Bool. Module IdealNoSec. Equations tree_P3 {a} {a_C : C a} (t : tree a) : bool := { tree_P3 (Node x ts) => orb (P x) (list_P3 ts) } where list_P3 {a} {a_C : C a} (l : list (tree a)) : bool := { list_P3 l := existsb tree_P3 l }. #[export] Instance tree_C {a} (a_C : C a) : C (tree a) := { P := tree_P3 }. #[export] Instance tree_list_C {a} (a_C : C a) : C (list (tree a)) := { P := list_P3 }. Example check0 := (fun a (a_C : C a) => eq_refl : tree_list_C a_C = list_C (tree_C a_C)). #[local] Set Firstorder Solver auto. (* It is impossible to derive the good nested elimination principle from the o ne generated automatically, one has to redo the nested fixpoint construction *) Definition my_P3_elim : forall (P0 : forall a : Type, C a -> tree a -> bool -> Prop) (P1 : forall a, C a -> list (tree a) -> bool -> Prop), (forall (a : Type) (a_C : C a) (a0 : a) (l : list (tree a)), P1 a a_C l (list_P3 l) -> P0 a a_C (Node a a0 l) (P a0 || list_P3 l)%bool) -> (forall (a : Type) (a_C : C a), P1 a a_C [] false) -> (forall (a : Type) (a_C : C a) t l, P0 a a_C t (tree_P3 t) -> P1 a a_C l (list_P3 l) -> P1 a a_C (t :: l) (existsb (tree_P3 ) (t :: l))) -> (forall (a : Type) (a_C : C a) (t : tree a), P0 a a_C t (tree_P3 t)) /\ (forall (a : Type) (a_C : C a) (l : list (tree a)), P1 a a_C l (list_P3 l)). Proof. intros. assert((forall (a : Type) (a_C : C a) (t : tree a), P0 a a_C t (tree_P3 t))). fix my_P3_elim 3. destruct t. simpl. apply H. revert l. fix my_P3_elim0 1. destruct l; simpl. apply H0. apply H1. apply my_P3_elim. apply my_P3_elim0. firstorder. revert l. fix my_P3_elim 1. destruct l; simpl. apply H0. apply H1. apply H2. apply my_P3_elim. Defined. Section P3_proof. Context {a} {a_C : C a}. #[local] Hint Rewrite in_app_iff : In. 
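(* The two lemmas below appear to prove the same soundness statement for [tree_P3] (if it returns [true], then some element of [elements t] satisfies [P]), once via the nested eliminator [tree_elim] and once via the hand-rolled [my_P3_elim]. *)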
Lemma P3_test (t : tree a) : tree_P3 t = true -> exists x, In x (elements t) /\ P x = true. Proof. revert t. refine (tree_elim (P0:=fun t : list (tree a) => list_P3 t = true -> exists x : a, In x (list_elements t) /\ P x = true) _ _ _); clear; intros. simp tree_P3 in H0. rename H0 into Hal. - rewrite orb_true_iff in Hal. destruct Hal as [Hal|Hal]. + exists a0. simp elements. intuition auto with datatypes. + simp tree_P3 in H. specialize (H Hal). simp elements. destruct H as (ex & exl & Pexl). simp elements in *. exists ex. simpl. intuition auto. - simpl in H. discriminate. - simpl in H1. rewrite orb_true_iff in H1. destruct H1. specialize (H H1). firstorder. exists x. simpl. rewrite in_app_iff. firstorder. specialize (H0 H1). simpl. destruct H0. exists x. rewrite in_app_iff; firstorder. Qed. Lemma P3_test2 (t : tree a) : tree_P3 t = true -> exists x, In x (elements t) /\ P x = true. Proof. revert a a_C t. refine (proj1 (my_P3_elim (fun a a_C t b => b = true -> exists x : a, In x (elements t) /\ P x = true) (fun a aC (t : list (tree a)) b => b = true -> exists x : a, In x (list_elements t) /\ P x = true) _ _ _)); clear; intros. rename H0 into Hal. - rewrite orb_true_iff in Hal. destruct Hal as [Hal|Hal]. + exists a0. simp elements. intuition auto with datatypes. + specialize (H Hal). simp elements. destruct H as (ex & exl & Pexl). simp elements in *. exists ex. simpl. intuition auto. - discriminate. - simpl in H1. rewrite orb_true_iff in H1. destruct H1. specialize (H H1). firstorder. exists x. simpl. rewrite in_app_iff. firstorder. specialize (H0 H1). simpl. destruct H0. exists x. rewrite in_app_iff; firstorder. Qed. End P3_proof. End IdealNoSec. Coq-Equations-1.3.1-8.20/test-suite/noconf_hom.v000066400000000000000000000021231463127417400212220ustar00rootroot00000000000000 Require Import Program Bvector List Relations. From Equations Require Import Equations Signature DepElimDec. Require Import Utf8. Unset Equations WithK. Inductive Vec (A : Set) : nat -> Set := nil : Vec A O | cons : forall {n} (x : A) (xs : Vec A n), Vec A (S n). Derive NoConfusion for Vec. Derive NoConfusionHom for Vec. Transparent NoConfusionHom_Vec. Definition noConfVec_eq {A n} (v v' : Vec A n) : v = v' -> NoConfusionHom_Vec _ _ v v'. Proof. intros ->. destruct v'; constructor. Defined. Definition noConfVec_eq_inv {A n} (v v' : Vec A n) : NoConfusionHom_Vec _ _ v v' -> v = v'. Proof. funelim (NoConfusionHom_Vec _ _ v v'); simplify *; constructor. Defined. Lemma noConfVec_eq_eq_inv {A n} (v v' : Vec A n) (e : v = v') : noConfVec_eq_inv _ _ (noConfVec_eq _ _ e) = e. Proof. destruct e. destruct v; reflexivity. Defined. Lemma noConfVec_eq_inv_eq {A n} (v v' : Vec A n) (e : NoConfusionHom_Vec _ _ v v') : noConfVec_eq _ _ (noConfVec_eq_inv _ _ e) = e. Proof. destruct v; revert e. depelim v'. simplify *; reflexivity. depelim v'. simplify *. simpl. reflexivity. Defined. Coq-Equations-1.3.1-8.20/test-suite/noconf_noK.v000066400000000000000000000256601463127417400212010ustar00rootroot00000000000000From Equations Require Import Equations DepElimDec HSets. (* Set Universe Polymorphism. *) (** Can we define NoConfusion in SProp (squashing equalities of arguments)? Would not allow to show equivalence to (x = y) for non-strict sets. *) Unset Equations WithK. Inductive ℕ (E:Set) : Set := | O : ℕ E | S : ℕ E -> ℕ E | raise : E -> ℕ E. Arguments O {_}. Arguments S {_} _. Inductive Vec E (A : Set) : ℕ E -> Set := nil : Vec E A O | cons : forall {n} (x : A) (xs : Vec E A n), Vec E A (S n). 
(* | cons' : forall {n} (x : A), Vec E A (S n) *) (* | cons3 : forall n, Vec E A n. *) Derive Signature for Vec. Arguments nil {_ _}. Arguments cons {_ _ _} _ _. (* Arguments cons' {_ _ _} _ . *) Inductive vector_param E (A : Set) : forall (n : ℕ E), Vec E A n -> Set := vnil_param : vector_param E A O nil | vcons_param : forall (n : ℕ E) (a : A) (v : Vec E A n), vector_param E A n v -> vector_param E A (S n) (cons a v). Derive Signature for vector_param. Derive NoConfusion for ℕ. Derive NoConfusion for Vec. Derive NoConfusion for vector_param. Import Sigma_Notations. Open Scope equations_scope. Definition Sect {A B : Type} (s : A -> B) (r : B -> A) := forall x : A, r (s x) = x. Class IsEquiv {A B : Type} (f : A -> B) := BuildIsEquiv { equiv_inv : B -> A ; eisretr : Sect equiv_inv f; eissect : Sect f equiv_inv; eisadj : forall x : A, eisretr (f x) = f_equal f (eissect x) }. Arguments eisretr {A B}%type_scope {f%function_scope} {_} _. Arguments eissect {A B}%type_scope {f%function_scope} {_} _. Arguments eisadj {A B}%type_scope {f%function_scope} {_} _. Arguments IsEquiv {A B}%type_scope f%function_scope. Polymorphic Record Equiv (A B : Type) := { equiv :> A -> B ; is_equiv :> IsEquiv equiv }. Arguments equiv {A B} e. Polymorphic Instance Equiv_IsEquiv {A B} (e : Equiv A B) : IsEquiv (equiv e). Proof. apply is_equiv. Defined. Definition inv_equiv {A B} (E: Equiv A B) : B -> A := equiv_inv (IsEquiv:=is_equiv _ _ E). Polymorphic Definition equiv_inv_equiv {A B} {E: Equiv A B} (x : A) : inv_equiv _ (equiv E x) = x := eissect x. Definition inv_equiv_equiv {A B} {E: Equiv A B} (x : B) : equiv E (inv_equiv _ x) = x := eisretr x. Definition equiv_adj {A B} {E: Equiv A B} (x : A) : inv_equiv_equiv (equiv E x) = f_equal (equiv E) (equiv_inv_equiv x) := eisadj x. Notation " 'rew' H 'in' c " := (@eq_rect _ _ _ c _ H) (at level 20). Require Import Utf8. Notation " X <~> Y " := (Equiv X Y) (at level 90, no associativity, Y at next level). Lemma apply_equiv_dom {A B} (P : A -> Type) (e : Equiv A B) : (forall x : B, P (inv_equiv e x)) -> forall x : A, P x. Proof. intros. specialize (X (equiv e x)). rewrite equiv_inv_equiv in X. exact X. Defined. (* Equations noConfVec {E A n} (v v' : Vec E A n) : Prop := *) (* noConfVec nil nil := True; *) (* noConfVec (cons _ x xs) (cons _ x' xs') := *) (* {| pr1 := x; pr2 := xs |} = {| pr1 := x'; pr2 := xs' |}; *) (* noConfVec (cons' x) (cons' x') := x = x'; *) (* noConfVec cons3 cons3 := True; *) (* noConfVec _ _ := False. *) (* Transparent noConfVec. *) (* Print Assumptions noConfVec_elim. *) (* Next Obligation. *) (* Proof. *) (* depelim v. *) (* generalize_eqs_sig v'. destruct v'. *) (* simplify ?. *) (* refine (eq_simplification_sigma1_dep _ _ _ _ _). destruct v'; *) (* simplify *. constructor. *) (* generalize_eqs_sig v'. *) (* refine (eq_simplification_sigma1_dep _ _ _ _ _). destruct v'; *) (* simplify *; constructor. *) (* generalize_eqs_sig v'. *) (* refine (eq_simplification_sigma1_dep _ _ _ _ _). destruct v'; *) (* simplify *; constructor. *) (* Defined. *) (* Definition noConfVec_eq {E A n} (v v' : Vec E A n) : v = v' -> noConfVec v v'. *) (* Proof. *) (* intros ->. destruct v'; constructor. *) (* Defined. *) (* Definition noConfVec_eq_inv {E A n} (v v' : Vec E A n) : noConfVec v v' -> v = v'. *) (* Proof. *) (* funelim (noConfVec v v'); try simplify *; constructor. *) (* (* refine (@f_equal _ _ (fun x => cons x.1 x.2) _ _). *) *) (* (* simplify ?. *) *) (* (* simplify ?. *) *) (* (* refine (@f_equal _ _ (fun x => cons' x) _ _). *) *) (* Defined. 
*) (* Lemma noConfVec_eq_eq_inv {E A n} (v v' : Vec E A n) (e : v = v') : *) (* noConfVec_eq_inv _ _ (noConfVec_eq _ _ e) = e. *) (* Proof. *) (* destruct e. destruct v; reflexivity. *) (* Defined. *) (* Lemma noConfVec_refl {E A n} (v : Vec E A n) : noConfVec v v. *) (* Proof. destruct v; reflexivity. Defined. *) (* Lemma noConfVec_eq_inv_eq_refl {E A n} (v : Vec E A n) : *) (* noConfVec_eq _ _ (noConfVec_eq_inv v v (noConfVec_refl v)) = (noConfVec_refl v). *) (* Proof. *) (* destruct v; reflexivity. *) (* Defined. *) (* Lemma noConfVec_eq_inv_eq {E A n} (v v' : Vec E A n) (e : noConfVec v v') : *) (* noConfVec_eq _ _ (noConfVec_eq_inv _ _ e) = e. *) (* Proof. *) (* destruct v; revert e; depelim v'; simplify *; reflexivity. *) (* Defined. *) Definition NoConfVec {E A n} (v v' : Vec E A n) : Prop := match v in Vec _ _ n return Vec E A n -> Prop with | nil => fun v' => match v' in Vec _ _ O return Prop with | nil => True end | @cons _ _ n' x xs => fun v' => match v' in Vec _ _ (S n'') return Vec E A n'' -> Prop with | @cons _ _ n'' x' xs' => fun xs => {| pr1 := x; pr2 := xs |} = {| pr1 := x'; pr2 := xs' |} end xs end v'. (* Definition noConfVec_eq {E A n} (v v' : Vec E A n) : v = v' -> NoConfVec v v'. *) (* Proof. *) (* intros ->. destruct v'. constructor. simpl. constructor. simpl. constructor. *) (* Defined. *) (* Definition idx0_elim {E A} (P : Vec E A O -> Type) (H : P nil) : forall v, P v := *) (* fun v => match v in Vec _ _ O return P v with *) (* | nil => H *) (* end. *) (* Definition idxS_elim {E A} (P : forall n, Vec E A (S n) -> Type) *) (* (H : forall n a (v' : Vec E A n), P n (cons a v')) *) (* (H' : forall n a, P n (cons' a)) *) (* n v : P n v := *) (* match v in Vec _ _ (S n') with *) (* | cons a v' => H _ a v' *) (* | cons' a => H' _ a *) (* end. *) (* (* refine (match v as v' in Vec _ _ n' return *) *) (* (* match n' as n'' return Vec E A n'' -> Type with *) *) (* (* | O => fun _ => True *) *) (* (* | S n' => fun v => P _ v *) *) (* (* | raise _ _ => fun _ => True *) *) (* (* end v' *) *) (* (* with *) *) (* (* | nil => I *) *) (* (* | cons a v' => H _ a v' *) *) (* (* | cons' a => H' _ a *) *) (* (* end). *) *) (* Definition isNil {E A n} (v : Vec E A n) := *) (* match v with *) (* | nil => True *) (* | _ => False *) (* end. *) (* Lemma eq_simplification_sigma1_dep@{i j} {A : Type@{i}} {P : A -> Type@{i}} {B : Type@{j}} *) (* (p q : A) (x : P p) (y : P q) : *) (* (forall e : p = q, (@eq_rect A p P x q e) = y -> B) -> *) (* (sigmaI P p x = sigmaI P q y -> B). *) (* Proof. *) (* intros. revert X. *) (* change p with (pr1 &(p& x)). *) (* change q with (pr1 &(q & y)). *) (* change x with (pr2 &(p& x)) at 3. *) (* change y with (pr2 &(q & y)) at 4. *) (* destruct H. *) (* intros X. eapply (X eq_refl). apply eq_refl. *) (* Defined. *) (* (* Definition isNil_eq {E A } (v : Vec E A O) : isNil v -> v = nil. *) *) (* (* Proof. destruct v. *) *) (* (* match v with *) *) (* (* | nil => True *) *) (* (* | _ => False *) *) (* (* end. *) *) (* Set Printing Universes. *) (* Definition noConfVec_eq_inv {E A n} (v v' : Vec E A n) : NoConfVec v v' -> v = v'. *) (* Proof. *) (* destruct v. *) (* generalize_eqs_sig v'. *) (* refine (eq_simplification_sigma1_dep _ _ _ _ _). destruct v'. *) (* simplify ?. simplify ?. simplify ?. simpl. reflexivity. *) (* simplify ?. *) (* simplify ?. *) (* simpl. simplify ?. simplify ?. *) (* intros. *) (* generalize_eqs_sig v'. destruct v'. *) (* simplify ?. *) (* simplify ?. *) (* simpl in H. apply (f_equal (fun x => cons x.1 x.2)) in H. apply H. 
*) (* simplify ?. simpl in H. *) (* elim H. *) (* generalize_eqs_sig v'. destruct v'. *) (* simplify *. *) (* simplify *. *) (* simplify *. reflexivity. *) (* (* revert v'. refine (idx0_elim _ _). intros. constructor. *) *) (* (* revert n v' v. refine (idxS_elim _ _ _). *) *) (* (* simpl. intros. change a with (&(a & v').1). change v' with (&(a & v').2) at 2. *) *) (* (* destruct H. reflexivity. simpl. intros. elim H. *) *) (* (* revert n v'. refine (idxS_elim _ _ _). simpl. intros. elim H. *) *) (* (* simpl. intros n a H. destruct H; reflexivity. *) *) (* Defined. *) (* Lemma noConfVec_eq_eq_inv {E A n} (v v' : Vec E A n) (e : v = v') : *) (* noConfVec_eq_inv _ _ (noConfVec_eq _ _ e) = e. *) (* Proof. *) (* destruct e. destruct v; reflexivity. *) (* Defined. *) (* Lemma noConfVec_refl {E A n} (v : Vec E A n) : NoConfVec v v. *) (* Proof. destruct v; reflexivity. Defined. *) (* Lemma noConfVec_eq_inv_eq_refl {E A n} (v : Vec E A n) : *) (* noConfVec_eq _ _ (noConfVec_eq_inv v v (noConfVec_refl v)) = (noConfVec_refl v). *) (* Proof. *) (* destruct v; reflexivity. *) (* Defined. *) (* Lemma noConfVec_eq_inv_eq {E A n} (v v' : Vec E A n) (e : NoConfVec v v') : *) (* noConfVec_eq _ _ (noConfVec_eq_inv _ _ e) = e. *) (* Proof. *) (* destruct v. revert e. *) (* generalize_eqs_vars_sig v'. destruct v'. intros v'. *) (* refine (eq_simplification_sigma1_dep _ _ _ _ _). simplify ?. *) (* simplify ?. simplify ?. simplify ?. reflexivity. *) (* intro. simplify *. *) (* intro. simplify *. *) (* depelim v'. simpl in e. *) (* revert e. simplify *. simpl. reflexivity. *) (* simpl. intros. elim e. *) (* depelim v'. simpl in e. *) (* revert e. simplify *. *) (* revert e. simplify *. reflexivity. *) (* Defined. *) (* (* destruct v. revert v' e. refine (idx0_elim _ _). simpl. destruct e. reflexivity. *) *) (* (* revert n v' v e. refine (idxS_elim _ _ _). intros. *) *) (* (* simpl in e. *) *) (* (* revert e. simplify *. simpl. reflexivity. *) *) (* (* simpl. intros. elim e. *) *) (* (* revert n v' e. refine (idxS_elim _ _ _). intros. elim e. *) *) (* (* intros ??. simplify *. simpl. reflexivity. *) *) (* (* Defined. *) *) Definition noConf_vec_equiv {E A n} (v v' : Vec E A n) : Equiv (v = v') (noConfVec v v'). Proof. refine {| equiv := noConfVec_eq v v' |}. unshelve refine {| equiv_inv := noConfVec_eq_inv v v' |}. red. intros. apply noConfVec_eq_inv_eq. red; intros. apply noConfVec_eq_eq_inv. simplify *. destruct v'; reflexivity. Defined. Lemma noConfVec_hom_equiv : forall {E A : Set} n, NoConfusionPackage (Vec E A n). Proof. unshelve econstructor. refine noConfVec. apply noConfVec_eq. apply noConfVec_eq_inv. apply noConfVec_eq_eq_inv. Defined. Existing Instances noConfVec_hom_equiv. Equations param_vector_vcons E (A : Set) (a : A) (n : ℕ E) (v : Vec E A n) (X : vector_param E A (S n) (cons a v)) : vector_param E A n v := param_vector_vcons E A _ _ _ (vcons_param _ _ _ X) := X. Transparent param_vector_vcons. Coq-Equations-1.3.1-8.20/test-suite/noconf_noK_fin.v000066400000000000000000000247261463127417400220370ustar00rootroot00000000000000From Equations Require Import Equations DepElimDec HSets. (* Set Universe Polymorphism. *) (** Can we define NoConfusion in SProp (squashing equalities of arguments)? Would not allow to show equivalence to (x = y) for non-strict sets. *) Unset Equations WithK. Inductive fin : nat -> Set := | fin0 n : fin (S n) | finS n : fin n -> fin (S n). Derive Signature for fin. Arguments fin0 {_}. Arguments finS {_} _. 
Inductive fin_param : forall n, fin n -> Set := | finS_param : forall n (f : fin n), fin_param n f -> fin_param (S n) (finS f). Derive Signature for fin_param. (* Derive NoConfusion for ℕ. *) Derive NoConfusion for fin. Derive NoConfusion for fin_param. Import Sigma_Notations. Open Scope equations_scope. Definition Sect {A B : Type} (s : A -> B) (r : B -> A) := forall x : A, r (s x) = x. Class IsEquiv {A B : Type} (f : A -> B) := BuildIsEquiv { equiv_inv : B -> A ; eisretr : Sect equiv_inv f; eissect : Sect f equiv_inv; eisadj : forall x : A, eisretr (f x) = f_equal f (eissect x) }. Arguments eisretr {A B}%type_scope {f%function_scope} {_} _. Arguments eissect {A B}%type_scope {f%function_scope} {_} _. Arguments eisadj {A B}%type_scope {f%function_scope} {_} _. Arguments IsEquiv {A B}%type_scope f%function_scope. Polymorphic Record Equiv (A B : Type) := { equiv :> A -> B ; is_equiv :> IsEquiv equiv }. Arguments equiv {A B} e. Polymorphic Instance Equiv_IsEquiv {A B} (e : Equiv A B) : IsEquiv (equiv e). Proof. apply is_equiv. Defined. Definition inv_equiv {A B} (E: Equiv A B) : B -> A := equiv_inv (IsEquiv:=is_equiv _ _ E). Polymorphic Definition equiv_inv_equiv {A B} {E: Equiv A B} (x : A) : inv_equiv _ (equiv E x) = x := eissect x. Definition inv_equiv_equiv {A B} {E: Equiv A B} (x : B) : equiv E (inv_equiv _ x) = x := eisretr x. Definition equiv_adj {A B} {E: Equiv A B} (x : A) : inv_equiv_equiv (equiv E x) = f_equal (equiv E) (equiv_inv_equiv x) := eisadj x. Notation " 'rew' H 'in' c " := (@eq_rect _ _ _ c _ H) (at level 20). Require Import Utf8. Notation " X <~> Y " := (Equiv X Y) (at level 90, no associativity, Y at next level). Lemma apply_equiv_dom {A B} (P : A -> Type) (e : Equiv A B) : (forall x : B, P (inv_equiv e x)) -> forall x : A, P x. Proof. intros. specialize (X (equiv e x)). rewrite equiv_inv_equiv in X. exact X. Defined. Equations noConf_fin {n} (v v' : fin n) : Prop := noConf_fin fin0 fin0 := True; noConf_fin (finS f) (finS f') := f = f'; noConf_fin _ _ := False. Transparent noConf_fin. Print Assumptions noConf_fin_elim. (* Next Obligation. *) (* Proof. *) (* depelim v. *) (* generalize_eqs_sig v'. destruct v'. *) (* simplify ?. *) (* refine (eq_simplification_sigma1_dep _ _ _ _ _). destruct v'; *) (* simplify *. constructor. *) (* generalize_eqs_sig v'. *) (* refine (eq_simplification_sigma1_dep _ _ _ _ _). destruct v'; *) (* simplify *; constructor. *) (* generalize_eqs_sig v'. *) (* refine (eq_simplification_sigma1_dep _ _ _ _ _). destruct v'; *) (* simplify *; constructor. *) (* Defined. *) Definition noConf_fin_eq {n} (v v' : fin n) : v = v' -> noConf_fin v v'. Proof. intros ->. destruct v'; constructor. Defined. Definition noConf_fin_eq_inv {n} (v v' : fin n) : noConf_fin v v' -> v = v'. Proof. funelim (noConf_fin v v'); try simplify *; constructor. (* refine (@f_equal _ _ (fun x => cons x.1 x.2) _ _). *) (* simplify ?. *) (* simplify ?. *) (* refine (@f_equal _ _ (fun x => cons' x) _ _). *) Defined. Lemma noConf_fin_eq_eq_inv {n} (v v' : fin n) (e : v = v') : noConf_fin_eq_inv _ _ (noConf_fin_eq _ _ e) = e. Proof. destruct e. destruct v; reflexivity. Defined. Lemma noConf_fin_refl {n} (v : fin n) : noConf_fin v v. Proof. destruct v; reflexivity. Defined. Lemma noConf_fin_eq_inv_eq_refl {n} (v : fin n) : noConf_fin_eq _ _ (noConf_fin_eq_inv v v (noConf_fin_refl v)) = (noConf_fin_refl v). Proof. destruct v; reflexivity. Defined. Lemma noConf_fin_eq_inv_eq {n} (v v' : fin n) (e : noConf_fin v v') : noConf_fin_eq _ _ (noConf_fin_eq_inv _ _ e) = e. Proof. 
destruct v; revert e; depelim v'; simplify *; reflexivity. Defined. Lemma noConf_fin_hom_equiv : forall n, NoConfusionPackage (fin n). Proof. unshelve econstructor. refine noConf_fin. apply noConf_fin_eq. apply noConf_fin_eq_inv. apply noConf_fin_eq_eq_inv. Defined. Existing Instances noConf_fin_hom_equiv. Definition noConf_fin_equiv {n} (v v' : fin n) : Equiv (v = v') (noConf_fin v v'). Proof. refine {| equiv := noConf_fin_eq v v' |}. unshelve refine {| equiv_inv := noConf_fin_eq_inv v v' |}. red. intros. apply noConf_fin_eq_inv_eq. red; intros. apply noConf_fin_eq_eq_inv. simplify *. destruct v'; reflexivity. Defined. (* Definition NoConfVec {E A n} (v v' : Vec E A n) : Prop := *) (* match v in Vec _ _ n return Vec E A n -> Prop with *) (* | nil => fun v' => *) (* match v' in Vec _ _ O return Prop with *) (* | nil => True *) (* | _ => False *) (* end *) (* | @cons _ _ n' x xs => *) (* fun v' => *) (* match v' in Vec _ _ (S n'') return Vec E A n'' -> Prop with *) (* | @cons _ _ n'' x' xs' => fun xs => {| pr1 := x; pr2 := xs |} = {| pr1 := x'; pr2 := xs' |} *) (* | cons' _ => fun _ => False *) (* end xs *) (* | @cons' _ _ n' x => *) (* fun v' => *) (* match v' in Vec _ _ (S n'') return Prop with *) (* | nil => False *) (* | @cons' _ _ n'' x' => x = x' *) (* | cons _ _ => False *) (* end *) (* end v'. *) (* Definition noConfVec_eq {E A n} (v v' : Vec E A n) : v = v' -> NoConfVec v v'. *) (* Proof. *) (* intros ->. destruct v'. constructor. simpl. constructor. simpl. constructor. *) (* Defined. *) (* Definition idx0_elim {E A} (P : Vec E A O -> Type) (H : P nil) : forall v, P v := *) (* fun v => match v in Vec _ _ O return P v with *) (* | nil => H *) (* end. *) (* Definition idxS_elim {E A} (P : forall n, Vec E A (S n) -> Type) *) (* (H : forall n a (v' : Vec E A n), P n (cons a v')) *) (* (H' : forall n a, P n (cons' a)) *) (* n v : P n v := *) (* match v in Vec _ _ (S n') with *) (* | cons a v' => H _ a v' *) (* | cons' a => H' _ a *) (* end. *) (* (* refine (match v as v' in Vec _ _ n' return *) *) (* (* match n' as n'' return Vec E A n'' -> Type with *) *) (* (* | O => fun _ => True *) *) (* (* | S n' => fun v => P _ v *) *) (* (* | raise _ _ => fun _ => True *) *) (* (* end v' *) *) (* (* with *) *) (* (* | nil => I *) *) (* (* | cons a v' => H _ a v' *) *) (* (* | cons' a => H' _ a *) *) (* (* end). *) *) (* Definition isNil {E A n} (v : Vec E A n) := *) (* match v with *) (* | nil => True *) (* | _ => False *) (* end. *) (* Lemma eq_simplification_sigma1_dep@{i j} {A : Type@{i}} {P : A -> Type@{i}} {B : Type@{j}} *) (* (p q : A) (x : P p) (y : P q) : *) (* (forall e : p = q, (@eq_rect A p P x q e) = y -> B) -> *) (* (sigmaI P p x = sigmaI P q y -> B). *) (* Proof. *) (* intros. revert X. *) (* change p with (pr1 &(p& x)). *) (* change q with (pr1 &(q & y)). *) (* change x with (pr2 &(p& x)) at 3. *) (* change y with (pr2 &(q & y)) at 4. *) (* destruct H. *) (* intros X. eapply (X eq_refl). apply eq_refl. *) (* Defined. *) (* (* Definition isNil_eq {E A } (v : Vec E A O) : isNil v -> v = nil. *) *) (* (* Proof. destruct v. *) *) (* (* match v with *) *) (* (* | nil => True *) *) (* (* | _ => False *) *) (* (* end. *) *) (* Set Printing Universes. *) (* Definition noConfVec_eq_inv {E A n} (v v' : Vec E A n) : NoConfVec v v' -> v = v'. *) (* Proof. *) (* destruct v. *) (* generalize_eqs_sig v'. *) (* refine (eq_simplification_sigma1_dep _ _ _ _ _). destruct v'. *) (* simplify ?. simplify ?. simplify ?. simpl. reflexivity. *) (* simplify ?. *) (* simplify ?. *) (* simpl. simplify ?. 
simplify ?. *) (* intros. *) (* generalize_eqs_sig v'. destruct v'. *) (* simplify ?. *) (* simplify ?. *) (* simpl in H. apply (f_equal (fun x => cons x.1 x.2)) in H. apply H. *) (* simplify ?. simpl in H. *) (* elim H. *) (* generalize_eqs_sig v'. destruct v'. *) (* simplify *. *) (* simplify *. *) (* simplify *. reflexivity. *) (* (* revert v'. refine (idx0_elim _ _). intros. constructor. *) *) (* (* revert n v' v. refine (idxS_elim _ _ _). *) *) (* (* simpl. intros. change a with (&(a & v').1). change v' with (&(a & v').2) at 2. *) *) (* (* destruct H. reflexivity. simpl. intros. elim H. *) *) (* (* revert n v'. refine (idxS_elim _ _ _). simpl. intros. elim H. *) *) (* (* simpl. intros n a H. destruct H; reflexivity. *) *) (* Defined. *) (* Lemma noConfVec_eq_eq_inv {E A n} (v v' : Vec E A n) (e : v = v') : *) (* noConfVec_eq_inv _ _ (noConfVec_eq _ _ e) = e. *) (* Proof. *) (* destruct e. destruct v; reflexivity. *) (* Defined. *) (* Lemma noConfVec_refl {E A n} (v : Vec E A n) : NoConfVec v v. *) (* Proof. destruct v; reflexivity. Defined. *) (* Lemma noConfVec_eq_inv_eq_refl {E A n} (v : Vec E A n) : *) (* noConfVec_eq _ _ (noConfVec_eq_inv v v (noConfVec_refl v)) = (noConfVec_refl v). *) (* Proof. *) (* destruct v; reflexivity. *) (* Defined. *) (* Lemma noConfVec_eq_inv_eq {E A n} (v v' : Vec E A n) (e : NoConfVec v v') : *) (* noConfVec_eq _ _ (noConfVec_eq_inv _ _ e) = e. *) (* Proof. *) (* destruct v. revert e. *) (* generalize_eqs_vars_sig v'. destruct v'. intros v'. *) (* refine (eq_simplification_sigma1_dep _ _ _ _ _). simplify ?. *) (* simplify ?. simplify ?. simplify ?. reflexivity. *) (* intro. simplify *. *) (* intro. simplify *. *) (* depelim v'. simpl in e. *) (* revert e. simplify *. simpl. reflexivity. *) (* simpl. intros. elim e. *) (* depelim v'. simpl in e. *) (* revert e. simplify *. *) (* revert e. simplify *. reflexivity. *) (* Defined. *) (* (* destruct v. revert v' e. refine (idx0_elim _ _). simpl. destruct e. reflexivity. *) *) (* (* revert n v' v e. refine (idxS_elim _ _ _). intros. *) *) (* (* simpl in e. *) *) (* (* revert e. simplify *. simpl. reflexivity. *) *) (* (* simpl. intros. elim e. *) *) (* (* revert n v' e. refine (idxS_elim _ _ _). intros. elim e. *) *) (* (* intros ??. simplify *. simpl. reflexivity. *) *) (* (* Defined. *) *) Equations param_fin_finS (n : nat) (f : fin n) (X : fin_param (S n) (finS f)) : fin_param n f := param_fin_finS _ _ (finS_param f) := f. Transparent param_fin_finS. Print Assumptions param_fin_finS. Coq-Equations-1.3.1-8.20/test-suite/noconf_simplify.v000066400000000000000000000011051463127417400222720ustar00rootroot00000000000000From Equations Require Import Equations. Require Import List. Import ListNotations. Record decl := { na : nat; body : option nat }. Derive NoConfusion for decl. Definition ctx := list decl. Check (_ : NoConfusionPackage ctx). Inductive P : ctx -> Type := | P_nil : P nil | P_bod n k l : P l -> P ({| na := n; body := Some k |} :: l) | P_nobod n l : P l -> P ({| na := n; body := None |} :: l). Derive Signature for P. Definition vass na := {| na := na; body := None |}. Goal forall n l, P (vass n :: l) -> P l. Proof. intros n l. intros p. depelim p. exact p. Defined.Coq-Equations-1.3.1-8.20/test-suite/nocycle.v000066400000000000000000000233551463127417400205430ustar00rootroot00000000000000From Equations Require Import Equations. Require Import Below. Module NoCycle_nat. Definition noSubterm x y := Below_nat (fun y => x <> y) y. Definition noLargeSubterm x y := ((x <> y) * noSubterm x y)%type. 
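(* Added sketch: [noLargeSubterm x y] pairs the disequality [x <> y] with the
   [noSubterm] evidence for the subterms of [y], so the disequality itself is
   just the first projection. *)
Example noLargeSubterm_neq x y (H : noLargeSubterm x y) : x <> y := fst H.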
Lemma step_S x b : noSubterm x b -> noLargeSubterm (S x) b. Proof. induction b; intros H. (* Non-recursive case. *) - split. (* Disjointness of constructors. *) + intros H'; noconf H'. (* No subterm in a non-recursive constructor. *) + simpl. exact tt. (* Recursive case. *) - split. (* Injectivity of constructors. *) + intros H'; noconf H'. apply (fst H); reflexivity. + apply (IHb (snd H)). Qed. Definition no_cycle x y : x = y -> noSubterm x y. Proof. intros ->. induction y. (* Non-recursive case. *) - exact tt. (* Recursive case. *) - apply step_S; apply IHy. Qed. End NoCycle_nat. Module NoCycle_ord. Inductive O : Set := | zero | succ : O -> O | lim : (nat -> O) -> O. Derive Below for O. Derive NoConfusion for O. Definition noSubterm x y := Below_O (fun y => x <> y) y. Definition noLargeSubterm x y := ((x <> y) * noSubterm x y)%type. Lemma step_succ x b : noSubterm x b -> noLargeSubterm (succ x) b. Proof. induction b; intros H. (* Non-recursive case. *) - split. (* Disjointness of constructors. *) + intros H'; noconf H'. (* No subterm in a non-recursive constructor. *) + exact tt. (* Recursive and interesting case. *) - split. (* Injectivity of constructors. *) + intros H'; noconf H'. apply (fst H); reflexivity. + apply (IHb (snd H)). (* Recursive and non-interesting case. *) - split. (* Disjointness of constructors. *) + intros H'; noconf H'. (* No subterm in a non-recursive constructor. *) + intros x'. apply (X _ (snd (H _))). Qed. Lemma step_lim f b : forall x, noSubterm (f x) b -> noLargeSubterm (lim f) b. Proof. induction b; intros x H. (* Non-recursive case. *) - split. (* Disjointness of constructors. *) + intros H'; noconf H'. (* No subterm in a non-recursive constructor. *) + exact tt. (* Recursive and non-interesting case. *) - split. (* Disjointness of constructors. *) + intros H'; noconf H'. + apply (IHb _ (snd H)). (* Recursive and interesting case. *) - split. (* Injectivity of constructors. *) + intros H'; noconf H'. apply (fst (H x)); reflexivity. (* No subterm in a non-recursive constructor. *) + intros x'. apply (X _ _ (snd (H _))). Qed. Definition no_cycle x y : x = y -> noSubterm x y. Proof. intros ->. induction y. (* Non-recursive case. *) - exact tt. (* Recursive case. *) - apply step_succ; apply IHy. (* Recursive case. *) - intros x. apply step_lim with (x := x). apply X. Qed. End NoCycle_ord. Module NoCycle_tree. Inductive t := | L | N (x y : t). Derive Below for t. Derive NoConfusion for t. Definition noSubterm x y := Below_t (fun y => x <> y) y. Definition noLargeSubterm x y := ((x <> y) * noSubterm x y)%type. Lemma step_N1 x y b : noSubterm x b -> noLargeSubterm (N x y) b. Proof. induction b; intros H. - split. + intros H'; noconf H'. + constructor. - split. + intros H'; noconf H'. apply (fst (snd H)); reflexivity. + split. * apply (IHb2 (snd (fst H))). * apply (IHb1 (snd (snd H))). Qed. Lemma step_N2 x y b : noSubterm y b -> noLargeSubterm (N x y) b. Proof. induction b; intros H. - split. + intros H'; noconf H'. + constructor. - split. + intros H'; noconf H'. apply (fst (fst H)); reflexivity. + split. * apply (IHb2 (snd (fst H))). * apply (IHb1 (snd (snd H))). Qed. Definition no_cycle x y : x = y -> noSubterm x y. Proof. intros ->. induction y. - constructor. - split. + apply step_N2. apply IHy2. + apply step_N1. apply IHy1. Qed. End NoCycle_tree. Module NoCycle_mut. Inductive T := | L | N (x : R) with R := | rnil | rcons : T -> R -> R. Section below. Variables (P : T -> Type) (Q : R -> Type). 
Fixpoint Below_T (t : T) : Type := match t with | L => True | N x => Q x * Below_R x end with Below_R (r : R) : Type := match r with | rnil => True | rcons t r => (P t * Below_T t) * (Q r * Below_R r) end. Variables (Ht : forall t', Below_T t' -> P t') (Hr : forall r', Below_R r' -> Q r'). Lemma below_t : forall t, Below_T t with below_r : forall r, Below_R r. Proof. intros [|x]. exact I. exact (Hr x (below_r x), below_r x). intros [|t rs]. exact I. exact ((Ht t (below_t t), below_t t), (Hr rs (below_r rs), below_r rs)). Defined. End below. Derive NoConfusion for T R. Definition noSubterm_T x y := Below_T (fun y => x <> y) (fun _ => True) y. Definition noSubterm_RT x y := Below_T (fun _ => True) (fun y => x <> y) y. Definition noLargeSubterm_T x y := ((x <> y) * noSubterm_T x y)%type. Definition noSubterm_R x y := Below_R (fun _ => True) (fun y => x <> y) y. Definition noSubterm_TR x y := Below_R (fun y => x <> y) (fun _ => True) y. Definition noLargeSubterm_R x y := ((x <> y) * noSubterm_R x y)%type. Lemma step_N b : forall r, noSubterm_R r b -> noSubterm_TR (N r) b with step_rcons1 b : forall t r, noSubterm_T t b -> noSubterm_RT (rcons t r) b with step_rcons2 b : forall t r, noSubterm_R r b -> noLargeSubterm_R (rcons t r) b with step_aux1 b : forall r, noSubterm_RT r b -> noLargeSubterm_T (N r) b with step_aux2 b : forall t r, noSubterm_TR t b -> noLargeSubterm_R (rcons t r) b with step_aux3 b : forall t r, noSubterm_RT r b -> noSubterm_RT (rcons t r) b. Proof. * destruct b; intros r H. - exact I. - split. + apply step_aux1. apply H. + refine (I, _). change (noSubterm_TR (N r) b). apply step_N. apply H. * destruct b; intros t r H. - exact I. - apply step_aux2. apply H. * destruct b; intros ? r H. - split. + intros H'; noconf H'. + exact I. - split. + intros H'; noconf H'. apply H; reflexivity. + split. -- refine (I, _). change (noSubterm_RT (rcons t0 r) t). apply step_aux3. apply H. -- apply step_rcons2. apply H. * destruct b; intros r H. - split. + intros H'; noconf H'. + exact I. - split. + intros H'; noconf H'. apply H; reflexivity. + refine (I, _). change (noSubterm_TR (N r) x). apply step_N. apply H. * destruct b; intros ? r H. - split. + intros H'; noconf H'. + exact I. - split. + intros H'; noconf H'. apply H; reflexivity. + split. -- refine (I, _). change (noSubterm_RT (rcons t0 r) t). apply step_rcons1. apply H. -- apply step_aux2. apply H. * destruct b; intros t r H. - exact I. - apply step_rcons2. apply H. Qed. Lemma noCycle_T x y : x = y -> noSubterm_T x y with noCycle_R x y : x = y -> noSubterm_R x y. Proof. * intros ->. destruct y. - exact I. - refine (I, _). change (noSubterm_TR (N x) x). apply step_N. apply noCycle_R. reflexivity. * intros ->. destruct y. - exact I. - split. + refine (I, _). change (noSubterm_RT (rcons t y) t). apply step_rcons1. apply noCycle_T. reflexivity. + apply step_rcons2. apply noCycle_R. reflexivity. Qed. End NoCycle_mut. Require Import Eqdep_dec. Theorem nat_dec (n m : nat) : {n = m} + {n <> m}. Proof. decide equality. Defined. Module DecidableType_nat <: DecidableType. Definition U := nat. Definition eq_dec := nat_dec. End DecidableType_nat. Module DecidableEqDep_nat := DecidableEqDep DecidableType_nat. Module NoCycle_dep. Inductive vect (A : Type) : nat -> Type := | nil : vect A O | cons : forall n, A -> vect A n -> vect A (S n). Arguments nil {A}. Arguments cons {A n} _ _. Derive Below for vect. Derive NoConfusion for vect. Section noCycle_vect. Variable (A : Type). 
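(* Added sketch: a concrete length-indexed vector over [A], only to illustrate
   the [vect] type used in this section. *)
Example vect_pair (a b : A) : vect A 2 := cons a (cons b nil).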
Definition cond_neq (n : nat) (x : vect A n) (m : nat) (y : vect A m) := match nat_dec n m with | left e => eq_rect _ _ x _ e <> y | right _ => True end. Definition noSubterm n x m y := Below_vect A (cond_neq n x) m y. Definition noLargeSubterm n x m y := (cond_neq n x m y * noSubterm n x m y)%type. Lemma step_cons n x m b a : noSubterm n x m b -> noLargeSubterm (S n) (cons a x) m b. Proof. induction b; intros H. - split. + exact I. + constructor. - split. + pose proof (fst H). unfold cond_neq in H0. destruct (nat_dec n n0). * subst n. unfold cond_neq. destruct (nat_dec (S n0) (S n0)). -- rewrite <- DecidableEqDep_nat.eq_rect_eq in H0 |- *. intros H'; inversion H'. apply DecidableEqDep_nat.inj_pairT2 in H3. apply H0. apply H3. -- exact I. * unfold cond_neq. destruct (nat_dec (S n) (S n0)). -- inversion e. elim n1. apply H2. -- exact I. + apply (IHb (snd H)). Qed. Definition no_cycle n x y : x = y -> noSubterm n x n y. Proof. intros ->. induction y. - constructor. - apply step_cons; apply IHy. Qed. Definition no_cycle_dep n x m y : forall (e : n = m), eq_rect _ _ x _ e = y -> noSubterm n x m y. Proof. intros ->. rewrite <- DecidableEqDep_nat.eq_rect_eq. apply no_cycle. Qed. End noCycle_vect. End NoCycle_dep.Coq-Equations-1.3.1-8.20/test-suite/nocycle_def.v000066400000000000000000000003371463127417400213540ustar00rootroot00000000000000From Equations Require Import Equations. Require Import Equations.Prop.Subterm. Derive Subterm for nat. Equations noCycle (n : nat) (H : n = S n) : False := { }. Equations noCycle' (n : nat) (H : S n = n) : False := { }. Coq-Equations-1.3.1-8.20/test-suite/nolam.v000066400000000000000000000013401463127417400202030ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2018 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Require Import Bvector List Relations. From Equations Require Import Equations Signature. Require Import Utf8. Import ListNotations. Equations f : forall {A : Type}, list A -> nat := f [] := 0; f (_ :: tl) := S (f tl). Coq-Equations-1.3.1-8.20/test-suite/notations.v000066400000000000000000000021161463127417400211150ustar00rootroot00000000000000From Equations Require Import Equations. Require Import List. Import ListNotations. Reserved Notation "x +++ y" (at level 50). Equations app {A} (l : list A) : list A -> list A := { [] +++ y := y; (x :: xs) +++ y := x :: (xs +++ y) } where foo : nat -> nat := {foo 0 := 0; foo (S n) := foo n} where "x +++ y" := (app x y). Definition foo' : list nat := nil +++ nil. Equations plus : nat -> nat -> nat := { 0 + n := n; S m + n := S (m + n) } where "x + y" := (plus x y). (* Local notation in where *) Reserved Notation "x '++++' y" (at level 0). Equations rev {A} : list A -> list A := rev l := [] ++++ l where "x ++++ y" := (rev_aux x y) where rev_aux : list A -> list A -> list A := { acc ++++ [] := acc; acc ++++ (x :: l') := (x :: acc) ++++ l' }. Require Import Arith NArith. Local Open Scope N_scope. (** Parsing works with scopes as well *) Equations scope_match (n : nat) : nat := scope_match 0 := 0; scope_match (S n) := S (scope_match (n - 0)). Equations scope_match_N (n : N) : N := scope_match_N 0 := 0; scope_match_N (N.pos n) := N.pos n. 
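(* Added sanity checks (a sketch): the argument scopes make the [nat] clauses
   and the [N] clauses above compute as expected. *)
Example scope_match_check : scope_match 2%nat = 2%nat := eq_refl.
Example scope_match_N_check : scope_match_N 5%N = 5%N := eq_refl.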
Coq-Equations-1.3.1-8.20/test-suite/pattern_lambdas.v000066400000000000000000000020571463127417400222430ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) From Equations Require Import Equations. Variable test : forall n : nat, (nat -> bool) -> nat. Equations f : nat -> nat := f 0 := 0; f (S n) := f n. Equations f' (n : nat) : nat := f' n := test n (λ{ | 0 => true ; | S n => false }). Definition foo (x : nat) := f' match x with 0 => 0 | S x => 0 end. Equations decideeq (b b' : bool) : (b = b') + (~ b = b') := decideeq true true => inl eq_refl; decideeq false false => inl eq_refl; decideeq false true => inr (λ{ | ! }); decideeq true false => inr (λ{ | ! }). Coq-Equations-1.3.1-8.20/test-suite/rec.v000066400000000000000000000206051463127417400176530ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Require Import Program Utf8. Require Import Equations.Prop.Equations. Require Import Bvector List Relations. Require Import Arith Wf_nat. Require Import Lia. Module RecRel. Unset Equations With Funext. Equations id (n m : nat) : nat by wf n lt := id O m := m ; id (S n) m := S (id n m). End RecRel. (* Extraction RecRel.id. *) Section Nested. Ltac destruct_proj1_sig := match goal with | [ |- context [ ` (?x) ] ] => destruct x as [?x' ?H]; simpl end. (* with destruct_proj1_sigs x := *) (* match x with *) (* (* | context [ ` (?x') ] => destruct_proj1_sigs x' *) *) (* | _ => *) (* let x' := fresh in *) (* let H' := fresh in *) (* destruct x as [x' H'] ; simpl proj1_sig; destruct_proj1_sig *) (* end. *) #[local] Hint Extern 3 => progress destruct_proj1_sig : rec_decision. #[local] Hint Extern 3 => progress auto with arith : rec_decision. Equations? f (n : nat) : { x : nat | x <= n } by wf n lt := f 0 := exist _ 0 _ ; f (S n) := exist _ (proj1_sig (f (proj1_sig (f n)))) _. Proof. simpl. destruct f. simpl. destruct f. simpl. lia. Defined. Lemma exist_eq {A} (P : A -> Prop) (x y : A) (p : P x) (q : P y) : { e : x = y & eq_rect _ P p _ e = q } -> exist _ x p = exist _ y q. Proof. intros [He Hp]. destruct He. simpl in Hp. destruct Hp. reflexivity. Defined. End Nested. Require Import Coq.Lists.SetoidList. Require Import Coq.Sorting.Sorting. Require Import Coq.Sorting.Permutation. Require Import Coq.Sorting.PermutSetoid. #[export] Instance filter_ext {A} : Morphisms.Proper (pointwise_relation A eq ==> eq ==> eq) (@filter A). Proof. reduce. subst. induction y0; simpl; auto. generalize (H a). case_eq (x a). intros _ <-; congruence. intros _ <-; congruence. Qed. Lemma Permutation_filter {A} (l : list A) (p : A -> bool) : Permutation l (filter p l ++ filter (compose negb p) l). Proof with simpl; intros. induction l; unfold compose; simpl; auto. case_eq (p a); simpl; intros; auto using Permutation_cons_app. Qed. 
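(* Added usage sketch: partitioning a concrete list with [Nat.even]; both
   filters compute, so the statement is a direct instance of the lemma above. *)
Example Permutation_filter_even :
  Permutation (1 :: 2 :: 3 :: 4 :: nil) ((2 :: 4 :: nil) ++ (1 :: 3 :: nil)) :=
  Permutation_filter (1 :: 2 :: 3 :: 4 :: nil) Nat.even.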
Inductive xor (A B : Prop) : Prop := | xor_left : A -> not B -> xor A B | xor_right : B -> not A -> xor A B. Require Import Bool. Coercion Is_true : bool >-> Sortclass. Lemma xor_reflect (x y : bool) : reflect (xor x y) (xorb x y). Proof. destruct x; destruct y; simpl; auto; constructor; try (intro H; elim H); firstorder. Qed. Lemma Permutation_filter2 {A} (l : list A) (p q : A -> bool) : (forall x, xorb (p x) (q x)) -> Permutation l (filter p l ++ filter q l). Proof with simpl; intros. intros. induction l; unfold compose; simpl; auto. generalize (H a). case_eq (p a); simpl; intros. apply Permutation_cons; auto. revert H1. case_eq (q a); intros qa; intros; try discriminate. elim H1. apply IHl. revert H1. case_eq (q a); intros qa; intros; try discriminate. auto using Permutation_cons_app. elim H1. Qed. Require Import EquivDec. Require Import Permutation. Module RecMeasure. #[export] Instance wf_MR {A R} `(WellFounded A R) {B} (f : B -> A) : WellFounded (MR R f). Proof. red. apply measure_wf. apply H. Defined. #[local] Obligation Tactic := program_simpl; try typeclasses eauto with rec_decision. #[local] Hint Extern 0 (MR _ _ _ _) => red : rec_decision. Equations id (n : nat) : nat by wf n (MR lt (fun n => n)) := id O := 0 ; id (S n) := S (id n). Equations f (n m : nat) : nat by wf n (MR lt (fun n => n)) := f O m := m ; f (S n) m := S (f n m) + m. Arguments length [A] _. Equations g (l : list nat) : nat by wf l (MR lt (@length nat)) := g nil := 0 ; g (cons n l) := S (g l). Lemma filter_length {A} p (l : list A) : length (filter p l) <= length l. Proof. induction l ; simpl ; auto. destruct (p a); simpl; auto with arith. Qed. #[local] Hint Resolve filter_length : datatypes. Section QuickSort. Definition le_lt_n_Sm : ∀ n m : nat, n ≤ m → n < S m. Proof. apply Nat.lt_succ_r. Qed. #[local] Hint Immediate Nat.le_succ_l : rec_decision. #[local] Hint Resolve filter_length : rec_decision. #[local] Hint Unfold lt gt : rec_decision. #[local] Hint Resolve le_lt_n_Sm : rec_decision. Context {A : Type} (leb : A -> A -> bool) (ltb : A -> A -> bool). Equations qs (l : list A) : list A by wf l (MR lt (@length A)) := qs nil := nil ; qs (cons a l) := let lower := filter (fun x => ltb x a) l in let upper := filter (fun x => leb a x) l in qs lower ++ a :: qs upper. Context (le : relation A). Context (refl_le : forall x y, reflect (le x y) (leb x y)). Context (lt : relation A). Context (refl_lt : forall x y, reflect (lt x y) (ltb x y)). Context (compare : A -> A -> comparison). Context (compspec : forall x y, CompSpec eq lt x y (compare x y)). Context (leb_complete : forall x y, leb x y <-> (x = y \/ leb y x = false)). Context (leb_complete2 : forall x y, leb x y = false -> leb y x). Context (ltb_leb : forall x y, ltb x y -> leb x y). Context (nltb_leb : forall x y, ltb x y = false -> leb y x). Context (ltb_leb' : forall x y, leb x y = false <-> ltb y x). Context (ltb_leb'' : forall x y, ltb x y <-> ~ leb y x). Set Firstorder Solver auto. Lemma filter_In' : ∀ (A : Type) (f : A → bool) (x : A) (l : list A), In x (filter f l) ↔ In x l ∧ f x. Proof. intros. rewrite filter_In. intuition auto. apply Is_true_eq_true2. auto. apply Is_true_eq_true. auto. Qed. Lemma qs_same (l : list A) : forall a, In a l <-> In a (qs l). Proof. funelim (qs l). - simpl. reflexivity. - intros a'. simpl. rewrite in_app_iff. simpl. rewrite <- H, <- H0, !filter_In'. intuition auto. destruct (compspec a a'); intuition auto. right. right. intuition auto with arith. apply ltb_leb. destruct (refl_lt a a'); auto. constructor. contradiction. left. 
intuition auto. destruct (refl_lt a' a); auto. constructor. contradiction. Qed. Lemma sort_le_app : forall l1 l2, sort le l1 -> sort le l2 -> (forall x y, In x l1 -> In y l2 -> le x y) -> sort le (l1 ++ l2). Proof. induction l1; simpl in *; intuition auto. inversion_clear H. constructor; auto. apply InfA_app; auto. destruct l2; auto. constructor ; auto. auto with datatypes. Qed. Lemma hd_rel_all a l : (forall x, In x l -> le a x) -> HdRel le a l. Proof. intros H. induction l; auto. constructor. apply H. auto with datatypes. Qed. Lemma hdrel_filter a l : HdRel le a (qs (filter (λ x : A, leb a x) l)). Proof. apply hd_rel_all. intros. rewrite <- qs_same in H. rewrite filter_In in H. destruct H. pose (refl_le a x). now depelim r. Qed. Context `(le_trans : Transitive A le). Lemma qs_sort (l : list A) : sort le (qs l). Proof. intros. funelim (qs l). constructor. apply sort_le_app; auto. constructor. auto. apply hdrel_filter. intros. simpl in H2. rewrite <- qs_same, filter_In' in H1, H2. intuition auto with arith. subst. pose (r:=refl_le x y); apply ltb_leb in H4; now depelim r. apply ltb_leb in H4. pose (r:=refl_le x a); pose (r':=refl_le a y); depelim r; depelim r'; reverse; DepElim.simplify_dep_elim; eauto. Qed. Lemma qs_perm l : Permutation l (qs l). Proof. funelim (qs l). constructor. intros. rewrite <- H0, <- H. apply Permutation_cons_app. rewrite Permutation_app_comm. apply Permutation_filter2. intros. case_eq (leb a x). simpl. intros. case_eq (ltb x a). intros. apply Is_true_eq_left in H2. apply Is_true_eq_left in H1. apply ltb_leb'' in H2. contradiction. constructor. simpl. intros. rewrite ltb_leb' in H1. case_eq (ltb x a). constructor. intros. rewrite H2 in H1. elim H1. Qed. End QuickSort. (* Recursive Extraction qs. *) End RecMeasure. Coq-Equations-1.3.1-8.20/test-suite/rose.v000066400000000000000000000144511463127417400200540ustar00rootroot00000000000000(** printing now %\coqdockw{now}% *) (** printing simp %\coqdoctac{simp}% *) (** printing by %\coqdockw{by}% *) (** printing rec %\coqdockw{rec}% *) (* begin hide *) From Equations Require Import Equations. Require Import Lia Utf8 List. Import ListNotations. Set Keyed Unification. Section list_size. Context {A : Type} (f : A -> nat). Equations list_size (l : list A) : nat := list_size nil := 0; list_size (cons x xs) := S (f x + list_size xs). Global Transparent list_size. Context {B : Type}. Equations? list_map_size (l : list A) (g : forall (x : A), f x < list_size l -> B) : list B := list_map_size nil _ := nil; list_map_size (cons x xs) g := cons (g x _) (list_map_size xs (fun x H => g x _)). Proof. auto with arith. lia. Defined. Lemma list_map_size_spec (g : A -> B) (l : list A) : list_map_size l (fun x _ => g x) = List.map g l. Proof. funelim (list_map_size l (λ (x : A) (_ : f x < list_size l), g x)); simpl; trivial. now rewrite H. Qed. End list_size. Require Import List. (* end hide *) (** To demonstrate nested well-founded recursive definitions, we take a well-known example from the literature: rose trees. We will define a recursive function gathering the elements in a [rose] tree in an efficient way, using nested well-founded recursion instead of the guardedness check of %\Coq%. The [rose] trees are defined as trees whose nodes contain lists of trees, i.e. forests. *) (** To solve measure subgoals *) #[local] Hint Extern 4 (_ < _) => simpl; lia : rec_decision. #[local] Obligation Tactic := CoreTactics.equations_simpl; try (simpl; lia); try typeclasses eauto with rec_decision. (* begin hide *) Section RoseTree. 
(* end hide *) Context {A : Type}. Inductive rose : Type := | leaf (a : A) : rose | node (l : list rose) : rose. (** This is a nested inductive type we can measure assuming a [list_size] function for measuring lists. Here we use the usual guardedness check of %\Coq% that is able to unfold the definition of [list_size] to check that this definition is terminating. *) Equations size (r : rose) : nat := size (leaf _) := 0; size (node l) := S (list_size size l). (* begin hide *) Transparent size. Derive NoConfusion for rose. (* end hide *) (** As explained at the beginning of this section, however, if we want to program more complex recursions, or rearrange our terms slightly and freely perform dependent pattern-matching, the limited syntactic guardedness check will quickly get in our way. Using a _nested_ [where] clause and the support of %\Equations% for well-founded recursion, we can define the following function gathering the elements in a rose tree efficiently: *) Equations elements (r : rose) (acc : list A) : list A by wf (size r) lt := elements (leaf a) acc := a :: acc; elements (node l) acc := aux l _ where aux x (H : list_size size x < size (node l)) : list A by wf (list_size size x) lt := aux nil _ := acc; aux (cons x xs) H := elements x (aux xs _). Definition elems r := elements r nil. (** The function is nesting a well-founded recursion inside another one, based on the measure of [rose] trees and lists ([MR R f] is a combinator for [λ x y, R (f x) (f y)]). The termination of this definition is ensured solely by logical means, it does not require any syntactic check. Note that the auxiliary definition's type mentions the variable [l] bound by the enclosing pattern-matching, to pass around information on the size of arguments. Local [where] clauses allow just that. This kind of nested pattern-matching and well-founded recursion was not supported by previous definition packages for %\Coq% like %\textsc{Function}% or %\textsc{Program}%, and due to the required dependencies it is not supported by %\textsc{Isabelle}%'s %\texttt{Function}% package either (see %\cite{BoveKraussSozeau2011}% for a survey of the treatment of recursion in type-theory based tools). *) (** We can show that [elems] is actually computing the same thing as the naïve algorithm concatenating elements of each tree in each forest. *) Equations elements_spec (r : rose) : list A := elements_spec (leaf a) := [a]; elements_spec (node l) := concat (List.map elements_spec l). (** As [elements] takes an accumulator, we first have to prove a generalized lemma, typical of tail-recursive functions: *) Lemma elements_correct (r : rose) acc : elements r acc = elements_spec r ++ acc. Proof. apply (elements_elim (fun r acc f => f = elements_spec r ++ acc) (fun l acc x H r => r = concat (List.map elements_spec x) ++ acc)); intros; simp elements_spec; simpl; trivial. now rewrite H1, H0, app_assoc. Qed. (** We apply the eliminator providing the predicate for the nested recursive call and simplify using the [simp elements_spec] tactic, which rewrites with the defining equations of [elements_spec]. The induction hypotheses and associativity of concatenation are enough to solve the remaining goal, which involves the two recursive calls to [elements] and [aux]. The above proof is very quick as the eliminator frees us from redoing all the nested recursive reasoning and the proofs that the induction hypotheses can be applied.
It is now trivial to prove the correctness of our fast implementation: *) Lemma elems_correct (r : rose) : elems r = elements_spec r. (* begin hide *) Proof. now unfold elems; rewrite elements_correct, app_nil_r. Qed. (* end hide *) (* begin hide *) End RoseTree. (* end hide *) Arguments rose A : clear implicits. Module FullStruct. Parameter (A : Type). Equations elements (r : rose A) (acc : list A) : list A := elements (leaf a) acc := a :: acc; elements (node l) acc := aux l where aux (x : list (rose A)) : list A := aux nil := acc; aux (cons x xs) := elements x (aux xs). End FullStruct. Module WfAndStruct. Parameter (A : Type). Equations elements (r : rose A) (acc : list A) : list A by wf (size r) lt := elements (leaf a) acc := a :: acc; elements (node l) acc := aux l _ where aux (x : list (rose A)) (H : list_size size x < size (node l)) : list A by struct x := aux nil H := acc; aux (cons x xs) H := elements x (aux xs _). End WfAndStruct. Coq-Equations-1.3.1-8.20/test-suite/scope.v000066400000000000000000000321651463127417400202170ustar00rootroot00000000000000(** Example by Rafaël Bocquet: POPLmark part 1A with inductive definition of scope and well-scoped variables (and terms, types and environments). *) Require Import Program. Require Import Equations.Prop.DepElim. Require Import Equations.Prop.Equations. Require Import Coq.Logic.Eqdep_dec. Require Import Coq.Classes.EquivDec. Require Import Arith. Derive Signature for eq. Definition scope := nat. Inductive var : scope -> Set := | FO : forall {n}, var (S n) | FS : forall {n}, var n -> var (S n) . Derive Signature NoConfusion NoConfusionHom for var. Derive Subterm for nat. Definition noCycle_nat (n m : nat) : n = m -> ~ nat_direct_subterm n m. Proof. induction m in n |- *. intros. intro H'. depelim H'. intros H H'. depelim H'. apply (IHm n eq_refl). rewrite H at 2. constructor. Defined. Set Equations With UIP. Lemma var_dec_eq : forall {n} (x y : var n), {x = y} + {x <> y}. Proof. depind x; depelim y. - left; reflexivity. - right; intro H; inversion H. - right; intro H; inversion H. - destruct (IHx y); subst. + left; reflexivity. + right; intro H; depelim H. apply n0. contradiction. Qed. Inductive scope_le : scope -> scope -> Set := | scope_le_n : forall {n}, scope_le n n | scope_le_S : forall {n m}, scope_le n m -> scope_le n (S m) | scope_le_map : forall {n m}, scope_le n m -> scope_le (S n) (S m). Derive Signature NoConfusion NoConfusionHom for scope_le. Derive Subterm for scope_le. Equations scope_le_app {a b c} (p : scope_le a b) (q : scope_le b c) : scope_le a c := scope_le_app p scope_le_n := p; scope_le_app p (scope_le_S q) := scope_le_S (scope_le_app p q); scope_le_app p (scope_le_map q) with p := { | scope_le_n := scope_le_map q; | scope_le_S p' := scope_le_S (scope_le_app p' q); | (scope_le_map p') := scope_le_map (scope_le_app p' q) }. (* Equations scope_le_app {a b c} (p : scope_le a b) (q : scope_le b c) : scope_le a c := *) (* scope_le_app p q by wf (signature_pack q) scope_le_subterm := *) (* scope_le_app p scope_le_n := p; *) (* scope_le_app p (scope_le_S q) := scope_le_S (scope_le_app p q); *) (* scope_le_app p (scope_le_map q) with p := *) (* { | scope_le_n := scope_le_map q; *) (* | scope_le_S p' := scope_le_S (scope_le_app p' q); *) (* | (scope_le_map p') := scope_le_map (scope_le_app p' q) }. *) #[local] Hint Unfold NoConfusion.noConfusion_nat_obligation_1 : equations. Lemma scope_le_app_len n m (q : scope_le n m) : scope_le_app scope_le_n q = q. Proof. depind q; simp scope_le_app; trivial. now rewrite IHq. Qed. 
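(* Added sketch: concrete well-scoped data. [FS FO] is the de Bruijn index 1 in
   a scope of three variables, and [scope_le 1 3] is inhabited by weakening
   twice. *)
Example var_one_of_three : var 3 := FS FO.
Example scope_le_one_three : scope_le 1 3 := scope_le_S (scope_le_S scope_le_n).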
#[local] Hint Rewrite scope_le_app_len : scope_le_app. Inductive type : scope -> Type := | tvar : forall {n}, var n -> type n | ttop : forall {n}, type n | tarr : forall {n}, type n -> type n -> type n | tall : forall {n}, type n -> type (S n) -> type n . Derive Signature NoConfusion NoConfusionHom for type. Inductive env : scope -> scope -> Set := | empty : forall {n}, env n n | cons : forall {n m}, type m -> env n m -> env n (S m) . Derive Signature NoConfusion NoConfusionHom for env. Lemma env_scope_le : forall {n m}, env n m -> scope_le n m. Proof. intros n m Γ; depind Γ; constructor; auto. Defined. Equations env_app {a b c} (Γ : env a b) (Δ : env b c) : env a c := env_app Γ empty := Γ; env_app Γ (cons t Δ) := cons t (env_app Γ Δ). Lemma cons_app : forall {a b c} (Γ : env a b) (Δ : env b c) t, cons t (env_app Γ Δ) = env_app Γ (cons t Δ). Proof. intros. autorewrite with env_app. reflexivity. Qed. #[local] Hint Rewrite @cons_app. Equations map_var {n m} (f : var n -> var m) (t : var (S n)) : var (S m) := map_var f FO := FO; map_var f (FS x) := FS (f x). Lemma map_var_a : forall {n m o} f g a, @map_var n o (fun t => f (g t)) a = @map_var m o f (@map_var n m g a). Proof. depind a; autorewrite with map_var; auto. Qed. Lemma map_var_b : forall {n m} (f g : var n -> var m), (forall x, f x = g x) -> forall a, map_var f a = map_var g a. Proof. depind a; autorewrite with map_var; try f_equal; auto. Qed. Equations lift_var_by {n m} (p : scope_le n m) : var n -> var m := lift_var_by scope_le_n := fun t => t; lift_var_by (scope_le_S p) := fun t => FS (lift_var_by p t); lift_var_by (scope_le_map p) := map_var (lift_var_by p). Equations lift_type_by {n m} (f : scope_le n m) (t : type n) : type m := lift_type_by f (tvar x) := tvar (lift_var_by f x); lift_type_by f ttop := ttop; lift_type_by f (tarr a b) := tarr (lift_type_by f a) (lift_type_by f b); lift_type_by f (tall a b) := tall (lift_type_by f a) (lift_type_by (scope_le_map f) b). Lemma lift_var_by_app : forall {b c} (p : scope_le b c) {a} (q : scope_le a b) t, lift_var_by p (lift_var_by q t) = lift_var_by (scope_le_app q p) t. Proof with autorewrite with lift_var_by map_var scope_le_app in *; auto. intros b c p; induction p; intros a q t... - rewrite IHp; auto. - generalize dependent p. generalize dependent t. depind q; intros... rewrite IHp... specialize (IHp _ q). rewrite (map_var_b (lift_var_by (scope_le_app q p)) (fun t => lift_var_by p (lift_var_by q t))); eauto. rewrite <- map_var_a; auto. Qed. #[local] Hint Rewrite @lift_var_by_app : lift_var_by. Lemma lift_type_by_id : forall {n} (t : type n) P, (forall x, lift_var_by P x = x) -> lift_type_by P t = t. Proof. depind t; intros; autorewrite with lift_type_by; rewrite ?H, ?IHt1, ?IHt2; auto. intros; depelim x; autorewrite with lift_var_by map_var; try f_equal; auto. Qed. Lemma lift_type_by_n : forall {n} (t : type n), lift_type_by scope_le_n t = t. Proof. intros; eapply lift_type_by_id; intros; autorewrite with lift_var_by; auto. Qed. #[local] Hint Rewrite @lift_type_by_n : lift_type_by. Lemma lift_type_by_app : forall {a} t {b c} (p : scope_le b c) (q : scope_le a b), lift_type_by p (lift_type_by q t) = lift_type_by (scope_le_app q p) t. Proof. depind t; intros b c p; depind p; intros q; repeat (autorewrite with scope_le_app lift_var_by lift_type_by; rewrite ?IHt1, ?IHt2; auto). Qed. #[local] Hint Rewrite @lift_type_by_app : lift_type_by. 
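(* Added sketch: the F<: type "forall a <: Top. a -> a", written with
   well-scoped de Bruijn indices in the empty scope. *)
Example poly_id : type 0 := tall ttop (tarr (tvar FO) (tvar FO)).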
Equations lookup {n} (Γ : env O n) (x : var n) : type n := lookup (n:=(S _)) (cons a Γ) FO := lift_type_by (scope_le_S scope_le_n) a; lookup (n:=(S _)) (cons a Γ) (FS x) := lift_type_by (scope_le_S scope_le_n) (lookup Γ x) . Lemma lookup_app : forall {n} (Γ : env O (S n)) {m} (Δ : env (S n) (S m)) x, lookup (env_app Γ Δ) (lift_var_by (env_scope_le Δ) x) = lift_type_by (env_scope_le Δ) (lookup Γ x). Proof with autorewrite with lookup scope_le_app env_app lift_var_by lift_type_by; auto. intros n Γ m Δ; induction Δ; intros x; simpl... rewrite IHΔ... Qed. #[local] Hint Rewrite @lookup_app : lookup. Inductive sa : forall {n}, env O n -> type n -> type n -> Prop := | sa_top : forall {n} (Γ : env O n) s, sa Γ s ttop | sa_var_refl : forall {n} (Γ : env O n) x, sa Γ (tvar x) (tvar x) | sa_var_trans : forall {n} (Γ : env O (S n)) x t, sa Γ (lookup Γ x) t -> sa Γ (tvar x) t | sa_arr : forall {n} {Γ : env O n} {t1 t2 s1 s2}, sa Γ t1 s1 -> sa Γ s2 t2 -> sa Γ (tarr s1 s2) (tarr t1 t2) | sa_all : forall {n} {Γ : env O n} {t1 t2 s1 s2}, sa Γ t1 s1 -> sa (cons t1 Γ) s2 t2 -> sa Γ (tall s1 s2) (tall t1 t2) . Derive Signature for sa. Inductive sa_env : forall {n}, env O n -> env O n -> Prop := | sa_empty : sa_env empty empty | sa_cons : forall {n} (Γ Δ : env O n) a b, sa Γ a b -> sa_env Γ Δ -> sa_env (cons a Γ) (cons b Δ) . Derive Signature for sa_env. Lemma sa_refl : forall {n} (Γ : env O n) x, sa Γ x x. Proof. depind x; constructor; auto. Qed. Lemma sa_env_refl : forall {n} (Γ : env O n), sa_env Γ Γ. Proof. depind Γ; constructor; auto using sa_refl. Qed. Inductive env_extend : forall {b c}, env O b -> env O c -> scope_le b c -> Prop := | env_extend_refl : forall {b} (Γ : env O b), env_extend Γ Γ scope_le_n | env_extend_cons : forall {b c} (Γ : env O b) (Δ : env O c) p a, env_extend Γ Δ p -> env_extend (cons a Γ) (cons (lift_type_by p a) Δ) (scope_le_map p) | env_extend_2 : forall {b c} (Γ : env O b) (Δ : env O c) p a, env_extend Γ Δ p -> env_extend Γ (cons a Δ) (scope_le_S p) . Derive Signature for env_extend. Lemma env_app_extend : forall {b c} (Γ : env O b) (Δ : env b c), env_extend Γ (env_app Γ Δ) (env_scope_le Δ). Proof. depind Δ; intros; autorewrite with env_app scope_le_app in *; simpl; constructor; auto. Qed. Lemma env_extend_lookup : forall {b c} (Γ : env O b) (Δ : env O c) P, env_extend Γ Δ P -> forall x, lift_type_by P (lookup Γ x) = lookup Δ (lift_var_by P x). Proof with autorewrite with lift_type_by lift_var_by map_var lookup scope_le_app; auto. intros b c Γ Δ P A; depind A; intros x; depelim x... all:rewrite <- IHA... Qed. Lemma sa_weakening : forall {b} (Γ : env O b) p q (A : sa Γ p q) {c P} (Δ : env O c) (B : env_extend Γ Δ P), sa Δ (lift_type_by P p) (lift_type_by P q). Proof. intros b Γ p q A; induction A; intros c P Δ B; autorewrite with lift_type_by in *; try (auto; constructor; auto; fail). - depelim c; [depelim B|]. constructor; rewrite <- (env_extend_lookup _ _ _ B); auto. - constructor; auto. eapply IHA2. constructor. auto. Qed. Lemma sa_weakening_app : forall {b} (Γ : env O b) p q (A : sa Γ p q) {c} (Δ : env b c), sa (env_app Γ Δ) (lift_type_by (env_scope_le Δ) p) (lift_type_by (env_scope_le Δ) q). Proof. intros; eapply sa_weakening. exact A. auto using env_app_extend. Qed. Lemma sa_toname : forall {n m} Γ (Δ : env (S n) m) x, x <> lift_var_by (env_scope_le Δ) FO -> forall p q, lookup (env_app (cons p Γ) Δ) x = lookup (env_app (cons q Γ) Δ) x. Proof. intros n m Γ Δ. depind Δ; intros x A p q; depelim x; simpl in *; autorewrite with env_app lookup lift_var_by in *; auto. 
- exfalso; auto. - specialize (IHΔ Γ x). forward IHΔ by intro; subst; auto. now rewrite (IHΔ p q). Qed. Lemma sa_narrowing : forall {s} q, (forall {s'} (P : scope_le s s') (Γ : env O s') p (A : sa Γ p (lift_type_by P q)) s'' (Δ : env (S s') s'') a b (B : sa (env_app (cons (lift_type_by P q) Γ) Δ) a b), sa (env_app (cons p Γ) Δ) a b) /\ (forall {s'} (A : scope_le s s') (Γ : env O s') p (B : sa Γ p (lift_type_by A q)) r (C : sa Γ (lift_type_by A q) r), sa Γ p r). Proof. intros s q; induction q; match goal with | [ |- _ /\ ?Q ] => assert (PLOP:Q); [ intros s' A Γ p B; depind B; subst; intros r C; autorewrite with lift_type_by lift_var_by in *; try noconf H; try (constructor; auto; fail); try (constructor; eapply IHB; autorewrite with lift_type_by; auto; fail); try (depelim C; subst; constructor; destruct_pairs; try noconf H; eauto; fail); try (specialize (IHB _ _ _ IHq1 IHq2 A); destruct_pairs; try noconf H; constructor; eauto; fail); auto | split; [ intros s' P Γ p A; depind A; subst; intros s'' Δ a b B; destruct_pairs; remember (env_app (cons _ Γ) Δ) as PPP; depind B; try (subst; constructor; auto; autorewrite with core; auto; fail); clear B; constructor; specialize (IHB _ HeqPPP); subst *; try (noconf H; auto); match goal with | [ IHB : sa _ (lookup (env_app (cons ?a _) _) ?x) _ |- sa _ (lookup (env_app (cons ?b _) _) _) _ ] => destruct (var_dec_eq x (lift_var_by (env_scope_le Δ) FO)) as [Heq|Hneq] ; [ subst; autorewrite with lookup lift_type_by lift_var_by in *; try (noconf H; auto); autorewrite with lookup lift_type_by lift_var_by scope_le_app in *; try solve [auto; depelim IHB; autorewrite with lookup lift_type_by lift_var_by scope_le_app in *; auto; constructor; auto; fail]; try solve [(apply sa_var_trans in A || assert (A := sa_arr A1 A2) || assert (A := sa_all A1 A2)); match goal with | [ A : sa _ ?p _ |- _ ] => (apply @sa_weakening_app with (Δ:=cons p empty) in A; apply @sa_weakening_app with (Δ:=Δ) in A; autorewrite with lookup env_app lift_var_by lift_type_by in *; simpl in *; eapply PLOP; [exact A | exact IHB]) end; fail] | rewrite sa_toname with (p:=b) (q:=a); auto ] end | assumption ] ] end. - clear IHB1 IHB2. depelim C; [constructor|]; destruct_pairs. constructor; eauto. simpl in H. simpl in H0. apply (H1 _ A Γ _ C1 _ empty _ _) in B2; autorewrite with env_app in B2; eauto. Qed. Print Assumptions sa_toname. Coq-Equations-1.3.1-8.20/test-suite/scope_noK.v000066400000000000000000000323101463127417400210160ustar00rootroot00000000000000(** Example by Rafaël Bocquet: POPLmark part 1A with inductive definition of scope and well-scoped variables (and terms, types and environments). *) Require Import Equations.Equations. Require Import Equations.DepElimDec. Require Import Coq.Logic.Eqdep_dec. Require Import Coq.Classes.EquivDec. Require Import Program. Require Import Arith. Ltac simpl_exist := repeat ( repeat match goal with | [ H : existT ?a ?b _ = existT ?a ?b _ |- _] => apply inj_pair2 in H end; subst; clear_dups ). Definition scope := nat. Inductive var : scope -> Set := | FO : forall {n}, var (S n) | FS : forall {n}, var n -> var (S n) . Derive Signature NoConfusion NoConfusionHom for var. Derive Subterm for nat. Definition noCycle_nat (n m : nat) : n = m -> ~ nat_direct_subterm n m. Proof. induction m in n |- *. intros. intro H'. depelim H'. intros H H'. depelim H'. apply IHm. rewrite H at 2. constructor. Defined. Lemma var_dec_eq : forall {n} (x y : var n), {x = y} + {x <> y}. Proof. depind x; depelim y. - left; reflexivity. - right; intro H; inversion H. 
- right; intro H; inversion H. - destruct (IHx y); subst. + left; reflexivity. + right; intro H; inversion H. noconf H. contradiction. Qed. Inductive scope_le : scope -> scope -> Set := | scope_le_n : forall {n m}, n = m -> scope_le n m | scope_le_S : forall {n m}, scope_le n m -> scope_le n (S m) | scope_le_map : forall {n m}, scope_le n m -> scope_le (S n) (S m) . Derive Signature NoConfusion NoConfusionHom for scope_le. Derive Subterm for scope_le. Ltac rec ::= Subterm.rec_wf_eqns. (* Equations scope_le_app {a b c} (p : scope_le a b) (q : scope_le b c) : scope_le a c := *) (* scope_le_app p (scope_le_n eq_refl) := p; *) (* scope_le_app p (scope_le_S q) := scope_le_S (scope_le_app p q); *) (* scope_le_app p (scope_le_map q) with p := *) (* { | scope_le_n eq_refl := scope_le_map q; *) (* | scope_le_S p' := scope_le_S (scope_le_app p' q); *) (* | (scope_le_map p') := scope_le_map (scope_le_app p' q) }. *) Equations scope_le_app {a b c} (p : scope_le a b) (q : scope_le b c) : scope_le a c := scope_le_app p q by wf (signature_pack q) scope_le_subterm := scope_le_app p (scope_le_n eq_refl) := p; scope_le_app p (scope_le_S q) := scope_le_S (scope_le_app p q); scope_le_app p (scope_le_map q) with p := { | scope_le_n eq_refl := scope_le_map q; | scope_le_S p' := scope_le_S (scope_le_app p' q); | (scope_le_map p') := scope_le_map (scope_le_app p' q) }. Hint Unfold NoConfusion.noConfusion_nat_obligation_1 : equations. Lemma scope_le_app_len n m (q : scope_le n m) : scope_le_app (scope_le_n eq_refl) q = q. Proof. depind q; simp scope_le_app. destruct e. simp scope_le_app. now rewrite IHq. Qed. Hint Rewrite scope_le_app_len : scope_le_app. Inductive type : scope -> Type := | tvar : forall {n}, var n -> type n | ttop : forall {n}, type n | tarr : forall {n}, type n -> type n -> type n | tall : forall {n}, type n -> type (S n) -> type n . Inductive env : scope -> scope -> Set := | empty : forall {n m}, n = m -> env n m | cons : forall {n m}, type m -> env n m -> env n (S m) . Lemma env_scope_le : forall {n m}, env n m -> scope_le n m. Proof. intros n m Γ; depind Γ. constructor; auto. now constructor 2. Defined. Equations env_app {a b c} (Γ : env a b) (Δ : env b c) : env a c := env_app Γ (empty eq_refl) := Γ; env_app Γ (cons t Δ) := cons t (env_app Γ Δ). Lemma cons_app : forall {a b c} (Γ : env a b) (Δ : env b c) t, cons t (env_app Γ Δ) = env_app Γ (cons t Δ). Proof. intros. autorewrite with env_app. reflexivity. Qed. Hint Rewrite @cons_app. Equations map_var {n m} (f : var n -> var m) (t : var (S n)) : var (S m) := map_var f FO := FO; map_var f (FS x) := FS (f x). Lemma map_var_a : forall {n m o} f g a, @map_var n o (fun t => f (g t)) a = @map_var m o f (@map_var n m g a). Proof. depind a; autorewrite with map_var; auto. Qed. Lemma map_var_b : forall {n m} (f g : var n -> var m), (forall x, f x = g x) -> forall a, map_var f a = map_var g a. Proof. depind a; autorewrite with map_var; try f_equal; auto. Qed. Equations lift_var_by {n m} (p : scope_le n m) : var n -> var m := lift_var_by (scope_le_n eq_refl) := fun t => t; lift_var_by (scope_le_S p) := fun t => FS (lift_var_by p t); lift_var_by (scope_le_map p) := map_var (lift_var_by p). Equations lift_type_by {n m} (f : scope_le n m) (t : type n) : type m := lift_type_by f (tvar x) := tvar (lift_var_by f x); lift_type_by f ttop := ttop; lift_type_by f (tarr a b) := tarr (lift_type_by f a) (lift_type_by f b); lift_type_by f (tall a b) := tall (lift_type_by f a) (lift_type_by (scope_le_map f) b). 
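(* Added sketch: in this K-free variant, [scope_le_n] carries an explicit
   equality proof, so reflexive steps are written with [eq_refl]. *)
Example scope_le_zero_two : scope_le 0 2 :=
  scope_le_S (scope_le_S (scope_le_n eq_refl)).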
Lemma lift_var_by_app : forall {b c} (p : scope_le b c) {a} (q : scope_le a b) t, lift_var_by p (lift_var_by q t) = lift_var_by (scope_le_app q p) t. Proof with autorewrite with lift_var_by map_var scope_le_app in *; auto. intros b c p; induction p; intros a q t... - destruct e... - rewrite IHp; auto. - generalize dependent p. generalize dependent t. depind q; intros... simpl in e. revert e. simplify ?. intros... rewrite IHp... specialize (IHp _ q). rewrite (map_var_b (lift_var_by (scope_le_app q p)) (fun t => lift_var_by p (lift_var_by q t))); eauto. rewrite <- map_var_a; auto. Qed. Hint Rewrite @lift_var_by_app : lift_var_by. Lemma lift_type_by_id : forall {n} (t : type n) P, (forall x, lift_var_by P x = x) -> lift_type_by P t = t. Proof. depind t; intros; autorewrite with lift_type_by; rewrite ?H, ?IHt1, ?IHt2; auto. intros; depelim x; autorewrite with lift_var_by map_var; try f_equal; auto. Qed. Lemma lift_type_by_n : forall {n} (t : type n), lift_type_by (scope_le_n eq_refl) t = t. Proof. intros; eapply lift_type_by_id; intros; autorewrite with lift_var_by; auto. Qed. Hint Rewrite @lift_type_by_n : lift_type_by. Lemma lift_type_by_app : forall {a} t {b c} (p : scope_le b c) (q : scope_le a b), lift_type_by p (lift_type_by q t) = lift_type_by (scope_le_app q p) t. Proof. depind t; intros b c p; depind p; intros q; repeat (autorewrite with scope_le_app lift_var_by lift_type_by; rewrite ?IHt1, ?IHt2; auto). Qed. Hint Rewrite @lift_type_by_app : lift_type_by. Equations lookup {n} (Γ : env O n) (x : var n) : type n := lookup {n:=O} Γ x :=! x; lookup {n:=(S _)} (cons a Γ) FO := lift_type_by (scope_le_S (scope_le_n eq_refl)) a; lookup {n:=(S _)} (cons a Γ) (FS x) := lift_type_by (scope_le_S (scope_le_n eq_refl)) (lookup Γ x) . Lemma lookup_app : forall {n} (Γ : env O (S n)) {m} (Δ : env (S n) (S m)) x, lookup (env_app Γ Δ) (lift_var_by (env_scope_le Δ) x) = lift_type_by (env_scope_le Δ) (lookup Γ x). Proof with autorewrite with lookup scope_le_app env_app lift_var_by lift_type_by; auto. intros n Γ m Δ; induction Δ; intros x; simpl... destruct e... rewrite IHΔ... Qed. Hint Rewrite @lookup_app : lookup. Inductive sa : forall {n}, env O n -> type n -> type n -> Prop := | sa_top : forall {n} (Γ : env O n) s, sa Γ s ttop | sa_var_refl : forall {n} (Γ : env O n) x, sa Γ (tvar x) (tvar x) | sa_var_trans : forall {n} (Γ : env O (S n)) x t, sa Γ (lookup Γ x) t -> sa Γ (tvar x) t | sa_arr : forall {n} {Γ : env O n} {t1 t2 s1 s2}, sa Γ t1 s1 -> sa Γ s2 t2 -> sa Γ (tarr s1 s2) (tarr t1 t2) | sa_all : forall {n} {Γ : env O n} {t1 t2 s1 s2}, sa Γ t1 s1 -> sa (cons t1 Γ) s2 t2 -> sa Γ (tall s1 s2) (tall t1 t2) . Inductive sa_env : forall {n}, env O n -> env O n -> Prop := | sa_empty : sa_env (empty eq_refl) (empty eq_refl) | sa_cons : forall {n} (Γ Δ : env O n) a b, sa Γ a b -> sa_env Γ Δ -> sa_env (cons a Γ) (cons b Δ) . Lemma sa_refl : forall {n} (Γ : env O n) x, sa Γ x x. Proof. depind x; constructor; auto. Qed. Lemma sa_env_refl : forall {n} (Γ : env O n), sa_env Γ Γ. Proof. depind Γ. depelim e. constructor. constructor; auto using sa_refl. Qed. Inductive env_extend : forall {b c}, env O b -> env O c -> scope_le b c -> Prop := | env_extend_refl : forall {b} (Γ : env O b), env_extend Γ Γ (scope_le_n eq_refl) | env_extend_cons : forall {b c} (Γ : env O b) (Δ : env O c) p a, env_extend Γ Δ p -> env_extend (cons a Γ) (cons (lift_type_by p a) Δ) (scope_le_map p) | env_extend_2 : forall {b c} (Γ : env O b) (Δ : env O c) p a, env_extend Γ Δ p -> env_extend Γ (cons a Δ) (scope_le_S p) . 
Lemma env_app_extend : forall {b c} (Γ : env O b) (Δ : env b c), env_extend Γ (env_app Γ Δ) (env_scope_le Δ). Proof. depind Δ; intros; autorewrite with env_app scope_le_app in *; simpl; try constructor; auto. destruct e. constructor. Qed. Lemma env_extend_lookup : forall {b c} (Γ : env O b) (Δ : env O c) P, env_extend Γ Δ P -> forall x, lift_type_by P (lookup Γ x) = lookup Δ (lift_var_by P x). Proof with autorewrite with lift_type_by lift_var_by map_var lookup scope_le_app; auto. intros b c Γ Δ P A; depind A; intros x; depelim x... all:rewrite <- IHA... Qed. Lemma sa_weakening : forall {b} (Γ : env O b) p q (A : sa Γ p q) {c P} (Δ : env O c) (B : env_extend Γ Δ P), sa Δ (lift_type_by P p) (lift_type_by P q). Proof. intros b Γ p q A; induction A; intros c P Δ B; autorewrite with lift_type_by in *; try (auto; constructor; auto; fail). - depelim c; [depelim B|]. constructor; rewrite <- (env_extend_lookup _ _ _ B); auto. - constructor; auto. eapply IHA2. constructor. auto. Qed. Lemma sa_weakening_app : forall {b} (Γ : env O b) p q (A : sa Γ p q) {c} (Δ : env b c), sa (env_app Γ Δ) (lift_type_by (env_scope_le Δ) p) (lift_type_by (env_scope_le Δ) q). Proof. intros; eapply sa_weakening. exact A. auto using env_app_extend. Qed. Lemma sa_toname : forall {n m} Γ (Δ : env (S n) m) x, x <> lift_var_by (env_scope_le Δ) FO -> forall p q, lookup (env_app (cons p Γ) Δ) x = lookup (env_app (cons q Γ) Δ) x. Proof. intros n m Γ Δ; depind Δ; intros x A p q; depelim x; try depelim e; simpl in *; autorewrite with env_app lookup lift_var_by in *; auto. - exfalso; auto. - specialize (IHΔ Γ x). forward IHΔ by intro; subst; auto. now rewrite (IHΔ p q). Qed. Lemma sa_narrowing : forall {s} q, (forall {s'} (P : scope_le s s') (Γ : env O s') p (A : sa Γ p (lift_type_by P q)) s'' (Δ : env (S s') s'') a b (B : sa (env_app (cons (lift_type_by P q) Γ) Δ) a b), sa (env_app (cons p Γ) Δ) a b) /\ (forall {s'} (A : scope_le s s') (Γ : env O s') p (B : sa Γ p (lift_type_by A q)) r (C : sa Γ (lift_type_by A q) r), sa Γ p r). Proof. intros s q; induction q. match goal with | [ |- _ /\ ?Q ] => assert (PLOP:Q); [ intros s' A Γ p B; depind B; intros r C; autorewrite with lift_type_by lift_var_by in *; simpl_exist; try (constructor; auto; fail); try (constructor; eapply IHB; autorewrite with lift_type_by; auto; fail); try (depelim C; constructor; destruct_pairs; eauto; fail); try (specialize (IHB _ _ _ IHq1 IHq2 A); destruct_pairs; constructor; eauto; fail) | split; [ intros s' P Γ p A; depind A; intros s'' Δ a b B; destruct_pairs; remember (env_app (cons _ Γ) Δ) as PPP; depind B; try (subst; constructor; auto; autorewrite with core; auto; fail); clear B; constructor; specialize (IHB _ HeqPPP); subst *; match goal with | [ IHB : sa _ (lookup (env_app (cons ?a _) _) ?x) _ |- sa _ (lookup (env_app (cons ?b _) _) _) _ ] => destruct (var_dec_eq x (lift_var_by (env_scope_le Δ) FO)) ; [ subst; autorewrite with lookup lift_type_by lift_var_by in *; simpl_exist; autorewrite with lookup lift_type_by lift_var_by scope_le_app in *; try (auto; depelim IHB; auto; constructor; auto; fail); try ((apply sa_var_trans in A || assert (A := sa_arr A1 A2) || assert (A := sa_all A1 A2)); match goal with | [ A : sa _ ?p _ |- _ ] => (apply sa_weakening_app with (Δ0:=cons p empty) in A; apply sa_weakening_app with (Δ0:=Δ) in A; autorewrite with env_app lift_type_by lift_var_by scope_le_app in *; simpl in *; eapply PLOP; [exact A | exact IHB]) end; fail) | rewrite sa_toname with (p:=b) (q:=a); auto ] end | assumption ] ] end. exact C. (* TODO fix *) Abort. 
(* - exact C. *) (* - constructor; auto. eapply IHA. *) (* - clear IHB1 IHB2. *) (* depelim C; [constructor|]; destruct_pairs. *) (* constructor; eauto. simpl in H. simpl in H0. *) (* apply (H1 _ A Γ0 _ C1 _ empty _ _) in B2; autorewrite with env_app in B2; eauto. *) (* Qed. *) (* *) Coq-Equations-1.3.1-8.20/test-suite/smolka_inj_K2.v000066400000000000000000000071051463127417400215640ustar00rootroot00000000000000From Equations Require Import Equations. Set Equations Transparent. Inductive K : nat -> nat -> Type := | K1: K x | K2: forall y z w, w = y -> K x w -> K y z -> K x z. Inductive K (x: nat) : nat -> Type := | K1 y : S x = y -> K x y | K2: forall y z w, w = y -> K x w -> K y z -> K x z. Derive Signature for K. Derive NoConfusionHom for K. Inductive K' (x: nat) : nat -> Type := | K1' : K' x (S x) | K2' : forall y z, K' x y -> K' y z -> K' x z. Definition Sect {A B : Type} (s : A -> B) (r : B -> A) := forall x : A, r (s x) = x. Equations ap {A B : Type} (f : A -> B) {x y : A} (p : x = y) : f x = f y := ap f eq_refl := eq_refl. Equations ap2 {A B C : Type} (f : A -> B -> C) {x y : A} (p : x = y) {x' y'} (p' : x' = y') : f x x' = f y y' := ap2 f eq_refl eq_refl := eq_refl. Class IsEquiv {A B : Type} (f : A -> B) := BuildIsEquiv { equiv_inv : B -> A ; eisretr : Sect equiv_inv f; eissect : Sect f equiv_inv; eisadj : forall x : A, eisretr (f x) = ap f (eissect x) }. Arguments eisretr {A B}%type_scope f%function_scope {_} _. Arguments eissect {A B}%type_scope f%function_scope {_} _. Arguments eisadj {A B}%type_scope f%function_scope {_} _. Arguments IsEquiv {A B}%type_scope f%function_scope. Equations KK' (x y : nat) (K : K x y) : K' x y := KK' x y (K1 _ eq_refl) := K1' x; KK' x y (K2 _ _ _ eq_refl l r) := K2' _ _ _ (KK' x _ l) (KK' _ _ r). Equations K'K (x y : nat) (k : K' x y) : K x y := K'K x ?(S x) (K1' x) := K1 x (S x) eq_refl; K'K x y (K2' w y l r) := K2 _ _ _ _ _ (K'K x w l) (K'K w y r). Equations KK'sect x y : Sect (K'K x y) (KK' x y) := KK'sect x ?(S x) (K1' x) := eq_refl; KK'sect x y (K2' w y l r) := ap2 (K2' x w y) (KK'sect _ _ l) (KK'sect _ _ r). Equations K'Ksect x y : Sect (KK' x y) (K'K x y) := K'Ksect x y (K1 _ eq_refl) := eq_refl; K'Ksect x y (K2 w y w eq_refl l r) := ap2 (K2 x _ _ _ eq_refl) (K'Ksect _ _ l) (K'Ksect _ _ r). Lemma ap2_ap {A B C D E} {x y : A} {x' y' : B} {f : C -> D -> E} {g: A -> C} {h : B -> D} {p : x = y} {q : x' = y'} : ap2 f (ap g p) (ap h q) = ap2 (fun x y => f (g x) (h y)) p q. Proof. now destruct p, q; cbn. Qed. Lemma ap_ap2 {A B C D} {x y : A} {x' y' : B} {g : A -> B -> C} {f : C -> D} {p : x = y} {q : x' = y'} : ap f (ap2 g p q) = ap2 (fun x y => f (g x y)) p q. Proof. now destruct p, q; cbn. Qed. (*Lemma ap2_ap_K2' {x y z} {p : K'K x y (KK' x y k1) = k1) q} : ap2 (K2' x y z) (ap (KK' x y) p) (ap (KK' y z) q) = ap (KK' x z) (ap2 (x:=) (K2 x y z) p q). *) Lemma K_equiv x y : IsEquiv (KK' x y). Proof. unshelve eapply (BuildIsEquiv _ _ _ (K'K x y)). - apply KK'sect. - apply K'Ksect. - intros k. induction k; simp KK' KK'sect. * destruct e. reflexivity. * destruct e. unfold KK' at 4; fold (KK' x w k1). fold (KK' _ _ k2). simpl. rewrite IHk1, IHk2. now rewrite ap2_ap, ap_ap2. Qed. Lemma K2_K2'_eq x y (p q : K x y) (H : p = q) : (KK' _ _ p = KK' _ _ q). Proof. destruct H. reflexivity. Qed. Derive Signature NoConfusionHom for K'. Fact K2'_injective x y z a b a' b' : K2' x y z a b = K2' x y z a' b' -> (a,b) = (a',b'). Proof. intros h. pose proof (ap (K'K _ _) h). simp K'K in H. noconf H. depelim H. cbn in h. 
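(* Added: the exploratory proof of [K2'_injective] above was left open; abort
   it so the following statements can be processed. *)
Abort.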
Fact K2_injective x y z w a b a' b' eq eq': K2 x y z w eq a b = K2 x y z w eq' a' b' -> (a,b) = (a',b'). Proof. intros h. pose proof (ap (KK' _ _) h). cbn in H. apply K_equiv in h. intros h%(noConfusionHom_K_obligation_2 _ _). simpl in h. depelim h. destruct h1'1. destruct eq. destruct h as [yy' [zz' [aa' bb']]]. rewrite (UIP_refl_nat _ yy') in aa', bb'. rewrite (UIP_refl_nat _ zz') in bb'. rewrite <- aa', <- bb'. reflexivity. Qed. Coq-Equations-1.3.1-8.20/test-suite/square_elim.v000066400000000000000000001042631463127417400214130ustar00rootroot00000000000000From Equations Require Import Equations. (** Taken from Agda issue 3034: https://github.com/agda/agda/issues/3034 *) Notation "( x , y , .. , z )" := (sigmaI _ .. (sigmaI _ x y) .. z) : core_scope. Inductive Square {A : Type} : forall w x y z: A, w = x -> x = y -> y = z -> z = w -> Type := square w : Square w w w w eq_refl eq_refl eq_refl eq_refl. Derive NoConfusion for Square. Set Universe Polymorphism. Import Sigma_Notations. Set Equations Transparent. Obligation Tactic := idtac. Lemma singleton_eq {A} (x : A) (p q : { y : A & x = y }) : p = q. Proof. destruct p, q. destruct pr2. destruct pr3. reflexivity. Defined. Lemma pack_sigma {A} {B : A -> Type} (P : forall x : A, B x -> Type) : (forall (p : &{ x : A & B x }), P p.1 p.2) -> (forall (x : A) (y : B x), P x y). Proof. intros. apply (X &(x & y)). Defined. Definition pr1_seq@{i} {A : Type@{i}} {P : A -> Type@{i}} {p q : sigma A P} (e : p = q) : p.1 = q.1. Proof. destruct e. apply eq_refl. Defined. Equations NoC_point_l {A} {x : A} (e e' : &{ y : _ & eq x y}) : Prop := NoC_point_l (x, p) (y, q) := True. Equations noc_point_l {A} {x : A} (e e' : &{ y : _ & eq x y}) (H : e = e') : NoC_point_l e e' := noc_point_l (A:=A) (x:=x) (y, p) (z, q) H := I. Equations noc_point_inv_l {A} {x : A} (e e' : &{ y : _ & eq x y}) (n : NoC_point_l e e') : e = e' := noc_point_inv_l (A:=A) (x:=x) (y, p) (z, q) n := _. Next Obligation. simpl. intros. red in n. revert z q n. refine (pack_sigma _ _). revert y p. refine (pack_sigma _ _). intros p p0. pose (singleton_eq _ p p0). destruct e. intros. reflexivity. Defined. Instance noc_point_noconf_l {A} {x : A} : NoConfusionPackage &{ y : _ & eq x y }. Proof. unshelve econstructor. apply NoC_point_l. apply noc_point_l. apply noc_point_inv_l. intros [y e]. destruct e. intros. destruct e. simpl. reflexivity. Defined. Equations NoC_point_r {A} {x : A} (e e' : &{ y : _ & eq y x}) : Prop := NoC_point_r (sigmaI _ _ _) (sigmaI _ _ _) := True. Equations noc_point_r {A} {x : A} (e e' : &{ y : _ & eq y x}) (H : e = e') : NoC_point_r e e' := noc_point_r (sigmaI _ _ _) (sigmaI _ _ _) _ := I. Equations noc_point_inv_r {A} {x : A} (e e' : &{ y : _ & eq y x}) (n : NoC_point_r e e') : e = e' := noc_point_inv_r (sigmaI _ _ eq_refl) (sigmaI _ _ eq_refl) n := eq_refl. Instance noc_point_noconf_r {A} {x : A} : NoConfusionPackage &{ y : _ & eq y x }. Proof. unshelve econstructor. apply NoC_point_r. apply noc_point_r. apply noc_point_inv_r. intros [y e]. destruct e. intros. destruct e. simpl. reflexivity. Defined. Equations NoC_hetero {A} (e e' : &{ p : &{ x : A & A } & eq p.1 p.2}) : Prop := NoC_hetero (sigmaI _ (sigmaI _ x _) _) (sigmaI _ (sigmaI _ y _) _) := x = y. Equations noc_hetero {A} (e e' : &{ p : &{ x : A & A } & eq p.1 p.2 }) (H : e = e') : NoC_hetero e e' := noc_hetero (sigmaI _ (sigmaI _ x _) _) (sigmaI _ (sigmaI _ _ _) _) eq_refl := eq_refl. Lemma noc_hetero_inv {A} (e e' : &{ p : &{ x : A & A } & eq p.1 p.2 }) (H : NoC_hetero e e') : e = e'. Proof. 
destruct e as [[x y] e]. destruct e' as [[x' y'] e'']. simpl in *. destruct e, e''. red in H. destruct H. reflexivity. Defined. Instance noc_hetero_package {A} : NoConfusionPackage &{ p : &{ x : A & A } & eq p.1 p.2 }. Proof. unshelve econstructor. apply NoC_hetero. apply noc_hetero. apply noc_hetero_inv. intros [[x y] e]. intros b <-. simpl in *. destruct e. simpl. reflexivity. Defined. (* Derive NoConfusionHom for eqi. *) (* Next Obligation. *) (* Notation " x = y " := (eqi x y). *) (* Notation " x = y " := (eqi x y) : type_scope. *) (* Lemma noc {A} {w x y z : A} {p q r s} (u v : Square w x y z p q r s) : Type. *) (* Proof. *) (* depelim u. *) (* depelim v. *) (* exact True. *) (* Defined. *) (* Lemma noc_noconf {A} {w x y z : A} {p q r s} (u v : Square w x y z p q r s) (e : u = v) : noc u v. *) (* Proof. *) (* destruct e. *) (* depelim u. *) (* exact I. *) (* Defined. *) Notation " p # e " := (@eq_rect _ _ _ e _ p) (at level 20). Notation " p [ P ] # e " := (@eq_rect _ _ P e _ p) (at level 20). Notation " p [ P ] ## e " := (@eq_rect_dep _ _ P e _ p) (at level 20, only parsing). Polymorphic Lemma eq_simplification_sigma1_dep_dep@{i j} {A : Type@{i}} {P : A -> Type@{i}} (x y : &{ x : A & P x }) {B : eq x y -> Type@{j}} : (forall e' : eq x.1 y.1, forall e : eq (@eq_rect A x.1 P x.2 y.1 e') y.2, B (pack_sigma_eq e' e)) -> (forall e : eq x y, B e). Proof. intros. revert X. destruct e. intros X. simpl in *. apply (X eq_refl eq_refl). Defined. Polymorphic Definition distribute_sigma_eq@{i j} {A : Type@{i}} {B : A -> Type@{i}} {C : forall x : A, B x -> Type@{j}} {x y : A} {e : eq x y} {p : B x} {q : B y} {z : C x p} {w : C y q} (e1 : eq (e [fun x => B x] # p) q) (e2 : eq (e1 [fun lhs => C y lhs] # (e [fun x e => C x (e [fun x => B x] # p)] ## z)) w) : eq (e [ fun x => @sigma@{j} _ (fun b : B x => C x b) ] # &(p & z)) &(q & w). Proof. destruct e. simpl in *. destruct e1. simpl in *. destruct e2. apply eq_refl. Defined. Monomorphic Inductive Iseq2 {A : Type} : forall x y: A, x = y -> y = x -> Type := iseq2 w : Iseq2 w w eq_refl eq_refl. Monomorphic Derive NoConfusion for Iseq2. Set Universe Polymorphism. (* Derive NoConfusion for sigma. *) (* Next Obligation. destruct b. red. reflexivity. Defined. *) (* Next Obligation. Defined. *) Lemma simplify_eq_transport {A} {B : A -> Type} {C : forall x : A, B x -> Type} {x y : A} {e : eq x y} {z : &{ b : B x & C x b}} {w : &{ b : B y & C y b}} (P : @eq &{ x : B y & C y x } (e [ fun y => &{ b : B y & C y b } ] # z) w -> Type) : (forall e1 : (eq (e [fun x => B x] # z.1) w.1), forall e2 : eq (e1 [fun lhs => C y lhs] # (e [fun x e => C x (e [fun x => B x] # z.1)] ## z.2)) w.2, P (distribute_sigma_eq e1 e2)) -> (forall e' : eq (e # z) w, P e'). Proof. intros. destruct e. simpl in *. destruct e'. specialize (X eq_refl eq_refl). simpl in X. exact X. Defined. Polymorphic Definition distribute_sigma_eq_nondep@{i j} {A : Type@{i}} {B : Type@{i}} {C : forall x : A, B -> Type@{j}} {x y : A} {e : eq x y} {p : B} {q : B} {z : C x p} {w : C y q} (e1 : eq p q) (e2 : eq (e1 [fun lhs => C y lhs] # (e [fun x e => C x p] ## z)) w) : eq (e [ fun x => @sigma@{j} _ (fun b : B => C x b) ] # &(p & z)) &(q & w). Proof. destruct e. simpl in *. destruct e1. simpl in *. destruct e2. apply eq_refl. Defined. 
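(* Added illustrative check, not part of the original development: the
   transport notation [e [P] # p] used throughout this file reduces to [p]
   when the equation is [eq_refl], which is the degenerate case the
   simplification lemmas above build on. *)
Example transport_refl_example {A} (P : A -> Type) (x : A) (p : P x) :
  (eq_refl [ P ] # p) = p.
Proof. reflexivity. Qed.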
Lemma simplify_eq_transport_nondep {A} {B : Type} {C : forall x : A, B -> Type} {x y : A} {e : eq x y} {z : &{ b : B & C x b}} {w : &{ b : B & C y b}} (P : @eq &{ x : B & C y x } (e [ fun y => &{ b : B & C y b } ] # z) w -> Type) : (forall e1 : (eq z.1 w.1), forall e2 : eq (e1 [fun lhs => C y lhs] # (e [fun x => C x _] # z.2)) w.2, P (distribute_sigma_eq_nondep e1 e2)) -> (forall e' : eq (e # z) w, P e'). Proof. intros. destruct e. simpl in *. destruct e'. specialize (X eq_refl eq_refl). simpl in X. exact X. Defined. Polymorphic Definition distribute_sigma_eq_nondep'@{i j} {A : Type@{i}} {B : A -> Type@{i}} {C : A -> Type@{i}} {x y : A} {e : eq x y} {p : B x} {q : B y} {z : C x} {w : C y} (e1 : eq (e [B] # p) q) (e2 : eq (e [C] # z) w) : eq (e [ fun x => @sigma@{j} _ (fun _ : B x => C x) ] # &(p & z)) &(q & w). Proof. destruct e. simpl in *. destruct e1. simpl in *. destruct e2. apply eq_refl. Defined. Lemma simplify_eq_transport_nondep' {A} {B : A -> Type} {C : A -> Type} {x y : A} {e : eq x y} {z : &{ b : B x & C x}} {w : &{ b : B y & C y}} (P : @eq &{ _ : B y & C y } (e [ fun y => &{ b : B y & C y } ] # z) w -> Type) : (forall e1 : (eq (e # z.1) w.1), forall e2 : eq (e [fun x => C x] # z.2) w.2, P (distribute_sigma_eq_nondep' e1 e2)) -> (forall e' : eq (e # z) w, P e'). Proof. intros. destruct e. simpl in *. destruct e'. specialize (X eq_refl eq_refl). simpl in X. exact X. Defined. Polymorphic Definition distribute_sigma_eq_nondep_dep@{i j} {A : Type@{i}} {B : Type@{i}} {C : A -> B -> Type@{i}} {x y : A} {e : eq x y} {p : B} {q : B} {z : C x p} {w : C y q} (e1 : eq p q) (e2 : eq (e1 [fun lhs => C y lhs] # (e [fun x => C x p] # z)) w) : @eq &{ x : _ & C y x } (e [ fun x => @sigma@{j} _ (fun b : B => C x b) ] # &(p & z)) (q, w). Proof. destruct e. simpl in *. destruct e1. simpl in *. destruct e2. apply eq_refl. Defined. Lemma simplify_eq_transport_nondep_dep {A} {B : Type} {C : A -> B -> Type} {x y : A} {e : eq x y} {z : &{ b : B & C x b}} {w : &{ b : B & C y b}} (P : @eq &{ b : B & C y b } (e [ fun y => &{ b : B & C y b } ] # z) w -> Type) : (forall (e1 : eq z.1 w.1) (e2 : eq (e1 [fun lhs => C y lhs] # (e [fun x => C x z.1] # z.2)) w.2), P (distribute_sigma_eq_nondep_dep e1 e2)) -> (forall e' : eq (e # z) w, P e'). Proof. intros. destruct e. simpl in *. destruct e'. specialize (X eq_refl eq_refl). simpl in X. exact X. Defined. Lemma simplify_eq_transport_inv {A} {B : A -> Type} {C : forall x : A, B x -> Type} {x y : A} {e : eq x y} {z : &{ b : B x & C x b}} {w : &{ b : B y & C y b}} (P : @eq &{ x : B y & C y x } (e [ fun y => &{ b : B y & C y b } ] # z) w -> Type) : (forall e' : eq (e # z) w, P e') -> (forall e1 : (eq (e [fun x => B x] # z.1) w.1), forall e2 : eq (e1 [fun lhs => C y lhs] # (e [fun x e => C x (e [fun x => B x] # z.1)] ## z.2)) w.2, P (distribute_sigma_eq e1 e2)). Proof. intros. destruct e. simpl in *. destruct z, w; simpl in *. destruct e1. simpl in *. destruct e2. apply X. Defined. Lemma simpl_eq_rect_constant {A B} (x y : A) (p : B) (e : eq x y) : (e [ fun _ => B ] # p) = p. Proof. destruct e. simpl. trivial. Defined. Lemma eq_rect_constant {A B} (x y : A) (p q : B) (e : eq x y) : ((e [ fun _ => B ] # p = q)) = (p = q). Proof. destruct e. simpl. trivial. Defined. Lemma eq_rect_dep_constant {A B} (x y : A) (p q : B) (e : eq x y) : ((e [ fun _ _ => B ] ## p = q)) = (p = q). Proof. destruct e. simpl. trivial. Defined. 
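(* Added usage sketch, not part of the original file: [simpl_eq_rect_constant]
   can be used as a left-to-right rewrite to erase a transport over a constant
   family before reasoning on the underlying equality. *)
Example simpl_eq_rect_constant_use {A B} (x y : A) (e : x = y) (p q : B) (H : p = q) :
  (e [ fun _ => B ] # p) = q.
Proof. rewrite simpl_eq_rect_constant. exact H. Qed.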
Lemma eq_rect_proj {A} {B : A -> Type} {C : forall x : A, B x -> Type} (x y : A) (e : eq x y) (p : &{ b : B x & C x b }) : ((e [ fun x => &{ b : B x & C x b } ] # p).1) = (e [ B ] # p.1). Proof. destruct e. simpl. reflexivity. Defined. Definition eq_trans2 {A} {x y z : A} (p : x = y) (q : y = z) : x = z. Proof. destruct p. exact q. Defined. Lemma eq_rect_proj_nondep {A} {B : Type} {C : forall x : A, B -> Type} (x y : A) (e : eq x y) (p : &{ b : B & C x b }) : ((e [ fun x => &{ b : B & C x b } ] # p).1) = p.1. Proof. destruct e. simpl. reflexivity. Defined. Lemma simplify_eq_rect_proj_nondep {A} {B : Type} {C : forall x : A, B -> Type} (x y : A) (e : eq x y) (p : &{ b : B & C x b }) q (P : (e [ fun x => &{ b : B & C x b } ] # p).1 = q -> Type) : (forall e' : p.1 = q, P (eq_trans2 (eq_rect_proj_nondep x y e p) e')) -> (forall x, P x). Proof. destruct e. simpl. intros. apply (X x0). Defined. Lemma simplify_eq_rect_proj {A} {B : A -> Type} {C : forall x : A, B x -> Type} (x y : A) (e : eq x y) (p : &{ b : B x & C x b }) q (P : (e [ fun x => &{ b : B x & C x b } ] # p).1 = q -> Type) : (forall e' : e [ B ] # p.1 = q, P (eq_trans2 (eq_rect_proj x y e p) e')) -> (forall x, P x). Proof. destruct e. simpl. intros. apply (X x0). Defined. Lemma eq_rect_proj2 {A} {B : A -> Type} {C : forall x : A, B x -> Type} (x y : A) (e : eq x y) (p : &{ b : B x & C x b }) : eq ((e [ fun x => &{ b : B x & C x b } ] # p).2) (e [ fun x e => C x (e #p).1 ] ## p.2). Proof. destruct e. simpl. reflexivity. Defined. Lemma eq_rect_proj_dep {A} (x : A) {B : forall x' : A, eq x x' -> Type} {C : forall (x' : A) (e : eq x x'), B x' e -> Type} (y : A) (e : eq x y) (p : &{ b : B x eq_refl & C x eq_refl b }) : eq ((e [ fun x' (e : eq x x') => &{ b : B x' e & C x' e b } ] ## p).1) (e [ B ] ## p.1). Proof. destruct e. simpl. reflexivity. Defined. Notation " 'rew' H 'in' c " := (@eq_rect _ _ _ c _ H) (at level 20, only parsing). Polymorphic Definition pr2_seq@{i} {A : Type@{i}} {P : A -> Type@{i}} {p q : sigma A P} (e : p = q) : rew (pr1_seq e) in p.2 = q.2. Proof. destruct e. apply eq_refl. Defined. Lemma pack_sigma_eq_inv {A} {B : A -> Type} (x y : A) (p : B x) (q : B y) (P : forall e : x = y, e [B] # p = q -> Type) : (forall e : &(x & p) = &(y & q), P (pr1_seq e) (pr2_seq e)) -> (forall (e : x = y) (e' : e [B] # p = q), P e e'). Proof. intros. destruct e, e'. apply (X eq_refl). Defined. (* Lemma singleton_eq {A} {B : A -> Type} (x y : A) (e : x = y) (p : B x) (q : B y) *) (* (e' : &(x & p) = &(y & q)) : rew e in p = q. *) (* Proof. *) (* destruct e. simpl. *) (* change q with &(y & q).2. *) (* revert e. change y with &(y & q).1 at 1 2 3. *) (* destruct e'. simpl. *) Lemma refine_eq_left {A} {x y w : A} (e : y = x) (P : x = w -> Type) : (forall (H : y = w), P (e [fun x => x = w]# H)) -> (forall (H : x = w), P H). Proof. destruct e. simpl. exact id. Defined. Section pathsigma. Universe i. Equations path_sigma {A : Type@{i}} {P : A -> Type@{i}} {u v : sigma A P} (p : u.1 = v.1) (q : rew p in u.2 = v.2) : u = v := path_sigma (u:=(sigmaI _ _ _)) (v:=(sigmaI _ _ _)) eq_refl eq_refl := eq_refl. End pathsigma. Lemma pack_eq_sigma {A} (P : forall (x : A), x = x -> Type) : (forall (p : &{ x : A & x = x }), P p.1 p.2) -> (forall (x : A) (e : x = x), P x e). Proof. intros. apply (X &(x & e)). Defined. Lemma eq_rect_trans {A} {x y z : A} {P : A -> Type} (e : x = y) (e' : y = z) (p : P x) : rew (eq_trans e e') in p = rew e' in (rew e in p). Proof. destruct e'. simpl. reflexivity. Defined. 
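(* Added sanity check, not part of the original file: [eq_trans2] computes on a
   reflexivity proof in its first argument, matching how the
   [simplify_eq_rect_proj*] lemmas above simplify after destructing [e]. *)
Example eq_trans2_refl_example {A} {x y : A} (q : x = y) :
  eq_trans2 eq_refl q = q.
Proof. reflexivity. Qed.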
Lemma simplify_eq_rect_trans {A} {B : A -> Type} {x y z : A} {e : eq x y} {e' : eq y z} {p : B x} {q : B z} (P : @eq (B z) (rew e' in (rew e in p)) q -> Type) : (forall e1 : (eq (rew (eq_trans e e') in p) q), P (eq_rect_trans e e' p [fun x => x = q] # e1)) -> (forall e'' : eq (rew e' in (rew e in p)) q, P e''). Proof. intros. destruct e. simpl in *. destruct e'. apply (X e''). Defined. (* Lemma simplify_eqx A : forall (p q : &{ x : A & &{ y : A & eq x y }}), p = q -> *) (* Proof. *) (* intros [ x [y p]] [x' [y' q]]. destruct p. destruct q. reflexivity. *) Lemma invIseq2 {A} (x : A) (e : x = x) (iseq : Iseq2 x x eq_refl e) : e = eq_refl. generalize_eqs_sig iseq. destruct iseq. simplify ?. simpl. refine (eq_simplification_sigma1_dep _ _ _ _ _). simpl. intros e0. refine (simplify_eq_transport _ _). simpl. revert e0. refine (pack_sigma_eq_inv _ _ _ _ _ _). pose (apply_noConfusion (A := &{ x' : _ & eq w x'})). refine (p _ _ _ _). simpl. unfold NoC_point_l. intros []. trivial. Defined. Lemma invIseq2' {A} (x : A) (e : x = x) (iseq : Iseq2 x x e eq_refl) : e = eq_refl. generalize_eqs_sig iseq. destruct iseq. simplify ?. simpl. refine (eq_simplification_sigma1_dep _ _ _ _ _). simpl. intros e0. refine (simplify_eq_transport_nondep' _ _). simpl. intros e' e''. revert e0 e'' e'. refine (pack_sigma_eq_inv _ _ _ _ _ _). pose (apply_noConfusion (A := &{ x' : _ & eq x' w})). refine (p _ _ _ _). simpl. unfold NoC_point_r. intros. exact e'. Defined. Lemma invIseq2d {A} (x : A) (e : x = x) (iseq : Iseq2 x x eq_refl e) : &{ H : eq_refl = e & (H [fun e => Iseq2 x x eq_refl e] # iseq2 x) = iseq }. Proof. generalize_eqs_sig iseq. destruct iseq. simplify ?. simpl. refine (eq_simplification_sigma1_dep_dep _ _ _). simpl. intros e0. refine (simplify_eq_transport _ _). simpl. revert e0. refine (pack_sigma_eq_inv _ _ _ _ _ _). pose (apply_noConfusion (A := &{ x' : _ & eq w x'})). refine (p _ _ _ _). simpl. unfold NoC_point_l. intros []. intros ->. simpl. intros ->. exists eq_refl. simpl. exact eq_refl. Defined. Definition J {A} {x : A} (P : forall y : A, x = y -> Type) (p : P x eq_refl) (y : A) (e : x = y) : P y e. destruct e. exact p. Defined. Definition subst {A : Type} {x : A} {P : A -> Type} {y : A} (e : x = y) (f : P x) : P y := eq_rect x P f y e. Notation "p =_{ P ; e } q" := (subst (P:=P) e p = q) (at level 90, format "p =_{ P ; e } q"). Definition subst_expl {A : Type} {x : A} {P : A -> Type} {y : A} (e : x = y) (f : P x) : P y := subst e f. Notation " 'rewP' H 'at' P 'in' c " := (@subst_expl _ _ P _ H c) (at level 20). Definition ap@{i j} {A : Type@{i}} {B : Type@{j}} (f : A -> B) {x y : A} (e : x = y) : f x = f y := J@{i j} (fun y _ => f x = f y) (@eq_refl _ (f x)) y e. (* aka ap *) Lemma ap_iter {A B C} (f : A -> B) (g : B -> C) (x y : A) (e : x = y) : Top.ap g (Top.ap f e) = Top.ap (fun x => g (f x)) e. Proof. revert y e. refine (Top.J _ _). reflexivity. Qed. (* Lemma ap_subst2 {A B C} (f : C -> B) (x y : A) (e : x = y) (z w : A -> C) (p : z x = w x) : *) (* Top.ap f (subst2 (P:=fun x : A => z x = w x) p y e) = *) (* Top.subst2 (P := fun x : A => f (z x) = f (w x)) (Top.ap f p) y e. *) (* Proof. revert y e. refine (Top.J _ _). simpl. reflexivity. Defined. *) (* Definition apd {A} {B : A -> Type} (f : forall x : A, B x) {x y : A} (p : x = y) : *) (* subst p (f x) = f y := *) (* J (fun y p => subst p (f x) = f y) (@eq_refl _ (f x)) y p. *) (* (* aka apd *) *) Definition Sect {A B : Type} (s : A -> B) (r : B -> A) := forall x : A, r (s x) = x. 
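(* Added illustrative example, not part of the original file: [Sect s r] as
   just defined states that [r] retracts [s]; successor and predecessor on
   [nat] give a definitional instance. *)
Example sect_S_pred : Sect S Nat.pred.
Proof. intro n. reflexivity. Qed.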
(** A typeclass that includes the data making [f] into an adjoin equivalence*) Set Printing Universes. Cumulative Class IsEquiv@{i} {A : Type@{i}} {B : Type@{i}} (f : A -> B) := BuildIsEquiv { equiv_inv : B -> A ; eisretr : Sect equiv_inv f; eissect : Sect f equiv_inv; eisadj : forall x : A, eisretr (f x) = ap f (eissect x) }. Arguments eisretr {A B} f {_} _. Arguments eissect {A B} f {_} _. Arguments eisadj {A B} f {_} _. Record Equiv@{i} (A B : Type@{i}) := { equiv :> A -> B ; is_equiv :> IsEquiv equiv }. Arguments equiv {A B} e. Instance Equiv_IsEquiv {A B} (e : Equiv A B) : IsEquiv (equiv e). Proof. apply is_equiv. Defined. Definition inv_equiv {A B} (E: Equiv A B) : B -> A := equiv_inv (IsEquiv:=is_equiv _ _ E). Notation "f ^-1" := (@equiv_inv _ _ f _) (at level 3). Definition equiv_inv_equiv@{i} {A B : Type@{i}} {E: Equiv A B} (x : A) : inv_equiv _ (equiv E x) = x := eissect _ x. Definition inv_equiv_equiv@{i} {A B : Type@{i}} {E: Equiv A B} (x : B) : equiv E (inv_equiv _ x) = x := eisretr _ x. Definition equiv_adj@{i} {A B : Type@{i}} {E: Equiv A B} (x : A) : inv_equiv_equiv (equiv E x) = ap (equiv E) (equiv_inv_equiv x) := eisadj _ x. Lemma ap_trans {A B} (f : A -> B) (x y z : A) (e : x = y) (e' : y = z) : ap f (eq_trans e e') = eq_trans (ap f e) (ap f e'). Proof. destruct e, e'. reflexivity. Defined. Lemma ap_sym {A B} (f : A -> B) (x y : A) (e : x = y) : ap f (eq_sym e) = eq_sym (ap f e). Proof. destruct e. reflexivity. Defined. Equations concat {A} {x y z : A} (e : x = y) (e' : y = z) : x = z := concat eq_refl q := q. Notation "p @ q" := (concat p q) (at level 60). Equations concat_1p {A : Type} {x y : A} (p : x = y) : eq_refl @ p = p := concat_1p eq_refl := eq_refl. Equations concat_p1 {A : Type} {x y : A} (p : x = y) : p @ eq_refl = p := concat_p1 eq_refl := eq_refl. Equations concat_Vp {A : Type} {x y : A} (p : x = y) : eq_sym p @ p = eq_refl := concat_Vp eq_refl := eq_refl. Equations concat_pV {A : Type} {x y : A} (p : x = y) : p @ eq_sym p = eq_refl := concat_pV eq_refl := eq_refl. Equations concat_p_pp {A : Type} {x y z t : A} (p : x = y) (q : y = z) (r : z = t) : p @ (q @ r) = (p @ q) @ r := concat_p_pp eq_refl _ _ := eq_refl. Equations concat_pp_A1 {A : Type} {g : A -> A} (p : forall x, x = g x) {x y : A} (q : x = y) {w : A} (r : w = x) : (r @ p x) @ ap g q = (r @ q) @ p y := concat_pp_A1 _ eq_refl eq_refl := concat_p1 _. Equations whiskerL {A : Type} {x y z : A} (p : x = y) {q r : y = z} (h : q = r) : p @ q = p @ r := whiskerL _ eq_refl := eq_refl. Equations whiskerR {A : Type} {x y z : A} {p q : x = y} (h : p = q) (r : y = z) : p @ r = q @ r := whiskerR eq_refl _ := eq_refl. Equations moveL_M1 {A : Type} {x y : A} (p q : x = y) : eq_sym q @ p = eq_refl -> p = q := moveL_M1 _ eq_refl := fun e => e. Definition inverse2 {A : Type} {x y : A} {p q : x = y} (h : p = q) : eq_sym p = eq_sym q := ap (@eq_sym _ _ _) h. Equations ap02 {A B : Type} (f:A->B) {x y:A} {p q:x=y} (r:p=q) : ap f p = ap f q := ap02 f eq_refl := eq_refl. Equations ap_p_pp {A B : Type} (f : A -> B) {w : B} {x y z : A} (r : w = f x) (p : x = y) (q : y = z) : r @ (ap f (p @ q)) = (r @ ap f p) @ (ap f q) := ap_p_pp f _ eq_refl _ := concat_p_pp _ eq_refl _. Equations ap_compose {A B C : Type} (f : A -> B) (g : B -> C) {x y : A} (p : x = y) : ap (fun x => g (f x)) p = ap g (ap f p) := ap_compose f g eq_refl := eq_refl. 
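(* Added illustrative example, not part of the original file: a small groupoid
   computation combining the [concat_1p] and [concat_pV] laws proved above. *)
Example groupoid_example {A} {x y : A} (p : x = y) :
  (eq_refl @ p) @ eq_sym p = eq_refl.
Proof. rewrite concat_1p. exact (concat_pV p). Qed.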
Equations concat_A1p {A : Type} {g : A -> A} (p : forall x, g x = x) {x y : A} (q : x = y) : (ap g q) @ (p y) = (p x) @ q := concat_A1p (g:=g) p (x:=x) eq_refl with p x, g x := { concat_A1p p eq_refl eq_refl _ := eq_refl }. Equations ap_pp {A B : Type} (f : A -> B) {x y z : A} (p : x = y) (q : y = z) : ap f (p @ q) = (ap f p) @ (ap f q) := ap_pp _ eq_refl eq_refl => eq_refl. Equations concat_pp_V {A : Type} {x y z : A} (p : x = y) (q : y = z) : (p @ q) @ eq_sym q = p := concat_pp_V eq_refl eq_refl => eq_refl. Equations ap_V {A B : Type} (f : A -> B) {x y : A} (p : x = y) : ap f (eq_sym p) = eq_sym (ap f p) := ap_V f eq_refl => eq_refl. Hint Rewrite @ap_pp @ap_V : ap. Hint Rewrite @concat_pp_V : concat. Equations concat_pA1 {A : Type} {f : A -> A} (p : forall x, x = f x) {x y : A} (q : x = y) : (p x) @ (ap f q) = q @ (p y) := concat_pA1 p eq_refl := concat_p1 (p _). Equations concat_p_Vp {A : Type} {x y z : A} (p : x = y) (q : x = z) : p @ (eq_sym p @ q) = q := concat_p_Vp eq_refl eq_refl := eq_refl. Equations concat_pV_p {A : Type} {x y z : A} (p : x = z) (q : y = z) : (p @ eq_sym q) @ q = p := concat_pV_p eq_refl eq_refl := eq_refl. Hint Rewrite @concat_pA1 @concat_p_Vp @concat_pV_p : concat. Transparent concat. Definition concat_pA1_p {A : Type} {f : A -> A} (p : forall x, f x = x) {x y : A} (q : x = y) {w : A} (r : w = f x) : (r @ ap f q) @ p y = (r @ p x) @ q. Proof. destruct q; simpl. now rewrite !concat_p1. (* now simp concat. *) Defined. Definition concat2 {A} {x y z : A} {p p' : x = y} {q q' : y = z} (h : p = p') (h' : q = q') : p @ q = p' @ q' := match h, h' with eq_refl, eq_refl => eq_refl end. Reserved Notation "p @@ q" (at level 20). Notation "p @@ q" := (concat2 p q)%equations : equations_scope. Reserved Notation "p ^" (at level 3, format "p '^'"). Notation "p ^" := (eq_sym p%equations) : equations_scope. Notation "f ^^-1" := (@equiv_inv _ _ f _) (at level 3). Lemma ap_equiv_inv@{i} (Δ : Type@{i}) (T : Type@{i}) (f : Δ -> T) (x y : Δ) : IsEquiv f -> f x = f y -> x = y. Proof. intros H. refine (fun q => (eissect f x)^ @ ap f^^-1 q @ eissect f y). Defined. Axiom axiom_triangle : forall {A}, A. Instance ap_is_equiv@{i +} (Δ : Type@{i}) (T : Type@{i}) (f : Δ -> T) (I : IsEquiv f) (u v : Δ) : IsEquiv (@ap _ _ f u v) := { equiv_inv := _ }. Proof. intros. - eapply ap_equiv_inv; eauto. - red. refine (fun q => ap_pp f _ _ @ whiskerR (ap_pp f _ _) _ @ ((ap_V f _ @ inverse2 (eisadj f _)^) @@ (ap_compose (f^^-1) f _)^ @@ (eisadj f _)^) @ concat_pA1_p (eisretr f) _ _ @ whiskerR (concat_Vp _) _ @ concat_1p _). - refine (fun p => whiskerR (whiskerL _ (ap_compose f f^^-1 _)^) _ @ concat_pA1_p (eissect f) _ _ @ whiskerR (concat_Vp _) _ @ concat_1p _). - intros ->. apply axiom_triangle. Defined. Definition ap_equiv (Δ : Type) (T : Type) (f : Δ -> T) (E : IsEquiv f) (u v : Δ) : Equiv (u = v) (f u = f v) := {| equiv := @ap _ _ f u v |}. Polymorphic Definition ind_pack_eq_inv {A : Type} {B : A -> Type} (x y : A) (p : B x) (q : B y) (e : @eq (sigma A (fun x => B x)) &(x & p) &(y & q)) (i : @eq A x y) (ee : rewP e at fun z => eq z.1 y in i = eq_refl) : rew i in p = q. Proof. revert i ee. change y with (@sigmaI A (fun x => B x) y q).1 at 1 3 4 7 8. unfold subst_expl. change q with (@sigmaI A (fun x => B x) y q).2 at 9. set (s :=@sigmaI A (fun x => B x) y q) in *. clearbody s. destruct e. simpl. intros i e. symmetry in e. destruct e. reflexivity. Defined. 
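(* Added illustrative example, not part of the original file: the [ap] rewrite
   database declared above normalises occurrences of [ap] over path composition
   and inversion. *)
Example ap_norm_example {A B : Type} (f : A -> B) {x y : A} (p : x = y) :
  ap f (p @ eq_sym p) = ap f p @ eq_sym (ap f p).
Proof. autorewrite with ap. reflexivity. Qed.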
Polymorphic Definition opaque_ind_pack_eq_inv {A : Type} {B : A -> Type} {x y : A} (i : @eq A x y) {p : B x} {q : B y} (G : p =_{B;i} q -> Type) (e : &(x & p) = &(y & q)) (ee : rewP e at (fun z => eq z.1 y) in i = eq_refl) := G (@ind_pack_eq_inv A B x y p q e i ee). Polymorphic Lemma simplify_ind_pack {A : Type} (B : A -> Type) (x y : A) (p : B x) (q : B y) (i : x = y) (G : p =_{B;i} q -> Type) : (forall (exp : @eq (sigma A (fun x => B x)) &(x & p) &(y & q)) (ee : rewP exp at (fun z => eq z.1 y) in i = eq_refl), G (@ind_pack_eq_inv A B x y p q exp i ee)) -> (forall e : p =_{B;i} q, G e). Proof. intros H. intros e. specialize (H (pack_sigma_eq i e)). unfold opaque_ind_pack_eq_inv in H. destruct i, e. simpl in H. specialize (H eq_refl). simpl in G. apply H. Defined. Arguments simplify_ind_pack : simpl never. Polymorphic Lemma simplify_ind_pack' {A : Type} (B : A -> Type) (x y : A) (p : B x) (q : B y) (i : x = y) (G : i [B] # p = q -> Type) : (forall (exp : @eq (sigma A (fun x => B x)) &(x & p) &(y & q)) (ee : eq_rect &(x & p) (fun z => eq z.1 y) i _ exp = eq_refl), G (@ind_pack_eq_inv A B x y p q exp i ee)) -> (forall e : i [B] # p = q, G e). Proof. intros H. intros e. specialize (H (pack_sigma_eq i e)). unfold opaque_ind_pack_eq_inv in H. destruct i, e. simpl in H. specialize (H eq_refl). simpl in G. apply H. Defined. Arguments simplify_ind_pack : simpl never. Polymorphic Definition pack_nondep_sigma_eq@{i} {A : Type@{i}} {P : Type@{i}} {x y : A} {p : P} {q : P} (e' : x = y) (e : p = q) : &(x & p) = &(y & q). Proof. destruct e'. simpl in e. destruct e. apply eq_refl. Defined. Set Printing Universes. Polymorphic Lemma eq_simplification_sigma1_nondep_dep@{i j} {A : Type@{i}} {B : Type@{i}} {x y : sigma@{i} _ (fun x : A => B)} {P : eq x y -> Type@{j}} : (forall e : x.1 = y.1, forall e' : x.2 = y.2, P (pack_nondep_sigma_eq@{i} e e')) -> (forall e : x = y, P e). Proof. intros H. destruct e. apply (H eq_refl eq_refl). Defined. (* Lemma transport_pack_nondep_1 {A} (x : A) {B} (p q : B) (e1 : @eq_refl _ x = eq_refl) (e2 : p = q) (q : eq_refl = eq_refl) *) (* (t : x = _) : *) (* (pack_nondep_sigma_eq e1 e2) [fun x => x.1 = q] # t = e1 [fun x => x = q] # t. *) Definition associate_sigma {A} {B : A -> Type} {C : forall x : A, B x -> Type} (x : &{ p : &{ x : A & B x } & C p.1 p.2 }) : &{ x : A & &{ b : B x & C x b }} := &(x.1.1, x.1.2 & x.2). Definition associate_sigma_inv {A} {B : A -> Type} {C : forall x : A, B x -> Type} (x : &{ x : A & &{ b : B x & C x b }}) : &{ p : &{ x : A & B x } & C p.1 p.2 } := &(&(x.1 & x.2.1) & x.2.2). Definition associate_sigma_inv_eq {A} {B : A -> Type} {C : forall x : A, B x -> Type} (x y : &{ x : A & &{ b : B x & C x b }}) (e : x = y) : associate_sigma_inv x = associate_sigma_inv y. Proof. destruct x as [a [b c]]. destruct e. unfold associate_sigma_inv. simpl. reflexivity. Defined. Definition associate_sigma_eq {A} {B : A -> Type} {C : forall x : A, B x -> Type} (P : forall (x y : &{ p : &{ x : A & B x } & C p.1 p.2 }) (e : x = y), Type) : (forall x y (e : x = y), P (associate_sigma_inv x) (associate_sigma_inv y) (associate_sigma_inv_eq x y e)) -> (forall x y (e : x = y), P x y e). Proof. intros H. intros x ? <-. specialize (H (associate_sigma x) (associate_sigma x) eq_refl). apply H. Defined. Polymorphic Definition associate_sigma_eq' {A} {B : A -> Type} {C : forall x : A, B x -> Type} (x y : &{ p : &{ x : A & B x } & C p.1 p.2 }) (P : (x = y)-> Type) : (forall (e : associate_sigma x = associate_sigma y), P (associate_sigma_inv_eq _ _ e)) -> (forall (e : x = y), P e). 
Proof. intros H. intros <-. specialize (H eq_refl). apply H. Defined. (* Lemma rew_in_sigma_nondep {A} {C : A -> A -> B -> Type} *) (* (x y : A) (e : x = y) *) (* (x' y' : A) (e' : x' = y') *) (* (p : &{ b : B & C x x' b }) *) (* (q : &{ b : B & C y y' b }) : *) (* (e [ fun x => &{ b : B & C x y' b } ] # *) (* (e' [fun lhs => &{ b : B & C x lhs b }] # p) = q) -> *) (* &{ e' : p.1 = q.1 & e [ fun x => p.2 = q.2 }. *) Polymorphic Definition pack_sigma_eq_indep@{i} {A : Type@{i}} {P : Type@{i}} {p q : A} {x : P} {y : P} (e' : p = q) (e : x = y) : &(p & x) = &(q & y). Proof. destruct e'. simpl in e. destruct e. apply eq_refl. Defined. Notation "p ..1" := (pr1_seq p) (at level 3). Notation "p ..2" := (pr2_seq p) (at level 3). Lemma rew_in_sigma_nondep {A} {C : A -> A -> Type} (x y : A) (e : x = y) (x' y' : A) (e' : x' = y') (p : C x x') (q : C y y') : (e [ fun x => C x y' ] # (e' [fun lhs => C x lhs] # p) = q) -> (@eq_rect _ &(x & x') (fun x => C x.1 x.2) p _ (pack_sigma_eq_indep e e')) = q. Proof. destruct e, e'. simpl. trivial. Defined. Lemma rew_in_sigma_nondep_inv {A} {B} {C : A -> B -> Type} (x y : A) (e : x = y) (x' y' : B) (e' : x' = y') (p : C x x') (q : C y y') : (@eq_rect _ &(x & x') (fun x => C x.1 x.2) p _ (pack_sigma_eq_indep e e')) = q -> (e [ fun x => C x y' ] # (e' [fun lhs => C x lhs] # p) = q). Proof. destruct e, e'. simpl. trivial. Defined. Lemma simplify_eq_rect_nested {A B} {C : A -> B -> Type} {x y : A} {e : x = y} {x' y' : B} {e' : x' = y'} (p : C x x') (q : C y y') (P : forall (x : (e [ fun x => C x y' ] # (e' [fun lhs => C x lhs] # p) = q)), Type) : (forall (e'' :(@eq_rect _ &(x & x') (fun x => C x.1 x.2) p _ (pack_sigma_eq_indep e e')) = q), P (@rew_in_sigma_nondep_inv _ _ _ x y e x' y' e' p q e'')) -> (forall e, P e). Proof. intros. destruct e, e'. simpl in *. apply X. Defined. (* Lemma eq_rect_f {A} {A'} (B : A' -> Type) (x y : A) (e : x = y) *) (* (f : A -> A') (p : B (f x)) (q : B (f y)) : *) (* ((f x, p) = (f y, q) :> &{ x : A' & B x }) -> *) (* e [fun x : A => B (f x)] # p = q. *) (* Proof. *) (* destruct e. intros. simpl. *) Ltac lower := match goal with [ |- forall (e : @eq ?A _ _), _ ] => let T := fresh in evar (T : Type); match goal with | [ T : _ |- _ ] => evar (f : A -> T) end end. (* Lemma apply_equiv_dom {A B} (P : B -> Type) (e : Equiv A B) : *) (* (forall x : A, P (equiv e x)) -> forall x : B, P x. *) (* Proof. *) (* intros. *) (* specialize (X (e ^-1 x)). *) (* rewrite inv_equiv_equiv in X. exact X. *) (* Defined. *) Lemma apply_equiv_dom {A B} (P : A -> Type) (e : Equiv A B) : (forall x : B, P (inv_equiv e x)) -> forall x : A, P x. Proof. intros. specialize (X (e x)). rewrite equiv_inv_equiv in X. exact X. Defined. (* intros. move H0 before x0. *) (* move H1 before y. *) (* revert_until H0. *) (* uncurry_hyps hyps. pattern sigma hyps. clearbody hyps. clear. set_eos. *) (* intros. *) (* uncurry_hyps hyps'. clearbody hyps'. revert hyps'. clear. revert hyps. set_eos. *) (* intros. *) (* uncurry_hyps hyps''. clearbody hyps''. revert hyps''. clear. *) (* intros h. exact h. *) (* simpl in f. *) (* subst H. *) (* unshelve evar (Hf : IsEquiv f). *) (* { unshelve econstructor. *) (* intros H. decompose_sigmas. simpl in *. *) (* unshelve eexists. exists pr1, pr9, pr0, pr4, pr10, pr3, pr5. exact pr6. *) (* simpl. exact pr7. simpl. red. *) (* intros [h h']. decompose_sigmas. subst f. simpl. destruct pr2. destruct pr11, pr7. reflexivity. *) (* simpl. *) (* intros [h h']. simpl in *. *) (* revert h h'. curry. intros. reflexivity. simpl. *) (* curry. curry. intros. 
reflexivity. } *) (* simpl in *. *) (* hidebody Hf. *) (* pose (ap_equiv _ _ f Hf). *) (* refine (apply_equiv_dom _ (e _ _) _). *) (* intros x0. unfold f in x0. simpl in x0. *) (* unfold eq_rect_r in x0. simpl in x0. *) (* revert x0. *) (* simplify ?. simpl. *) (* simplify ?. simpl. *) Parameter prf : forall {A} (x : A) p, Square x x x x eq_refl p eq_refl eq_refl -> Type. Definition J2 {A : Type} (x : A) (p : x = x) (s : Square x x x x eq_refl p eq_refl eq_refl) (pr : prf x eq_refl (square x)) : prf x p s. revert pr. generalize_eqs_sig s. destruct s. refine (eq_simplification_sigma1_dep _ _ _ _ _). simpl. unshelve lower. shelve. { set_eos. curry. intros ????. intros -> H' -> ->. uncurry_hyps h. exact h. } subst H. unshelve evar (Hf : IsEquiv f). { unshelve econstructor. intros [x' [H _]]. exists x', x', x', x'. exists eq_refl. exists H. exists eq_refl. exact eq_refl. red. subst f. simpl. intros [x' [H u]]. unfold eq_rect_r. simpl. destruct u. exact eq_refl. subst f. simpl. intros (w' & x' & y & z & -> & H & -> & ->); simpl; exact eq_refl. apply axiom_triangle. } hidebody Hf. pose (ap_equiv _ _ f Hf). refine (apply_equiv_dom _ (e _ _) _). intros x0. unfold f in x0. simpl in x0. unfold eq_rect_r in x0. simpl in x0. revert x0. refine (eq_simplification_sigma1_dep_dep _ _ _). intros e'. move e' before x. revert w e'. cbn -[f e]. refine (solution_right_dep _ _). cbn -[f e]. refine (eq_simplification_sigma1_dep_dep _ _ _). intros e'. simpl in e'. move e' before p. revert p e' s0. cbn -[f e]. refine (solution_left_dep _ _). cbn -[f e]. intros s0. pose (apply_noConfusion (A := unit)). refine (p _ _ _ _). clear p; cbn -[f e]. intros []. intros H. compute in H. revert H. intros ->. clear f Hf e. trivial. Defined. Lemma noconf_noc {A} {w x y z : A} {p q r s} (u v : Square w x y z p q r s) (f : noc u v) : u = v. Proof. depelim u. revert f. generalize_eqs_sig v. destruct v. simpl. simpl. simplify ?. simpl. simplify ?. simpl. intros e. intros e'. rewrite <- e'. clear e'. depelim e. destruct e''2. destruct e''3. destruct e''4. rewrite <- H in f. exact I. Defined. Show Proof. Equations noc {A} {w x y z : A} {p q r s} (u v : Square w x y z p q r s) : Type := noc (square ?(w)) (square w) := _. Parameter prf : forall {A} (x : A) p, Square x x x x eqrefl p eqrefl eqrefl -> Type. Equations J2 {A : Type} (x : A) (p : x = x) (s : Square x x x x eqrefl p eqrefl eqrefl) : prf x p s := J2 _ _ (square x) := _. Coq-Equations-1.3.1-8.20/test-suite/tabareau_vec.v000066400000000000000000000030061463127417400215170ustar00rootroot00000000000000From Equations Require Import Equations. Set Universe Polymorphism. Inductive ℕ (E:Set) : Set := | O : ℕ E | S : ℕ E -> ℕ E | raise : E -> ℕ E. Derive NoConfusion for ℕ. Arguments O {_}. Arguments S {_} _. Inductive Vec E (A : Set) : ℕ E -> Set := nil : Vec E A O | cons : forall {n} (x : A) (xs : Vec E A n), Vec E A (S n). Derive Signature NoConfusion NoConfusionHom for Vec. Arguments nil {_ _}. Arguments cons {_ _ _} _ _. Inductive vector_param E (A : Set) : forall (n : ℕ E), Vec E A n -> Set := vnil_param : vector_param E A O nil | vcons_param : forall (n : ℕ E) (a : A) (v : Vec E A n), vector_param E A n v -> vector_param E A (S n) (cons a v). Derive Signature NoConfusion NoConfusionHom for vector_param. Equations param_vector_vcons E (A : Set) (a : A) (n : ℕ E) (v : Vec E A n) (X : vector_param E A (S n) (cons a v)) : vector_param E A n v := param_vector_vcons E A _ _ _ (vcons_param _ _ _ X) := X. 
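(* Added sanity check, not part of the original test: on a concrete witness
   built with [vcons_param], [param_vector_vcons] recovers the sub-witness.
   The exception type [unit] is an arbitrary choice made for this example. *)
Example param_vector_vcons_example (a : nat) (v : Vec unit nat O)
  (p : vector_param unit nat O v) :
  param_vector_vcons unit nat a O v (vcons_param unit nat O a v p) = p.
Proof. simp param_vector_vcons. reflexivity. Qed.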
Definition foo := param_vector_vcons_elim : forall P : forall (E A : Set) (a : A) (n : ℕ E) (v : Vec E A n), vector_param E A (S n) (cons a v) -> vector_param E A n v -> Prop, (forall (E A : Set) (a0 : A) (n0 : ℕ E) (v0 : Vec E A n0) (v1 : vector_param E A n0 v0), P E A a0 n0 v0 (vcons_param E A n0 a0 v0 v1) v1) -> forall (E A : Set) (a : A) (n : ℕ E) (v : Vec E A n) (X : vector_param E A (S n) (cons a v)), P E A a n v X (param_vector_vcons E A a n v X). (* Print Assumptions param_vector_vcons. *) Coq-Equations-1.3.1-8.20/test-suite/telescopes.v000066400000000000000000001537171463127417400212630ustar00rootroot00000000000000Require Import Equations Utf8. Set Universe Polymorphism. Import Sigma_Notations. Open Scope equations_scope. Polymorphic Definition pr1_seq {A} {P : A -> Type} {p q : sigma A P} (e : p = q) : p.1 = q.1. Proof. destruct e. apply eq_refl. Defined. Require Vector. Derive NoConfusion for Vector.t. Notation " 'rew' H 'in' c " := (@eq_rect _ _ _ c _ H) (at level 20). Definition J {A} {x : A} (P : forall y : A, x = y -> Type) (p : P x eq_refl) (y : A) (e : x = y) : P y e. destruct e. exact p. Defined. Lemma J_on_refl {A} (x y : A) (e : x = y) : J (λ (y : A) _, x = y) eq_refl y e = e. Proof. destruct e. constructor. Defined. Definition subst {A : Type} {x : A} {P : A -> Type} {y : A} (e : x = y) (f : P x) : P y := J (fun x _ => P x) f y e. Definition subst2 {A : Type} {x : A} {P : A -> Type} (f : P x) (y : A) (e : x = y) : P y := J (fun x _ => P x) f y e. Definition cong@{i j} {A : Type@{i}} {B : Type@{j}} (f : A -> B) {x y : A} (e : x = y) : f x = f y := J@{i j} (fun y _ => f x = f y) (@eq_refl _ (f x)) y e. (* aka ap *) Lemma cong_iter {A B C} (f : A -> B) (g : B -> C) (x y : A) (e : x = y) : Top.cong g (Top.cong f e) = Top.cong (fun x => g (f x)) e. Proof. revert y e. refine (Top.J _ _). reflexivity. Qed. Lemma cong_subst2 {A B C} (f : C -> B) (x y : A) (e : x = y) (z w : A -> C) (p : z x = w x) : Top.cong f (Top.subst2 (P:=fun x : A => z x = w x) p y e) = Top.subst2 (P := fun x : A => f (z x) = f (w x)) (Top.cong f p) y e. Proof. revert y e. refine (Top.J _ _). simpl. reflexivity. Defined. Definition congd {A} {B : A -> Type} (f : forall x : A, B x) {x y : A} (p : x = y) : subst p (f x) = f y := J (fun y p => subst p (f x) = f y) (@eq_refl _ (f x)) y p. (* aka apd *) Notation " 'rew' H 'in' c " := (@subst _ _ _ _ H c) (at level 20). Notation "p =_{ P ; e } q" := (subst (P:=P) e p = q) (at level 90, format "p =_{ P ; e } q"). Definition subst_expl {A : Type} {x : A} {P : A -> Type} {y : A} (e : x = y) (f : P x) : P y := subst e f. Notation " 'rewP' H 'at' P 'in' c " := (@subst_expl _ _ P _ H c) (at level 20). Definition Sect {A B : Type} (s : A -> B) (r : B -> A) := forall x : A, r (s x) = x. (** A typeclass that includes the data making [f] into an adjoin equivalence*) Set Printing Universes. Cumulative Class IsEquiv@{i} {A : Type@{i}} {B : Type@{i}} (f : A -> B) := BuildIsEquiv { equiv_inv : B -> A ; eisretr : Sect equiv_inv f; eissect : Sect f equiv_inv; eisadj : forall x : A, eisretr (f x) = cong f (eissect x) }. Arguments eisretr {A B} f {_} _. Arguments eissect {A B} f {_} _. Arguments eisadj {A B} f {_} _. Record Equiv@{i} (A B : Type@{i}) := { equiv :> A -> B ; is_equiv :> IsEquiv equiv }. Arguments equiv {A B} e. Instance Equiv_IsEquiv {A B} (e : Equiv A B) : IsEquiv (equiv e). Proof. apply is_equiv. Defined. Definition inv_equiv {A B} (E: Equiv A B) : B -> A := equiv_inv (IsEquiv:=is_equiv _ _ E). 
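(* Added sanity check, not part of the original file: the [J]-based [cong]
   defined above computes to [eq_refl] on a reflexivity proof. *)
Example cong_refl_check {A B : Type} (f : A -> B) (x : A) :
  cong f (@eq_refl A x) = @eq_refl B (f x).
Proof. reflexivity. Qed.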
Definition equiv_inv_equiv@{i} {A B : Type@{i}} {E: Equiv A B} (x : A) : inv_equiv _ (equiv E x) = x := eissect _ x. Definition inv_equiv_equiv@{i} {A B : Type@{i}} {E: Equiv A B} (x : B) : equiv E (inv_equiv _ x) = x := eisretr _ x. Definition equiv_adj@{i} {A B : Type@{i}} {E: Equiv A B} (x : A) : inv_equiv_equiv (equiv E x) = cong (equiv E) (equiv_inv_equiv x) := eisadj _ x. Notation " X <~> Y " := (Equiv X Y) (at level 90, no associativity, Y at next level). Definition equiv_id A : A <~> A. Proof. intros. refine {| equiv a := a |}. unshelve refine {| equiv_inv e := e |}. - red. reflexivity. - red; intros. reflexivity. - intros. simpl. reflexivity. Defined. Axiom axiom_triangle : forall {A : Prop}, A. Definition equiv_sym {A B} : A <~> B -> B <~> A. Proof. intros. refine {| equiv a := inv_equiv X a |}. unshelve refine {| equiv_inv e := equiv X e |}. - red; intros. apply eissect. - red; intros. apply eisretr. - intros x. simpl. destruct X. simpl. unfold inv_equiv. simpl. apply axiom_triangle. Defined. Require Import DepElimDec. (* Unset Equations OCaml Splitting. *) (* BUG *) (* Equations tel_eq (Δ : Tel) (t s : Tuple Δ) : Type := *) (* tel_eq nil nil nil := unit; *) (* tel_eq (consTel A f) (cons t ts) (cons s ss) := *) (* sigma (t = s) (fun e : t = s => tel_eq (f s) (rewP e at fun x => Tuple (f x) in ts) ss). *) Set Equations Transparent. Set Refolding Reduction. Ltac rewrite_change c := match type of c with ?foo = ?bar => change foo with bar in * end. Set Printing Universes. Arguments sigmaI {A} {B} pr1 pr2. Section pathsigmauncurried. Universe i. Equations path_sigma_uncurried {A : Type@{i}} {P : A -> Type@{i}} (u v : sigma@{i} A P) (pq : sigma@{Set} _ (fun p => subst p u.2 = v.2)) : u = v := path_sigma_uncurried (sigmaI _ u1 u2) (sigmaI _ ?(u1) ?(u2)) (sigmaI _ eq_refl eq_refl) := eq_refl. End pathsigmauncurried. Definition pr1_path@{i} {A : Type@{i}} {P : A -> Type@{i}} {u v : sigma@{i} A P} (p : u = v) : u.1 = v.1 := cong@{i i} (@pr1 _ _) p. Notation "p ..1" := (pr1_path p) (at level 3). Definition pr2_path@{i} {A : Type@{i}} `{P : A -> Type@{i}} {u v : sigma A P} (p : u = v) : rew (p..1) in u.2 = v.2. destruct p. apply eq_refl. Defined. Notation "p ..2" := (pr2_path p) (at level 3). Definition eta_path_sigma_uncurried@{i} {A : Type@{i}} {P : A -> Type@{i}} {u v : sigma A P} (p : u = v) : path_sigma_uncurried _ _ (sigmaI@{i} p..1 p..2) = p. destruct p. apply eq_refl. Defined. Section pathsigma. Universe i. Equations path_sigma {A : Type@{i}} {P : A -> Type@{i}} {u v : sigma A P} (p : u.1 = v.1) (q : rew p in u.2 = v.2) : u = v := path_sigma (u:=sigmaI _ _ _) (v:=sigmaI _ _ _) eq_refl eq_refl := eq_refl. End pathsigma. Definition eta_path_sigma A `{P : A -> Type} {u v : sigma A P} (p : u = v) : path_sigma (p..1) (p..2) = p := eta_path_sigma_uncurried p. Instance path_sigma_equiv@{i} {A : Type@{i}} (P : A -> Type@{i}) (u v : sigma A P): IsEquiv@{i} (path_sigma_uncurried u v). unshelve refine (BuildIsEquiv _ _ _ _ _ _ _). - exact (fun r => &(r..1 & r..2)). - intro. apply eta_path_sigma_uncurried. - destruct u, v; intros [p q]; simpl in *. destruct p. simpl in *. destruct q. reflexivity. - destruct u, v; intros [p q]; simpl in *; destruct p. simpl in *. destruct q; simpl in *. apply eq_refl. Defined. Definition path_sigma_equivalence@{i} {A : Type@{i}} (P : A -> Type@{i}) (u v : sigma A P): sigma@{i} _ (fun p : u.1 = v.1 => u.2 =_{P;p} v.2) <~> u = v. Proof. exists (path_sigma_uncurried u v). apply path_sigma_equiv. Defined. Module Telescopes. 
Cumulative Inductive t@{i} : Type := | inj : Type@{i} -> t | ext (A : Type@{i}) : (A -> t) -> t. Notation Tel := t. Delimit Scope telescope with telescope. Notation "[]" := (inj unit) : telescope. Bind Scope telescope with t. Example onetel := ext Type (fun A => ext nat (fun n => inj (vector A n))). Fixpoint telescope@{i} (T : Tel@{i}) : Type@{i} := match T with | inj A => A | ext A f => sigma A (fun x => telescope (f x)) end. Coercion telescope : Tel >-> Sortclass. (** Telescopic equality: an iterated sigma of dependent equalities *) Fixpoint eq@{i} (Δ : Tel@{i}) : forall (t s : Δ), Tel@{i} := match Δ return forall t s : Δ, Tel@{i} with | inj A => fun a b => inj@{i} (a = b) | ext A f => fun a b => ext (a.1 = b.1) (fun e => eq (f b.1) (rew e in a.2) b.2) end. Reserved Notation "x == y" (at level 70, y at next level, no associativity). Reserved Notation "x =={ Δ } y" (at level 70, y at next level, no associativity, format "x =={ Δ } '/ ' y"). Infix "==" := (eq _) : telescope. Definition eq_expl := eq. Infix "=={ Δ }" := (eq_expl Δ) : telescope. Equations refl {Δ : Tel} (t : telescope Δ) : eq Δ t t := refl (Δ:=inj A) a := eq_refl; refl (Δ:=ext A f) (sigmaI t ts) := &(eq_refl & refl ts). Local Open Scope telescope. Equations J {Δ : Tel} (r : Δ) (P : forall s : Δ, eq Δ r s -> Type) (p : P r (refl r)) (s : Δ) (e : eq _ r s) : P s e := J (Δ:=inj A) a P p b e := Top.J P p b e; J (Δ:=ext A f) a P p b e := (* (sigmaI _ r rs) P p (sigmaI _ s ss) (sigmaI _ e es) := *) Top.J (x:=a.1) (fun (s' : A) (e' : a.1 = s') => forall (ss' : f s') (es' : eq (f s') (rewP e' at f in a.2) ss'), P &(s' & ss') &(e' & es')) (fun ss' es' => J _ (fun ss'' (es'' : eq (f a.1) a.2 ss'') => P &(a.1 & ss'') &(eq_refl & es'')) p ss' es') b.1 e.1 b.2 e.2. Lemma J_refl {Δ : Tel} (r : Δ) (P : forall s : Δ, eq Δ r s -> Type) (p : P r (refl r)) : J r P p r (refl r) = p. Proof. induction Δ. simpl. reflexivity. simpl. destruct r. refine (H pr1 pr2 _ _). Defined. Lemma J_on_refl {Δ : Tel} (x y : Δ) (e : x == y) : J _ (λ (y : Δ) _, x == y) (refl _) y e = e. Proof. revert y e. refine (J _ _ _). refine (J_refl _ _ _). Defined. Equations subst {Δ : Tel} (P : Δ -> Type) {u v : Δ} (e : u =={Δ} v) (p : P u) : P v := subst (v:=v) (u:=u) P e p := J u (fun v _ => P v) p v e. Definition cong@{i j k} {Δ : Tel@{i}} {T : Type@{j}} (f : Δ -> T) (u v : Δ) (e : u =={Δ} v) : f u = f v := J@{j k i} u (fun v _ => f u = f v) (@eq_refl T (f u)) v e. Notation "p ==_{ P ; e } q" := (subst P e p = q) (at level 70, q at next level, no associativity) : telescope. Reserved Notation "x =={ T ; e } y" (at level 70, y at next level, no associativity, only parsing, format "x =={ T ; e } '/ ' y"). Notation "x =={ P ; e } y" := (subst P e x == y) (only parsing) : telescope. Lemma eq_over_refl {Δ : Tel} {T} (f : forall x : Δ, T x) (u : Δ) : f u ==_{T;refl u} f u. Proof. unfold subst. refine (J_refl _ _ _). Defined. Equations dcong {Δ : Tel} {T} (f : forall x : Δ, T x) (u v : Δ) (e : u =={Δ} v) : f u ==_{T;e} f v := dcong (T:=T) f u v e := J u (fun v e => f u ==_{T;e} f v) (eq_over_refl f u) v e. Equations cong_tel {Δ : Tel} {Γ : Tel} (f : Δ -> Γ) {u v : Δ} (e : u =={Δ} v) : f u =={Γ} f v := cong_tel (v:=v) f e := J _ (fun v _ => f _ =={_} f v) (refl _) v e. Equations dcong_tel {Δ : Tel} {T : Δ -> Tel} (f : forall x : Δ, T x) {u v : Δ} (e : u =={Δ} v) : f u =={T;e} f v := dcong_tel f e := J _ (fun v e => f _ =={_;e} f v) _ _ e. Next Obligation. clear. unfold subst. rewrite J_refl. apply refl. Defined. Notation "'tele' x .. y 'in' z " := (@ext _ (fun x => .. 
(@ext _ (fun y => inj z)) ..)) (at level 0, x binder, right associativity, z at level 60, format "'[' 'tele' '/ ' x .. y '/ ' 'in' '/ ' z ']'") : type_scope. Local Open Scope telescope. Notation "'telei' x .. y 'in' z " := (@sigmaI _ _ x .. (@sigmaI _ _ y z) ..) (at level 0, right associativity, y at next level, format "'[' 'telei' '/ ' x .. y 'in' z ']'", only parsing) : telescope. Lemma solution@{i} {A : Type@{i}} (t : A) : Equiv@{i} (sigma@{i} A (fun x : A => x = t)) unit. Proof. refine {| equiv a := tt |}. unshelve refine {| equiv_inv e := telei t in eq_refl |}. - red; intros. destruct x. reflexivity. - red; intros. destruct x. now destruct pr2. - intros [x eq]. revert t eq. refine (Top.J@{i i} _ _). constructor. Defined. Fixpoint eq_eq_equiv@{i} (Δ : Tel@{i}) : forall (u v : Δ) (e : u = v), u == v := match Δ as Δ return forall (u v : Δ) (e : u = v), u == v with | inj A => fun a b e => e | ext A f => fun u v e => let p := equiv_inv@{i} (IsEquiv:=path_sigma_equiv _ u v) e in &(p.1 & eq_eq_equiv _ _ _ p.2) end. Fixpoint extend_tele@{i} (Δ : Tel@{i}) : forall (Γ : telescope Δ -> t@{i}), t@{i} := match Δ with | inj A => fun Γ => ext A Γ | ext A f => fun Γ => ext A (fun a => extend_tele (f a) (fun fa => Γ &(a & fa))) end. (* Equations extend_tele (Δ : t) (Γ : telescope Δ -> t) : t := *) (* extend_tele (inj A) Γ := ext A Γ; *) (* extend_tele (ext A f) Γ := ext A (fun a => extend_tele (f a) (fun fa => Γ &(a & fa))). *) Equations inj_extend_tel (Δ : t) (Γ : telescope Δ -> t) (s : Δ) (t : Γ s) : extend_tele Δ Γ := inj_extend_tel (inj A) Γ s t := &(s & t) ; inj_extend_tel (ext A f) Γ (sigmaI _ t ts) e := &(t & inj_extend_tel (f t) (fun fa => Γ &(t & fa)) ts e). Lemma reorder_tele@{i +} (Δ : t@{i}) (Γ : telescope Δ -> t@{i}) : telescope (extend_tele Δ Γ) <~> tele (x : telescope Δ) in Γ x. Proof. unshelve econstructor. - induction Δ; simpl extend_tele in *; simpl; intros. trivial. simpl in Γ. specialize (X X0.1 _ X0.2). refine &(&(X0.1 & X.1)&X.2). - unshelve econstructor. + induction Δ; simpl extend_tele in *; intros; simpl in *; trivial. specialize (X X0.1.1). exists X0.1.1. apply X. exact &(X0.1.2 & X0.2). + red. intro. induction Δ; simpl. destruct x. constructor. destruct x. simpl. rewrite H. reflexivity. + red. intro. induction Δ; simpl. destruct x. constructor. destruct x. simpl. rewrite H. reflexivity. + apply axiom_triangle. Defined. Lemma eq_eq_equiv_refl {Δ : Tel} (u : Δ) : eq_eq_equiv Δ u u eq_refl = refl u. Proof. induction Δ; simpl. reflexivity. simpl. now rewrite H. Defined. Fixpoint eq_eq_equiv_inv@{i} (Δ : Tel@{i}) : forall (u v : Δ) (e : u == v), u = v := match Δ with | inj A => fun a b e => e | ext A f => fun u v e => let e' := eq_eq_equiv_inv _ _ _ e.2 in equiv@{i} (path_sigma_equivalence _ u v) &(e.1 & e') end. Lemma eq_eq_equiv_inv_refl@{i} (Δ : Tel@{i}) (u : Δ) : eq_eq_equiv_inv Δ u u (refl@{i} u) = eq_refl. Proof. induction Δ; simpl. reflexivity. simpl. now rewrite H. Defined. Lemma sect@{i} : forall (Δ : Tel@{i}) (u v : Δ), Sect@{i i} (eq_eq_equiv_inv Δ u v) (eq_eq_equiv Δ u v). Proof. induction Δ. simpl. intros. intro. constructor. intros u v. intros He. simpl in * |-. Opaque path_sigma_uncurried path_sigma path_sigma_equivalence path_sigma_equiv. pose proof (eissect (path_sigma_uncurried u v)). simpl. red in H0. Transparent path_sigma_uncurried path_sigma path_sigma_equivalence path_sigma_equiv. match goal with |- context[equiv _ ?x] => set (foo:=x) end. specialize (H0 foo). set (bar := (equiv_inv@{i} (equiv@{i} _ foo))) in *. change (bar = foo) in H0. symmetry in H0. 
unfold foo in H0. subst foo. clearbody bar. revert bar H0. refine (@Top.subst2@{i i} _ _ _ _). simpl. simpl. red in H. specialize (H _ _ _ He.2). destruct He. simpl. apply Top.cong. apply H. Defined. Require Import EqDecInstances. Typeclasses Transparent telescope. Transparent path_sigma_equiv path_sigma_uncurried. Lemma retr@{i} : forall (Δ : Tel@{i}) (u v : Δ), Sect@{i i} (eq_eq_equiv Δ u v) (eq_eq_equiv_inv Δ u v). Proof. induction Δ. + simpl. intros. intro. constructor. + intros u v e. simpl. specialize (H v.1 (rew (equiv_inv (IsEquiv := path_sigma_equiv _ _ _) e).1 in u.2) v.2 (equiv_inv (IsEquiv := path_sigma_equiv _ _ _) e).2). set (foo := eq_eq_equiv_inv _ _ _ _) in *. symmetry in H. clearbody foo. revert foo H. refine (Top.subst2@{i i} _). refine (eisretr (path_sigma_uncurried u v) _). Defined. Lemma eq_sym_dep {A} (x y : A) (P : x = y -> Type) (G : forall e : y = x, P (eq_sym e)) : forall e : x = y, P e. Proof. intros. destruct e. apply (G eq_refl). Defined. Global Instance eq_points_isequiv@{i} (Δ : Tel@{i}) (u v : Δ) : IsEquiv@{i} (eq_eq_equiv Δ u v) := {| equiv_inv := eq_eq_equiv_inv Δ u v |}. Proof. - apply sect. - apply retr. - revert v. induction Δ as [ | A t IH]. + refine (Top.J@{i i} _ _). constructor. + simpl in u; refine (Top.J@{i i} _ _). simpl sect. rewrite (IH u.1 u.2 u.2 eq_refl). simpl eq_eq_equiv. simpl retr. set (r:=retr@{i} _ _ _ _) in *. set(lhs' := eq_eq_equiv _ _ _). set(lhs:=eq_eq_equiv_inv _ _ _ _) in *. clearbody r. revert r. refine (eq_sym_dep@{i i} _ _ _ _). apply axiom_triangle. (* clearbody lhs. *) (* clearbody lhs. *) (* revert lhs. now refine (Top.J _ _). *) Defined. (** Telescopic equality is equivalent to equality of the sigmas. *) Definition eq_points_equiv@{i} (Δ : Tel@{i}) (u v : Δ) : Equiv@{i} (u = v) (u == v) := {| equiv := eq_eq_equiv Δ u v |}. (* Goal (forall n : nat, True). *) (* intros. *) (* pose (tele (n' : nat) in (S n' =={ inj nat } S n)). *) (** Necessary as the telescope structure is not easy for Coq to infer *) Global Hint Extern 0 (Equiv (?x = ?y) (telescope (eq ?Δ ?x' ?y'))) => exact (eq_points_equiv Δ x' y') : typeclass_instances. Definition NoConf@{i} := fun (A : Type@{i}) (x : sigma@{i} _ (fun index : nat => vector A index)) => match x.2 with | Vector.nil => fun y : &{ index : nat & vector A index} => match y.2 with | Vector.nil => True | Vector.cons _ _ => False end | @Vector.cons _ h n x0 => fun y : &{ index : nat & vector A index} => match y.2 with | Vector.nil => False | @Vector.cons _ h0 n0 x1 => telei (h) (n) in (x0) = telei (h0) (n0) in (x1) :> tele (_ : A) (n : nat) in vector A n end end. Lemma noconf@{i +} : forall (A : Type@{i}) (a b : &{ index : nat & vector A index}), a = b -> NoConf@{i} A a b. Proof. intros. destruct H. destruct a. simpl. destruct pr2. simpl. exact I. simpl. reflexivity. Defined. Lemma noconf_inv@{i +} : forall (A : Type@{i}) (a b : &{ index : nat & vector A index}), NoConf@{i} A a b -> a = b. Proof. intros. destruct a, b. destruct pr2, pr3; try constructor || contradiction. simpl in H. NoConfusion.destruct_tele_eq H. reflexivity. Defined. Import NoConfusion. Global Instance noconf_isequiv@{i} (A : Type@{i}) (a b : sigma@{i} _ _) : IsEquiv@{i} (noconf A a b). Proof. unshelve refine {| equiv_inv := noconf_inv A a b |}. intro. - destruct_sigma a; destruct_sigma b; destruct a ; destruct b; simpl in * |-; on_last_hyp ltac:(fun id => destruct_tele_eq id || destruct id); solve [constructor]. - intro. solve_noconf_inv_equiv. - intros. destruct x. destruct a. destruct pr2; simpl; constructor. Defined. 
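(* Added illustrative example, not part of the original file: the reflexivity
   witness of telescopic equality on the telescope packing a length together
   with a vector of that length. *)
Example refl_tele_example (n : nat) (v : vector nat n) :
  telescope (eq_expl (tele (k : nat) in vector nat k) &(n & v) &(n & v)) :=
  @refl (tele (k : nat) in vector nat k)%type &(n & v).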
Definition noconf_equiv@{i} (A : Type@{i}) a b : Equiv (a = b) (NoConf@{i} A a b) := {| equiv := noconf A a b |}. Global Hint Extern 0 (@IsEquiv (?x = ?y) (telescope (eq ?Δ ?x' ?y')) _) => exact (@eq_points_isequiv Δ x' y') : typeclass_instances. Global Hint Extern 0 (@IsEquiv (?x = ?y) _ _) => exact (@noconf_isequiv _ x y) : typeclass_instances. Global Hint Extern 0 (@Equiv (?x = ?y) _) => exact (@noconf_equiv _ x y) : typeclass_instances. Arguments noconf_equiv : simpl never. Arguments noconf_isequiv : simpl never. Arguments equiv : simpl never. Arguments equiv_inv : simpl never. Notation "f ^-1" := (@equiv_inv _ _ f _) (at level 3). Infix "@" := eq_trans (at level 80). (** The composition of equivalences is an equivalence. *) Instance isequiv_compose A B f C g {E : @IsEquiv A B f} {E' : @IsEquiv B C g} : IsEquiv (compose g f) | 1000 := BuildIsEquiv A C (compose g f) (compose f^-1 g^-1) _ _ _ . Proof. exact (fun c => Top.cong g (eisretr f (g^-1 c)) @ eisretr g c). exact (fun a => Top.cong (f^-1) (eissect g (f a)) @ eissect f a). intro. simpl. apply axiom_triangle. Defined. Definition equiv_compose {A B C} (E : Equiv A B) (E' : Equiv B C) : Equiv A C := Build_Equiv A C (compose (@equiv _ _ E') (@equiv _ _ E)) _. Definition injectivity_cons {A} (u v : tele (x : A) (n : nat) in vector A n) : (&(S u.2.1 & @Vector.cons A u.1 u.2.1 u.2.2) = &(S v.2.1 & @Vector.cons A v.1 v.2.1 v.2.2)) <~> u == v := equiv_compose (noconf_equiv A &(S u.2.1 & @Vector.cons A u.1 u.2.1 u.2.2) &(S v.2.1 & @Vector.cons A v.1 v.2.1 v.2.2)) (eq_points_equiv (tele (x : A) (n : nat) in vector A n) _ _). End Telescopes. Module Example_cons. Notation " 'rewP' H 'at' B 'in' c " := (@Top.subst _ _ B _ H c) (at level 20, only parsing). Import Telescopes. Lemma inj_dep {A} (P : A -> Type) (G : forall e : inj A, P e) : forall e : A, P e. Proof. apply G. Defined. Polymorphic Definition pr1_seq@{i} {A : Type@{i}} {P : A -> Type@{i}} {p q : sigma A P} (e : p = q) : p.1 = q.1. Proof. destruct e. apply eq_refl. Defined. Notation " 'rew' H 'in' c " := (@eq_rect _ _ _ c _ H) (at level 20). Polymorphic Definition pr2_seq@{i} {A : Type@{i}} {P : A -> Type@{i}} {p q : sigma A P} (e : p = q) : rew (pr1_seq e) in p.2 = q.2. Proof. destruct e. apply eq_refl. Defined. Polymorphic Definition rewh@{i} {A : Type@{i}} {B : A -> Type@{i}} {x : A} {p q : B x} (e : &(x & p) = &(x & q)) (e' : pr1_seq e = eq_refl) : p = q := (@eq_rect _ (pr1_seq e) (fun f : x = x => rew f in p = q) (pr2_seq e) eq_refl e'). Polymorphic Lemma solution_inv@{i j} {A : Type@{i}} (B : A -> Type@{i}) (x : A) (p q : B x) (G : p = q -> Type@{j}) : (forall (e : &(x & p) = &(x & q)) (e' : pr1_seq e = eq_refl), G (rewh e e')) -> (forall e : p = q, G e). Proof. intros H. intros e. destruct e. specialize (H eq_refl eq_refl). simpl in H. apply H. Defined. Definition uncurry {A} {B : A -> Type} {C : forall x : A, B x -> Type} (f : forall s : &{ x : A & B x }, C s.1 s.2) : forall (x : A) (b : B x), C x b := fun x b => f &(x & b). Lemma rewrite_in {A} {x y z : A} (e : x = y) (e' : x = z) : y = z. Proof. destruct e. apply e'. Defined. Lemma rewrite_inr {A} {x y z : A} (e : x = y) (e' : y = z) : x = z. Proof. destruct e. apply e'. Defined. Open Scope telescope. Lemma cong_equiv_inv@{i} (Δ : Tel@{i}) (T : Type@{i}) (f : Δ -> T) (u v : Δ) : IsEquiv f -> f u = f v -> u =={Δ} v. Proof. intros. apply eq_points_equiv. apply (Top.cong equiv_inv) in H. transitivity (f ^-1 (f u)). symmetry. apply (eissect f u). transitivity (f ^-1 (f v)). apply H. apply (eissect f v). Defined. 
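(* Added illustrative example, not part of the original file: [uncurry] turns a
   function on packed sigma arguments into a curried one; on the first
   projection it computes as expected. *)
Example uncurry_example (x : nat) (v : vector nat x) :
  @uncurry nat (fun n => vector nat n) (fun n _ => nat) (fun s => s.1) x v = x.
Proof. reflexivity. Qed.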
Instance cong_is_equiv@{i} (Δ : Tel@{i}) (T : Type@{i}) (f : Δ -> T) (u v : Δ) (I : IsEquiv f) : IsEquiv (cong f u v) := { equiv_inv := _ }. Proof. intros. - eapply cong_equiv_inv; eauto. - red. intros x. unfold cong_equiv_inv. apply axiom_triangle. - apply axiom_triangle. - apply axiom_triangle. Defined. Definition cong_equiv (Δ : Tel) (u v : Δ) (T : Type) (f : Δ -> T) (E : IsEquiv f) : u =={Δ} v <~> f u = f v := {| equiv := cong f u v |}. Notation "'telei' x .. y 'in' z " := (@sigmaI _ _ x .. (@sigmaI _ _ y z) ..) (at level 0, right associativity, y at next level, format "'[' 'telei' '/ ' x .. y 'in' z ']'", only parsing) : telescope. Notation " a '={' P ; e } b " := (Top.subst (P:=P) e a = b) (at level 90). Notation " a '==={' P ; e } b " := (subst P _ _ e a = b) (at level 90, only parsing) : telescope. Lemma equiv_cong_subst {A B} (P : B -> Type) (f : A -> B) (s t : A) (e : s = t) (u : P (f s)) (v : P (f t)) : u =_{(fun x => P (f x)); e} v <~> (u =_{P; Top.cong f e} v). Proof. unfold Top.subst. destruct e. simpl. apply equiv_id. Defined. Lemma equiv_cong_subst_dep {A} {B : A -> Type} (P : forall x : A, B x -> Type) (f : forall x : A, B x) (s t : A) (e : s = t) (u : P s (f s)) (v : P t (f t)) : u =_{(fun x => P x (f x)); e} v <~> (Top.J (fun y e => P y (rew e in (f s))) u _ e =_{(fun x => P _ x); Top.congd f e} v). Proof. unfold Top.subst. destruct e. simpl. apply equiv_id. Defined. Lemma equiv_cong_subst_tel {Δ Γ : Tel} (P : Γ -> Tel) (f : Δ -> Γ) (s t : Δ) (e : s =={Δ} t) (u : P (f s)) : subst P (cong_tel f e) u = subst (fun x => P (f x)) e u. Proof. unfold subst. revert t e. refine (J _ _ _). intros. rewrite J_refl. unfold cong_tel. simpl. rewrite !J_refl. reflexivity. Defined. Lemma equiv_tele_l {A} {A'} {B : A' -> Type} (e : Equiv A A') : tele (x : A) in B (equiv e x) <~> tele (x : A') in B x. Proof. simpl. unshelve refine {| equiv a := &(e a.1 & _) |}. exact a.2. unshelve refine {| equiv_inv a := &(e ^-1 a.1 & _) |}. destruct a. simpl. rewrite eisretr. exact pr2. red; intros. simpl. destruct x. simpl. pose (eisretr e pr1). apply path_sigma_uncurried. simpl. exists e0. simpl. unfold eq_rect_r. clearbody e0. apply axiom_triangle. apply axiom_triangle. apply axiom_triangle. (* apply eisretr. *) (* red; intros. simpl. destruct x. simpl. apply Top.cong. *) (* apply eissect. *) (* intros [x bx]. *) (* simpl. rewrite eisadj. simpl. *) (* destruct (eissect (e x) bx). simpl. reflexivity. *) Defined. Lemma equiv_tele_r@{i} {A : Type@{i}} {B B' : A -> Type@{i}} (e : forall x : A, Equiv (B x) (B' x)) : tele (x : A) in B x <~> tele (x : A) in (B' x). Proof. simpl. unshelve refine {| equiv a := &(a.1 & e a.1 a.2) |}. unshelve refine {| equiv_inv a := &(a.1 & inv_equiv (e a.1) a.2) |}. red; intros. simpl. destruct x. simpl. apply Top.cong. apply eisretr. red; intros. simpl. destruct x. simpl. apply Top.cong. apply eissect. intros [x bx]. simpl. rewrite eisadj. simpl. destruct (eissect (e x) bx). simpl. reflexivity. Defined. Lemma eq_sym_equiv@{i} {A : Type@{i}} {x y : A} : Equiv@{i} (x = y) (y = x). Proof. unshelve refine {| equiv a := eq_sym a |}. unshelve refine {| equiv_inv a := eq_sym a |}. intro e; destruct e. apply eq_refl. intro e; destruct e. apply eq_refl. intro e; destruct e. apply eq_refl. Defined. Lemma eq_tele_sym_equiv@{i} {Δ : Tel@{i}} {x y : Δ} : x == y <~> y == x. Proof. refine (equiv_compose _ _). refine (equiv_sym _). refine (eq_points_equiv _ _ _). refine (equiv_compose _ _). refine eq_sym_equiv. refine (eq_points_equiv _ _ _). Defined. 
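(* Added sanity check, not part of the original file: both directions of
   [eq_sym_equiv] are [eq_sym], so its underlying map is definitionally the
   identity on [eq_refl]. *)
Example eq_sym_equiv_example {A} (x : A) :
  equiv (@eq_sym_equiv A x x) eq_refl = eq_refl.
Proof. reflexivity. Qed.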
Lemma subst_subst@{i} (Δ : Tel@{i}) (a b : Δ) (r s : a =={Δ} b) : subst (λ y : Δ, b == y) s (subst (λ x : Δ, x == a) r (refl a)) == refl b <~> r =={a =={Δ} b} s. Proof. induction Δ. + simpl in *. destruct r. unfold subst. simpl. edestruct (eq_sym (Top.J_on_refl@{i i} _ _ s)). apply eq_sym_equiv. + unfold subst. revert b r s. refine (J@{i i i} _ _ _). intros s. edestruct (eq_sym (J_refl@{i i i} _ (fun v _ => v == a) (refl a))). edestruct (eq_sym (J_on_refl@{i i i} _ _ s)). refine (eq_tele_sym_equiv@{i}). Defined. (** This is the square we get (almost) by applying congruence: it is dependent over e. *) Definition dep_square {Γ : Tel} (Δ : Γ -> Tel) u v (e : u =={Γ} v) (a b : forall ρ, Δ ρ) (eqΔ := λ ρ, a ρ =={Δ ρ} b ρ) (r : eqΔ u) (s : eqΔ v) := (subst (fun y => telescope (b u =={Δ;e} y)) s (subst (fun y => telescope (y =={Δ;e} a v)) r (dcong_tel a e)) =={b u =={Δ;e} b v} (dcong_tel b e)). Definition square_tel {Δ : Tel} {w x y z : Δ} (t : w =={Δ} x) (b : y == z) (l : w == y) (r : x == z) : Tel := subst (fun x : Δ => x == y) t l =={fun y => x == y;b} r. Arguments telescope : simpl never. (** This is the square we want: we already simplified the dependency on of the endpoints types. *) Lemma inj_extend_tel_equiv@{i} (Γ : Tel@{i}) (u v : Γ) (Δ : Tel@{i}) (a b : Γ → Δ) (eqΔ:=λ ρ : Γ, a ρ =={Δ} b ρ) (r : eqΔ u) (s : eqΔ v) : inj_extend_tel Γ eqΔ u r =={extend_tele Γ eqΔ} inj_extend_tel Γ eqΔ v s <~> extend_tele (u =={Γ} v) (λ x : u =={Γ} v, square_tel r s (cong_tel a x) (cong_tel b x)). induction Γ. Transparent telescope eq. - simpl extend_tele. simpl inj_extend_tel. refine (equiv_tele_r _). intros x. unfold square_tel. simpl in x. revert v x s. refine (Top.J@{i i} _ _). intros s. simpl. unfold square_tel. unfold cong_tel. simpl. subst eqΔ. simpl in *. refine (equiv_sym _). apply subst_subst. - simpl. refine (equiv_tele_r _). intros. destruct v. simpl in *. subst eqΔ. simpl in *. revert pr1 x pr2 s. refine (Top.J@{i i} _ _). simpl. intros. specialize (X u.1 u.2 pr2). specialize (X (fun ρ => a &(u.1 & ρ))). simpl in X. specialize (X (fun ρ => b &(u.1 & ρ))). simpl in X. destruct u. simpl in *. specialize (X r s). apply X. Defined. Definition lifted_solution@{i j} (Γ : Tel@{i}) (u v : Γ) (Γ' : Tel@{i}) (Δ : Tel@{i}) (a b : Γ -> Δ) (eqΔ := λ ρ, a ρ =={Δ} b ρ) (r : eqΔ u) (s : eqΔ v) (f : extend_tele Γ eqΔ <~> Γ') : tele (e : u =={Γ} v) in square_tel r s (cong_tel a e) (cong_tel b e) <~> f (inj_extend_tel Γ eqΔ u r) =={Γ'} f (inj_extend_tel Γ eqΔ v s). Proof. refine (equiv_compose _ _). Focus 2. refine (equiv_compose _ _). refine (cong_equiv@{i i i} (extend_tele Γ eqΔ) (inj_extend_tel Γ eqΔ u r) (inj_extend_tel Γ eqΔ v s) _ f _). Show Universes. refine (eq_points_equiv _ _ _). unfold square_tel. refine (equiv_compose _ _). refine (equiv_sym _). refine (reorder_tele@{i j} (u =={Γ} v) (fun x => _)). refine (equiv_sym _). apply inj_extend_tel_equiv. Defined. Lemma lower_solution@{i +} : forall (A : Type@{i}) n, Equiv@{i} (tele (x' : A) (n' : nat) (v : vector A n') in (S n' = S n)) (tele (x : A) in vector A n). Proof. intros A n. unshelve refine {| equiv a := _ |}. refine &(a.1 & _). destruct a. destruct pr2. destruct pr2. simpl in pr3. noconf pr3. exact pr2. unshelve eapply BuildIsEquiv@{i}. intros a. refine &(a.1, n & _). refine &(a.2 & eq_refl). intro. simpl. unfold solution_left. simpl. reflexivity. intro. simpl. unfold solution_left. unfold NoConfusion.noConfusion_nat_obligation_1. simpl. destruct x. destruct pr2. destruct pr2. simpl. refine (Top.cong@{i i} _ _). revert pr3. simplify_one_dep_elim. 
simplify_one_dep_elim. intros. reflexivity. intros. simpl. destruct x as (x&n'&v&e). unfold solution_left_dep, apply_noConfusion. simpl. unfold Top.cong. revert e. simpl. simplify_one_dep_elim. simplify_one_dep_elim. intros. reflexivity. Defined. Definition telu A := tele (x' : A) (n' : nat) in vector A n'. Definition telv A n := tele (x : A) in vector A n. Lemma apply_equiv_dom {A B} (P : B -> Type) (e : Equiv A B) : (forall x : A, P (equiv e x)) -> forall x : B, P x. Proof. intros. specialize (X (e ^-1 x)). rewrite inv_equiv_equiv in X. exact X. Defined. Lemma apply_equiv_codom {A} {B B' : A -> Type} (e : forall x, Equiv (B x) (B' x)) : (forall x : A, B x) <~> forall x : A, B' x. Proof. intros. unshelve refine {| equiv f := fun x => e x (f x) |}. unshelve refine {| equiv_inv f := fun x => (e x)^-1 (f x) |}. red; intros. extensionality y. apply inv_equiv_equiv. intro. extensionality y. apply equiv_inv_equiv. intros. apply axiom_triangle. Defined. Polymorphic Lemma equiv_switch_indep {A : Type} {B : Type} : (tele (_ : A) in B <~> tele (_ : B) in A). Proof. unshelve refine {| equiv a := _ |}. simpl. exact &(a.2 & a.1). unshelve refine {| equiv_inv a := _ |}. simpl. exact &(a.2 & a.1). - intro a. simpl. reflexivity. - intro a. simpl. reflexivity. - intro a. simpl. reflexivity. Defined. Polymorphic Lemma equiv_elim_unit {A : Type} : (tele (_ : []) in A) <~> inj A. Proof. unshelve refine {| equiv a := _ |}. simpl. exact a.2. unshelve refine {| equiv_inv a := _ |}. simpl. exact &(tt & a). - intro a. simpl. reflexivity. - intros [[ ] t]. simpl. reflexivity. - intros [[ ] t]. simpl. reflexivity. Defined. Set Printing Universes. Arguments telescope : simpl never. Polymorphic Lemma solution_inv_tele@{i j +} {A : Type@{i}} (B : A -> Type@{i}) (x : A) (p q : B x) : (Equiv@{i} (p = q) (sigma@{i} _ (fun x0 : x = x => sigma@{i} _ (fun _ : p ={ B; x0} q => x0 = eq_refl)))). Proof. refine (equiv_compose (B:=sigma@{i} _ (fun x0 : x = x => sigma@{i} _ (fun _ : x0 = eq_refl => p ={ B; x0} q))) _ _). all:cycle 1. refine (equiv_tele_r _); intro e. refine (equiv_switch_indep@{i i i}). refine (equiv_sym _). refine (equiv_compose _ _). refine (reorder_tele@{i j} (tele (e : x = x) in ((e = eq_refl) : Type@{i})) (fun ρ => inj (p ={B;ρ.1} q))). simpl. refine (equiv_compose _ _). refine (equiv_sym (equiv_tele_l@{i i i} _)). refine (equiv_sym _). refine (@solution (x = x) eq_refl). simpl. refine equiv_elim_unit. Defined. Definition NoConf@{i} := fun (A : Type@{i}) (x : sigma@{i} _ (fun index : nat => vector A index)) => match x.2 with | Vector.nil => fun y : &{ index : nat & vector A index} => match y.2 with | Vector.nil => inj@{i} unit | Vector.cons _ _ => inj@{i} False end | @Vector.cons _ h n x0 => fun y : &{ index : nat & vector A index} => match y.2 with | Vector.nil => inj@{i} False | @Vector.cons _ h0 n0 x1 => telei (h) (n) in (x0) =={tele (x : A) (n : nat) in vector A n} telei (h0) (n0) in (x1) end end. Inductive Iseq2 {A : Type} : forall x y: A, x = y -> y = x -> Type := iseq2 w : Iseq2 w w eq_refl eq_refl. Lemma invIseq2' {A} (x : A) (e : x = x) (iseq : Iseq2 x x e eq_refl) : &{ H : eq_refl = e & (Top.subst (P:=fun e => Iseq2 x x e eq_refl) H (iseq2 x)) = iseq }. generalize_eqs_sig iseq. destruct iseq. intros H; symmetry in H. revert H. refine (eq_simplification_sigma1_dep_dep _ _ _ _ _). intros. subst iseq0. revert e'. intros e'. set (eos := the_end_of_the_section). move eos before A. uncurry_hyps pack. pattern sigma pack. clearbody pack. clear. 
set(vartel := tele (x : A) (e : x = x) (w : A) (e : (&(w, w, eq_refl & eq_refl) = &(x, x, e & eq_refl) :> (tele (x : A) (y : A) (e : x = y) in (y = x)))) in unit). change (telescope vartel) in pack. unfold vartel in pack. clear vartel. revert pack. unshelve refine (apply_equiv_dom _ _ _). shelve. refine (equiv_sym _). - refine (equiv_compose _ _). set(vartel := tele (x : A) (e : x = x) in A). set(eqtel' := (fun x : vartel => tele (_ : &(x.2.2, x.2.2, eq_refl & eq_refl) = &(x.1, x.1, x.2.1 & eq_refl) :> tele (x : A) (y : A) (e : x = y) in (y = x)) in unit)). pose (reorder_tele vartel eqtel'). simpl in e. unfold eqtel' in e. simpl in e. refine e. refine (equiv_compose _ _). refine (equiv_compose _ _). refine (equiv_tele_r _). intros x. set(eqtel := (tele (x : A) (y : A) (e : x = y) in (y = x))). set(vartel := tele (x : A) (e : x = x) in A). set(eqtel'' := (fun x : vartel => tele (_ : &(x.2.2, x.2.2, eq_refl & eq_refl) =={eqtel} &(x.1, x.1, x.2.1 & eq_refl)) in unit)). refine (equiv_compose _ _). refine (equiv_tele_l (B := fun _ => unit) _). apply (@eq_points_equiv eqtel). refine (equiv_id _). cbn. simpl. refine (equiv_id _). refine (equiv_id _). simpl. refine (con simpl. simplify ?. simpl. Lemma noconf : forall (A : Type) (a b : &{ index : nat & vector A index}), a =={ext nat (fun n => inj (vector A n))} b -> NoConf A a b. Proof. intros. destruct X. destruct a, b. simpl in pr1, pr2. destruct pr1. simpl in pr2. destruct pr2. simpl. destruct pr3. simpl. simpl. exact tt. simpl. exists eq_refl. exists eq_refl. simpl. constructor. Defined. Lemma noconf_inv : forall (A : Type) (a b : &{ index : nat & vector A index}), NoConf A a b -> a =={ext nat (fun n => inj (vector A n))} b. Proof. intros. destruct a, b. destruct pr2, pr3; try constructor || contradiction. simpl in X. exists eq_refl. constructor. unfold NoConf in X. cbv beta iota delta -[telescope eq_expl] in X. apply (@cong_tel (tele (x : A) (n : nat) in (vector A n)) (tele (n1 : nat) in vector A n1) (fun x => &(S x.2.1 & Vector.cons x.1 x.2.2)) _ _ X). Defined. Import NoConfusion. Global Instance noconf_isequiv A a b : IsEquiv (noconf A a b). Proof. unshelve refine {| equiv_inv := noconf_inv A a b |}. intro. - destruct_sigma a; destruct_sigma b. destruct a ; destruct b; simpl in * |-. simpl. on_last_hyp ltac:(fun id => destruct_tele_eq id || destruct id); solve [constructor]. simpl. bang. simpl. bang. simpl. unfold telescope in x. destruct_sigma x. destruct_sigma x. destruct idx, idx0. simpl in x. destruct x. simpl. reflexivity. - intro. destruct_sigma a; destruct_sigma b. destruct x. simpl in *. destruct pr1, pr2. destruct a; simpl in * |-; constructor. - intros. destruct x, a, b. simpl in *; destruct pr1, pr2; simpl. destruct pr3; constructor. Defined. Definition noconf_equiv A a b : Equiv (a =={tele (n : nat) in vector A n} b) (NoConf A a b) := {| equiv := noconf A a b |}. Definition injectivity_cons2 {A} (u v : tele (x : A) (n : nat) in vector A n) : tele (e : S u.2.1 = S v.2.1) in (@Vector.cons A u.1 u.2.1 u.2.2 ==_{fun x : telescope (inj nat) => Vector.t A x;e} @Vector.cons A v.1 v.2.1 v.2.2) <~> u == v. Proof. refine (noconf_equiv A &(S u.2.1 & @Vector.cons A u.1 u.2.1 u.2.2) &(S v.2.1 & @Vector.cons A v.1 v.2.1 v.2.2)). Defined. 
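(** [intros_tele] peels off the first component of a telescope (or sigma type)
    on the left-hand side of an [Equiv] goal, introducing the corresponding
    variable and proceeding pointwise through [equiv_tele_r]. *)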
Ltac intros_tele := match goal with |- Equiv (telescope (ext _ ?F)) _ => refine (equiv_tele_r _); match F with (fun x => @?F x) => intros ?x; match goal with id : _ |- Equiv _ ?y => let f' := eval simpl in (F id) in change (Equiv (telescope f') y) end end | |- Equiv (sigma _ (fun x => _)) _ => refine (equiv_tele_r _); intros ?x end. Lemma rew_sym@{i} (A : Type@{i}) {Δ : A -> Tel@{i}} (x y : A) (px : Δ x) (py : Δ y) (e : y = x) : px =={Δ x} Top.subst (P:=Δ) e py -> Top.subst (P:=Δ) (eq_sym e) px =={Δ y} py. Proof. destruct e. simpl. trivial. Defined. Equations sym {Δ : Tel} {s t : Δ} (e : s =={Δ} t) : t =={Δ} s := sym {Δ:=(inj A)} e := eq_sym e ; sym {Δ:=(ext A f)} e := &(eq_sym e.1 & rew_sym _ _ _ _ _ _ (sym e.2)). Lemma cong_tel_proj@{i} (Δ : Tel@{i}) (A : Type@{i}) (Γ : A -> Tel@{i}) (f : Δ → ext A Γ) (u v : Δ) (e : u =={Δ} v) : (cong_tel f e).1 = cong_tel (Γ:=inj A) (fun x => (f x).1) e. Proof. induction Δ. + revert v e. refine (J@{i i i} _ _ _). simpl. unfold cong_tel. simpl. reflexivity. + revert v e. refine (J@{i i i} _ _ _). simpl. specialize (H u.1 (fun t => f &(u.1 & t)) u.2 u.2 (refl _)). unfold cong_tel at 1. simpl. unfold cong_tel in H. rewrite H. reflexivity. Defined. Lemma cong_tel_nondep@{i} {Δ Γ : Tel@{i}} (T : Γ) (u v : Δ) (e : u =={Δ} v) : cong_tel (fun _ => T) e == refl T. Proof. revert v e. refine (J _ _ _). unfold cong_tel. rewrite J_refl. apply refl. Defined. Arguments eq : simpl never. Lemma example@{i j +} {A : Type@{i}} : sigma@{j} (Tel@{i}) (fun Γ' : Tel@{i} => Equiv@{i} (tele (n : nat) (x y : A) (v v' : Vector.t A n) in (Vector.cons x v = Vector.cons y v')) Γ'). Proof. intros. eexists. refine (equiv_compose _ _). do 5 intros_tele. 2:simpl. refine (equiv_compose _ _). refine (solution_inv_tele (A:=nat) (Vector.t A) _ _ _). refine (equiv_compose _ _). refine (reorder_tele@{i j} (ext@{i} _ (fun e0 : S n = S n => inj@{i} (Vector.cons x v ={ vector A ; e0} (Vector.cons y v')))) (fun ρ => inj@{i} (ρ.1 = eq_refl))). simpl. refine (equiv_compose _ _). refine (equiv_sym _). refine (equiv_tele_l _). refine (equiv_sym _). refine (injectivity_cons2@{i i} &(x, n & v) &(y, n & v')). cbn. pose (lower_solution@{i i} A n). pose (inv_equiv e &(x & v)). simpl in e. pose (sol:=lifted_solution@{i j} (tele (_ : A) (n' : nat) in vector A n')). simpl in sol. simpl in t0. unfold e, lower_solution, equiv, equiv_inv, inv_equiv in t0. simpl in t0. unfold e, lower_solution, equiv, equiv_inv, inv_equiv in t0. simpl in t0. set (solinst := sigmaI@{i} (fun x => sigma@{i} nat (fun n => vector A n)) t0.1 &(t0.2.1 & t0.2.2.1)). specialize (sol solinst). specialize (sol &(y, n & v')). (* specialize (sol solinst). (*&(y, n & v')).*) *) specialize (sol (telv@{i i} A n)). specialize (sol (inj@{i} nat)). simpl in sol. specialize (sol (fun x => S x.2.1) (fun x => S n) eq_refl eq_refl). simpl in sol. specialize (sol e). subst e. simpl in sol. unfold solution_left in *. simpl in *. unfold inv_equiv in sol. unfold eq_points_equiv in sol. simpl in *. unfold equiv_inv in *. simpl in *. unfold cong in sol. simpl in *. refine (equiv_compose (C:=tele (e : x = y) in (v ={ (λ _ : A, inj (vector A n)); e} v')) _ sol). simpl. refine (equiv_tele_r _). intros. unfold equiv_sym. unfold injectivity_cons2. simpl. unfold noconf_equiv, equiv, inv_equiv, equiv_inv. simpl. unfold square_tel. simpl. Transparent eq telescope. simpl. clear sol. subst solinst. unfold subst. simpl. 
rewrite (cong_tel_proj@{i} _ _ (fun x : nat => inj (vector A x)) (λ x0 : tele (_ : A) (n0 : nat) in vector A n0, &(S (x0.2).1 & Vector.cons x0.1 (x0.2).2)) _ _ x0). simpl. Transparent telescope. unfold telescope at 1. simpl telescope. rewrite (cong_tel_nondep@{i} (Γ:=inj nat) (S n) _ _ x0). refine (equiv_id _). simpl. refine (equiv_id _). Defined. Print Assumptions example. Eval compute in (pr1 (@example nat)). Definition uncurry4 {A} {B : A -> Type} {C : forall x : A, B x -> Type} {D : forall (a : A) (b : B a) (c : C a b), Type} {E : forall (a : A) (b : B a) (c : C a b) (d : D a b c), Type} (f : forall s : tele (a : A) (b : B a) (c : C a b) in (D a b c), E s.1 s.2.1 s.2.2.1 s.2.2.2) : forall (x : A) (b : B x) (c : C x b) (d : D x b c), E x b c d := fun x b c d => f &(x , b , c & d). Definition uncurry5 {A} {B : A -> Type} {C : forall x : A, B x -> Type} {D : forall (a : A) (b : B a) (c : C a b), Type} {E : forall (a : A) (b : B a) (c : C a b) (d : D a b c), Type} {F : forall (a : A) (b : B a) (c : C a b) (d : D a b c) (e : E a b c d), Type} (f : forall s : tele (a : A) (b : B a) (c : C a b) (d : D a b c) in E a b c d, F s.1 s.2.1 s.2.2.1 s.2.2.2.1 s.2.2.2.2) : forall (x : A) (b : B x) (c : C x b) (d : D x b c) (e : E x b c d), F x b c d e := fun x b c d e => f &(x , b , c , d & e). (* Lemma apply_equiv_dom {A B} (P : B -> Type) (e : Equiv A B) : *) (* (forall x : A, P (equiv e x)) -> forall x : B, P x. *) (* Proof. *) (* intros. *) (* specialize (X (e ^-1 x)). *) (* rewrite inv_equiv_equiv in X. exact X. *) (* Defined. *) Definition uncurry6 {A} {B : A -> Type} {C : forall x : A, B x -> Type} {D : forall (a : A) (b : B a) (c : C a b), Type} {E : forall (a : A) (b : B a) (c : C a b) (d : D a b c), Type} {F : forall (a : A) (b : B a) (c : C a b) (d : D a b c) (e : E a b c d), Type} {G : forall (a : A) (b : B a) (c : C a b) (d : D a b c) (e : E a b c d) (f : F a b c d e), Type} (fn : forall s : tele (a : A) (b : B a) (c : C a b) (d : D a b c) (e : E a b c d) in F a b c d e, G s.1 s.2.1 s.2.2.1 s.2.2.2.1 s.2.2.2.2.1 s.2.2.2.2.2) : forall (x : A) (b : B x) (c : C x b) (d : D x b c) (e : E x b c d) (f : F x b c d e), G x b c d e f := fun x b c d e f => fn &(x , b , c , d , e & f). Goal forall {A} n (x y : A) (v v' : Vector.t A n) (e : Vector.cons x v = Vector.cons y v') (P : forall n x y v v' (e : Vector.cons x v = Vector.cons y v'), Type), (P n x x v v eq_refl) -> P n x y v v' e. Proof. intros. revert e P X. revert n x y v v'. refine (uncurry6 _). unshelve refine (apply_equiv_dom _ _ _). shelve. refine (equiv_sym _). refine (pr2 (@example A)). intros. Transparent telescope eq. simpl in x. destruct x as (n&x&y&v&v'&e&e'). vm_compute in X. simpl in e'. destruct e. destruct e'. vm_compute. unfold cong_tel_proj. exact X. Defined. Lemma NoConfusionPackage_isequiv {A} (a b : A) {e : NoConfusionPackage A} : Equiv (a = b) (NoConfusion a b). Proof. unshelve refine {| equiv := noConfusion |}. unshelve refine {| equiv_inv := noConfusion_inv |}. red; intros. apply axiom_triangle. red. apply noConfusion_is_equiv. apply axiom_triangle. Defined. (* Lemma equiv_K A {NC : NoConfusionPackage A} (x y : A) : forall p q : NoConfusion x y, p = q. *) (* Proof. *) (* intros. *) (* pose (NoConfusionPackage_isequiv x y). *) (* destruct x, y; simpl in *. *) (* destruct p, q. reflexivity. *) (* destruct p. *) (* destruct p. *) (* destruct p. *) Equations noConf_nat (x y : nat) : Type := noConf_nat 0 0 := True; noConf_nat (S x) (S y) := noConf_nat x y; noConf_nat _ _ := False. 
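(** [noConf_nat] is a hand-rolled no-confusion predicate for [nat]: [True] on
    two zeros, recursion on two successors, [False] on a constructor clash.
    The two checks below are added illustrations only and are not used by the
    development: *)
Eval compute in noConf_nat 2 2. (* added illustration: reduces to True *)
Eval compute in noConf_nat 1 2. (* added illustration: reduces to False *)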
(* BUG if with ind *) Equations(noind) noConf_nat_inv (x y : nat) (e : x = y) : noConf_nat x y := noConf_nat_inv x ?(x) eq_refl <= x => { | 0 => I; | S n => (noConf_nat_inv n n eq_refl) }. Next Obligation. Transparent noConf_nat_inv. unfold noConf_nat_inv. destruct x. simpl. apply eq_refl. apply eq_refl. Defined. Lemma noConfusion_nat_k (x y : nat) (p : noConf_nat x y) : x = y. Proof. induction x in y, p |- *; destruct y. destruct p. reflexivity. destruct p. destruct p. apply Top.cong. apply IHx. apply p. Defined. Lemma iseq x y : IsEquiv (noConfusion_nat_k x y). Proof. unshelve refine {| equiv_inv := noConf_nat_inv x y |}. apply axiom_triangle. apply axiom_triangle. apply axiom_triangle. Defined. Definition equiv' x y : Equiv (noConf_nat x y) (x = y) . Proof. refine {| equiv := noConfusion_nat_k x y |}. apply iseq. Defined. Lemma noConfusion_nat_k3 (x : nat) (p : noConf_nat x x) : noConfusion_nat_k x x p = eq_refl. Proof. induction x. simpl. destruct p. reflexivity. simpl. rewrite IHx. reflexivity. Defined. Lemma equiv_unit A (x : A) : inj (@eq_refl A x = eq_refl) <~> inj unit. Proof. refine {| equiv x := tt |}. unshelve refine {| equiv_inv x := eq_refl |}. red; intros. destruct x0. reflexivity. red; intros. revert x0. set (foo:=@eq_refl A x). clearbody foo. intros x0. apply axiom_triangle. apply axiom_triangle. Defined. Lemma noConf_nat_refl_true n : noConf_nat n n <~> inj unit. Proof. refine {| equiv x := tt |}. unshelve refine {| equiv_inv x := _ |}. induction n. constructor. apply IHn. red; intros. destruct x. reflexivity. red; intros. induction n. destruct x. reflexivity. simpl. apply IHn. simpl. intros. induction n ; simpl. destruct x. reflexivity. simpl. apply IHn. Defined. Lemma example' {A} : &{ Γ' : Tel & tele (n : nat) (x y : A) (v v' : Vector.t A n) in (Vector.cons x v = Vector.cons y v') <~> Γ' }. Proof. intros. eexists. refine (equiv_compose _ _). do 5 intros_tele. 2:simpl. refine (equiv_compose _ _). refine (solution_inv_tele (A:=nat) (Vector.t A) _ _ _). refine (equiv_compose _ _). refine (reorder_tele (tele (e0 : S n = S n) in (Vector.cons x v ={ vector A ; e0} (Vector.cons y v'))) (fun ρ => inj (ρ.1 = eq_refl))). simpl. refine (equiv_compose _ _). refine (equiv_sym _). refine (equiv_tele_l _). refine (equiv_sym _). refine (injectivity_cons2 &(x, n & v) &(y, n & v')). refine (equiv_compose (C:=tele (e : x = y) in (v ={ (λ _ : A, inj (vector A n)); e} v')) _ _). refine (equiv_tele_r _). intros. unfold telescope in x0. simpl in x0. destruct x0. destruct pr2. destruct pr1. simpl in *. unfold injectivity_cons2. simpl. unfold noconf_equiv, equiv, equiv_sym, inv_equiv, equiv_inv. simpl. simpl. intros. eexists. refine (equiv_compose _ _). do 5 intros_tele. 2:simpl. refine (equiv_compose _ _). refine (solution_inv_tele (A:=nat) (Vector.t A) _ _ _). refine (reorder_tele (tele (e0 : S n = S n) in (Vector.cons x v ={ vector A ; e0} (Vector.cons y v'))) (fun ρ => inj (ρ.1 = eq_refl))). simpl. refine (equiv_compose _ _). refine (equiv_sym _). refine (equiv_tele_l _). refine (equiv_sym _). refine (injectivity_cons2 &(x, n & v) &(y, n & v')). refine (equiv_compose _ _). refine (equiv_sym _). refine (equiv_tele_l _). refine (equiv' _ _). simpl. unfold equiv', equiv. simpl. refine (equiv_compose _ _). intros_tele. rewrite noConfusion_nat_k3. simpl. unfold telescope. intros_tele. refine (equiv_unit _ _). simpl. refine (equiv_compose _ _). refine (equiv_sym _). refine (equiv_tele_l _). refine (equiv_sym _). refine (noConf_nat_refl_true _). simpl. intros_tele. simpl. 
refine (equiv_compose _ _). unfold telescope. intros_tele. refine (equiv_id _). refine (equiv_id _). simpl. rewrite noConfusion_nat_k3. simpl. unfold telescope. intros_tele. refine (equiv_unit _ _). simpl. pose (lower_solution A n). pose (inv_equiv e &(x & v)). simpl in e. pose (telei x n in v : telu A). pose (sol:=lifted_solution (tele (_ : A) (n' : nat) in vector A n')). simpl in sol. simpl in t0. unfold e, lower_solution, equiv, equiv_inv, inv_equiv in t0. simpl in t0. unfold e, lower_solution, equiv, equiv_inv, inv_equiv in t0. simpl in t0. set (solinst := sigmaI (fun x => sigma nat (fun n => vector A n)) t0.1 &(t0.2.1 & t0.2.2.1)). specialize (sol solinst). specialize (sol &(y, n & v')). (* specialize (sol solinst). (*&(y, n & v')).*) *) specialize (sol (telv A n)). specialize (sol (inj nat)). simpl in e. specialize (sol (fun x => S x.2.1) (fun x => S n) eq_refl eq_refl). simpl in sol. specialize (sol e). subst e. simpl in sol. unfold solution_left in *. simpl in *. unfold inv_equiv in sol. unfold eq_points_equiv in sol. simpl in *. unfold equiv_inv in *. simpl in *. unfold cong in sol. simpl in *. refine (equiv_compose (C:=tele (e : x = y) in (v ={ (λ _ : A, inj (vector A n)); e} v')) _ sol). refine (equiv_tele_r _). intros. Lemma noConfusion_nat_k2 (x y : nat) (p q : noConf_nat x y) : p = q. Proof. induction x in y, p, q |- *; destruct y. destruct p, q. reflexivity. destruct p. destruct p. simpl in p, q. apply IHx. Defined. Lemma noConf_HProp (x y : nat) : (forall p q : x = y, p = q). Proof. unshelve refine (apply_equiv_dom _ _ _). shelve. refine (equiv_sym (NoConfusionPackage_isequiv x y)). intros x0. unshelve refine (apply_equiv_dom _ _ _). shelve. refine (equiv_sym (NoConfusionPackage_isequiv x y)). revert x0. revert x y. fix 1. unfold equiv, equiv_inv, equiv_sym, inv_equiv, NoConfusionPackage_isequiv, NoConfusion.noConfusion_nat_obligation_1; simpl; unfold equiv, equiv_inv, equiv_sym, inv_equiv, NoConfusionPackage_isequiv, NoConfusion.noConfusion_nat_obligation_1; simpl. destruct x; destruct y; intros. destruct x0, x. reflexivity. destruct x0. destruct x0. change (Top.cong S x0 = Top.cong S x1). apply Top.cong. simpl in x0, x1. intros. simpl in x0, x1. pose (equiv (NoConfusionPackage_isequiv x y) x0). pose (equiv (NoConfusionPackage_isequiv x y) x1). specialize (noConf_HProp _ _ n n0). simpl. subst n n0. change ( @equiv (@NoConfusion nat NoConfusionPackage_nat x y) (@Logic.eq nat x y) (@equiv_sym (@Logic.eq nat x y) (@NoConfusion nat NoConfusionPackage_nat x y) (@NoConfusionPackage_isequiv nat x y NoConfusionPackage_nat))) with (@inv_equiv _ _ (@NoConfusionPackage_isequiv nat x y NoConfusionPackage_nat)) in *. rewrite equiv_inv_equiv in noConf_HProp. rewrite equiv_inv_equiv in noConf_HProp. apply noConf_HProp. Defined. Eval compute in noConf_HProp. Lemma noConf_HProp : (forall x y : nat, NoConfusion_nat x y) <~> (forall x y : nat, forall p q : x = y, p = q). Proof. refine (equiv_compose _ _). refine (equiv_sym _). refine (apply_equiv_codom _). intros x. refine (apply_equiv_codom _). intros x0. refine (NoConfusionPackage_isequiv x x0). simpl. unshelve refine {| equiv f := fun x y p q => _ |}. pose (f x y). transitivity e. destruct p. unshelve refine {| equiv_inv f := fun x y => _ |}. refine (match f x y with | left p => p | right e => _ end). Lemma noConf_K : (forall x y : nat, NoConfusion_nat x y) <~> (forall x y : nat, { x = y } + { x <> y }). Proof. refine (equiv_compose _ _). refine (equiv_sym _). refine (apply_equiv_codom _). intros x. refine (apply_equiv_codom _). 
intros x0. refine (NoConfusionPackage_isequiv x x0). simpl. unshelve refine {| equiv f := fun x y => left (f x y) |}. unshelve refine {| equiv_inv f := fun x y => _ |}. refine (match f x y with | left p => p | right e => _ end). Coq-Equations-1.3.1-8.20/test-suite/telescopes_nonempty.v000066400000000000000000002026461463127417400232100ustar00rootroot00000000000000Require Import Equations. Set Universe Polymorphism. Open Scope sigma_scope. Polymorphic Definition pr1_seq {A} {P : A -> Type} {p q : sigma A P} (e : p = q) : p.1 = q.1. Proof. destruct e. apply eq_refl. Defined. Require Vector. Derive NoConfusion for Vector.t. Notation " 'rew' H 'in' c " := (@eq_rect _ _ _ c _ H) (at level 20). Definition J {A} {x : A} (P : forall y : A, x = y -> Type) (p : P x eq_refl) (y : A) (e : x = y) : P y e. destruct e. exact p. Defined. Definition subst {A : Type} {x : A} {P : A -> Type} {y : A} (e : x = y) (f : P x) : P y := J (fun x _ => P x) f y e. Definition subst2 {A : Type} {x : A} {P : A -> Type} (f : P x) (y : A) (e : x = y) : P y := J (fun x _ => P x) f y e. Definition cong {A B : Type} (f : A -> B) {x y : A} (e : x = y) : f x = f y := J (fun y _ => f x = f y) (@eq_refl _ (f x)) y e. (* aka ap *) Definition congd {A} {B : A -> Type} (f : forall x : A, B x) {x y : A} (p : x = y) : subst p (f x) = f y := J (fun y p => subst p (f x) = f y) (@eq_refl _ (f x)) y p. (* aka apd *) Notation " 'rew' H 'in' c " := (@subst _ _ _ _ H c) (at level 20). Notation "p =_{ P ; e } q" := (subst (P:=P) e p = q) (at level 90, format "p =_{ P ; e } q"). Definition subst_expl {A : Type} {x : A} {P : A -> Type} {y : A} (e : x = y) (f : P x) : P y := subst e f. Notation " 'rewP' H 'at' P 'in' c " := (@subst_expl _ _ P _ H c) (at level 20). Definition Sect {A B : Type} (s : A -> B) (r : B -> A) := forall x : A, r (s x) = x. (** A typeclass that includes the data making [f] into an adjoin equivalence*) Class IsEquiv {A B : Type} (f : A -> B) := BuildIsEquiv { equiv_inv : B -> A ; eisretr : Sect equiv_inv f; eissect : Sect f equiv_inv; eisadj : forall x : A, eisretr (f x) = cong f (eissect x) }. Arguments eisretr {A B} f {_} _. Arguments eissect {A B} f {_} _. Arguments eisadj {A B} f {_} _. Record Equiv (A B : Type) := { equiv :> A -> B ; is_equiv :> IsEquiv equiv }. Arguments equiv {A B} e. Instance Equiv_IsEquiv {A B} (e : Equiv A B) : IsEquiv (equiv e). Proof. apply is_equiv. Defined. Definition inv_equiv {A B} (E: Equiv A B) : B -> A := equiv_inv (IsEquiv:=is_equiv _ _ E). Definition equiv_inv_equiv {A B} {E: Equiv A B} (x : A) : inv_equiv _ (equiv E x) = x := eissect _ x. Definition inv_equiv_equiv {A B} {E: Equiv A B} (x : B) : equiv E (inv_equiv _ x) = x := eisretr _ x. Definition equiv_adj {A B} {E: Equiv A B} (x : A) : inv_equiv_equiv (equiv E x) = cong (equiv E) (equiv_inv_equiv x) := eisadj _ x. Notation " X <~> Y " := (Equiv X Y) (at level 90, no associativity, Y at next level). Definition equiv_id A : A <~> A. Proof. intros. refine {| equiv a := a |}. unshelve refine {| equiv_inv e := e |}. - red. reflexivity. - red; intros. reflexivity. - intros. simpl. reflexivity. Defined. Axiom axiom : forall {A}, A. Definition equiv_sym {A B} : A <~> B -> B <~> A. Proof. intros. refine {| equiv a := inv_equiv X a |}. unshelve refine {| equiv_inv e := equiv X e |}. - red; intros. apply eissect. - red; intros. apply eisretr. - intros x. simpl. destruct X. simpl. unfold inv_equiv. simpl. apply axiom. Defined. Require Import DepElimDec. (* Unset Equations OCaml Splitting. 
*) (* BUG *) (* Equations tel_eq (Δ : Tel) (t s : Tuple Δ) : Type := *) (* tel_eq nil nil nil := unit; *) (* tel_eq (consTel A f) (cons t ts) (cons s ss) := *) (* sigma (t = s) (fun e : t = s => tel_eq (f s) (rewP e at fun x => Tuple (f x) in ts) ss). *) Set Equations Transparent. Set Refolding Reduction. Ltac rewrite_change c := match type of c with ?foo = ?bar => change foo with bar in * end. (* Definition path_sigma_uncurried {A : Type} {P : A -> Type} (u v : sigma A P) *) (* (pq : sigma _ (fun p => subst p u.2 = v.2)) : u = v. *) (* Proof. *) (* destruct u, v. simpl in *. *) (* destruct pq. revert pr0 pr4 pr3 pr5. refine (Top.J _ _). *) (* simpl. refine (Top.subst2 _). reflexivity. *) (* Defined. *) Definition path_sigma_uncurried {A : Type} {P : A -> Type} (u v : sigma A P) (pq : &{p : u.1 = v.1 & subst p u.2 = v.2}) : u = v := match pq.2 in (_ = v2) return u = &(v.1 & v2) with | eq_refl => match pq.1 as p in (_ = v1) return u = &(v1 & subst p u.2) with | eq_refl => eq_refl end end. (** Simplify only if [pq] is a constructor *) Arguments path_sigma_uncurried _ _ _ _ !pq : simpl nomatch. (* Equations path_sigma_uncurried {A : Type} {P : A -> Type} (u v : sigma A P) *) (* (pq : sigma _ (fun p => subst p u.2 = v.2)) *) (* : u = v := *) (* path_sigma_uncurried (sigmaI u1 u2) (sigmaI v1 v2) (sigmaI eq_refl eq_refl) := *) (* eq_refl. *) Definition pr1_path {A} `{P : A -> Type} {u v : sigma A P} (p : u = v) : u.1 = v.1 := cong (@pr1 _ _) p. Notation "p ..1" := (pr1_path p) (at level 3). Definition pr2_path {A} `{P : A -> Type} {u v : sigma A P} (p : u = v) : rew (p..1) in u.2 = v.2. destruct p. apply eq_refl. Defined. Notation "p ..2" := (pr2_path p) (at level 3). Definition eta_path_sigma_uncurried {A} `{P : A -> Type} {u v : sigma A P} (p : u = v) : path_sigma_uncurried _ _ &(p..1 & p..2) = p. destruct p. apply eq_refl. Defined. Definition path_sigma {A : Type} {P : A -> Type} {u v : sigma A P} (p : u.1 = v.1) (q : rew p in u.2 = v.2) : u = v := match q in (_ = v2) return u = &(v.1 & v2) with | eq_refl => match p as p in (_ = v1) return u = &(v1 & subst p u.2) with | eq_refl => eq_refl end end. (* Equations path_sigma {A : Type} {P : A -> Type} {u v : sigma A P} *) (* (p : u.1 = v.1) (q : rew p in u.2 = v.2) *) (* : u = v := *) (* path_sigma {u:=(sigmaI _ _)} {v:=(sigmaI _ _)} eq_refl eq_refl := eq_refl. *) Definition eta_path_sigma A `{P : A -> Type} {u v : sigma A P} (p : u = v) : path_sigma (p..1) (p..2) = p := eta_path_sigma_uncurried p. Instance path_sigma_equiv {A : Type} (P : A -> Type) (u v : sigma A P): IsEquiv (path_sigma_uncurried u v). unshelve refine (BuildIsEquiv _ _ _ _ _ _ _). - exact (fun r => &(r..1 & r..2)). - intro. apply eta_path_sigma_uncurried. - destruct u, v; intros [p q]; simpl in *. destruct p. simpl in *. destruct q. reflexivity. - destruct u, v; intros [p q]; simpl in *; destruct p. simpl in *. destruct q; simpl in *. apply eq_refl. Defined. Definition path_sigma_equivalence {A : Type} (P : A -> Type) (u v : sigma A P): &{ p : u.1 = v.1 & u.2 =_{P;p} v.2 } <~> u = v. Proof. exists (path_sigma_uncurried u v). apply path_sigma_equiv. Defined. Module Telescopes. Inductive t : Type := | inj : Type -> t | ext A : (A -> t) -> t. Notation Tel := t. Delimit Scope telescope with telescope. Notation "[]" := (inj unit) : telescope. Bind Scope telescope with t. Example onetel := ext Type (fun A => ext nat (fun n => inj (vector A n))). Equations telescope (T : Tel) : Type := telescope (inj A) := A; telescope (ext A f) := sigma A (fun x => telescope (f x)). 
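(** [telescope] interprets a syntactic telescope as an iterated sigma type;
    for instance [onetel] above denotes, up to notations, triples of a type
    [A], a length [n] and a vector over them.  The check below is an added
    illustration only: *)
Eval compute in telescope onetel. (* added illustration: an iterated sigma of a type, a nat and a vector *)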
Coercion telescope : Tel >-> Sortclass. (** Accessors *) Equations nth_type (Δ : t) (t : Δ) (n : nat) : Type := nth_type (inj A) _ _ := A; nth_type (ext A f) _ 0 := A; nth_type (ext A f) (sigmaI t ts) (S n) := nth_type (f t) ts n. Equations nth_value (Δ : t) (t : Δ) (n : nat) : nth_type Δ t n := nth_value (inj A) a _ := a; nth_value (ext A f) (sigmaI t _) 0 := t; nth_value (ext A f) (sigmaI t ts) (S n) := nth_value (f t) ts n. (** Telescopic equality: an iterated sigma of dependent equalities *) Equations eq (Δ : Tel) (t s : Δ) : Tel := eq (inj A) a b := inj (a = b); eq (ext A f) (sigmaI t ts) (sigmaI s ss) := ext (t = s) (fun e : t = s => eq (f s) (rew e in ts) ss). Reserved Notation "x == y" (at level 70, y at next level, no associativity). Reserved Notation "x =={ Δ } y" (at level 70, y at next level, no associativity, format "x =={ Δ } '/ ' y"). Infix "==" := (eq _) : telescope. Definition eq_expl := eq. Infix "=={ Δ }" := (eq_expl Δ) : telescope. Equations refl {Δ : Tel} (t : telescope Δ) : eq Δ t t := refl {Δ:=(inj A)} a := eq_refl; refl {Δ:=(ext A f)} (sigmaI t ts) := &(eq_refl & refl ts). Local Open Scope telescope. Equations J {Δ : Tel} (r : Δ) (P : forall s : Δ, eq Δ r s -> Type) (p : P r (refl r)) (s : Δ) (e : eq _ r s) : P s e := J {Δ:=(inj A)} a P p b e := Top.J P p b e; J {Δ:=(ext A f)} (sigmaI r rs) P p (sigmaI s ss) (sigmaI e es) := Top.J (x:=r) (fun (s' : A) (e' : r = s') => forall (ss' : f s') (es' : eq (f s') (rewP e' at f in rs) ss'), P &(s' & ss') &(e' & es')) (fun ss' es' => J _ (fun ss'' (es'' : eq (f r) rs ss'') => P &(r & ss'') &(eq_refl & es'')) p ss' es') s e ss es. Lemma J_refl {Δ : Tel} (r : Δ) (P : forall s : Δ, eq Δ r s -> Type) (p : P r (refl r)) : J r P p r (refl r) = p. Proof. induction Δ. simpl. reflexivity. simpl. destruct r. refine (H pr1 pr2 _ _). Defined. Equations subst {Δ : Tel} (P : Δ -> Type) {u v : Δ} (e : u =={Δ} v) (p : P u) : P v := subst P e p := J u (fun v _ => P v) p v e. Equations cong {Δ : Tel} {T} (f : Δ -> T) (u v : Δ) (e : u =={Δ} v) : f u = f v := cong f u v e := J u (fun v _ => f u = f v) (@eq_refl T (f u)) v e. Notation "p ==_{ P ; e } q" := (subst P e p = q) (at level 70, q at next level, no associativity) : telescope. Reserved Notation "x =={ T ; e } y" (at level 70, y at next level, no associativity, only parsing, format "x =={ T ; e } '/ ' y"). Notation "x =={ P ; e } y" := (subst P e x == y) (only parsing) : telescope. Lemma eq_over_refl {Δ : Tel} {T} (f : forall x : Δ, T x) (u : Δ) : f u ==_{T;refl u} f u. Proof. unfold subst. refine (J_refl _ _ _). Defined. Equations dcong {Δ : Tel} {T} (f : forall x : Δ, T x) (u v : Δ) (e : u =={Δ} v) : f u ==_{T;e} f v := dcong f u v e := J u (fun v e => f u ==_{T;e} f v) (eq_over_refl f u) v e. Equations cong_tel {Δ : Tel} {Γ : Tel} (f : Δ -> Γ) (u v : Δ) (e : u =={Δ} v) : f u =={Γ} f v := cong_tel f u v e := J u (fun v _ => f u =={Γ} f v) (refl _) v e. Equations dcong_tel {Δ : Tel} {T : Δ -> Tel} (f : forall x : Δ, T x) {u v : Δ} (e : u =={Δ} v) : f u =={T;e} f v := dcong_tel f e := J u (fun v e => f u =={T;e} f v) _ v e. Next Obligation. clear. unfold subst. rewrite J_refl. apply refl. Defined. Notation "'tele' x .. y 'in' z " := (@ext _ (fun x => .. (@ext _ (fun y => inj z)) ..)) (at level 0, x binder, right associativity, z at level 60, format "'[' 'tele' '/ ' x .. y '/ ' 'in' '/ ' z ']'") : type_scope. (* Notation "'tele' x .. y " := (@ext _ (fun x => .. (@ext _ (fun y => inj)) ..)) *) (* (at level 0, x binder, right associativity, *) (* format "'[' 'tele' '/ ' x .. 
y ']'") *) (* : telescope. *) Local Open Scope telescope. Notation "'telei' x .. y 'in' z " := (@sigmaI _ _ x .. (@sigmaI _ _ y z) ..) (at level 0, right associativity, y at next level, format "'[' 'telei' '/ ' x .. y 'in' z ']'", only parsing) : telescope. Lemma solution {A} (t : A) : tele ( x : A ) in (x = t) <~> []. Proof. refine {| equiv a := tt |}. unshelve refine {| equiv_inv e := telei t in eq_refl |}. - red; intros. destruct x. reflexivity. - red; intros. destruct x. now destruct pr2. - intros [x eq]. revert t eq. refine (Top.J _ _). constructor. Defined. Equations eq_eq_equiv (Δ : Tel) (u v : Δ) (e : u = v) : u == v := eq_eq_equiv (inj A) a b e := e; eq_eq_equiv (ext A f) u v e := let p := equiv_inv (IsEquiv:=path_sigma_equiv _ u v) e in &(p.1 & eq_eq_equiv _ _ _ p.2). Equations extend_tele (Δ : t) (Γ : telescope Δ -> t) : t := extend_tele (inj A) Γ := ext A Γ; extend_tele (ext A f) Γ := ext A (fun a => extend_tele (f a) (fun fa => Γ &(a & fa))). (* "forall s : f t, (fun a : f t => Γ &(t & a)) s -> extend_tele (f t) (fun a : f t => Γ &(t & a))" == "forall s : f t, Γ &(t & s) -> extend_tele (f t) (fun a : f t => Γ &(t & a))" == telescope (extend_tele (f t) (fun fa : f t => Γ &(t & fa)) telescope ((fun a : A => extend_tele (f a) (fun fa : f a => Γ &(a & fa))) t)) "(fun x : A => telescope ((fun a : A => extend_tele (f a) (fun fa : f a => Γ &(a & fa))) x)) t"). *) Equations inj_extend_tel (Δ : t) (Γ : telescope Δ -> t) (s : Δ) (t : Γ s) : extend_tele Δ Γ := inj_extend_tel (inj A) Γ s t := &(s & t) ; inj_extend_tel (ext A f) Γ (sigmaI t ts) e := &(t & inj_extend_tel (f t) (fun fa => Γ &(t & fa)) ts e). Lemma reorder_tele (Δ : t) (Γ : telescope Δ -> t) : telescope (extend_tele Δ Γ) <~> tele (x : telescope Δ) in Γ x. Proof. unshelve econstructor. - induction Δ; simpl extend_tele in *; simpl; intros. trivial. simpl in Γ. specialize (X X0.1 _ X0.2). refine &(&(X0.1 & X.1)&X.2). - unshelve econstructor. + induction Δ; simpl extend_tele in *; intros; simpl in *; trivial. specialize (X X0.1.1). exists X0.1.1. apply X. exact &(X0.1.2 & X0.2). + red. intro. induction Δ; simpl. destruct x. constructor. destruct x. simpl. rewrite H. reflexivity. + red. intro. induction Δ; simpl. destruct x. constructor. destruct x. simpl. rewrite H. reflexivity. + apply axiom. Defined. Lemma eq_eq_equiv_refl {Δ : Tel} (u : Δ) : eq_eq_equiv Δ u u eq_refl = refl u. Proof. induction Δ; simp eq_eq_equiv. simpl. now rewrite H. Defined. Equations eq_eq_equiv_inv (Δ : Tel) (u v : Δ) (e : u == v) : u = v := eq_eq_equiv_inv (inj A) a b e := e; eq_eq_equiv_inv (ext A f) u v e := let e' := eq_eq_equiv_inv _ _ _ e.2 in equiv (path_sigma_equivalence _ u v) &(e.1 & e'). Lemma eq_eq_equiv_inv_refl (Δ : Tel) (u : Δ) : eq_eq_equiv_inv Δ u u (refl u) = eq_refl. Proof. induction Δ; simp eq_eq_equiv_inv. simpl. now rewrite H. Defined. Lemma sect : forall (Δ : Tel) (u v : Δ), Sect (eq_eq_equiv_inv Δ u v) (eq_eq_equiv Δ u v). Proof. induction Δ. simpl. intros. intro. constructor. intros u v. intros He. simpl in * |-. Opaque path_sigma_uncurried path_sigma path_sigma_equivalence path_sigma_equiv. pose proof (eissect (path_sigma_uncurried u v)). simpl. red in H0. Transparent path_sigma_uncurried path_sigma path_sigma_equivalence path_sigma_equiv. match goal with |- context[equiv _ ?x] => set (foo:=x) end. specialize (H0 foo). set (bar := (equiv_inv (equiv _ foo))) in *. change (bar = foo) in H0. symmetry in H0. unfold foo in H0. subst foo. clearbody bar. revert bar H0. refine (@Top.subst2 _ _ _ _). simpl. simpl. red in H. 
specialize (H _ _ _ He.2). destruct He. simpl. apply Top.cong. apply H. Defined. Require Import EqDecInstances. Typeclasses Transparent telescope. Transparent path_sigma_equiv path_sigma_uncurried. Lemma retr : forall (Δ : Tel) (u v : Δ), Sect (eq_eq_equiv Δ u v) (eq_eq_equiv_inv Δ u v). Proof. induction Δ. + simpl. intros. intro. constructor. + intros u v e. simpl. specialize (H v.1 (rew (equiv_inv (IsEquiv := path_sigma_equiv _ _ _) e).1 in u.2) v.2 (equiv_inv (IsEquiv := path_sigma_equiv _ _ _) e).2). set (foo := eq_eq_equiv_inv _ _ _ _) in *. symmetry in H. clearbody foo. revert foo H. refine (Top.subst2 _). refine (eisretr (path_sigma_uncurried u v) _). Defined. Lemma cong_iter {A B C} (f : A -> B) (g : B -> C) (x y : A) (e : x = y) : Top.cong g (Top.cong f e) = Top.cong (fun x => g (f x)) e. Proof. revert y e. refine (Top.J _ _). reflexivity. Qed. Lemma cong_subst2 {A B C} (f : C -> B) (x y : A) (e : x = y) (z w : A -> C) (p : z x = w x) : Top.cong f (Top.subst2 (P:=fun x : A => z x = w x) p y e) = Top.subst2 (P := fun x : A => f (z x) = f (w x)) (Top.cong f p) y e. Proof. revert y e. refine (Top.J _ _). simpl. reflexivity. Defined. Lemma eq_sym_dep {A} (x y : A) (P : x = y -> Type) (G : forall e : y = x, P (eq_sym e)) : forall e : x = y, P e. Proof. intros. destruct e. apply (G eq_refl). Defined. Global Instance eq_points_isequiv (Δ : Tel) (u v : Δ) : IsEquiv (eq_eq_equiv Δ u v) := {| equiv_inv := eq_eq_equiv_inv Δ u v |}. Proof. - apply sect. - apply retr. - revert v. induction Δ as [ | A t IH]. + refine (Top.J _ _). constructor. + simpl in u; refine (Top.J _ _). simpl. rewrite IH. set (r:=retr (t u.1) u.2 u.2 eq_refl) in *. set(lhs:=eq_eq_equiv_inv _ _ _ _) in *. clearbody r. clearbody lhs. revert r. refine (eq_sym_dep _ _ _ _). revert lhs. now refine (Top.J _ _). Defined. Definition eq_points_equiv (Δ : Tel) (u v : Δ) : u = v <~> u == v := {| equiv := eq_eq_equiv Δ u v |}. (** Necessary as the telescope structure is not easy for Coq to infer *) Global Hint Extern 0 (Equiv (?x = ?y) (telescope (eq ?Δ ?x' ?y'))) => exact (eq_points_equiv Δ x' y') : typeclass_instances. Definition NoConf := fun (A : Type) (x : &{ index : nat & vector A index}) => match x.2 with | Vector.nil => fun y : &{ index : nat & vector A index} => match y.2 with | Vector.nil => True | Vector.cons _ _ => False end | @Vector.cons _ h n x0 => fun y : &{ index : nat & vector A index} => match y.2 with | Vector.nil => False | @Vector.cons _ h0 n0 x1 => telei (h) (n) in (x0) = telei (h0) (n0) in (x1) :> tele (_ : A) (n : nat) in vector A n end end. Lemma noconf : forall (A : Type) (a b : &{ index : nat & vector A index}), a = b -> NoConf A a b. Proof. intros. destruct H. destruct a. simpl. destruct pr2. simpl. exact I. simpl. reflexivity. Defined. Lemma noconf_inv : forall (A : Type) (a b : &{ index : nat & vector A index}), NoConf A a b -> a = b. Proof. intros. destruct a, b. destruct pr2, pr3; try constructor || contradiction. simpl in H. NoConfusion.destruct_tele_eq H. reflexivity. Defined. Import NoConfusion. Global Instance noconf_isequiv A a b : IsEquiv (noconf A a b). Proof. unshelve refine {| equiv_inv := noconf_inv A a b |}. intro. - destruct_sigma a; destruct_sigma b; destruct a ; destruct b; simpl in * |-; on_last_hyp ltac:(fun id => destruct_tele_eq id || destruct id); solve [constructor]. - intro. solve_noconf_inv_equiv. - intros. destruct x. destruct a. destruct pr2; simpl; constructor. Defined. Definition noconf_equiv A a b : Equiv (a = b) (NoConf A a b) := {| equiv := noconf A a b |}. 
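(** The hints below let typeclass resolution find these equivalences
    automatically: equalities of points are mapped to telescopic equalities
    through [eq_points_equiv], and equalities of sigma-packed vectors to the
    no-confusion statement through [noconf_equiv]. *)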
Global Hint Extern 0 (@IsEquiv (?x = ?y) (telescope (eq ?Δ ?x' ?y')) _) => exact (@eq_points_isequiv Δ x' y') : typeclass_instances. Global Hint Extern 0 (@IsEquiv (?x = ?y) _ _) => exact (@noconf_isequiv _ x y) : typeclass_instances. Global Hint Extern 0 (@Equiv (?x = ?y) _) => exact (@noconf_equiv _ x y) : typeclass_instances. Arguments noconf_equiv : simpl never. Arguments noconf_isequiv : simpl never. Arguments equiv : simpl never. Arguments equiv_inv : simpl never. Notation "f ^-1" := (@equiv_inv _ _ f _) (at level 3). Infix "@" := eq_trans (at level 80). (** The composition of equivalences is an equivalence. *) Instance isequiv_compose A B f C g {E : @IsEquiv A B f} {E' : @IsEquiv B C g} : IsEquiv (compose g f) | 1000 := BuildIsEquiv A C (compose g f) (compose f^-1 g^-1) _ _ _ . Proof. exact (fun c => Top.cong g (eisretr f (g^-1 c)) @ eisretr g c). exact (fun a => Top.cong (f^-1) (eissect g (f a)) @ eissect f a). intro. simpl. apply axiom. Defined. Definition equiv_compose {A B C} (E : Equiv A B) (E' : Equiv B C) : Equiv A C := Build_Equiv A C (compose (@equiv _ _ E') (@equiv _ _ E)) _. Definition injectivity_cons {A} (u v : tele (x : A) (n : nat) in vector A n) : (&(S u.2.1 & @Vector.cons A u.1 u.2.1 u.2.2) = &(S v.2.1 & @Vector.cons A v.1 v.2.1 v.2.2)) <~> u == v := equiv_compose (noconf_equiv A &(S u.2.1 & @Vector.cons A u.1 u.2.1 u.2.2) &(S v.2.1 & @Vector.cons A v.1 v.2.1 v.2.2)) (eq_points_equiv (tele (x : A) (n : nat) in vector A n) _ _). Definition square_tel {Δ : Tel} {w x y z : Δ} (t : w =={Δ} x) (b : y == z) (l : w == y) (r : x == z) : Tel := subst (fun x : Δ => x == y) t l =={fun y => x == y;b} r. Definition flip_square_tel {A} {w x y z} {t b l r} (s : @square_tel A w x y z t b l r) : square_tel l r t b. Proof. revert x t b l r s. refine (J _ _ _). revert z. refine (J _ _ _). intros l r s. unfold square_tel in s. simpl in s. unfold subst in s. revert r s. refine (J _ _ _). revert y l. refine (J _ _ _). unfold square_tel. unfold subst. rewrite 3!J_refl. apply refl. Defined. Instance flip_square_tel_isequiv {A : Tel} {w x y z : A} {t b l r} : IsEquiv (@flip_square_tel A w x y z t b l r). Proof. unshelve refine {| equiv_inv := _ |}. - revert x t0 l r b. refine (J _ _ _). revert y. refine (J _ _ _). revert z. refine (J _ _ _). intros b s. unfold square_tel in s. simpl in s. unfold subst in s. revert b s. refine (J _ _ _). unfold square_tel. simpl. unfold subst. rewrite !J_refl. apply refl. (* Not shown coherent! Many J applications... *) - apply axiom. - apply axiom. - apply axiom. Defined. Definition flip_square_tel_equiv {A : Tel} {w x y z : A} {t b l r} : Equiv (@square_tel A w x y z t b l r) (square_tel l r t b) := {| equiv := @flip_square_tel A w x y z t b l r |}. End Telescopes. Module Example_cons. Lemma lower_solution : forall A n, forall (x : &{ a : nat & &{ _ : A & &{ _ : vector A a & S a = S n}}}) (P : forall n0 w : nat, A -> vector A w -> S x.1 = S n0 -> Type), (P x.1 x.1 x.2.1 x.2.2.1 eq_refl) -> P n x.1 (x.2).1 ((x.2).2).1 ((x.2).2).2. Proof. intros A n. curry. intros. revert H. simplify_dep_elim. simplify_dep_elim. simpl. trivial. Defined. Notation " 'rewP' H 'at' B 'in' c " := (@Top.subst _ _ B _ H c) (at level 20, only parsing). Definition square {A} {w x y z : A} (t : w = x) (b : y = z) (l : w = y) (r : x = z) : Type := rewP t at (fun x => x = y) in l =_{_;b} r. Definition flip_square {A} {w x y z} {t b l r} (s : @square A w x y z t b l r) : square l r t b. Proof. destruct t. destruct b. unfold square in s. simpl in s. destruct s. unfold square. destruct l. 
constructor. Defined. Instance flip_square_isequiv {A} {w x y z} {t b l r} : IsEquiv (@flip_square A w x y z t b l r). Proof. unshelve refine {| equiv_inv := _ |}. destruct t, l, r. unfold square. simpl. intros. simpl in H. destruct H. reflexivity. red. intros. destruct t, l, r. red in x0. simpl in x0. destruct x0. reflexivity. red. intros. destruct t, b, l. red in x0. simpl in x0. destruct x0. reflexivity. unfold square. intros. destruct t, b, l, x0. simpl. reflexivity. Defined. Definition flip_square_equiv {A} {w x y z : A} {t b l r} : Equiv (@square A w x y z t b l r) (square l r t b) := {| equiv := flip_square |}. Import Telescopes. Lemma inj_dep {A} (P : A -> Type) (G : forall e : inj A, P e) : forall e : A, P e. Proof. apply G. Defined. Polymorphic Definition pr1_seq {A} {P : A -> Type} {p q : sigma A P} (e : p = q) : p.1 = q.1. Proof. destruct e. apply eq_refl. Defined. Notation " 'rew' H 'in' c " := (@eq_rect _ _ _ c _ H) (at level 20). Polymorphic Definition pr2_seq {A} {P : A -> Type} {p q : sigma A P} (e : p = q) : rew (pr1_seq e) in p.2 = q.2. Proof. destruct e. apply eq_refl. Defined. Polymorphic Definition rewh {A : Type} {B : A -> Type} {x : A} {p q : B x} (e : &(x & p) = &(x & q)) (e' : pr1_seq e = eq_refl) : p = q := (@eq_rect _ (pr1_seq e) (fun f : x = x => rew f in p = q) (pr2_seq e) eq_refl e'). Polymorphic Lemma solution_inv {A : Type} (B : A -> Type) (x : A) (p q : B x) (G : p = q -> Type) : (forall (e : &(x & p) = &(x & q)) (e' : pr1_seq e = eq_refl), G (rewh e e')) -> (forall e : p = q, G e). Proof. intros H. intros e. destruct e. specialize (H eq_refl eq_refl). simpl in H. apply H. Defined. Definition uncurry {A} {B : A -> Type} {C : forall x : A, B x -> Type} (f : forall s : &{ x : A & B x }, C s.1 s.2) : forall (x : A) (b : B x), C x b := fun x b => f &(x & b). Lemma rewrite_in {A} {x y z : A} (e : x = y) (e' : x = z) : y = z. Proof. destruct e. apply e'. Defined. Lemma rewrite_inr {A} {x y z : A} (e : x = y) (e' : y = z) : x = z. Proof. destruct e. apply e'. Defined. Open Scope telescope. Lemma cong_equiv_inv (Δ : Tel) (T : Type) (f : Δ -> T) (u v : Δ) : IsEquiv f -> f u = f v -> u =={Δ} v. Proof. intros. induction Δ; simpl; simpl in *. + apply (Top.cong equiv_inv) in H. transitivity (f ^-1 (f u)). symmetry. apply (eissect f u). transitivity (f ^-1 (f v)). apply H. apply (eissect f v). + assert (u = v). transitivity (f ^-1 (f u)). symmetry. apply (eissect f u). transitivity (f ^-1 (f v)). now apply (Top.cong equiv_inv) in H. apply (eissect f v). apply (equiv_inv (f:=path_sigma_uncurried u v)) in H0. exists H0.1. set (f' := fun v2 : t0 v.1 => f &(v.1 & v2)). apply (X0 _ f'). apply axiom. subst f'. simpl. apply Top.cong. apply Top.cong. exact H0.2. Defined. Instance cong_is_equiv (Δ : Tel) (T : Type) (f : Δ -> T) (u v : Δ) (I : IsEquiv f) : IsEquiv (cong f u v) := { equiv_inv := _ }. Proof. intros. - eapply cong_equiv_inv; eauto. - red. intros x. induction Δ. simpl. apply axiom. simpl. apply axiom. - apply axiom. - apply axiom. Defined. Definition cong_equiv (Δ : Tel) (u v : Δ) (T : Type) (f : Δ -> T) (E : IsEquiv f) : u =={Δ} v <~> f u = f v := {| equiv := cong f u v |}. Notation "'telei' x .. y 'in' z " := (@sigmaI _ _ x .. (@sigmaI _ _ y z) ..) (at level 0, right associativity, y at next level, format "'[' 'telei' '/ ' x .. y 'in' z ']'", only parsing) : telescope. Require Import Utf8. Notation " a '={' P ; e } b " := (Top.subst (P:=P) e a = b) (at level 90). Notation " a '==={' P ; e } b " := (subst P _ _ e a = b) (at level 90, only parsing) : telescope. 
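(** Transport along a congruence: rewriting with [Top.cong f e] in [P] is
    equivalent to rewriting with [e] in the composed family
    [fun x => P (f x)]; the dependent and telescopic variants follow. *)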
Lemma equiv_cong_subst {A B} (P : B -> Type) (f : A -> B) (s t : A) (e : s = t) (u : P (f s)) (v : P (f t)) : u =_{(fun x => P (f x)); e} v <~> (u =_{P; Top.cong f e} v). Proof. unfold Top.subst. destruct e. simpl. apply equiv_id. Defined. Lemma equiv_cong_subst_dep {A} {B : A -> Type} (P : forall x : A, B x -> Type) (f : forall x : A, B x) (s t : A) (e : s = t) (u : P s (f s)) (v : P t (f t)) : u =_{(fun x => P x (f x)); e} v <~> (Top.J (fun y e => P y (rew e in (f s))) u _ e =_{(fun x => P _ x); Top.congd f e} v). Proof. unfold Top.subst. destruct e. simpl. apply equiv_id. Defined. Lemma equiv_cong_subst_tel {Δ Γ : Tel} (P : Γ -> Tel) (f : Δ -> Γ) (s t : Δ) (e : s =={Δ} t) (u : P (f s)) (v : P (f t)) : subst P (cong_tel f _ _ e) u = subst (fun x => P (f x)) e u. Proof. unfold subst. revert t e v. refine (J _ _ _). intros. rewrite J_refl. unfold cong_tel. simpl. rewrite !J_refl. reflexivity. Defined. Lemma equiv_tele_l {A} {A'} {B : A' -> Type} (e : Equiv A A') : tele (x : A) in B (equiv e x) <~> tele (x : A') in B x. Proof. simpl. unshelve refine {| equiv a := &(e a.1 & _) |}. exact a.2. unshelve refine {| equiv_inv a := &(e ^-1 a.1 & _) |}. destruct a. simpl. rewrite eisretr. exact pr2. red; intros. simpl. destruct x. simpl. pose (eisretr e pr1). apply path_sigma_uncurried. simpl. exists e0. simpl. unfold eq_rect_r. clearbody e0. apply axiom. apply axiom. apply axiom. (* apply eisretr. *) (* red; intros. simpl. destruct x. simpl. apply Top.cong. *) (* apply eissect. *) (* intros [x bx]. *) (* simpl. rewrite eisadj. simpl. *) (* destruct (eissect (e x) bx). simpl. reflexivity. *) Defined. Lemma equiv_tele_r {A} {B B' : A -> Type} (e : forall x : A, Equiv (B x) (B' x)) : tele (x : A) in B x <~> tele (x : A) in (B' x). Proof. simpl. unshelve refine {| equiv a := &(a.1 & e a.1 a.2) |}. unshelve refine {| equiv_inv a := &(a.1 & inv_equiv (e a.1) a.2) |}. red; intros. simpl. destruct x. simpl. apply Top.cong. apply eisretr. red; intros. simpl. destruct x. simpl. apply Top.cong. apply eissect. intros [x bx]. simpl. rewrite eisadj. simpl. destruct (eissect (e x) bx). simpl. reflexivity. Defined. (* Definition lifted_solution_lhs_type (Γ : Tel) (u v : Γ) *) (* (Δ : Γ -> Tel) *) (* (a b : forall ρ, Δ ρ) *) (* (eqΔ := λ ρ, a ρ =={Δ ρ} b ρ) *) (* (r : eqΔ u) (s : eqΔ v) := *) (* tele (e : u =={Γ} v) in *) (* let lhs := dcong a u v e in *) (* let rhs := dcong b u v e in *) (* let lhs' := *) (* subst (fun au : Δ u => au ==_{λ ρ', Δ ρ'; e} a v) (a u) (b u) r lhs in *) (* let lhs'' := *) (* subst (fun bv : Δ v => b u ==_{λ ρ', Δ ρ'; e} bv) (a v) (b v) s lhs' in *) (* lhs'' = rhs. *) (* Definition lifted_solution_lhs_type (Γ : Tel) (u v : Γ) *) (* (Δ : Γ -> Tel) *) (* (a b : forall ρ, Δ ρ) *) (* (eqΔ := λ ρ, a ρ =={Δ ρ} b ρ) *) (* (r : eqΔ u) (s : eqΔ v) := *) (* tele (e : u =={Γ} v) in *) (* let lhs := dcong a u v e in *) (* let rhs := dcong b u v e in *) (* square (cong (subst (λ ρ : Γ, Δ ρ) u v e) _ _ r) *) (* (inv_equiv (eq_points_equiv (Δ v) (a v) (b v)) s) *) (* lhs rhs. *) Definition lifted_solution_lhs_type (Γ : Tel) (u v : Γ) (Δ : Γ -> Tel) (a b : forall ρ, Δ ρ) (eqΔ := λ ρ, a ρ =={Δ ρ} b ρ) (r : eqΔ u) (s : eqΔ v) := tele (e : u =={Γ} v) in let lhs := dcong_tel a e in let rhs := dcong_tel b e in square (cong (subst Δ e) _ _ r) (inv_equiv (eq_points_equiv (Δ v) (a v) (b v)) s) (inv_equiv (eq_points_equiv _ _ _) lhs) (inv_equiv (eq_points_equiv _ _ _) rhs). 
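(** [lifted_solution_lhs_type] above packages the source of the lifted
    solution: a telescopic equality [e : u =={Γ} v] together with a square
    relating the dependent congruences of [a] and [b] along [e] to [r]
    and [s]. *)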
(** This is the square we get (almost) by applying congruence *) Definition dep_square {Γ : Tel} (Δ : Γ -> Tel) u v (e : u =={Γ} v) (a b : forall ρ, Δ ρ) (eqΔ := λ ρ, a ρ =={Δ ρ} b ρ) (r : eqΔ u) (s : eqΔ v) := (subst (fun y => telescope (b u =={Δ;e} y)) s (subst (fun y => telescope (y =={Δ;e} a v)) r (dcong_tel a e)) =={b u =={Δ;e} b v} (dcong_tel b e)). (** This is the square we want: *) Definition theorem_square {Γ : Tel} (Δ : Γ -> Tel) u v (e : u =={Γ} v) (a b : forall ρ, Δ ρ) (r : a u =={Δ u} b u) (s : a v =={Δ v} b v) := (* subst (λ x : Δ v, x == subst Δ e (b u)) (dcong_tel a e) *) (* (cong_tel (subst Δ e) (a u) (b u) r) *) (* =={λ y : Δ v, a v == y; dcong_tel b e} s. *) square_tel (dcong_tel a e) (dcong_tel b e) (cong_tel (subst (λ ρ : Γ, Δ ρ) e) _ _ r) s. Definition lifted_solution_lhs_tel_type (Γ : Tel) (u v : Γ) (Δ : Γ -> Tel) (a b : forall ρ, Δ ρ) (eqΔ := λ ρ, a ρ =={Δ ρ} b ρ) (r : eqΔ u) (s : eqΔ v) := tele (e : u =={Γ} v) in theorem_square Δ u v e a b r s. (* Definition lifted_solution_lhs_tel_type (Γ : Tel) (u v : Γ) *) (* (Δ : Γ -> Tel) *) (* (a b : forall ρ, Δ ρ) *) (* (eqΔ := λ ρ, a ρ =={Δ ρ} b ρ) *) (* (r : eqΔ u) (s : eqΔ v) := *) (* tele (e : u =={Γ} v) in *) (* let lhs := dcong_tel a e : a u =={Δ;e} a v in *) (* let rhs := dcong_tel b e in *) (* (subst (fun y => telescope (b u =={Δ;e} y)) s *) (* (subst (fun y => telescope (y =={Δ;e} a v)) r (dcong_tel a e)) *) (* =={b u =={Δ;e} b v} dcong_tel b e). *) (* @square_tel (Δ v) _ _ _ _ (cong_tel (subst (λ ρ : Γ, Δ ρ) e) _ _ r) s lhs rhs. *) Definition transport_u {Δ : Tel} (Γ : Δ -> Tel) (f : forall x : Δ, Γ x) (P : forall (x : Δ), Γ x -> Type) (s : Δ) (u : P _ (f s)) : P s (subst (fun x => telescope (Γ x)) (refl s) (f s)). Proof. unfold subst. rewrite J_refl. exact u. Defined. Lemma equiv_cong_subst_tel_dep {Δ : Tel} (Γ : Δ -> Tel) (f : forall x : Δ, Γ x) (P : forall (x : Δ), Γ x -> Type) (s t : Δ) (e : s =={Δ} t) (u : P _ (f s)) (v : P t (f t)) : u ==_{(fun x => P x (f x)); e} v <~> (J _ (fun y e => P y (subst _ e (f s))) (transport_u Γ f P s u) _ e ==_{(fun x => P _ x); dcong_tel f e} v). Proof. unfold subst. revert t e v. refine (J _ _ _). intros. rewrite J_refl. unfold transport_u. unfold eq_rect_r. simpl. Admitted. (* Not so sure this is provable *) Definition lifted_solution_rhs_type (Γ : Tel) (u v : Γ) (Γ' : Tel) (Δ : Γ -> Tel) (a b : forall ρ, Δ ρ) (eqΔ := λ ρ, a ρ =={Δ ρ} b ρ) (r : a u =={Δ u} b u) (s : a v =={Δ v} b v) (f : extend_tele Γ (fun ρ => eq (Δ ρ) (a ρ) (b ρ)) <~> Γ') := f (inj_extend_tel Γ eqΔ u r) =={Γ'} f (inj_extend_tel Γ eqΔ v s). Definition lifted_solution (Γ : Tel) (u v : Γ) (Γ' : Tel) (Δ : Γ -> Tel) (a b : forall ρ, Δ ρ) (eqΔ := λ ρ, a ρ =={Δ ρ} b ρ) (r : eqΔ u) (s : eqΔ v) (f : extend_tele Γ eqΔ <~> Γ') : lifted_solution_lhs_type Γ u v Δ a b r s <~> lifted_solution_rhs_type Γ u v Γ' Δ a b r s f. Proof. unfold lifted_solution_rhs_type. unfold lifted_solution_lhs_type. refine (equiv_compose _ _). unshelve refine (@equiv_tele_r _ _ _ _). intros x. set(lhs:=dcong_tel a x). set(rhs:=dcong_tel b x). unfold eqΔ in r, s. exact (@square _ _ _ _ _ (inv_equiv (eq_points_equiv _ _ _) lhs) (inv_equiv (eq_points_equiv _ _ _) rhs) (cong _ _ _ r) (inv_equiv (eq_points_equiv (Δ v) (a v) (b v)) s) ). simpl. intros x. apply flip_square_equiv. pose (cong_equiv (extend_tele Γ eqΔ) (inj_extend_tel Γ eqΔ u r) (inj_extend_tel Γ eqΔ v s) _ f _). refine (equiv_compose _ _). Focus 2. refine (eq_points_equiv _ _ _). refine (equiv_compose _ _). Focus 2. refine e. refine (equiv_compose _ _). refine (equiv_sym _). 
cbv beta zeta. refine (reorder_tele (u =={Γ} v) (fun x => inj (square (inv_equiv (eq_points_equiv (Δ v) (subst (λ x0 : Γ, Δ x0) x (a u)) (a v)) (dcong_tel a x)) (inv_equiv (eq_points_equiv (Δ v) (subst (λ x0 : Γ, Δ x0) x (b u)) (b v)) (dcong_tel b x)) (cong (subst (λ x0 : Γ, Δ x0) x) (a u) (b u) r) (inv_equiv (eq_points_equiv (Δ v) (a v) (b v)) s)))). clear. unfold eqΔ. simpl. induction Γ. - simpl extend_tele. simpl inj_extend_tel. refine (equiv_tele_r _). intros x. revert v x s. refine (Top.J _ _). intros s. simpl. unfold square. unfold dcong_tel. simpl. unfold Telescopes.dcong_tel_obligation_1. simpl. subst eqΔ. simpl in *. unfold subst. simpl. unfold eq_over_refl. simpl. unfold eq_rect_r. simpl. set (au := a u) in *. set (bu := b u) in *. unfold dcong_tel. simpl. clearbody bu. clearbody au. revert bu r s. refine (J _ _ _). intros s. unfold cong. rewrite J_refl. unfold inv_equiv, eq_points_equiv. unfold equiv_inv. simpl. (* Rew equivalence *) rewrite eq_eq_equiv_inv_refl. simpl. unshelve refine {| equiv a := _ |}. apply (Top.cong (equiv (eq_points_equiv (Δ u) au au))) in a. unfold equiv in a. simpl in a. unfold inv_equiv in a. simpl in a. rewrite eq_eq_equiv_refl in a. symmetry in a. destruct a. pose proof (eisretr (eq_eq_equiv (Δ u) au au)). red in H. specialize (H s). rewrite H. apply refl. unshelve refine {| equiv_inv a := _ |}. revert s a. refine (J _ _ _). symmetry. apply eq_eq_equiv_inv_refl. + red. intros. revert s x. refine (J _ _ _). simpl. unfold eq_sym, Logic.eq_ind. simpl. apply axiom. + apply axiom. + apply axiom. - apply axiom. (* apply eq_eq_equiv. *) (* unfold Top.subst. simp. *) (* apply refl. *) (* unshelve refine {| equiv_inv a := _ |}. *) (* unfold eq_expl in *. *) (* revert s a. refine (J _ _ _). *) (* symmetry. apply eq_eq_equiv_inv_refl. *) (* - red; intros. *) (* unfold eq_expl in *. *) (* revert s x. refine (J _ _ _). *) (* unfold Logic.eq_ind. *) (* rewrite J_refl. *) (* unfold inv_equiv_equiv. *) (* apply axiom. *) (* - apply axiom. *) (* - apply axiom. *) (* - simpl. *) (* refine (equiv_tele_r _). destruct u, v. intros ->. *) (* simpl. apply axiom. *) Defined. Lemma inj_extend_tel_tele (Γ : Tel) (Δ : Γ -> Tel) u v a b (f := fun ρ => a ρ =={Δ ρ} b ρ) (r : f u) (s : f v) : inj_extend_tel Γ f u r =={extend_tele Γ f} inj_extend_tel Γ f v s <~> tele (x : u =={Γ} v) in (subst (fun y => telescope (b u =={Δ;x} y)) s (subst (fun y => telescope (y =={Δ;x} a v)) r (dcong_tel a x)) =={b u =={Δ;x} b v} dcong_tel b x). Proof. Admitted. (* square_tel (dcong_tel a x) (dcong_tel b x) (cong_tel (subst Δ x) _ _ r) s. *) (* Definition lifted_solution' (Γ : Tel) (u v : Γ) (Γ' : Tel) *) (* (Δ : Γ -> Tel) *) (* (a b : forall ρ, Δ ρ) *) (* (eqΔ := λ ρ, a ρ =={Δ ρ} b ρ) *) (* (r : eqΔ u) (s : eqΔ v) *) (* (f : extend_tele Γ eqΔ <~> Γ') : *) (* lifted_solution_lhs_tel_type Γ u v Δ a b r s <~> *) (* lifted_solution_rhs_type Γ u v Γ' Δ a b r s f. *) (* Proof. *) (* unfold lifted_solution_rhs_type. *) (* unfold lifted_solution_lhs_tel_type. *) (* refine (equiv_compose _ _). *) (* refine (equiv_sym _). *) (* refine (equiv_compose _ _). *) (* refine (inj_extend_tel_tele Γ Δ u v a b r s). *) (* refine (equiv_tele_r _). *) (* intros x. *) (* unfold square_tel. *) (* simpl. *) (* unfold theorem_square. *) (* unfold square_tel. *) (* set (lhs := dcong_tel a x). *) (* set (rhs := dcong_tel b x). *) (* pose (@equiv_cong_subst_tel (Δ v)). *) (* Unset Printing Notations. unfold eq_expl. *) (* Close Scope telescope. *) (* Unset Printing Notations. unfold eq_expl. 
*) (* specialize (e *) (* (inj &{ u : Δ v & Δ v }) *) (* (fun y => (eq (Δ v) y.1 y.2)) *) (* (fun x => &(a v & x)) *) (* (subst _ x (b u)) *) (* (b v) *) (* (dcong_tel b x) lhs s). *) (* pose (cong_equiv (extend_tele Γ eqΔ) (inj_extend_tel Γ eqΔ u r) (inj_extend_tel Γ eqΔ v s) _ f _). *) (* set(lhs:=dcong_tel a x). *) (* set(rhs:=dcong_tel b x). *) (* unfold eqΔ in r, s. *) (* exact (@square_tel _ _ _ _ _ lhs rhs (cong_tel _ _ _ r) s). *) (* intros x. (* Anomaly without simpl *) simpl. *) (* apply flip_square_tel_equiv. *) (* refine (equiv_compose _ _). *) (* Focus 2. *) (* refine (eq_points_equiv _ _ _). *) (* refine (equiv_compose _ _). Focus 2. *) (* refine e. *) (* refine (equiv_compose _ _). *) (* refine (equiv_tele_r _). *) (* intro. *) (* refine (equiv_compose _ _). *) (* simpl. *) (* unfold square_tel. *) (* unfold eqΔ in r, s. *) (* pose (@equiv_cong_subst_tel). (Δ v) (inj &{ u : Δ v & Δ v }) *) (* pose (r =={cong_tel } s). *) (* (* P (f v) = a v =={Δ v} b v *) *) (* Check (dcong_tel b u v x). *) (* set (lhs:= subst _ _ (a v) _ _). *) (* pose (@equiv_cong_subst_tel (Δ v) (inj &{ u : Δ v & Δ v }) *) (* (fun y => (eq (Δ v) y.1 y.2)) *) (* (fun x => &(a v & x)) *) (* (subst _ _ _ x (b u)) *) (* (b v) *) (* (dcong_tel b u v x) lhs s). *) (* Unshelve. 4:intro x. simpl. *) (* refine e0. simpl. *) (* refine (equiv_id _). *) (* refine (equiv_compose _ _). *) (* refine (equiv_sym _). *) (* cbv beta zeta. *) (* refine (reorder_tele (u =={Γ} v) (fun x => _)). *) (* clear. *) (* induction Γ. *) (* - simpl extend_tele. *) (* simpl inj_extend_tel. *) (* refine (equiv_tele_r _). intros x. revert v x s. refine (Top.J _ _). intros s. *) (* simpl. unfold square_tel. unfold dcong_tel. simpl. *) (* unfold Telescopes.dcong_tel_obligation_1. simpl. unfold eq_rect_r. *) (* simpl. *) (* subst eqΔ. simpl in *. *) (* unfold subst. simpl. *) (* rewrite J_refl. simpl. *) (* unfold cong_tel. rewrite J_refl. simpl. *) (* set (au := a u) in *. *) (* set (bu := b u) in *. *) (* unfold dcong_tel. simpl. *) (* clearbody bu. clearbody au. *) (* revert bu r s. *) (* refine (J _ _ _). intros s. *) (* rewrite J_refl. apply equiv_id. *) (* - simpl. refine (equiv_tele_r _). intros. *) (* destruct v. simpl in *. subst eqΔ. simpl in *. *) (* revert pr1 x pr2 s. *) (* refine (Top.J _ _). *) (* simpl. intros. specialize (X u.1 u.2 pr2). *) (* specialize (X (fun ρ => Δ &(u.1 & ρ))). *) (* simpl in X. specialize (X (fun ρ => a &(u.1 & ρ)) *) (* (fun ρ => b &(u.1 & ρ))). *) (* simpl in X. destruct u. simpl in *. *) (* specialize (X r s). *) (* apply X. *) (* Defined. *) Definition lifted_solution' (Γ : Tel) (u v : Γ) (Γ' : Tel) (Δ : Γ -> Tel) (a b : forall ρ, Δ ρ) (eqΔ := λ ρ, a ρ =={Δ ρ} b ρ) (r : eqΔ u) (s : eqΔ v) (f : extend_tele Γ eqΔ <~> Γ') : lifted_solution_lhs_tel_type Γ u v Δ a b r s <~> lifted_solution_rhs_type Γ u v Γ' Δ a b r s f. Proof. unfold lifted_solution_rhs_type. unfold lifted_solution_lhs_tel_type. refine (equiv_compose _ _). unshelve refine (@equiv_tele_r _ _ _ _). intros x. set(lhs:=dcong_tel a x). set(rhs:=dcong_tel b x). unfold eqΔ in r, s. exact (@square_tel _ _ _ _ _ (cong_tel _ _ _ r) s lhs rhs ). intros x. (* Anomaly without simpl *) simpl. unfold theorem_square. apply flip_square_tel_equiv. refine (equiv_compose _ _). Focus 2. refine (eq_points_equiv _ _ _). refine (equiv_compose _ _). Focus 2. refine (cong_equiv (extend_tele Γ eqΔ) (inj_extend_tel Γ eqΔ u r) (inj_extend_tel Γ eqΔ v s) _ f _). refine (equiv_compose _ _). refine (equiv_tele_r _). intro. refine (equiv_compose _ _). simpl. 
unfold square_tel. unfold eqΔ in r, s. set (lhs := dcong_tel a x). set (rhs := dcong_tel b x). pose (@equiv_cong_subst_tel (Δ u) (Δ v) (fun x => x == a v) (subst Δ x) (a u) (b u) r lhs). simpl in e. specialize (e (subst (λ x0 : Δ u, subst (λ x : Γ, Δ x) x x0 == a v) r lhs)). rewrite e. refine (equiv_id _). refine (equiv_id _). refine (equiv_compose _ _). refine (equiv_sym _). cbv beta zeta. refine (reorder_tele (u =={Γ} v) (fun x => _)). clear f. induction Γ. Transparent telescope eq. - simpl extend_tele. simpl inj_extend_tel. refine (equiv_tele_r _). intros x. revert v x s. refine (Top.J _ _). intros s. simpl. unfold square_tel. unfold dcong_tel. simpl. unfold Telescopes.dcong_tel_obligation_1. simpl. unfold eq_rect_r. simpl. subst eqΔ. simpl in *. unfold subst. simpl. set (au := a u) in *. set (bu := b u) in *. clearbody bu. clearbody au. revert bu r s. refine (J _ _ _). intros s. rewrite J_refl. apply axiom. - simpl. refine (equiv_tele_r _). intros. destruct v. simpl in *. subst eqΔ. simpl in *. revert pr1 x pr2 s. refine (Top.J _ _). simpl. intros. specialize (X u.1 u.2 pr2). specialize (X (fun ρ => Δ &(u.1 & ρ))). simpl in X. specialize (X (fun ρ => a &(u.1 & ρ)) (fun ρ => b &(u.1 & ρ))). simpl in X. destruct u. simpl in *. specialize (X r s). apply X. Defined. Lemma lower_solution' : forall A n, tele (x' : A) (n' : nat) (v : vector A n') in (S n' = S n) <~> tele (x : A) in vector A n. Proof. intros A n. unshelve refine {| equiv a := _ |}. refine &(a.1 & _). destruct a. destruct pr2. destruct pr2. simpl in pr3. noconf pr3. exact pr2. unshelve refine {| equiv_inv a := _ |}. refine &(a.1, n & _). refine &(a.2 & eq_refl). apply axiom. apply axiom. apply axiom. Defined. Definition telu A := tele (x' : A) (n' : nat) in vector A n'. Definition telv A n := tele (x : A) in vector A n. Lemma apply_equiv_dom {A B} (P : B -> Type) (e : Equiv A B) : (forall x : A, P (equiv e x)) -> forall x : B, P x. Proof. intros. specialize (X (e ^-1 x)). rewrite inv_equiv_equiv in X. exact X. Defined. Polymorphic Lemma equiv_switch_indep {A : Type} {B : Type} : (tele (_ : A) in B <~> tele (_ : B) in A). Proof. unshelve refine {| equiv a := _ |}. simpl. exact &(a.2 & a.1). unshelve refine {| equiv_inv a := _ |}. simpl. exact &(a.2 & a.1). - intro a. simpl. reflexivity. - intro a. simpl. reflexivity. - intro a. simpl. reflexivity. Defined. Polymorphic Lemma equiv_elim_unit {A : Type} : (tele (_ : []) in A) <~> inj A. Proof. unshelve refine {| equiv a := _ |}. simpl. exact a.2. unshelve refine {| equiv_inv a := _ |}. simpl. exact &(tt & a). - intro a. simpl. reflexivity. - intros [[ ] t]. simpl. reflexivity. - intros [[ ] t]. simpl. reflexivity. Defined. Arguments telescope : simpl never. Polymorphic Lemma solution_inv_tele {A : Type} (B : A -> Type) (x : A) (p q : B x) : (p = q <~> tele (e : x = x) (e' : p =_{B;e} q) in (e = eq_refl)). Proof. refine (equiv_compose (B:=tele (e : x = x) (e' : e = eq_refl) in (p =_{B;e} q)) _ _). all:cycle 1. refine (equiv_tele_r _); intro e. refine (equiv_switch_indep). refine (equiv_sym _). refine (equiv_compose _ _). refine (reorder_tele (tele (e : x = x) in (e = eq_refl)) (fun ρ => inj (p ={B;ρ.1} q))). simpl. refine (equiv_compose _ _). refine (equiv_sym (equiv_tele_l _)). refine (equiv_sym _). refine (@solution (x = x) eq_refl). simpl. refine equiv_elim_unit. Defined. 
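(** Added descriptive note: [NoConf] below is the no-confusion predicate for vectors packed with their length index. It relates two packed vectors to [unit] when both are [nil], to [False] when their head constructors differ, and, in the [cons]/[cons] case, to the telescopic equality of their heads, lengths and tails. The maps [noconf] and [noconf_inv] below witness that this predicate is equivalent to telescopic equality of the packed vectors. *)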
Definition NoConf := fun (A : Type) (x : &{ index : nat & vector A index}) => match x.2 with | Vector.nil => fun y : &{ index : nat & vector A index} => match y.2 with | Vector.nil => inj unit | Vector.cons _ _ => inj False end | @Vector.cons _ h n x0 => fun y : &{ index : nat & vector A index} => match y.2 with | Vector.nil => inj False | @Vector.cons _ h0 n0 x1 => telei (h) (n) in (x0) =={tele (x : A) (n : nat) in vector A n} telei (h0) (n0) in (x1) end end. Lemma noconf : forall (A : Type) (a b : &{ index : nat & vector A index}), a =={ext nat (fun n => inj (vector A n))} b -> NoConf A a b. Proof. intros. destruct X. destruct a, b, pr1, pr2. simpl. destruct pr3. simpl. simpl. exact tt. simpl. exists eq_refl. exists eq_refl. simpl. constructor. Defined. Lemma noconf_inv : forall (A : Type) (a b : &{ index : nat & vector A index}), NoConf A a b -> a =={ext nat (fun n => inj (vector A n))} b. Proof. intros. destruct a, b. destruct pr2, pr3; try constructor || contradiction. simpl in X. exists eq_refl. constructor. unfold NoConf in X. cbv beta iota delta -[telescope eq_expl] in X. apply (@cong_tel (tele (x : A) (n : nat) in (vector A n)) (tele (n1 : nat) in vector A n1) (fun x => &(S x.2.1 & Vector.cons x.1 x.2.2)) _ _ X). Defined. Import NoConfusion. Global Instance noconf_isequiv A a b : IsEquiv (noconf A a b). Proof. unshelve refine {| equiv_inv := noconf_inv A a b |}. intro. - destruct_sigma a; destruct_sigma b. destruct a ; destruct b; simpl in * |-. simpl. on_last_hyp ltac:(fun id => destruct_tele_eq id || destruct id); solve [constructor]. simpl. bang. simpl. bang. simpl. unfold telescope in x. destruct_sigma x. destruct_sigma x. destruct idx, idx0. simpl in x. destruct x. simpl. reflexivity. - intro. destruct_sigma a; destruct_sigma b. destruct x. destruct pr1, pr2. destruct a; simpl in * |-; constructor. - intros. destruct x, a, b. destruct pr1, pr2; simpl. destruct pr3; constructor. Defined. Definition noconf_equiv A a b : Equiv (a =={tele (n : nat) in vector A n} b) (NoConf A a b) := {| equiv := noconf A a b |}. Definition injectivity_cons2 {A} (u v : tele (x : A) (n : nat) in vector A n) : tele (e : S u.2.1 = S v.2.1) in (@Vector.cons A u.1 u.2.1 u.2.2 ==_{fun x : telescope (inj nat) => Vector.t A x;e} @Vector.cons A v.1 v.2.1 v.2.2) <~> u == v. Proof. refine (noconf_equiv A &(S u.2.1 & @Vector.cons A u.1 u.2.1 u.2.2) &(S v.2.1 & @Vector.cons A v.1 v.2.1 v.2.2)). Defined. Ltac intros_tele := match goal with |- Equiv (telescope (ext _ ?F)) _ => refine (equiv_tele_r _); match F with (fun x => @?F x) => intros ?x; match goal with id : _ |- Equiv _ ?y => let f' := eval simpl in (F id) in change (Equiv (telescope f') y) end end | |- Equiv (sigma _ (fun x => _)) _ => refine (equiv_tele_r _); intros ?x end. Lemma example {A} : &{ Γ' : Tel & tele (n : nat) (x y : A) (v v' : Vector.t A n) in (Vector.cons x v = Vector.cons y v') <~> Γ' }. Proof. intros. eexists. refine (equiv_compose _ _). do 5 intros_tele. 2:simpl. refine (equiv_compose _ _). refine (solution_inv_tele (A:=nat) (Vector.t A) _ _ _). refine (equiv_compose _ _). refine (reorder_tele (tele (e0 : S n = S n) in (Vector.cons x v ={ vector A ; e0} (Vector.cons y v'))) (fun ρ => inj (ρ.1 = eq_refl))). simpl. refine (equiv_compose _ _). refine (equiv_sym _). refine (equiv_tele_l _). refine (equiv_sym _). refine (injectivity_cons2 &(x, n & v) &(y, n & v')). simpl. pose (lower_solution' A n). pose (inv_equiv e &(x & v)). simpl in e. pose (telei x n in v : telu A). pose (sol:=lifted_solution' (tele (_ : A) (n' : nat) in vector A n')). 
simpl in sol. simpl in t0. unfold e, lower_solution, equiv, equiv_inv, inv_equiv in t0. simpl in t0. unfold e, lower_solution, equiv, equiv_inv, inv_equiv in t0. simpl in t0. set (solinst := sigmaI (fun x => sigma nat (fun n => vector A n)) t0.1 &(t0.2.1 & t0.2.2.1)). specialize (sol solinst). specialize (sol &(y, n & v')). (* specialize (sol solinst). (*&(y, n & v')).*) *) specialize (sol (telv A n)). specialize (sol (fun x => inj nat)). (* inj (S x.1 = S x.1))). *) simpl in e. specialize (sol (fun x => S x.2.1) (fun x => S n) eq_refl eq_refl). simpl in sol. specialize (sol e). subst e. unfold lifted_solution_lhs_type, lifted_solution_lhs_tel_type, lifted_solution_rhs_type in sol. simpl in sol. unfold theorem_square, solution_left in *. simpl in *. unfold inv_equiv in sol. unfold eq_points_equiv in sol. simpl in *. unfold equiv_inv in *. simpl in *. unfold cong in sol. simpl in *. refine (equiv_compose (C:=tele (e : x = y) in (v ={ (λ _ : A, inj (vector A n)); e} v')) _ sol). refine (equiv_tele_r _). intros. unfold square_tel. (* Lemma equiv_cong_subst_tel {Δ Γ : Tel} (P : Γ -> Tel) (f : Δ -> Γ) (s t : Δ) (e : s =={Δ} t) (u : P (f s)) (v : P (f t)) : subst P (cong_tel f _ _ e) u = subst (fun x => P (f x)) e u. *) Lemma subst_dcong_tel {Δ} {Γ : Δ -> Tel} (P : forall x: Δ, Γ x -> Tel) (f : forall x : Δ, Γ x) (s t : Δ) (e : s =={Δ} t) (u : P (f s)) (v : P (f t)) : subst P (dcong_tel f _ _ e) u = unfold cong_tel. simpl. unfold subst. simpl. simpl. rewrite (equiv_cong_subst_tel Transparent telescope eq. unfold telescope. simpl. Opaque eq telescope. simpl. unfold subst. simpl. unfold equiv_sym. unfold injectivity_cons2. simpl. unfold noconf_equiv, equiv, inv_equiv, equiv_inv. simpl. Transparent eq telescope. simpl. unfold noconf_equiv, equiv, inv_equiv, equiv_inv. simpl. clear sol. subst solinst. unfold square_tel. unfold subst. simpl. simpl. refine (equiv_compose _ _). Focus 2. pose (@equiv_cong_subst_tel). specialize (e () Lemma dcong_tel_cong_tel (Δ Γ : Tel) (f : Γ) (u v : Δ) (e : u =={Δ} v) : f =={fun _ => Γ; e} f <~> f == f. Proof. revert v e. refine (J _ _ _). unfold subst. rewrite J_refl. apply equiv_id. Defined. Lemma dcong_tel_cong_tel' (Δ Γ : Tel) (f : Γ) (u v : Δ) (e : u =={Δ} v) : f =={fun _ => Γ; e} f = (f == f). Proof. revert v e. refine (J _ _ _). unfold subst. rewrite J_refl. reflexivity. Defined. (* Lemma dcong_tel_nondep_simplify (Δ Γ : Tel) (f f' : Δ -> Γ) (u v : Δ) (e : u =={Δ} v) : *) (* let lhs := @dcong_tel Δ (fun _ => Γ) f u v e in *) (* let rhs := @dcong_tel Δ (fun _ => Γ) f' u v e in *) (* lhs = rhs <~>(forall x : Δ, f x = f' x). *) (* (rhs := @cong_tel Δ Γ (fun _ => f) u v e) : Type. *) (* simpl in *. *) (* pose (subst (fun _ => Γ) u v e). *) (* unfold eq_expl in *. *) set (rhs := dcong_tel _ _ _ _) at 2. simpl in rhs. unfold telescope in rhs. pose (dcong_tel_cong_tel' (tele (x : A) (x0 : nat) in vector A x0) (inj nat) (S n) &(x, n & v) &(y, n & v') x0). pose (Top.subst (P:=fun X : Tel => telescope X) e rhs). pose (@cong_tel (tele (x : A) (x0 : nat) in vector A x0) (inj nat) (fun _ => S n) &(x, n & v) &(y, n & v') x0). simpl in t3. simpl in t2. set(bar:=dcong_tel _ _ _ _). simpl in bar. Lemma dcong_tel_cong_tel_coerce (Δ Γ : Tel) (f : Γ) (u v : Δ) (e : u =={Δ} v) (lhs := @dcong_tel Δ (fun _ => Γ) (fun _ => f) u v e) (rhs := @cong_tel Δ Γ (fun _ => f) u v e) : Type. simpl in *. pose (subst (fun _ => Γ) u v e). unfold eq_expl in *. 
(* A : Type n : nat x, y : A v, v' : vector A n t0 := &(x, n, v & eq_refl) : tele (_ : A) (n' : nat) (_ : vector A n') in (S n' = S n) t1 := &(x, n & v) : telu A x0 : tele (e : x = y) (e0 : (rew e in &(n & v)).1 = n) in ((rew e in &(n & v)).2 ={ λ x : nat, inj (vector A x); e0} v') ============================ (cong_tel (λ x1 : &{ _ : A & &{ x2 : nat & vector A x2}}, &(S (x1.2).1 & Vector.cons x1.1 (x1.2).2)) &(x, n & v) &(y, n & v') x0).1 = eq_refl <~> dcong_tel (λ x1 : &{ _ : A & &{ x2 : nat & vector A x2}}, S (x1.2).1) &(x, n & v) &(y, n & v') x0 = dcong_tel (λ _ : &{ _ : A & &{ x2 : nat & vector A x2}}, S n) &(x, n & v) &(y, n & v') x0 *) unfold dcong_tel. unfold Telescopes.dcong_tel_obligation_1. unfold eq_rect_r. unfold J_refl. simpl. set(lhs := cong_tel _ _ _ _). simpl in lhs. set(rhs1 := dcong_tel _ _ _ _). set(rhs2 := dcong_tel _ _ _ _). simpl in *. unfold dcong_tel. apply axiom. refine (equiv_id _). Defined. Eval compute in (pr1 (@example nat)). unfold cong_tel. subst solinst. destruct x0. destruct pr2. simpl. unfold dcong. simpl. specialize (sol x0). Goal forall {A} n (x y : A) (v v' : Vector.t A n) (e : Vector.cons x v = Vector.cons y v') (P : forall n x y v v' (e : Vector.cons x v = Vector.cons y v'), Type), (P n x x v v eq_refl) -> P n x y v v' e. Proof. intros. revert e P X. refine (solution_inv _ _ _ _ _ _). refine (uncurry _). Lemma apply_equiv_dom {A B} (P : B -> Type) (e : Equiv A B) : (forall x : A, P (equiv e x)) -> forall x : B, P x. Proof. intros. specialize (X (e ^-1 x)). rewrite inv_equiv_equiv in X. exact X. Defined. unshelve refine (apply_equiv_dom _ _ _). shelve. refine (equiv_tele_l _). refine (equiv_sym _). refine (injectivity_cons &(x, n & v) &(y, n&v')). intros. pose (lower_solution' A n). pose (inv_equiv e &(x & v)). simpl in e. pose (telei x n in v : telu A). pose (sol:=lifted_solution (tele (_ : A) (n' : nat) in vector A n')). simpl in sol. simpl in t0. set (solinst := sigmaI (fun x => sigma nat (fun n => vector A n)) t0.1 &(t0.2.1 & t0.2.2.1)). specialize (sol solinst). specialize (sol &(y, n & v')). (* specialize (sol solinst). (*&(y, n & v')).*) *) specialize (sol (telv A n)). specialize (sol (fun x => inj nat)). (* inj (S x.1 = S x.1))). *) simpl in e. specialize (sol (fun x => S x.2.1) (fun x => S n) eq_refl eq_refl). simpl in sol. specialize (sol e). subst e. simpl in sol. unfold solution_left in *. simpl in *. simpl in *. unfold inv_equiv in sol. unfold eq_points_equiv in sol. simpl in *. unfold equiv_inv in *. simpl in *. unfold dcong in sol. specialize (sol x0). unfold subst in sol. simpl in sol. pose (inv_equiv sol). specialize (s &(pr1 & pr3)). destruct s as (s & s'). destruct s as [Hx simpl in s'. simpl in s. unfold equiv, equiv_inv, inv_equiv in H. simpl in H. unfold injectivity_cons in H. simpl in H. unfold eq_points_equiv in H. simpl in *. unfold inv_equiv at 1 in H. simpl in H. unfold equiv_compose in H. simpl in *. unfold compose in H. simpl in H. unfold pr1_seq in H. simpl in *. unfold noconf_equiv in H. simpl in H. unfold compose in H. simpl in H. unfold equiv, equiv_inv, inv_equiv in H. simpl in H. unfold isequiv_compose in H. simpl in H. unfold compose in H. simpl in H. unfold equiv_inv in H. simpl in H. unfold path_sigma_equivalence in H. simpl in H. unfold equiv in H. unfold path_sigma_uncurried in H. simpl in H. simpl. revert P X. unfold equiv_sym, equiv, equiv_inv, injectivity_cons. simpl. unfold equiv_compose, equiv, equiv_inv, injectivity_cons. simpl. unfold inv_equiv, equiv, equiv_inv, injectivity_cons. simpl. unfold compose. 
unfold rewh. unfold eq_expl. unfold subst. unfold eq_rect. simpl. pose (inv_equiv e &(eq_refl & eq_refl)). pose (equiv_inv e). simpl. compute. set (foo:=(equiv_sym (injectivity_cons &(x, n & v) &(y, n & v'))) a) in *. specialize (equiv0 H). unfold inv_equiv in equiv0. unfold equiv_inv in equiv0. simpl in equiv0. unfold cong in equiv0. simpl in equiv0. simpl in equiv0. specialize (equiv0 t1.2.2.2). unfold dcong in equiv0. simpl in equiv0. specialize (equiv0 eq_refl). unfold solution_left in equiv0. simpl in equiv0. clear t0 t1. rewrite dcong_refl in equiv0. specialize (equiv0 eq_refl). curry equiv0. specialize (equiv0 eq_refl). destruct a. simpl in e. Goal forall A n (x y : A) (xs : vector A n) (ys : vector A n) (r : n = n), &(n, x, xs) tele (e1 : n = n) (e2 : x = y) (e3 : xs = ys) in (Top.cong S e1 = eq_refl) <~> tele (e2'' : x = y) in (xs = ys). Proof. intros. pose (lifted_solution ). pose (lower_solution' A n). pose (telei n x in xs : telu A). simpl in e. pose (inv_equiv e0 &(x & xs)). specialize (e (tele (n' : nat) (_ : A) in vector A n')). specialize (e &(t1.1, t1.2.1 & t1.2.2.1)). specialize (e &(t1.1, t1.2.1 & t1.2.2.1)). specialize (e (telv A n)). specialize (e (fun x => inj nat)). (* inj (S x.1 = S x.1))). *) simpl in e. specialize (e (fun x => S x.1) (fun x => S n)). simpl in e. specialize (e t1.2.2.2 t1.2.2.2). simpl in e. specialize (e e0). subst e0. simpl in e. destruct e. simpl in *. unshelve refine {| equiv a := _ |}. simpl in e. specialize (e (tele (n' : nat) (_ : A) (_ : vector A n') in (S n' = S n))). specialize (e t1 t1). specialize (e (telv A)). specialize (e (fun x => inj unit)). specialize (e (fun x => tt) (fun x => tt)). specialize (e eq_refl eq_refl). simpl in e. specialize (e (telu A) t0 t0 (telv A)). specialize (e (fun ρ => inj (S ρ.1 = S n))). unshelve refine {| equiv a := _ |}. specialize (e (fun ρ => S n)). simpl in e. match goal with H : telescope ?u <~> telescope ?v |- _ => set (telu:= u) in *; set (telv := v) in * end. pose (cong_equiv telu (telei n x xs in (Top.cong S r)) (telei n y ys in eq_refl)). specialize (e0 _ (equiv e) _). Lemma lifted_solution : forall A n (x y : A) (xs : vector A n) (ys : vector A n) (r : n = n), tele (e1 : n = n) (e2 : x = y) (e3 : xs = ys) in (Top.cong S e1 = eq_refl) <~> tele (e2'' : x = y) in (xs = ys). Proof. intros. set (foo :=telu A n). pose ((telei n x xs in (Top.cong S r)) : telu A n). pose (lower_solution' A n). match goal with H : telescope ?u <~> telescope ?v |- _ => set (telu:= u) in *; set (telv := v) in * end. pose (cong_equiv telu (telei n x xs in (Top.cong S r)) (telei n y ys in eq_refl)). specialize (e0 _ (equiv e) _). refine (equiv_compose _ _). Lemma equiv_teleeq {A : Type} (Δ : A -> Tel) (f g : forall x : A, Δ x) (x y : A) : (&(x & f x) =={ext A Δ} &(y & g y)) <~> ext (x = y) (fun e1 => rewP e1 at (fun x => telescope (Δ x)) in f x =={Δ y} g y). Proof. Admitted. refine (equiv_teleeq (fun A => tele (_ : x = y) (_ : xs = ys) in (Top.cong S e1 = eq_refl)) _ _ _ _). Lemma lifted_solution : forall A n (x y : A) (xs : vector A n) (ys : vector A n), tele (e1 : n = n) (e2 : x = y) (e3 : xs = ys) in (Top.cong S e1 = eq_refl) <~> tele (e2'' : x = y) in (xs = ys). Proof. intros. refine (equiv_compose _ _). pose (lower_solution' A n). match goal with H : telescope ?u <~> telescope ?v |- _ => set (telu:= u) in *; set (telv := v) in * end. pose (cong_equiv telu (telei n x xs in eq_refl) (telei n y ys in eq_refl)). specialize (e0 _ (equiv e) _). (* Lemma fold_teleq (Δ : Tel) (u v : Δ) : u =={Δ} v -> (u = v). 
*) (* match goal with *) (* H : telescope ?x <~> telescope ?y |- _ => *) (* pose (cong_equiv _ x y _ e) *) (* end. *) pose (cong_equiv Goal forall {A} n (x y : A) (v v' : Vector.t A n) (e : Vector.cons x v = Vector.cons y v') (P : forall n x y v v' (e : Vector.cons x v = Vector.cons y v'), Type), (P n x x v v eq_refl) -> P n x y v v' e. Proof. intros. revert e P X. refine (solution_inv _ _ _ _ _ _). refine (uncurry _). change ( forall (s : tele (x0 : &(S n & Vector.cons x v) = &(S n & Vector.cons y v')) in (pr1_seq x0 = eq_refl)) (P : forall (n0 : nat) (x0 y0 : A) (v0 v'0 : vector A n0), Vector.cons x0 v0 = Vector.cons y0 v'0 -> Type), P n x x v v eq_refl -> P n x y v v' (rewh s.1 s.2)). Lemma apply_equiv_dom {A B} (P : B -> Type) (e : Equiv A B) : (forall x : A, P (equiv e x)) -> forall x : B, P x. Proof. intros. Admitted. unshelve refine (apply_equiv_dom _ _ _). shelve. refine (equiv_compose _ _). shelve. unshelve refine (equiv_tele_l _). shelve. refine (equiv_sym _). refine (injectivity_cons &(x, n & v) &(y, n&v')). Unshelve. all:cycle 1. shelve. Opaque telescope. simpl. unfold injectivity_cons. simpl. Transparent telescope. (* Anomaly *) (* pose (reorder_tele (tele (e : x = y) (e0 : (rew e in &(n & v)).1 = n) in ((rew e in &(n & v)).2 =_{ *) (* fun x0 : nat => *) (* inj (vector A x0);e0} v')) _). *) unshelve refine (equiv_tele_l _). shelve. simpl. Lemma equiv_tele_l {A} {A'} {B : A' -> Type} (e : Equiv A A') : tele (x : A) in B (equiv e x) <~> tele (x : A) (b : rew e in c ) in C x b. Proof. simpl. Admitted. refine (simpleeq _). refine (geninj_eq_inv_goal (fun A n x y v v' e => forall (e' : pr1_seq (pack_pathover (cons x v) (cons y v') e) = eq_refl) (P : forall (n0 : nat) (x0 y0 : A) (v0 v'0 : vector A n0), cons x0 v0 = cons y0 v'0 -> Type), P n x x v v eq_refl -> P n x y v v' (rewh (pack_pathover (cons x v) (cons y v') e) e')) _). intros e. pose proof (pr2_seq_pack_pathover (geninj_eq e)). set (pr1_seq_pack_pathover (geninj_eq e)) in *. clearbody e0. unfold rewh. rewrite H. rewrite e0. clear e0 H. revert e. intros [e]. revert pr2. intros [e' e'']. simpl pr1. cbn -[geninj_eq]. Coq-Equations-1.3.1-8.20/test-suite/terms.v000066400000000000000000000012331463127417400202300ustar00rootroot00000000000000From Equations Require Import Equations Fin DepElimDec. Inductive term := | Var : nat -> term | App : term -> list term -> term | Lam : term -> term. Equations(nocomp) term_size (t : term) : nat := term_size (Var n) := 0; term_size (App f l) := S (List.fold_left (fun acc x => max acc (term_size x)) l (term_size f)); term_size (Lam f) := S (term_size f). (** TODO: recognize recursive call under lambda abstraction *) Equations(nocomp) subst (t : term) (k : nat) (u : term) : term := subst t k (Var n) := if Nat.eqb k n then t else Var n; subst t k (App f l) := App (subst t k f) (List.map (fun x => subst t k x) l); subst t k (Lam f) := Lam (subst t (S k) f). Coq-Equations-1.3.1-8.20/test-suite/test-hott.v000066400000000000000000000002321463127417400210270ustar00rootroot00000000000000From Equations.HoTT Require Import Loader. From HoTT Require Import Basics. Equations foo {A} (x y : A) (e : x = y) : e = e := foo _ _ idpath := idpath. Coq-Equations-1.3.1-8.20/test-suite/univpoly.v000066400000000000000000000011011463127417400207550ustar00rootroot00000000000000Require Import Equations.Equations. Set Universe Polymorphism. Set Implicit Arguments. (* Move fix_proto to poly version *) Equations(noind) id (A : Type) (a : A) : A := id A x := x. Set Printing Universes. 
(* Move fix_proto to poly version *) Equations foo (A : _) (a : A) : A := foo A a := a. Equations(nocomp) foo' (A : _) (x : A) : A := foo' A x := x. Equations(nocomp) refl (A : _) (x : A) : x = x := refl A x := @eq_refl _ x. Equations(nocomp) id' {A : Type} (a : A) : A := id' _ a := a. Equations(nocomp noind) foo'' (u : unit) : id' tt = id' tt := foo'' tt := _. Coq-Equations-1.3.1-8.20/test-suite/wfnocycle.v000066400000000000000000000076341463127417400211020ustar00rootroot00000000000000Set Warnings "-notation-overridden". From Equations Require Import Equations. Require Import Utf8 Arith Compare_dec List Lia. Require Import Relation_Operators. Arguments clos_trans [A]. Import Sigma_Notations. Set Equations Transparent. Require Import fin. Equations lift_fin {n} (k : nat) (f : fin n) : fin (S n) := lift_fin 0 f := fs f; lift_fin (S k) fz := fz; lift_fin (S k) (fs f) := fs (lift_fin k f). Open Scope list_scope. Derive Signature for Forall2. #[local] Hint Constructors Forall2 : core. Local Open Scope program_scope. Local Open Scope equations_scope. Arguments map {A B}. (* end hide *) Require Import Equations.Prop.Subterm. Derive Subterm for nat. (* Hint Extern 30 (@NoCycle ?A (NoCycle_WellFounded ?R ?wfr) _ _) => *) (* hnf ; typeclasses eauto with subterm_relation : typeclass_instances. *) Lemma nocycle_nat x : S x = x -> False. simplify ?. Qed. Goal forall x, S (S x) = x -> False. intros x. simplify ?. Qed. Inductive tree := leaf : tree | node : tree -> tree -> tree. Derive NoConfusion Subterm for tree. Goal forall x y, node x y = x -> False. intros x y. simplify ?. Qed. Goal forall x y, node y (node y x) = x -> False. intros x y. simplify ?. Qed. Goal forall x y, node y (node y x) = x -> False. intros x y. simplify ?. Qed. Goal forall x y, x = node y (node y x) -> False. intros x y. simplify ?. Show Proof. Qed. Goal forall x y z, x = node (node y z) (node y x) -> False. intros x y z. simplify ?. Qed. (** It would be hard to come up with an example for vector, but for indexed terms in some language, yes *) Derive NoConfusion Subterm for list. Reserved Notation " x ∈ s " (at level 70, s at level 10). Polymorphic Inductive In {A} (x : A) : list A -> Type := | here {xs} : x ∈ (x :: xs) | there {y xs} : x ∈ xs -> x ∈ (y :: xs) (* begin hide *) where "x ∈ s" := (In x s). Derive Signature NoConfusion for In. Arguments here {A x xs}. Arguments there {A x y xs} _. (* end hide *) Set Equations With UIP. Set Universe Polymorphism. Section InNoconfusion. Context {A} {hset : UIP A}. Global Instance list_UIP: UIP (list A). Proof. intros inx inx' e. depelim e. induction inx. - now simplify *. - simplify $. simpl. simplify ?. intros. specialize (IHinx e). simpl. destruct IHinx. reflexivity. Defined. Instance In_UIP (x : A) l : UIP (x ∈ l). Abort. (* Impossible! As defined, x ∈ l is not proof irrelevant, it contains an index into the list *) End InNoconfusion. Unset Universe Polymorphism. (* end hide *) Inductive type : Set := | tbool | tunit | tarrow : type -> type -> type. Derive NoConfusion Subterm for type. #[export] Instance type_uip : UIP type. Proof. red. intros x y ->. induction y; try repeat (simplify ?; simpl); trivial. intros. specialize (IHy1 e'). specialize (IHy2 e). destruct IHy1. destruct IHy2. reflexivity. Qed. 
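(* Added goal (not in the original test-suite): it mirrors the [tree] goals above and relies on the derived subterm relation for [type] in the same way, so [simplify ?] is expected to discharge the cyclic equation. *)
Goal forall t1 t2 : type, tarrow t1 t2 = t1 -> False.
intros t1 t2. simplify ?. Qed.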
Inductive term : forall (ctx : list type), type -> Set := | ttrue {ctx} : term ctx tbool | tfalse{ctx} : term ctx tbool | tvar {ctx} {τ} (x : τ ∈ ctx) : term ctx τ | tapp {ctx} {τ τ'} (f : term ctx (tarrow τ τ')) (a : term ctx τ) : term ctx τ | tlam {ctx} {τ τ'} (abs : term (τ :: ctx) τ') : term ctx (tarrow τ τ'). Derive Signature for term. Derive NoConfusionHom for term. (* FIXME subterm and non-uniform indices and universe issue... *) (* Derive Subterm for term. Lemma wft : WellFounded term_subterm. solve_subterm. destruct index. simpl in *. revert H. simplify *. destruct index. simpl in *. revert H. simplify *. Defined. Existing Instance wft. Goal forall ctx τ (f : term ctx (tarrow τ τ)) (a : term ctx τ), signature_pack (tapp f a) = signature_pack a -> False. Proof. intros *. simplify <>. Qed. Goal forall ctx τ (f : term ctx (tarrow τ τ)) (a : term ctx τ), tapp f a = a -> False. Proof. intros *. refine (simplify_ind_pack (A:=Σ ctx : list type, type) (fun x => term x.1 x.2) (ctx, τ) _ _ (fun _ => False) _). simplify <>. Defined. *)Coq-Equations-1.3.1-8.20/test-suite/yves.v000066400000000000000000000022511463127417400200650ustar00rootroot00000000000000Require Import Arith. From Equations Require Import Equations. Inductive btree (T : Type) : Type := Leaf | Node (val : T) (t1 t2 : btree T). Arguments Leaf {T}. Arguments Node {T}. Fixpoint count {T : Type} (p : T -> bool) (t : btree T) : nat := match t with | Leaf => 0 | Node x t1 t2 => (if p x then 1 else 0) + (count p t1 + count p t2) end. Definition size {T : Type} (t : btree T) := count (fun x => true) t. Lemma size1 {T} (a : T) t1 t2 : size t1 < size (Node a t1 t2). Proof. unfold size; simpl. unfold lt; apply Peano.le_n_S, Nat.le_add_r. Qed. Lemma size2 {T} (a : T) t1 t2 : size t2 < size (Node a t1 t2). Proof. unfold size; simpl. unfold lt; apply Peano.le_n_S; rewrite Nat.add_comm; apply Nat.le_add_r. Qed. Equations? redo_rev_tree {T} (t : btree T) : btree T by wf (size t) lt := redo_rev_tree Leaf := Leaf ; redo_rev_tree (Node a t1 t2) := Node a (redo_rev_tree t2) (redo_rev_tree t1). Proof. apply size2. apply size1. Defined. Lemma redo_rev_tree_invol {T} (t : btree T) : redo_rev_tree (redo_rev_tree t) = t. Proof. funelim (redo_rev_tree t). reflexivity. simp redo_rev_tree. rewrite H, H0. reflexivity. Qed.Coq-Equations-1.3.1-8.20/test-suite/zoe.v000066400000000000000000000146471463127417400177100ustar00rootroot00000000000000From Equations Require Import Equations. Require Import Arith List ListSet Lia Program. Import ListNotations. Set Keyed Unification. Definition fvar : Type := nat. Definition bvar : Type := nat. Inductive sort : Type := | N : sort | ArrowS : sort -> sort -> sort. Definition eq_sort_dec (s1 s2 : sort) : {s1 = s2} + {s1 <> s2}. Proof. do 2 decide equality. Defined. (* A very simple language with locally nameless binder representation *) Inductive index : Type := | IBVar : bvar -> index | IFVar : fvar -> index | Z : index | Plus1 : index -> index | IAbs : sort -> index -> index | IApp : index -> index -> index. (* Set notations *) Notation "\{}" := (empty_set fvar). Notation "\{ x }" := ([x]). Notation "E \u F" := (set_union eq_nat_dec E F) (at level 37, right associativity). Notation "x \in E" := (set_In x E) (at level 38). Notation "x \notin E" := (~ (x \in E)) (at level 38). (* Environments *) Definition env := list (fvar * sort). Definition get (x : fvar) (e : env) : option sort := match find (fun p => if eq_nat_dec x (fst p) then true else false) e with | Some p => Some (snd p) | None => None end. 
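(* Added sanity check (hypothetical example, not in the original file): [get] performs a linear search and returns the sort of the first matching binding for the given variable. *)
Example get_first_binding : get 0 ((0, N) :: (0, ArrowS N N) :: nil) = Some N.
Proof. reflexivity. Qed.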
Definition dom (e : env) : set fvar := fold_left (fun s x => \{fst x} \u s) e \{}. (* Free variables *) Fixpoint ifv (i : index) : set fvar := match i with | IFVar x => \{x} | IBVar _ | Z => \{} | Plus1 i => ifv i | IAbs _ i => ifv i | IApp i1 i2 => ifv i1 \u ifv i2 end. Definition var_gen (E : set fvar) := 1 + fold_right max O E. Lemma var_gen_spec : forall E, (var_gen E) \notin E. Proof. intros E. assert (Hlt : forall n, n \in E -> n < var_gen E). { intros n H. induction E as [|x xs IHxs]; unfold var_gen in *; simpl in *; try (now exfalso; auto). destruct H; subst; rewrite Nat.succ_max_distr, Nat.max_lt_iff; auto. } intros contra. apply Hlt in contra. lia. Qed. Fixpoint ii_open_rec (k : nat) (i : index) (x : fvar) : index := match i with | IBVar n => if eq_nat_dec k n then (IFVar x) else (IBVar n) | IFVar _ | Z => i | Plus1 i => Plus1 (ii_open_rec k i x) | IAbs s i => IAbs s (ii_open_rec (S k) i x) | IApp i1 i2 => IApp (ii_open_rec k i1 x) (ii_open_rec k i2 x) end. Definition ii_open_var (i : index) (x : fvar) := ii_open_rec 0 i x. (* Typing rules *) Inductive sorting (e : env) : index -> sort -> Prop := | IFVarRule : forall (x : fvar) (s : sort), get x e = Some s -> sorting e (IFVar x) s | ZRule : sorting e Z N | Plus1Rule : forall (I : index), sorting e I N -> sorting e (Plus1 I) N | IAbsRule : forall (I : index) (s1 s2 : sort), (* Cheating a bit to make the proof easier, but this rule should be equivalent with the previous one - the proof must be cumbersome though *) let x := var_gen (dom e \u ifv I) in sorting ((x, s1) :: e) (ii_open_var I x) s2 -> sorting e (IAbs s1 I) (ArrowS s1 s2) | IAppRule : forall (I1 I2 : index) (S1 S2 : sort), sorting e I1 (ArrowS S1 S2) -> sorting e I2 S1 -> sorting e (IApp I1 I2) S2. Fixpoint index_size (i : index) : nat := match i with | IBVar _ | IFVar _ | Z => 0 | Plus1 i | IAbs _ i => 1 + index_size i | IApp i1 i2 => 1 + (index_size i1 + index_size i2) end. Lemma size_ii_open_rec : forall (i : index) (x : fvar) (rec :nat), index_size (ii_open_rec rec i x) = index_size i. Proof. intros i x. induction i; intros n; simpl; repeat (match goal with | [ H : forall (_ : nat), _ = _ |- _ ] => rewrite H end); try lia. now destruct (eq_nat_dec _ _). Qed. Lemma size_ii_open_rec_lt : forall (i : index) (x : fvar), index_size (ii_open_var i x) < S (index_size i). Proof. intros; unfold ii_open_var; rewrite size_ii_open_rec. auto with arith. Defined. (** Hints for automatically solving recursive call obligations *) #[local] Hint Extern 3 => progress cbn : rec_decision. #[local] Hint Resolve size_ii_open_rec_lt : rec_decision. #[local] Hint Extern 3 => progress auto with arith : rec_decision. (* Equations infer_sort (ie : env) (i : index) : option sort := infer_sort ie i by wf i (MR lt index_size) := (** Need to strengthen the subst *) infer_sort ie (IBVar x) := None ; infer_sort ie (IFVar x) := get x ie; infer_sort ie Z := Some N; infer_sort ie (Plus1 i) <= infer_sort ie i => { | Some N := Some N; | _ := None }; infer_sort ie (IAbs s i) <= infer_sort ((0, s) :: ie) (ii_open_var i 0) => { infer_sort ie (IAbs s i) (Some s2) := Some (ArrowS s s2) ; infer_sort _ _ _ := None } ; infer_sort ie (IApp i1 i2) <= infer_sort ie i1 => { | Some (ArrowS s1 s2) <= infer_sort ie i2 => { | None := None; | Some s1' <= sort_eq_dec s1 s1' => { | (left _) := Some s2; | (right _) := None } }; | _ := None }. *) (* BUG! not general enough, need to inverse the order of arguments so that ie can change at recursive calls. 
*) #[local] Obligation Tactic := program_simpl; try typeclasses eauto 10 with rec_decision simp subterm_relation. Equations infer_sort (i : index) (ie : env) : option sort by wf i (MR lt index_size) := infer_sort (IBVar x) ie := None ; infer_sort (IFVar x) ie := get x ie; infer_sort Z ie := Some N; infer_sort (Plus1 i) ie with infer_sort i ie => { | Some N := Some N; | _ := None }; infer_sort (IAbs s i) ie with let x := (var_gen (dom ie \u ifv i)) in infer_sort (ii_open_var i x) ((x, s) :: ie) => { infer_sort (IAbs s i) ie (Some s2) := Some (ArrowS s s2) ; infer_sort _ _ _ := None } ; infer_sort (IApp i1 i2) ie with infer_sort i1 ie := { infer_sort (IApp i1 i2) ie (Some (ArrowS s1 s2)) with infer_sort i2 ie := { | None := None; | Some s1' with eq_sort_dec s1 s1' := { | (left Heq) := Some s2; | (right _) := None } }; infer_sort (IApp i1 i2) ie _ := None }. Lemma infer_sort_sound: forall (i : index) (ie: env) (s : sort), infer_sort i ie = Some s -> sorting ie i s. Proof. intros i ie. funelim (infer_sort i ie); intros s' Heqs; try (noconf Heqs || (rewrite Heq in *; noconf Heqs)). - now constructor. - now constructor. - specialize (Hind _ Heq). now constructor. - specialize (Hind _ Heq). now constructor. - pose proof (Hind _ Heq1). pose proof (Hind0 _ Heq2). subst s1. econstructor; eauto. Qed. Coq-Equations-1.3.1-8.20/theories/000077500000000000000000000000001463127417400164245ustar00rootroot00000000000000Coq-Equations-1.3.1-8.20/theories/CoreTactics.v000066400000000000000000000152571463127417400210300ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (** Tactics supporting equations *) Require Export Equations.Init. Require Import Equations.Signature. Local Open Scope equations_scope. (** Try to find a contradiction. *) (** We will use the [block] definition to separate the goal from the equalities generated by the tactic. *) Definition block := the_equations_tag. Ltac intros_until_block := match goal with |- let _ := block in _ => intros _ | |- _ => try (intro; intros_until_block) end. Ltac block_goal := match goal with | [ |- ?T ] => change (let _ := block in T) end. Ltac unblock_goal := unfold block in *; cbv zeta. Ltac blocked t := block_goal ; t ; unblock_goal. Definition hide_pattern {A : Type} (t : A) := t. Definition add_pattern {B} (A : Type) (b : B) := A. Ltac add_pattern t := match goal with |- ?T => change (add_pattern T t) end. (** To handle sections, we need to separate the context in two parts: variables introduced by the section and the rest. We introduce a dummy variable between them to indicate that. *) Variant end_of_section := the_end_of_the_section. Ltac set_eos := let eos := fresh "eos" in assert (eos:=the_end_of_the_section). Ltac with_eos_aux tac := match goal with [ H : end_of_section |- _ ] => tac H end. Ltac with_eos tac orelse := with_eos_aux tac + (* No section variables *) orelse. Ltac clear_nonsection := repeat match goal with [ H : ?T |- _ ] => match T with end_of_section => idtac | _ => clear H end end. (** Do something on the last hypothesis, or fail *) Ltac on_last_hyp tac := lazymatch goal with [ H : _ |- _ ] => tac H end. (** Reverse everything up to hypothesis id (not included). 
*) Ltac revert_until id := on_last_hyp ltac:(fun id' => match id' with | id => idtac | _ => revert id' ; revert_until id end).
(** We have a specialized [reverse_local] tactic to reverse the goal until the beginning of the section variables *)
Ltac reverse_local := match goal with | [ H : ?T |- _ ] => match T with | end_of_section => idtac | _ => revert H ; reverse_local end | _ => idtac end.
Ltac clear_local := match goal with | [ H : ?T |- _ ] => match T with | end_of_section => idtac | _ => clear H ; clear_local end | _ => idtac end.
(** Internally used constants *)
Register block as equations.internal.block. Register hide_pattern as equations.internal.hide_pattern. Register add_pattern as equations.internal.add_pattern. Register the_end_of_the_section as equations.internal.the_end_of_the_section. Register end_of_section as equations.internal.end_of_section.
(* Generic NoConfusion derivation *)
(** Apply [noConfusion] on a given hypothesis. *) (** Used by the [Derive NoConfusion] command. *)
Ltac destruct_sigma id := match type of id with @sigma ?A ?P => let idx := fresh "idx" in destruct id as [idx id]; repeat destruct_sigma idx; simpl in id | _ => idtac end.
Ltac simp_sigmas := repeat destruct_one_sigma ; simpl in *.
Ltac eapply_hyp := multimatch goal with [ H : _ |- _ ] => eapply H end.
Ltac destruct_tele_eq H := match type of H with ?R ?x ?y => let rhs := fresh in set (rhs := y) in *; pattern sigma rhs; clearbody rhs; destruct H; simpl end.
(** Used by funelim *)
Ltac apply_args c elimc k := match c with | _ ?a ?b ?c ?d ?e ?f ?g ?h ?i ?j ?k ?l ?m => k uconstr:(elimc a b c d e f g h i j k l m) | _ ?a ?b ?c ?d ?e ?f ?g ?h ?i ?j ?k ?l => k uconstr:(elimc a b c d e f g h i j k l) | _ ?a ?b ?c ?d ?e ?f ?g ?h ?i ?j ?k => k uconstr:(elimc a b c d e f g h i j k) | _ ?a ?b ?c ?d ?e ?f ?g ?h ?i ?j => k uconstr:(elimc a b c d e f g h i j) | _ ?a ?b ?c ?d ?e ?f ?g ?h ?i => k uconstr:(elimc a b c d e f g h i) | _ ?a ?b ?c ?d ?e ?f ?g ?h => k uconstr:(elimc a b c d e f g h) | _ ?a ?b ?c ?d ?e ?f ?g => k uconstr:(elimc a b c d e f g) | _ ?a ?b ?c ?d ?e ?f => k uconstr:(elimc a b c d e f) | _ ?a ?b ?c ?d ?e => k uconstr:(elimc a b c d e) | _ ?a ?b ?c ?d => k uconstr:(elimc a b c d) | _ ?a ?b ?c => k uconstr:(elimc a b c) | _ ?a ?b => k uconstr:(elimc a b) | _ ?a => k uconstr:(elimc a) end.
(** Used to destruct recursive calls in obligations, simplifying them. *)
Ltac on_application f tac T := match T with | context [f ?x ?y ?z ?w ?v ?u ?a ?b ?c] => tac (f x y z w v u a b c) | context [f ?x ?y ?z ?w ?v ?u ?a ?b] => tac (f x y z w v u a b) | context [f ?x ?y ?z ?w ?v ?u ?a] => tac (f x y z w v u a) | context [f ?x ?y ?z ?w ?v ?u] => tac (f x y z w v u) | context [f ?x ?y ?z ?w ?v] => tac (f x y z w v) | context [f ?x ?y ?z ?w] => tac (f x y z w) | context [f ?x ?y ?z] => tac (f x y z) | context [f ?x ?y] => tac (f x y) | context [f ?x] => tac (f x) end.
(** Tactical [on_call f tac] applies [tac] on any application of [f] in the hypothesis or goal. *)
Ltac on_call f tac := match goal with | |- ?T => on_application f tac T | H : ?T |- _ => on_application f tac T end.
(* Destructs calls to f in hypothesis or conclusion, useful if f creates a subset object. *) (* Already defined in HoTT.Core.Tactics *)
Ltac destruct_call_eqns f := let tac t := (destruct t) in on_call f tac.
Ltac destruct_calls f := repeat destruct_call_eqns f.
Ltac destruct_rec_calls := match goal with | [ H : let _ := fixproto in _ |- _ ] => red in H; destruct_calls H ; clear H end. Ltac destruct_all_rec_calls := repeat destruct_rec_calls. 
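(* Added usage sketch (the goal below is hypothetical, not part of the library):
   when a goal mentions an application of [f] that returns a subset object,
   [destruct_call_eqns f] destructs that call so that its components become
   available, e.g.

     Goal forall (f : nat -> {n : nat | 0 < n}) (x : nat), 0 < proj1_sig (f x).
     Proof. intros f x. destruct_call_eqns f. simpl. assumption. Qed.
*)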
(** Revert the last hypothesis. *) Ltac revert_last := match goal with [ H : _ |- _ ] => revert H end. (** Repeatedly reverse the last hypothesis, putting everything in the goal. *) Ltac reverse := repeat revert_last. (* Redefine to use simplification *) Ltac equations_simplify := intros; destruct_all_rec_calls; simpl in *; try progress (reverse; simplify_equalities). Ltac solve_wf := match goal with |- ?R _ _ => try typeclasses eauto with subterm_relation simp rec_decision end. (* program_simpl includes a [typeclasses eauto with program] which solves, e.g. [nat] goals trivially. We remove it. *) Ltac equations_simpl := equations_simplify ; try solve_wf. Global Obligation Tactic := equations_simpl. Coq-Equations-1.3.1-8.20/theories/HoTT/000077500000000000000000000000001463127417400172425ustar00rootroot00000000000000Coq-Equations-1.3.1-8.20/theories/HoTT/All.v000066400000000000000000000022231463127417400201400ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (** The set of libraries required to run Equations using an equality in Type with all features. *) Set Warnings "-notation-overridden". Require Export Equations.HoTT.Loader. Require Export Equations.HoTT.Tactics. Require Export Equations.HoTT.Telescopes. Require Export Equations.HoTT.Subterm. Require Export Equations.HoTT.WellFoundedInstances. Global Obligation Tactic := Equations.CoreTactics.equations_simpl. (** Tactic to solve well-founded proof obligations by default *) Ltac solve_rec := simpl in * ; cbv zeta ; intros ; try typeclasses eauto with subterm_relation simp rec_decision. Export EquationsNotations. Open Scope equations_scope. Coq-Equations-1.3.1-8.20/theories/HoTT/Classes.v000066400000000000000000000144111463127417400210270ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Require Import Equations.Init Equations.CoreTactics. Set Warnings "-notation-overridden". Require Import HoTT.Basics.Trunc HoTT.HSet. Require Import Equations.HoTT.Logic Equations.HoTT.Relation Equations.HoTT.Relation_Properties Equations.HoTT.WellFounded. Set Universe Polymorphism. (** A class for well foundedness proofs. Instances can be derived automatically using [Derive Subterm for ind]. *) Class WellFounded {A : Type} (R : Relation A) := wellfounded : well_founded R. (** This class contains no-cyclicity proofs. They can be derived from well-foundedness proofs for example. *) (** The proofs of [NoCycle] can be arbitrarily large, it doesn't actually matter in the sense that they are used to prove absurdity. *) (* Cumulative *) Class NoCyclePackage@{i|} (A : Type@{i}) := { NoCycle : A -> A -> Type@{i}; noCycle : forall {a b}, NoCycle a b -> (a = b -> Empty) }. (** These lemmas explains how to apply it during simplification. 
*) (** We always generate a goal of the form [NoCycle x C[x]], using either the left or right versions of the following lemma. *) Lemma apply_noCycle_left@{i j|} {A : Type@{i}} {noconf : NoCyclePackage A} (p q : A) {B : p = q -> Type@{j}} : NoCycle@{i} p q -> (forall H : p = q, B H). Proof. intros noc eq. destruct (noCycle noc eq). Defined. Lemma apply_noCycle_right@{i j|} {A : Type@{i}} {noconf : NoCyclePackage A} (p q : A) {B : p = q -> Type@{j}} : NoCycle q p -> (forall H : p = q, B H). Proof. intros noc eq. destruct (noCycle noc (inverse eq)). Defined. (* Extraction Inline apply_noCycle_left apply_noCycle_right. *) (** NoCycle can be decided using the well-founded subterm relation. *) Definition NoCycle_WellFounded {A} (R : Relation A) (wfR : WellFounded R) : NoCyclePackage A := {| NoCycle := R; noCycle := WellFounded.well_founded_irreflexive (wfR:=wfR) |}. #[export] Existing Instance NoCycle_WellFounded. #[export] Hint Extern 30 (@NoCycle ?A (NoCycle_WellFounded ?R ?wfr) _ _) => hnf; typeclasses eauto with subterm_relation : typeclass_instances. (** The NoConfusionPackage class provides a method for solving injectivity and discrimination of constructors, represented by an equality on an inductive type [I]. The type of [noConfusion] should be of the form [ Π Δ, (x y : I Δ) (x = y) -> NoConfusion x y ], where [NoConfusion x y] for constructor-headed [x] and [y] will give equality of their arguments or the absurd proposition in case of conflict. This gives a general method for simplifying by discrimination or injectivity of constructors. Some actual instances are defined later in the file using the more primitive [discriminate] and [injection] tactics on which we can always fall back. *) Cumulative Class NoConfusionPackage@{i} (A : Type@{i}) := { NoConfusion : A -> A -> Type@{i}; noConfusion : forall {a b}, NoConfusion a b -> a = b; noConfusion_inv : forall {a b}, a = b -> NoConfusion a b; noConfusion_sect : forall {a b} (e : NoConfusion a b), noConfusion_inv (noConfusion e) = e; noConfusion_retr : forall {a b} (e : a = b), (noConfusion (noConfusion_inv e)) = e; }. (** This lemma explains how to apply it during simplification. *) Lemma apply_noConfusion@{i j|} {A : Type@{i}} {noconf : NoConfusionPackage A} (p q : A) {B : p = q -> Type@{j}} : (forall e : NoConfusion p q, B (noConfusion e)) -> (forall e : p = q, B e). Proof. intros. generalize (noConfusion_retr e). destruct e. intros eq. destruct eq. apply X. Defined. (* Extraction Inline apply_noConfusion. *) (** Classes for types with UIP or decidable equality. *) Class UIP@{i|} (A : Type@{i}) := uip : forall {x y : A} (e e' : x = y), e = e'. #[export] Instance IsHSet_UIP (A : Type) (H : IsHSet A) : UIP A. Proof. apply axiomK_hset in H. intros x y e e'. red in H. destruct e'. apply H. Defined. (* By truncation, we also get those, we keep a single instance for HSet *) Example IsHProp_UIP (A : Type) (H : IsHProp A) : UIP A := _. Example Contr_UIP (A : Type) (H : Contr A) : UIP A := _. Definition dec_eq {A} (x y : A) : Type := (x = y) + (x <> y). Class EqDec@{i} (A : Type@{i}) := eq_dec : forall x y : A, sum@{i i} (x = y) (x = y -> Empty). Class EqDecPoint (A : Type) (x : A) := eq_dec_point : forall y : A, (x = y) + (x <> y). #[export] Instance EqDec_EqDecPoint A `(EqDec A) (x : A) : EqDecPoint A x := eq_dec x. (** For treating impossible cases. Equations corresponding to impossible calls form instances of [ImpossibleCall (f args)]. *) Class ImpossibleCall {A : Type} (a : A) : Type := is_impossible_call : Empty. 
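(* Added note: a hypothetical example is a head function on vectors whose [nil] branch is unreachable; its unreachable equation would yield an instance of the form [ImpossibleCall (head nil)], which can then be discharged with the [impossible_call] tactic defined below. *)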
(** We have a trivial elimination operator for impossible calls. *) Definition elim_impossible_call {A} (a : A) {imp : ImpossibleCall a} (P : A -> Type) : P a := match is_impossible_call with end. (** The tactic tries to find a call of [f] and eliminate it. *) Ltac impossible_call f := on_call f ltac:(fun t => apply (elim_impossible_call t)). (** The [FunctionalInduction f] typeclass is meant to register functional induction principles associated to a function [f]. Such principles are automatically generated for definitions made using [Equations]. *) Polymorphic Class FunctionalInduction {A : Type} (f : A) := { fun_ind_prf_ty : Type; fun_ind_prf : fun_ind_prf_ty }. Register FunctionalInduction as equations.funind.class. (** The [FunctionalElimination f] class declares elimination principles produced from the functional induction principle for [f] to be used directly to eliminate a call to [f]. This is the preferred method of proving results about a function. [n] is the number of binders for parameters, predicates and methods of the eliminator. *) Polymorphic Class FunctionalElimination {A : Type} (f : A) (fun_elim_ty : Type) (n : nat) := fun_elim : fun_elim_ty. Register FunctionalElimination as equations.funelim.class. Coq-Equations-1.3.1-8.20/theories/HoTT/Constants.v000066400000000000000000000074131463127417400214120ustar00rootroot00000000000000Set Warnings "-notation-overridden". From Equations Require Import Init. Require Import Equations.HoTT.Logic Equations.HoTT.DepElim Equations.HoTT.EqDec Equations.HoTT.Classes. From HoTT Require Import Spaces.Nat. (** Naturals *) Register Basics.Overture.O as equations.nat.zero. Register Basics.Overture.S as equations.nat.succ. Register Basics.Overture.nat as equations.nat.type. (* Sigma Types *) Register Equations.Init.sigma as equations.sigma.type. Register Equations.Init.sigmaI as equations.sigma.intro. Register Equations.Init.pr1 as equations.sigma.pr1. Register Equations.Init.pr2 as equations.sigma.pr2. (** Classes *) Register DepElim.DependentEliminationPackage as equations.depelim.class. Register Classes.ImpossibleCall as equations.impossiblecall.class. (** Logic parameterization *) Derive Signature for paths. Register Basics.Overture.paths as equations.equality.type. Register Basics.Overture.idpath as equations.equality.refl. Register Equations.HoTT.Logic.transport_r as equations.equality.case. Register Equations.HoTT.Logic.paths_rect_dep_r as equations.equality.elim. Register Classes.EqDec as equations.eqdec.class. Register Classes.dec_eq as equations.eqdec.dec_eq. Register Classes.UIP as equations.uip.class. Register Classes.uip as equations.uip.uip. Register Basics.Overture.Empty as equations.bottom.type. Register Empty_rec as equations.bottom.case. Register Empty_ind as equations.bottom.elim. Register Basics.Overture.Unit as equations.top.type. Register Basics.Overture.tt as equations.top.intro. Register Basics.Overture.Unit_ind as equations.top.elim. (* Should be in HoTT? *) Register Init.Datatypes.prod as core.prod.type. Register Init.Datatypes.pair as core.prod.intro. Register Init.Datatypes.fst as core.prod.proj1. Register Init.Datatypes.snd as core.prod.proj2. Register Init.Datatypes.prod as equations.conj.type. Register Init.Datatypes.pair as equations.conj.intro. Register Basics.Overture.Unit as equations.unit.type. Register Basics.Overture.tt as equations.unit.intro. Register Init.Datatypes.prod as equations.product.type. Register Init.Datatypes.pair as equations.product.intro. 
(* FIXME not polymorphic *) Register Classes.WellFounded as equations.wellfounded.class. Register WellFounded.well_founded as equations.wellfounded.type. Register Basics.Overture.Relation as equations.relation.type. Register Relation.trans_clos as equations.relation.transitive_closure. (* Dependent elimination constants *) Register DepElim.solution_left as equations.depelim.solution_left. Register DepElim.solution_left_dep as equations.depelim.solution_left_dep. Register DepElim.solution_right as equations.depelim.solution_right. Register DepElim.solution_right_dep as equations.depelim.solution_right_dep. Register Classes.NoConfusionPackage as equations.noconfusion.class. Register Classes.apply_noConfusion as equations.depelim.apply_noConfusion. Register Classes.NoCyclePackage as equations.nocycle.class. Register Classes.apply_noCycle_left as equations.depelim.apply_noCycle_left. Register Classes.apply_noCycle_right as equations.depelim.apply_noCycle_right. Register DepElim.simplification_sigma1 as equations.depelim.simpl_sigma. Register DepElim.simplification_sigma1_dep as equations.depelim.simpl_sigma_dep. Register DepElim.simplification_sigma1_nondep_dep as equations.depelim.simpl_sigma_nondep_dep. Register DepElim.simplification_sigma1_dep_dep as equations.depelim.simpl_sigma_dep_dep. Register DepElim.simplify_ind_pack as equations.depelim.simplify_ind_pack. Register DepElim.simplify_ind_pack_inv as equations.depelim.simplify_ind_pack_inv. Register DepElim.opaque_ind_pack_inv as equations.depelim.opaque_ind_pack_eq_inv. Register DepElim.pack_sigma as equations.depelim.pack_sigma_eq. Register DepElim.simplification_K_uip as equations.depelim.simpl_uip. Coq-Equations-1.3.1-8.20/theories/HoTT/DepElim.v000066400000000000000000000672731463127417400207670ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (** Tactics related to (dependent) equality and proof irrelevance. *) Set Warnings "-notation-overridden". Require Import Coq.Program.Tactics. Require Export Equations.Init. Require Import Equations.Signature Equations.CoreTactics. Require Import Equations.HoTT.Logic. Require Import Equations.HoTT.Classes. Require Import Equations.HoTT.EqDec. Set Universe Polymorphism. Import Sigma_Notations. Local Open Scope equations_scope. (** FIXME should not polute users *) Global Set Keyed Unification. (** Support for the [Equations] command. These tactics implement the necessary machinery to solve goals produced by the [Equations] command relative to dependent pattern-matching. It is inspired from the "Eliminating Dependent Pattern-Matching" paper by Goguen, McBride and McKinna. *) (** The [DependentEliminationPackage] provides the default dependent elimination principle to be used by the [equations] resolver. It is especially useful to register the dependent elimination principles for things in [Prop] which are not automatically generated, but it can be used for modified eliminators too. *) Polymorphic Class DependentEliminationPackage (A : Type) := { elim_type : Type ; elim : elim_type }. (** A higher-order tactic to apply a registered eliminator. 
*) Ltac elim_tac tac p := let ty := type of p in let eliminator := eval simpl in (elim (A:=ty)) in tac p eliminator. (** Specialization to do case analysis or induction. Note: the [equations] tactic tries [case] before [elim_case]: there is no need to register generated induction principles. *) Ltac elim_case p := elim_tac ltac:(fun p el => destruct p using el) p. Ltac elim_ind p := elim_tac ltac:(fun p el => induction p using el) p. (** Lemmas used by the simplifier, mainly rephrasings of [eq_rect], [eq_ind]. *) Lemma solution_left@{i j |} : forall {A : Type@{i}} {B : A -> Type@{j}} (t : A), B t -> (forall x, x = t -> B x). Proof. intros A B t H x eq. destruct eq. apply H. Defined. Lemma eq_sym_invol {A} (x y : A) (e : x = y) : e^^ = e. Proof. destruct e. reflexivity. Defined. Lemma eq_symmetry_dep {A} {t : A} {B : forall (x : A), x = t -> Type} : (forall (x : A) (eq : t = x), B x (inverse eq)) -> (forall (x : A) (eq : x = t), B x eq). Proof. intros. rewrite <- eq_sym_invol. generalize (inverse eq). apply X. Defined. Local Open Scope path_scope. (* Carefully crafted to avoid introducing commutative cuts. *) Lemma solution_left_dep@{i j|} : forall {A : Type@{i}} (t : A) {B : forall (x : A), (x = t -> Type@{j})}, B t 1 -> (forall x (Heq : x = t), B x Heq). Proof. intros A t B H x eq. apply eq_symmetry_dep. clear eq. intros. destruct eq. exact H. Defined. Lemma solution_right@{i j|} : forall {A : Type@{i}} {B : A -> Type@{j}} (t : A), B t -> (forall x, t = x -> B x). Proof. intros A B t H x eq. destruct eq. apply H. Defined. Lemma solution_right_dep@{i j|} : forall {A : Type@{i}} (t : A) {B : forall (x : A), (t = x -> Type@{j})}, B t 1 -> (forall x (Heq : t = x), B x Heq). Proof. intros A t B H x eq. destruct eq. apply H. Defined. Lemma solution_left_let@{i j|} : forall {A : Type@{i}} {B : A -> Type@{j}} (b : A) (t : A), (b = t -> B t) -> (let x := b in x = t -> B x). Proof. intros A B b t H x eq. subst x. destruct eq. apply H. reflexivity. Defined. Lemma solution_right_let@{i j|} : forall {A : Type@{i}} {B : A -> Type@{j}} (b t : A), (t = b -> B t) -> (let x := b in t = x -> B x). Proof. intros A B b t H x eq. subst x. destruct eq. apply H. reflexivity. Defined. Lemma deletion@{i j|} : forall {A : Type@{i}} {B : Type@{j}} (t : A), B -> (t = t -> B). Proof. intros; assumption. Defined. Lemma simplification_sigma1@{i j |} {A : Type@{i}} {P : Type@{i}} {B : Type@{j}} (p q : A) (x : P) (y : P) : (p = q -> x = y -> B) -> ((p, x) = (q, y) -> B). Proof. intros * prf eq; revert prf. change p with ((p, x).1). change q with ((q, y).1). change x with ((p, x).2) at 2. change y with ((q, y).2) at 2. destruct eq. intros eq. exact (eq 1 1). Defined. Lemma simplification_sigma1_dep@{i j |} {A : Type@{i}} {P : A -> Type@{i}} {B : Type@{j}} (p q : A) (x : P p) (y : P q) : (forall e : paths@{i} p q, paths (transport@{i i} P e x) y -> B) -> ((p, x) = (q, y) -> B). Proof. intros. revert X. change p with (p, x).1. change q with (q, y).1. change x with ((p, x).2) at 3. change y with ((q, y).2) at 4. destruct X0. intros X. eapply (X 1). apply 1. Defined. Definition pack_sigma_nondep@{i|} {A : Type@{i}} {P : Type@{i}} {p q : A} {x : P} {y : P} (e' : p = q) (e : x = y) : (p, x) = (q, y). Proof. destruct e'. simpl in e. destruct e. apply 1. Defined. Lemma simplification_sigma1_nondep_dep@{i j |} {A : Type@{i}} {P : Type@{i}} (p q : A) (x : P) (y : P) {B : (p, x) = (q, y) -> Type@{j}} : (forall e' : p = q, forall e : x = y, B (pack_sigma_nondep e' e)) -> (forall e : (p, x) = (q, y), B e). Proof. intros. revert X. 
change p with ((p, x).1). change q with (q, y).1. change x with (p, x).2 at 2 4. change y with (q, y).2 at 2 4. destruct e. intros X. simpl in *. apply (X 1 1). Defined. Definition pack_sigma@{i|} {A : Type@{i}} {P : A -> Type@{i}} {p q : A} {x : P p} {y : P q} (e' : p = q) (e : e' # x = y) : (p, x) = (q, y). Proof. destruct e'. simpl in e. destruct e. apply 1. Defined. Lemma simplification_sigma1_dep_dep@{i j | } {A : Type@{i}} {P : A -> Type@{i}} (p q : A) (x : P p) (y : P q) {B : (p, x) = (q, y) -> Type@{j}} : (forall e' : p = q, forall e : (e' # x) = y, B (pack_sigma e' e)) -> (forall e : (p, x) = (q, y), B e). Proof. intros. revert X. change p with ((p, x).1). change q with (q, y).1. change x with (p, x).2 at 3 5. change y with (q, y).2 at 4 6. destruct e. intros X. simpl in *. apply (X 1 1). Defined. (* Lemma simplification_sigma1'@{i j} {A : Type@{i}} {P : A -> Type@{i}} {B : Type@{j}} (p q : A) (x : P p) (y : P q) : *) (* (forall e : paths p q, paths (paths_rew A p P x q e) y -> B) -> *) (* (paths ((p, x)) ((q, y)) -> B). *) (* Proof. *) (* intros. revert X. *) (* change p with (pr1 (p, x)). *) (* change q with (pr1 (q, y)). *) (* change x with (pr2 (p, x)) at 3. *) (* change y with (pr2 (q, y)) at 4. *) (* destruct X0. *) (* intros X. eapply (X id_refl). apply id_refl. *) (* Defined. *) Lemma pr2_inv_uip@{i|} {A : Type@{i}} {P : A -> Type@{i}} {x : A} {y y' : P x} : y = y' -> sigmaI@{i} P x y = sigmaI@{i} P x y'. Proof. exact (solution_right (B:=fun y' => (x, y) = (x, y')) y 1 y'). Defined. Lemma pr2_uip@{i|} {A : Type@{i}} {E : UIP A} {P : A -> Type@{i}} {x : A} {y y' : P x} : sigmaI@{i} P x y = sigmaI@{i} P x y' -> y = y'. Proof. refine (simplification_sigma1_dep_dep@{i i} _ _ _ _ _). intros e'. destruct (uip 1 e'). intros e ; exact e. Defined. Lemma pr2_uip_refl@{i| } {A : Type@{i}} {E : UIP A} (P : A -> Type@{i}) (x : A) (y : P x) : pr2_uip@{i} (@idpath _ (x, y)) = 1. Proof. unfold pr2_uip, simplification_sigma1_dep_dep. now rewrite uip_refl_refl. Defined. (** If we have decidable equality on [A] we use this version which is axiom-free! *) Lemma simplification_sigma2_uip@{i j|} {A : Type@{i}} {uip : UIP A} {P : A -> Type@{i}} {B : Type@{j}} (p : A) (x y : P p) : (x = y -> B) -> ((p , x) = (p, y) -> B). Proof. intros t e. apply t. exact (pr2_uip@{i} e). Defined. Lemma simplification_sigma2_uip_refl@{i j|} : forall {A : Type@{i}} {uip:UIP A} {P : A -> Type@{i}} {B : Type@{j}} (p : A) (x : P p) (G : x = x -> B), @simplification_sigma2_uip A uip P B p x x G 1 = G 1. Proof. intros. unfold simplification_sigma2_uip. now rewrite pr2_uip_refl. Defined. Arguments simplification_sigma2_uip : simpl never. Lemma simplification_K_uip@{i j|} {A : Type@{i}} `{UIP A} (x : A) {B : x = x -> Type@{j}} : B 1 -> (forall p : x = x, B p). Proof. apply UIP_K. Defined. Arguments simplification_K_uip : simpl never. Lemma simplification_K_uip_refl@{i j|} : forall {A : Type@{i}} `{UIP A} (x : A) {B : x = x -> Type@{j}} (p : B 1), simplification_K_uip x p 1 = p. Proof. intros. unfold simplification_K_uip, UIP_K. now rewrite uip_refl_refl. Defined. Definition ind_pack@{i} {A : Type@{i}} {B : A -> Type@{i}} {x : A} {p q : B x} (e : p = q) : @paths (sigma (fun x => B x)) (x, p) (x, q) := (pr2_inv_uip e). Definition ind_pack_inv_equiv@{i} {A : Type@{i}} {uip : UIP A} {B : A -> Type@{i}} {x : A} (p q : B x) (e : p = q) : pr2_uip (pr2_inv_uip e) = e. Proof. destruct e. apply pr2_uip_refl. Defined. 
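(* Illustrative usage sketch (not part of the original development): the
   "solution" and sigma-simplification lemmas above are the building blocks
   applied by the simplifier. The two toy statements below are hypothetical
   and only show how these lemmas are instantiated at the term level. *)
Example solution_right_usage {A : Type} (P : A -> Type) (t : A) (p : P t) :
  forall x : A, t = x -> P x.
Proof. exact (solution_right (B:=P) t p). Defined.

Example simplification_sigma1_dep_usage {A : Type} (P : A -> Type)
  (x y : A) (px : P x) (py : P y) :
  sigmaI P x px = sigmaI P y py -> x = y.
Proof.
  refine (simplification_sigma1_dep _ _ _ _ _).
  intros e _. exact e.
Defined.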
Definition opaque_ind_pack_inv@{i j|} {A : Type@{i}} {uip : UIP A} {B : A -> Type@{i}} {x : A} {p q : B x} (G : p = q -> Type@{j}) (e : (x, p) = (x, q)) := G (pr2_uip@{i} e). Arguments opaque_ind_pack_inv : simpl never. Arguments pr2_uip : simpl never. Arguments pr2_inv_uip : simpl never. Lemma simplify_ind_pack@{i j|} {A : Type@{i}} {uip : UIP A} (B : A -> Type@{i}) (x : A) (p q : B x) (G : p = q -> Type@{j}) : (forall e : (x, p) = (x, q), opaque_ind_pack_inv G e) -> (forall e : p = q, G e). Proof. intros H. intros e. specialize (H (ind_pack e)). unfold opaque_ind_pack_inv in H. rewrite ind_pack_inv_equiv in H. apply H. Defined. Arguments simplify_ind_pack : simpl never. Lemma simplify_ind_pack_inv@{i j|} {A : Type@{i}} {uip : UIP A} (B : A -> Type@{i}) (x : A) (p : B x) (G : p = p -> Type@{j}) : G 1 -> opaque_ind_pack_inv G 1. Proof. intros H. unfold opaque_ind_pack_inv. destruct (pr2_uip_refl B x p). exact H. Defined. Arguments simplify_ind_pack_inv : simpl never. Definition simplified_ind_pack@{i j|} {A : Type@{i}} {uip : UIP A} (B : A -> Type@{i}) (x : A) (p : B x) (G : p = p -> Type@{j}) (t : opaque_ind_pack_inv G 1) := transport G (@pr2_uip_refl A uip B x p) t. Arguments simplified_ind_pack : simpl never. Lemma simplify_ind_pack_refl@{i j|} {A : Type@{i}} {uip : UIP A} (B : A -> Type@{i}) (x : A) (p : B x) (G : p = p -> Type@{j}) (t : forall (e : (x, p) = (x, p)), opaque_ind_pack_inv G e) : simplify_ind_pack B x p p G t 1 = simplified_ind_pack B x p G (t 1). Proof. reflexivity. Qed. Lemma simplify_ind_pack_elim@{i j|} {A : Type@{i}} {uip : UIP A} (B : A -> Type@{i}) (x : A) (p : B x) (G : p = p -> Type@{j}) (t : G 1) : simplified_ind_pack B x p G (simplify_ind_pack_inv B x p G t) = t. Proof. unfold simplified_ind_pack, simplify_ind_pack_inv. now destruct (pr2_uip_refl B x p). Qed. (** All the simplification rules involving UIP are treated as opaque when proving lemmas about definitions. To actually compute with these inside Coq, one has to make them transparent again. *) Global Opaque simplification_sigma2_uip (* simplification_sigma2_dec_point *) simplification_K_uip simplify_ind_pack simplified_ind_pack. Global Opaque opaque_ind_pack_inv. Ltac rewrite_sigma2_refl_noK := match goal with | |- context [@inj_right_sigma ?A ?H ?x ?P ?y ?y' _] => rewrite (@inj_right_sigma_refl A H x P y) | |- context [@simplification_sigma2_uip ?A ?H ?P ?B ?p ?x ?y ?X 1] => rewrite (@simplification_sigma2_uip_refl A H P B p x X); simpl (* | |- context [@simplification_sigma2_dec_point ?A ?p ?H ?P ?B ?x ?y ?X 1] => *) (* rewrite (@simplification_sigma2_dec_point_refl A p H P B x X); simpl *) | |- context [@simplification_K_uip ?A ?dec ?x ?B ?p 1] => rewrite (@simplification_K_uip_refl A dec x B p); simpl paths_ind | |- context [@simplify_ind_pack ?A ?uip ?B ?x ?p _ ?G _ 1] => rewrite (@simplify_ind_pack_refl A uip B x p G _) | |- context [@simplified_ind_pack ?A ?uip ?B ?x ?p ?G (simplify_ind_pack_inv _ _ _ _ ?t)] => rewrite (@simplify_ind_pack_elim A uip B x p G t) end. Ltac rewrite_sigma2_refl := rewrite_sigma2_refl_noK. (** This hint database and the following tactic can be used with [autounfold] to unfold everything to [eq_rect]s. *) #[export] Hint Unfold solution_left solution_right eq_sym_invol eq_symmetry_dep solution_left_dep solution_right_dep deletion simplification_sigma1 simplification_sigma1_dep apply_noConfusion transport paths_rec paths_rect paths_ind : equations. 
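(* Illustrative usage sketch (not part of the original development): with a
   [UIP] instance on the type of [x], [simplification_K_uip] provides
   Streicher's K at the term level. The example is hypothetical; the
   UIP-based rules were just declared [Opaque] above, which restricts
   computation with them but not their use in proofs. *)
Example simplification_K_uip_usage {A : Type} `{UIP A} (x : A)
  (P : x = x -> Type) (p : P 1) : forall e : x = x, P e.
Proof. exact (simplification_K_uip x p). Defined.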
(** Makes these definitions disappear at extraction time *)
Extraction Inline solution_right_dep solution_right solution_left solution_left_dep.
Extraction Inline eq_sym_invol eq_symmetry_dep.
Extraction Inline solution_right_let solution_left_let deletion.
Extraction Inline simplification_sigma1 simplification_sigma2_uip.
Extraction Inline simplification_K_uip.
Extraction Inline simplification_sigma1 simplification_sigma1_dep.
Extraction Inline simplification_sigma1_nondep_dep simplification_sigma1_dep_dep.
(** Simply unfold as much as possible. *)
Ltac unfold_equations := repeat progress autounfold with equations.
Ltac unfold_equations_in H := repeat progress autounfold with equations in H.
Ltac rewrite_refl_id := repeat (progress (autorewrite with refl_id) || (try rewrite_sigma2_refl)).
Ltac simplify_equations_in e := repeat progress (autounfold with equations in e ; simpl in e).
(** Using these we can make a simplifier that will perform the unification steps needed to put the goal in normalised form (provided there are only constructor forms). *)
Ltac block_equality id := match type of id with | ?R ?A ?t ?u => change (let _ := block in (R A t u)) in id | _ => idtac end.
Ltac revert_blocking_until id := Tactics.on_last_hyp ltac:(fun id' => match id' with | id => idtac | _ => block_equality id' ; revert id' ; revert_blocking_until id end).
Ltac not_var x := try (is_var x; fail 1).
(** These two tactics are dangerous as they can try to reduce terms to head-normal-form and take ages to fail. *)
Ltac try_discriminate := discriminate.
Ltac try_injection H := injection H.
Ltac simplify_one_dep_elim := match goal with | [ |- context [transport _ 1 _]] => simpl transport | [ |- context [paths_rec _ _ _ _ 1]] => simpl paths_rec | [ |- context [paths_rect _ _ _ _ _ 1]] => simpl paths_rect | [ |- context [paths_ind _ _ _ _ 1]] => simpl paths_ind | [ |- context [noConfusion_inv _]] => simpl noConfusion_inv | [ |- @opaque_ind_pack_inv ?A ?uip ?B ?x ?p _ ?G 1] => apply (@simplify_ind_pack_inv A uip B x p G) | [ |- let _ := block in _ ] => fail 1 | [ |- _ ] => (simplify * || simplify ?); cbv beta | [ |- _ -> ?B ] => let ty := type of B in (* Works only with non-dependent products *) intro || (let H := fresh in intro H) | [ |- forall x, _ ] => let H := fresh x in intro H | [ |- _ ] => intro end.
(** Repeat until no progress is possible. By construction, it should leave the goal with no remaining equalities generated by the [generalize_eqs] tactic. *)
Ltac simplify_dep_elim := repeat simplify_one_dep_elim.
(** Apply [noConfusion] on a given hypothesis. *)
Ltac noconf H := block_goal; revert_until H; block_goal; on_last_hyp ltac:(fun H' => revert H'); simplify_dep_elim; intros_until_block; intros_until_block.
(** Reverse and simplify. *)
Ltac simpdep := reverse; simplify_dep_elim.
(** Decompose existential packages. *)
Ltac decompose_exists id id' := hnf in id ; match type of id with | @sigma _ _ => let xn := fresh id "'" in destruct id as [xn id]; decompose_exists xn id; cbv beta delta [ pr1 pr2 ] iota in id, id'; decompose_exists id id' | _ => cbv beta delta [ pr1 pr2 ] iota in id, id' end.
(** Dependent generalization using existentials only.
*) Ltac generalize_sig_gen id cont := let id' := fresh id in get_signature_pack id id'; hnf in (value of id'); hnf in (type of id'); lazymatch goal with | id' := ?v |- context[ id ] => generalize (@idpath _ id' : v = id') ; clearbody id'; simpl in id'; cont id id' id v | id' := ?v |- _ => let id'1 := fresh id' in let id'2 := fresh id' in set (id'2 := pr2 id'); set (id'1 := pr1 id') in id'2; hnf in (value of id'1), (value of id'2); try red in (type of id'2); match goal with [ id'1 := ?t |- _ ] => generalize (@idpath _ id'1 : t = id'1); clearbody id'2 id'1; clear id' id; try unfold signature in id'2; hnf in id'2; simpl in id'2; rename id'2 into id; cont id id id'1 t end end. Ltac generalize_sig id cont := generalize_sig_gen id ltac:(fun id id' id'1 t => (* Fails if id = id' *) try rename id into id', id' into id; cont id'1 id). Ltac generalize_sig_vars id cont := generalize_sig_gen id ltac:(fun id id' id'1 t => move_after_deps id' t; revert_until id'; rename id' into id; cont id'1 id). Ltac paths_generalize_sig_gen id cont := let id' := fresh id in get_signature_pack id id'; hnf in (value of id'); hnf in (type of id'); lazymatch goal with | id' := ?v |- context[ id ] => generalize (@idpath _ id' : paths id' id') ; unfold id' at 1; clearbody id'; simpl in id'; cont id id' id' v | id' := ?v |- _ => let id'1 := fresh id' in let id'2 := fresh id' in set (id'2 := pr2 id'); set (id'1 := pr1 id') in id'2; hnf in (value of id'1), (value of id'2); match goal with | [ id'1 := ?t |- _ ] => generalize (@idpath _ id'1 : paths t id'1); clearbody id'2 id'1; clear id' id; compute in id'2; rename id'2 into id; cont id id id'1 v end end. Ltac paths_generalize_sig id cont := paths_generalize_sig_gen id ltac:(fun id id' id'1 t => (* Fails if id = id' *) try rename id into id', id' into id; cont id'1 id). Ltac paths_generalize_sig_vars id cont := paths_generalize_sig_gen id ltac:(fun id id' id'1 t => move_after_deps id' t; revert_until id'; rename id' into id; cont id'1 id). Ltac generalize_sig_dest id := generalize_sig id ltac:(fun id id' => decompose_exists id id'). Ltac generalize_sig_vars_dest id := generalize_sig_vars id ltac:(fun id id' => decompose_exists id id'). Ltac generalize_eqs_sig id := (needs_generalization id ; generalize_sig_dest id) || idtac. Ltac generalize_eqs_vars_sig id := (needs_generalization id ; generalize_sig_vars_dest id) || idtac. (** The default implementation of generalization using sigma types. *) Ltac generalize_by_eqs id := generalize_eqs_sig id. Ltac generalize_by_eqs_vars id := generalize_eqs_vars_sig id. (** Do dependent elimination of the last hypothesis, but not simplifying yet (used internally). *) Ltac destruct_last := on_last_hyp ltac:(fun id => simpl in id ; generalize_by_eqs id ; destruct id). (** The rest is support tactics for the [Equations] command. *) Definition hide_pattern {A : Type} (t : A) := t. Definition add_pattern {B} (A : Type) (b : B) := A. (** To solve a goal by inversion on a particular target. *) Ltac do_empty id := elim Empty ; simpl in id ; solve [ generalize_by_eqs id ; destruct id ; simplify_dep_elim | apply id ; eauto with simp ]. (** If defining recursive functions, the prototypes come first. *) Ltac introduce p := first [ match p with _ => (* Already there, generalize dependent hyps *) generalize dependent p ; intros p end | intros until p | intros until 1 | intros ]. Ltac do_case p := introduce p ; (elim_case p || destruct p || (case p ; clear p)). Ltac do_ind p := introduce p ; (elim_ind p || induction p). 
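(* Illustrative sketch, kept as a comment (hypothetical, not part of the
   original development): the [depelim] and [dependent destruction] tactics
   defined below are typically used on an already instantiated hypothesis,
   e.g. a script such as

     Goal forall n m : nat, S n = S m -> n = m.
     Proof. intros n m H. depelim H. reflexivity. Qed.

   Running such a script additionally needs the [NoConfusion] instances and
   the constant registrations provided by later files of this development,
   which is why the example is only quoted here rather than executed. *)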
(** The following tactics allow one to do induction on an already instantiated inductive predicate by first generalizing it and adding the proper equalities to the context, in a manner similar to the BasicElim tactic of "Elimination with a motive" by Conor McBride. *)
(** The [do_depelim] higher-order tactic takes an elimination tactic as argument and a hypothesis and starts a dependent elimination using this tactic. *)
Ltac is_introduced H := match goal with | [ H' : _ |- _ ] => match H' with H => idtac end end.
Tactic Notation "intro_block" hyp(H) := (is_introduced H ; block_goal ; revert_until H ; block_goal) || (let H' := fresh H in intros until H' ; block_goal) || (intros ; block_goal).
Tactic Notation "intro_block_id" ident(H) := (is_introduced H ; block_goal ; revert_until H ; block_goal) || (let H' := fresh H in intros until H' ; block_goal) || (intros ; block_goal).
Ltac unblock_dep_elim := match goal with | |- let _ := block in ?T => match T with context [ block ] => change T ; intros_until_block end | _ => unblock_goal end.
(** A tactic that tries to remove trivial equality guards in induction hypotheses coming from [dependent induction]/[generalize_eqs] invocations. *)
Ltac simplify_IH_hyps := repeat match goal with | [ hyp : context [ block ] |- _ ] => cbn beta in hyp; eqns_specialize_eqs_block hyp; cbn beta iota delta[paths_rect_r paths_rect] zeta in hyp end.
Ltac simpl_dep_elim := simplify_dep_elim ; simplify_IH_hyps ; unblock_dep_elim.
Ltac do_intros H := (try intros until H) ; (intro_block_id H || intro_block H) ; (try simpl in H ; simplify_equations_in H).
Ltac do_depelim_nosimpl tac H := do_intros H ; generalize_by_eqs H ; tac H.
Ltac do_depelim tac H := do_depelim_nosimpl tac H ; simpl_dep_elim; unblock_goal.
Ltac do_depind tac H := (try intros until H) ; intro_block H ; (try simpl in H ; simplify_equations_in H) ; generalize_by_eqs_vars H ; tac H ; simpl_dep_elim; unblock_goal.
(** To do dependent elimination on some hyp. *)
Ltac depelim id := do_depelim ltac:(fun hyp => do_case hyp) id.
Ltac depelim_term c := let H := fresh "term" in set (H:=c) in *; clearbody H ; depelim H.
(** Used internally. *)
Ltac depelim_nosimpl id := do_depelim_nosimpl ltac:(fun hyp => do_case hyp) id.
(** To do dependent induction on some hyp. *)
Ltac depind id := do_depind ltac:(fun hyp => do_ind hyp) id.
(** A variant where generalized variables should be given by the user. *)
Ltac do_depelim' tac H := (try intros until H) ; block_goal ; generalize_by_eqs H ; tac H ; simplify_dep_elim ; simplify_IH_hyps ; unblock_goal.
(** Calls [destruct] on the generalized hypothesis, results should be similar to inversion. By default, we don't try to generalize the hyp by its variable indices. *)
Tactic Notation "dependent" "destruction" ident(H) := do_depelim' ltac:(fun hyp => do_case hyp) H.
Tactic Notation "dependent" "destruction" ident(H) "using" constr(c) := do_depelim' ltac:(fun hyp => destruct hyp using c) H.
(** This tactic also generalizes the goal by the given variables before the elimination. *)
Tactic Notation "dependent" "destruction" ident(H) "generalizing" ne_hyp_list(l) := do_depelim' ltac:(fun hyp => revert l ; do_case hyp) H.
Tactic Notation "dependent" "destruction" ident(H) "generalizing" ne_hyp_list(l) "using" constr(c) := do_depelim' ltac:(fun hyp => revert l ; destruct hyp using c) H.
(** Then we have wrappers for usual calls to induction. One can customize the induction tactic by writing another wrapper calling do_depelim.
We suppose the hyp has to be generalized before calling [induction]. *) Tactic Notation "dependent" "induction" ident(H) := do_depind ltac:(fun hyp => do_ind hyp) H. Tactic Notation "dependent" "induction" ident(H) "using" constr(c) := do_depind ltac:(fun hyp => induction hyp using c) H. (** This tactic also generalizes the goal by the given variables before the induction. *) Tactic Notation "dependent" "induction" ident(H) "generalizing" ne_hyp_list(l) := do_depelim' ltac:(fun hyp => generalize l ; clear l ; do_ind hyp) H. Tactic Notation "dependent" "induction" ident(H) "generalizing" ne_hyp_list(l) "using" constr(c) := do_depelim' ltac:(fun hyp => generalize l ; clear l ; induction hyp using c) H. (** [solve_equation] is used to prove the equation lemmas for an existing definition. *) Ltac exfalso := cut Empty; [intros []|]. Ltac find_empty := simpl in * ; exfalso ; match goal with | [ H : _ |- _ ] => solve [ clear_except H ; dependent elimination H | eqns_specialize_eqs H ; assumption ] | [ H : _ <> _ |- _ ] => solve [ red in H ; eqns_specialize_eqs H ; assumption ] end. Ltac make_simplify_goal := match goal with | [ |- ?R ?A ?T ?U ] => let eqP := fresh "eqP" in set (eqP := fun x : A => R A x U) ; change (eqP T) end. Ltac hnf_gl := match goal with [ |- ?P ?T ] => let T' := eval hnf in T in change_no_check (P T') end. Ltac hnf_eq := match goal with | |- ?R ?x ?y => let x' := eval hnf in x in let y' := eval hnf in y in change_no_check (R x' y') end. Ltac red_eq_lhs := match goal with |- ?R ?x ?y => let x' := eval red in x in change_no_check (R x' y) end. Ltac red_one_eq := match goal with |- ?R ?x ?y => let x' := eval red in x in let y' := eval red in y in change_no_check (R x' y') end. Ltac red_eq := match goal with |- ?R ?x ?y => let rec reduce_eq x y := let x' := eval red in x in let y' := eval red in y in (reduce_eq x' y' || change_no_check (R x' y')) in reduce_eq x y end. Ltac red_gl := match goal with |- ?P ?x => let rec reduce x := let x' := eval red in x in (reduce x' || change_no_check (P x')) in reduce x end. Ltac rewrite_sigma2_rule_noK c := match c with | @inj_right_sigma ?A ?H ?x ?P ?y ?y' _ => rewrite (@inj_right_sigma_refl A H x P y) | @simplify_ind_pack ?A ?uip ?B ?x ?p _ ?G _ 1 => rewrite (@simplify_ind_pack_refl A uip B x p G _) | @simplification_sigma2_uip ?A ?H ?P ?B ?p ?x ?y ?X 1=> rewrite (@simplification_sigma2_uip_refl A H P B p x X); simpl (* | @simplification_sigma2_dec_point ?A ?p ?H ?P ?B ?x ?y ?X 1=> *) (* rewrite (@simplification_sigma2_dec_point_refl A p H P B x X); simpl *) | @simplification_K_uip ?A ?dec ?x ?B ?p 1=> rewrite (@simplification_K_uip_refl A dec x B p); simpl paths_rect end. Ltac rewrite_sigma2_rule c := rewrite_sigma2_rule_noK c. Ltac rewrite_sigma2_term x := match x with | ?f _ _ _ _ _ _ _ _ _ => rewrite_sigma2_rule f | ?f _ _ _ _ _ _ _ _ => rewrite_sigma2_rule f | ?f _ _ _ _ _ _ _ => rewrite_sigma2_rule f | ?f _ _ _ _ _ _ => rewrite_sigma2_rule f | ?f _ _ _ _ _ => rewrite_sigma2_rule f | ?f _ _ _ _ => rewrite_sigma2_rule f | ?f _ _ _ => rewrite_sigma2_rule f | ?f _ _ => rewrite_sigma2_rule f | ?f _ => rewrite_sigma2_rule f | ?f => rewrite_sigma2_rule f end. Ltac rewrite_sigma2_refl_eq := match goal with |- ?x = ?y => rewrite_sigma2_term x || rewrite_sigma2_term y end. Ltac rewrite_sigma2_refl_goal := match goal with | |- ?P ?x => rewrite_sigma2_term x end. (* Ltac simpl_equations := *) (* repeat (repeat (simpl; (hnf_eq || rewrite_sigma2_refl_eq || autorewrite with refl_id); simpl); *) (* try progress autounfold with equations). 
*) (* Ltac simplify_equation c := *) (* make_simplify_goal ; simpl ; *) (* repeat (try autounfoldify c; *) (* try (red_gl || rewrite_sigma2_refl_goal || autorewrite with refl_id) ; simpl). *) Ltac simpl_equations := repeat (repeat (simpl; hnf_eq; rewrite_refl_id); try progress autounfold with equations). Ltac simpl_equation_impl := repeat (unfold_equations; rewrite_refl_id). Ltac simplify_equation c := make_simplify_goal; simpl; repeat (try autounfold_ref c; progress (simpl; unfold_equations) || (progress (autorewrite with refl_id)) || reflexivity || (progress (rewrite_sigma2_refl))). Ltac solve_equation c := intros ; try simplify_equation c ; try (match goal with | [ |- ImpossibleCall _ ] => find_empty | _ => try red; try (reflexivity || discriminates) end). Definition depelim_module := tt. Register depelim_module as equations.depelim.module. Coq-Equations-1.3.1-8.20/theories/HoTT/EqDec.v000066400000000000000000000165701463127417400204230ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Set Warnings "-notation-overridden". Require Import Equations.Init. Require Import Equations.HoTT.Logic. Require Import Equations.HoTT.Classes. (** Decidable equality. We redevelop the derivation of [K] from decidable equality on [A] making everything transparent and moving to [Type] so that programs using this will actually be computable inside Coq. *) Set Universe Polymorphism. Import Sigma_Notations. Local Open Scope equations_scope. Local Open Scope path_scope. (** We rederive the UIP shifting proof transparently, and on type. Taken from Coq's stdlib. *) Definition UIP_refl_on_ X (x : X) := forall p : x = x, p = 1. Definition UIP_refl_ X := forall (x : X) (p : x = x), p = 1. Lemma Id_trans_r {A} (x y z : A) : x = y -> z = y -> x = z. Proof. destruct 1. destruct 1. exact 1. Defined. (** We rederive the UIP shifting proof transparently. *) Theorem UIP_shift_on@{i} (X : Type@{i}) (x : X) : UIP_refl_on_ X x -> forall y : x = x, UIP_refl_on_ (x = x) y. Proof. intros UIP_refl y. rewrite (UIP_refl y). intros z. assert (UIP:forall y' y'' : x = x, y' = y''). { intros. apply Id_trans_r with 1; apply UIP_refl. } transitivity (concat (concat (UIP 1 1) z) (inverse (UIP 1 1))). - destruct z. destruct (UIP _ _). reflexivity. - change (match 1 as y' in _ = x' return y' = y' -> Type@{i} with | 1 => fun z => z = 1 end (concat (concat (UIP 1 1) z) (inverse (UIP (1) (1))))). destruct z. destruct (UIP _ _). reflexivity. Defined. Theorem UIP_shift@{i} : forall {U : Type@{i}}, UIP_refl_@{i} U -> forall x:U, UIP_refl_@{i} (x = x). Proof. exact (fun U UIP_refl x => @UIP_shift_on U x (UIP_refl x)). Defined. (** This is the reduction rule of UIP. *) Lemma uip_refl_refl@{i} {A : Type@{i}} {E : UIP@{i} A} (x : A) : uip (x:=x) 1 1 = 1. Proof. apply UIP_shift@{i}. intros y e. apply uip@{i}. Defined. Theorem UIP_K@{i j} {A : Type@{i}} {U : UIP A} (x : A) : forall P : x = x -> Type@{j}, P 1 -> forall p : x = x, P p. Proof. intros P peq e. now elim (uip 1 e). Defined. (** Derivation of principles on sigma types whose domain is decidable. *) Section EqdepDec. Universe i. Context {A : Type@{i}} `{EqDec A}. 
Let comp {x y y':A} (eq1:x = y) (eq2:x = y') : y = y' := paths_ind _ (fun a _ => a = y') eq2 _ eq1. Remark trans_sym_eq : forall (x y:A) (u:x = y), comp u u = @idpath _ y. Proof. intros. case u; compute. exact 1. Defined. Variable x : A. Let nu {y:A} (u:x = y) : x = y := match eq_dec x y with | inl eqxy => eqxy | inr neqxy => Empty_rect (fun _ => _) (neqxy u) end. Let nu_constant : forall (y:A) (u v:x = y), nu u = nu v. intros. unfold nu in |- *. case (eq_dec x y); intros. - exact 1. - case e; trivial. Defined. Let nu_inv {y:A} (v:x = y) : x = y := comp (nu 1) v. Remark nu_left_inv : forall (y:A) (u:x = y), nu_inv (nu u) = u. Proof. intros. case u; unfold nu_inv in |- *. apply trans_sym_eq. Defined. Theorem eq_proofs_unicity : forall (y:A) (p1 p2:x = y), p1 = p2. Proof. intros. elim nu_left_inv with (u := p1). elim nu_left_inv with (u := p2). elim nu_constant with y p1 p2. reflexivity. Defined. Theorem K_dec : forall P:x = x -> Type@{i}, P 1 -> forall p:x = x, P p. Proof. intros. elim eq_proofs_unicity with x 1 p. trivial. Defined. Lemma eq_dec_refl : eq_dec x x = inl 1. Proof. case eq_dec; intros. - apply ap. apply eq_proofs_unicity. - elim e. apply 1. Defined. (** The corollary *) (* On [sigma] *) Let projs {P:A -> Type@{i}} (exP:sigma P) (def:P x) : P x := match exP with | sigmaI x' prf => match eq_dec x' x with | inl eqprf => paths_ind x' (fun x _ => P x) prf x eqprf | _ => def end end. Theorem inj_right_sigma {P : A -> Type@{i}} {y y':P x} : (x, y) = (x, y') -> y = y'. Proof. intros. cut (projs (x, y) y = projs (sigmaI P x y') y). - unfold projs. case (eq_dec x x). -- intro e. elim e using K_dec. trivial. -- intros. case e; reflexivity. - case X; reflexivity. Defined. Lemma inj_right_sigma_refl (P : A -> Type@{i}) (y : P x) : inj_right_sigma (y:=y) (y':=y) 1 = 1. Proof. unfold inj_right_sigma. intros. unfold paths_ind. unfold projs. destruct (inverse@{i} eq_dec_refl). unfold K_dec. simpl. unfold eq_proofs_unicity. subst projs. simpl. unfold nu_inv, comp, nu. simpl. unfold paths_ind, nu_left_inv, trans_sym_eq, paths_rect, nu_constant. destruct (inverse@{i} eq_dec_refl). reflexivity. Defined. End EqdepDec. #[export] Instance eq_eqdec {A} `{EqDec A} : forall x y : A, EqDec (x = y). Proof. intros. red. intros. exact (inl (eq_proofs_unicity _ _ x0 y0)). Defined. #[export] Instance eqdec_uip {A} (E : EqDec A) : UIP A := fun x y e e' => eq_proofs_unicity _ _ e e'. #[export] Instance eq_uip {A} (E : UIP A) : forall x : A, UIP (x = x). Proof. intros y e e'. intros e''. destruct e''. assert (Us := @UIP_shift A). compute in Us. forward Us. - intros; apply E. - intros. apply inverse. apply Us. Qed. #[export] Instance eqdec_hset (A : Type) `(UIP A) : IsHSet A. Proof. constructor. intros x y. constructor. intros p q. eapply Build_Contr with (uip p q). intros e. destruct p. apply uip. Defined. Lemma sigma_eq@{i} (A : Type@{i}) (P : A -> Type@{i}) (x y : sigma P) : x = y -> Σ p : (x.1 = y.1), p # x.2 = y.2. Proof. intros H; destruct H. destruct x as [x px]. simpl. refine (1, 1). Defined. Lemma is_hset {A} `{H : IsHSet A} {x y : A} (p q : x = y) : p = q. Proof. apply HSet.hset_path2. Defined. Theorem inj_sigma_r@{i} {A : Type@{i}} `{H : IsHSet A} {P : A -> Type@{i}} {x} {y y':P x} : sigmaI P x y = sigmaI P x y' -> y = y'. Proof. intros [H' H'']%sigma_eq. cbn in *. pose (i := is_hset H' 1). apply (transport (fun h => transport _ h y = y') i H''). Defined. Definition apd_eq {A} {x y : A} (p : x = y) {z} (q : z = x) : transport (@paths A z) p q = q @ p. Proof. now destruct p, q. Defined. 
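(* Illustrative usage sketch (not part of the original development): over a
   type with decidable equality, [inj_right_sigma] inverts an equality of
   dependent pairs that share the same first component. The statement below
   is hypothetical. *)
Example inj_right_sigma_usage {A : Type} `{EqDec A} (P : A -> Type)
  (x : A) (p q : P x) (e : sigmaI P x p = sigmaI P x q) : p = q.
Proof. exact (inj_right_sigma x e). Defined.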
Require Import HoTT.Basics.Trunc. Lemma hprop_hset {A} (h : IsHProp A) : IsHSet A. Proof. apply istrunc_hprop. Defined. (** Proof that equality proofs in 0-truncated types are connected *) Lemma hset_pi {A} `{H : IsHSet A} (x y : A) (p q : x = y) (r : p = q) : is_hset p q = r. Proof. pose (hprop_hset (H x y)). unfold is_hset. apply HSet.hset_path2. Defined. Lemma is_hset_refl {A} `{H : IsHSet A} (x : A) : is_hset (@idpath _ x) 1 = 1%path. Proof. apply hset_pi. Defined. Lemma inj_sigma_r_refl@{i} (A : Type@{i}) (H : IsHSet A) (P : A -> Type@{i}) x (y : P x) : inj_sigma_r (y:=y) (y':=y) 1 = 1%path. Proof. unfold inj_sigma_r. intros. simpl. now rewrite HSet.axiomK_idpath. Defined. Theorem K {A} `{IsHSet A} (x : A) (P : x = x -> Type) : P 1%path -> forall p : x = x, P p. Proof. intros. exact (is_hset 1 p # X). Defined. Coq-Equations-1.3.1-8.20/theories/HoTT/EqDecInstances.v000066400000000000000000000066461463127417400222760ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Set Warnings "-notation-overridden". From Equations Require Import Init. Require Import Equations.HoTT.Logic Equations.HoTT.Classes Equations.HoTT.DepElim Equations.HoTT.Constants Equations.HoTT.Tactics Equations.HoTT.EqDec Equations.HoTT.NoConfusion. From HoTT Require Import Spaces.List.Core. Local Open Scope equations_scope. Import Sigma_Notations. Set Universe Polymorphism. (** Tactic to solve EqDec goals, destructing recursive calls for the recursive structure of the type and calling instances of eq_dec on other types. *) Ltac eqdec_loop t u := (left; reflexivity) || (solve [right; simplify *]) || (let x := match t with | context C [ _ ?x ] => constr:(x) end in let y := match u with | context C [ _ ?y ] => constr:(y) end in let contrad := let Hn := fresh in intro Hn; right; intros He; apply Hn; revert He; simplify *; reflexivity in let good := let Heq := fresh in intros Heq; destruct Heq; let t' := match t with | context C [ ?x _ ] => constr:(x) end in let u' := match u with | context C [ ?y _ ] => constr:(y) end in (* idtac "there" t' u'; *) try (eqdec_loop t' u') in (* idtac "here" x y; *) match goal with | [ H : forall z, sum (_ = z) _ |- _ ] => case (H y); [good|contrad] | _ => case (eq_dec x y); [good|contrad] end) || idtac. Ltac eqdec_proof := try red; intros; match goal with | |- sum (?x = ?y) _ => revert y; do_ind x; intros until y; depelim y; match goal with |- sum (?x' = ?y') _ => eqdec_loop x' y' end end. Ltac Equations.Init.solve_eqdec ::= eqdec_proof. (** Standard instances. *) #[export] Instance unit_eqdec : EqDec Unit. Proof. eqdec_proof. Defined. #[export] Instance bool_eqdec : EqDec Bool.Bool. Proof. eqdec_proof. Defined. #[export] Instance nat_eqdec : EqDec nat. Proof. eqdec_proof. Defined. #[export] Instance prod_eqdec {A B} `(EqDec A) `(EqDec B) : EqDec (prod A B). Proof. eqdec_proof. Defined. #[export] Instance sum_eqdec {A B} `(EqDec A) `(EqDec B) : EqDec (A + B). Proof. eqdec_proof. Defined. #[export] Instance list_eqdec {A} `(EqDec A) : EqDec (list A). Proof. eqdec_proof. Defined. (** Any signature made up entirely of decidable types is decidable. 
*) Polymorphic Definition eqdec_sigma_Id@{i} {A : Type@{i}} {B : A -> Type@{i}} `(EqDec A) `(forall a, EqDec (B a)) : EqDec@{i} (sigma B). Proof. Set Printing Universes. intros. intros [xa xb] [ya yb]. case (eq_dec xa ya). - intros Hxya. destruct Hxya. case (eq_dec xb yb). + intros He; destruct He. left. reflexivity. + intros. right. apply simplification_sigma2_uip@{i i}. apply e. - intros. right. refine (simplification_sigma1_dep@{i i} _ _ _ _ _). intros He _; revert He. apply e. Defined. #[export] Existing Instance eqdec_sigma_Id. Coq-Equations-1.3.1-8.20/theories/HoTT/FunctionalInduction.v000066400000000000000000000163461463127417400234220ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Set Warnings "-notation-overridden". Require Import Equations.CoreTactics. Require Import Equations.HoTT.Logic Equations.HoTT.Classes Equations.HoTT.EqDec Equations.HoTT.DepElim. Require Import HoTT.Spaces.Nat. Local Open Scope nat_scope. Local Open Scope equations_scope. Import Sigma_Notations. (** The tactic [funind c Hc] applies functional induction on the application [c] which must be of the form [f args] where [f] has a [FunctionalInduction] instance. [Hc] is the name given to the call, used to generate hypothesis names. *) Ltac funind c Hcall := match c with context C [ ?f ] => let x := constr:(fun_ind_prf (f:=f)) in (let prf := eval simpl in x in let p := context C [ prf ] in let prf := fresh in let call := fresh in assert(prf:=p) ; (* Abstract the call *) set(call:=c) in *; generalize (@idpath _ call : call = c); clearbody call ; intro Hcall ; (* Now do dependent elimination and simplifications *) dependent induction prf ; simplify_IH_hyps ; (* Use the simplifiers for the constant to get a nicer goal. *) try simpc f in * ; try on_last_hyp ltac:(fun id => simpc f in id ; noconf id)) || fail 1 "Internal error in funind" end || fail "Maybe you didn't declare the functional induction principle for" c. Ltac funind_call f H := on_call f ltac:(fun call => funind call H). Ltac make_refine n c := match constr:(n) with | 0 => uconstr:(c) | S ?n => make_refine n uconstr:(c _) end. Ltac constr_head c := let rec aux c := match c with | ?f _ => aux f | ?f => f end in aux c. Ltac with_last_secvar_aux tac := match goal with [ H : _ |- _ ] => is_secvar H; tac H end. Ltac with_last_secvar tac orelse := with_last_secvar_aux tac + (* No section variables *) orelse. Ltac get_elim c := match c with | context [?f] => constr:(fun_elim (f:=f)) end. Ltac clear_non_secvar := repeat match goal with | [ H : _ |- _ ] => tryif is_secvar H then fail else clear H end. Ltac remember_let H := lazymatch goal with | [ H := ?body : ?type |- _ ] => generalize (1%path : H = body) end. Ltac unfold_packcall packcall := lazymatch goal with |- ?R ?x ?y -> ?P => let y' := eval unfold packcall in y in change (R x y' -> P) end. Ltac simplify_IHs_call := repeat match goal with | [ hyp : context [ block ] |- _ ] => cbn beta in hyp; eqns_specialize_eqs_block hyp 2; cbn beta iota delta[transport paths_rect paths_rec paths_ind] zeta in hyp end. Ltac make_packcall packcall c := match goal with | [ packcall : ?type |- _ ] => change (let _ := c in type) in (type of packcall) end. 
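(* Illustrative sketch, kept as a comment (hypothetical, not part of the
   original development): the machinery below implements [funelim]. For a
   function defined with the [Equations] command, e.g.

     Equations app {A} (l l' : list A) : list A :=
     | nil, l' => l'
     | cons a l, l' => cons a (app l l').

   a goal mentioning [app l l'] can be attacked with [funelim (app l l')],
   which applies the functional elimination principle generated for [app]
   after generalizing the goal by the corresponding call. *)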
Ltac funelim_sig_tac c Heq simp_IHs tac := let elimc := get_elim c in let packcall := fresh "packcall" in let packcall_fn := fresh "packcall_fn" in let elimfn := match elimc with fun_elim (f:=?f) => constr:(f) end in let elimn := match elimc with fun_elim (n:=?n) => constr:(n) end in block_goal; uncurry_call elimfn c packcall packcall_fn; remember_let packcall_fn; unfold_packcall packcall; (refine (simplification_sigma1 _ _ _ _ _) || refine (simplification_sigma1_nondep_dep _ _ _ _ _) || refine (simplification_sigma1_dep _ _ _ _ _)); let H := fresh "eqargs" in let Heqfresh := fresh "__Heq__" in intros H Heqfresh; revert Heqfresh; block_goal; revert H; subst packcall_fn; clearbody packcall; make_packcall packcall elimfn; with_last_secvar ltac:(fun eos => move packcall before eos) ltac:(move packcall at top); revert_until packcall; block_goal; cbv zeta in packcall; revert packcall; curry; let elimt := make_refine elimn elimc in unshelve refine_ho elimt; intros; cbv beta; simplify_dep_elim; intros_until_block; simplify_dep_elim; cbn beta iota delta [transport paths_rec paths_rect paths_ind pack_sigma pack_sigma_nondep] in *; simp_IHs; intros _ Heqfresh; unblock_goal; try (rewrite <- Heqfresh); try (rename Heqfresh into Heq || (let Heqf := fresh Heq in rename Heq into Heqf; rename Heqfresh into Heq)); tac c. Ltac funelim_constr_as c h simp_IHs := funelim_sig_tac c h simp_IHs ltac:(fun _ => idtac). Ltac get_first_elim c := match c with | ?x ?a ?b ?c ?d ?e ?f ?g ?h ?i ?j ?k ?l ?m => get_elim (x a b c d e f g h i j k l m) | ?x ?a ?b ?c ?d ?e ?f ?g ?h ?i ?j ?k ?l => get_elim (x a b c d e f g h i j k l) | ?x ?a ?b ?c ?d ?e ?f ?g ?h ?i ?j ?k => get_elim (x a b c d e f g h i j k) | ?x ?a ?b ?c ?d ?e ?f ?g ?h ?i ?j => get_elim (x a b c d e f g h i j) | ?x ?a ?b ?c ?d ?e ?f ?g ?h ?i => get_elim (x a b c d e f g h i) | ?x ?a ?b ?c ?d ?e ?f ?g ?h => get_elim (x a b c d e f g h) | ?x ?a ?b ?c ?d ?e ?f ?g => get_elim (x a b c d e f g) | ?x ?a ?b ?c ?d ?e ?f => get_elim (x a b c d e f) | ?x ?a ?b ?c ?d ?e => get_elim (x a b c d e) | ?x ?a ?b ?c ?d => get_elim (x a b c d) | ?x ?a ?b ?c => get_elim (x a b c) | ?x ?a ?b => get_elim (x a b) | ?x ?a => get_elim (x a) end. (** An alternative tactic that does not generalize over the arguments. BEWARE: It might render the goal unprovable. *) Ltac apply_funelim c := let elimc := get_first_elim c in let elimfn := match elimc with fun_elim (f:=?f) => constr:(f) end in let elimn := match elimc with fun_elim (n:=?n) => constr:(n) end in let elimt := make_refine elimn elimc in apply_args c elimt ltac:(fun elimc => unshelve refine_ho elimc; cbv beta). (** A special purpose database used to prove the elimination principle. *) Create HintDb funelim. (** Solve reflexivity goals. *) #[export] Hint Extern 0 (_ = _) => constructor : funelim. (** Specialize hypotheses begining with equalities. *) Ltac specialize_hyps := match goal with | [ H : forall _ : paths ?x ?x, _ |- _ ] => specialize (H 1%path); unfold paths_rect, paths_rec, transport, paths_ind in H ; simpl in H end. #[export] Hint Extern 100 => specialize_hyps : funelim. (** Destruct conjunctions everywhere, starting with the hypotheses. This tactic allows to close functional induction proofs involving multiple nested and/or mutual recursive definitions. *) Lemma uncurry_prod (A B C : Type) : (A * B -> C) -> (A -> B -> C). Proof. intros H a b. exact (H (pair a b)). Defined. Ltac specialize_mutual_nested := match goal with | [ H : _ * _ |- _ ] => destruct H | [ |- _ * _ ] => split end. 
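(* Illustrative usage sketch (not part of the original development): the
   [funelim] hints above rely on [specialize_hyps] to discharge trivial
   reflexivity guards in induction hypotheses. The toy example below is
   hypothetical. *)
Example specialize_hyps_usage {A : Type} (x : A) (P : Type)
  (H : forall _ : x = x, P) : P.
Proof. specialize_hyps. exact H. Defined.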
#[export] Hint Extern 50 => specialize_mutual_nested : funelim. Ltac specialize_mutual := match goal with [ H : _ * _ |- _ ] => destruct H (* Fragile, might render later goals unprovable *) | [ H : ?X -> _, H' : ?X |- _ ] => match X with | forall (_ : _), _ => specialize (H H') end | [ H : (?A * ?B) -> ?C |- _ ] => apply (uncurry_prod A B C) in H end. Ltac Equations.Init.specialize_mutfix ::= repeat specialize_mutual. Coq-Equations-1.3.1-8.20/theories/HoTT/Loader.v000066400000000000000000000026421463127417400206430ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (** The set of libraries required to run Equations with all features. *) (** This exports tactics *) Set Warnings "-notation-overridden". From Equations Require Export Init Signature. Require Import Equations.CoreTactics. Require Export Equations.HoTT.Logic Equations.HoTT.Classes. Require Import Equations.HoTT.WellFounded. Require Import Equations.HoTT.DepElim Equations.HoTT.EqDec Equations.HoTT.Constants. Require Export Equations.HoTT.EqDecInstances. Require Export Equations.HoTT.NoConfusion. Require Import Equations.HoTT.Subterm. Require Export Equations.HoTT.Tactics. Require Export Equations.HoTT.FunctionalInduction. (* funelim tactic *) Global Obligation Tactic := Equations.CoreTactics.equations_simpl. (** Tactic to solve well-founded proof obligations by default *) Ltac solve_rec := simpl in * ; cbv zeta ; intros ; try typeclasses eauto with subterm_relation simp rec_decision. Export EquationsNotations. Coq-Equations-1.3.1-8.20/theories/HoTT/Logic.v000066400000000000000000000036641463127417400204770ustar00rootroot00000000000000From Equations Require Import Init. Require Export HoTT.Basics.Overture. Require Export HoTT.Basics.Tactics. Set Warnings "-notation-overridden". Set Universe Polymorphism. (** Fixes to the HoTT library *) Register idpath as core.identity.refl. Register paths_rect as core.identity.ind. Register paths as core.identity.type. Register inverse as core.identity.sym. (** This allows [rewrite] to both in left-to-right and right-to left directions. *) Definition paths_rect_r (A : Type) (x : A) (P : A -> Type) (p : P x) (y : A) (e : paths y x) : P y := paths_rect A x (fun y e => P y) p y (inverse e). Register concat as core.identity.trans. Register ap as core.identity.congr. (** [path_inspect x] allows to pattern-match x while retaining a propositional equality with [x] *) Definition path_inspect {A : Type} (x : A) : { y | paths x y } := (x ; idpath). Require Import HoTT.Types.Bool. (* For compatibility with Coq's [induction] *) Definition Bool_rect := Bool_ind. (** /End of fixes to the HoTT library *) (** The polymorphic equality type used by Equations when working with equality in Type. *) Definition transport_r {A} (P : A -> Type) {x y : A} (e : y = x) : P x -> P y := fun x => match (inverse e) with 1%path => x end. Lemma paths_rect_dep_r {A} (x : A) (P : forall a, a = x -> Type) (p : P x 1%path) (y : A) (e : y = x) : P y e. Proof. destruct e. apply p. Defined. Module Sigma_Notations. Notation "'Σ' x .. y , P" := (sigma (fun x => .. (sigma (fun y => P)) ..)) (at level 200, x binder, y binder, right associativity, format "'[ ' '[ ' Σ x .. 
y ']' , '/' P ']'") : type_scope. Notation "( x , .. , y , z )" := (@sigmaI _ _ x .. (@sigmaI _ _ y z) ..) (right associativity, at level 0, format "( x , .. , y , z )") : equations_scope. Notation "x .1" := (Equations.Init.pr1 x) : equations_scope. Notation "x .2" := (Equations.Init.pr2 x) : equations_scope. End Sigma_Notations. Import Sigma_Notations. Coq-Equations-1.3.1-8.20/theories/HoTT/NoConfusion.v000066400000000000000000000025571463127417400217020ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (** Instances of [NoConfusion] for the standard datatypes. To be used by [equations] when it needs applications of injectivity or discrimination on some equation. *) Set Warnings "-notation-overridden". From Equations Require Import Init Signature. Require Import Equations.CoreTactics. Require Import Equations.HoTT.Logic Equations.HoTT.Classes Equations.HoTT.EqDec Equations.HoTT.Constants. Require Import Equations.HoTT.DepElim Equations.HoTT.Tactics. Require Import HoTT.Spaces.List.Core. (** Parameterized inductive types just need NoConfusion. *) Local Set Universe Minimization ToSet. Derive NoConfusion for Unit Bool.Bool nat option sum prod list. #[export] Instance Bool_depelim : DependentEliminationPackage Bool.Bool := { elim := @Bool.Bool_ind }. (* FIXME should be done by the derive command *) Extraction Inline noConfusion NoConfusionPackage_nat. Coq-Equations-1.3.1-8.20/theories/HoTT/Relation.v000066400000000000000000000156671463127417400212250ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * INRIA, CNRS and contributors - Copyright 1999-2018 *) (* Type := | t_step (y:A) : R x y -> trans_clos x y | t_trans (y z:A) : trans_clos x y -> trans_clos y z -> trans_clos x z. (** Alternative definition by transitive extension on the left *) Inductive trans_clos_1n (x: A) : A -> Type := | t1n_step (y:A) : R x y -> trans_clos_1n x y | t1n_trans (y z:A) : R x y -> trans_clos_1n y z -> trans_clos_1n x z. (** Alternative definition by transitive extension on the right *) Inductive trans_clos_n1 (x: A) : A -> Type := | tn1_step (y:A) : R x y -> trans_clos_n1 x y | tn1_trans (y z:A) : R y z -> trans_clos_n1 x y -> trans_clos_n1 x z. End Transitive_Closure. (** ** Reflexive closure *) Section Reflexive_Closure. Context {A : Type} (R : Relation A). (** Definition by direct transitive closure *) Inductive clos_refl (x: A) : A -> Type := | r_step (y:A) : R x y -> clos_refl x y | r_refl : clos_refl x x. End Reflexive_Closure. (** ** Reflexive-transitive closure *) Section Reflexive_Transitive_Closure. Context {A : Type} (R : Relation A). (** Definition by direct reflexive-transitive closure *) Inductive clos_refl_trans (x:A) : A -> Type := | rt_step (y:A) : R x y -> clos_refl_trans x y | rt_refl : clos_refl_trans x x | rt_trans (y z:A) : clos_refl_trans x y -> clos_refl_trans y z -> clos_refl_trans x z. 
(** Alternative definition by transitive extension on the left *) Inductive clos_refl_trans_1n (x: A) : A -> Type := | rt1n_refl : clos_refl_trans_1n x x | rt1n_trans (y z:A) : R x y -> clos_refl_trans_1n y z -> clos_refl_trans_1n x z. (** Alternative definition by transitive extension on the right *) Inductive clos_refl_trans_n1 (x: A) : A -> Type := | rtn1_refl : clos_refl_trans_n1 x x | rtn1_trans (y z:A) : R y z -> clos_refl_trans_n1 x y -> clos_refl_trans_n1 x z. End Reflexive_Transitive_Closure. (** ** Reflexive-symmetric-transitive closure *) Section Reflexive_Symmetric_Transitive_Closure. Context {A : Type} (R : Relation A). (** Definition by direct reflexive-symmetric-transitive closure *) Inductive clos_refl_sym_trans : Relation A := | rst_step (x y:A) : R x y -> clos_refl_sym_trans x y | rst_refl (x:A) : clos_refl_sym_trans x x | rst_sym (x y:A) : clos_refl_sym_trans x y -> clos_refl_sym_trans y x | rst_trans (x y z:A) : clos_refl_sym_trans x y -> clos_refl_sym_trans y z -> clos_refl_sym_trans x z. (** Alternative definition by symmetric-transitive extension on the left *) Inductive clos_refl_sym_trans_1n (x: A) : A -> Type := | rst1n_refl : clos_refl_sym_trans_1n x x | rst1n_trans (y z:A) : R x y + R y x -> clos_refl_sym_trans_1n y z -> clos_refl_sym_trans_1n x z. (** Alternative definition by symmetric-transitive extension on the right *) Inductive clos_refl_sym_trans_n1 (x: A) : A -> Type := | rstn1_refl : clos_refl_sym_trans_n1 x x | rstn1_trans (y z:A) : R y z + R z y -> clos_refl_sym_trans_n1 x y -> clos_refl_sym_trans_n1 x z. End Reflexive_Symmetric_Transitive_Closure. (** ** Converse of a Relation *) Section Converse. Context {A : Type} (R : Relation A). Definition transp (x y:A) := R y x. End Converse. (** ** Union of Relations *) Section Union. Context {A : Type} (R1 R2 : Relation A). Definition union (x y:A) := (R1 x y + R2 x y)%type. End Union. (** ** Disjoint union of Relations *) Section Disjoint_Union. Context {A B : Type}. Variable leA : A -> A -> Type. Variable leB : B -> B -> Type. Inductive le_AsB : A + B -> A + B -> Type := | le_aa (x y:A) : leA x y -> le_AsB (inl x) (inl y) | le_ab (x:A) (y:B) : le_AsB (inl x) (inr y) | le_bb (x y:B) : leB x y -> le_AsB (inr x) (inr y). End Disjoint_Union. (** ** Lexicographic order on dependent pairs *) Section Lexicographic_Product. Context {A : Type} {B : A -> Type}. Variable leA : A -> A -> Type. Variable leB : forall x:A, B x -> B x -> Type. Inductive lexprod : sigma B -> sigma B -> Type := | left_lex : forall (x x':A) (y:B x) (y':B x'), leA x x' -> lexprod (x, y) (x', y') | right_lex : forall (x:A) (y y':B x), leB x y y' -> lexprod (x, y) (x, y'). End Lexicographic_Product. (** ** Product of Relations *) Section Symmetric_Product. Variable A : Type. Variable B : Type. Variable leA : A -> A -> Type. Variable leB : B -> B -> Type. Inductive symprod : A * B -> A * B -> Type := | left_sym : forall x x':A, leA x x' -> forall y:B, symprod (pair x y) (pair x' y) | right_sym : forall y y':B, leB y y' -> forall x:A, symprod (pair x y) (pair x y'). End Symmetric_Product. (** ** Multiset of two Relations *) Section Swap. Variable A : Type. Variable R : A -> A -> Type. Inductive swapprod : A * A -> A * A -> Type := | sp_noswap x y (p:A * A) : symprod A A R R (pair x y) p -> swapprod (pair x y) p | sp_swap x y (p:A * A) : symprod A A R R (pair x y) p -> swapprod (pair y x) p. End Swap. Local Open Scope equations_scope. From HoTT Require Import Spaces.List.Core. Local Open Scope list_scope. Section Lexicographic_Exponentiation. 
Variable A : Set. Variable leA : A -> A -> Type. Let Nil := nil (A:=A). Let List := list A. Inductive Ltl : List -> List -> Type := | Lt_nil (a:A) (x:List) : Ltl Nil (a :: x) | Lt_hd (a b:A) : leA a b -> forall x y:list A, Ltl (a :: x) (b :: y) | Lt_tl (a:A) (x y:List) : Ltl x y -> Ltl (a :: x) (a :: y). Inductive Desc : List -> Type := | d_nil : Desc Nil | d_one (x:A) : Desc (x :: Nil) | d_conc (x y:A) (l:List) : clos_refl leA x y -> Desc (l ++ y :: Nil) -> Desc ((l ++ y :: Nil) ++ x :: Nil). Definition Pow : Type := sigma Desc. Definition lex_exp (a b:Pow) : Type := Ltl a.1 b.1. End Lexicographic_Exponentiation. #[export] Hint Unfold transp union: Relations. #[export] Hint Resolve t_step rt_step rt_refl rst_step rst_refl: Relations. #[export] Hint Immediate rst_sym: Relations. Coq-Equations-1.3.1-8.20/theories/HoTT/Relation_Properties.v000066400000000000000000000275071463127417400234350ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * INRIA, CNRS and contributors - Copyright 1999-2018 *) (* S x y. Class Equivalence {A} (R : Relation A) := { Equivalence_Reflexive : Reflexive R | 2 ; Equivalence_Symmetric : Symmetric R | 2 ; Equivalence_Transitive : Transitive R | 2 }. Notation inclusion R R' := (subrelation R R'). #[export] Hint Constructors sum : Relations. Section Properties. Context{A : Type}. Variable R : Relation A. Section Clos_Refl_Trans. Local Notation "R *" := (clos_refl_trans R) (at level 8, no associativity, format "R *"). (** Correctness of the reflexive-transitive closure operator *) Lemma clos_rt_is_preorder : PreOrder R*. Proof. constructor. - exact (rt_refl R). - exact (rt_trans R). Defined. (** Idempotency of the reflexive-transitive closure operator *) Lemma clos_rt_idempotent : subrelation (R*)* R*. Proof. red. induction 1; auto with Relations. intros. apply rt_trans with y; auto with Relations. Defined. End Clos_Refl_Trans. Section Clos_Refl_Sym_Trans. (** Reflexive-transitive closure is included in the reflexive-symmetric-transitive closure *) Lemma clos_rt_clos_rst : subrelation (clos_refl_trans R) (clos_refl_sym_trans R). Proof. red. induction 1; auto with Relations. apply rst_trans with y; auto with Relations. Defined. (** Reflexive closure is included in the reflexive-transitive closure *) Lemma clos_r_clos_rt : inclusion (clos_refl R) (clos_refl_trans R). Proof. induction 1 as [? ?| ]. - constructor; auto. - constructor 2. Defined. Lemma clos_rt_t : forall x y z, clos_refl_trans R x y -> trans_clos R y z -> trans_clos R x z. Proof. induction 1 as [b d H1|b|a b d H1 H2 IH1 IH2]; auto. intro H. apply t_trans with (y:=d); auto. constructor. auto. Defined. (** Correctness of the reflexive-symmetric-transitive closure *) Lemma clos_rst_is_equiv : Equivalence (clos_refl_sym_trans R). Proof. constructor. - exact (rst_refl R). - exact (rst_sym R). - exact (rst_trans R). Defined. (** Idempotency of the reflexive-symmetric-transitive closure operator *) Lemma clos_rst_idempotent : inclusion (clos_refl_sym_trans (clos_refl_sym_trans R)) (clos_refl_sym_trans R). Proof. red. induction 1; auto with Relations. apply rst_trans with y; auto with Relations. Defined. End Clos_Refl_Sym_Trans. Section Equivalences. (** *** Equivalences between the different definition of the reflexive, symmetric, transitive closures *) (** *** Contributed by P. 
Castéran *) (** Direct transitive closure vs left-step extension *) Lemma clos_t1n_trans : forall x y, trans_clos_1n R x y -> trans_clos R x y. Proof. induction 1. - left; assumption. - right with y; auto. left; auto. Defined. Lemma trans_clos_t1n : forall x y, trans_clos R x y -> trans_clos_1n R x y. Proof. induction 1. - left; assumption. - generalize IHX2; clear IHX2; induction IHX1. -- right with y; auto. -- right with y; auto. eapply IHIHX1; auto. apply clos_t1n_trans; auto. Defined. Lemma trans_clos_t1n_iff : forall x y, trans_clos R x y <-> trans_clos_1n R x y. Proof. intros x y. split. - apply (trans_clos_t1n x y). - apply (clos_t1n_trans x y). Defined. (** Direct transitive closure vs right-step extension *) Lemma clos_tn1_trans : forall x y, trans_clos_n1 R x y -> trans_clos R x y. Proof. induction 1. - left; assumption. - right with y; auto. left; assumption. Defined. Lemma trans_clos_tn1 : forall x y, trans_clos R x y -> trans_clos_n1 R x y. Proof. induction 1. - left; assumption. - elim IHX2. -- intro y0; right with y; auto. -- intros. right with y0; auto. Defined. Lemma trans_clos_tn1_iff : forall x y, trans_clos R x y <-> trans_clos_n1 R x y. Proof. split. - apply trans_clos_tn1. - apply clos_tn1_trans. Defined. (** Direct reflexive-transitive closure is equivalent to transitivity by left-step extension *) Lemma clos_rt1n_step : forall x y, R x y -> clos_refl_trans_1n R x y. Proof. intros x y H. right with y;[assumption|left]. Defined. Lemma clos_rtn1_step : forall x y, R x y -> clos_refl_trans_n1 R x y. Proof. intros x y H. right with x;[assumption|left]. Defined. Lemma clos_rt1n_rt : forall x y, clos_refl_trans_1n R x y -> clos_refl_trans R x y. Proof. induction 1. - constructor 2. - constructor 3 with y; auto. constructor 1; auto. Defined. Lemma clos_rt_rt1n : forall x y, clos_refl_trans R x y -> clos_refl_trans_1n R x y. Proof. induction 1. - apply clos_rt1n_step; assumption. - left. - generalize IHX2; clear IHX2; induction IHX1; auto. right with y; auto. eapply IHIHX1; auto. apply clos_rt1n_rt; auto. Defined. Lemma clos_rt_rt1n_iff : forall x y, clos_refl_trans R x y <-> clos_refl_trans_1n R x y. Proof. split. - apply clos_rt_rt1n. - apply clos_rt1n_rt. Defined. (** Direct reflexive-transitive closure is equivalent to transitivity by right-step extension *) Lemma clos_rtn1_rt : forall x y, clos_refl_trans_n1 R x y -> clos_refl_trans R x y. Proof. induction 1. - constructor 2. - constructor 3 with y; auto. constructor 1; assumption. Defined. Lemma clos_rt_rtn1 : forall x y, clos_refl_trans R x y -> clos_refl_trans_n1 R x y. Proof. induction 1. - apply clos_rtn1_step; auto. - left. - elim IHX2; auto. intros. right with y0; auto. Defined. Lemma clos_rt_rtn1_iff : forall x y, clos_refl_trans R x y <-> clos_refl_trans_n1 R x y. Proof. split. - apply clos_rt_rtn1. - apply clos_rtn1_rt. Defined. (** Induction on the left transitive step *) Lemma clos_refl_trans_ind_left : forall (x:A) (P:A -> Type), P x -> (forall y z:A, clos_refl_trans R x y -> P y -> R y z -> P z) -> forall z:A, clos_refl_trans R x z -> P z. Proof. intros. revert X X0. induction X1; intros; auto with Relations. { apply X0 with x; auto with Relations. } apply IHX1_2. { apply IHX1_1; auto with Relations. } intros. apply X0 with y0; auto with Relations. apply rt_trans with y; auto with Relations. Defined. 
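(* Illustrative usage sketch (not part of the original development):
   prepending a single step of [R] and switching to the left-step
   presentation of the reflexive-transitive closure. *)
Example clos_rt_1n_usage (x y z : A) (r : R x y)
  (h : clos_refl_trans R y z) : clos_refl_trans_1n R x z.
Proof.
  apply clos_rt_rt1n. apply rt_trans with y.
  - apply rt_step. exact r.
  - exact h.
Defined.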
(** Induction on the right transitive step *) Lemma rt1n_ind_right : forall (P : A -> Type) (z:A), P z -> (forall x y, R x y -> clos_refl_trans_1n R y z -> P y -> P x) -> forall x, clos_refl_trans_1n R x z -> P x. induction 3; auto. apply X0 with y; auto. Defined. Lemma clos_refl_trans_ind_right : forall (P : A -> Type) (z:A), P z -> (forall x y, R x y -> P y -> clos_refl_trans R y z -> P x) -> forall x, clos_refl_trans R x z -> P x. intros P z Hz IH x Hxz. apply clos_rt_rt1n_iff in Hxz. elim Hxz using rt1n_ind_right; auto. clear x Hxz. intros x y Hxy Hyz Hy. apply clos_rt_rt1n_iff in Hyz. eauto. Defined. (** Direct reflexive-symmetric-transitive closure is equivalent to transitivity by symmetric left-step extension *) Lemma clos_rst1n_rst : forall x y, clos_refl_sym_trans_1n R x y -> clos_refl_sym_trans R x y. Proof. induction 1. - constructor 2. - constructor 4 with y; auto. case s; [constructor 1 | constructor 3; constructor 1]; auto. Defined. Lemma clos_rst1n_trans : forall x y z, clos_refl_sym_trans_1n R x y -> clos_refl_sym_trans_1n R y z -> clos_refl_sym_trans_1n R x z. induction 1. - auto. - intros; right with y; eauto. Defined. Lemma clos_rst1n_sym : forall x y, clos_refl_sym_trans_1n R x y -> clos_refl_sym_trans_1n R y x. Proof. intros x y H; elim H. - constructor 1. - intros x0 y0 z D H0 H1; apply clos_rst1n_trans with y0; auto. right with x0. + destruct D; [right|left]; auto. + left. Defined. Lemma clos_rst_rst1n : forall x y, clos_refl_sym_trans R x y -> clos_refl_sym_trans_1n R x y. induction 1. - constructor 2 with y; auto with Relations; constructor 1; assumption. - constructor 1. - apply clos_rst1n_sym; auto. - eapply clos_rst1n_trans; eauto. Defined. Lemma clos_rst_rst1n_iff : forall x y, clos_refl_sym_trans R x y <-> clos_refl_sym_trans_1n R x y. Proof. split. - apply clos_rst_rst1n. - apply clos_rst1n_rst. Defined. (** Direct reflexive-symmetric-transitive closure is equivalent to transitivity by symmetric right-step extension *) Lemma clos_rstn1_rst : forall x y, clos_refl_sym_trans_n1 R x y -> clos_refl_sym_trans R x y. Proof. induction 1. - constructor 2. - constructor 4 with y; auto. case s; [constructor 1 | constructor 3; constructor 1]; auto. Defined. Lemma clos_rstn1_trans : forall x y z, clos_refl_sym_trans_n1 R x y -> clos_refl_sym_trans_n1 R y z -> clos_refl_sym_trans_n1 R x z. Proof. intros x y z H1 H2. induction H2. - auto. - intros. right with y0; eauto. Defined. Lemma clos_rstn1_sym : forall x y, clos_refl_sym_trans_n1 R x y -> clos_refl_sym_trans_n1 R y x. Proof. intros x y H; elim H. - constructor 1. - intros y0 z D H0 H1. apply clos_rstn1_trans with y0; auto. right with z. + destruct D; auto with Relations. + left. Defined. Lemma clos_rst_rstn1 : forall x y, clos_refl_sym_trans R x y -> clos_refl_sym_trans_n1 R x y. Proof. induction 1. - constructor 2 with x; auto with Relations. constructor 1. - constructor 1. - apply clos_rstn1_sym; auto. - eapply clos_rstn1_trans; eauto. Defined. Lemma clos_rst_rstn1_iff : forall x y, clos_refl_sym_trans R x y <-> clos_refl_sym_trans_n1 R x y. Proof. split. - apply clos_rst_rstn1. - apply clos_rstn1_rst. Defined. End Equivalences. Lemma trans_clos_transp_permute : forall x y, transp (trans_clos R) x y <-> trans_clos (transp R) x y. Proof. split; induction 1; (apply t_step; assumption) || eapply t_trans; eassumption. Defined. End Properties. 
Coq-Equations-1.3.1-8.20/theories/HoTT/Subterm.v000066400000000000000000000234121463127417400210540ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Set Warnings "-notation-overridden". From Equations Require Import Init Signature. Require Import Equations.CoreTactics. Require Import Equations.HoTT.Logic Equations.HoTT.Classes Equations.HoTT.EqDec Equations.HoTT.Relation Equations.HoTT.WellFounded Equations.HoTT.DepElim Equations.HoTT.Constants. From HoTT Require Import Basics.Tactics Spaces.Nat. Set Universe Polymorphism. Generalizable Variables A R S B. (** The fixpoint combinator associated to a well-founded relation, just reusing the [WellFounded.Fix] combinator. *) Definition FixWf `{WF:WellFounded A R} (P : A -> Type) (step : forall x : A, (forall y : A, R y x -> P y) -> P x) : forall x : A, P x := Fix wellfounded P step. Definition step_fn_ext {A} {R} (P : A -> Type) := fun step : forall x : A, (forall y : A, R y x -> P y) -> P x => forall x (f g : forall y (H : R y x), P y), (forall y H, f y H = g y H) -> step x f = step x g. Lemma FixWf_unfold `{WF : WellFounded A R} (P : A -> Type) (step : forall x : A, (forall y : A, R y x -> P y) -> P x) (step_ext : step_fn_ext P step) (x : A) : FixWf P step x = step x (fun y _ => FixWf P step y). Proof. intros. unfold FixWf. rewrite WellFounded.Fix_eq. - apply step_ext. intros. reflexivity. - intros x' f g H. apply step_ext. apply H. Defined. Lemma FixWf_unfold_step : forall (A : Type) (R : Relation A) (WF : WellFounded R) (P : A -> Type) (step : forall x : A, (forall y : A, R y x -> P y) -> P x) (x : A) (step_ext : step_fn_ext P step) (step' : forall y : A, R y x -> P y), step' = (fun (y : A) (_ : R y x) => FixWf P step y) -> FixWf P step x = step x step'. Proof. intros. rewrite FixWf_unfold, X. - reflexivity. - apply step_ext. Defined. Ltac unfold_FixWf := match goal with |- context [ @FixWf ?A ?R ?WF ?P ?f ?x ] => let step := fresh in set(step := fun y (_ : R y x) => @FixWf A R WF P f y) in *; unshelve erewrite (@FixWf_unfold_step A R WF P f x _ step idpath); [red; intros; simp_sigmas; red_one_eq (* Extensionality proof *) |hidebody step; try red_eq_lhs (* Unfold the functional *)] end. Ltac unfold_recursor := unfold_FixWf. Lemma FixWf_unfold_ext `{Funext} `{WF : WellFounded A R} (P : A -> Type) (step : forall x : A, (forall y : A, R y x -> P y) -> P x) (x : A) : FixWf P step x = step x (fun y _ => FixWf P step y). Proof. intros. unfold FixWf, Fix. destruct wellfounded. simpl. apply ap. funext y h. apply ap. apply Acc_prop. Defined. #[export] Hint Rewrite @FixWf_unfold_ext : Recursors. Lemma FixWf_unfold_ext_step `{Funext} : forall (A : Type) (R : Relation A) (WF : WellFounded R) (P : A -> Type) (step : forall x : A, (forall y : A, R y x -> P y) -> P x) (x : A) (step' : forall y : A, R y x -> P y), step' = (fun (y : A) (_ : R y x) => FixWf P step y) -> FixWf P step x = step x step'. Proof. intros. rewrite FixWf_unfold_ext, X. reflexivity. Defined. #[export] Hint Rewrite @FixWf_unfold_ext_step : Recursors. 
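(* A minimal usage sketch of [FixWf] and [FixWf_unfold] (illustration only; the
   names below are hypothetical and not used elsewhere): a constant function
   defined by well-founded recursion, whose fixpoint equation follows from
   [FixWf_unfold] since the functional ignores its recursive argument. *)
Section FixWf_example.
  Context `{WF : WellFounded A R}.

  Definition fixwf_const : A -> Unit :=
    @FixWf A R WF (fun _ => Unit) (fun x _ => tt).

  Lemma fixwf_const_eq (x : A) : fixwf_const x = tt.
  Proof.
    unfold fixwf_const. rewrite FixWf_unfold.
    - reflexivity.
    - (* Extensionality of the (constant) functional. *)
      intros x' f g Hfg. reflexivity.
  Qed.
End FixWf_example.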
Ltac unfold_FixWf_ext := match goal with |- context [ @FixWf ?A ?R ?WF ?P ?f ?x ] => let step := fresh in set(step := fun y (_ : R y x) => @FixWf A R WF P f y) in *; eapply concat; [exact (@FixWf_unfold_ext_step (_ : Funext) A R WF P f x step idpath) | hidebody step; try red_eq_lhs (* Unfold the functional *)] end. Ltac unfold_recursor_ext := unfold_FixWf_ext. #[export] Hint Rewrite @FixWf_unfold : Recursors. (** Inline so that we get back a term using general recursion. *) Extraction Inline FixWf Fix Fix_F. (** This hint database contains the constructors of every derived subterm relation. It is used to automatically find proofs that a term is a subterm of another. *) Create HintDb subterm_relation discriminated. Create HintDb rec_decision discriminated. (** This is used to simplify the proof-search for recursive call obligations. *) Ltac simpl_let := match goal with [ H : let _ := ?t in _ |- _ ] => match t with | fixproto => fail 1 | _ => cbv zeta in H end end. #[export] Hint Extern 40 => progress (cbv beta in * || simpl_let) : rec_decision. (* This expands lets in the context to simplify proof search for recursive call obligations, as [eauto] does not do matching up-to unfolding of let-bound variables. *) #[export] Hint Extern 10 => match goal with [ x := _ |- _ ] => lazymatch goal with |- context [ x ] => subst x end end : rec_decision. (** We can automatically use the well-foundedness of a relation to get the well-foundedness of its transitive closure. Note that this definition is transparent as well as [wf_clos_trans], to allow computations with functions defined by well-founded recursion. *) Lemma WellFounded_trans_clos `(WF : WellFounded A R) : WellFounded (trans_clos R). Proof. apply wf_trans_clos. apply WF. Defined. #[export] Hint Extern 4 (WellFounded (trans_clos _)) => apply @WellFounded_trans_clos : typeclass_instances. #[export] Instance wf_inverse_image {A R} `(WellFounded A R) {B} (f : B -> A) : WellFounded (inverse_image R f). Proof. red. apply wf_inverse_image. apply H. Defined. (* (* Do not apply [wf_MR] agressively, as Coq's unification could "invent" an [f] otherwise *) (* to unify. *) *) (* #[export] Hint Extern 0 (WellFounded (inverse_image _ _)) => apply @wf_inverse_image : typeclass_instances. *) #[export] Hint Extern 0 (inverse_image _ _ _ _) => red : rec_decision. (** We also add hints for transitive closure, not using [t_trans] but forcing to build the proof by successive applications of the inner relation. *) #[export] Hint Resolve t_step : subterm_relation. Lemma trans_clos_stepr A (R : Relation A) (x y z : A) : R y z -> trans_clos R x y -> trans_clos R x z. Proof. intros Hyz Hxy. exact (t_trans _ x y z Hxy (t_step _ _ _ Hyz)). Defined. #[export] Hint Resolve trans_clos_stepr : subterm_relation. (** The default tactic to build proofs of well foundedness of subterm relations. *) Create HintDb solve_subterm discriminated. #[export] Hint Extern 4 (_ = _) => reflexivity : solve_subterm. #[export] Hint Extern 10 => eapply_hyp : solve_subterm. Ltac solve_subterm := intros; apply WellFounded_trans_clos; red; intros; simp_sigmas; on_last_hyp ltac:(fun H => depind H); constructor; intros; simp_sigmas; on_last_hyp ltac:(fun HR => depind HR); simplify_dep_elim; try typeclasses eauto with solve_subterm. (** A tactic to launch a well-founded recursion. 
*) Ltac rec_wf_fix recname kont := let hyps := fresh in intros hyps; intro; on_last_hyp ltac:(fun x => rename x into recname; unfold inverse_image at 1 in recname) ; destruct_right_sigma hyps; try curry recname; simpl in recname; kont recname. (* Ltac rec_wf_fix x recname fixterm := *) (* apply fixterm ; clear_local ; *) (* intros until 1 ; simp_sigmas ; *) (* on_last_hyp ltac:(fun x => rename x into recname) ; *) (* simplify_dep_elim ; intros ; unblock_goal ; intros ; *) (* move recname at bottom ; try curry recname ; simpl in recname. *) (** The [do] tactic but using a Coq-side nat. *) Local Open Scope nat_scope. Ltac do_nat n tac := match n with | 0 => idtac | S ?n' => tac ; do_nat n' tac end. (** Generalize an object [x], packing it in a sigma type if necessary. *) Ltac sigma_pack n t := let packhyps := fresh "hypspack" in let xpack := fresh "pack" in let eos' := fresh "eos" in match constr:(n) with | 0%nat => set (eos' := the_end_of_the_section); move eos' at top | _ => do_nat n ltac:(idtac; revert_last); set (eos' := the_end_of_the_section); do_nat n intro end; uncurry_hyps packhyps; (progress (set(xpack := t) in |- ; cbv beta iota zeta in xpack; revert xpack; pattern sigma packhyps; clearbody packhyps; revert packhyps; clear_nonsection; clear eos')). (** We specialize the tactic for [x] of type [A], first packing [x] with its indices into a sigma type and finding the declared relation on this type. *) Ltac rec_wf recname t kont := sigma_pack 0 t; match goal with [ |- forall (s : ?T) (s0 := @?b s), @?P s ] => let fn := constr:(fun s : T => b s) in let c := constr:(wellfounded (R:=inverse_image _ fn)) in let wf := constr:(FixWf (WF:=c)) in intros s _; revert s; refine (wf P _); simpl ; rec_wf_fix recname kont end. Ltac rec_wf_eqns recname x := rec_wf recname x ltac:(fun rechyp => add_pattern (hide_pattern rechyp)). Ltac rec_wf_rel_aux recname n t rel kont := sigma_pack n t; match goal with [ |- forall (s : ?T) (s0 := @?b s), @?P s ] => let fn := constr:(fun s : T => b s) in let c := constr:(wellfounded (R:=inverse_image rel fn)) in let wf := constr:(FixWf (WF:=c)) in intros s _; revert s; refine (wf P _); simpl ; rec_wf_fix recname kont end. Ltac rec_wf_rel recname x rel := rec_wf_rel_aux recname 0 x rel ltac:(fun rechyp => idtac). (* NoCycle from well-foundedness. *) Definition NoCycle_WellFounded {A} (R : Relation A) (wfR : WellFounded R) : NoCyclePackage A := {| NoCycle := R; noCycle := well_founded_irreflexive (wfR:=wfR) |}. #[export] Existing Instance NoCycle_WellFounded. #[export] Hint Extern 30 (@NoCycle ?A (NoCycle_WellFounded ?R ?wfr) _ _) => hnf; typeclasses eauto with subterm_relation : typeclass_instances. Coq-Equations-1.3.1-8.20/theories/HoTT/Tactics.v000066400000000000000000000103571463127417400210310ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Set Warnings "-notation-overridden". Require Import Equations.CoreTactics Equations.HoTT.Logic Equations.HoTT.DepElim Equations.HoTT.Subterm Equations.HoTT.EqDec Equations.HoTT.WellFounded Equations.HoTT.FunctionalInduction. Ltac Equations.Init.simpl_equations ::= Equations.HoTT.DepElim.simpl_equations. 
Ltac Equations.Init.simplify_equalities ::= Equations.HoTT.DepElim.simplify_dep_elim. Ltac Equations.Init.depelim H ::= dependent elimination H; cbn in *. Ltac Equations.Init.depind H ::= Equations.HoTT.DepElim.depind H. Ltac Equations.Init.simp_IHs_tac ::= Equations.HoTT.FunctionalInduction.simplify_IHs_call. Ltac Equations.Init.funelim_constr_as H H' tac ::= Equations.HoTT.FunctionalInduction.funelim_constr_as H H' tac. Ltac Equations.Init.apply_funelim H ::= Equations.HoTT.FunctionalInduction.apply_funelim H. Ltac Equations.Init.noconf H ::= Equations.HoTT.DepElim.noconf H. Create HintDb solve_subterm discriminated. #[export] Hint Extern 4 (_ = _) => reflexivity : solve_subterm. #[export] Hint Extern 10 => eapply_hyp : solve_subterm. Ltac solve_subterm := intros; apply WellFounded.wf_trans_clos; red; intros; simp_sigmas; on_last_hyp ltac:(fun H => depind H); constructor; intros; simp_sigmas; on_last_hyp ltac:(fun HR => Equations.Init.depelim HR); simplify_dep_elim; try typeclasses eauto with solve_subterm. Ltac Equations.Init.solve_subterm ::= solve_subterm. Ltac Equations.Init.unfold_recursor ::= Equations.HoTT.Subterm.unfold_recursor. Ltac Equations.Init.unfold_recursor_ext ::= Equations.HoTT.Subterm.unfold_recursor_ext. Ltac solve_noconf_inv_eq a b := destruct_sigma a; destruct_sigma b; do_case a; intros; depelim b; simpl in * |-; on_last_hyp ltac:(fun id => hnf in id; destruct_tele_eq id || destruct id); solve [constructor]. Ltac solve_noconf_inv := intros; match goal with | |- (?f ?a ?b _) = _ => solve_noconf_inv_eq a b | |- ?R ?a ?b => destruct_sigma a; destruct_sigma b; do_case a ; intros; depelim b; simpl in * |-; on_last_hyp ltac:(fun id => hnf in id; destruct_tele_eq id || destruct id); solve [constructor] end. Ltac solve_noconf_prf := intros; (* Subtitute a = b *) on_last_hyp ltac:(fun id => destruct id) ; (* Destruct the inductive object a *) on_last_hyp ltac:(fun id => destruct_sigma id; do_case id; intros) ; simpl; constructor. Ltac solve_noconf := simpl; intros; match goal with | [ H : _ = _ |- _ ] => try solve_noconf_prf | [ |- _ = _ ] => try solve_noconf_inv end. Ltac solve_noconf_hom_inv_eq a b := destruct_sigma a; destruct_sigma b; do_case a; intros; depelim b; simpl in * |-; on_last_hyp ltac:(fun id => hnf in id; destruct_tele_eq id || depelim id); solve [constructor || simpl_equations; constructor]. Ltac solve_noconf_hom_inv := intros; match goal with | |- (?f ?a ?b _) = _ => solve_noconf_hom_inv_eq a b | |- ?R ?a ?b => destruct_sigma a; destruct_sigma b; do_case a ; intros; depelim b; simpl in * |-; on_last_hyp ltac:(fun id => hnf in id; destruct_tele_eq id || depelim id); solve [constructor || simpl_equations; constructor] end. Ltac solve_noconf_hom_inv_equiv := intros; (* Subtitute a = b *) on_last_hyp ltac:(fun id => do_case id) ; (* Destruct the inductive object a using dependent elimination to handle UIP cases. *) on_last_hyp ltac:(fun id => destruct_sigma id; depelim id) ; simpl; simpl_equations; constructor. Ltac solve_noconf_hom := simpl; intros; match goal with [ H : _ = _ |- _ = _ ] => try solve_noconf_hom_inv_equiv | [ H : _ = _ |- _ ] => try solve_noconf_prf | [ |- _ = _ ] => try solve_noconf_hom_inv end. Ltac Equations.Init.solve_noconf ::= solve_noconf. Ltac Equations.Init.solve_noconf_hom ::= solve_noconf_hom. Coq-Equations-1.3.1-8.20/theories/HoTT/Telescopes.v000066400000000000000000000201631463127417400215410ustar00rootroot00000000000000Set Warnings "-notation-overridden". Require Import Equations.HoTT.Loader. Require Import Equations.HoTT.Logic. 
Require Import Equations.HoTT.WellFounded. Require Import Equations.HoTT.DepElim. Require Import Equations.HoTT.Tactics. Require Import Equations.HoTT.FunctionalInduction. Require Import Equations.HoTT.Subterm. From HoTT Require Import Basics.Tactics. (** Telescopes: allows treating variable arity fixpoints *) Set Universe Polymorphism. Import Sigma_Notations. Local Open Scope equations_scope. Set Equations Transparent. Cumulative Inductive tele@{i} : Type := | tip (A : Type@{i}) | ext (A : Type@{i}) (B : A -> tele) : tele. Register tele as equations.tele.type. Register tip as equations.tele.tip. Register ext as equations.tele.ext. Section TeleSigma. Universe i. Equations tele_sigma (t : tele@{i}) : Type@{i} := tele_sigma (tip A) := A; tele_sigma (ext A B) := @sigma A (fun x => tele_sigma (B x)). Coercion tele_sigma : tele >-> Sortclass. Inductive tele_val : tele@{i} -> Type@{i+1} := | tip_val {A} (a : A) : tele_val (tip A) | ext_val {A B} (a : A) (b : tele_val (B a)) : tele_val (ext A B). Equations tele_pred : tele -> Type := | tip A := A -> Type; | ext A B := forall x : A, tele_pred (B x). Equations tele_rel : tele -> tele -> Type := | tip A | tip B := A -> B -> Type; | ext A B | ext A' B' := forall (x : A) (y : A'), tele_rel (B x) (B' y); | _ | _ := Empty. Equations tele_rel_app (T U : tele) (P : tele_rel T U) (x : tele_sigma T) (y : tele_sigma U) : Type := tele_rel_app (tip A) (tip A') P a a' := P a a'; tele_rel_app (ext A B) (ext A' B') P (a, b) (a', b') := tele_rel_app (B a) (B' a') (P a a') b b'. Universes j k. Equations tele_fn : tele@{i} -> Type@{j} -> Type@{k} := | tip A | concl := A -> concl; | ext A B | concl := forall x : A, tele_fn (B x) concl. Equations tele_MR (T : tele@{i}) (A : Type@{j}) (f : tele_fn T A) : T -> A := tele_MR (tip A) C f := f; tele_MR (ext A B) C f := fun x => tele_MR (B x.1) C (f x.1) x.2. Equations tele_measure (T : tele@{i}) (A : Type@{j}) (f : tele_fn T A) (R : A -> A -> Type@{k}) : T -> T -> Type@{k} := tele_measure T C f R := fun x y => R (tele_MR T C f x) (tele_MR T C f y). Equations tele_type : tele@{i} -> Type@{k} := | tip A := A -> Type@{j}; | ext A B := forall x : A, tele_type (B x). Equations tele_type_app (T : tele@{i}) (P : tele_type T) (x : tele_sigma T) : Type@{k} := tele_type_app (tip A) P a := P a; tele_type_app (ext A B) P (a, b) := tele_type_app (B a) (P a) b. Equations tele_forall (T : tele@{i}) (P : tele_type T) : Type@{k} := | tip A | P := forall x : A, P x; | ext A B | P := forall x : A, tele_forall (B x) (P x). Equations tele_forall_impl (T : tele@{i}) (P : tele_type T) (Q : tele_type T) : Type := | tip A | P | Q := forall x : A, P x -> Q x; | ext A B | P | Q := forall x : A, tele_forall_impl (B x) (P x) (Q x). Equations tele_forall_app (T : tele@{i}) (P : tele_type T) (f : tele_forall T P) (x : T) : tele_type_app T P x := tele_forall_app (tip A) P f x := f x; tele_forall_app (ext A B) P f x := tele_forall_app (B x.1) (P x.1) (f x.1) x.2. Equations tele_forall_type_app (T : tele@{i}) (P : tele_type T) (fn : forall t, tele_type_app T P t) : tele_forall T P := | (tip A) | P | fn := fn; | ext A B | P | fn := fun a : A => tele_forall_type_app (B a) (P a) (fun b => fn (a, b)). Lemma tele_forall_app_type (T : tele@{i}) (P : tele_type T) (f : forall t, tele_type_app T P t) : forall x, tele_forall_app T P (tele_forall_type_app T P f) x = f x. Proof. induction T; simpl. - reflexivity. - cbn. intros [a b]. simpl. apply X. Defined. 
Equations tele_forall_uncurry (T : tele@{i}) (P : T -> Type@{j}) : Type@{k} := | tip A | P := forall x : A, P x; | ext A B | P := forall x : A, tele_forall_uncurry (B x) (fun y : tele_sigma (B x) => P (x, y)). Equations tele_rel_pack (T U : tele) (x : tele_rel T U) : tele_sigma T -> tele_sigma U -> Type by struct T := tele_rel_pack (tip A) (tip A') P := P; tele_rel_pack (ext A B) (ext A' B') P := fun x y => tele_rel_pack (B x.1) (B' y.1) (P _ _) x.2 y.2. Equations tele_pred_pack (T : tele) (P : tele_pred T) : tele_sigma T -> Type := tele_pred_pack (tip A) P := P; tele_pred_pack (ext A B) P := fun x => tele_pred_pack (B x.1) (P x.1) x.2. Equations tele_type_unpack (T : tele) (P : tele_sigma T -> Type) : tele_type T := tele_type_unpack (tip A) P := P; tele_type_unpack (ext A B) P := fun x => tele_type_unpack (B x) (fun y => P (x, y)). Equations tele_pred_fn_pack (T U : tele) (P : tele_fn T (tele_pred U)) : tele_sigma T -> tele_sigma U -> Type := tele_pred_fn_pack (tip A) U P := fun x => tele_pred_pack U (P x); tele_pred_fn_pack (ext A B) U P := fun x => tele_pred_fn_pack (B x.1) U (P x.1) x.2. Definition tele_rel_curried T := tele_fn T (tele_pred T). Equations tele_forall_pack (T : tele) (P : T -> Type) (f : tele_forall_uncurry T P) (t : T) : P t := | (tip A) | P | f | t := f t; | ext A B | P | f | (a, b) := tele_forall_pack (B a) (fun b => P (a, b)) (f a) b. Equations tele_forall_unpack (T : tele@{i}) (P : T -> Type@{j}) (f : forall (t : T), P t) : tele_forall_uncurry T P := | (tip A) | P | f := f; | ext A B | P | f := fun a : A => tele_forall_unpack (B a) (fun b => P (a, b)) (fun b => f (a, b)). Lemma tele_forall_pack_unpack (T : tele) (P : T -> Type) (f : forall t, P t) : forall x, tele_forall_pack T P (tele_forall_unpack T P f) x = f x. Proof. induction T; simpl. - reflexivity. - intros [a b]. simpl. apply (X a (fun b => P (a, b))). Defined. End TeleSigma. Register tele_sigma as equations.tele.interp. Register tele_measure as equations.tele.measure. (* We allow the relation to be at a higher universe level. *) #[export] Instance wf_tele_measure@{i j k| i <= k, j <= k} {T : tele@{i}} (A : Type@{j}) (f : tele_fn@{i j k} T A) (R : A -> A -> Type@{k}) : WellFounded R -> WellFounded (tele_measure T A f R). Proof. intros. apply wf_inverse_image@{i j k k}. apply X. Defined. Set Strict Universe Declaration. Section Fix. Universe i j k. Context {T : tele@{i}} (R : T -> T -> Type@{j}). Context (wf : WellFounded R). Context (P : tele_type@{i j k} T). (* (forall x : A, (forall y : A, R y x -> P y) -> P x) -> forall x : A, P x *) Definition tele_fix_functional_type := tele_forall_uncurry T (fun x => ((tele_forall_uncurry@{i k k} T (fun y => R y x -> tele_type_app T P y))) -> tele_type_app T P x). Context (fn : tele_fix_functional_type). Definition tele_fix : tele_forall T P := tele_forall_type_app _ _ (@FixWf (tele_sigma T) _ wf (tele_type_app T P) (fun x H => tele_forall_pack T _ fn x (tele_forall_unpack T _ H))). End Fix. Register tele_fix as equations.tele.fix. Register tele_MR as equations.tele.MR. Register tele_fix_functional_type as equations.tele.fix_functional_type. Register tele_type_app as equations.tele.type_app. Register tele_forall_type_app as equations.tele.forall_type_app. Register tele_forall_uncurry as equations.tele.forall_uncurry. Register tele_forall as equations.tele.forall. Register tele_forall_pack as equations.tele.forall_pack. Register tele_forall_unpack as equations.tele.forall_unpack. Extraction Inline tele_forall_pack tele_forall_unpack tele_forall_type_app tele_fix. 
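(* Concrete illustrations (hypothetical examples, not used by the plugin): the
   interpretation [tele_sigma] of a two-component telescope is the corresponding
   sigma type, and [tele_fn] computes the matching curried function space. *)
Example tele_sigma_example :
  tele_sigma (ext Unit (fun _ => tip Unit)) = sigma (fun _ : Unit => Unit).
Proof. reflexivity. Defined.

Example tele_fn_example : tele_fn (ext Unit (fun _ => tip Unit)) Unit
  := fun _ _ => tt.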
Section FixUnfold. Universes i j k. Context `{Funext}. Context {T : tele@{i}} (x : T) (R : T -> T -> Type@{j}). Context (wf : well_founded R). Context (P : tele_type@{i j k} T). (* (forall x : A, (forall y : A, R y x -> P y) -> P x) -> forall x : A, P x *) Context (fn : tele_fix_functional_type@{i j k} R P). Lemma tele_fix_unfold : tele_forall_app T P (tele_fix R wf P fn) x = tele_forall_pack T _ fn x (tele_forall_unpack T _ (fun y _ => tele_forall_app T P (tele_fix R wf P fn) y)). Proof. intros. unfold tele_fix, Subterm.FixWf, Fix. rewrite tele_forall_app_type@{i j k}. destruct (wellfounded x). simpl. apply ap. apply ap. funext y. funext h. eapply concat. 2:{ apply inverse. apply tele_forall_app_type. } apply ap@{k k}. apply Acc_prop. Defined. End FixUnfold. Register tele_fix_unfold as equations.tele.fix_unfold. Coq-Equations-1.3.1-8.20/theories/HoTT/WellFounded.v000066400000000000000000000113561463127417400216470ustar00rootroot00000000000000Set Warnings "-notation-overridden". Require Import Equations.Init Equations.CoreTactics. Require Import Equations.HoTT.Logic Equations.HoTT.Relation Equations.HoTT.Relation_Properties. Require Import HoTT.Basics.Tactics. Set Universe Polymorphism. Import Sigma_Notations. (** Well-founded relations in Type *) Section Acc. Universes i j. Context {A : Type@{i}} (R : Relation@{i j} A). Cumulative Inductive Acc (x : A) : Type := | Acc_intro : (forall y, R y x -> Acc y) -> Acc x. Definition Acc_inv {x} (H : Acc x) : forall y, R y x -> Acc y. Proof. intros y Hy. destruct H. exact (a _ Hy). Defined. Definition well_founded := forall x, Acc x. Context (P : A -> Type). Context (step : forall x : A, (forall y : A, R y x -> P y) -> P x). Fixpoint Fix_F (x : A) (a : Acc x) : P x := step x (fun y r => Fix_F y (Acc_inv a y r)). End Acc. Lemma Acc_prop `{Funext} {A} (R : Relation A) i (x y : Acc R i) : x = y. Proof. revert y. induction x as [y Accy IHy]. intros [Accy']. apply ap. funext H'. funext H''. apply IHy. Qed. Section FixWf. Context {A R} (WF : @well_founded A R). Context (P : A -> Type). Context (step : forall x : A, (forall y : A, R y x -> P y) -> P x). Definition Fix (x : A) : P x := Fix_F R P step x (WF x). Lemma Fix_F_eq : forall (x:A) (r:Acc R x), step _ (fun (y:A) (p:R y x) => Fix_F R P step y (Acc_inv R r _ p)) = Fix_F R P step x r. Proof. destruct r; reflexivity. Defined. Hypothesis step_ext : forall (x:A) (f g:forall y:A, R y x -> P y), (forall (y:A) (p:R y x), f y p = g y p) -> step _ f = step _ g. Lemma Fix_step_inv : forall (x:A) (r s:Acc R x), Fix_F R P step _ r = Fix_F R P step _ s. Proof. intro x; induction (WF x); intros. rewrite <- (Fix_F_eq _ r); rewrite <- (Fix_F_eq _ s); intros. apply step_ext; auto. Defined. Lemma Fix_eq : forall x:A, Fix x = step _ (fun (y:A) (p:R y x) => Fix y). Proof. intro x; unfold Fix. rewrite <- Fix_F_eq. apply step_ext; intros. apply Fix_step_inv. Defined. End FixWf. Lemma well_founded_irreflexive {A} {R : Relation A} {wfR : well_founded R} : forall x y : A, R x y -> x = y -> Empty. Proof. intros x y Ryy. intros e. destruct e. red in wfR. induction (wfR x) as [y accy IHy]. apply (IHy _ Ryy Ryy). Qed. Lemma well_founded_antisym@{i j} {A : Type@{i}} {R : Relation@{i j} A}{wfR : well_founded R} : forall x y : A, R x y -> R y x -> Empty. Proof. intros x y Rxy Ryx. red in wfR. induction (wfR y) as [y accy IHy] in x, Rxy, Ryx. specialize (IHy _ Rxy). apply (IHy _ Ryx Rxy). Qed. Section Wf_Transitive_Closure. (** Original author: Bruno Barras, adapted to Type *) Context {A : Type} (R : Relation A). 
Notation trans_clos := (trans_clos R). Lemma incl_trans_clos : inclusion R trans_clos. red; auto with Relations. Defined. Lemma Acc_trans_clos : forall x:A, Acc R x -> Acc trans_clos x. induction 1 as [x0 _ H1]. apply Acc_intro. intros y H2. induction H2; auto with Relations. apply Acc_inv with y; auto with Relations. Defined. Hint Resolve Acc_trans_clos : core. Lemma Acc_inv_trans : forall x y:A, trans_clos y x -> Acc R x -> Acc R y. Proof. induction 1 as [| x y]; auto with Relations. intro; apply Acc_inv with y; assumption. Defined. Theorem wf_trans_clos : well_founded R -> well_founded trans_clos. Proof. unfold well_founded; auto with Relations. Defined. End Wf_Transitive_Closure. (** Author: Bruno Barras *) Section Inverse_Image. Context {A B : Type} (R : Relation B) (f : A -> B). Definition inverse_image := fun x y => R (f x) (f y). Remark Acc_lemma : forall y : B, Acc R y -> forall x : A, y = f x -> Acc inverse_image x. Proof. induction 1 as [y _ IHAcc]; intros x H. apply Acc_intro; intros y0 H1. apply (IHAcc (f y0)); try trivial. apply inverse in H. destruct H; trivial. Defined. Lemma Acc_inverse_image : forall x:A, Acc R (f x) -> Acc inverse_image x. Proof. intros; apply (Acc_lemma (f x)); trivial. Defined. Theorem wf_inverse_image : well_founded R -> well_founded inverse_image. Proof. red; intros; apply Acc_inverse_image; auto. Defined. (* Variable F : A -> B -> Type. *) (* Let RoF (x y:A) := *) (* exists2 b : B, F x b & (forall c:B, F y c -> R b c). *) (* Lemma Acc_inverse_rel : forall b:B, Acc R b -> forall x:A, F x b -> Acc RoF x. *) (* Proof. *) (* induction 1 as [x _ IHAcc]; intros x0 H2. *) (* constructor; intros y H3. *) (* destruct H3. *) (* apply (IHAcc x1); auto. *) (* Qed. *) (* Theorem wf_inverse_rel : well_founded R -> well_founded RoF. *) (* Proof. *) (* red; constructor; intros. *) (* case H0; intros. *) (* apply (Acc_inverse_rel x); auto. *) (* Qed. *) End Inverse_Image. Coq-Equations-1.3.1-8.20/theories/HoTT/WellFoundedInstances.v000066400000000000000000000053321463127417400235140ustar00rootroot00000000000000Set Warnings "-notation-overridden". Require Import Equations.HoTT.Loader Equations.HoTT.Relation Equations.HoTT.WellFounded. From HoTT Require Import Spaces.Nat. Section Lt. (* These are just natural numbers, allow minimizing to Set. *) Local Set Universe Minimization ToSet. Inductive le : nat -> nat -> Set := | le_0 x : le 0 x | le_S {x y} : le x y -> le (S x) (S y). Definition lt x y := le (S x) y. Lemma le_eq_lt x y : le x y -> (x = y) + (lt x y). Proof. induction 1. - destruct x. + left; constructor. + right; constructor. constructor. - dependent elimination IHle as [inl 1%path|inr Hlt]. + left; constructor. + right; now constructor. Defined. Theorem lt_wf@{} : WellFounded lt. Proof. do 2 red. apply nat_rect@{Set}; intros. - constructor. intros y Hy. depelim Hy. - constructor. intros y Hy. dependent elimination Hy as [@le_S y x Hle]. apply le_eq_lt in Hle. dependent elimination Hle as [inl idpath|inr Hlt]. + assumption. + destruct H. now apply a. Defined. Lemma lt_n_Sn@{} n : lt n (S n). Proof. constructor. revert n. apply nat_rect@{Set}; intros; now constructor. Defined. End Lt. (* Use refine to ensure proper treatment of cumulativity. *) #[export] Hint Extern 0 (@WellFounded nat _) => refine lt_wf : typeclass_instances. #[export] Hint Resolve lt_n_Sn : rec_decision. (** Define non-dependent lexicographic products *) Import Sigma_Notations. Local Open Scope equations_scope. Section Lexicographic_Product. Variable A : Type. Variable B : Type. 
Variable leA : Relation A. Variable leB : Relation B. Inductive lexprod : A * B -> A * B -> Type := | left_lex : forall {x x':A} {y:B} {y':B}, leA x x' -> lexprod (pair x y) (pair x' y') | right_lex : forall {x:A} {y y':B}, leB y y' -> lexprod (pair x y) (pair x y'). Lemma acc_A_B_lexprod : forall x:A, Acc leA x -> (well_founded leB) -> forall y:B, Acc leB y -> Acc lexprod (pair x y). Proof. induction 1 as [x _ IHAcc]; intros H2 y. induction 1 as [x0 H IHAcc0]. apply Acc_intro. destruct y as [x2 y1]; intro Hlex. depelim Hlex. - apply IHAcc; auto with Relations. - now apply IHAcc0. Defined. Theorem wf_lexprod : well_founded leA -> well_founded leB -> well_founded lexprod. Proof. intros wfA wfB; unfold well_founded. destruct x. apply acc_A_B_lexprod; auto with Relations; intros. Defined. End Lexicographic_Product. #[export] Instance wellfounded_lexprod A B R S `(wfR : WellFounded A R, wfS : WellFounded B S) : WellFounded (lexprod A B R S) := wf_lexprod A B R S wfR wfS. #[export] Hint Constructors lexprod : rec_decision. Coq-Equations-1.3.1-8.20/theories/HoTT/dune000066400000000000000000000002161463127417400201170ustar00rootroot00000000000000(coq.theory (name Equations.HoTT) (package coq-equations) (modules :standard) (theories Equations) (flags ("-noinit" "-indices-matter")))Coq-Equations-1.3.1-8.20/theories/Init.v000066400000000000000000000147651463127417400175330ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Require Import Coq.Unicode.Utf8_core Extraction. Declare ML Module "ltac_plugin". Declare ML Module "equations_plugin:coq-equations.plugin". (** A notation scope for equations declarations. The general mechanism of notations forces us to define notations in this scope in separate modules that we can avoid to export to remain compatible with user developments. *) Declare Scope equations_scope. Delimit Scope equations_scope with equations. Local Open Scope equations_scope. Global Unset Auto Template Polymorphism. (** We use this inductive internally *) Variant equations_tag@{} : Set := the_equations_tag. (** Notation for empty patterns. *) Definition bang := the_equations_tag. Opaque bang. Register bang as equations.internal.bang. (** Notation for inaccessible patterns. *) Definition inaccessible_pattern {A : Type} (t : A) := t. Module EquationsNotations. Notation "!" := bang : equations_scope. Notation "?( t )" := (inaccessible_pattern t) (format "?( t )") : equations_scope. End EquationsNotations. Register inaccessible_pattern as equations.internal.inaccessible_pattern. (** A marker for fixpoint prototypes in the context *) Definition fixproto := the_equations_tag. Register fixproto as equations.fixproto. (** A constant to avoid displaying large let-defined terms in the context. *) Definition hidebody {A : Type} {a : A} := a. Register hidebody as equations.internal.hidebody. Extraction Inline hidebody. Ltac hidebody H := match goal with [ H := ?b |- _ ] => change (@hidebody _ b) in (value of H) end. (** The sigma type used by Equations. *) Set Primitive Projections. Global Unset Printing Primitive Projection Parameters. 
Polymorphic Cumulative Record sigma@{i} {A : Type@{i}} {B : forall (_ : A), Type@{i}} : Type@{i} := sigmaI { pr1 : A; pr2 : B pr1 }. Unset Primitive Projections. Arguments sigma {A} B. Arguments sigmaI {A} B pr1 pr2. Extraction Inline pr1 pr2. (** Forward reference for the no-confusion tactic. *) Ltac noconf H := fail "Equations.Init.noconf has not been bound yet". (** Forward reference for the simplifier of equalities *) Ltac simplify_equalities := fail "Equations.Init.simplify_equalities has not been bound yet". (** Forward reference for simplification of equations internal constants *) Ltac simpl_equations := fail "Equations.Init.simpl_equations has not been bound yet". (** Forward reference for Equations' [depelim] tactic, which will be defined in [DepElim]. *) Ltac depelim x := fail "Equations.Init.depelim has not been bound yet". (** Forward reference for Equations' [depind] tactic, which will be defined in [DepElim]. *) Ltac depind x := fail "Equations.Init.depind has not been bound yet". Ltac simp_IHs_tac := fail "Equations.Init.simp_IHs_tac has not been bound yet". (** Forward reference for Equations' [funelim] tactic, which will be defined in [FunctionalInduction]. *) Ltac funelim_constr_as x h simp_IHs := fail "Equations.Init.funelim_constr_as has not been bound yet". (* We allow patterns, using the following trick. *) Tactic Notation "funelim_simp_IHs" uconstr(p) ident(H) tactic(simp_IHs) := let call := fresh "call" in set (call:=p); lazymatch goal with [ call := ?fp |- _ ] => subst call; funelim_constr_as fp H simp_IHs end. Tactic Notation "funelim" uconstr(p) ident(H) := funelim_simp_IHs p H simp_IHs_tac. Tactic Notation "funelim" uconstr(p) := let Heq := fresh "Heqcall" in funelim p Heq. Tactic Notation "funelim_nosimp" uconstr(p) ident(H) := funelim_simp_IHs p H ltac:(idtac). Tactic Notation "funelim_nosimp" uconstr(p) := let Heq := fresh "Heqcall" in funelim_nosimp p Heq. (** Forward reference for [apply_funelim]. A simpler minded variant that does no generalization by equalities. Use it if you want to do the induction loading by yourself. *) Ltac apply_funelim x := fail "Equations.Init.funelim has not been bound yet". (** Forward reference for Equations' [solve_eqdec] tactic, which will be defined later in [EqDec]. It is used to derive decidable equality on an inductive family. *) Ltac solve_eqdec := fail "Equations.Init.solve_eqdec has not been bound yet". (** Forward reference for Equations' [solve_subterm] tactic, which will be defined in [Subterm]. It is used to derive the well-foundedness of the subterm relation. *) Ltac solve_subterm := fail "Equations.Init.solve_subterm has not been bound yet". (** Forward reference for Equations' [solve_noconf] tactic, which will be defined later. It is used to derive the heterogeneous no-confusion property of an inductive family. *) Ltac solve_noconf := fail "Equations.Init.solve_noconf has not been bound yet". (** Forward reference for Equations' [solve_noconf_hom] tactic, which will be defined later. It is used to derive the homogeneous no-confusion property of an inductive family. *) Ltac solve_noconf_hom := fail "Equations.Init.solve_noconf_hom has not been bound yet". (** Such a useful tactic it should be part of the stdlib. *) Ltac forward_gen H tac := match type of H with | forall (_ : ?X), _ => let H' := fresh in assert (H':X) ; [tac|specialize (H H'); clear H'] end. Tactic Notation "forward" constr(H) := forward_gen H ltac:(idtac). Tactic Notation "forward" constr(H) "by" tactic(tac) := forward_gen H tac. 
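(* A small self-contained illustration of the [sigma] record declared above
   (hypothetical names, for documentation only): packing a type together with one
   of its inhabitants, and projecting the components back out with [pr1]/[pr2]. *)
Definition sigma_example : sigma (fun A : Type => A) :=
  sigmaI (fun A : Type => A) equations_tag the_equations_tag.
Definition sigma_example_pr1 := pr1 sigma_example.
Definition sigma_example_pr2 : pr1 sigma_example := pr2 sigma_example.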
(** A hintdb for transparency information of definitions when doing proof search to solve recursive calls obligations or during [simp] calls. *) Create HintDb simp discriminated. Hint Variables Opaque : simp. Hint Constants Opaque : simp. Hint Projections Opaque : simp. (** Forward reference to an internal tactic to unfold well-founded fixpoints *) Ltac unfold_recursor := fail "Equations.Init.unfold_recursor has not been bound yet". (** Forward reference to an internal tactic to unfold well-founded fixpoints using funext *) Ltac unfold_recursor_ext := fail "Equations.Init.unfold_recursor_ext has not been bound yet". (** Forward reference to an internal tactic to combine eliminators for mutual and nested definitions *) Ltac specialize_mutfix := fail "Equations.Init.specialize_mutfix has not been bound yet". Coq-Equations-1.3.1-8.20/theories/Prop/000077500000000000000000000000001463127417400173445ustar00rootroot00000000000000Coq-Equations-1.3.1-8.20/theories/Prop/Classes.v000066400000000000000000000122201463127417400211250ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Require Import Equations.Init Equations.CoreTactics. From Coq Require Import Extraction Relation_Definitions. Require Import Equations.Prop.Logic. (** A class for well foundedness proofs. Instances can be derived automatically using [Derive Subterm for ind]. *) Class WellFounded {A : Type} (R : relation A) := wellfounded : well_founded R. (** This class contains no-cyclicity proofs. They can be derived from well-foundedness proofs for example. *) (** The proofs of [NoCycle] can be arbitrarily large, it doesn't actually matter in the sense that they are used to prove absurdity. *) Class NoCyclePackage (A : Type) := { NoCycle : A -> A -> Prop; noCycle : forall {a b}, NoCycle a b -> (a = b -> False) }. (** These lemmas explains how to apply it during simplification. *) (** We always generate a goal of the form [NoCycle x C[x]], using either the left or right versions of the following lemma. *) Lemma apply_noCycle_left {A} {noconf : NoCyclePackage A} (p q : A) {B : p = q -> Type} : NoCycle p q -> (forall H : p = q, B H). Proof. intros. destruct (noCycle H H0). Defined. Lemma apply_noCycle_right {A} {noconf : NoCyclePackage A} (p q : A) {B : p = q -> Type} : NoCycle q p -> (forall H : p = q, B H). Proof. intros. destruct (noCycle H (eq_sym H0)). Defined. Extraction Inline apply_noCycle_left apply_noCycle_right. (** The NoConfusionPackage class provides a method for solving injectivity and discrimination of constructors, represented by an equality on an inductive type [I]. The type of [noConfusion] should be of the form [ Π Δ, (x y : I Δ) (x = y) -> NoConfusion x y ], where [NoConfusion x y] for constructor-headed [x] and [y] will give equality of their arguments or the absurd proposition in case of conflict. This gives a general method for simplifying by discrimination or injectivity of constructors. Some actual instances are defined later in the file using the more primitive [discriminate] and [injection] tactics on which we can always fall back. 
*) Class NoConfusionPackage (A : Type) := { NoConfusion : A -> A -> Prop; noConfusion : forall {a b}, NoConfusion a b -> a = b; noConfusion_inv : forall {a b}, a = b -> NoConfusion a b; noConfusion_sect : forall {a b} (e : NoConfusion a b), noConfusion_inv (noConfusion e) = e; noConfusion_retr : forall {a b} (e : a = b), noConfusion (noConfusion_inv e) = e; }. (** This lemma explains how to apply it during simplification. *) Lemma apply_noConfusion {A} {noconf : NoConfusionPackage A} (p q : A) {B : p = q -> Type} : (forall H : NoConfusion p q, B (noConfusion H)) -> (forall H : p = q, B H). Proof. intros. generalize (noConfusion_retr H). intros e. destruct e. apply X. Defined. Extraction Inline apply_noConfusion. (** Classes for types with UIP or decidable equality. *) Class UIP (A : Type) := uip : forall {x y : A} (e e' : x = y), e = e'. Definition dec_eq {A} (x y : A) := { x = y } + { x <> y }. Class EqDec (A : Type) := eq_dec : forall x y : A, { x = y } + { x <> y }. Class EqDecPoint (A : Type) (x : A) := eq_dec_point : forall y : A, { x = y } + { x <> y }. #[export] Instance EqDec_EqDecPoint A `(EqDec A) (x : A) : EqDecPoint A x := eq_dec x. (** For treating impossible cases. Equations corresponding to impossible calls form instances of [ImpossibleCall (f args)]. *) Class ImpossibleCall {A : Type} (a : A) : Type := is_impossible_call : False. (** We have a trivial elimination operator for impossible calls. *) Definition elim_impossible_call {A} (a : A) {imp : ImpossibleCall a} (P : A -> Type) : P a := match is_impossible_call with end. (** The tactic tries to find a call of [f] and eliminate it. *) Ltac impossible_call f := on_call f ltac:(fun t => apply (elim_impossible_call t)). (** The [FunctionalInduction f] typeclass is meant to register functional induction principles associated to a function [f]. Such principles are automatically generated for definitions made using [Equations]. *) Polymorphic Class FunctionalInduction {A : Type} (f : A) := { fun_ind_prf_ty : Type; fun_ind_prf : fun_ind_prf_ty }. Register FunctionalInduction as equations.funind.class. (** The [FunctionalElimination f] class declares elimination principles produced from the functional induction principle for [f] to be used directly to eliminate a call to [f]. This is the preferred method of proving results about a function. [n] is the number of binders for parameters, predicates and methods of the eliminator. *) Polymorphic Class FunctionalElimination {A : Type} (f : A) (fun_elim_ty : Type) (n : nat) := fun_elim : fun_elim_ty. Register FunctionalElimination as equations.funelim.class. Coq-Equations-1.3.1-8.20/theories/Prop/Constants.v000066400000000000000000000074271463127417400215210ustar00rootroot00000000000000From Equations Require Import Init. Require Import Equations.Prop.Classes Equations.Prop.EqDec Equations.Prop.DepElim. From Coq Require Import Relations. (** Naturals *) Register Init.Datatypes.O as equations.nat.zero. Register Init.Datatypes.S as equations.nat.succ. Register Init.Datatypes.nat as equations.nat.type. (* Sigma Types *) Register Equations.Init.sigma as equations.sigma.type. Register Equations.Init.sigmaI as equations.sigma.intro. Register Equations.Init.pr1 as equations.sigma.pr1. Register Equations.Init.pr2 as equations.sigma.pr2. (** Classes *) Register DepElim.DependentEliminationPackage as equations.depelim.class. Register Classes.ImpossibleCall as equations.impossiblecall.class. (** Logic parameterization *) Register Init.Logic.eq as equations.equality.type. 
Register Init.Logic.eq_refl as equations.equality.refl. Register Equations.Prop.Logic.transport_r as equations.equality.case. Register Equations.Prop.Logic.eq_elim_r as equations.equality.elim. Register Classes.EqDec as equations.eqdec.class. Register Classes.dec_eq as equations.eqdec.dec_eq. Register Classes.UIP as equations.uip.class. Register Classes.uip as equations.uip.uip. Register Init.Logic.False as equations.bottom.type. Register Init.Logic.False_rect as equations.bottom.case. Register Logic.False_rect_dep as equations.bottom.elim. Register Init.Logic.True as equations.top.type. Register Init.Logic.I as equations.top.intro. Register Logic.True_rect_dep as equations.top.elim. Register Init.Logic.and as equations.conj.type. Register Init.Logic.conj as equations.conj.intro. Register Init.Datatypes.unit as equations.unit.type. Register Init.Datatypes.tt as equations.unit.intro. Register Init.Datatypes.prod as equations.product.type. Register Init.Datatypes.pair as equations.product.intro. Register Classes.WellFounded as equations.wellfounded.class. Register Init.Wf.well_founded as equations.wellfounded.type. Register Relations.Relation_Definitions.relation as equations.relation.type. Register Relations.Relation_Operators.clos_trans as equations.relation.transitive_closure. (* Dependent elimination constants *) Register Equations.Prop.DepElim.solution_left as equations.depelim.solution_left. Register Equations.Prop.DepElim.solution_left_dep as equations.depelim.solution_left_dep. Register Equations.Prop.DepElim.solution_right as equations.depelim.solution_right. Register Equations.Prop.DepElim.solution_right_dep as equations.depelim.solution_right_dep. Register Equations.Prop.Classes.NoConfusionPackage as equations.noconfusion.class. Register Equations.Prop.Classes.apply_noConfusion as equations.depelim.apply_noConfusion. Register Equations.Prop.Classes.NoCyclePackage as equations.nocycle.class. Register Equations.Prop.Classes.apply_noCycle_left as equations.depelim.apply_noCycle_left. Register Equations.Prop.Classes.apply_noCycle_right as equations.depelim.apply_noCycle_right. Register Equations.Prop.DepElim.eq_simplification_sigma1 as equations.depelim.simpl_sigma. Register Equations.Prop.DepElim.eq_simplification_sigma1_dep as equations.depelim.simpl_sigma_dep. Register Equations.Prop.DepElim.eq_simplification_sigma1_nondep_dep as equations.depelim.simpl_sigma_nondep_dep. Register Equations.Prop.DepElim.eq_simplification_sigma1_dep_dep as equations.depelim.simpl_sigma_dep_dep. Register Equations.Prop.DepElim.simplify_ind_pack as equations.depelim.simplify_ind_pack. Register Equations.Prop.DepElim.simplify_ind_pack_inv as equations.depelim.simplify_ind_pack_inv. Register Equations.Prop.DepElim.opaque_ind_pack_eq_inv as equations.depelim.opaque_ind_pack_eq_inv. Register Equations.Prop.DepElim.pack_sigma_eq as equations.depelim.pack_sigma_eq. Register Equations.Prop.DepElim.simplification_K_uip as equations.depelim.simpl_uip. (* Now we can use the deriving support *) Derive Signature for eq. 
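(* A minimal hand-written instance of the [EqDec] class from [Classes]
   (illustration only; the plugin's deriving support is the usual way to obtain
   such instances for inductive types). *)
#[local] Instance EqDec_bool_example : EqDec bool.
Proof. intros [|] [|]; ((left; reflexivity) || (right; discriminate)). Defined.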
Coq-Equations-1.3.1-8.20/theories/Prop/DepElim.v000066400000000000000000000672321463127417400210640ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (** Tactics related to (dependent) equality and proof irrelevance. *) Require Import Coq.Program.Tactics. Require Export Equations.Init. Require Import Equations.CoreTactics. Require Import Equations.Signature. Require Import Equations.Prop.Logic. Require Import Equations.Prop.Classes. Require Import Equations.Prop.EqDec. Import Sigma_Notations. Local Open Scope equations_scope. Local Set Keyed Unification. (** Support for the [Equations] command. These tactics implement the necessary machinery to solve goals produced by the [Equations] command relative to dependent pattern-matching. It is inspired from the "Eliminating Dependent Pattern-Matching" paper by Goguen, McBride and McKinna. *) (** The [DependentEliminationPackage] provides the default dependent elimination principle to be used by the [equations] resolver. It is especially useful to register the dependent elimination principles for things in [Prop] which are not automatically generated. *) Polymorphic Class DependentEliminationPackage (A : Type) := { elim_type : Type ; elim : elim_type }. (** A higher-order tactic to apply a registered eliminator. *) Ltac elim_tac tac p := let ty := type of p in let eliminator := eval simpl in (elim (A:=ty)) in tac p eliminator. (** Specialization to do case analysis or induction. Note: the [equations] tactic tries [case] before [elim_case]: there is no need to register generated induction principles. *) Ltac elim_case p := elim_tac ltac:(fun p el => destruct p using el) p. Ltac elim_ind p := elim_tac ltac:(fun p el => induction p using el) p. (** Lemmas used by the simplifier, mainly rephrasings of [eq_rect], [eq_ind]. *) Notation "p # t" := (transport _ p t) (right associativity, at level 65) : equations_scope. Definition solution_left {A} {B : A -> Type} (t : A) (p : B t) (x : A) (e : x = t) : B x := eq_sym e # p. Lemma eq_sym_invol {A} (x y : A) (e : x = y) : eq_sym (eq_sym e) = e. Proof. destruct e. reflexivity. Defined. Lemma eq_symmetry_dep {A} {t : A} {B : forall (x : A), x = t -> Type} : (forall (x : A) (eq : t = x), B x (eq_sym eq)) -> (forall (x : A) (eq : x = t), B x eq). Proof. intros. rewrite <- eq_sym_invol. generalize (eq_sym eq). apply X. Defined. Lemma solution_left_dep : forall {A} (t : A) {B : forall (x : A), (x = t -> Type)}, B t eq_refl -> (forall x (Heq : x = t), B x Heq). Proof. intros A t B H x eq. apply eq_symmetry_dep. clear eq. intros. destruct eq. exact H. Defined. Definition solution_right {A} {P : A -> Type} (t : A) (p : P t) x (e : t = x) : P x := transport P e p. Lemma solution_right_dep : forall {A} (t : A) {B : forall (x : A), (t = x -> Type)}, B t eq_refl -> (forall x (Heq : t = x), B x Heq). Proof. intros A t B H x eq. destruct eq. apply H. Defined. Lemma solution_left_let : forall {A} {B : A -> Type} (b : A) (t : A), (b = t -> B t) -> (let x := b in x = t -> B x). Proof. intros A B b t H x eq. subst x. destruct eq. apply H. reflexivity. Defined. 
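(* Sanity-check examples (illustrations only): on [eq_refl], [solution_left] and
   [solution_right] both compute to the packaged term, which is what the
   simplification machinery below relies on. *)
Example solution_left_refl {A} {B : A -> Type} (t : A) (p : B t) :
  solution_left t p t eq_refl = p := eq_refl.
Example solution_right_refl {A} {B : A -> Type} (t : A) (p : B t) :
  solution_right t p t eq_refl = p := eq_refl.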
Lemma solution_right_let : forall {A} {B : A -> Type} (b t : A), (t = b -> B t) -> (let x := b in t = x -> B x). Proof. intros A B b t H x eq. subst x. destruct eq. apply H. reflexivity. Defined. Lemma deletion : forall {A B} (t : A), B -> (t = t -> B). Proof. intros; assumption. Defined. (** Old-style sigma types. *) Lemma simplification_existT1 : forall {A} {P : A -> Type} {B} (p q : A) (x : P p) (y : P q), (p = q -> existT P p x = existT P q y -> B) -> (existT P p x = existT P q y -> B). Proof. intros. refine (X _ H). change (projT1 (existT P p x) = projT1 (existT P q y)). now destruct H. Defined. Polymorphic Lemma simplification_sigma1@{i j | i <= eq.u0} : forall {A : Type@{i}} {P : A -> Type@{i}} {B : Type@{j}} (p q : A) (x : P p) (y : P q), (p = q -> (p, x) = (q, y) -> B) -> ((p, x) = (q, y) -> B). Proof. intros. refine (X _ H). change (pr1 (p, x) = pr1 (q, y)). now destruct H. Defined. Polymorphic Lemma eq_simplification_sigma1@{i j | i <= eq.u0} {A : Type@{i}} {P : Type@{i}} {B : Type@{j}} (p q : A) (x : P) (y : P) : (p = q -> x = y -> B) -> ((p, x) = (q, y) -> B). Proof. intros. revert X. change p with (pr1 (p, x)). change q with (pr1 (q, y)). change x with (pr2 (p, x)) at 2. change y with (pr2 (q, y)) at 2. destruct H. intros X. eapply (X eq_refl). apply eq_refl. Defined. Polymorphic Lemma eq_simplification_sigma1_dep@{i j | i <= eq.u0 +} {A : Type@{i}} {P : A -> Type@{i}} {B : Type@{j}} (p q : A) (x : P p) (y : P q) : (forall e : p = q, (@eq_rect A p P x q e) = y -> B) -> ((p, x) = (q, y) -> B). Proof. intros. revert X. change p with (pr1 (p, x)). change q with (pr1 (q, y)). change x with (pr2 (p, x)) at 3. change y with (pr2 (q, y)) at 4. destruct H. intros X. eapply (X eq_refl). apply eq_refl. Defined. Polymorphic Definition pack_sigma_eq_nondep@{i | i <= eq.u0} {A : Type@{i}} {P : Type@{i}} {p q : A} {x : P} {y : P} (e' : p = q) (e : x = y) : (p, x) = (q, y). Proof. destruct e'. simpl in e. destruct e. apply eq_refl. Defined. Polymorphic Lemma eq_simplification_sigma1_nondep_dep@{i j | i <= eq.u0} {A : Type@{i}} {P : Type@{i}} (p q : A) (x : P) (y : P) {B : (p, x) = (q, y) -> Type@{j}} : (forall e' : p = q, forall e : x = y, B (pack_sigma_eq_nondep e' e)) -> (forall e : sigmaI (fun _ => P) p x = sigmaI (fun _ => P) q y, B e). Proof. intros. revert X. change p with (pr1 (p, x)). change q with (pr1 (q, y)). change x with (pr2 (p, x)) at 2 4. change y with (pr2 (q, y)) at 2 4. destruct e. intros X. simpl in *. apply (X eq_refl eq_refl). Defined. Polymorphic Definition pack_sigma_eq@{i | +} {A : Type@{i}} {P : A -> Type@{i}} {p q : A} {x : P p} {y : P q} (e' : p = q) (e : @eq_rect A p P x q e' = y) : (p, x) = (q, y). Proof. destruct e'. simpl in e. destruct e. apply eq_refl. Defined. Polymorphic Lemma eq_simplification_sigma1_dep_dep@{i j | i <= eq.u0 +} {A : Type@{i}} {P : A -> Type@{i}} (p q : A) (x : P p) (y : P q) {B : (p, x) = (q, y) -> Type@{j}} : (forall e' : p = q, forall e : @eq_rect A p P x q e' = y, B (pack_sigma_eq e' e)) -> (forall e : (p, x) = (q, y), B e). Proof. intros. revert X. change p with (pr1 (p, x)). change q with (pr1 (q, y)). change x with (pr2 (p, x)) at 3 5. change y with (pr2 (q, y)) at 4 6. destruct e. intros X. simpl in *. apply (X eq_refl eq_refl). Defined. Set Printing Universes. Polymorphic Lemma pr2_inv_uip@{i| i <= eq.u0 +} {A : Type@{i}} {P : A -> Type@{i}} {x : A} {y y' : P x} : y = y' -> sigmaI@{i} P x y = sigmaI@{i} P x y'. Proof. exact (solution_right (P:=fun y' => (x, y) = (x, y')) y eq_refl y'). Defined. 
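(* Usage sketch (illustration only, hypothetical name): [eq_simplification_sigma1]
   reduces an equality of non-dependent pairs to equalities of the components;
   here we extract the equality of the first components. *)
Example eq_simplification_sigma1_example {A P : Type} (p q : A) (x y : P) :
  (p, x) = (q, y) -> p = q :=
  eq_simplification_sigma1 p q x y (fun e _ => e).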
Polymorphic Lemma pr2_uip@{i | +} {A : Type@{i}} {E : UIP A} {P : A -> Type@{i}} {x : A} {y y' : P x} : sigmaI@{i} P x y = sigmaI@{i} P x y' -> y = y'. Proof. refine (eq_simplification_sigma1_dep_dep@{i i} _ _ _ _ _). intros e'. destruct (uip eq_refl e'). intros e ; exact e. Defined. Polymorphic Lemma pr2_uip_refl@{i | +} {A : Type@{i}} {E : UIP A} (P : A -> Type@{i}) (x : A) (y : P x) : pr2_uip@{i} (@eq_refl _ (x, y)) = eq_refl. Proof. unfold pr2_uip, eq_simplification_sigma1_dep_dep. now rewrite uip_refl_refl. Defined. (** If we have decidable equality on [A] we use this version which is axiom-free! *) Polymorphic Lemma simplification_sigma2_uip@{i j |+} : forall {A : Type@{i}} `{UIP A} {P : A -> Type@{i}} {B : Type@{j}} (p : A) (x y : P p), (x = y -> B) -> ((p , x) = (p, y) -> B). Proof. intros. apply X. apply pr2_uip@{i} in H0. assumption. Defined. Polymorphic Lemma simplification_sigma2_uip_refl@{i j | +} : forall {A : Type@{i}} {uip:UIP A} {P : A -> Type@{i}} {B : Type@{j}} (p : A) (x : P p) (G : x = x -> B), @simplification_sigma2_uip A uip P B p x x G eq_refl = G eq_refl. Proof. intros. unfold simplification_sigma2_uip. now rewrite pr2_uip_refl. Defined. Arguments simplification_sigma2_uip : simpl never. Polymorphic Lemma simplification_sigma2_dec_point : forall {A : Type} (p : A) `{EqDecPoint A p} {P : A -> Type} {B : Type} (x y : P p), (x = y -> B) -> ((p, x) = (p, y) -> B). Proof. intros. apply X. apply inj_right_sigma_point in H0. assumption. Defined. Polymorphic Lemma simplification_sigma2_dec_point_refl@{i +} : forall {A} (p : A) `{eqdec:EqDecPoint A p} {P : A -> Type} {B} (x : P p) (G : x = x -> B), @simplification_sigma2_dec_point A p eqdec P B x x G eq_refl = G eq_refl. Proof. intros. unfold simplification_sigma2_dec_point. rewrite inj_right_sigma_refl_point. reflexivity. Defined. Arguments simplification_sigma2_dec_point : simpl never. Polymorphic Lemma simplification_K_uip@{i j| i <= eq.u0 +} {A : Type@{i}} `{UIP A} (x : A) {B : x = x -> Type@{j}} : B eq_refl -> (forall p : x = x, B p). Proof. apply UIP_K. Defined. Arguments simplification_K_uip : simpl never. Polymorphic Lemma simplification_K_uip_refl : forall {A} `{UIP A} (x : A) {B : x = x -> Type} (p : B eq_refl), simplification_K_uip x p eq_refl = p. Proof. intros. unfold simplification_K_uip, UIP_K. now rewrite uip_refl_refl. Defined. Polymorphic Definition ind_pack_eq@{i | +} {A : Type@{i}} {B : A -> Type@{i}} {x : A} {p q : B x} (e : p = q) : @eq (sigma (fun x => B x)) (x, p) (x, q) := (pr2_inv_uip e). Polymorphic Definition ind_pack_eq_inv_equiv@{i} {A : Type@{i}} {uip : UIP A} {B : A -> Type@{i}} {x : A} (p q : B x) (e : p = q) : pr2_uip (pr2_inv_uip e) = e. Proof. destruct e. apply pr2_uip_refl. Defined. Polymorphic Definition opaque_ind_pack_eq_inv@{i j} {A : Type@{i}} {uip : UIP A} {B : A -> Type@{i}} {x : A} {p q : B x} (G : p = q -> Type@{j}) (e : (x, p) = (x, q)) := G (pr2_uip@{i} e). Arguments opaque_ind_pack_eq_inv : simpl never. Arguments pr2_uip : simpl never. Arguments pr2_inv_uip : simpl never. Polymorphic Lemma simplify_ind_pack@{i j | +} {A : Type@{i}} {uip : UIP A} (B : A -> Type@{i}) (x : A) (p q : B x) (G : p = q -> Type@{j}) : (forall e : (x, p) = (x, q), opaque_ind_pack_eq_inv G e) -> (forall e : p = q, G e). Proof. intros H. intros e. specialize (H (ind_pack_eq e)). unfold opaque_ind_pack_eq_inv in H. rewrite ind_pack_eq_inv_equiv in H. apply H. Defined. Arguments simplify_ind_pack : simpl never. 
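(* Usage sketch (illustration only, hypothetical name): under a [UIP] assumption,
   [simplification_K_uip] proves any property of a self-equality from its value at
   [eq_refl]; here, that every such proof is itself [eq_refl]. *)
Example simplification_K_uip_example {A} {uipA : UIP A} (x : A) (e : x = x) :
  e = eq_refl :=
  simplification_K_uip x (B := fun e' => e' = eq_refl) eq_refl e.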
Polymorphic Lemma simplify_ind_pack_inv@{i j | +} {A : Type@{i}} {uip : UIP A} (B : A -> Type@{i}) (x : A) (p : B x) (G : p = p -> Type@{j}) : G eq_refl -> opaque_ind_pack_eq_inv G eq_refl. Proof. intros H. unfold opaque_ind_pack_eq_inv. destruct (pr2_uip_refl B x p). exact H. Defined. Arguments simplify_ind_pack_inv : simpl never. Polymorphic Definition simplified_ind_pack@{i j | +} {A : Type@{i}} {uip : UIP A} (B : A -> Type@{i}) (x : A) (p : B x) (G : p = p -> Type@{j}) (t : opaque_ind_pack_eq_inv G eq_refl) := eq_rect _ G t _ (@pr2_uip_refl A uip B x p). Arguments simplified_ind_pack : simpl never. Polymorphic Lemma simplify_ind_pack_refl@{i j | +} {A : Type@{i}} {uip : UIP A} (B : A -> Type@{i}) (x : A) (p : B x) (G : p = p -> Type@{j}) (t : forall (e : (x, p) = (x, p)), opaque_ind_pack_eq_inv G e) : simplify_ind_pack B x p p G t eq_refl = simplified_ind_pack B x p G (t eq_refl). Proof. reflexivity. Qed. Polymorphic Lemma simplify_ind_pack_elim@{i j | +} {A : Type@{i}} {uip : UIP A} (B : A -> Type@{i}) (x : A) (p : B x) (G : p = p -> Type@{j}) (t : G eq_refl) : simplified_ind_pack B x p G (simplify_ind_pack_inv B x p G t) = t. Proof. unfold simplified_ind_pack, simplify_ind_pack_inv. now destruct (pr2_uip_refl B x p). Qed. (** All the simplification rules involving axioms are treated as opaque when proving lemmas about definitions. To actually compute with these inside Coq, one has to make them transparent again. *) Global Opaque simplification_sigma2_uip simplification_sigma2_dec_point simplification_K_uip simplify_ind_pack simplified_ind_pack. Global Opaque opaque_ind_pack_eq_inv. Ltac rewrite_sigma2_refl_noK := match goal with | |- context [@inj_right_sigma ?A ?H ?x ?P ?y ?y' _] => rewrite (@inj_right_sigma_refl A H x P y) | |- context [@simplification_sigma2_uip ?A ?H ?P ?B ?p ?x ?y ?X eq_refl] => rewrite (@simplification_sigma2_uip_refl A H P B p x X); simpl | |- context [@simplification_sigma2_dec_point ?A ?p ?H ?P ?B ?x ?y ?X eq_refl] => rewrite (@simplification_sigma2_dec_point_refl A p H P B x X); simpl | |- context [@simplification_K_uip ?A ?dec ?x ?B ?p eq_refl] => rewrite (@simplification_K_uip_refl A dec x B p); simpl eq_rect (* | |- context [@HSets.inj_sigma_r ?A ?H ?P ?x ?y ?y' _] => *) (* rewrite (@HSets.inj_sigma_r_refl A H P x y) *) | |- context [@simplify_ind_pack ?A ?uip ?B ?x ?p _ ?G _ eq_refl] => rewrite (@simplify_ind_pack_refl A uip B x p G _) | |- context [@simplified_ind_pack ?A ?uip ?B ?x ?p ?G (simplify_ind_pack_inv _ _ _ _ ?t)] => rewrite (@simplify_ind_pack_elim A uip B x p G t) end. Ltac rewrite_sigma2_refl := rewrite_sigma2_refl_noK. (** This hint database and the following tactic can be used with [autounfold] to unfold everything to [eq_rect]s. *) #[global] Hint Unfold solution_left solution_right eq_sym_invol eq_symmetry_dep solution_left_dep solution_right_dep deletion simplification_existT1 simplification_sigma1 eq_simplification_sigma1 eq_simplification_sigma1_dep apply_noConfusion eq_rect_r eq_rec eq_ind eq_ind_r : equations. (** Makes these definitions disappear at extraction time *) Extraction Inline solution_right_dep solution_right solution_left solution_left_dep. Extraction Inline eq_sym_invol eq_symmetry_dep. Extraction Inline solution_right_let solution_left_let deletion. Extraction Inline simplification_existT1. Extraction Inline simplification_sigma1 simplification_sigma2_uip. Extraction Inline simplification_K_uip. Extraction Inline eq_simplification_sigma1 eq_simplification_sigma1_dep. 
Extraction Inline eq_simplification_sigma1_nondep_dep eq_simplification_sigma1_dep_dep. (** Simply unfold as much as possible. *) Ltac unfold_equations := repeat progress autounfold with equations. Ltac unfold_equations_in H := repeat progress autounfold with equations in H. Ltac rewrite_refl_id := repeat (progress (autorewrite with refl_id) || (try rewrite_sigma2_refl)). Ltac simplify_equations_in e := repeat progress (autounfold with equations in e ; simpl in e). (** Using these we can make a simplifier that will perform the unification steps needed to put the goal in normalised form (provided there are only constructor forms). Compare with the lemma 16 of the paper. We don't have a [noCycle] procedure yet. *) (** These two tactics are dangerous as they can try to reduce terms to head-normal-form and take ages to fail. *) Ltac try_discriminate := discriminate. Ltac try_injection H := injection H. Ltac simplify_one_dep_elim := match goal with | [ |- context [eq_rect_r _ _ eq_refl]] => progress simpl eq_rect_r | [ |- context [eq_rect _ _ _ _ eq_refl]] => progress simpl eq_rect | [ |- context [transport _ eq_refl _]] => progress simpl transport | [ |- context [@eq_elim _ _ _ _ _ eq_refl]] => progress simpl eq_rect | [ |- context [@eq_elim_r _ _ _ _ _ eq_refl]] => progress simpl eq_elim_r | [ |- context [noConfusion_inv _]] => progress simpl noConfusion_inv | [ |- @opaque_ind_pack_eq_inv ?A ?uip ?B ?x ?p _ ?G eq_refl] => apply (@simplify_ind_pack_inv A uip B x p G) | [ |- let _ := block in _ ] => fail 1 | [ |- _ ] => (simplify * || simplify ?); cbv beta | [ |- _ -> ?B ] => let ty := type of B in (* Works only with non-dependent products *) intro || (let H := fresh in intro H) | [ |- forall x, _ ] => let H := fresh x in intro H | [ |- _ ] => intro end. (** Repeat until no progress is possible. By construction, it should leave the goal with no remaining equalities generated by the [generalize_eqs] tactic. *) Ltac simplify_dep_elim := repeat simplify_one_dep_elim. (** Apply [noConfusion] on a given hypothsis. *) Ltac noconf H := block_goal; revert_until H; block_goal; on_last_hyp ltac:(fun H' => revert H'); simplify_dep_elim; intros_until_block; intros_until_block. (** Reverse and simplify. *) Ltac simpdep := reverse; simplify_dep_elim. (** Decompose existential packages. *) Ltac decompose_exists id id' := hnf in id ; match type of id with | @sigma _ _ => let xn := fresh id "'" in destruct id as [xn id]; decompose_exists xn id; cbv beta delta [ pr1 pr2 ] iota in id, id'; decompose_exists id id' | _ => cbv beta delta [ pr1 pr2 ] iota in id, id' end. (** Dependent generalization using existentials only. *) Ltac generalize_sig_gen id cont := let id' := fresh id in get_signature_pack id id'; hnf in (value of id'); hnf in (type of id'); lazymatch goal with | id' := ?v |- context[ id ] => generalize (@eq_refl _ id' : v = id') ; clearbody id'; simpl in id'; cont id id' id v | id' := ?v |- _ => let id'1 := fresh id' in let id'2 := fresh id' in set (id'2 := pr2 id'); set (id'1 := pr1 id') in id'2; hnf in (value of id'1), (value of id'2); try red in (type of id'2); match goal with [ id'1 := ?t |- _ ] => generalize (@eq_refl _ id'1 : t = id'1); clearbody id'2 id'1; clear id' id; try unfold signature in id'2; hnf in id'2; simpl in id'2; rename id'2 into id; cont id id id'1 t end end. Ltac generalize_sig id cont := generalize_sig_gen id ltac:(fun id id' id'1 t => (* Fails if id = id' *) try rename id into id', id' into id; cont id'1 id). 
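(** Illustrative usage sketch, added for exposition (not part of the original file):
    [simplify_dep_elim] consumes equations between variables using the solution rules,
    here reducing [x = y -> y = x] to a reflexivity goal. The example name is ours. *)
Example simplify_dep_elim_example (A : Type) : forall x y : A, x = y -> y = x.
Proof. simplify_dep_elim. reflexivity. Qed.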
Ltac generalize_sig_vars id cont := generalize_sig_gen id ltac:(fun id id' id'1 t => move_after_deps id' t; revert_until id'; rename id' into id; cont id'1 id). Ltac generalize_sig_dest id := generalize_sig id ltac:(fun id id' => decompose_exists id id'). Ltac generalize_sig_vars_dest id := generalize_sig_vars id ltac:(fun id id' => decompose_exists id id'). Ltac generalize_eqs_sig id := (needs_generalization id ; generalize_sig_dest id) || idtac. Ltac generalize_eqs_vars_sig id := (needs_generalization id ; generalize_sig_vars_dest id) || idtac. (** The default implementation of generalization using sigma types. *) Ltac generalize_by_eqs id := generalize_eqs_sig id. Ltac generalize_by_eqs_vars id := generalize_eqs_vars_sig id. (** Do dependent elimination of the last hypothesis, but not simplifying yet (used internally). *) Ltac destruct_last := on_last_hyp ltac:(fun id => simpl in id ; generalize_by_eqs id ; destruct id). (** The rest is support tactics for the [Equations] command. *) (** To solve a goal by inversion on a particular target. *) Ltac do_empty id := exfalso ; simpl in id ; solve [ generalize_by_eqs id ; destruct id ; simplify_dep_elim | apply id ; eauto with simp ]. (** If defining recursive functions, the prototypes come first. *) Ltac introduce p := first [ match p with _ => (* Already there, generalize dependent hyps *) generalize dependent p ; intros p end | intros until p | intros until 1 | intros ]. Ltac do_case p := introduce p ; (elim_case p || destruct p || (case p ; clear p)). Ltac do_ind p := introduce p ; (elim_ind p || induction p). (** The following tactics allow to do induction on an already instantiated inductive predicate by first generalizing it and adding the proper equalities to the context, in a maner similar to the BasicElim tactic of "Elimination with a motive" by Conor McBride. *) (** The [do_depelim] higher-order tactic takes an elimination tactic as argument and an hypothesis and starts a dependent elimination using this tactic. *) Ltac is_introduced H := match goal with | [ H' : _ |- _ ] => match H' with H => idtac end end. Tactic Notation "intro_block" hyp(H) := (is_introduced H ; block_goal ; revert_until H ; block_goal) || (let H' := fresh H in intros until H' ; block_goal) || (intros ; block_goal). Tactic Notation "intro_block_id" ident(H) := (is_introduced H ; block_goal ; revert_until H ; block_goal) || (let H' := fresh H in intros until H' ; block_goal) || (intros ; block_goal). Ltac unblock_dep_elim := match goal with | |- let _ := block in ?T => match T with context [ block ] => change T ; intros_until_block end | _ => unblock_goal end. (** A tactic that tries to remove trivial equality guards in induction hypotheses coming from [dependent induction]/[generalize_eqs] invocations. *) Ltac simplify_IH_hyps := repeat match goal with | [ hyp : context [ block ] |- _ ] => cbn beta in hyp; eqns_specialize_eqs_block hyp; cbn beta iota delta[eq_rect_r eq_rect] zeta in hyp end. Ltac simpl_dep_elim := simplify_dep_elim ; simplify_IH_hyps ; unblock_dep_elim. Ltac do_intros H := (try intros until H) ; (intro_block_id H || intro_block H) ; (try simpl in H ; simplify_equations_in H). Ltac do_depelim_nosimpl tac H := do_intros H ; generalize_by_eqs H ; tac H. Ltac do_depelim tac H := do_depelim_nosimpl tac H ; simpl_dep_elim; unblock_goal. Ltac do_depind tac H := (try intros until H) ; intro_block H ; (try simpl in H ; simplify_equations_in H) ; generalize_by_eqs_vars H ; block_goal ; tac H ; intros_until_block; simpl_dep_elim; unblock_goal. 
(** To dependent elimination on some hyp. *) Ltac depelim id := do_depelim ltac:(fun hyp => do_case hyp) id. Ltac depelim_term c := let H := fresh "term" in set (H:=c) in *; clearbody H ; depelim H. (** Used internally. *) Ltac depelim_nosimpl id := do_depelim_nosimpl ltac:(fun hyp => do_case hyp) id. (** To dependent induction on some hyp. *) Ltac depind id := do_depind ltac:(fun hyp => do_ind hyp) id. (** A variant where generalized variables should be given by the user. *) Ltac do_depelim' tac H := (try intros until H) ; block_goal ; generalize_by_eqs H ; tac H ; simplify_dep_elim ; simplify_IH_hyps ; unblock_goal. (** Calls [destruct] on the generalized hypothesis, results should be similar to inversion. By default, we don't try to generalize the hyp by its variable indices. *) Tactic Notation "dependent" "destruction" ident(H) := do_depelim' ltac:(fun hyp => do_case hyp) H. Tactic Notation "dependent" "destruction" ident(H) "using" constr(c) := do_depelim' ltac:(fun hyp => destruct hyp using c) H. (** This tactic also generalizes the goal by the given variables before the elimination. *) Tactic Notation "dependent" "destruction" ident(H) "generalizing" ne_hyp_list(l) := do_depelim' ltac:(fun hyp => revert l ; do_case hyp) H. Tactic Notation "dependent" "destruction" ident(H) "generalizing" ne_hyp_list(l) "using" constr(c) := do_depelim' ltac:(fun hyp => revert l ; destruct hyp using c) H. (** Then we have wrappers for usual calls to induction. One can customize the induction tactic by writting another wrapper calling do_depelim. We suppose the hyp has to be generalized before calling [induction]. *) Tactic Notation "dependent" "induction" ident(H) := do_depind ltac:(fun hyp => do_ind hyp) H. Tactic Notation "dependent" "induction" ident(H) "using" constr(c) := do_depind ltac:(fun hyp => induction hyp using c) H. (** This tactic also generalizes the goal by the given variables before the induction. *) Tactic Notation "dependent" "induction" ident(H) "generalizing" ne_hyp_list(l) := do_depelim' ltac:(fun hyp => generalize l ; clear l ; do_ind hyp) H. Tactic Notation "dependent" "induction" ident(H) "generalizing" ne_hyp_list(l) "using" constr(c) := do_depelim' ltac:(fun hyp => generalize l ; clear l ; induction hyp using c) H. (** [solve_equation] is used to prove the equation lemmas for an existing definition. *) Ltac find_empty := simpl in * ; exfalso ; match goal with | [ H : _ |- _ ] => solve [ clear_except H ; dependent elimination H | eqns_specialize_eqs H ; assumption ] | [ H : _ <> _ |- _ ] => solve [ red in H ; eqns_specialize_eqs H ; assumption ] end. Ltac make_simplify_goal := match goal with [ |- @eq ?A ?T ?U ] => let eqP := fresh "eqP" in set (eqP := fun x : A => x = U) ; change (eqP T) end. Ltac hnf_gl := match goal with [ |- ?P ?T ] => let T' := eval hnf in T in change_no_check (P T') end. Ltac hnf_eq := match goal with |- ?x = ?y => let x' := eval hnf in x in let y' := eval hnf in y in change_no_check (x' = y') end. Ltac red_eq_lhs := match goal with |- ?R ?x ?y => let x' := eval red in x in change_no_check (R x' y) end. Ltac red_one_eq := match goal with |- ?R ?x ?y => let x' := eval red in x in let y' := eval red in y in change_no_check (R x' y') end. Ltac red_eq := match goal with |- ?x = ?y => let rec reduce_eq x y := let x' := eval red in x in let y' := eval red in y in (reduce_eq x' y' || change_no_check (x' = y')) in reduce_eq x y end. 
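(** Illustrative usage sketch, added for exposition (not part of the original file):
    on a non-indexed inductive such as [nat], [depelim] amounts to a case analysis;
    its added value shows on indexed families, where it also performs the necessary
    generalizations. The example name is ours. *)
Example depelim_example (n : nat) : n = 0 \/ exists m, n = S m.
Proof. depelim n; [left; reflexivity | right; eexists; reflexivity]. Qed.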
Ltac red_gl := match goal with |- ?P ?x => let rec reduce x := let x' := eval red in x in (reduce x' || change_no_check (P x')) in reduce x end. Ltac rewrite_sigma2_rule_noK c := match c with | @inj_right_sigma ?A ?H ?x ?P ?y ?y' _ => rewrite (@inj_right_sigma_refl A H x P y) | @simplify_ind_pack ?A ?uip ?B ?x ?p _ ?G _ eq_refl=> rewrite (@simplify_ind_pack_refl A uip B x p G _) | @simplification_sigma2_uip ?A ?H ?P ?B ?p ?x ?y ?X eq_refl=> rewrite (@simplification_sigma2_uip_refl A H P B p x X); simpl | @simplification_sigma2_dec_point ?A ?p ?H ?P ?B ?x ?y ?X eq_refl=> rewrite (@simplification_sigma2_dec_point_refl A p H P B x X); simpl | @simplification_K_uip ?A ?dec ?x ?B ?p eq_refl=> rewrite (@simplification_K_uip_refl A dec x B p); simpl eq_rect end. Ltac rewrite_sigma2_rule c := rewrite_sigma2_rule_noK c. Ltac rewrite_sigma2_term x := match x with | ?f _ _ _ _ _ _ _ _ _ => rewrite_sigma2_rule f | ?f _ _ _ _ _ _ _ _ => rewrite_sigma2_rule f | ?f _ _ _ _ _ _ _ => rewrite_sigma2_rule f | ?f _ _ _ _ _ _ => rewrite_sigma2_rule f | ?f _ _ _ _ _ => rewrite_sigma2_rule f | ?f _ _ _ _ => rewrite_sigma2_rule f | ?f _ _ _ => rewrite_sigma2_rule f | ?f _ _ => rewrite_sigma2_rule f | ?f _ => rewrite_sigma2_rule f | ?f => rewrite_sigma2_rule f end. Ltac rewrite_sigma2_refl_eq := match goal with |- ?x = ?y => rewrite_sigma2_term x || rewrite_sigma2_term y end. Ltac rewrite_sigma2_refl_goal := match goal with | |- ?P ?x => rewrite_sigma2_term x end. (* Ltac simpl_equations := *) (* repeat (repeat (simpl; (hnf_eq || rewrite_sigma2_refl_eq || autorewrite with refl_id); simpl); *) (* try progress autounfold with equations). *) (* Ltac simplify_equation c := *) (* make_simplify_goal ; simpl ; *) (* repeat (try autounfoldify c; *) (* try (red_gl || rewrite_sigma2_refl_goal || autorewrite with refl_id) ; simpl). *) Ltac simpl_equations := repeat (repeat (simpl; hnf_eq; rewrite_refl_id); try progress autounfold with equations). Ltac simpl_equation_impl := repeat (unfold_equations; rewrite_refl_id). Ltac simplify_equation c := make_simplify_goal; simpl; repeat (try autounfold_ref c; progress (simpl; unfold_equations) || (progress (autorewrite with refl_id)) || reflexivity || (progress (rewrite_sigma2_refl))). Ltac solve_equation c := intros ; try simplify_equation c ; try (match goal with | [ |- ImpossibleCall _ ] => find_empty | _ => try red; try (reflexivity || discriminates) end). Definition depelim_module := tt. Register depelim_module as equations.depelim.module. Coq-Equations-1.3.1-8.20/theories/Prop/EqDec.v000066400000000000000000000246531463127417400205240ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Require Import Equations.Init. Require Import Equations.Prop.Classes. (** Decidable equality. We redevelop the derivation of [UIP] from decidable equality on [A] making everything transparent so that programs using this will actually be computable inside Coq. *) Definition UIP_refl_on_ X (x : X) := forall p : x = x, p = eq_refl. Definition UIP_refl_ X := forall (x : X) (p : x = x), p = eq_refl. Set Implicit Arguments. Lemma eq_trans_r {A} (x y z : A) : x = y -> z = y -> x = z. Proof. destruct 1. destruct 1. exact eq_refl. Defined.
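(** Illustrative sketch, added for exposition (not part of the original file): any
    [UIP] instance (from [Equations.Prop.Classes]) yields [UIP_refl_]; the name
    [UIP_refl_of_UIP] is ours. *)
Example UIP_refl_of_UIP (X : Type) (E : UIP X) : UIP_refl_ X :=
  fun x p => uip p eq_refl.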
(** We rederive the UIP shifting proof transparently. *) Theorem UIP_shift_on (X : Type) (x : X) : UIP_refl_on_ X x -> forall y : x = x, UIP_refl_on_ (x = x) y. Proof. intros UIP_refl y. rewrite (UIP_refl y). intros z. assert (UIP:forall y' y'' : x = x, y' = y''). { intros. apply eq_trans_r with eq_refl; apply UIP_refl. } transitivity (eq_trans (eq_trans (UIP eq_refl eq_refl) z) (eq_sym (UIP eq_refl eq_refl))). - destruct z. destruct (UIP _ _). reflexivity. - change (match eq_refl as y' in _ = x' return y' = y' -> Prop with | eq_refl => fun z => z = eq_refl end (eq_trans (eq_trans (UIP (eq_refl) (eq_refl)) z) (eq_sym (UIP (eq_refl) (eq_refl))))). destruct z. destruct (UIP _ _). reflexivity. Defined. Theorem UIP_shift : forall U, UIP_refl_ U -> forall x:U, UIP_refl_ (x = x). Proof. exact (fun U UIP_refl x => @UIP_shift_on U x (UIP_refl x)). Defined. (** This is the reduction rule of UIP. *) Lemma uip_refl_refl {A} {E : UIP A} (x : A) : uip (x:=x) eq_refl eq_refl = eq_refl. Proof. assert (Us:=UIP_shift). specialize (Us A). compute in Us. apply Us. intros. apply uip. Defined. Theorem UIP_K {A} {U : UIP A} (x : A) : forall P : x = x -> Type, P eq_refl -> forall p : x = x, P p. Proof. intros P peq e. now elim (uip refl_equal e). Defined. (** Derivation of principles on sigma types whose domain is decidable. *) Section EqdepDec. Context {A : Type} `{EqDec A}. Let comp (x y y':A) (eq1:x = y) (eq2:x = y') : y = y' := eq_ind _ (fun a => a = y') eq2 _ eq1. Remark trans_sym_eq : forall (x y:A) (u:x = y), comp u u = refl_equal. Proof. intros. case u; trivial. Defined. Variable x : A. Let nu (y:A) (u:x = y) : x = y := match eq_dec x y with | left eqxy => eqxy | right neqxy => False_ind _ (neqxy u) end. Let nu_constant : forall (y:A) (u v:x = y), nu u = nu v. intros. unfold nu in |- *. case (eq_dec x y); intros. reflexivity. case n; trivial. Defined. Let nu_inv (y:A) (v:x = y) : x = y := comp (nu refl_equal) v. Remark nu_left_inv : forall (y:A) (u:x = y), nu_inv (nu u) = u. Proof. intros. case u; unfold nu_inv in |- *. apply trans_sym_eq. Defined. Theorem eq_proofs_unicity : forall (y:A) (p1 p2:x = y), p1 = p2. Proof. intros. elim nu_left_inv with (u := p1). elim nu_left_inv with (u := p2). elim nu_constant with y p1 p2. reflexivity. Defined. Theorem K_dec : forall P:x = x -> Type, P refl_equal -> forall p:x = x, P p. Proof. intros. elim eq_proofs_unicity with x refl_equal p. trivial. Defined. Lemma eq_dec_refl : eq_dec x x = left _ eq_refl. Proof. case eq_dec. intros. f_equal. apply eq_proofs_unicity. intro. congruence. Defined. (** The corollary *) Let proj (P:A -> Type) (exP:sigT P) (def:P x) : P x := match exP with | existT _ x' prf => match eq_dec x' x with | left eqprf => eq_rect x' P prf x eqprf | _ => def end end. Theorem inj_right_pair : forall (P:A -> Type) (y y':P x), existT P x y = existT P x y' -> y = y'. Proof. intros. cut (proj (existT P x y) y = proj (existT P x y') y). simpl in |- *. case (eq_dec x x). intro e. elim e using K_dec; trivial. intros. case n; trivial. case H0. reflexivity. Defined. Lemma inj_right_pair_refl (P : A -> Type) (y : P x) : inj_right_pair (y:=y) (y':=y) eq_refl = eq_refl. Proof. unfold inj_right_pair. intros. unfold eq_rect. unfold proj. rewrite eq_dec_refl. unfold K_dec. simpl. unfold eq_proofs_unicity. subst proj. simpl. unfold nu_inv, comp, nu. simpl. unfold eq_ind, nu_left_inv, trans_sym_eq, eq_rect, nu_constant. rewrite eq_dec_refl. reflexivity. Defined. End EqdepDec. (** Derivation of principles on sigma types whose domain is decidable. 
*) Section PointEqdepDec. Context {A : Type} {x : A} `{EqDecPoint A x}. Let comp (x y y':A) (eq1:x = y) (eq2:x = y') : y = y' := eq_ind _ (fun a => a = y') eq2 _ eq1. Remark point_trans_sym_eq : forall (x y:A) (u:x = y), comp u u = refl_equal. Proof. intros. case u; trivial. Defined. Let nu (y:A) (u:x = y) : x = y := match eq_dec_point y with | left eqxy => eqxy | right neqxy => False_ind _ (neqxy u) end. Let nu_constant : forall (y:A) (u v:x = y), nu u = nu v. intros. unfold nu in |- *. case (eq_dec_point y); intros. reflexivity. case n; trivial. Defined. Let nu_inv (y:A) (v:x = y) : x = y := comp (nu refl_equal) v. Remark nu_left_inv_point : forall (y:A) (u:x = y), nu_inv (nu u) = u. Proof. intros. case u; unfold nu_inv in |- *. apply trans_sym_eq. Defined. Theorem eq_proofs_unicity_point : forall (y:A) (p1 p2:x = y), p1 = p2. Proof. intros. elim nu_left_inv_point with (u := p1). elim nu_left_inv_point with (u := p2). elim nu_constant with y p1 p2. reflexivity. Defined. Theorem K_dec_point : forall P:x = x -> Type, P refl_equal -> forall p:x = x, P p. Proof. intros. elim eq_proofs_unicity_point with x refl_equal p. trivial. Defined. Lemma eq_dec_refl_point : eq_dec_point x = left _ eq_refl. Proof. case eq_dec_point. intros. f_equal. apply eq_proofs_unicity_point. intro. congruence. Defined. (** The corollary *) Let proj (P:A -> Type) (exP:sigma P) (def:P x) : P x := match exP with | sigmaI _ x' prf => match eq_dec_point x' with | left eqprf => eq_rect x' P prf x (eq_sym eqprf) | _ => def end end. Theorem inj_right_sigma_point : forall (P:A -> Type) (y y':P x), sigmaI P x y = sigmaI P x y' -> y = y'. Proof. intros. cut (proj (sigmaI P x y) y = proj (sigmaI P x y') y). unfold proj. simpl in |- *. case (eq_dec_point x). intro e. elim e using K_dec_point; trivial. intros. unfold proj in H1. case n; trivial. case H0. reflexivity. Defined. Lemma inj_right_sigma_refl_point (P : A -> Type) (y : P x) : inj_right_sigma_point (y:=y) (y':=y) eq_refl = eq_refl. Proof. unfold inj_right_sigma_point. intros. unfold eq_rect. unfold proj. rewrite eq_dec_refl_point. unfold K_dec_point. simpl. unfold eq_proofs_unicity_point. subst proj. simpl. unfold nu_inv, comp, nu. simpl. unfold eq_ind, nu_left_inv, trans_sym_eq, eq_rect, nu_constant. rewrite eq_dec_refl_point. reflexivity. Defined. End PointEqdepDec. Section PEqdepDec. Context {A : Type} `{EqDec A}. Let comp (x y y':A) (eq1:x = y) (eq2:x = y') : y = y' := eq_ind _ (fun a => a = y') eq2 _ eq1. Remark ptrans_sym_eq : forall (x y:A) (u:x = y), comp u u = refl_equal. Proof. intros. case u; trivial. Defined. Variable x : A. Let nu (y:A) (u:x = y) : x = y := match eq_dec x y with | left eqxy => eqxy | right neqxy => False_ind _ (neqxy u) end. Let nu_constant : forall (y:A) (u v:x = y), nu u = nu v. intros. unfold nu in |- *. case (eq_dec x y); intros. reflexivity. case n; trivial. Defined. Let nu_inv (y:A) (v:x = y) : x = y := comp (nu refl_equal) v. Remark pnu_left_inv : forall (y:A) (u:x = y), nu_inv (nu u) = u. Proof. intros. case u; unfold nu_inv in |- *. apply ptrans_sym_eq. Defined. Theorem peq_proofs_unicity : forall (y:A) (p1 p2:x = y), p1 = p2. Proof. intros. elim pnu_left_inv with (u := p1). elim pnu_left_inv with (u := p2). elim nu_constant with y p1 p2. reflexivity. Defined. Theorem pK_dec : forall P:x = x -> Prop, P refl_equal -> forall p:x = x, P p. Proof. intros. elim peq_proofs_unicity with x refl_equal p. trivial. Defined. Lemma peq_dec_refl : eq_dec x x = left _ eq_refl. Proof. case eq_dec. intros. f_equal. apply peq_proofs_unicity. intro. 
congruence. Defined. (* On [sigma] *) Let projs (P:A -> Type) (exP:sigma P) (def:P x) : P x := match exP with | sigmaI _ x' prf => match eq_dec x' x with | left eqprf => eq_rect x' P prf x eqprf | _ => def end end. Theorem inj_right_sigma : forall (P:A -> Type) (y y':P x), sigmaI P x y = sigmaI P x y' -> y = y'. Proof. intros. cut (projs (sigmaI P x y) y = projs (sigmaI P x y') y). unfold projs. case (eq_dec x x). intro e. elim e using pK_dec. trivial. intros. case n; trivial. case H0. reflexivity. Defined. Lemma inj_right_sigma_refl (P : A -> Type) (y : P x) : inj_right_sigma (y:=y) (y':=y) eq_refl = eq_refl. Proof. unfold inj_right_sigma. intros. unfold eq_rect. unfold projs. rewrite peq_dec_refl. unfold pK_dec. simpl. unfold peq_proofs_unicity. subst projs. simpl. unfold nu_inv, comp, nu. simpl. unfold eq_ind, nu_left_inv, ptrans_sym_eq, eq_rect, nu_constant. rewrite peq_dec_refl. reflexivity. Defined. End PEqdepDec. Arguments inj_right_sigma {A} {H} {x} P y y' e. #[export] Instance eq_eqdec {A} `{EqDec A} : forall x y : A, EqDec (x = y). Proof. intros. red. intros. exact (left (eq_proofs_unicity x0 y0)). Defined. #[export] Instance eqdec_uip {A} (E : EqDec A) : UIP A := fun x y e e' => eq_proofs_unicity e e'. #[export] Instance eq_uip {A} (E : UIP A) : forall x : A, UIP (x = x). Proof. intros y e e'. intros e'' ->. assert (Us := @UIP_shift A). compute in Us. forward Us. intros; apply E. intros. apply Us. Qed. Coq-Equations-1.3.1-8.20/theories/Prop/EqDecInstances.v000066400000000000000000000033671463127417400223750ustar00rootroot00000000000000Require Import Equations.Prop.Classes Equations.Prop.EqDec Equations.Prop.DepElim Equations.Prop.NoConfusion Equations.Prop.Tactics. (** Standard instances. *) Derive EqDec for unit bool nat. #[export] Polymorphic Instance prod_eqdec {A B} `(EqDec A) `(EqDec B) : EqDec (prod A B). Proof. eqdec_proof. Defined. #[export] Polymorphic Instance sum_eqdec {A B} `(EqDec A) `(EqDec B) : EqDec (A + B). Proof. eqdec_proof. Defined. #[export] Polymorphic Instance list_eqdec {A} `(EqDec A) : EqDec (list A). Proof. eqdec_proof. Defined. Local Set Equations With UIP. #[export] Polymorphic Instance sigma_uip {A B} `(UIP A) `(forall x, UIP (B x)) : UIP {x : A & B x}. Proof. red. intros [x p] [y q]. repeat (simplify * || intro). reflexivity. Defined. #[export] Polymorphic Instance sigma_eqdec {A B} `(EqDec A) `(forall x, EqDec (B x)) : EqDec {x : A & B x}. Proof. eqdec_proof. Defined. Polymorphic Definition eqdec_sig@{i} {A : Type@{i}} {B : A -> Type@{i}} `(EqDec A) `(forall a, EqDec (B a)) : EqDec (sigma B). Proof. intros. intros [x0 x1] [y0 y1]. case (eq_dec x0 y0). intros ->. case (eq_dec x1 y1). intros ->. left. reflexivity. intros. right. red. apply simplification_sigma2_uip@{i Set}. apply n. intros. right. red. apply simplification_sigma1@{i Set}. intros e _; revert e. apply n. Defined. #[export] Existing Instance eqdec_sig. Polymorphic Definition uip_sig@{i} {A : Type@{i}} {B : A -> Type@{i}} `(UIP A) `(forall a, UIP (B a)) : UIP (sigma@{i} B). Proof. intros. intros x y <-. destruct x. refine (eq_simplification_sigma1_dep_dep@{i Set} _ _ _ _ _). intros e'. destruct (uip eq_refl e'). simpl. intros e'. destruct (uip eq_refl e'). constructor. Defined. #[export] Existing Instance uip_sig. 
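(** Illustrative sketch, added for exposition (not part of the original file): the
    instances above make [eq_dec] available on concrete data; the example names are ours. *)
Example eq_dec_nat_example : { 3 = 3 } + { 3 <> 3 } := eq_dec 3 3.
Example eq_dec_list_example :
  { cons true nil = cons true nil } + { cons true nil <> cons true nil } :=
  eq_dec (cons true nil) (cons true nil).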
Coq-Equations-1.3.1-8.20/theories/Prop/Equations.v000066400000000000000000000022571463127417400215110ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (** The set of libraries required to run Equations with all features. *) Require Export Equations.Prop.Loader. Require Import Equations.Prop.Telescopes. #[export] Existing Instance wf_tele_measure. Require Import Program.Tactics. (* program_solve_wf launches auto on well-founded and propositional (i.e. in Prop) goals *) Global Obligation Tactic := simpl in *; program_simplify; Equations.CoreTactics.equations_simpl; try program_solve_wf. (** Tactic to solve well-founded proof obligations by default *) Ltac solve_rec := simpl in * ; cbv zeta ; intros ; try typeclasses eauto with subterm_relation simp rec_decision. Open Scope equations_scope.Coq-Equations-1.3.1-8.20/theories/Prop/FunctionalInduction.v000066400000000000000000000172371463127417400235240ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Require Import Equations.CoreTactics. Require Import Equations.Prop.Logic Equations.Prop.Classes Equations.Prop.EqDec Equations.Prop.DepElim. (** The tactic [funind c Hc] applies functional induction on the application [c] which must be of the form [f args] where [f] has a [FunctionalInduction] instance. [Hc] is the name given to the call, used to generate hypothesis names. *) Ltac funind c Hcall := match c with context C [ ?f ] => let x := constr:(fun_ind_prf (f:=f)) in (let prf := eval simpl in x in let p := context C [ prf ] in let prf := fresh in let call := fresh in assert(prf:=p) ; (* Abstract the call *) set(call:=c) in *; generalize (refl_equal : call = c); clearbody call ; intro Hcall ; (* Now do dependent elimination and simplifications *) dependent induction prf ; simplify_IH_hyps ; (* Use the simplifiers for the constant to get a nicer goal. *) try simpc f in * ; try on_last_hyp ltac:(fun id => simpc f in id ; noconf id)) || fail 1 "Internal error in funind" end || fail "Maybe you didn't declare the functional induction principle for" c. Ltac funind_call f H := on_call f ltac:(fun call => funind call H). Ltac make_refine n c := match constr:(n) with | 0 => uconstr:(c) | S ?n => make_refine n uconstr:(c _) end. Ltac constr_head c := let rec aux c := match c with | ?f _ => aux f | ?f => f end in aux c. Ltac with_last_secvar_aux tac := match goal with [ H : _ |- _ ] => is_secvar H; tac H end. Ltac with_last_secvar tac orelse := with_last_secvar_aux tac + (* No section variables *) orelse. Ltac get_elim c := match c with | context [?f] => constr:(fun_elim (f:=f)) end. Ltac clear_non_secvar := repeat match goal with | [ H : _ |- _ ] => tryif is_secvar H then fail else clear H end. Ltac remember_let H := lazymatch goal with | [ H := ?body : ?type |- _ ] => generalize (eq_refl : H = body) end. 
Ltac unfold_packcall packcall := lazymatch goal with |- ?x = ?y -> ?P => let y' := eval unfold packcall in y in change (x = y' -> P) end. Ltac make_packcall packcall c := match goal with | [ packcall : ?type |- _ ] => change (let _ := c in type) in (type of packcall) end. Ltac simplify_IHs_call := repeat match goal with | [ hyp : context [ block ] |- _ ] => cbn beta in hyp; eqns_specialize_eqs_block hyp 2; cbn beta iota delta[eq_rect_r eq_rect] zeta in hyp end. Ltac funelim_sig_tac c Heq simpl_IHs tac := let elimc := get_elim c in let packcall := fresh "packcall" in let packcall_fn := fresh "packcall_fn" in let elimfn := match elimc with fun_elim (f:=?f) => constr:(f) end in let elimn := match elimc with fun_elim (n:=?n) => constr:(n) end in block_goal; uncurry_call elimfn c packcall packcall_fn; remember_let packcall_fn; unfold_packcall packcall; (refine (eq_simplification_sigma1 _ _ _ _ _) || refine (eq_simplification_sigma1_nondep_dep _ _ _ _ _) || refine (eq_simplification_sigma1_dep _ _ _ _ _)); let H := fresh "eqargs" in let Heqfresh := fresh "__Heq__" in intros H Heqfresh; revert Heqfresh; block_goal; revert H; subst packcall_fn; clearbody packcall; make_packcall packcall elimfn; with_last_secvar ltac:(fun eos => move packcall before eos) ltac:(move packcall at top); revert_until packcall; block_goal; cbv zeta in packcall; revert packcall; curry; let elimt := make_refine elimn elimc in unshelve refine_ho elimt; intros; cbv beta; simplify_dep_elim; intros_until_block; simplify_dep_elim; cbn beta iota delta [transport eq_elim eq_elim_r eq_rect pack_sigma_eq pack_sigma_eq_nondep] in *; simpl_IHs; intros _ Heqfresh; unblock_goal; try (rewrite <- Heqfresh); try (rename Heqfresh into Heq || (let Heqf := fresh Heq in rename Heq into Heqf; rename Heqfresh into Heq)); tac c. Ltac funelim_constr_as c h simp_IHs := funelim_sig_tac c h simp_IHs ltac:(fun _ => idtac). Ltac get_first_elim c := match c with | ?x ?a ?b ?c ?d ?e ?f ?g ?h ?i ?j ?k ?l ?m => get_elim (x a b c d e f g h i j k l m) | ?x ?a ?b ?c ?d ?e ?f ?g ?h ?i ?j ?k ?l => get_elim (x a b c d e f g h i j k l) | ?x ?a ?b ?c ?d ?e ?f ?g ?h ?i ?j ?k => get_elim (x a b c d e f g h i j k) | ?x ?a ?b ?c ?d ?e ?f ?g ?h ?i ?j => get_elim (x a b c d e f g h i j) | ?x ?a ?b ?c ?d ?e ?f ?g ?h ?i => get_elim (x a b c d e f g h i) | ?x ?a ?b ?c ?d ?e ?f ?g ?h => get_elim (x a b c d e f g h) | ?x ?a ?b ?c ?d ?e ?f ?g => get_elim (x a b c d e f g) | ?x ?a ?b ?c ?d ?e ?f => get_elim (x a b c d e f) | ?x ?a ?b ?c ?d ?e => get_elim (x a b c d e) | ?x ?a ?b ?c ?d => get_elim (x a b c d) | ?x ?a ?b ?c => get_elim (x a b c) | ?x ?a ?b => get_elim (x a b) | ?x ?a => get_elim (x a) end. (** An alternative tactic that does not generalize over the arguments. BEWARE: It might render the goal unprovable. *) Ltac apply_funelim c := let elimc := get_first_elim c in let elimfn := match elimc with fun_elim (f:=?f) => constr:(f) end in let elimn := match elimc with fun_elim (n:=?n) => constr:(n) end in let elimt := make_refine elimn elimc in apply_args c elimt ltac:(fun elimc => unshelve refine_ho elimc; cbv beta). (** A special purpose database used to prove the elimination principle. *) Create HintDb funelim. (** Solve reflexivity goals. *) #[global] Hint Extern 0 (_ = _) => reflexivity : funelim. (** Specialize hypotheses begining with equalities. 
*) Ltac specialize_hyps := match goal with [ H : forall _ : ?x = ?x, _ |- _ ] => specialize (H (@eq_refl _ x)); unfold eq_rect_r, eq_rect in H ; simpl in H (* | [ H : forall _ : @Id _ ?x ?x, _ |- _ ] => *) (* specialize (H (@id_refl _ x)); unfold Id_rect_dep_r, Id_rect_r, Id_rect in H ; simpl in H *) end. #[export] Hint Extern 100 => specialize_hyps : funelim. (** Destruct conjunctions everywhere, starting with the hypotheses. This tactic allows to close functional induction proofs involving multiple nested and/or mutual recursive definitions. *) (** TODO: make it generic, won't work with another logic *) Lemma uncurry_conj (A B C : Prop) : (A /\ B -> C) -> (A -> B -> C). Proof. intros H a b. exact (H (conj a b)). Defined. Lemma uncurry_prod (A B C : Type) : (A * B -> C) -> (A -> B -> C). Proof. intros H a b. exact (H (pair a b)). Defined. Ltac specialize_mutual_nested := match goal with [ H : _ /\ _ |- _ ] => destruct H | [ |- _ /\ _ ] => split | [ H : _ * _ |- _ ] => destruct H | [ |- _ * _ ] => split end. #[global] Hint Extern 50 => specialize_mutual_nested : funelim. Ltac specialize_mutual := match goal with [ H : _ /\ _ |- _ ] => destruct H | [ H : _ * _ |- _ ] => destruct H (* Fragile, might render later goals unprovable *) | [ H : ?X -> _, H' : ?X |- _ ] => match X with | forall (_ : _), _ => specialize (H H') end | [ H : (?A /\ ?B) -> ?C |- _ ] => apply (uncurry_conj A B C) in H end. Ltac Equations.Init.specialize_mutfix ::= repeat specialize_mutual. (** Destruct existentials, including [existsT]'s. *) (* Hint Extern 101 => progress (destruct_exists; try (is_ground_goal; simplify_eqs)) : funelim. *) Coq-Equations-1.3.1-8.20/theories/Prop/Loader.v000066400000000000000000000022651463127417400207460ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (** The set of libraries required to run Equations with all features. *) Require Import Extraction. (** This exports tactics *) Declare ML Module "coq-equations.plugin". From Equations Require Export Init Signature. Require Import Equations.CoreTactics. Require Export Equations.Prop.SigmaNotations. Require Export Equations.Prop.Classes. Require Import Equations.Prop.DepElim Equations.Prop.Constants. Require Export Equations.Prop.EqDec. Require Export Equations.Prop.EqDecInstances. Require Export Equations.Prop.NoConfusion Equations.Prop.Subterm. Require Export Equations.Prop.Tactics. Require Export Equations.Prop.FunctionalInduction. (* funelim tactic *) Export EquationsNotations.Coq-Equations-1.3.1-8.20/theories/Prop/Logic.v000066400000000000000000000034001463127417400205650ustar00rootroot00000000000000From Equations Require Import Init. From Equations.Prop Require Export SigmaNotations. From Coq Require Import Extraction Relation_Definitions. (** The regular dependent eliminator of equality *) Scheme eq_elim := Induction for eq Sort Type. (** A symmetric variant taking a proof of [y = x] instead of [x = y]. (Used in functional elimination principles in case of dependent "with" nodes) *) Lemma eq_elim_r {A} (x : A) (P : forall a, a = x -> Type) (p : P x eq_refl) (y : A) (e : y = x) : P y e. Proof. destruct e. apply p. Defined. 
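(** Illustrative sketch, added for exposition (not part of the original file):
    [eq_elim_r] transports a proof along an equality oriented right-to-left;
    the name [eq_elim_r_example] is ours. *)
Example eq_elim_r_example {A} (P : A -> Type) (x y : A) (e : y = x) (p : P x) : P y :=
  eq_elim_r x (fun a _ => P a) p y e.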
Extraction Inline eq_rect eq_rect_r eq_rec eq_ind eq_elim_r eq_elim. (** Transport is a rephrasing of the non-dependent elimination principle of equality. *) Definition transport {A : Type} (P : A -> Type) {x y : A} (e : x = y) : P x -> P y := fun x => match e with eq_refl => x end. (** [transport_r] is a symmetric variant. *) Definition transport_r {A : Type} (P : A -> Type) {x y : A} (e : y = x) : P x -> P y := transport P (eq_sym e). Extraction Inline transport transport_r. (** [inspect x] allows to pattern-match x while retaining a propositional equality with [x] *) Definition inspect {A : Type} (x : A) : { y : A | x = y } := exist _ x eq_refl. (** Extract sigma to a (non-dependent) pair in OCaml *) Extract Inductive sigma => "( * )" ["(,)"]. (** Notation for the single element of [x = x]. *) Arguments eq_refl {A} {x}. (** Depdent eliminators for proofs, not derived automatically by Coq. *) Lemma False_rect_dep (P : False -> Type) : forall e : False, P e. Proof. intros e. destruct e. Defined. Extraction Inline False_rect False_rect_dep. Lemma True_rect_dep (P : True -> Type) (m : P I) : forall e : True, P e. Proof. intros e. destruct e. exact m. Defined. Extraction Inline True_rect True_rect_dep. Coq-Equations-1.3.1-8.20/theories/Prop/NoConfusion.v000066400000000000000000000023051463127417400217730ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (** Instances of [NoConfusion] for the standard datatypes. To be used by [equations] when it needs applications of injectivity or discrimination on some equation. *) Require Import Coq.Program.Tactics Bvector List. From Equations Require Import Init Signature. Require Import Equations.CoreTactics. Require Import Equations.Prop.Classes Equations.Prop.EqDec Equations.Prop.Constants. Require Import Equations.Prop.DepElim Equations.Prop.Tactics. (** Simple of parameterized inductive types just need NoConfusion. *) Derive NoConfusion for unit bool nat option sum Datatypes.prod list sigT sig. (* FIXME should be done by the derive command *) Extraction Inline noConfusion NoConfusionPackage_nat. Coq-Equations-1.3.1-8.20/theories/Prop/NoConfusion_UIP.v000066400000000000000000000015071463127417400225130ustar00rootroot00000000000000From Equations Require Import Equations. Import Sigma_Notations. (** This noConfusion instance corresponds to the rule for simplifying injectivity with UIP. *) Set Equations With UIP. Section NoConfusionUIP. Context {A} (B : A -> Type) `(E : UIP A). Definition NoConfusion_UIP: forall x, B x -> B x -> Prop := fun x y z => (x, y) = (x, z). Definition UIP_NoConfusionPackage : forall x, NoConfusionPackage (B x). Proof. intros. unshelve refine ({| NoConfusion := NoConfusion_UIP x |}). intros a b. simplify ?. simpl. trivial. intros a b. simplify *. constructor. intros a b. simpl. unfold NoConfusion_UIP. simplify *. simpl. rewrite DepElim.simplification_K_uip_refl. reflexivity. intros. simpl. destruct e. simpl. now rewrite DepElim.simplification_K_uip_refl. Defined. End NoConfusionUIP. 
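(** Illustrative usage sketch, added for exposition (not part of the original file):
    [noconf] combines no-confusion with the simplification rules, e.g. inverting a
    constructor equality on [nat]; the example name is ours. *)
Example noconf_example (n m : nat) (e : S n = S m) : n = m.
Proof. noconf e. reflexivity. Qed.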
Coq-Equations-1.3.1-8.20/theories/Prop/NoCycle.v000066400000000000000000000052621463127417400210740ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Require Import Equations.Prop.Equations. Ltac find_noCycle_proof H := let rec aux t ty := match ty with | ?x <> ?x => apply (t eq_refl) | (?A * ?B)%type => aux (fst t) A || aux (snd t) B end in simpl in H; let t := type of H in aux H t. Inductive tree : Set := | leaf | node (l r : tree). Derive NoConfusion for tree. Derive DependentElimination for tree. Derive Below for tree. Local Definition nlt x y := Below_tree (fun y => x <> y) y. Local Definition nle (x y : tree) := ((x <> y) * nlt x y)%type. Notation "x ¬< y " := (nlt x y) (at level 80). Notation "x ¬≤ y " := (nle x y) (at level 80). Lemma noCycle_tree : forall x y : tree, x = y -> nlt x y. Proof with trivial. intros x y <-. induction x as [|l Hl r Hr]. * now simpl. * split. { change (nle (node l r) r). revert Hr. generalize r at 2 4 as r'. induction r'; intros Hr. split... intro H; noconf H. split. + intro Heq; noconf Heq. now apply (fst (fst Hr)). + firstorder. } { change (nle (node l r) l). revert Hl. generalize l at 2 4 as l'. induction l'; intros Hl'. split... - now intro H; noconf H. - split. + intro Heq; noconf Heq; firstorder. + firstorder. } Qed. Require Import CRelationClasses. #[export] Instance nlt_refl : Reflexive nlt. Proof. intros x. now apply noCycle_tree. Defined. (* Neither transivite nor symmetric *) Lemma noCycle_test l r : node l r <> r. Proof. intros H; pose proof (noCycle_tree r (node l r) (eq_sym H)). simpl in X. find_noCycle_proof X. Qed. Lemma noCycle_test2 l r : node (node l l) r <> r. Proof. intros H; pose proof (noCycle_tree _ _ (eq_sym H)). find_noCycle_proof X. Qed. Lemma noCycle_test3 l r k u : node (node k (node l l)) (node u r) <> r. Proof. intros H; pose proof (noCycle_tree _ _ (eq_sym H)). find_noCycle_proof X. Qed. Equations build_tree (l r : tree) (n : nat) : tree := build_tree l r 0 := node r r; build_tree l r (S n) := node l (build_tree l r n). Transparent build_tree. Lemma noCycle_bigtest l r : build_tree l r 10 <> r. Proof. intros H; pose proof (noCycle_tree _ _ (eq_sym H)). find_noCycle_proof X. Qed. Coq-Equations-1.3.1-8.20/theories/Prop/OpaqueEquations.v000066400000000000000000000015501463127417400226570ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (** This module sets the set constants of Equations to opaque mode so that computation is not possible inside Coq, the tactics need this to solve obligations. *) Require Import Equations.Prop.DepElim. Global Opaque simplification_sigma2_uip simplification_sigma2_dec_point simplification_K_uip simplify_ind_pack simplified_ind_pack. 
Coq-Equations-1.3.1-8.20/theories/Prop/SigmaNotations.v000066400000000000000000000011471463127417400224750ustar00rootroot00000000000000From Equations Require Import Init. Set Warnings "-notation-overridden". Module Sigma_Notations. Notation "'Σ' x .. y , P" := (sigma (fun x => .. (sigma (fun y => P)) ..)) (at level 200, x binder, y binder, right associativity, format "'[ ' '[ ' Σ x .. y ']' , '/' P ']'") : type_scope. Notation "( x , .. , y , z )" := (@sigmaI _ _ x .. (@sigmaI _ _ y z) ..) (right associativity, at level 0, format "( x , .. , y , z )") : equations_scope. Notation "x .1" := (pr1 x) : equations_scope. Notation "x .2" := (pr2 x) : equations_scope. End Sigma_Notations. Import Sigma_Notations.Coq-Equations-1.3.1-8.20/theories/Prop/Subterm.v000066400000000000000000000270341463127417400211620ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) From Coq Require Import Wf_nat Relations. From Coq Require Import Wellfounded Relation_Definitions. From Coq Require Import Relation_Operators Lexicographic_Product Wf_nat. From Coq Require Export Program.Wf FunctionalExtensionality. From Equations Require Import Init Signature. Require Import Equations.CoreTactics. Require Import Equations.Prop.Classes Equations.Prop.EqDec Equations.Prop.DepElim Equations.Prop.Constants. Generalizable Variables A R S B. Scheme Acc_dep := Induction for Acc Sort Prop. (** The fixpoint combinator associated to a well-founded relation, just reusing the [Wf.Fix] combinator. *) Definition FixWf `{WF:WellFounded A R} (P : A -> Type) (step : forall x : A, (forall y : A, R y x -> P y) -> P x) : forall x : A, P x := Fix wellfounded P step. Definition step_fn_ext {A} {R} (P : A -> Type) := fun step : forall x : A, (forall y : A, R y x -> P y) -> P x => forall x (f g : forall y (H : R y x), P y), (forall y H, f y H = g y H) -> step x f = step x g. Lemma FixWf_unfold `{WF : WellFounded A R} (P : A -> Type) (step : forall x : A, (forall y : A, R y x -> P y) -> P x) (step_ext : step_fn_ext P step) (x : A) : FixWf P step x = step x (fun y _ => FixWf P step y). Proof. intros. unfold FixWf. rewrite Init.Wf.Fix_eq. apply step_ext. intros. reflexivity. intros x' f g H. apply step_ext. apply H. Qed. Lemma FixWf_unfold_step : forall (A : Type) (R : relation A) (WF : WellFounded R) (P : A -> Type) (step : forall x : A, (forall y : A, R y x -> P y) -> P x) (x : A) (step_ext : step_fn_ext P step) (step' : forall y : A, R y x -> P y), step' = (fun (y : A) (_ : R y x) => FixWf P step y) -> FixWf P step x = step x step'. Proof. intros. rewrite FixWf_unfold, H. reflexivity. apply step_ext. Qed. Ltac unfold_FixWf := match goal with |- context [ @FixWf ?A ?R ?WF ?P ?f ?x ] => let step := fresh in set(step := fun y (_ : R y x) => @FixWf A R WF P f y) in *; unshelve erewrite (@FixWf_unfold_step A R WF P f x _ step); [red; intros; simp_sigmas; red_one_eq (* Extensionality proof *) |hidebody step; red_eq_lhs (* Unfold the functional *) |reflexivity] end. Ltac unfold_recursor := unfold_FixWf. Lemma Acc_pi (A : Type) (R : relation A) i (x y : Acc R i) : x = y. Proof. revert y. induction x using Acc_dep. intros. destruct y. f_equal. extensionality y. extensionality H'. apply H. Qed. 
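(** Illustrative sketch, added for exposition (not part of the original file): by
    [Acc_pi], accessibility proofs are unique, e.g. for [lt] at [0]; the example
    name is ours. *)
Example Acc_pi_example (x y : Acc lt 0) : x = y.
Proof. apply Acc_pi. Qed.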
Lemma FixWf_unfold_ext `{WF : WellFounded A R} (P : A -> Type) (step : forall x : A, (forall y : A, R y x -> P y) -> P x) (x : A) : FixWf P step x = step x (fun y _ => FixWf P step y). Proof. intros. unfold FixWf, Fix. destruct wellfounded. simpl. f_equal. extensionality y. extensionality h. f_equal. apply Acc_pi. Qed. #[global] Hint Rewrite @FixWf_unfold_ext : Recursors. Lemma FixWf_unfold_ext_step : forall (A : Type) (R : relation A) (WF : WellFounded R) (P : A -> Type) (step : forall x : A, (forall y : A, R y x -> P y) -> P x) (x : A) (step' : forall y : A, R y x -> P y), step' = (fun (y : A) (_ : R y x) => FixWf P step y) -> FixWf P step x = step x step'. Proof. intros. rewrite FixWf_unfold_ext, H. reflexivity. Qed. #[global] Hint Rewrite @FixWf_unfold_ext_step : Recursors. Ltac unfold_FixWf_ext := match goal with |- context [ @FixWf ?A ?R ?WF ?P ?f ?x ] => let step := fresh in set(step := fun y (_ : R y x) => @FixWf A R WF P f y) in *; rewrite (@FixWf_unfold_ext_step A R WF P f x step); [hidebody step; try red_eq_lhs (* Unfold the functional *) |reflexivity] end. Ltac unfold_recursor_ext := unfold_FixWf_ext. (** Inline so that we get back a term using general recursion. *) Extraction Inline FixWf Fix Fix_F. (** This hint database contains the constructors of every derived subterm relation. It is used to automatically find proofs that a term is a subterm of another. *) Create HintDb subterm_relation discriminated. Create HintDb rec_decision discriminated. (** This is used to simplify the proof-search for recursive call obligations. *) Ltac simpl_let := match goal with [ H : let _ := ?t in _ |- _ ] => match t with | fixproto => fail 1 | _ => cbv zeta in H end end. #[global] Hint Extern 40 => progress (cbv beta in * || simpl_let) : rec_decision. (* This expands lets in the context to simplify proof search for recursive call obligations, as [eauto] does not do matching up-to unfolding of let-bound variables. *) #[global] Hint Extern 10 => match goal with [ x := _ |- _ ] => lazymatch goal with |- context [ x ] => subst x end end : rec_decision. (** We can automatically use the well-foundedness of a relation to get the well-foundedness of its transitive closure. Note that this definition is transparent as well as [wf_clos_trans], to allow computations with functions defined by well-founded recursion. *) Lemma WellFounded_trans_clos `(WF : WellFounded A R) : WellFounded (clos_trans A R). Proof. apply wf_clos_trans. apply WF. Defined. #[global] Hint Extern 4 (WellFounded (clos_trans _ _)) => apply @WellFounded_trans_clos : typeclass_instances. Lemma wf_MR {A R} `(WellFounded A R) {B} (f : B -> A) : WellFounded (MR R f). Proof. red. apply measure_wf. apply H. Defined. (* Do not apply [wf_MR] agressively, as Coq's unification could "invent" an [f] otherwise to unify. *) #[global] Hint Extern 0 (WellFounded (MR _ _)) => apply @wf_MR : typeclass_instances. #[global] Hint Extern 0 (MR _ _ _ _) => red : rec_decision. #[export] Instance lt_wf : WellFounded lt := lt_wf. #[global] Hint Resolve PeanoNat.Nat.lt_succ_diag_r : rec_decision. (** We also add hints for transitive closure, not using [t_trans] but forcing to build the proof by successive applications of the inner relation. *) #[global] Hint Resolve t_step : subterm_relation. Lemma clos_trans_stepr A (R : relation A) (x y z : A) : R y z -> clos_trans A R x y -> clos_trans A R x z. Proof. intros Hyz Hxy. exact (t_trans _ _ x y z Hxy (t_step _ _ _ _ Hyz)). Defined. #[global] Hint Resolve clos_trans_stepr : subterm_relation. 
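(** Illustrative sketch, added for exposition (not part of the original file): a direct
    use of [FixWf] with the [lt] instance declared above; the recursive call is justified
    by [PeanoNat.Nat.lt_succ_diag_r]. The definition and its name are ours. *)
Example FixWf_example : nat -> nat :=
  FixWf (R:=lt) (fun _ => nat)
    (fun x (rec : forall y, y < x -> nat) =>
      match x as n return (forall y, y < n -> nat) -> nat with
      | 0 => fun _ => 0
      | S n => fun rec' => S (rec' n (PeanoNat.Nat.lt_succ_diag_r n))
      end rec).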
(** The default tactic to build proofs of well foundedness of subterm relations. *) Create HintDb solve_subterm discriminated. #[global] Hint Extern 4 (_ = _) => reflexivity : solve_subterm. #[global] Hint Extern 10 => eapply_hyp : solve_subterm. Ltac solve_subterm := intros; apply Transitive_Closure.wf_clos_trans; red; intros; simp_sigmas; on_last_hyp ltac:(fun H => depind H); constructor; intros; simp_sigmas; on_last_hyp ltac:(fun HR => depind HR); simplify_dep_elim; try typeclasses eauto with solve_subterm. (** A tactic to launch a well-founded recursion. *) Ltac rec_wf_fix recname kont := let hyps := fresh in intros hyps; intro; on_last_hyp ltac:(fun x => rename x into recname; unfold MR at 1 in recname) ; destruct_right_sigma hyps; try curry recname; simpl in recname; kont recname. (* Ltac rec_wf_fix x recname fixterm := *) (* apply fixterm ; clear_local ; *) (* intros until 1 ; simp_sigmas ; *) (* on_last_hyp ltac:(fun x => rename x into recname) ; *) (* simplify_dep_elim ; intros ; unblock_goal ; intros ; *) (* move recname at bottom ; try curry recname ; simpl in recname. *) (** The [do] tactic but using a Coq-side nat. *) Ltac do_nat n tac := match n with | 0 => idtac | S ?n' => tac ; do_nat n' tac end. (** Generalize an object [x], packing it in a sigma type if necessary. *) Ltac sigma_pack n t := let packhyps := fresh "hypspack" in let xpack := fresh "pack" in let eos' := fresh "eos" in match constr:(n) with | 0%nat => set (eos' := the_end_of_the_section); move eos' at top | _ => do_nat n ltac:(idtac; revert_last); set (eos' := the_end_of_the_section); do_nat n intro end; uncurry_hyps packhyps; (progress (set(xpack := t) in |- ; cbv beta iota zeta in xpack; revert xpack; pattern sigma packhyps; clearbody packhyps; revert packhyps; clear_nonsection; clear eos')). (** We specialize the tactic for [x] of type [A], first packing [x] with its indices into a sigma type and finding the declared relation on this type. *) Ltac rec_wf recname t kont := sigma_pack 0 t; match goal with [ |- forall (s : ?T) (s0 := @?b s), @?P s ] => let fn := constr:(fun s : T => b s) in let c := constr:(wellfounded (R:=MR _ fn)) in let wf := constr:(FixWf (WF:=c)) in intros s _; revert s; refine (wf P _); simpl ; rec_wf_fix recname kont end. Ltac rec_wf_eqns recname x := rec_wf recname x ltac:(fun rechyp => add_pattern (hide_pattern rechyp)). Ltac rec_wf_rel_aux recname n t rel kont := sigma_pack n t; match goal with [ |- forall (s : ?T) (s0 := @?b s), @?P s ] => let fn := constr:(fun s : T => b s) in let c := constr:(wellfounded (R:=MR rel fn)) in let wf := constr:(FixWf (WF:=c)) in intros s _; revert s; refine (wf P _); simpl ; rec_wf_fix recname kont end. Ltac rec_wf_rel recname x rel := rec_wf_rel_aux recname 0 x rel ltac:(fun rechyp => idtac). (** Define non-dependent lexicographic products *) #[global] Arguments lexprod [A] [B] _ _. Section Lexicographic_Product. Variable A : Type. Variable B : Type. Variable leA : A -> A -> Prop. Variable leB : B -> B -> Prop. Inductive lexprod : A * B -> A * B -> Prop := | left_lex : forall (x x':A) (y:B) (y':B), leA x x' -> lexprod (x, y) (x', y') | right_lex : forall (x:A) (y y':B), leB y y' -> lexprod (x, y) (x, y'). Lemma acc_A_B_lexprod : forall x:A, Acc leA x -> (well_founded leB) -> forall y:B, Acc leB y -> Acc lexprod (x, y). Proof. induction 1 as [x _ IHAcc]; intros H2 y. induction 1 as [x0 H IHAcc0]; intros. apply Acc_intro. destruct y as [x2 y1]; intro H6. simple inversion H6; intro. injection H1. injection H3. intros. subst. clear H1 H3. 
apply IHAcc; auto with sets. injection H1. intros; subst. injection H3. intros; subst. auto. Defined. Theorem wf_lexprod : well_founded leA -> well_founded leB -> well_founded lexprod. Proof. intros wfA wfB; unfold well_founded. destruct a. apply acc_A_B_lexprod; auto with sets; intros. Defined. End Lexicographic_Product. #[export] Instance wellfounded_lexprod A B R S `(wfR : WellFounded A R, wfS : WellFounded B S) : WellFounded (lexprod A B R S) := wf_lexprod A B R S wfR wfS. #[global] Hint Constructors lexprod : rec_decision. (* NoCycle from well-foundedness. *) Lemma well_founded_irreflexive {A} {R : relation A}{wfR : WellFounded R} : forall x y : A, R x y -> x = y -> False. Proof. intros x y Ryy ->. red in wfR. induction (wfR y) as [y accy IHy]. apply (IHy _ Ryy Ryy). Qed. Definition NoCycle_WellFounded {A} (R : relation A) (wfR : WellFounded R) : NoCyclePackage A := {| NoCycle := R; noCycle := well_founded_irreflexive |}. #[export] Existing Instance NoCycle_WellFounded. #[global] Hint Extern 30 (@NoCycle ?A (NoCycle_WellFounded ?R ?wfr) _ _) => hnf; typeclasses eauto with subterm_relation : typeclass_instances. Coq-Equations-1.3.1-8.20/theories/Prop/Tactics.v000066400000000000000000000130511463127417400211250ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Require Import Equations.CoreTactics Equations.Prop.Classes Equations.Prop.DepElim Equations.Prop.Subterm Equations.Prop.FunctionalInduction. Ltac Equations.Init.simpl_equations ::= Equations.Prop.DepElim.simpl_equations. Ltac Equations.Init.simplify_equalities ::= Equations.Prop.DepElim.simplify_dep_elim. Ltac Equations.Init.depelim H ::= Equations.Prop.DepElim.depelim H. Ltac Equations.Init.depind H ::= Equations.Prop.DepElim.depind H. Ltac Equations.Init.noconf H ::= Equations.Prop.DepElim.noconf H. Ltac Equations.Init.simp_IHs_tac ::= Equations.Prop.FunctionalInduction.simplify_IHs_call. Ltac Equations.Init.funelim_constr_as x H simp_IHs ::= Equations.Prop.FunctionalInduction.funelim_constr_as x H simp_IHs. Ltac Equations.Init.apply_funelim H ::= Equations.Prop.FunctionalInduction.apply_funelim H. (** Tactic to solve EqDec goals, destructing recursive calls for the recursive structure of the type and calling instances of eq_dec on other types. *) #[global] Hint Extern 2 (@EqDecPoint ?A ?x) => lazymatch goal with | [ H : forall y, { x = _ } + { _ <> _ } |- _ ] => exact H | [ H : forall y, dec_eq x y |- _ ] => exact H end : typeclass_instances. Ltac eqdec_one x y := let good := intros -> in let contrad := let Hn := fresh in intro Hn; right; red; simplify_dep_elim; apply Hn; reflexivity in try match goal with | [ H : forall z, dec_eq x z |- _ ] => case (H y); [good|contrad] | [ H : forall z, { x = z } + { _ } |- _ ] => case (H y); [good|contrad] | _ => tryif unify x y then idtac (* " finished " x y *) else (case (eq_dec_point (x:=x) y); [good|contrad]) end. Ltac eqdec_loop t u := match t with | context C [ ?t ?x ] => match u with | context C [ ?u ?y] => eqdec_loop t u; eqdec_one x y end | _ => eqdec_one t u end. 
Ltac eqdec_proof := try red; intros; match goal with |- dec_eq ?x ?y => revert y; induction x; intros until y; depelim y; match goal with |- dec_eq ?x ?y => eqdec_loop x y end | |- { ?x = ?y } + { _ } => revert y; induction x; intros until y; depelim y; match goal with |- { ?x = ?y } + { _ } => eqdec_loop x y end end; try solve[left; reflexivity | right; red; simplify_dep_elim]. Ltac Equations.Init.solve_eqdec ::= eqdec_proof. Ltac Equations.Init.solve_subterm ::= Equations.Prop.Subterm.solve_subterm. Ltac Equations.Init.unfold_recursor ::= Equations.Prop.Subterm.unfold_recursor. Ltac Equations.Init.unfold_recursor_ext ::= Equations.Prop.Subterm.unfold_recursor_ext. Ltac solve_noconf_prf := intros; on_last_hyp ltac:(fun id => destruct id) ; (* Subtitute a = b *) on_last_hyp ltac:(fun id => destruct_sigma id; destruct id) ; (* Destruct the inductive object a *) constructor. Ltac solve_noconf_inv_eq a b := destruct_sigma a; destruct_sigma b; destruct a ; depelim b; simpl in * |-; on_last_hyp ltac:(fun id => hnf in id; destruct_tele_eq id || destruct id); solve [constructor]. Ltac solve_noconf_inv := intros; match goal with |- ?R ?a ?b => destruct_sigma a; destruct_sigma b; destruct a ; depelim b; simpl in * |-; on_last_hyp ltac:(fun id => hnf in id; destruct_tele_eq id || destruct id); solve [constructor] | |- @eq _ (?f ?a ?b _) _ => solve_noconf_inv_eq a b end. Ltac solve_noconf_inv_equiv := intros; (* Subtitute a = b *) on_last_hyp ltac:(fun id => destruct id) ; (* Destruct the inductive object a *) on_last_hyp ltac:(fun id => destruct_sigma id; destruct id) ; simpl; constructor. Ltac solve_noconf := simpl; intros; match goal with [ H : @eq _ _ _ |- @eq _ _ _ ] => try solve_noconf_inv_equiv | [ H : @eq _ _ _ |- _ ] => try solve_noconf_prf | [ |- @eq _ _ _ ] => try solve_noconf_inv end. Ltac solve_noconf_hom_inv_eq a b := destruct_sigma a; destruct_sigma b; destruct a ; depelim b; simpl in * |-; on_last_hyp ltac:(fun id => hnf in id; destruct_tele_eq id || depelim id); solve [constructor || simpl_equations; constructor]. Ltac solve_noconf_hom_inv := intros; match goal with | |- @eq _ (?f ?a ?b _) _ => solve_noconf_hom_inv_eq a b | |- ?R ?a ?b => destruct_sigma a; destruct_sigma b; destruct a ; depelim b; simpl in * |-; on_last_hyp ltac:(fun id => hnf in id; destruct_tele_eq id || depelim id); solve [constructor || simpl_equations; constructor] end. Ltac solve_noconf_hom_inv_equiv := intros; (* Subtitute a = b *) on_last_hyp ltac:(fun id => destruct id) ; (* Destruct the inductive object a using dependent elimination to handle UIP cases. *) on_last_hyp ltac:(fun id => destruct_sigma id; depelim id) ; simpl; simpl_equations; constructor. Ltac solve_noconf_hom := simpl; intros; match goal with [ H : @eq _ _ _ |- @eq _ _ _ ] => try solve_noconf_hom_inv_equiv | [ H : @eq _ _ _ |- _ ] => try solve_noconf_prf | [ |- @eq _ _ _ ] => try solve_noconf_hom_inv end. Ltac Equations.Init.solve_noconf ::= solve_noconf. Ltac Equations.Init.solve_noconf_hom ::= solve_noconf_hom. Coq-Equations-1.3.1-8.20/theories/Prop/Telescopes.v000066400000000000000000000200331463127417400216370ustar00rootroot00000000000000Require Import Equations.Prop.Loader. From Coq Require Import FunctionalExtensionality. Require Import Equations.Prop.DepElim. Require Import Equations.Prop.Tactics. Require Import Equations.Prop.FunctionalInduction. (** Telescopes: allows treating variable arity fixpoints *) Set Universe Polymorphism. Import Sigma_Notations. Local Open Scope equations_scope. Set Equations Transparent. 
Cumulative Inductive tele@{i} : Type := | tip (A : Type@{i}) | ext (A : Type@{i}) (B : A -> tele) : tele. Register tele as equations.tele.type. Register tip as equations.tele.tip. Register ext as equations.tele.ext. Section TeleSigma. Universe i. Equations tele_sigma (t : tele@{i}) : Type@{i} := tele_sigma (tip A) := A ; tele_sigma (ext A B) := @sigma A (fun x => tele_sigma (B x)). Coercion tele_sigma : tele >-> Sortclass. Inductive tele_val : tele@{i} -> Type@{i+1} := | tip_val {A} (a : A) : tele_val (tip A) | ext_val {A B} (a : A) (b : tele_val (B a)) : tele_val (ext A B). Equations tele_pred : tele -> Type := | tip A := A -> Prop; | ext A B := forall x : A, tele_pred (B x). Equations tele_rel : tele -> tele -> Type := | tip A | tip B := A -> B -> Prop; | ext A B | ext A' B' := forall (x : A) (y : A'), tele_rel (B x) (B' y); | _ | _ := False. Equations tele_rel_app (T U : tele) (P : tele_rel T U) (x : tele_sigma T) (y : tele_sigma U) : Type := tele_rel_app (tip A) (tip A') P a a' := P a a'; tele_rel_app (ext A B) (ext A' B') P (a, b) (a', b') := tele_rel_app (B a) (B' a') (P a a') b b'. Universes j k. Equations tele_fn : tele@{i} -> Type@{j} -> Type@{k} := | tip A | concl := A -> concl; | ext A B | concl := forall x : A, tele_fn (B x) concl. Equations tele_MR (T : tele@{i}) (A : Type@{j}) (f : tele_fn T A) : T -> A := tele_MR (tip A) C f := f; tele_MR (ext A B) C f := fun x => tele_MR (B x.1) C (f x.1) x.2. Equations tele_measure (T : tele@{i}) (A : Type@{i}) (f : tele_fn T A) (R : A -> A -> Prop) : T -> T -> Prop := tele_measure T C f R := fun x y => R (tele_MR T C f x) (tele_MR T C f y). Equations tele_type : tele@{i} -> Type@{k} := | tip A := A -> Type@{j}; | ext A B := forall x : A, tele_type (B x). Equations tele_type_app (T : tele@{i}) (P : tele_type T) (x : tele_sigma T) : Type@{k} := tele_type_app (tip A) P a := P a; tele_type_app (ext A B) P (a, b) := tele_type_app (B a) (P a) b. Equations tele_forall (T : tele@{i}) (P : tele_type T) : Type@{k} := | tip A | P := forall x : A, P x; | ext A B | P := forall x : A, tele_forall (B x) (P x). Equations tele_forall_impl (T : tele@{i}) (P : tele_type T) (Q : tele_type T) : Type := | tip A | P | Q := forall x : A, P x -> Q x; | ext A B | P | Q := forall x : A, tele_forall_impl (B x) (P x) (Q x). Equations tele_forall_app (T : tele@{i}) (P : tele_type T) (f : tele_forall T P) (x : T) : tele_type_app T P x := tele_forall_app (tip A) P f x := f x; tele_forall_app (ext A B) P f x := tele_forall_app (B x.1) (P x.1) (f x.1) x.2. Equations tele_forall_type_app (T : tele@{i}) (P : tele_type T) (fn : forall t, tele_type_app T P t) : tele_forall T P := | (tip A) | P | fn := fn; | ext A B | P | fn := fun a : A => tele_forall_type_app (B a) (P a) (fun b => fn (a, b)). Lemma tele_forall_app_type (T : tele@{i}) (P : tele_type T) (f : forall t, tele_type_app T P t) : forall x, tele_forall_app T P (tele_forall_type_app T P f) x = f x. Proof. induction T; simpl. reflexivity. cbn. intros [a b]. simpl. rewrite H. reflexivity. Defined. Equations tele_forall_uncurry (T : tele@{i}) (P : T -> Type@{j}) : Type@{k} := | tip A | P := forall x : A, P x; | ext A B | P := forall x : A, tele_forall_uncurry (B x) (fun y : tele_sigma (B x) => P (x, y)). Equations tele_rel_pack (T U : tele) (x : tele_rel T U) : tele_sigma T -> tele_sigma U -> Prop by struct T := tele_rel_pack (tip A) (tip A') P := P; tele_rel_pack (ext A B) (ext A' B') P := fun x y => tele_rel_pack (B x.1) (B' y.1) (P _ _) x.2 y.2. 
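(* A small usage sketch, not part of the original file: a concrete two-variable
   telescope and what the interpretations above compute on it.  Since
   [Set Equations Transparent] is on, both equations hold by conversion. *)
Example tele_nat_bool_sketch : tele := ext nat (fun _ => tip bool).
Example tele_nat_bool_sigma_sketch :
  tele_sigma tele_nat_bool_sketch = sigma (fun _ : nat => bool) := eq_refl.
Example tele_nat_bool_fn_sketch :
  tele_fn tele_nat_bool_sketch Prop = (nat -> bool -> Prop) := eq_refl.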
Equations tele_pred_pack (T : tele) (P : tele_pred T) : tele_sigma T -> Prop := tele_pred_pack (tip A) P := P; tele_pred_pack (ext A B) P := fun x => tele_pred_pack (B x.1) (P x.1) x.2. Equations tele_type_unpack (T : tele) (P : tele_sigma T -> Type) : tele_type T := tele_type_unpack (tip A) P := P; tele_type_unpack (ext A B) P := fun x => tele_type_unpack (B x) (fun y => P (x, y)). Equations tele_pred_fn_pack (T U : tele) (P : tele_fn T (tele_pred U)) : tele_sigma T -> tele_sigma U -> Prop := tele_pred_fn_pack (tip A) U P := fun x => tele_pred_pack U (P x); tele_pred_fn_pack (ext A B) U P := fun x => tele_pred_fn_pack (B x.1) U (P x.1) x.2. Definition tele_rel_curried T := tele_fn T (tele_pred T). Equations tele_forall_pack (T : tele) (P : T -> Type) (f : tele_forall_uncurry T P) (t : T) : P t := | (tip A) | P | f | t := f t; | ext A B | P | f | (a, b) := tele_forall_pack (B a) (fun b => P (a, b)) (f a) b. Equations tele_forall_unpack (T : tele@{i}) (P : T -> Type@{j}) (f : forall (t : T), P t) : tele_forall_uncurry T P := | (tip A) | P | f := f; | ext A B | P | f := fun a : A => tele_forall_unpack (B a) (fun b => P (a, b)) (fun b => f (a, b)). Lemma tele_forall_pack_unpack (T : tele) (P : T -> Type) (f : forall t, P t) : forall x, tele_forall_pack T P (tele_forall_unpack T P f) x = f x. Proof. induction T; simpl. reflexivity. intros [a b]. simpl. rewrite H. reflexivity. Defined. End TeleSigma. Register tele_sigma as equations.tele.interp. Register tele_measure as equations.tele.measure. #[export] Instance wf_tele_measure@{i j k} {T : tele@{i}} (A : Type@{j}) (f : tele_fn@{i j k} T A) (R : A -> A -> Prop) : WellFounded R -> WellFounded (tele_measure@{i j k} T A f R). Proof. intros. apply Program.Wf.measure_wf. apply H. Defined. Section Fix. Universe i j k. Context {T : tele@{i}} (R : T -> T -> Prop). Context (wf : WellFounded R). Context (P : tele_type@{i j k} T). (* (forall x : A, (forall y : A, R y x -> P y) -> P x) -> forall x : A, P x *) Definition tele_fix_functional_type := tele_forall_uncurry T (fun x => ((tele_forall_uncurry T (fun y => R y x -> tele_type_app T P y))) -> tele_type_app T P x). Context (fn : tele_fix_functional_type). Lemma tele_fix : tele_forall T P. Proof. refine (tele_forall_type_app _ _ (@Subterm.FixWf (tele_sigma T) _ wf (tele_type_app T P) (fun x H => tele_forall_pack T _ fn x (tele_forall_unpack T _ H)))). Defined. End Fix. Register tele_fix as equations.tele.fix. Register tele_MR as equations.tele.MR. Register tele_fix_functional_type as equations.tele.fix_functional_type. Register tele_type_app as equations.tele.type_app. Register tele_forall_type_app as equations.tele.forall_type_app. Register tele_forall_uncurry as equations.tele.forall_uncurry. Register tele_forall as equations.tele.forall. Register tele_forall_pack as equations.tele.forall_pack. Register tele_forall_unpack as equations.tele.forall_unpack. Extraction Inline tele_forall_pack tele_forall_unpack tele_forall_type_app tele_fix. Lemma poly_f_equal@{i j} : forall (A : Type@{i}) (B : Type@{j}) (f : A -> B) (x y : A), x = y -> f x = f y. Proof. intros. destruct H. reflexivity. Defined. Section FixUnfold. Universes i j k. Context {T : tele@{i}} (x : T) (R : T -> T -> Prop). Context (wf : well_founded R). Context (P : tele_type@{i j k} T). (* (forall x : A, (forall y : A, R y x -> P y) -> P x) -> forall x : A, P x *) Context (fn : tele_fix_functional_type@{i j k} R P). 
Lemma tele_fix_unfold : tele_forall_app T P (tele_fix R wf P fn) x = tele_forall_pack T _ fn x (tele_forall_unpack T _ (fun y _ => tele_forall_app T P (tele_fix R wf P fn) y)). Proof. intros. unfold tele_fix, Subterm.FixWf, Fix. rewrite tele_forall_app_type@{i j k}. destruct (wellfounded x). simpl. apply poly_f_equal@{k k}. apply poly_f_equal@{k k}. extensionality y. extensionality h. rewrite tele_forall_app_type@{i j k}. apply poly_f_equal@{k k}. apply Subterm.Acc_pi. Defined. End FixUnfold. Register tele_fix_unfold as equations.tele.fix_unfold. Coq-Equations-1.3.1-8.20/theories/Prop/TransparentEquations.v000066400000000000000000000014401463127417400237240ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (** This module sets the set constants of Equations to transparent mode so that computation is possible inside Coq. *) Require Import Equations.Prop.DepElim. Global Transparent simplification_sigma2_uip simplification_K_uip simplify_ind_pack simplified_ind_pack. Coq-Equations-1.3.1-8.20/theories/Prop/dune000066400000000000000000000002461463127417400202240ustar00rootroot00000000000000(coq.theory ; This determines the -R flag (name Equations.Prop) (package coq-equations) (synopsis "Equations Plugin") (theories Equations) (modules :standard)) Coq-Equations-1.3.1-8.20/theories/Signature.v000066400000000000000000000033401463127417400205540ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2021 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (** Signatures for dependent types. A signature for [A] is a sigma-type in which any [A] can be packed. *) From Equations Require Import Init. Polymorphic Class Signature@{i} (fam : Type@{i}) (signature_index : Type@{i}) (signature : forall (_ : signature_index), Type@{i}) : Type@{i} := signature_pack : forall (_ : fam), sigma signature. #[global] Hint Mode Signature ! - - : typeclass_instances. Polymorphic Definition signature {fam index sig} `{S : @Signature fam index sig} := sig. Register Equations.Signature.Signature as equations.signature.class. Register Equations.Signature.signature as equations.signature.signature. Register Equations.Signature.signature_pack as equations.signature.pack. Extraction Inline signature signature_pack. (* For right associated sigmas *) Ltac destruct_right_sigma H := match type of H with | @sigma _ (fun x => _) => let H' := fresh H in destruct H as [? H']; destruct_right_sigma H' | @sigma _ _ => let H' := fresh H in destruct H as [? H']; destruct_right_sigma H' | _ => idtac end. Ltac destruct_one_sigma := match goal with | [ H : @sigma _ _ |- _ ] => destruct_right_sigma H end. Ltac simp_sigmas := repeat destruct_one_sigma. 
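(* A small usage sketch, not part of the original file: [simp_sigmas]
   recursively destructs right-nested [sigma] hypotheses into their
   components. *)
Example simp_sigmas_sketch
  (p : sigma (fun x : nat => sigma (fun _ : nat => x = x))) : True.
Proof. simp_sigmas. exact I. Qed.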
Coq-Equations-1.3.1-8.20/theories/Type/000077500000000000000000000000001463127417400173455ustar00rootroot00000000000000Coq-Equations-1.3.1-8.20/theories/Type/All.v000066400000000000000000000021041463127417400202410ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (** The set of libraries required to run Equations using an equality in Type with all features. *) Set Warnings "-notation-overridden". Require Export Equations.Type.Loader. Require Export Equations.Type.Telescopes. Require Export Equations.Type.WellFoundedInstances. Global Obligation Tactic := Equations.CoreTactics.equations_simpl. (** Tactic to solve well-founded proof obligations by default *) Ltac solve_rec := simpl in * ; cbv zeta ; intros ; try typeclasses eauto with subterm_relation simp rec_decision. Export EquationsNotations. Open Scope equations_scope.Coq-Equations-1.3.1-8.20/theories/Type/Classes.v000066400000000000000000000140301463127417400211270ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) From Coq Require Import Extraction CRelationClasses. Require Import Equations.Init Equations.CoreTactics. Set Warnings "-notation-overridden". Require Import Equations.Type.Logic Equations.Type.Relation Equations.Type.Relation_Properties Equations.Type.WellFounded. Set Universe Polymorphism. Import Id_Notations. (** A class for well foundedness proofs. Instances can be derived automatically using [Derive Subterm for ind]. *) Class WellFounded {A : Type} (R : relation A) := wellfounded : well_founded R. (** This class contains no-cyclicity proofs. They can be derived from well-foundedness proofs for example. *) (** The proofs of [NoCycle] can be arbitrarily large, it doesn't actually matter in the sense that they are used to prove absurdity. *) Cumulative Class NoCyclePackage@{i|} (A : Type@{i}) := { NoCycle : A -> A -> Type@{i}; noCycle : forall {a b}, NoCycle a b -> (a = b -> Empty) }. (** These lemmas explains how to apply it during simplification. *) (** We always generate a goal of the form [NoCycle x C[x]], using either the left or right versions of the following lemma. *) Lemma apply_noCycle_left@{i j|} {A : Type@{i}} {noconf : NoCyclePackage A} (p q : A) {B : p = q -> Type@{j}} : NoCycle@{i} p q -> (forall H : p = q, B H). Proof. intros noc eq. destruct (noCycle noc eq). Defined. Lemma apply_noCycle_right@{i j|} {A : Type@{i}} {noconf : NoCyclePackage A} (p q : A) {B : p = q -> Type@{j}} : NoCycle q p -> (forall H : p = q, B H). Proof. intros noc eq. destruct (noCycle noc (id_sym eq)). Defined. Extraction Inline apply_noCycle_left apply_noCycle_right. (** NoCycle can be decided using the well-founded subterm relation. *) Definition NoCycle_WellFounded {A} (R : relation A) (wfR : WellFounded R) : NoCyclePackage A := {| NoCycle := R; noCycle := WellFounded.well_founded_irreflexive (wfR:=wfR) |}. 
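(* Usage sketch, not part of the original file: during simplification, a goal of
   the shape [x = t -> B] where [x] occurs strictly inside [t] is handled by
   [apply_noCycle_left]/[apply_noCycle_right], leaving a goal [NoCycle x t].
   With the [NoCycle_WellFounded] package this amounts to showing [R x t] for
   the chosen well-founded relation (typically a derived subterm relation),
   which the hint below discharges using the [subterm_relation] hint database. *)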
#[export] Existing Instance NoCycle_WellFounded. #[export] Hint Extern 30 (@NoCycle ?A (NoCycle_WellFounded ?R ?wfr) _ _) => hnf; typeclasses eauto with subterm_relation : typeclass_instances. (** The NoConfusionPackage class provides a method for solving injectivity and discrimination of constructors, represented by an equality on an inductive type [I]. The type of [noConfusion] should be of the form [ Π Δ, (x y : I Δ) (x = y) -> NoConfusion x y ], where [NoConfusion x y] for constructor-headed [x] and [y] will give equality of their arguments or the absurd proposition in case of conflict. This gives a general method for simplifying by discrimination or injectivity of constructors. Some actual instances are defined later in the file using the more primitive [discriminate] and [injection] tactics on which we can always fall back. *) Cumulative Class NoConfusionPackage@{i|} (A : Type@{i}) := { NoConfusion : A -> A -> Type@{i}; noConfusion : forall {a b}, NoConfusion a b -> Id a b; noConfusion_inv : forall {a b}, Id a b -> NoConfusion a b; noConfusion_sect : forall {a b} (e : NoConfusion a b), Id (noConfusion_inv (noConfusion e)) e; noConfusion_retr : forall {a b} (e : Id a b), Id (noConfusion (noConfusion_inv e)) e; }. (** This lemma explains how to apply it during simplification. *) Polymorphic Lemma apply_noConfusion@{i j|} {A : Type@{i}} {noconf : NoConfusionPackage A} (p q : A) {B : Id p q -> Type@{j}} : (forall e : NoConfusion p q, B (noConfusion e)) -> (forall e : Id p q, B e). Proof. intros. generalize (noConfusion_retr e). destruct e. intros eq. destruct eq. apply X. Defined. Extraction Inline apply_noConfusion. (** Classes for types with UIP or decidable equality. *) Class UIP@{i|} (A : Type@{i}) := uip : forall {x y : A} (e e' : x = y), e = e'. #[export] Instance UIP_hSet (A : Type) (H : HSet A) : UIP A := H. Definition dec_eq {A} (x y : A) : Type := (x = y) + (x <> y). Class EqDec@{i|} (A : Type@{i}) := eq_dec : forall x y : A, sum@{i} (x = y) (x = y -> Empty). Class EqDecPoint (A : Type) (x : A) := eq_dec_point : forall y : A, (x = y) + (x <> y). #[export] Instance EqDec_EqDecPoint A `(EqDec A) (x : A) : EqDecPoint A x := eq_dec x. (** For treating impossible cases. Equations corresponding to impossible calls form instances of [ImpossibleCall (f args)]. *) Class ImpossibleCall@{i} {A : Type@{i}} (a : A) : Type@{i} := is_impossible_call : False. (** We have a trivial elimination operator for impossible calls. *) Definition elim_impossible_call {A} (a : A) {imp : ImpossibleCall a} (P : A -> Type) : P a := match is_impossible_call with end. (** The tactic tries to find a call of [f] and eliminate it. *) Ltac impossible_call f := on_call f ltac:(fun t => apply (elim_impossible_call t)). (** The [FunctionalInduction f] typeclass is meant to register functional induction principles associated to a function [f]. Such principles are automatically generated for definitions made using [Equations]. *) Polymorphic Class FunctionalInduction {A : Type} (f : A) := { fun_ind_prf_ty : Type; fun_ind_prf : fun_ind_prf_ty }. Register FunctionalInduction as equations.funind.class. (** The [FunctionalElimination f] class declares elimination principles produced from the functional induction principle for [f] to be used directly to eliminate a call to [f]. This is the preferred method of proving results about a function. [n] is the number of binders for parameters, predicates and methods of the eliminator. 
*) Polymorphic Class FunctionalElimination {A : Type} (f : A) (fun_elim_ty : Type) (n : nat) := fun_elim : fun_elim_ty. Register FunctionalElimination as equations.funelim.class. Coq-Equations-1.3.1-8.20/theories/Type/Constants.v000066400000000000000000000066301463127417400215150ustar00rootroot00000000000000Set Warnings "-notation-overridden". From Equations Require Import Init. Require Import Equations.Type.Logic Equations.Type.DepElim Equations.Type.EqDec Equations.Type.Classes. From Coq Require Import CRelationClasses Relations. (** Naturals *) Register Init.Datatypes.O as equations.nat.zero. Register Init.Datatypes.S as equations.nat.succ. Register Init.Datatypes.nat as equations.nat.type. (* Sigma Types *) Register Equations.Init.sigma as equations.sigma.type. Register Equations.Init.sigmaI as equations.sigma.intro. Register Equations.Init.pr1 as equations.sigma.pr1. Register Equations.Init.pr2 as equations.sigma.pr2. (** Classes *) Register DepElim.DependentEliminationPackage as equations.depelim.class. Register Classes.ImpossibleCall as equations.impossiblecall.class. (** Logic parameterization *) Derive Signature for Id. Register Logic.Id as equations.equality.type. Register Logic.id_refl as equations.equality.refl. Register Logic.Id_case as equations.equality.case. Register Logic.Id_rect_r as equations.equality.elim. Register Classes.EqDec as equations.eqdec.class. Register Classes.dec_eq as equations.eqdec.dec_eq. Register Logic.Empty as equations.bottom.type. Register Logic.Empty_case as equations.bottom.case. Register Logic.Empty_rect as equations.bottom.elim. Register Coq.Init.Datatypes.unit as equations.top.type. Register Coq.Init.Datatypes.tt as equations.top.intro. Register Equations.Type.Logic.unit_rect as equations.top.elim. Register Logic.prod as equations.conj.type. Register Equations.Init.sigmaI as equations.conj.intro. Register Init.Datatypes.unit as equations.unit.type. Register Init.Datatypes.tt as equations.unit.intro. Register Logic.prod as equations.product.type. Register Equations.Init.sigmaI as equations.product.intro. (* FIXME not polymorphic *) Register Classes.WellFounded as equations.wellfounded.class. Register WellFounded.well_founded as equations.wellfounded.type. Register Relation.relation as equations.relation.type. Register Relation.trans_clos as equations.relation.transitive_closure. (* Dependent elimination constants *) Register DepElim.solution_left as equations.depelim.solution_left. Register DepElim.solution_left_dep as equations.depelim.solution_left_dep. Register DepElim.solution_right as equations.depelim.solution_right. Register DepElim.solution_right_dep as equations.depelim.solution_right_dep. Register Classes.NoConfusionPackage as equations.noconfusion.class. Register Classes.apply_noConfusion as equations.depelim.apply_noConfusion. Register Classes.NoCyclePackage as equations.nocycle.class. Register Classes.apply_noCycle_left as equations.depelim.apply_noCycle_left. Register Classes.apply_noCycle_right as equations.depelim.apply_noCycle_right. Register DepElim.simplification_sigma1 as equations.depelim.simpl_sigma. Register DepElim.simplification_sigma1_dep as equations.depelim.simpl_sigma_dep. Register DepElim.simplification_sigma1_nondep_dep as equations.depelim.simpl_sigma_nondep_dep. Register DepElim.simplification_sigma1_dep_dep as equations.depelim.simpl_sigma_dep_dep. Register DepElim.simplify_ind_pack as equations.depelim.simplify_ind_pack. Register DepElim.simplify_ind_pack_inv as equations.depelim.simplify_ind_pack_inv. 
Register DepElim.opaque_ind_pack_inv as equations.depelim.opaque_ind_pack_eq_inv. Register DepElim.pack_sigma as equations.depelim.pack_sigma_eq. Register DepElim.simplification_K_uip as equations.depelim.simpl_K. Coq-Equations-1.3.1-8.20/theories/Type/DepElim.v000066400000000000000000000700201463127417400210520ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (** Tactics related to (dependent) equality and proof irrelevance. *) Set Warnings "-notation-overridden". Require Import Coq.Program.Tactics. Require Export Equations.Init. Require Import Equations.Signature Equations.CoreTactics. Require Import Equations.Type.Logic. Require Import Equations.Type.Classes. Require Import Equations.Type.EqDec. Set Universe Polymorphism. Import Sigma_Notations. Import Id_Notations. Local Open Scope equations_scope. Local Set Keyed Unification. (** Support for the [Equations] command. These tactics implement the necessary machinery to solve goals produced by the [Equations] command relative to dependent pattern-matching. It is inspired from the "Eliminating Dependent Pattern-Matching" paper by Goguen, McBride and McKinna. *) (** The [DependentEliminationPackage] provides the default dependent elimination principle to be used by the [equations] resolver. It is especially useful to register the dependent elimination principles for things in [Prop] which are not automatically generated, but it can be used for modified eliminators too. *) Polymorphic Class DependentEliminationPackage (A : Type) := { elim_type : Type ; elim : elim_type }. (** A higher-order tactic to apply a registered eliminator. *) Ltac elim_tac tac p := let ty := type of p in let eliminator := eval simpl in (elim (A:=ty)) in tac p eliminator. (** Specialization to do case analysis or induction. Note: the [equations] tactic tries [case] before [elim_case]: there is no need to register generated induction principles. *) Ltac elim_case p := elim_tac ltac:(fun p el => destruct p using el) p. Ltac elim_ind p := elim_tac ltac:(fun p el => induction p using el) p. (** Lemmas used by the simplifier, mainly rephrasings of [eq_rect], [eq_ind]. *) Lemma solution_left@{i j|} : forall {A : Type@{i}} {B : A -> Type@{j}} (t : A), B t -> (forall x, Id x t -> B x). Proof. intros A B t H x eq. destruct eq. apply H. Defined. Notation " e # t " := (Id_case _ _ t _ e) (right associativity, at level 65) : equations_scope. Notation " e # [ P ] t " := (Id_rect_r _ P t _ e) (right associativity, at level 65) : equations_scope. Lemma Id_sym_invol {A} (x y : A) (e : x = y) : id_sym (id_sym e) = e. Proof. destruct e. reflexivity. Defined. Lemma Id_symmetry_dep {A} {t : A} {B : forall (x : A), x = t -> Type} : (forall (x : A) (eq : t = x), B x (id_sym eq)) -> (forall (x : A) (eq : x = t), B x eq). Proof. intros. rewrite <- Id_sym_invol. generalize (id_sym eq). apply X. Defined. (* Carefully crafted to avoid introducing commutative cuts. *) Lemma solution_left_dep@{i j|} : forall {A : Type@{i}} (t : A) {B : forall (x : A), (x = t -> Type@{j})}, B t 1 -> (forall x (Heq : x = t), B x Heq). Proof. intros A t B H x eq. apply Id_symmetry_dep. clear eq. intros. destruct eq. exact H. Defined. 
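(* A minimal usage sketch, not part of the original file: [solution_left]
   instantiates a goal quantified over [x] and an equation [x = t] by
   substituting [t] for [x]. *)
Example solution_left_sketch (b : bool) : forall x : bool, x = b -> x = b :=
  solution_left b 1.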
Lemma solution_right@{i j|} : forall {A : Type@{i}} {B : A -> Type@{j}} (t : A), B t -> (forall x, t = x -> B x). Proof. intros A B t H x eq. destruct eq. apply H. Defined. Lemma solution_right_dep@{i j|} : forall {A : Type@{i}} (t : A) {B : forall (x : A), (t = x -> Type@{j})}, B t 1 -> (forall x (Heq : t = x), B x Heq). Proof. intros A t B H x eq. destruct eq. apply H. Defined. Lemma solution_left_let@{i j|} : forall {A : Type@{i}} {B : A -> Type@{j}} (b : A) (t : A), (b = t -> B t) -> (let x := b in x = t -> B x). Proof. intros A B b t H x eq. subst x. destruct eq. apply H. reflexivity. Defined. Lemma solution_right_let@{i j|} : forall {A : Type@{i}} {B : A -> Type@{j}} (b t : A), (t = b -> B t) -> (let x := b in t = x -> B x). Proof. intros A B b t H x eq. subst x. destruct eq. apply H. reflexivity. Defined. Lemma deletion@{i j|} : forall {A : Type@{i}} {B : Type@{j}} (t : A), B -> (t = t -> B). Proof. intros; assumption. Defined. Lemma simplification_sigma1@{i j|} {A : Type@{i}} {P : Type@{i}} {B : Type@{j}} (p q : A) (x : P) (y : P) : (p = q -> x = y -> B) -> ((p, x) = (q, y) -> B). Proof. intros * prf eq; revert prf. change p with (pr1 (p, x)). change q with (pr1 (q, y)). change x with (pr2 (p, x)) at 2. change y with (pr2 (q, y)) at 2. destruct eq. intros eq. exact (eq 1 1). Defined. Lemma simplification_sigma1_dep@{i j|} {A : Type@{i}} {P : A -> Type@{i}} {B : Type@{j}} (p q : A) (x : P p) (y : P q) : (forall e : Id@{i} p q, Id (@Id_rew@{i i} A p P x q e) y -> B) -> (Id ((p, x)) ((q, y)) -> B). Proof. intros. revert X. change p with (pr1 (p, x)). change q with (pr1 (q, y)). change x with (pr2 (p, x)) at 3. change y with (pr2 (q, y)) at 4. destruct X0. intros X. eapply (X id_refl). apply id_refl. Defined. Definition pack_sigma_nondep@{i} {A : Type@{i}} {P : Type@{i}} {p q : A} {x : P} {y : P} (e' : Id p q) (e : Id x y) : Id (p, x) (q, y). Proof. destruct e'. simpl in e. destruct e. apply id_refl. Defined. Lemma simplification_sigma1_nondep_dep@{i j|} {A : Type@{i}} {P : Type@{i}} (p q : A) (x : P) (y : P) {B : Id (p, x) (q, y) -> Type@{j}} : (forall e' : Id p q, forall e : Id x y, B (pack_sigma_nondep e' e)) -> (forall e : Id (sigmaI (fun _ => P) p x) (sigmaI (fun _ => P) q y), B e). Proof. intros. revert X. change p with (pr1 (p, x)). change q with (pr1 (q, y)). change x with (pr2 (p, x)) at 2 4. change y with (pr2 (q, y)) at 2 4. destruct e. intros X. simpl in *. apply (X id_refl id_refl). Defined. Definition pack_sigma@{i} {A : Type@{i}} {P : A -> Type@{i}} {p q : A} {x : P p} {y : P q} (e' : Id p q) (e : Id (@Id_rew A p P x q e') y) : Id (p, x) (q, y). Proof. destruct e'. simpl in e. destruct e. apply id_refl. Defined. Lemma simplification_sigma1_dep_dep@{i j|} {A : Type@{i}} {P : A -> Type@{i}} (p q : A) (x : P p) (y : P q) {B : Id (p, x) (q, y) -> Type@{j}} : (forall e' : Id p q, forall e : Id (@Id_rew A p P x q e') y, B (pack_sigma e' e)) -> (forall e : Id ((p, x)) ((q, y)), B e). Proof. intros. revert X. change p with (pr1 (p, x)). change q with (pr1 (q, y)). change x with (pr2 (p, x)) at 3 5. change y with (pr2 (q, y)) at 4 6. destruct e. intros X. simpl in *. apply (X id_refl id_refl). Defined. (* Lemma simplification_sigma1'@{i j} {A : Type@{i}} {P : A -> Type@{i}} {B : Type@{j}} (p q : A) (x : P p) (y : P q) : *) (* (forall e : Id p q, Id (Id_rew A p P x q e) y -> B) -> *) (* (Id ((p, x)) ((q, y)) -> B). *) (* Proof. *) (* intros. revert X. *) (* change p with (pr1 (p, x)). *) (* change q with (pr1 (q, y)). *) (* change x with (pr2 (p, x)) at 3. 
*) (* change y with (pr2 (q, y)) at 4. *) (* destruct X0. *) (* intros X. eapply (X id_refl). apply id_refl. *) (* Defined. *) Lemma pr2_inv_uip@{i|} {A : Type@{i}} {P : A -> Type@{i}} {x : A} {y y' : P x} : y = y' -> sigmaI@{i} P x y = sigmaI@{i} P x y'. Proof. exact (solution_right (B:=fun y' => (x, y) = (x, y')) y 1 y'). Defined. Lemma pr2_uip@{i|} {A : Type@{i}} {E : UIP A} {P : A -> Type@{i}} {x : A} {y y' : P x} : sigmaI@{i} P x y = sigmaI@{i} P x y' -> y = y'. Proof. refine (simplification_sigma1_dep_dep@{i i} _ _ _ _ _). intros e'. destruct (uip 1 e'). intros e ; exact e. Defined. Lemma pr2_uip_refl@{i|} {A : Type@{i}} {E : UIP A} (P : A -> Type@{i}) (x : A) (y : P x) : pr2_uip@{i} (@id_refl _ (x, y)) = 1. Proof. unfold pr2_uip, simplification_sigma1_dep_dep. now rewrite uip_refl_refl. Defined. (** If we have decidable equality on [A] we use this version which is axiom-free! *) Lemma simplification_sigma2_uip@{i j |} {A : Type@{i}} {uip : UIP A} {P : A -> Type@{i}} {B : Type@{j}} (p : A) (x y : P p) : (x = y -> B) -> ((p , x) = (p, y) -> B). Proof. intros t e. apply t. exact (pr2_uip@{i} e). Defined. Lemma simplification_sigma2_uip_refl@{i j|} : forall {A : Type@{i}} {uip:UIP A} {P : A -> Type@{i}} {B : Type@{j}} (p : A) (x : P p) (G : x = x -> B), @simplification_sigma2_uip A uip P B p x x G 1 = G 1. Proof. intros. unfold simplification_sigma2_uip. now rewrite pr2_uip_refl. Defined. Arguments simplification_sigma2_uip : simpl never. Lemma simplification_K_uip@{i j|} {A : Type@{i}} `{UIP A} (x : A) {B : x = x -> Type@{j}} : B 1 -> (forall p : x = x, B p). Proof. apply UIP_K. Defined. Arguments simplification_K_uip : simpl never. Lemma simplification_K_uip_refl@{i j|} : forall {A : Type@{i}} `{UIP A} (x : A) {B : x = x -> Type@{j}} (p : B 1), simplification_K_uip x p 1 = p. Proof. intros. unfold simplification_K_uip, UIP_K. now rewrite uip_refl_refl. Defined. Definition ind_pack@{i|} {A : Type@{i}} {B : A -> Type@{i}} {x : A} {p q : B x} (e : p = q) : @Id (sigma (fun x => B x)) (x, p) (x, q) := (pr2_inv_uip e). Definition ind_pack_inv_equiv@{i|} {A : Type@{i}} {uip : UIP A} {B : A -> Type@{i}} {x : A} (p q : B x) (e : p = q) : pr2_uip (pr2_inv_uip e) = e. Proof. destruct e. apply pr2_uip_refl. Defined. Definition opaque_ind_pack_inv@{i j|} {A : Type@{i}} {uip : UIP A} {B : A -> Type@{i}} {x : A} {p q : B x} (G : p = q -> Type@{j}) (e : (x, p) = (x, q)) := G (pr2_uip@{i} e). Arguments opaque_ind_pack_inv : simpl never. Arguments pr2_uip : simpl never. Arguments pr2_inv_uip : simpl never. Lemma simplify_ind_pack@{i j|} {A : Type@{i}} {uip : UIP A} (B : A -> Type@{i}) (x : A) (p q : B x) (G : p = q -> Type@{j}) : (forall e : (x, p) = (x, q), opaque_ind_pack_inv G e) -> (forall e : p = q, G e). Proof. intros H. intros e. specialize (H (ind_pack e)). unfold opaque_ind_pack_inv in H. rewrite ind_pack_inv_equiv in H. apply H. Defined. Arguments simplify_ind_pack : simpl never. Lemma simplify_ind_pack_inv@{i j|} {A : Type@{i}} {uip : UIP A} (B : A -> Type@{i}) (x : A) (p : B x) (G : p = p -> Type@{j}) : G 1 -> opaque_ind_pack_inv G 1. Proof. intros H. unfold opaque_ind_pack_inv. destruct (pr2_uip_refl B x p). exact H. Defined. Arguments simplify_ind_pack_inv : simpl never. Definition simplified_ind_pack@{i j|} {A : Type@{i}} {uip : UIP A} (B : A -> Type@{i}) (x : A) (p : B x) (G : p = p -> Type@{j}) (t : opaque_ind_pack_inv G 1) := Id_rew _ G t _ (@pr2_uip_refl A uip B x p). Arguments simplified_ind_pack : simpl never. 
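(* A minimal usage sketch, not part of the original file: given a [UIP]
   instance, [simplification_K_uip] eliminates a reflexive equation, here
   showing that any such proof is [1]. *)
Example simplification_K_uip_sketch {A : Type} `{UIP A} (x : A) (e : x = x) :
  e = 1 := simplification_K_uip x (B := fun e => e = 1) 1 e.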
Lemma simplify_ind_pack_refl@{i j|} {A : Type@{i}} {uip : UIP A} (B : A -> Type@{i}) (x : A) (p : B x) (G : p = p -> Type@{j}) (t : forall (e : (x, p) = (x, p)), opaque_ind_pack_inv G e) : simplify_ind_pack B x p p G t 1 = simplified_ind_pack B x p G (t 1). Proof. reflexivity. Qed. Lemma simplify_ind_pack_elim@{i j|} {A : Type@{i}} {uip : UIP A} (B : A -> Type@{i}) (x : A) (p : B x) (G : p = p -> Type@{j}) (t : G 1) : simplified_ind_pack B x p G (simplify_ind_pack_inv B x p G t) = t. Proof. unfold simplified_ind_pack, simplify_ind_pack_inv. now destruct (pr2_uip_refl B x p). Qed. (** All the simplification rules involving UIP are treated as opaque when proving lemmas about definitions. To actually compute with these inside Coq, one has to make them transparent again. *) Global Opaque simplification_sigma2_uip (* simplification_sigma2_dec_point *) simplification_K_uip simplify_ind_pack simplified_ind_pack. Global Opaque opaque_ind_pack_inv. Ltac rewrite_sigma2_refl_noK := match goal with | |- context [@inj_right_sigma ?A ?H ?x ?P ?y ?y' _] => rewrite (@inj_right_sigma_refl A H x P y) | |- context [@simplification_sigma2_uip ?A ?H ?P ?B ?p ?x ?y ?X 1] => rewrite (@simplification_sigma2_uip_refl A H P B p x X); simpl (* | |- context [@simplification_sigma2_dec_point ?A ?p ?H ?P ?B ?x ?y ?X 1] => *) (* rewrite (@simplification_sigma2_dec_point_refl A p H P B x X); simpl *) | |- context [@simplification_K_uip ?A ?dec ?x ?B ?p 1] => rewrite (@simplification_K_uip_refl A dec x B p); simpl eq_rect | |- context [@simplify_ind_pack ?A ?uip ?B ?x ?p _ ?G _ 1] => rewrite (@simplify_ind_pack_refl A uip B x p G _) | |- context [@simplified_ind_pack ?A ?uip ?B ?x ?p ?G (simplify_ind_pack_inv _ _ _ _ ?t)] => rewrite (@simplify_ind_pack_elim A uip B x p G t) end. Ltac rewrite_sigma2_refl := rewrite_sigma2_refl_noK. (** This hint database and the following tactic can be used with [autounfold] to unfold everything to [eq_rect]s. *) #[global] Hint Unfold solution_left solution_right Id_sym_invol Id_symmetry_dep solution_left_dep solution_right_dep deletion simplification_sigma1 simplification_sigma1_dep apply_noConfusion Id_rect_r Id_rec Id_ind Id_rew Id_rew_r Id_case : equations. (** Makes these definitions disappear at extraction time *) Extraction Inline solution_right_dep solution_right solution_left solution_left_dep. Extraction Inline Id_sym_invol Id_symmetry_dep. Extraction Inline solution_right_let solution_left_let deletion. Extraction Inline simplification_sigma1 simplification_sigma2_uip. Extraction Inline simplification_K_uip. Extraction Inline simplification_sigma1 simplification_sigma1_dep. Extraction Inline simplification_sigma1_nondep_dep simplification_sigma1_dep_dep. (** Simply unfold as much as possible. *) Ltac unfold_equations := repeat progress autounfold with equations. Ltac unfold_equations_in H := repeat progress autounfold with equations in H. Ltac rewrite_refl_id := repeat (progress (autorewrite with refl_id) || (try rewrite_sigma2_refl)). Ltac simplify_equations_in e := repeat progress (autounfold with equations in e ; simpl in e). (** Using these we can make a simplifier that will perform the unification steps needed to put the goal in normalised form (provided there are only constructor forms). Compare with the lemma 16 of the paper. We don't have a [noCycle] procedure yet. *) Ltac block_equality id := match type of id with | @eq ?A ?t ?u => change (let _ := block in (@eq A t u)) in id | _ => idtac end. 
Ltac revert_blocking_until id := Tactics.on_last_hyp ltac:(fun id' => match id' with | id => idtac | _ => block_equality id' ; revert id' ; revert_blocking_until id end). Ltac not_var x := try (is_var x; fail 1). (** These two tactics are dangerous as they can try to reduce terms to head-normal-form and take ages to fail. *) Ltac try_discriminate := discriminate. Ltac try_injection H := injection H. Ltac simplify_one_dep_elim := match goal with | [ |- context [Id_rect _ _ _ _ _ 1]] => simpl Id_rect | [ |- context [Id_rew _ _ _ _ 1]] => simpl Id_rew | [ |- context [Id_rew_r _ _ _ _ 1]] => simpl Id_rew_r | [ |- context [@Id_rect_r _ _ _ _ _ id_refl]] => simpl Id_rect_r | [ |- context [noConfusion_inv _]] => simpl noConfusion_inv | [ |- @opaque_ind_pack_inv ?A ?uip ?B ?x ?p _ ?G 1] => apply (@simplify_ind_pack_inv A uip B x p G) | [ |- let _ := block in _ ] => fail 1 | [ |- _ ] => (simplify * || simplify ?); cbv beta | [ |- _ -> ?B ] => let ty := type of B in (* Works only with non-dependent products *) intro || (let H := fresh in intro H) | [ |- forall x, _ ] => let H := fresh x in intro H | [ |- _ ] => intro end. (** Repeat until no progress is possible. By construction, it should leave the goal with no remaining equalities generated by the [generalize_eqs] tactic. *) Ltac simplify_dep_elim := repeat simplify_one_dep_elim. (** Apply [noConfusion] on a given hypothesis. *) Ltac noconf H := block_goal; revert_until H; block_goal; on_last_hyp ltac:(fun H' => revert H'); simplify_dep_elim; intros_until_block; intros_until_block. (** Reverse and simplify. *) Ltac simpdep := reverse; simplify_dep_elim. (** Decompose existential packages. *) Ltac decompose_exists id id' := hnf in id ; match type of id with | @sigma _ _ => let xn := fresh id "'" in destruct id as [xn id]; decompose_exists xn id; cbv beta delta [ pr1 pr2 ] iota in id, id'; decompose_exists id id' | _ => cbv beta delta [ pr1 pr2 ] iota in id, id' end. (** Dependent generalization using existentials only. *) Ltac generalize_sig_gen id cont := let id' := fresh id in get_signature_pack id id'; hnf in (value of id'); hnf in (type of id'); lazymatch goal with | id' := ?v |- context[ id ] => generalize (@id_refl _ id' : v = id') ; clearbody id'; simpl in id'; cont id id' id v | id' := ?v |- _ => let id'1 := fresh id' in let id'2 := fresh id' in set (id'2 := pr2 id'); set (id'1 := pr1 id') in id'2; hnf in (value of id'1), (value of id'2); try red in (type of id'2); match goal with [ id'1 := ?t |- _ ] => generalize (@id_refl _ id'1 : t = id'1); clearbody id'2 id'1; clear id' id; try unfold signature in id'2; hnf in id'2; simpl in id'2; rename id'2 into id; cont id id id'1 t end end. Ltac generalize_sig id cont := generalize_sig_gen id ltac:(fun id id' id'1 t => (* Fails if id = id' *) try rename id into id', id' into id; cont id'1 id). Ltac generalize_sig_vars id cont := generalize_sig_gen id ltac:(fun id id' id'1 t => move_after_deps id' t; revert_until id'; rename id' into id; cont id'1 id).
Ltac Id_generalize_sig_gen id cont := let id' := fresh id in get_signature_pack id id'; hnf in (value of id'); hnf in (type of id'); lazymatch goal with | id' := ?v |- context[ id ] => generalize (@id_refl _ id' : Id id' id') ; unfold id' at 1; clearbody id'; simpl in id'; cont id id' id' v | id' := ?v |- _ => let id'1 := fresh id' in let id'2 := fresh id' in set (id'2 := pr2 id'); set (id'1 := pr1 id') in id'2; hnf in (value of id'1), (value of id'2); match goal with | [ id'1 := ?t |- _ ] => generalize (@id_refl _ id'1 : Id t id'1); clearbody id'2 id'1; clear id' id; compute in id'2; rename id'2 into id; cont id id id'1 v end end. Ltac Id_generalize_sig id cont := Id_generalize_sig_gen id ltac:(fun id id' id'1 t => (* Fails if id = id' *) try rename id into id', id' into id; cont id'1 id). Ltac Id_generalize_sig_vars id cont := Id_generalize_sig_gen id ltac:(fun id id' id'1 t => move_after_deps id' t; revert_until id'; rename id' into id; cont id'1 id). Ltac generalize_sig_dest id := generalize_sig id ltac:(fun id id' => decompose_exists id id'). Ltac generalize_sig_vars_dest id := generalize_sig_vars id ltac:(fun id id' => decompose_exists id id'). Ltac generalize_eqs_sig id := (needs_generalization id ; generalize_sig_dest id) || idtac. Ltac generalize_eqs_vars_sig id := (needs_generalization id ; generalize_sig_vars_dest id) || idtac. (** The default implementation of generalization using sigma types. *) Ltac generalize_by_eqs id := generalize_eqs_sig id. Ltac generalize_by_eqs_vars id := generalize_eqs_vars_sig id. (** Do dependent elimination of the last hypothesis, but not simplifying yet (used internally). *) Ltac destruct_last := on_last_hyp ltac:(fun id => simpl in id ; generalize_by_eqs id ; destruct id). (** The rest is support tactics for the [Equations] command. *) Definition hide_pattern {A : Type} (t : A) := t. Definition add_pattern {B} (A : Type) (b : B) := A. (** To solve a goal by inversion on a particular target. *) Ltac do_empty id := exfalso ; simpl in id ; solve [ generalize_by_eqs id ; destruct id ; simplify_dep_elim | apply id ; eauto with simp ]. (** If defining recursive functions, the prototypes come first. *) Ltac introduce p := first [ match p with _ => (* Already there, generalize dependent hyps *) generalize dependent p ; intros p end | intros until p | intros until 1 | intros ]. Ltac do_case p := introduce p ; (elim_case p || destruct p || (case p ; clear p)). Ltac do_ind p := introduce p ; (elim_ind p || induction p). (** The following tactics allow doing induction on an already instantiated inductive predicate by first generalizing it and adding the proper equalities to the context, in a manner similar to the BasicElim tactic of "Elimination with a motive" by Conor McBride. *) (** The [do_depelim] higher-order tactic takes an elimination tactic as argument and a hypothesis and starts a dependent elimination using this tactic. *) Ltac is_introduced H := match goal with | [ H' : _ |- _ ] => match H' with H => idtac end end. Tactic Notation "intro_block" hyp(H) := (is_introduced H ; block_goal ; revert_until H ; block_goal) || (let H' := fresh H in intros until H' ; block_goal) || (intros ; block_goal). Tactic Notation "intro_block_id" ident(H) := (is_introduced H ; block_goal ; revert_until H ; block_goal) || (let H' := fresh H in intros until H' ; block_goal) || (intros ; block_goal). Ltac unblock_dep_elim := match goal with | |- let _ := block in ?T => match T with context [ block ] => change T ; intros_until_block end | _ => unblock_goal end.
(** A tactic that tries to remove trivial equality guards in induction hypotheses coming from [dependent induction]/[generalize_eqs] invocations. *) Ltac simplify_IH_hyps := repeat match goal with | [ hyp : context [ block ] |- _ ] => cbn beta in hyp; eqns_specialize_eqs_block hyp; cbn beta iota delta[eq_rect_r eq_rect] zeta in hyp end. Ltac simpl_dep_elim := simplify_dep_elim ; simplify_IH_hyps ; unblock_dep_elim. Ltac do_intros H := (try intros until H) ; (intro_block_id H || intro_block H) ; (try simpl in H ; simplify_equations_in H). Ltac do_depelim_nosimpl tac H := do_intros H ; generalize_by_eqs H ; tac H. Ltac do_depelim tac H := do_depelim_nosimpl tac H ; simpl_dep_elim; unblock_goal. Ltac do_depind tac H := (try intros until H) ; intro_block H ; (try simpl in H ; simplify_equations_in H) ; generalize_by_eqs_vars H ; tac H ; simpl_dep_elim; unblock_goal. (** To do dependent elimination on some hyp. *) Ltac depelim id := do_depelim ltac:(fun hyp => do_case hyp) id. Ltac depelim_term c := let H := fresh "term" in set (H:=c) in *; clearbody H ; depelim H. (** Used internally. *) Ltac depelim_nosimpl id := do_depelim_nosimpl ltac:(fun hyp => do_case hyp) id. (** To do dependent induction on some hyp. *) Ltac depind id := do_depind ltac:(fun hyp => do_ind hyp) id. (** A variant where generalized variables should be given by the user. *) Ltac do_depelim' tac H := (try intros until H) ; block_goal ; generalize_by_eqs H ; tac H ; simplify_dep_elim ; simplify_IH_hyps ; unblock_goal. (** Calls [destruct] on the generalized hypothesis, results should be similar to inversion. By default, we don't try to generalize the hyp by its variable indices. *) Tactic Notation "dependent" "destruction" ident(H) := do_depelim' ltac:(fun hyp => do_case hyp) H. Tactic Notation "dependent" "destruction" ident(H) "using" constr(c) := do_depelim' ltac:(fun hyp => destruct hyp using c) H. (** This tactic also generalizes the goal by the given variables before the elimination. *) Tactic Notation "dependent" "destruction" ident(H) "generalizing" ne_hyp_list(l) := do_depelim' ltac:(fun hyp => revert l ; do_case hyp) H. Tactic Notation "dependent" "destruction" ident(H) "generalizing" ne_hyp_list(l) "using" constr(c) := do_depelim' ltac:(fun hyp => revert l ; destruct hyp using c) H. (** Then we have wrappers for usual calls to induction. One can customize the induction tactic by writing another wrapper calling do_depelim. We suppose the hyp has to be generalized before calling [induction]. *) Tactic Notation "dependent" "induction" ident(H) := do_depind ltac:(fun hyp => do_ind hyp) H. Tactic Notation "dependent" "induction" ident(H) "using" constr(c) := do_depind ltac:(fun hyp => induction hyp using c) H. (** This tactic also generalizes the goal by the given variables before the induction. *) Tactic Notation "dependent" "induction" ident(H) "generalizing" ne_hyp_list(l) := do_depelim' ltac:(fun hyp => generalize l ; clear l ; do_ind hyp) H. Tactic Notation "dependent" "induction" ident(H) "generalizing" ne_hyp_list(l) "using" constr(c) := do_depelim' ltac:(fun hyp => generalize l ; clear l ; induction hyp using c) H. (** [solve_equation] is used to prove the equation lemmas for an existing definition. *) Ltac find_empty := simpl in * ; exfalso ; match goal with | [ H : _ |- _ ] => solve [ clear_except H ; dependent elimination H | eqns_specialize_eqs H ; assumption ] | [ H : _ <> _ |- _ ] => solve [ red in H ; eqns_specialize_eqs H ; assumption ] end.
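(* A minimal usage sketch, not part of the original file: [depelim] on an [Id]
   hypothesis between variables eliminates it, substituting one side for the
   other in the goal. *)
Example depelim_sketch (x y : nat) (e : x = y) : y = x.
Proof. depelim e. exact 1. Qed.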
Ltac make_simplify_goal := match goal with | [ |- ?R ?A ?T ?U ] => let eqP := fresh "eqP" in set (eqP := fun x : A => R A x U) ; change (eqP T) end. Ltac hnf_gl := match goal with [ |- ?P ?T ] => let T' := eval hnf in T in change_no_check (P T') end. Ltac hnf_eq := match goal with | |- ?R ?x ?y => let x' := eval hnf in x in let y' := eval hnf in y in change_no_check (R x' y') end. Ltac red_one_eq := match goal with |- ?R ?x ?y => let x' := eval red in x in let y' := eval red in y in change_no_check (R x' y') end. Ltac red_eq := match goal with |- ?R ?x ?y => let rec reduce_eq x y := let x' := eval red in x in let y' := eval red in y in (reduce_eq x' y' || change_no_check (R x' y')) in reduce_eq x y end. Ltac red_eq_lhs := match goal with |- ?R ?x ?y => let x' := eval red in x in change_no_check (R x' y) end. Ltac red_gl := match goal with |- ?P ?x => let rec reduce x := let x' := eval red in x in (reduce x' || change_no_check (P x')) in reduce x end. Ltac rewrite_sigma2_rule_noK c := match c with | @inj_right_sigma ?A ?H ?x ?P ?y ?y' _ => rewrite (@inj_right_sigma_refl A H x P y) | @simplify_ind_pack ?A ?uip ?B ?x ?p _ ?G _ 1 => rewrite (@simplify_ind_pack_refl A uip B x p G _) | @simplification_sigma2_uip ?A ?H ?P ?B ?p ?x ?y ?X 1=> rewrite (@simplification_sigma2_uip_refl A H P B p x X); simpl (* | @simplification_sigma2_dec_point ?A ?p ?H ?P ?B ?x ?y ?X 1=> *) (* rewrite (@simplification_sigma2_dec_point_refl A p H P B x X); simpl *) | @simplification_K_uip ?A ?dec ?x ?B ?p 1=> rewrite (@simplification_K_uip_refl A dec x B p); simpl eq_rect end. Ltac rewrite_sigma2_rule c := rewrite_sigma2_rule_noK c. Ltac rewrite_sigma2_term x := match x with | ?f _ _ _ _ _ _ _ _ _ => rewrite_sigma2_rule f | ?f _ _ _ _ _ _ _ _ => rewrite_sigma2_rule f | ?f _ _ _ _ _ _ _ => rewrite_sigma2_rule f | ?f _ _ _ _ _ _ => rewrite_sigma2_rule f | ?f _ _ _ _ _ => rewrite_sigma2_rule f | ?f _ _ _ _ => rewrite_sigma2_rule f | ?f _ _ _ => rewrite_sigma2_rule f | ?f _ _ => rewrite_sigma2_rule f | ?f _ => rewrite_sigma2_rule f | ?f => rewrite_sigma2_rule f end. Ltac rewrite_sigma2_refl_eq := match goal with |- ?x = ?y => rewrite_sigma2_term x || rewrite_sigma2_term y end. Ltac rewrite_sigma2_refl_goal := match goal with | |- ?P ?x => rewrite_sigma2_term x end. (* Ltac simpl_equations := *) (* repeat (repeat (simpl; (hnf_eq || rewrite_sigma2_refl_eq || autorewrite with refl_id); simpl); *) (* try progress autounfold with equations). *) (* Ltac simplify_equation c := *) (* make_simplify_goal ; simpl ; *) (* repeat (try autounfoldify c; *) (* try (red_gl || rewrite_sigma2_refl_goal || autorewrite with refl_id) ; simpl). *) Ltac simpl_equations := repeat (repeat (simpl; hnf_eq; rewrite_refl_id); try progress autounfold with equations). Ltac simpl_equation_impl := repeat (unfold_equations; rewrite_refl_id). Ltac simplify_equation c := make_simplify_goal; simpl; repeat (try autounfold_ref c; progress (simpl; unfold_equations) || (progress (autorewrite with refl_id)) || reflexivity || (progress (rewrite_sigma2_refl))). Ltac solve_equation c := intros ; try simplify_equation c ; try (match goal with | [ |- ImpossibleCall _ ] => find_empty | _ => try red; try (reflexivity || discriminates) end). Definition depelim_module := tt. Register depelim_module as equations.depelim.module. 
Coq-Equations-1.3.1-8.20/theories/Type/EqDec.v000066400000000000000000000207211463127417400205170ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Set Warnings "-notation-overridden". Require Import Equations.Init. Require Import Equations.Type.Logic. Require Import Equations.Type.Classes. (** Decidable equality. We redevelop the derivation of [K] from decidable equality on [A] making everything transparent and moving to [Type] so that programs using this will actually be computable inside Coq. *) Set Universe Polymorphism. Import Id_Notations. Import Sigma_Notations. Local Open Scope equations_scope. (** We rederive the UIP shifting proof transparently, and on type. Taken from Coq's stdlib. *) Definition UIP_refl_on_ X (x : X) := forall p : x = x, p = 1. Definition UIP_refl_ X := forall (x : X) (p : x = x), p = 1. Lemma Id_trans_r {A} (x y z : A) : x = y -> z = y -> x = z. Proof. destruct 1. destruct 1. exact 1. Defined. (** We rederive the UIP shifting proof transparently. *) Theorem UIP_shift_on@{i} (X : Type@{i}) (x : X) : UIP_refl_on_ X x -> forall y : x = x, UIP_refl_on_ (x = x) y. Proof. intros UIP_refl y. rewrite (UIP_refl y). intros z. assert (UIP:forall y' y'' : x = x, y' = y''). { intros. apply Id_trans_r with 1; apply UIP_refl. } transitivity (id_trans (id_trans (UIP 1 1) z) (id_sym (UIP 1 1))). - destruct z. destruct (UIP _ _). reflexivity. - change (match 1 as y' in _ = x' return y' = y' -> Type@{i} with | 1 => fun z => z = 1 end (id_trans (id_trans (UIP 1 1) z) (id_sym (UIP (1) (1))))). destruct z. destruct (UIP _ _). reflexivity. Defined. Theorem UIP_shift@{i} : forall {U : Type@{i}}, UIP_refl_@{i} U -> forall x:U, UIP_refl_@{i} (x = x). Proof. exact (fun U UIP_refl x => @UIP_shift_on U x (UIP_refl x)). Defined. (** This is the reduction rule of UIP. *) Lemma uip_refl_refl@{i} {A : Type@{i}} {E : UIP@{i} A} (x : A) : uip (x:=x) 1 1 = 1. Proof. apply UIP_shift@{i}. intros y e. apply uip@{i}. Defined. Theorem UIP_K@{i j} {A : Type@{i}} {U : UIP A} (x : A) : forall P : x = x -> Type@{j}, P 1 -> forall p : x = x, P p. Proof. intros P peq e. now elim (uip 1 e). Defined. (** Tactic to solve EqDec goals, destructing recursive calls for the recursive structure of the type and calling instances of eq_dec on other types. *) Ltac eqdec_loop t u := (left; reflexivity) || (solve [right; intro He; inversion He]) || (let x := match t with | context C [ _ ?x ] => constr:(x) end in let y := match u with | context C [ _ ?y ] => constr:(y) end in let contrad := let Hn := fresh in intro Hn; right; intro He; apply Hn; inversion He; reflexivity in let good := intros ->; let t' := match t with | context C [ ?x _ ] => constr:(x) end in let u' := match u with | context C [ ?y _ ] => constr:(y) end in (* idtac "there" t' u'; *) try (eqdec_loop t' u') in (* idtac "here" x y; *) match goal with | [ H : forall z, sum (Id _ z) _ |- _ ] => case (H y); [good|contrad] | _ => case (eq_dec x y); [good|contrad] end) || idtac. Ltac eqdec_proof := try red; intros; match goal with | |- sum (Id ?x ?y) _ => revert y; induction x; intros until y; depelim y; match goal with |- sum (Id ?x ?y) _ => eqdec_loop x y end end. 
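(* Usage sketch, not part of the original file: [eqdec_proof] is intended to be
   run once the [DepElim]/[Tactics] machinery is loaded; for instance,
   EqDecInstances.v below derives decidable equality for [bool] simply with
   [#[export] Instance bool_eqdec : EqDec bool. Proof. eqdec_proof. Defined.] *)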
(** Derivation of principles on sigma types whose domain is decidable. *) Section EqdepDec. Universe i. Context {A : Type@{i}} `{EqDec A}. Let comp {x y y':A} (eq1:x = y) (eq2:x = y') : y = y' := Id_rect _ _ (fun a _ => a = y') eq2 _ eq1. Remark trans_sym_eq : forall (x y:A) (u:x = y), comp u u = id_refl y. Proof. intros. case u; compute. apply id_refl. Defined. Variable x : A. Let nu {y:A} (u:x = y) : x = y := match eq_dec x y with | inl eqxy => eqxy | inr neqxy => Empty_rect (fun _ => _) (neqxy u) end. Let nu_constant : forall (y:A) (u v:x = y), nu u = nu v. intros. unfold nu in |- *. case (eq_dec x y); intros. reflexivity. case e; trivial. Defined. Let nu_inv {y:A} (v:x = y) : x = y := comp (nu (id_refl x)) v. Remark nu_left_inv : forall (y:A) (u:x = y), nu_inv (nu u) = u. Proof. intros. case u; unfold nu_inv in |- *. apply trans_sym_eq. Defined. Theorem eq_proofs_unicity : forall (y:A) (p1 p2:x = y), p1 = p2. Proof. intros. elim nu_left_inv with (u := p1). elim nu_left_inv with (u := p2). elim nu_constant with y p1 p2. reflexivity. Defined. Theorem K_dec : forall P:x = x -> Type@{i}, P (id_refl x) -> forall p:x = x, P p. Proof. intros. elim eq_proofs_unicity with x (id_refl x) p. trivial. Defined. Lemma eq_dec_refl : eq_dec x x = inl (id_refl x). Proof. case eq_dec; intros. apply ap. apply eq_proofs_unicity. elim e. apply id_refl. Defined. (** The corollary *) (* On [sigma] *) Let projs {P:A -> Type@{i}} (exP:sigma P) (def:P x) : P x := match exP with | sigmaI _ x' prf => match eq_dec x' x with | inl eqprf => Id_rect _ x' (fun x _ => P x) prf x eqprf | _ => def end end. Theorem inj_right_sigma {P : A -> Type@{i}} {y y':P x} : (x, y) = (x, y') -> y = y'. Proof. intros. cut (projs (x, y) y = projs (sigmaI P x y') y). unfold projs. case (eq_dec x x). intro e. elim e using K_dec. trivial. intros. case e; reflexivity. case X; reflexivity. Defined. Lemma inj_right_sigma_refl (P : A -> Type@{i}) (y : P x) : inj_right_sigma (y:=y) (y':=y) 1 = (id_refl _). Proof. unfold inj_right_sigma. intros. unfold eq_rect. unfold projs. destruct (id_sym@{i} eq_dec_refl). unfold K_dec. simpl. unfold eq_proofs_unicity. subst projs. simpl. unfold nu_inv, comp, nu. simpl. unfold eq_ind, nu_left_inv, trans_sym_eq, eq_rect, nu_constant. destruct (id_sym@{i} eq_dec_refl). reflexivity. Defined. End EqdepDec. Definition transport {A : Type} {P : A -> Type} {x y : A} (p : x = y) : P x -> P y := match p with id_refl => fun h => h end. Lemma sigma_eq@{i} (A : Type@{i}) (P : A -> Type@{i}) (x y : sigma P) : x = y -> Σ p : (x.1 = y.1), transport p x.2 = y.2. Proof. intros H; destruct H. destruct x as [x px]. simpl. refine (id_refl, id_refl). Defined. Theorem inj_sigma_r@{i} {A : Type@{i}} `{H : HSet A} {P : A -> Type@{i}} {x} {y y':P x} : sigmaI P x y = sigmaI P x y' -> y = y'. Proof. intros [H' H'']%sigma_eq. cbn in *. pose (is_hset H' id_refl). apply (transport (P:=fun h => transport h y = y') i H''). Defined. Definition apd {A} {B : A -> Type} (f : forall x : A, B x) {x y : A} (p : x = y) : transport p (f x) = f y. Proof. now destruct p. Defined. Definition apd_eq {A} {x y : A} (p : x = y) {z} (q : z = x) : transport (P:=@Id A z) p q = id_trans q p. Proof. now destruct p, q. Defined. Lemma id_trans_sym {A} (x y z : A) (p : x = y) (q : y = z) (r : x = z) : id_trans p q = r -> q = id_trans (id_sym p) r. Proof. destruct p, q. destruct 1. exact 1. Defined. Lemma hprop_hset {A} (h : HProp A) : HSet A. Proof. intro x. set (g y := h x y). intros y z w. assert (forall y z (p : y = z), p = id_trans (id_sym (g y)) (g z)). 
intros. apply id_trans_sym. destruct (apd_eq p (g y0)). apply apd. rewrite X. now rewrite (X _ _ z). Defined. (** Proof that equality proofs in 0-truncated types are connected *) Lemma hset_pi {A} `{HSet A} (x y : A) (p q : x = y) (r : p = q) : is_hset p q = r. Proof. red in H. pose (hprop_hset (H x y)). apply h. Defined. Lemma is_hset_refl {A} `{HSet A} (x : A) : is_hset (id_refl x) id_refl = id_refl. Proof. apply hset_pi. Defined. Lemma inj_sigma_r_refl@{i} (A : Type@{i}) (H : HSet A) (P : A -> Type@{i}) x (y : P x) : inj_sigma_r (y:=y) (y':=y) 1 = (id_refl _). Proof. unfold inj_sigma_r. intros. simpl. now rewrite is_hset_refl. Defined. Theorem K {A} `{HSet A} (x : A) (P : x = x -> Type) : P (id_refl x) -> forall p : x = x, P p. Proof. intros. exact (transport (is_hset id_refl p) X). Defined. Coq-Equations-1.3.1-8.20/theories/Type/EqDecInstances.v000066400000000000000000000041421463127417400223660ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Set Warnings "-notation-overridden". From Equations Require Import Init. Require Import Equations.Type.Logic Equations.Type.Classes Equations.Type.DepElim Equations.Type.Tactics Equations.Type.EqDec. Local Open Scope equations_scope. Import Id_Notations Sigma_Notations. Set Universe Polymorphism. #[export] Instance eqdec_hset (A : Type) `(EqDec A) : HSet A. Proof. red. red. apply eq_proofs_unicity. Defined. (** Standard instances. *) #[export] Instance unit_eqdec : EqDec unit. Proof. eqdec_proof. Defined. #[export] Instance bool_eqdec : EqDec bool. Proof. eqdec_proof. Defined. #[export] Instance nat_eqdec : EqDec nat. Proof. eqdec_proof. Defined. #[export] Instance prod_eqdec {A B} `(EqDec A) `(EqDec B) : EqDec (prod A B). Proof. eqdec_proof. Defined. #[export] Instance sum_eqdec {A B} `(EqDec A) `(EqDec B) : EqDec (A + B). Proof. eqdec_proof. Defined. #[export] Instance list_eqdec {A} `(EqDec A) : EqDec (list A). Proof. eqdec_proof. Defined. (** Any signature made up entirely of decidable types is decidable. *) Polymorphic Definition eqdec_sig_Id@{i} {A : Type@{i}} {B : A -> Type@{i}} `(EqDec A) `(forall a, EqDec (B a)) : EqDec@{i} (sigma B). Proof. Set Printing Universes. intros. intros [xa xb] [ya yb]. case (eq_dec xa ya). intros Hxya. destruct Hxya. case (eq_dec xb yb). + intros He; destruct He. apply inl@{i}. reflexivity. + intros. apply inr@{i}. apply simplification_sigma2_uip@{i i}. apply e. + intros. apply inr@{i}. refine (simplification_sigma1_dep@{i i} _ _ _ _ _). intros He _; revert He. apply e. Defined. #[export] Existing Instance eqdec_sig_Id. Coq-Equations-1.3.1-8.20/theories/Type/FunctionalExtensionality.v000066400000000000000000000010201463127417400245670ustar00rootroot00000000000000Set Warnings "-notation-overridden". From Equations Require Import Init. From Coq Require Import CRelationClasses. Require Import Equations.Type.Logic. (** The polymorphic equality type used by Equations when working with equality in Type. *) Set Universe Polymorphism. Import Id_Notations. Section FunExt. Context (A : Type) (B : A -> Type). Definition functional_extensionality := forall (f g : forall x, B x), (forall x, f x = g x) -> f = g. End FunExt. 
Axiom funext : forall A B, functional_extensionality A B. Coq-Equations-1.3.1-8.20/theories/Type/FunctionalInduction.v000066400000000000000000000163051463127417400235200ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Set Warnings "-notation-overridden". Require Import Equations.CoreTactics. Require Import Equations.Type.Logic Equations.Type.Classes Equations.Type.EqDec Equations.Type.DepElim. Set Universe Polymorphism. Local Open Scope equations_scope. Import Sigma_Notations. (** The tactic [funind c Hc] applies functional induction on the application [c] which must be of the form [f args] where [f] has a [FunctionalInduction] instance. [Hc] is the name given to the call, used to generate hypothesis names. *) Ltac funind c Hcall := match c with context C [ ?f ] => let x := constr:(fun_ind_prf (f:=f)) in (let prf := eval simpl in x in let p := context C [ prf ] in let prf := fresh in let call := fresh in assert(prf:=p) ; (* Abstract the call *) set(call:=c) in *; generalize (@id_refl _ call : call = c); clearbody call ; intro Hcall ; (* Now do dependent elimination and simplifications *) dependent induction prf ; simplify_IH_hyps ; (* Use the simplifiers for the constant to get a nicer goal. *) try simpc f in * ; try on_last_hyp ltac:(fun id => simpc f in id ; noconf id)) || fail 1 "Internal error in funind" end || fail "Maybe you didn't declare the functional induction principle for" c. Ltac funind_call f H := on_call f ltac:(fun call => funind call H). Ltac make_refine n c := match constr:(n) with | 0 => uconstr:(c) | S ?n => make_refine n uconstr:(c _) end. Ltac constr_head c := let rec aux c := match c with | ?f _ => aux f | ?f => f end in aux c. Ltac with_last_secvar_aux tac := match goal with [ H : _ |- _ ] => is_secvar H; tac H end. Ltac with_last_secvar tac orelse := with_last_secvar_aux tac + (* No section variables *) orelse. Ltac get_elim c := match c with | context [?f] => constr:(fun_elim (f:=f)) end. Ltac clear_non_secvar := repeat match goal with | [ H : _ |- _ ] => tryif is_secvar H then fail else clear H end. Ltac remember_let H := lazymatch goal with | [ H := ?body : ?type |- _ ] => generalize (1 : H = body) end. Ltac unfold_packcall packcall := lazymatch goal with |- ?R ?x ?y -> ?P => let y' := eval unfold packcall in y in change (R x y' -> P) end. Ltac simplify_IHs_call := repeat match goal with | [ hyp : context [ block ] |- _ ] => cbn beta in hyp; eqns_specialize_eqs_block hyp 2; cbn beta iota delta[Id_rect_r Id_rect] zeta in hyp end. Ltac make_packcall packcall c := match goal with | [ packcall : ?type |- _ ] => change (let _ := c in type) in (type of packcall) end. 
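(* A tiny sanity check of the [constr_head] helper above (an illustrative
   sketch, not used by the elimination machinery): it strips an application
   down to its head, here recovering [Nat.add] from a partial application. *)
Example constr_head_example : nat.
Proof. let h := constr_head constr:(Nat.add O O) in exact (h O O). Defined.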
Ltac funelim_sig_tac c Heq simp_IHs tac := let elimc := get_elim c in let packcall := fresh "packcall" in let packcall_fn := fresh "packcall_fn" in let elimfn := match elimc with fun_elim (f:=?f) => constr:(f) end in let elimn := match elimc with fun_elim (n:=?n) => constr:(n) end in block_goal; uncurry_call elimfn c packcall packcall_fn; remember_let packcall_fn; unfold_packcall packcall; (refine (simplification_sigma1 _ _ _ _ _) || refine (simplification_sigma1_nondep_dep _ _ _ _ _) || refine (simplification_sigma1_dep _ _ _ _ _)); let H := fresh "eqargs" in let Heqfresh := fresh "__Heq__" in intros H Heqfresh; revert Heqfresh; block_goal; revert H; subst packcall_fn; clearbody packcall; make_packcall packcall elimfn; with_last_secvar ltac:(fun eos => move packcall before eos) ltac:(move packcall at top); revert_until packcall; block_goal; cbv zeta in packcall; revert packcall; curry; let elimt := make_refine elimn elimc in unshelve refine_ho elimt; intros; cbv beta; simplify_dep_elim; intros_until_block; simplify_dep_elim; cbn beta iota delta [Id_rect_r Id_rect pack_sigma pack_sigma_nondep] in *; simp_IHs; intros _ Heqfresh; unblock_goal; try (rewrite <- Heqfresh); try (rename Heqfresh into Heq || (let Heqf := fresh Heq in rename Heq into Heqf; rename Heqfresh into Heq)); tac c. Ltac funelim_constr_as c h simp_IHs := funelim_sig_tac c h simp_IHs ltac:(fun _ => idtac). Ltac get_first_elim c := match c with | ?x ?a ?b ?c ?d ?e ?f ?g ?h ?i ?j ?k ?l ?m => get_elim (x a b c d e f g h i j k l m) | ?x ?a ?b ?c ?d ?e ?f ?g ?h ?i ?j ?k ?l => get_elim (x a b c d e f g h i j k l) | ?x ?a ?b ?c ?d ?e ?f ?g ?h ?i ?j ?k => get_elim (x a b c d e f g h i j k) | ?x ?a ?b ?c ?d ?e ?f ?g ?h ?i ?j => get_elim (x a b c d e f g h i j) | ?x ?a ?b ?c ?d ?e ?f ?g ?h ?i => get_elim (x a b c d e f g h i) | ?x ?a ?b ?c ?d ?e ?f ?g ?h => get_elim (x a b c d e f g h) | ?x ?a ?b ?c ?d ?e ?f ?g => get_elim (x a b c d e f g) | ?x ?a ?b ?c ?d ?e ?f => get_elim (x a b c d e f) | ?x ?a ?b ?c ?d ?e => get_elim (x a b c d e) | ?x ?a ?b ?c ?d => get_elim (x a b c d) | ?x ?a ?b ?c => get_elim (x a b c) | ?x ?a ?b => get_elim (x a b) | ?x ?a => get_elim (x a) end. (** An alternative tactic that does not generalize over the arguments. BEWARE: It might render the goal unprovable. *) Ltac apply_funelim c := let elimc := get_first_elim c in let elimfn := match elimc with fun_elim (f:=?f) => constr:(f) end in let elimn := match elimc with fun_elim (n:=?n) => constr:(n) end in let elimt := make_refine elimn elimc in apply_args c elimt ltac:(fun elimc => unshelve refine_ho elimc; cbv beta). (** A special purpose database used to prove the elimination principle. *) Create HintDb funelim. (** Solve reflexivity goals. *) #[global] Hint Extern 0 (Id _ _) => constructor : funelim. (** Specialize hypotheses begining with equalities. *) Ltac specialize_hyps := match goal with | [ H : forall _ : @Id _ ?x ?x, _ |- _ ] => specialize (H (@id_refl _ x)); unfold Id_rew_r, Id_rect_r, Id_rect in H ; simpl in H end. #[global] Hint Extern 100 => specialize_hyps : funelim. (** Destruct conjunctions everywhere, starting with the hypotheses. This tactic allows to close functional induction proofs involving multiple nested and/or mutual recursive definitions. *) (** TODO: make it generic, won't work with another logic *) Lemma uncurry_prod (A B C : Type) : (A * B -> C) -> (A -> B -> C). Proof. intros H a b. exact (H (a, b)). Defined. Ltac specialize_mutual_nested := match goal with | [ H : _ * _ |- _ ] => destruct H | [ |- _ * _ ] => split end. 
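(* Usage sketch for [uncurry_prod] (illustrative only): it curries a function
   out of the sigma-based [prod] of Equations.Type.Logic, which is the shape
   of hypothesis manipulated when closing mutual/nested functional induction
   proofs. *)
Example uncurry_prod_example (f : unit * unit -> unit) : unit -> unit -> unit :=
  uncurry_prod unit unit unit f.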
#[global] Hint Extern 50 => specialize_mutual_nested : funelim. Ltac specialize_mutual := match goal with [ H : _ * _ |- _ ] => destruct H (* Fragile, might render later goals unprovable *) | [ H : ?X -> _, H' : ?X |- _ ] => match X with | forall (_ : _), _ => specialize (H H') end | [ H : (?A * ?B) -> ?C |- _ ] => apply (uncurry_prod A B C) in H end. Ltac Equations.Init.specialize_mutfix ::= repeat specialize_mutual. Coq-Equations-1.3.1-8.20/theories/Type/Loader.v000066400000000000000000000027471463127417400207540ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (** The set of libraries required to run Equations with all features. *) Require Import Extraction. (** This exports tactics *) Declare ML Module "coq-equations.plugin". Set Warnings "-notation-overridden". From Equations Require Export Init Signature. Require Import Equations.CoreTactics. Require Export Equations.Type.Logic Equations.Type.Classes. Require Import Equations.Type.WellFounded. Require Import Equations.Type.DepElim Equations.Type.EqDec Equations.Type.Constants. Require Export Equations.Type.EqDecInstances. Require Import Equations.Type.NoConfusion. Require Import Equations.Type.Subterm. Require Export Equations.Type.Tactics. Require Export Equations.Type.FunctionalInduction. (* funelim tactic *) Global Obligation Tactic := Equations.CoreTactics.equations_simpl. (** Tactic to solve well-founded proof obligations by default *) Ltac solve_rec := simpl in * ; cbv zeta ; intros ; try typeclasses eauto with subterm_relation simp rec_decision. Export EquationsNotations.Coq-Equations-1.3.1-8.20/theories/Type/Logic.v000066400000000000000000000102261463127417400205720ustar00rootroot00000000000000From Equations Require Import Init. From Coq Require Import Extraction CRelationClasses. Set Warnings "-notation-overridden". (** The polymorphic equality type used by Equations when working with equality in Type. *) Set Universe Polymorphism. (* Let's leave empty at Set, it can live in any higher universe. *) Inductive Empty : Set :=. Scheme Empty_case := Minimality for Empty Sort Type. Definition unit_rect@{i} (P : unit -> Type@{i}) (p : P tt) (u : unit) : P u := match u with tt => p end. Definition prod (A : Type) (B : Type) := sigma (fun _ : A => B). Notation " A * B " := (prod A B) : type_scope. Definition BiImpl (A B : Type) : Type := (A -> B) * (B -> A). Notation "A <-> B" := (BiImpl A B) (at level 95) : type_scope. Cumulative Inductive Id@{i} {A : Type@{i}} (a : A) : A -> Type@{i} := id_refl : Id a a. Arguments id_refl {A a}, [A] a. Local Open Scope equations_scope. Module Id_Notations. Notation " x = y " := (@Id _ x y) : equations_scope. Notation " x = y " := (@Id _ x y) : type_scope. Notation " x <> y " := (@Id _ x y -> Empty) : equations_scope. Notation " x <> y " := (@Id _ x y -> Empty) : type_scope. Notation " 1 " := (@id_refl _ _) : equations_scope. End Id_Notations. Import Id_Notations. Module Sigma_Notations. Notation "'Σ' x .. y , P" := (sigma (fun x => .. (sigma (fun y => P)) ..)) (at level 200, x binder, y binder, right associativity, format "'[ ' '[ ' Σ x .. y ']' , '/' P ']'") : type_scope. Notation "( x , .. , y , z )" := (@sigmaI _ _ x .. 
(@sigmaI _ _ y z) ..) (right associativity, at level 0, format "( x , .. , y , z )") : equations_scope. Notation "x .1" := (pr1 x) : equations_scope. Notation "x .2" := (pr2 x) : equations_scope. End Sigma_Notations. Import Sigma_Notations. Section IdTheory. Universe i. Context {A : Type@{i}}. Import Id_Notations. Lemma id_sym {x y : A} : x = y -> y = x. Proof. destruct 1. apply 1. Defined. Lemma id_trans {x y z : A} : x = y -> y = z -> x = z. Proof. destruct 1. destruct 1. apply 1. Defined. Definition transport (x : A) (P : A -> Type) : P x -> forall y : A, Id x y -> P y. Proof. intros Px y e. destruct e. exact Px. Defined. Definition Id_rew := transport. Definition Id_case (P : A -> Type) {x y : A} : Id y x -> P x -> P y. Proof. intros e Px. eapply (transport x _ Px y (id_sym e)). Defined. Definition Id_rew_r {x y : A} (P : A -> Type) : P y -> Id x y -> P x. Proof. intros Px e. eapply (transport y _ Px x (id_sym e)). Defined. Lemma Id_rect_r {x : A} (P : forall a, Id a x -> Type) (p : P x id_refl) {y : A} (e : Id y x) : P y e. Proof. destruct e. apply p. Defined. Import Sigma_Notations. Definition id_inspect (x : A) : Σ y , x = y := (x , 1). End IdTheory. Class HProp A := is_hprop : forall x y : A, x = y. Class HSet A := is_hset : forall {x y : A}, HProp (x = y). Cumulative Inductive sum@{i} (A : Type@{i}) (B : Type@{i}) := | inl : A -> A + B | inr : B -> A + B where " A + B " := (sum A B) : type_scope. Arguments inl {A} {B} a. Arguments inr {A} {B} b. Definition Sect {A B : Type} (s : A -> B) (r : B -> A) := forall x : A, r (s x) = x. Definition ap {A B : Type} (f : A -> B) {x y : A} (p : x = y) : f x = f y := match p with 1 => 1 end. Class IsEquiv {A B : Type} (f : A -> B) := BuildIsEquiv { equiv_inv : B -> A ; eisretr : Sect equiv_inv f; eissect : Sect f equiv_inv; eisadj : forall x : A, eisretr (f x) = ap f (eissect x) }. Arguments eisretr {A B}%type_scope f%function_scope {_} _. Arguments eissect {A B}%type_scope f%function_scope {_} _. Arguments eisadj {A B}%type_scope f%function_scope {_} _. Arguments IsEquiv {A B}%type_scope f%function_scope. (** A record that includes all the data of an adjoint equivalence. *) Record Equiv A B := BuildEquiv { equiv_fun : A -> B ; equiv_isequiv : IsEquiv equiv_fun }. Coercion equiv_fun : Equiv >-> Funclass. Global Existing Instance equiv_isequiv. Arguments equiv_fun {A B} _ _. Arguments equiv_isequiv {A B} _. Declare Scope equiv_scope. Bind Scope equiv_scope with Equiv. Reserved Infix "<~>" (at level 85). Notation "A <~> B" := (Equiv A B) (at level 85) : type_scope. Notation "f ^^-1" := (@equiv_inv _ _ f _) (at level 3). Coq-Equations-1.3.1-8.20/theories/Type/NoConfusion.v000066400000000000000000000022111463127417400217700ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) (** Instances of [NoConfusion] for the standard datatypes. To be used by [equations] when it needs applications of injectivity or discrimination on some equation. *) From Equations Require Import Init Signature. Require Import Equations.CoreTactics. Require Import Equations.Type.Classes Equations.Type.EqDec Equations.Type.Constants. Require Import Equations.Type.DepElim Equations.Type.Tactics. 
(** Parameterized inductive types just need NoConfusion. *) Derive NoConfusion for unit bool nat option sum Datatypes.prod list sigT sig. (* FIXME should be done by the derive command *) Extraction Inline noConfusion NoConfusionPackage_nat. Coq-Equations-1.3.1-8.20/theories/Type/Relation.v000066400000000000000000000156611463127417400213220ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * INRIA, CNRS and contributors - Copyright 1999-2018 *) (* A -> Type. (** ** Transitive closure *) Section Transitive_Closure. Context {A : Type} (R : relation A). (** Definition by direct transitive closure *) Inductive trans_clos (x: A) : A -> Type := | t_step (y:A) : R x y -> trans_clos x y | t_trans (y z:A) : trans_clos x y -> trans_clos y z -> trans_clos x z. (** Alternative definition by transitive extension on the left *) Inductive trans_clos_1n (x: A) : A -> Type := | t1n_step (y:A) : R x y -> trans_clos_1n x y | t1n_trans (y z:A) : R x y -> trans_clos_1n y z -> trans_clos_1n x z. (** Alternative definition by transitive extension on the right *) Inductive trans_clos_n1 (x: A) : A -> Type := | tn1_step (y:A) : R x y -> trans_clos_n1 x y | tn1_trans (y z:A) : R y z -> trans_clos_n1 x y -> trans_clos_n1 x z. End Transitive_Closure. (** ** Reflexive closure *) Section Reflexive_Closure. Context {A : Type} (R : relation A). (** Definition by direct transitive closure *) Inductive clos_refl (x: A) : A -> Type := | r_step (y:A) : R x y -> clos_refl x y | r_refl : clos_refl x x. End Reflexive_Closure. (** ** Reflexive-transitive closure *) Section Reflexive_Transitive_Closure. Context {A : Type} (R : relation A). (** Definition by direct reflexive-transitive closure *) Inductive clos_refl_trans (x:A) : A -> Type := | rt_step (y:A) : R x y -> clos_refl_trans x y | rt_refl : clos_refl_trans x x | rt_trans (y z:A) : clos_refl_trans x y -> clos_refl_trans y z -> clos_refl_trans x z. (** Alternative definition by transitive extension on the left *) Inductive clos_refl_trans_1n (x: A) : A -> Type := | rt1n_refl : clos_refl_trans_1n x x | rt1n_trans (y z:A) : R x y -> clos_refl_trans_1n y z -> clos_refl_trans_1n x z. (** Alternative definition by transitive extension on the right *) Inductive clos_refl_trans_n1 (x: A) : A -> Type := | rtn1_refl : clos_refl_trans_n1 x x | rtn1_trans (y z:A) : R y z -> clos_refl_trans_n1 x y -> clos_refl_trans_n1 x z. End Reflexive_Transitive_Closure. (** ** Reflexive-symmetric-transitive closure *) Section Reflexive_Symmetric_Transitive_Closure. Context {A : Type} (R : relation A). (** Definition by direct reflexive-symmetric-transitive closure *) Inductive clos_refl_sym_trans : relation A := | rst_step (x y:A) : R x y -> clos_refl_sym_trans x y | rst_refl (x:A) : clos_refl_sym_trans x x | rst_sym (x y:A) : clos_refl_sym_trans x y -> clos_refl_sym_trans y x | rst_trans (x y z:A) : clos_refl_sym_trans x y -> clos_refl_sym_trans y z -> clos_refl_sym_trans x z. (** Alternative definition by symmetric-transitive extension on the left *) Inductive clos_refl_sym_trans_1n (x: A) : A -> Type := | rst1n_refl : clos_refl_sym_trans_1n x x | rst1n_trans (y z:A) : R x y + R y x -> clos_refl_sym_trans_1n y z -> clos_refl_sym_trans_1n x z. 
(** Alternative definition by symmetric-transitive extension on the right *) Inductive clos_refl_sym_trans_n1 (x: A) : A -> Type := | rstn1_refl : clos_refl_sym_trans_n1 x x | rstn1_trans (y z:A) : R y z + R z y -> clos_refl_sym_trans_n1 x y -> clos_refl_sym_trans_n1 x z. End Reflexive_Symmetric_Transitive_Closure. (** ** Converse of a relation *) Section Converse. Context {A : Type} (R : relation A). Definition transp (x y:A) := R y x. End Converse. (** ** Union of relations *) Section Union. Context {A : Type} (R1 R2 : relation A). Definition union (x y:A) := (R1 x y + R2 x y)%type. End Union. (** ** Disjoint union of relations *) Section Disjoint_Union. Context {A B : Type}. Variable leA : A -> A -> Type. Variable leB : B -> B -> Type. Inductive le_AsB : A + B -> A + B -> Type := | le_aa (x y:A) : leA x y -> le_AsB (inl x) (inl y) | le_ab (x:A) (y:B) : le_AsB (inl x) (inr y) | le_bb (x y:B) : leB x y -> le_AsB (inr x) (inr y). End Disjoint_Union. (** ** Lexicographic order on dependent pairs *) Section Lexicographic_Product. Context {A : Type} {B : A -> Type}. Variable leA : A -> A -> Type. Variable leB : forall x:A, B x -> B x -> Type. Inductive lexprod : sigma B -> sigma B -> Type := | left_lex : forall (x x':A) (y:B x) (y':B x'), leA x x' -> lexprod (x, y) (x', y') | right_lex : forall (x:A) (y y':B x), leB x y y' -> lexprod (x, y) (x, y'). End Lexicographic_Product. (** ** Product of relations *) Section Symmetric_Product. Variable A : Type. Variable B : Type. Variable leA : A -> A -> Type. Variable leB : B -> B -> Type. Inductive symprod : A * B -> A * B -> Type := | left_sym : forall x x':A, leA x x' -> forall y:B, symprod (x, y) (x', y) | right_sym : forall y y':B, leB y y' -> forall x:A, symprod (x, y) (x, y'). End Symmetric_Product. (** ** Multiset of two relations *) Section Swap. Variable A : Type. Variable R : A -> A -> Type. Inductive swapprod : A * A -> A * A -> Type := | sp_noswap x y (p:A * A) : symprod A A R R (x, y) p -> swapprod (x, y) p | sp_swap x y (p:A * A) : symprod A A R R (x, y) p -> swapprod (y, x) p. End Swap. Local Open Scope list_scope. Section Lexicographic_Exponentiation. Variable A : Set. Variable leA : A -> A -> Type. Let Nil := nil (A:=A). Let List := list A. Inductive Ltl : List -> List -> Type := | Lt_nil (a:A) (x:List) : Ltl Nil (a :: x) | Lt_hd (a b:A) : leA a b -> forall x y:list A, Ltl (a :: x) (b :: y) | Lt_tl (a:A) (x y:List) : Ltl x y -> Ltl (a :: x) (a :: y). Inductive Desc : List -> Type := | d_nil : Desc Nil | d_one (x:A) : Desc (x :: Nil) | d_conc (x y:A) (l:List) : clos_refl leA x y -> Desc (l ++ y :: Nil) -> Desc ((l ++ y :: Nil) ++ x :: Nil). Definition Pow : Type := sigma Desc. Definition lex_exp (a b:Pow) : Type := Ltl a.1 b.1. End Lexicographic_Exponentiation. #[export] Hint Unfold transp union: relations. #[export] Hint Resolve t_step rt_step rt_refl rst_step rst_refl: relations. #[export] Hint Immediate rst_sym: relations. Coq-Equations-1.3.1-8.20/theories/Type/Relation_Properties.v000066400000000000000000000271301463127417400235300ustar00rootroot00000000000000(************************************************************************) (* * The Coq Proof Assistant / The Coq Development Team *) (* v * INRIA, CNRS and contributors - Copyright 1999-2018 *) (* trans_clos R y z -> trans_clos R x z. Proof. induction 1 as [b d H1|b|a b d H1 H2 IH1 IH2]; auto. intro H. apply t_trans with (y:=d); auto. constructor. auto. Defined. 
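(* Illustrative remark (a sketch, not used elsewhere): a single [R]-step lies
   in the reflexive-symmetric-transitive closure in both directions, using
   the constructors [rst_step] and [rst_sym] directly. *)
Local Lemma clos_rst_step_sym_example :
  forall x y, R x y -> clos_refl_sym_trans R y x.
Proof. intros x y H. apply rst_sym. apply rst_step. exact H. Defined.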
(** Correctness of the reflexive-symmetric-transitive closure *) Lemma clos_rst_is_equiv : Equivalence (clos_refl_sym_trans R). Proof. constructor. - exact (rst_refl R). - exact (rst_sym R). - exact (rst_trans R). Defined. (** Idempotency of the reflexive-symmetric-transitive closure operator *) Lemma clos_rst_idempotent : inclusion (clos_refl_sym_trans (clos_refl_sym_trans R)) (clos_refl_sym_trans R). Proof. red. induction 1; auto with relations. apply rst_trans with y; auto with relations. Defined. End Clos_Refl_Sym_Trans. Section Equivalences. (** *** Equivalences between the different definition of the reflexive, symmetric, transitive closures *) (** *** Contributed by P. Castéran *) (** Direct transitive closure vs left-step extension *) Lemma clos_t1n_trans : forall x y, trans_clos_1n R x y -> trans_clos R x y. Proof. induction 1. - left; assumption. - right with y; auto. left; auto. Defined. Lemma trans_clos_t1n : forall x y, trans_clos R x y -> trans_clos_1n R x y. Proof. induction 1. - left; assumption. - generalize IHX2; clear IHX2; induction IHX1. -- right with y; auto. -- right with y; auto. eapply IHIHX1; auto. apply clos_t1n_trans; auto. Defined. Lemma trans_clos_t1n_iff : forall x y, trans_clos R x y <-> trans_clos_1n R x y. Proof. intros x y. exists (trans_clos_t1n x y). apply (clos_t1n_trans x y). Defined. (** Direct transitive closure vs right-step extension *) Lemma clos_tn1_trans : forall x y, trans_clos_n1 R x y -> trans_clos R x y. Proof. induction 1. - left; assumption. - right with y; auto. left; assumption. Defined. Lemma trans_clos_tn1 : forall x y, trans_clos R x y -> trans_clos_n1 R x y. Proof. induction 1. - left; assumption. - elim IHX2. -- intro y0; right with y; auto. -- intros. right with y0; auto. Defined. Lemma trans_clos_tn1_iff : forall x y, trans_clos R x y <-> trans_clos_n1 R x y. Proof. split. - apply trans_clos_tn1. - apply clos_tn1_trans. Defined. (** Direct reflexive-transitive closure is equivalent to transitivity by left-step extension *) Lemma clos_rt1n_step : forall x y, R x y -> clos_refl_trans_1n R x y. Proof. intros x y H. right with y;[assumption|left]. Defined. Lemma clos_rtn1_step : forall x y, R x y -> clos_refl_trans_n1 R x y. Proof. intros x y H. right with x;[assumption|left]. Defined. Lemma clos_rt1n_rt : forall x y, clos_refl_trans_1n R x y -> clos_refl_trans R x y. Proof. induction 1. - constructor 2. - constructor 3 with y; auto. constructor 1; auto. Defined. Lemma clos_rt_rt1n : forall x y, clos_refl_trans R x y -> clos_refl_trans_1n R x y. Proof. induction 1. - apply clos_rt1n_step; assumption. - left. - generalize IHX2; clear IHX2; induction IHX1; auto. right with y; auto. eapply IHIHX1; auto. apply clos_rt1n_rt; auto. Defined. Lemma clos_rt_rt1n_iff : forall x y, clos_refl_trans R x y <-> clos_refl_trans_1n R x y. Proof. split. - apply clos_rt_rt1n. - apply clos_rt1n_rt. Defined. (** Direct reflexive-transitive closure is equivalent to transitivity by right-step extension *) Lemma clos_rtn1_rt : forall x y, clos_refl_trans_n1 R x y -> clos_refl_trans R x y. Proof. induction 1. - constructor 2. - constructor 3 with y; auto. constructor 1; assumption. Defined. Lemma clos_rt_rtn1 : forall x y, clos_refl_trans R x y -> clos_refl_trans_n1 R x y. Proof. induction 1. - apply clos_rtn1_step; auto. - left. - elim IHX2; auto. intros. right with y0; auto. Defined. Lemma clos_rt_rtn1_iff : forall x y, clos_refl_trans R x y <-> clos_refl_trans_n1 R x y. Proof. split. - apply clos_rt_rtn1. - apply clos_rtn1_rt. Defined. 
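(* Illustrative corollary (a sketch, not used elsewhere): composing
   [clos_rt_rtn1] with the [rt_step] constructor recovers the right-step
   presentation of a single step, i.e. a restatement of [clos_rtn1_step]. *)
Local Lemma clos_rtn1_step_example :
  forall x y, R x y -> clos_refl_trans_n1 R x y.
Proof. intros x y H. apply clos_rt_rtn1. apply rt_step. exact H. Defined.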
(** Induction on the left transitive step *) Lemma clos_refl_trans_ind_left : forall (x:A) (P:A -> Type), P x -> (forall y z:A, clos_refl_trans R x y -> P y -> R y z -> P z) -> forall z:A, clos_refl_trans R x z -> P z. Proof. intros. revert X X0. induction X1; intros; auto with relations. { apply X0 with x; auto with relations. } apply IHX1_2. { apply IHX1_1; auto with relations. } intros. apply X0 with y0; auto with relations. apply rt_trans with y; auto with relations. Defined. (** Induction on the right transitive step *) Lemma rt1n_ind_right : forall (P : A -> Type) (z:A), P z -> (forall x y, R x y -> clos_refl_trans_1n R y z -> P y -> P x) -> forall x, clos_refl_trans_1n R x z -> P x. induction 3; auto. apply X0 with y; auto. Defined. Lemma clos_refl_trans_ind_right : forall (P : A -> Type) (z:A), P z -> (forall x y, R x y -> P y -> clos_refl_trans R y z -> P x) -> forall x, clos_refl_trans R x z -> P x. intros P z Hz IH x Hxz. apply clos_rt_rt1n_iff in Hxz. elim Hxz using rt1n_ind_right; auto. clear x Hxz. intros x y Hxy Hyz Hy. apply clos_rt_rt1n_iff in Hyz. eauto. Defined. (** Direct reflexive-symmetric-transitive closure is equivalent to transitivity by symmetric left-step extension *) Lemma clos_rst1n_rst : forall x y, clos_refl_sym_trans_1n R x y -> clos_refl_sym_trans R x y. Proof. induction 1. - constructor 2. - constructor 4 with y; auto. case s; [constructor 1 | constructor 3; constructor 1]; auto. Defined. Lemma clos_rst1n_trans : forall x y z, clos_refl_sym_trans_1n R x y -> clos_refl_sym_trans_1n R y z -> clos_refl_sym_trans_1n R x z. induction 1. - auto. - intros; right with y; eauto. Defined. Lemma clos_rst1n_sym : forall x y, clos_refl_sym_trans_1n R x y -> clos_refl_sym_trans_1n R y x. Proof. intros x y H; elim H. - constructor 1. - intros x0 y0 z D H0 H1; apply clos_rst1n_trans with y0; auto. right with x0. + destruct D; [right|left]; auto. + left. Defined. Lemma clos_rst_rst1n : forall x y, clos_refl_sym_trans R x y -> clos_refl_sym_trans_1n R x y. induction 1. - constructor 2 with y; auto with relations. constructor 1. - constructor 1. - apply clos_rst1n_sym; auto. - eapply clos_rst1n_trans; eauto. Defined. Lemma clos_rst_rst1n_iff : forall x y, clos_refl_sym_trans R x y <-> clos_refl_sym_trans_1n R x y. Proof. split. - apply clos_rst_rst1n. - apply clos_rst1n_rst. Defined. (** Direct reflexive-symmetric-transitive closure is equivalent to transitivity by symmetric right-step extension *) Lemma clos_rstn1_rst : forall x y, clos_refl_sym_trans_n1 R x y -> clos_refl_sym_trans R x y. Proof. induction 1. - constructor 2. - constructor 4 with y; auto. case s; [constructor 1 | constructor 3; constructor 1]; auto. Defined. Lemma clos_rstn1_trans : forall x y z, clos_refl_sym_trans_n1 R x y -> clos_refl_sym_trans_n1 R y z -> clos_refl_sym_trans_n1 R x z. Proof. intros x y z H1 H2. induction H2. - auto. - intros. right with y0; eauto. Defined. Lemma clos_rstn1_sym : forall x y, clos_refl_sym_trans_n1 R x y -> clos_refl_sym_trans_n1 R y x. Proof. intros x y H; elim H. - constructor 1. - intros y0 z D H0 H1. apply clos_rstn1_trans with y0; auto. right with z. + destruct D; auto with relations. + left. Defined. Lemma clos_rst_rstn1 : forall x y, clos_refl_sym_trans R x y -> clos_refl_sym_trans_n1 R x y. Proof. induction 1. - constructor 2 with x; auto with relations. constructor 1. - constructor 1. - apply clos_rstn1_sym; auto. - eapply clos_rstn1_trans; eauto. Defined. Lemma clos_rst_rstn1_iff : forall x y, clos_refl_sym_trans R x y <-> clos_refl_sym_trans_n1 R x y. Proof. 
split. - apply clos_rst_rstn1. - apply clos_rstn1_rst. Defined. End Equivalences. Lemma trans_clos_transp_permute : forall x y, transp (trans_clos R) x y <-> trans_clos (transp R) x y. Proof. split; induction 1; (apply t_step; assumption) || eapply t_trans; eassumption. Defined. End Properties. Coq-Equations-1.3.1-8.20/theories/Type/Subterm.v000066400000000000000000000233431463127417400211620ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Set Warnings "-notation-overridden". From Equations Require Import Init Signature. Require Import Equations.CoreTactics. Require Import Equations.Type.Logic Equations.Type.Classes Equations.Type.EqDec Equations.Type.Relation Equations.Type.FunctionalExtensionality Equations.Type.WellFounded Equations.Type.DepElim Equations.Type.Constants. Set Universe Polymorphism. Import Id_Notations. Generalizable Variables A R S B. (** The fixpoint combinator associated to a well-founded relation, just reusing the [WellFounded.Fix] combinator. *) Definition FixWf `{WF:WellFounded A R} (P : A -> Type) (step : forall x : A, (forall y : A, R y x -> P y) -> P x) : forall x : A, P x := Fix wellfounded P step. Definition step_fn_ext {A} {R} (P : A -> Type) := fun step : forall x : A, (forall y : A, R y x -> P y) -> P x => forall x (f g : forall y (H : R y x), P y), (forall y H, f y H = g y H) -> step x f = step x g. Lemma FixWf_unfold `{WF : WellFounded A R} (P : A -> Type) (step : forall x : A, (forall y : A, R y x -> P y) -> P x) (step_ext : step_fn_ext P step) (x : A) : FixWf P step x = step x (fun y _ => FixWf P step y). Proof. intros. unfold FixWf. Admitted. (* rewrite Fix_eq. apply step_ext. intros. reflexivity. *) (* intros x' f g H. apply step_ext. apply H. *) (* Qed. *) Lemma FixWf_unfold_step : forall (A : Type) (R : relation A) (WF : WellFounded R) (P : A -> Type) (step : forall x : A, (forall y : A, R y x -> P y) -> P x) (x : A) (step_ext : step_fn_ext P step) (step' : forall y : A, R y x -> P y), step' = (fun (y : A) (_ : R y x) => FixWf P step y) -> FixWf P step x = step x step'. Proof. intros. rewrite FixWf_unfold, X. reflexivity. apply step_ext. Qed. Ltac unfold_FixWf := match goal with |- context [ @FixWf ?A ?R ?WF ?P ?f ?x ] => let step := fresh in set(step := fun y (_ : R y x) => @FixWf A R WF P f y) in *; unshelve erewrite (@FixWf_unfold_step A R WF P f x _ step); [red; intros; simp_sigmas; red_one_eq (* Extensionality proof *) |hidebody step; red_eq_lhs (* Unfold the functional *) |reflexivity] end. Ltac unfold_recursor := unfold_FixWf. Lemma FixWf_unfold_ext `{WF : WellFounded A R} (P : A -> Type) (step : forall x : A, (forall y : A, R y x -> P y) -> P x) (x : A) : FixWf P step x = step x (fun y _ => FixWf P step y). Proof. intros. unfold FixWf, Fix. destruct wellfounded. simpl. apply ap. apply funext. intros y. apply funext; intros h. apply ap. apply Acc_prop. Qed. #[global] Hint Rewrite @FixWf_unfold_ext : Recursors. 
Lemma FixWf_unfold_ext_step : forall (A : Type) (R : relation A) (WF : WellFounded R) (P : A -> Type) (step : forall x : A, (forall y : A, R y x -> P y) -> P x) (x : A) (step' : forall y : A, R y x -> P y), step' = (fun (y : A) (_ : R y x) => FixWf P step y) -> FixWf P step x = step x step'. Proof. intros * eq. rewrite FixWf_unfold_ext. destruct eq. reflexivity. Qed. #[global] Hint Rewrite @FixWf_unfold_ext_step : Recursors. Ltac unfold_FixWf_ext := match goal with |- context [ @FixWf ?A ?R ?WF ?P ?f ?x ] => let step := fresh in set(step := fun y (_ : R y x) => @FixWf A R WF P f y) in *; rewrite (@FixWf_unfold_ext_step A R WF P f x step); [hidebody step; try red_eq_lhs (* Unfold the functional *)|reflexivity] end. Ltac unfold_recursor_ext := unfold_FixWf_ext. (** Inline so that we get back a term using general recursion. *) Extraction Inline FixWf Fix Fix_F. (** This hint database contains the constructors of every derived subterm relation. It is used to automatically find proofs that a term is a subterm of another. *) Create HintDb subterm_relation discriminated. Create HintDb rec_decision discriminated. (** This is used to simplify the proof-search for recursive call obligations. *) Ltac simpl_let := match goal with [ H : let _ := ?t in _ |- _ ] => match t with | fixproto => fail 1 | _ => cbv zeta in H end end. #[global] Hint Extern 40 => progress (cbv beta in * || simpl_let) : rec_decision. (* This expands lets in the context to simplify proof search for recursive call obligations, as [eauto] does not do matching up-to unfolding of let-bound variables. *) #[global] Hint Extern 10 => match goal with [ x := _ |- _ ] => lazymatch goal with |- context [ x ] => subst x end end : rec_decision. (** We can automatically use the well-foundedness of a relation to get the well-foundedness of its transitive closure. Note that this definition is transparent as well as [wf_clos_trans], to allow computations with functions defined by well-founded recursion. *) Lemma WellFounded_trans_clos `(WF : WellFounded A R) : WellFounded (trans_clos R). Proof. apply wf_trans_clos. apply WF. Defined. #[global] Hint Extern 4 (WellFounded (trans_clos _)) => apply @WellFounded_trans_clos : typeclass_instances. #[export] Instance wf_inverse_image {A R} `(WellFounded A R) {B} (f : B -> A) : WellFounded (inverse_image R f) | (WellFounded (inverse_image _ _)). Proof. red. apply wf_inverse_image. apply H. Defined. (* (* Do not apply [wf_MR] agressively, as Coq's unification could "invent" an [f] otherwise *) (* to unify. *) *) (* Hint Extern 0 (WellFounded (inverse_image _ _)) => apply @wf_inverse_image : typeclass_instances. *) #[global] Hint Extern 0 (inverse_image _ _ _ _) => red : rec_decision. (** We also add hints for transitive closure, not using [t_trans] but forcing to build the proof by successive applications of the inner relation. *) #[global] Hint Resolve t_step : subterm_relation. Lemma trans_clos_stepr A (R : relation A) (x y z : A) : R y z -> trans_clos R x y -> trans_clos R x z. Proof. intros Hyz Hxy. exact (t_trans _ x y z Hxy (t_step _ _ _ Hyz)). Defined. #[global] Hint Resolve trans_clos_stepr : subterm_relation. (** The default tactic to build proofs of well foundedness of subterm relations. *) Create HintDb solve_subterm discriminated. #[global] Hint Extern 4 (_ = _) => reflexivity : solve_subterm. #[global] Hint Extern 10 => eapply_hyp : solve_subterm. 
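(* Illustration (a documentation sketch, not used by the plugin): the
   [wf_inverse_image] instance above is what provides well-foundedness when a
   definition recurses along a measure [f] into an already well-founded
   relation; here we simply let typeclass resolution build the instance. *)
Example wf_measure_example {A} (f : A -> nat) (R : relation nat)
  (HR : WellFounded R) : WellFounded (inverse_image R f) := _.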
Ltac solve_subterm := intros; apply WellFounded_trans_clos; red; intros; simp_sigmas; on_last_hyp ltac:(fun H => depind H); constructor; intros; simp_sigmas; on_last_hyp ltac:(fun HR => depind HR); simplify_dep_elim; try typeclasses eauto with solve_subterm. (** A tactic to launch a well-founded recursion. *) Ltac rec_wf_fix recname kont := let hyps := fresh in intros hyps; intro; on_last_hyp ltac:(fun x => rename x into recname; unfold inverse_image at 1 in recname) ; destruct_right_sigma hyps; try curry recname; simpl in recname; kont recname. (* Ltac rec_wf_fix x recname fixterm := *) (* apply fixterm ; clear_local ; *) (* intros until 1 ; simp_sigmas ; *) (* on_last_hyp ltac:(fun x => rename x into recname) ; *) (* simplify_dep_elim ; intros ; unblock_goal ; intros ; *) (* move recname at bottom ; try curry recname ; simpl in recname. *) (** The [do] tactic but using a Coq-side nat. *) Ltac do_nat n tac := match n with | 0 => idtac | S ?n' => tac ; do_nat n' tac end. (** Generalize an object [x], packing it in a sigma type if necessary. *) Ltac sigma_pack n t := let packhyps := fresh "hypspack" in let xpack := fresh "pack" in let eos' := fresh "eos" in match constr:(n) with | 0%nat => set (eos' := the_end_of_the_section); move eos' at top | _ => do_nat n ltac:(idtac; revert_last); set (eos' := the_end_of_the_section); do_nat n intro end; uncurry_hyps packhyps; (progress (set(xpack := t) in |- ; cbv beta iota zeta in xpack; revert xpack; pattern sigma packhyps; clearbody packhyps; revert packhyps; clear_nonsection; clear eos')). (** We specialize the tactic for [x] of type [A], first packing [x] with its indices into a sigma type and finding the declared relation on this type. *) Ltac rec_wf recname t kont := sigma_pack 0 t; match goal with [ |- forall (s : ?T) (s0 := @?b s), @?P s ] => let fn := constr:(fun s : T => b s) in let c := constr:(wellfounded (R:=inverse_image _ fn)) in let wf := constr:(FixWf (WF:=c)) in intros s _; revert s; refine (wf P _); simpl ; rec_wf_fix recname kont end. Ltac rec_wf_eqns recname x := rec_wf recname x ltac:(fun rechyp => add_pattern (hide_pattern rechyp)). Ltac rec_wf_rel_aux recname n t rel kont := sigma_pack n t; match goal with [ |- forall (s : ?T) (s0 := @?b s), @?P s ] => let fn := constr:(fun s : T => b s) in let c := constr:(wellfounded (R:=inverse_image rel fn)) in let wf := constr:(FixWf (WF:=c)) in intros s _; revert s; refine (wf P _); simpl ; rec_wf_fix recname kont end. Ltac rec_wf_rel recname x rel := rec_wf_rel_aux recname 0 x rel ltac:(fun rechyp => idtac). (* NoCycle from well-foundedness. *) Definition NoCycle_WellFounded {A} (R : relation A) (wfR : WellFounded R) : NoCyclePackage A := {| NoCycle := R; noCycle := well_founded_irreflexive (wfR:=wfR) |}. #[export] Existing Instance NoCycle_WellFounded. #[global] Hint Extern 30 (@NoCycle ?A (NoCycle_WellFounded ?R ?wfr) _ _) => hnf; typeclasses eauto with subterm_relation : typeclass_instances. Coq-Equations-1.3.1-8.20/theories/Type/Tactics.v000066400000000000000000000111271463127417400211300ustar00rootroot00000000000000(**********************************************************************) (* Equations *) (* Copyright (c) 2009-2020 Matthieu Sozeau *) (**********************************************************************) (* This file is distributed under the terms of the *) (* GNU Lesser General Public License Version 2.1 *) (**********************************************************************) Set Warnings "-notation-overridden". 
Require Import Equations.CoreTactics Equations.Type.Logic Equations.Type.DepElim Equations.Type.EqDec Equations.Type.Subterm Equations.Type.WellFounded Equations.Type.FunctionalInduction. Ltac Equations.Init.simpl_equations ::= Equations.Type.DepElim.simpl_equations. Ltac Equations.Init.simplify_equalities ::= Equations.Type.DepElim.simplify_dep_elim. Ltac Equations.Init.depelim H ::= dependent elimination H; cbn in *. Ltac Equations.Init.depind H ::= Equations.Type.DepElim.depind H. Ltac Equations.Init.simp_IHs_tac ::= Equations.Type.FunctionalInduction.simplify_IHs_call. Ltac Equations.Init.funelim_constr_as H H' tac ::= Equations.Type.FunctionalInduction.funelim_constr_as H H' tac. Ltac Equations.Init.apply_funelim H ::= Equations.Type.FunctionalInduction.apply_funelim H. Ltac Equations.Init.noconf H ::= Equations.Type.DepElim.noconf H. Create HintDb solve_subterm discriminated. #[global] Hint Extern 4 (_ = _) => reflexivity : solve_subterm. #[global] Hint Extern 10 => eapply_hyp : solve_subterm. Ltac solve_subterm := intros; apply WellFounded.wf_trans_clos; red; intros; simp_sigmas; on_last_hyp ltac:(fun H => depind H); constructor; intros; simp_sigmas; on_last_hyp ltac:(fun HR => depind HR); simplify_dep_elim; try typeclasses eauto with solve_subterm. Ltac Equations.Init.solve_subterm ::= solve_subterm. Ltac Equations.Init.solve_eqdec ::= eqdec_proof. Ltac Equations.Init.unfold_recursor ::= Equations.Type.Subterm.unfold_recursor. Ltac Equations.Init.unfold_recursor_ext ::= Equations.Type.Subterm.unfold_recursor_ext. Ltac solve_noconf_prf := intros; on_last_hyp ltac:(fun id => destruct id) ; (* Subtitute a = b *) on_last_hyp ltac:(fun id => destruct_sigma id; destruct id) ; (* Destruct the inductive object a *) constructor. Ltac solve_noconf_inv_eq a b := destruct_sigma a; destruct_sigma b; destruct a ; depelim b; simpl in * |-; on_last_hyp ltac:(fun id => hnf in id; destruct_tele_eq id || destruct id); solve [constructor]. Ltac solve_noconf_inv := intros; match goal with |- ?R ?a ?b => destruct_sigma a; destruct_sigma b; destruct a ; depelim b; simpl in * |-; on_last_hyp ltac:(fun id => hnf in id; destruct_tele_eq id || destruct id); solve [constructor] | |- @Id _ (?f ?a ?b _) _ => solve_noconf_inv_eq a b end. Ltac solve_noconf_inv_equiv := intros; (* Subtitute a = b *) on_last_hyp ltac:(fun id => destruct id) ; (* Destruct the inductive object a *) on_last_hyp ltac:(fun id => destruct_sigma id; destruct id) ; simpl; constructor. Ltac solve_noconf := simpl; intros; match goal with [ H : @Id _ _ _ |- @Id _ _ _ ] => try solve_noconf_inv_equiv | [ H : @Id _ _ _ |- _ ] => try solve_noconf_prf | [ |- @Id _ _ _ ] => try solve_noconf_inv end. Ltac solve_noconf_hom_inv_eq a b := destruct_sigma a; destruct_sigma b; destruct a ; depelim b; simpl in * |-; on_last_hyp ltac:(fun id => hnf in id; destruct_tele_eq id || depelim id); solve [constructor || simpl_equations; constructor]. Ltac solve_noconf_hom_inv := intros; match goal with | |- @Id _ (?f ?a ?b _) _ => solve_noconf_hom_inv_eq a b | |- ?R ?a ?b => destruct_sigma a; destruct_sigma b; destruct a ; depelim b; simpl in * |-; on_last_hyp ltac:(fun id => hnf in id; destruct_tele_eq id || depelim id); solve [constructor || simpl_equations; constructor] end. Ltac solve_noconf_hom_inv_equiv := intros; (* Subtitute a = b *) on_last_hyp ltac:(fun id => destruct id) ; (* Destruct the inductive object a using dependent elimination to handle UIP cases. *) on_last_hyp ltac:(fun id => destruct_sigma id; depelim id) ; simpl; simpl_equations; constructor. 
Ltac solve_noconf_hom := simpl; intros; match goal with [ H : @Id _ _ _ |- @Id _ _ _ ] => try solve_noconf_hom_inv_equiv | [ H : @Id _ _ _ |- _ ] => try solve_noconf_prf | [ |- @Id _ _ _ ] => try solve_noconf_hom_inv end. Ltac Equations.Init.solve_noconf ::= solve_noconf. Ltac Equations.Init.solve_noconf_hom ::= solve_noconf_hom. Coq-Equations-1.3.1-8.20/theories/Type/Telescopes.v000066400000000000000000000202241463127417400216420ustar00rootroot00000000000000Set Warnings "-notation-overridden". Require Import Equations.Type.Loader. Require Import Equations.Type.Logic. Require Import Equations.Type.FunctionalExtensionality. Require Import Equations.Type.WellFounded. Require Import Equations.Type.DepElim. Require Import Equations.Type.Subterm. Require Import Equations.Type.Tactics. Require Import Equations.Type.FunctionalInduction. (** Telescopes: allows treating variable arity fixpoints *) Set Universe Polymorphism. Import Sigma_Notations Id_Notations. Local Open Scope equations_scope. Set Equations Transparent. Cumulative Inductive tele@{i} : Type := | tip (A : Type@{i}) | ext (A : Type@{i}) (B : A -> tele) : tele. Register tele as equations.tele.type. Register tip as equations.tele.tip. Register ext as equations.tele.ext. Section TeleSigma. Universe i. Equations tele_sigma (t : tele@{i}) : Type@{i} := | tip A := A | ext A B := @sigma A (fun x => tele_sigma (B x)). Coercion tele_sigma : tele >-> Sortclass. Inductive tele_val : tele@{i} -> Type@{i+1} := | tip_val {A} (a : A) : tele_val (tip A) | ext_val {A B} (a : A) (b : tele_val (B a)) : tele_val (ext A B). Equations tele_pred : tele -> Type := | tip A := A -> Type | ext A B := forall x : A, tele_pred (B x). Equations tele_rel : tele -> tele -> Type := | tip A | tip B := A -> B -> Type | ext A B | ext A' B' := forall (x : A) (y : A'), tele_rel (B x) (B' y) | _ | _ := False. Equations tele_rel_app (T U : tele) (P : tele_rel T U) (x : tele_sigma T) (y : tele_sigma U) : Type := | tip A, tip A', P, a, a' := P a a' | ext A B, ext A' B', P, (a, b), (a', b') := tele_rel_app (B a) (B' a') (P a a') b b'. Universes j k. Equations tele_fn : tele@{i} -> Type@{j} -> Type@{k} := | tip A, concl := A -> concl | ext A B, concl := forall x : A, tele_fn (B x) concl. Equations tele_MR (T : tele@{i}) (A : Type@{j}) (f : tele_fn T A) : T -> A := tele_MR (tip A) C f => f; tele_MR (ext A B) C f => fun x => tele_MR (B x.1) C (f x.1) x.2. Equations tele_measure (T : tele@{i}) (A : Type@{j}) (f : tele_fn T A) (R : A -> A -> Type@{k}) : T -> T -> Type@{k} := tele_measure T C f R := fun x y => R (tele_MR T C f x) (tele_MR T C f y). Equations tele_type : tele@{i} -> Type@{k} := | tip A := A -> Type@{j}; | ext A B := forall x : A, tele_type (B x). Equations tele_type_app (T : tele@{i}) (P : tele_type T) (x : tele_sigma T) : Type@{k} := tele_type_app (tip A) P a := P a; tele_type_app (ext A B) P (a, b) := tele_type_app (B a) (P a) b. Equations tele_forall (T : tele@{i}) (P : tele_type T) : Type@{k} := | tip A, P := forall x : A, P x; | ext A B, P := forall x : A, tele_forall (B x) (P x). Equations tele_forall_impl (T : tele@{i}) (P : tele_type T) (Q : tele_type T) : Type := | tip A, P, Q := forall x : A, P x -> Q x; | ext A B, P, Q := forall x : A, tele_forall_impl (B x) (P x) (Q x). Equations tele_forall_app (T : tele@{i}) (P : tele_type T) (f : tele_forall T P) (x : T) : tele_type_app T P x := tele_forall_app (tip A) P f x := f x; tele_forall_app (ext A B) P f x := tele_forall_app (B x.1) (P x.1) (f x.1) x.2. 
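(* Sanity checks (an illustrative sketch): on a small two-argument telescope,
   the sigma-type interpretation and the function-space interpretation
   compute, up to conversion, to the expected types. *)
Check (id_refl : tele_sigma (ext nat (fun _ => tip bool)) = (Σ _ : nat, bool)).
Check (id_refl : tele_fn (ext nat (fun _ => tip bool)) nat = (nat -> bool -> nat)).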
Equations tele_forall_type_app (T : tele@{i}) (P : tele_type T) (fn : forall t, tele_type_app T P t) : tele_forall T P := | tip A, P, fn := fn | ext A B, P, fn := fun a : A => tele_forall_type_app (B a) (P a) (fun b => fn (a, b)). Lemma tele_forall_app_type (T : tele@{i}) (P : tele_type T) (f : forall t, tele_type_app T P t) : forall x, tele_forall_app T P (tele_forall_type_app T P f) x = f x. Proof. induction T; simpl. reflexivity. cbn. intros [a b]. simpl. apply X. Defined. Equations tele_forall_uncurry (T : tele@{i}) (P : T -> Type@{j}) : Type@{k} := | tip A , P := forall x : A, P x | ext A B , P := forall x : A, tele_forall_uncurry (B x) (fun y : tele_sigma (B x) => P (x, y)). Equations tele_rel_pack (T U : tele) (x : tele_rel T U) : tele_sigma T -> tele_sigma U -> Type by struct T := tele_rel_pack (tip A) (tip A') P := P; tele_rel_pack (ext A B) (ext A' B') P := fun x y => tele_rel_pack (B x.1) (B' y.1) (P _ _) x.2 y.2. Equations tele_pred_pack (T : tele) (P : tele_pred T) : tele_sigma T -> Type := | tip A, P := P | ext A B, P := fun x => tele_pred_pack (B x.1) (P x.1) x.2. Equations tele_type_unpack (T : tele) (P : tele_sigma T -> Type) : tele_type T := tele_type_unpack (tip A) P := P; tele_type_unpack (ext A B) P := fun x => tele_type_unpack (B x) (fun y => P (x, y)). Equations tele_pred_fn_pack (T U : tele) (P : tele_fn T (tele_pred U)) : tele_sigma T -> tele_sigma U -> Type := tele_pred_fn_pack (tip A) U P := fun x => tele_pred_pack U (P x); tele_pred_fn_pack (ext A B) U P := fun x => tele_pred_fn_pack (B x.1) U (P x.1) x.2. Definition tele_rel_curried T := tele_fn T (tele_pred T). Equations tele_forall_pack (T : tele) (P : T -> Type) (f : tele_forall_uncurry T P) (t : T) : P t := | tip A | P | f | t := f t; | ext A B | P | f | (a, b) := tele_forall_pack (B a) (fun b => P (a, b)) (f a) b. Equations tele_forall_unpack (T : tele@{i}) (P : T -> Type@{j}) (f : forall (t : T), P t) : tele_forall_uncurry T P := | tip A | P | f := f | ext A B | P | f := fun a : A => tele_forall_unpack (B a) (fun b => P (a, b)) (fun b => f (a, b)). Lemma tele_forall_pack_unpack (T : tele) (P : T -> Type) (f : forall t, P t) : forall x, tele_forall_pack T P (tele_forall_unpack T P f) x = f x. Proof. induction T; simpl. reflexivity. intros [a b]. simpl. apply (X a (fun b => P (a, b))). Defined. End TeleSigma. Register tele_sigma as equations.tele.interp. Register tele_measure as equations.tele.measure. #[export] Instance wf_tele_measure@{i j k| i <= k, j <= k} {T : tele@{i}} (A : Type@{j}) (f : tele_fn@{i j k} T A) (R : A -> A -> Type@{k}) : WellFounded R -> WellFounded (tele_measure T A f R) | (WellFounded (tele_measure _ _ _ _)). Proof. intros. apply wf_inverse_image@{i j k k}. apply X. Defined. Section Fix. Universes i j k l m. Constraint k < l. Constraint i <= l. Constraint j <= m, l <= m. Context {T : tele@{i}} (R : T -> T -> Type@{j}). Context (wf : WellFounded R). Context (P : tele_type@{i k l} T). (* (forall x : A, (forall y : A, R y x -> P y) -> P x) -> forall x : A, P x *) Definition tele_fix_functional_type := tele_forall_uncurry@{i m m} T (fun x => ((tele_forall_uncurry@{i m m} T (fun y => R y x -> tele_type_app T P y))) -> tele_type_app T P x). Context (fn : tele_fix_functional_type). Definition tele_fix : tele_forall T P := tele_forall_type_app _ _ (@FixWf (tele_sigma T) _ wf (tele_type_app T P) (fun x H => tele_forall_pack T _ fn x (tele_forall_unpack T _ H))). End Fix. Register tele_fix as equations.tele.fix. Register tele_MR as equations.tele.MR. 
Register tele_fix_functional_type as equations.tele.fix_functional_type. Register tele_type_app as equations.tele.type_app. Register tele_forall_type_app as equations.tele.forall_type_app. Register tele_forall_uncurry as equations.tele.forall_uncurry. Register tele_forall as equations.tele.forall. Register tele_forall_pack as equations.tele.forall_pack. Register tele_forall_unpack as equations.tele.forall_unpack. Extraction Inline tele_forall_pack tele_forall_unpack tele_forall_type_app tele_fix. Section FixUnfold. Universes i j k l m. Constraint k < l. Constraint i <= l. Constraint j <= m, l <= m. Context {T : tele@{i}} (x : T) (R : T -> T -> Type@{j}). Context (wf : well_founded R). Context (P : tele_type@{i k l} T). (* (forall x : A, (forall y : A, R y x -> P y) -> P x) -> forall x : A, P x *) Context (fn : tele_fix_functional_type@{i j k l m} R P). Lemma tele_fix_unfold : tele_forall_app T P (tele_fix R wf P fn) x = tele_forall_pack T _ fn x (tele_forall_unpack T _ (fun y _ => tele_forall_app T P (tele_fix R wf P fn) y)). Proof. intros. unfold tele_fix, Subterm.FixWf, Fix. rewrite tele_forall_app_type@{i k l}. destruct (wellfounded x). simpl. apply ap. apply ap. apply funext. intros y. apply funext; intros h. eapply id_trans. 2:{ apply id_sym. apply tele_forall_app_type. } apply ap@{l l}. apply Acc_prop. Defined. End FixUnfold. Register tele_fix_unfold as equations.tele.fix_unfold. Coq-Equations-1.3.1-8.20/theories/Type/WellFounded.v000066400000000000000000000101421463127417400217420ustar00rootroot00000000000000Set Warnings "-notation-overridden". From Coq Require Import Extraction CRelationClasses. Require Import Equations.Init Equations.CoreTactics. Require Import Equations.Type.Logic Equations.Type.FunctionalExtensionality Equations.Type.Relation Equations.Type.Relation_Properties. Set Universe Polymorphism. Import Id_Notations. Import Sigma_Notations. (** Well-founded relations in Type *) Section Acc. Universes i j. Context {A : Type@{i}} (R : relation@{i j} A). Cumulative Inductive Acc (x : A) : Type@{max(i,j)} := | Acc_intro : (forall y, R y x -> Acc y) -> Acc x. Definition Acc_inv {x} (H : Acc x) : forall y, R y x -> Acc y. Proof. intros y Hy. destruct H. exact (a _ Hy). Defined. Lemma Acc_prop i (x y : Acc i) : x = y. Proof. revert y. induction x as [y Accy IHy]. intros [Accy']. apply ap. apply funext. intros H. apply funext. intros H'. apply IHy. Qed. Definition well_founded := forall x, Acc x. Context (P : A -> Type). Context (step : forall x : A, (forall y : A, R y x -> P y) -> P x). Fixpoint Fix_F (x : A) (a : Acc x) : P x := step x (fun y r => Fix_F y (Acc_inv a y r)). End Acc. Section FixWf. Context {A R} (WF : @well_founded A R). Context (P : A -> Type). Context (step : forall x : A, (forall y : A, R y x -> P y) -> P x). Definition Fix (x : A) : P x := Fix_F R P step x (WF x). End FixWf. Lemma well_founded_irreflexive {A} {R : relation A} {wfR : well_founded R} : forall x y : A, R x y -> x = y -> Empty. Proof. intros x y Ryy. intros e. destruct e. red in wfR. induction (wfR x) as [y accy IHy]. apply (IHy _ Ryy Ryy). Qed. Lemma well_founded_antisym@{i j} {A : Type@{i}} {R : relation@{i j} A}{wfR : well_founded R} : forall x y : A, R x y -> R y x -> Empty. Proof. intros x y Rxy Ryx. red in wfR. induction (wfR y) as [y accy IHy] in x, Rxy, Ryx. specialize (IHy _ Rxy). apply (IHy _ Ryx Rxy). Qed. Section Wf_Transitive_Closure. (** Original author: Bruno Barras, adapted to Type *) Context {A : Type} (R : relation A). Notation trans_clos := (trans_clos R). 
Lemma incl_trans_clos : inclusion R trans_clos. red; auto with relations. Defined. Lemma Acc_trans_clos : forall x:A, Acc R x -> Acc trans_clos x. induction 1 as [x0 _ H1]. apply Acc_intro. intros y H2. induction H2; auto with relations. apply Acc_inv with y; auto with relations. Defined. Hint Resolve Acc_trans_clos : core. Lemma Acc_inv_trans : forall x y:A, trans_clos y x -> Acc R x -> Acc R y. Proof. induction 1 as [| x y]; auto with relations. intro; apply Acc_inv with y; assumption. Defined. Theorem wf_trans_clos : well_founded R -> well_founded trans_clos. Proof. unfold well_founded; auto with relations. Defined. End Wf_Transitive_Closure. (** Author: Bruno Barras *) Section Inverse_Image. Context {A B : Type} (R : relation B) (f : A -> B). Definition inverse_image := fun x y => R (f x) (f y). Remark Acc_lemma : forall y : B, Acc R y -> forall x : A, y = f x -> Acc inverse_image x. Proof. induction 1 as [y _ IHAcc]; intros x H. apply Acc_intro; intros y0 H1. apply (IHAcc (f y0)); try trivial. apply id_sym in H. destruct H; trivial. constructor. Defined. Lemma Acc_inverse_image : forall x:A, Acc R (f x) -> Acc inverse_image x. Proof. intros; apply (Acc_lemma (f x)); trivial. constructor. Defined. Theorem wf_inverse_image : well_founded R -> well_founded inverse_image. Proof. red; intros; apply Acc_inverse_image; auto. Defined. (* Variable F : A -> B -> Type. *) (* Let RoF (x y:A) := *) (* exists2 b : B, F x b & (forall c:B, F y c -> R b c). *) (* Lemma Acc_inverse_rel : forall b:B, Acc R b -> forall x:A, F x b -> Acc RoF x. *) (* Proof. *) (* induction 1 as [x _ IHAcc]; intros x0 H2. *) (* constructor; intros y H3. *) (* destruct H3. *) (* apply (IHAcc x1); auto. *) (* Qed. *) (* Theorem wf_inverse_rel : well_founded R -> well_founded RoF. *) (* Proof. *) (* red; constructor; intros. *) (* case H0; intros. *) (* apply (Acc_inverse_rel x); auto. *) (* Qed. *) End Inverse_Image. Coq-Equations-1.3.1-8.20/theories/Type/WellFoundedInstances.v000066400000000000000000000045231463127417400236200ustar00rootroot00000000000000Set Warnings "-notation-overridden". Require Import Equations.Type.Loader Equations.Type.Relation Equations.Type.WellFounded. Import Id_Notations. Section Lt. Inductive le : nat -> nat -> Set := | le_0 x : le 0 x | le_S {x y} : le x y -> le (S x) (S y). Definition lt x y := le (S x) y. Lemma le_eq_lt x y : le x y -> (x = y) + (lt x y). Proof. induction 1. destruct x. left; constructor. right; constructor. constructor. dependent elimination IHle as [inl id_refl|inr Hlt]. left; constructor. right; now constructor. Defined. Global Instance lt_wf : WellFounded lt. Proof. intros x. induction x. constructor. intros y Hy. depelim Hy. constructor. intros y Hy. dependent elimination Hy as [@le_S y x Hle]. apply le_eq_lt in Hle. dependent elimination Hle as [inl id_refl|inr Hlt]. assumption. destruct IHx. now apply a. Defined. Lemma lt_n_Sn n : lt n (S n). Proof. constructor. induction n; now constructor. Defined. End Lt. #[global] Hint Resolve lt_n_Sn : rec_decision. (** Define non-dependent lexicographic products *) Import Sigma_Notations. Local Open Scope equations_scope. Section Lexicographic_Product. Variable A : Type. Variable B : Type. Variable leA : relation A. Variable leB : relation B. Inductive lexprod : A * B -> A * B -> Type := | left_lex : forall {x x':A} {y:B} {y':B}, leA x x' -> lexprod (x, y) (x', y') | right_lex : forall {x:A} {y y':B}, leB y y' -> lexprod (x, y) (x, y'). 
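(* Illustration (a sketch, not used below): a lexicographic step either
   decreases the first component, or keeps it fixed and decreases the
   second; here the first case, via the [left_lex] constructor. *)
Local Lemma lexprod_left_example (x x' : A) (y y' : B) :
  leA x x' -> lexprod (x, y) (x', y').
Proof. intros H. apply left_lex. exact H. Defined.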
Lemma acc_A_B_lexprod : forall x:A, Acc leA x -> (well_founded leB) -> forall y:B, Acc leB y -> Acc lexprod (x, y). Proof. induction 1 as [x _ IHAcc]; intros H2 y. induction 1 as [x0 H IHAcc0]. apply Acc_intro. destruct y as [x2 y1]; intro Hlex. depelim Hlex. apply IHAcc; auto with relations. now apply IHAcc0. Defined. Theorem wf_lexprod : well_founded leA -> well_founded leB -> well_founded lexprod. Proof. intros wfA wfB; unfold well_founded. destruct x. apply acc_A_B_lexprod; auto with relations; intros. Defined. End Lexicographic_Product. #[export] Instance wellfounded_lexprod A B R S `(wfR : WellFounded A R, wfS : WellFounded B S) : WellFounded (lexprod A B R S) := wf_lexprod A B R S wfR wfS. #[global] Hint Constructors lexprod : rec_decision. Coq-Equations-1.3.1-8.20/theories/Type/dune000066400000000000000000000002461463127417400202250ustar00rootroot00000000000000(coq.theory ; This determines the -R flag (name Equations.Type) (package coq-equations) (synopsis "Equations Plugin") (theories Equations) (modules :standard)) Coq-Equations-1.3.1-8.20/theories/dune000066400000000000000000000003561463127417400173060ustar00rootroot00000000000000(coq.theory ; This determines the -R flag (name Equations) (package coq-equations) (synopsis "Equations Plugin") (libraries coq-core.plugins.extraction coq-equations.plugin) (modules :standard \ IdDec NoCycle)) (include_subdirs no)