OpenSC-pam_pkcs11-87edc72 (git commit b8dbe6370d36a6a11a466d5f0ee285804103e030)

==> OpenSC-pam_pkcs11-87edc72/.github/workflows/build.yml <==
name: build

# Controls when the action will run. Triggers the workflow on push or pull request
# events but only for the master branch
on: [push, pull_request]

# A workflow run is made up of one or more jobs that can run
# sequentially or in parallel
jobs:
  # This workflow contains a single job called "build"
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        configure_args: [
          "",
          "--with-curl",
          "--with-nss",
        ]

    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job
      # can access it
      - uses: actions/checkout@v2

      - name: setup prerequisites
        shell: bash
        run: |
          sudo apt update
          sudo apt install \
            docbook-xsl \
            doxygen \
            gettext \
            libcurl4-openssl-dev \
            libldap2-dev \
            libnss3-dev \
            libpam-dev \
            libpcsclite-dev \
            libssl-dev \
            pkg-config \
            xsltproc

      - name: compile
        shell: bash
        run: |
          ./bootstrap
          export CFLAGS="-Wall -Wextra -Wformat -Wformat-security -Wmissing-declarations -Wmissing-prototypes -Wold-style-definition -Wpointer-arith -Wredundant-decls -Wshadow -Wstrict-prototypes -Wswitch-enum -Wundef -Wuninitialized -Wunused -Wwrite-strings -Wmissing-noreturn -flto=auto -O2 -Wp,-D_FORTIFY_SOURCE=2"
          ./configure ${{ matrix.configure_args }}
          make V=1

      - name: distcheck
        shell: bash
        run: |
          make distcheck

==> OpenSC-pam_pkcs11-87edc72/.github/workflows/codespell.yml <==
---
name: Codespell

on:
  pull_request:
  push:

jobs:
  codespell:
    name: Check for spelling errors
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - uses: codespell-project/actions-codespell@v1
        with:
          skip: ./po/de.po,./po/fr.po,./po/nl.po,./po/pt_BR.po,./po/it.po
          ignore_words_file: codespell_ignore_words.txt

==> OpenSC-pam_pkcs11-87edc72/.gitignore <==
# C-Outputs
*.o
*.lo
*.la

# gettext catalogs
*.gmo
po/POTFILES
po/stamp-po

# autoconf-related
aclocal/codeset.m4
aclocal/glibc21.m4
aclocal/glibc2.m4
aclocal/intdiv0.m4
aclocal/intldir.m4
aclocal/intl.m4
aclocal/intlmacosx.m4
aclocal/intmax.m4
aclocal/inttypes_h.m4
aclocal/inttypes-pri.m4
aclocal/lcmessage.m4
aclocal/libtool.m4
aclocal/lock.m4
aclocal/longlong.m4
aclocal/lt~obsolete.m4
aclocal/ltoptions.m4
aclocal/ltsugar.m4
aclocal/ltversion.m4
aclocal.m4
aclocal/printf-posix.m4
aclocal/size_max.m4
aclocal/stdint_h.m4
aclocal/uintmax_t.m4
aclocal/visibility.m4
aclocal/wchar_t.m4
aclocal/wint_t.m4
aclocal/xsize.m4
ar-lib
autom4te.cache/
compile
config.guess
config.h*
config.log
config.status
config.sub
configure
depcomp
install-sh
libtool
ltmain.sh
missing
stamp-h1
**/.deps
**/.libs
**/Makefile
**/Makefile.in

# output
ABOUT-NLS
ChangeLog.git
doc/api
doc/doxygen.conf
doc/mappers_api.html
doc/pam_pkcs11.8
doc/pam_pkcs11.html
etc/pam.d_login.example
etc/pam_pkcs11.conf.example
intl/
m4/
src/tools/card_eventmgr
src/tools/pkcs11_eventmgr
src/tools/pkcs11_inspect
src/tools/pkcs11_listcerts
src/tools/pkcs11_setup
src/tools/pklogin_finder

==> OpenSC-pam_pkcs11-87edc72/AUTHORS <==
Mario Strasser
    Original pam-pkcs11 code

Juan Antonio Martinez
    Configuration file mgmt
    Dynamic cert-to-login module mappers

Antti Tapaninen
Timo Sirainen
    scconf configuration file library

Ludovic Rousseau
    Many fixes and improvements
    card_eventmgr maintainer

Andreas Jellinghaus
    OpenSC and OpenSSH mappers
    original code from PAM_p11 libraries

Dominik Fischer
    LDAP Mapper
    Some improvements

Ville Skyttä
    Original pam_pkcs11.spec file

Paul Wolneykien
    Additional features and fixes (card_only, wait_for_card, openssl versions).

Also Thanks to all the people at the OpenSC project

==> OpenSC-pam_pkcs11-87edc72/COPYING <==
                  GNU LESSER GENERAL PUBLIC LICENSE
                       Version 2.1, February 1999

 Copyright (C) 1991, 1999 Free Software Foundation, Inc.
     59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 Everyone is permitted to copy and distribute verbatim copies
 of this license document, but changing it is not allowed.

[This is the first released version of the Lesser GPL. It also counts as the successor of the GNU Library Public License, version 2, hence the version number 2.1.]

                            Preamble

  The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users.

  This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below.

  When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things.

  To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it.

  For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights.

  We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library.

  To protect each distributor, we want to make it very clear that there is no warranty for the free library.
Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others. Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license. Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. We use this license for certain libraries in order to permit linking those libraries into non-free programs. When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library. We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances. For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License. In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system. Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library. The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run. GNU LESSER GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. 
This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you". A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables. The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".) "Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library. Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does. 1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) The modified work must itself be a software library. b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change. c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License. d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful. (For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. 
Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library. In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices. Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy. This option is useful when you wish to copy part of the code of the Library into a program that is not a library. 4. You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange. If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code. 5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License. However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables. 
When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law. If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.) Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself. 6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications. You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things: a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.) b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with. c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution. d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place. e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy. 
For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute. 7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things: a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above. b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. 8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it. 10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License. 11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. 
For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation. 14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.

                     END OF TERMS AND CONDITIONS

           How to Apply These Terms to Your New Libraries

  If you develop a new library, and you want it to be of the greatest possible use to the public, we recommend making it free software that everyone can redistribute and change. You can do so by permitting redistribution under these terms (or, alternatively, under the terms of the ordinary General Public License).

  To apply these terms, attach the following notices to the library. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.

    Copyright (C) <year>  <name of author>

    This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version.

    This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.

    You should have received a copy of the GNU Lesser General Public License along with this library; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

Also add information on how to contact you by electronic and paper mail.

You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the library, if necessary. Here is a sample; alter the names:

  Yoyodyne, Inc., hereby disclaims all copyright interest in the library `Frob' (a library for tweaking knobs) written by James Random Hacker.

  <signature of Ty Coon>, 1 April 1990
  Ty Coon, President of Vice

That's all there is to it!

==> OpenSC-pam_pkcs11-87edc72/ChangeLog <==
04- Feb 2025 - Version 0.6.13.
- Added pkcs11-eventmgr systemd service unit.
- Updated Russian translations for pam_pkcs11 (thx Max Kosmach and Andrey Cherepanov).
- Fixed possible authentication bypass (CVE-2025-24032): Use signatures to verify authentication by default (thx Frank Morgner).
- Fixed possible authentication bypass (CVE-2025-24531): Restoring the original card_only / wait_for_card behavior (thx Matthias Gerstner, Frank Morgner).
- Move pam_securetty.so upward in the example PAM config.
- Set 'slot_num' configuration parameter to 0 by default (thx Jpereyra316).
- Print details about configuration parse errors (thx Jpereyra316).
- Updated RPM spec file (thx Etienne Barbier).
- Add Chinese (Simplified) translation.
- Capitalize all PAM messages (thx Alynx Zhou).
- Cleaned up (some?) memory leaks.
- Made pkcs11_make_hash_link support whitespaces in file names (thx Ivan Skorikov).
- Fixed LGTM.com errors (thx Dimitri Papadopoulos).
- Fix typos found by codespell (thx Dimitri Papadopoulos). - Fixed missing X509_CRL_free() in check_for_revocation. 31- May 2021 - Version 0.6.12 - Limit signature length to 65536 bytes. - Workaround for buggy PKCS#11 C_Sign() implementation that always returns the same signature length along with CKR_BUFFER_TOO_SMALL. - Don't stuck if wait_for_card=false and ignore the token not found error when the authentication isn't restricted to card only (either by the option `card_only` or by PKCS11_LOGIN_TOKEN_NAME environment variable). - Added the example PAM configuration which uses the ignore status result. - Added `screen_savers` to example config. - Fixes to deal with old and new OpenSSL versions. - Various small fixes including type casts and printf() formats. 22- May 2019 - Version 0.6.11 - Support OpenSSL 1.1.0 - use green instead of blue text for logs on the console - Solaris runs build process outside of srcdir - Fix openssh_mapper_match_keys() for OpenSSL 1.0 & 1.1 - Fix 64-bit pkcs11_inspect(1) fails on SPARC with a SIBGUS due to misaligned access - Add support of ECDSA signature in addition to RSA 12- Sep 2018 - Version 0.6.10 is out. - Fixed some security issues (thx @frankmorgner): (https://www.x41-dsec.de/lab/advisories/x41-2018-003-pam_pkcs11/) -- fixed buffer overflow with long home directory; -- fixed wiping secrets (now using OpenSSL_cleanse()); -- verify using a nonce from the system, not the card. 08- Sep 2005 - Fixes to pam_pkcs11.spec 07- Sep 2005 - Conditional compilation of pcsc-lite, curl and ldap dependent modules - Added LDAP mapper. Thanks to Dominik Fisher - TODO roadmap updated 06- Sep 2005 - Add new OpenSSH mapper. Thanks again Andreas for pam_openssh module 03- Sep 2005 - Add base64 encoding functions 02- Sep 2005 - Make source doxygen friendly 01- Sep 2005 - Finished OpenSC mapper. Thanks to Andreas for their pam_opensc module - New .spec file - Cleaning tasks to prepare next release 31- Aug 2005 - Include HOWTO in Pam-pkcs11 manual - Several fixes in src/mappers/Makefile.am to properly manage libraries 30- Aug 2005 - Allow static mapper to be instantiated with defaults if no configuration block defined - Documentation updates - Make pam_pkcs11.so to be installed at /lib/security - Debugging improvements 29- Aug 2005 - Mappers can be now statically linked - Added Mapper API documentation - Moved most of mappers to be static 28- Aug 2005 - Documentation Updates - Created libmapper - pam_sm_authenticate fail on xdmcp remote users 05- Jul 2005 - Many improvements to code by Ludovic Rousseau - Configure.in library checks improvements - Do not compile card-eventmgr if pcsc is not installed 09- Jun 2005 - Preliminary works on OpenSC mapper 06- Jun 2005 - Added openssh mapper - Now pam_pkcs11 generates PEM data for cert and pubk - Many bugfixes 14- Apr 2005 - finish coding of generic_maper - Move common code of pwent_mapper to mapper.c and mapper.h - Fix MAINTAINERCLEAN entries in some Makefile.am files 11- Apr 2005 - Change package name to pam-pkcs11 - Change directory names according with package name - Rewrite pwent mapper to use cert_info library 08- Apr 2005 - Some typos in some messages - Make sources more "gcc -pedantic" friendly. 
- Changes in DBG(X) macros, to be C99 compliant 07- Apr 2005 - pkcs11_login-0.5.1 is out - Fix configure.in AM_MAINTAINER_MODE - Makefile fixes - Fix some typos and sample files 04- Apr 2005 - Tarball moved to OpenSC Project web pages 02- Apr 2005 - pkcs11_login-0.5 is out - Mail mapper rewritten to use cert_info lib - First works on mapper api: . create structs and macros in mapper.h . rewrite mappers to initialize mapper_module_st entries - Published roadmap to 1.0 version. See TODO file - Fix: Dont free key/value map entries, as they come from buffer entry 31- Mar 2005 - Added CA & CRL mgmnt doc to manual - Changed occurrences of "if (!x) free(x)" to "free(x)" as glibc already does proper null check - Finished krb_mapper ( no pkinit, just kpn -> login map ). NOTE: I assume that KPN is stored as ASN1_STRING, but cannot deduce it from RFC's - MS mapper rewritten to use cert_info lib 29- Mar 2005 - Manual rewritten in xml format - Check for manual in html format. Re-generate if not present - pkcs11_eventmgr: reset time_counter on expire() event fixed - Update .spec file to release 0.5-0 28- Mar 2005 - pkcs11_login-0.5beta1 is out - Manual and web page updated 22- Mar 2005 - recoded cn and krb mappers to use cert_info library - buxfix in conf file 20- Mar 2005 - New tool: pkcs11_inspect, to see contents of certificate - Man pages: several typos and bugfixes - Recoded uid mapper to use cert_info library 18- Mar 2005 - No more warns in compile: fix "-no-strict-alias" cflag when linking with /usr/lib/libpam.so - Several bugfixes and configuration files typos. - Updated .spec file - New files cert_info.[ch] to get and show cert contents. This will allow coding of some tools to inspect certificates without loading mapper modules - Re-coded digest and subject mapper to use cert_info library. The idea is: 1- Get all mappers use cert_info library 2- join all mappers in one big dynload module to store all pre-defined mappers 17- Mar 2005 - Add mapper module function: mapper_find_entries(), in module API to get textual (ie: without mapping) entries on certificate - Reorganize sources: add src/common directory for shared code and move most of common functions there to create a library - Fixed tons of warnings related to "const char *" typecast - Rename cert.[ch] to cert_vfy.[ch] 15- Mar 2005 - Add Certificate Digest mapper ( updated doc and sample files ) 4- Mar 2005 - Added mapfiles to UID mapper - ms_mapper now works properly ( sorry, no ADS connection yet :-( ) - Updated doc and sample files 3- Mar 2005 - Implemented routines API for file mapping: {set,get,end}_mapfile() mapfile_{find,match}() - Implemented mapfiles in mail_mapper - Added mapfiles to cn_mapper - file_mapper changed to subject_mapper 1- Mar 2005 - Pkcs#11-Login 0.4.4 is out. - New web pages 28- Feb 2005 - New pkcs11_eventmgr tool. 
  card_eventmgr is now deprecated, but still supported
- Updated manual

24- Feb 2005
- Properly detect [no]debug command line option
- Updated manual
- Fixed pcsc-lite version control in card_eventmgr.c
- Several configure.in compatibility issues

16- Feb 2005
- Move scconf to be statically linked
- New tool: pklogin_finder
- Some manual pages
- Redhat .spec file
- Bugfixes

11- Feb 2005
- Works on Card Event manager finished
- Update documentation

10- Feb 2005
- Preliminary version of card_eventmgr tool to detect insert/extract card events and launch proper actions

9- Feb 2005
- Allow empty strings as user name, and deduce login from certificate
- Bugfix: call close_pkcs11_session() on all error conditions
- Updated manuals and README's

8- Feb 2005
- New pw_mapper CN-to-getpwent() mapper
- Ignoredomain support for mail_mapper
- Minor bugs in cn and uid mappers

7- Feb 2005
pkcs11_login-0.4 released:
- Now pam_pkcs11 can take arguments from command line or via configuration file
- Certificate to User mapping has been modularized
- Preliminary works on entering session without userlogin prompt: just insert certificate and enter PIN

2- Feb 2005
Thanks to Mario Strasser for allowing me to rework his pam_pkcs11 module and re-release it under the LGPL

==> OpenSC-pam_pkcs11-87edc72/INSTALL <==
Copyright (C) 1994, 1995, 1996, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.

   This file is free documentation; the Free Software Foundation gives unlimited permission to copy, distribute and modify it.

Basic Installation
==================

   These are generic installation instructions.

   The `configure' shell script attempts to guess correct values for various system-dependent variables used during compilation. It uses those values to create a `Makefile' in each directory of the package. It may also create one or more `.h' files containing system-dependent definitions. Finally, it creates a shell script `config.status' that you can run in the future to recreate the current configuration, and a file `config.log' containing compiler output (useful mainly for debugging `configure').

   It can also use an optional file (typically called `config.cache' and enabled with `--cache-file=config.cache' or simply `-C') that saves the results of its tests to speed up reconfiguring. (Caching is disabled by default to prevent problems with accidental use of stale cache files.)

   If you need to do unusual things to compile the package, please try to figure out how `configure' could check whether to do them, and mail diffs or instructions to the address given in the `README' so they can be considered for the next release. If you are using the cache, and at some point `config.cache' contains results you don't want to keep, you may remove or edit it.

   The file `configure.ac' (or `configure.in') is used to create `configure' by a program called `autoconf'. You only need `configure.ac' if you want to change it or regenerate `configure' using a newer version of `autoconf'.

The simplest way to compile this package is:

  1. `cd' to the directory containing the package's source code and type `./configure' to configure the package for your system. If you're using `csh' on an old version of System V, you might need to type `sh ./configure' instead to prevent `csh' from trying to execute `configure' itself.

     Running `configure' takes awhile. While running, it prints some messages telling which features it is checking for.

  2. Type `make' to compile the package.

  3. Optionally, type `make check' to run any self-tests that come with the package.
  4. Type `make install' to install the programs and any data files and documentation.

  5. You can remove the program binaries and object files from the source code directory by typing `make clean'. To also remove the files that `configure' created (so you can compile the package for a different kind of computer), type `make distclean'. There is also a `make maintainer-clean' target, but that is intended mainly for the package's developers. If you use it, you may have to get all sorts of other programs in order to regenerate files that came with the distribution.

Compilers and Options
=====================

   Some systems require unusual options for compilation or linking that the `configure' script does not know about. Run `./configure --help' for details on some of the pertinent environment variables.

   You can give `configure' initial values for configuration parameters by setting variables in the command line or in the environment. Here is an example:

     ./configure CC=c89 CFLAGS=-O2 LIBS=-lposix

   *Note Defining Variables::, for more details.

Compiling For Multiple Architectures
====================================

   You can compile the package for more than one kind of computer at the same time, by placing the object files for each architecture in their own directory. To do this, you must use a version of `make' that supports the `VPATH' variable, such as GNU `make'. `cd' to the directory where you want the object files and executables to go and run the `configure' script. `configure' automatically checks for the source code in the directory that `configure' is in and in `..'.

   If you have to use a `make' that does not support the `VPATH' variable, you have to compile the package for one architecture at a time in the source code directory. After you have installed the package for one architecture, use `make distclean' before reconfiguring for another architecture.

Installation Names
==================

   By default, `make install' will install the package's files in `/usr/local/bin', `/usr/local/man', etc. You can specify an installation prefix other than `/usr/local' by giving `configure' the option `--prefix=PATH'.

   You can specify separate installation prefixes for architecture-specific files and architecture-independent files. If you give `configure' the option `--exec-prefix=PATH', the package will use PATH as the prefix for installing programs and libraries. Documentation and other data files will still use the regular prefix.

   In addition, if you use an unusual directory layout you can give options like `--bindir=PATH' to specify different values for particular kinds of files. Run `configure --help' for a list of the directories you can set and what kinds of files go in them.

   If the package supports it, you can cause programs to be installed with an extra prefix or suffix on their names by giving `configure' the option `--program-prefix=PREFIX' or `--program-suffix=SUFFIX'.

Optional Features
=================

   Some packages pay attention to `--enable-FEATURE' options to `configure', where FEATURE indicates an optional part of the package. They may also pay attention to `--with-PACKAGE' options, where PACKAGE is something like `gnu-as' or `x' (for the X Window System). The `README' should mention any `--enable-' and `--with-' options that the package recognizes.
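   For this package in particular, the continuous-integration workflow above exercises two such switches, so an invocation along these lines should work (the prefix is only illustrative):

     ./configure --prefix=/usr --with-curl
     ./configure --with-nss

   (`--with-curl' uses libcurl for downloading CRLs and `--with-nss' builds against NSS, as suggested by the build matrix in .github/workflows/build.yml; check `./configure --help' for the authoritative list of options.)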
   For packages that use the X Window System, `configure' can usually find the X include and library files automatically, but if it doesn't, you can use the `configure' options `--x-includes=DIR' and `--x-libraries=DIR' to specify their locations.

Specifying the System Type
==========================

   There may be some features `configure' cannot figure out automatically, but needs to determine by the type of machine the package will run on. Usually, assuming the package is built to be run on the _same_ architectures, `configure' can figure that out, but if it prints a message saying it cannot guess the machine type, give it the `--build=TYPE' option. TYPE can either be a short name for the system type, such as `sun4', or a canonical name which has the form:

     CPU-COMPANY-SYSTEM

where SYSTEM can have one of these forms:

     OS KERNEL-OS

   See the file `config.sub' for the possible values of each field. If `config.sub' isn't included in this package, then this package doesn't need to know the machine type.

   If you are _building_ compiler tools for cross-compiling, you should use the `--target=TYPE' option to select the type of system they will produce code for.

   If you want to _use_ a cross compiler, that generates code for a platform different from the build platform, you should specify the "host" platform (i.e., that on which the generated programs will eventually be run) with `--host=TYPE'.

Sharing Defaults
================

   If you want to set default values for `configure' scripts to share, you can create a site shell script called `config.site' that gives default values for variables like `CC', `cache_file', and `prefix'. `configure' looks for `PREFIX/share/config.site' if it exists, then `PREFIX/etc/config.site' if it exists. Or, you can set the `CONFIG_SITE' environment variable to the location of the site script. A warning: not all `configure' scripts look for a site script.

Defining Variables
==================

   Variables not defined in a site shell script can be set in the environment passed to `configure'. However, some packages may run configure again during the build, and the customized values of these variables may be lost. In order to avoid this problem, you should set them in the `configure' command line, using `VAR=value'. For example:

     ./configure CC=/usr/local2/bin/gcc

will cause the specified gcc to be used as the C compiler (unless it is overridden in the site shell script).

`configure' Invocation
======================

   `configure' recognizes the following options to control how it operates.

`--help'
`-h'
     Print a summary of the options to `configure', and exit.

`--version'
`-V'
     Print the version of Autoconf used to generate the `configure' script, and exit.

`--cache-file=FILE'
     Enable the cache: use and save the results of the tests in FILE, traditionally `config.cache'. FILE defaults to `/dev/null' to disable caching.

`--config-cache'
`-C'
     Alias for `--cache-file=config.cache'.

`--quiet'
`--silent'
`-q'
     Do not print messages saying which checks are being made. To suppress all normal output, redirect it to `/dev/null' (any error messages will still be shown).

`--srcdir=DIR'
     Look for the package's source code in directory DIR. Usually `configure' can determine that directory automatically.

`configure' also accepts some other, not widely useful, options. Run `configure --help' for more details.

==> OpenSC-pam_pkcs11-87edc72/Makefile.am <==
# $Id$

SUBDIRS = . po doc etc src tools
DIST_SUBDIRS = . po doc etc src tools
EXTRA_DIST = config.rpath ChangeLog COPYING INSTALL \
	NEWS README TODO bootstrap pam_pkcs11.spec ChangeLog.git

MAINTAINERCLEANFILES = \
	Makefile.in config.h.in configure \
	install-sh ltmain.sh missing mkinstalldirs \
	compile depcomp config.log config.status \
	config.guess config.sub acinclude.m4 aclocal.m4

DEPCLEANFILES = config.log configure

AUTOMAKE_OPTIONS = foreign
ACLOCAL_AMFLAGS = -I aclocal

ChangeLog.git:
	git log --stat --decorate=short > $@

==> OpenSC-pam_pkcs11-87edc72/NEWS <==
04- Feb 2025 - Version 0.6.13.
- Added pkcs11-eventmgr systemd service unit.
- Updated Russian translations for pam_pkcs11 (thx Max Kosmach and Andrey Cherepanov).
- Fixed possible authentication bypass (CVE-2025-24032): Use signatures to verify authentication by default (thx Frank Morgner).
- Fixed possible authentication bypass (CVE-2025-24531): Restoring the original card_only / wait_for_card behavior (thx Matthias Gerstner, Frank Morgner).
- Move pam_securetty.so upward in the example PAM config.
- Set 'slot_num' configuration parameter to 0 by default (thx Jpereyra316).
- Print details about configuration parse errors (thx Jpereyra316).
- Add Chinese (Simplified) translation.
- Capitalize all PAM messages (thx Alynx Zhou).
- Made pkcs11_make_hash_link support whitespaces in file names (thx Ivan Skorikov).

22- May 2019 - Version 0.6.11
- Support OpenSSL 1.1.0
- Use green instead of blue text for logs on the console
- Solaris runs build process outside of srcdir
- Fix openssh_mapper_match_keys() for OpenSSL 1.0 & 1.1
- Fix 64-bit pkcs11_inspect(1) failing on SPARC with a SIGBUS due to misaligned access
- Add support of ECDSA signature in addition to RSA

12- Sep 2018 - Version 0.6.10 is out.
- Fixed some security issues (thx @frankmorgner):
  (https://www.x41-dsec.de/lab/advisories/x41-2018-003-pam_pkcs11/)
  -- fixed buffer overflow with long home directory;
  -- fixed wiping secrets (now using OpenSSL_cleanse());
  -- verify using a nonce from the system, not the card.

... 0.6.9 ... 0.6.0 are yet undescribed.

12- Sep 2005
- Finally pam_pkcs11-0.5.3 is out.
- New mapper API and Docs
- Full documentation available
- New mappers: openssh, openssl, ldap, generic and more

12- Apr 2005
- Changed name to pam_pkcs11
- pam_pkcs11-0.5.2 released
- Now pam_pkcs11 is part of the OpenSC project web.

04- Apr 2005
- Pkcs11_login-0.5 released:
  * Grouped all functions in a common library
  * rewritten all mappers (openssh, opensc, ldap still to be written)
  * New certificate digest (md5, sha1, etc) mapper
  * Documentation updated and rewritten in DocBook XML
  * Tons of bugfixes
- New tool: pkcs11_inspect

28- Feb 2005
- pkcs11_login-0.4.4 released
- New pkcs11_eventmgr tool.

15- Feb 2005
- pkcs11_login-0.4.4Beta
- Man pages, rpm packages

11- Feb 2005
- pkcs11_login-0.4.3 released
- Added card_eventmgr tool to launch actions on card insert/remove

9- Feb 2005
- pkcs11_login-0.4.2 released.
- See README.autologin to see how to use login-from-certificate features in console and gdm

8- Feb 2005
- pkcs11_login-0.4.1 released.
  See ChangeLog.

7- Feb 2005
pkcs11_login-0.4 released:
- Now pam_pkcs11 can take arguments from command line or via configuration file
- Certificate to User mapping has been modularized
- Preliminary works on entering session without userlogin prompt: just insert certificate and enter PIN

2- Feb 2005
Thanks to Mario Strasser for allowing me to rework his pam_pkcs11 module and re-release it under the LGPL

==> OpenSC-pam_pkcs11-87edc72/README <==
This is the README of the PKCS #11 PAM Login Module
======================================================================

Authors:
  Mario Strasser
  Juan Antonio Martinez
  Ludovic Rousseau
  Frank Morgner
  Paul Wolneykien

This Linux-PAM login module allows an X.509 certificate based user login. The certificate and its dedicated private key are thereby accessed by means of an appropriate PKCS #11 module. For the verification of the users' certificates, locally stored CA certificates as well as either online or locally accessible CRLs are used.

Detailed information about the Linux-PAM system can be found in [1], [2] and [3]. The specification of the Cryptographic Token Interface Standard (PKCS #11) is available at [4].

PKCS #11 Module Requirements
----------------------------------------------------------------------

The PKCS #11 modules must fulfill the requirements given by the RSA Asymmetric Client Signing Profile, which has been specified in the PKCS #11 Conformance Profile Specification [5] by RSA Laboratories.

User Matching
----------------------------------------------------------------------

To approve the ownership of a certificate, that is, to allow the owner of a certificate to log in as a particular user, several mapper modules are provided. See the README.mappers file in the doc directory.

[Note: This is still a work in progress, any suggestions for improvements or alternative matching algorithms are welcome.]

Installation
----------------------------------------------------------------------

bash# tar xvzf pam_pkcs11-X.Y.Z.tar.gz
bash# cd pam_pkcs11-X.Y.Z
bash# ./configure
bash# make
bash# sudo make install

Configuration

1- Create a directory /etc/pam_pkcs11
2- Copy $(base)/etc/pam_pkcs11.conf.example to /etc/pam_pkcs11/ and personalize it
3- Create crls and cacerts directories according to the configuration file, and fill them with proper data
4- Choose one or more mappers to install, set up the configuration file, and if needed configure the mappers.
   The file etc/pam_pkcs11.conf is fully auto-documented, to allow easy editing.
5- Set up /etc/pam.d/xxx entries
----------------------------------------------------------------------

To make use of the PKCS #11 login module replace the line

    auth  requisite  pam_unix2.so   ...

with

    auth  requisite  pam_pkcs11.so  ...

in the pam configuration files. Some mappers don't map to an existing user. To allow correct login, you may also need to install pam-mkhomedir in the session pam stack. See http://www.kernel.org/pub/linux/libs/pam for details.

The following options are recognised for pam_pkcs11.so:

  debug
      Enable debugging support.

  config_file
      Specify the configuration file to use (default /etc/pam_pkcs11/pam_pkcs11.conf).

The following options should normally be taken from the configuration file, but the user may also specify them on the command line; if so, they take precedence over the configuration file:

  nullok
      Allow empty passwords.

  use_first_pass
      Do not prompt the user for the passwords but take them from the PAM_ items instead.
  try_first_pass
      Do not prompt the user for the passwords unless PAM_(OLD)AUTHTOK is unset.

  use_authtok
      Like try_first_pass, but fail if the new PAM_AUTHTOK has not been previously set (intended for stacking password modules only).

  card_only
      Always try to get the userid from the certificate; don't prompt for the user name if the card is present, and if the token is present, then we must use it to authenticate.

  wait_for_card
      This option needs card_only to be set. This will make the system wait for the token to be inserted on login, or after login it will require the same token be inserted to unlock the system.

The following options are PKCS #11 module specific:

  pkcs11_module=
      Filename of the PKCS #11 module. The default value is /etc/pam_pkcs11/pkcs11_module.so. Note that this option takes precedence over the "module" entry in the corresponding pkcs11_module section, but that section is still needed.

  slot_num=
      Slot number to use. One for the first, two for the second and so on. The default value is zero, which means to use the first slot with an available token.

  ca_dir=
      Path to the directory where the CA certificates are stored. The directory must contain an openssl hash-link to each certificate. The default value is /etc/pam_pkcs11/cacerts.

  crl_dir=
      Path to the directory where the CRLs are stored. The directory must contain an openssl hash-link to each CRL. The default value is /etc/pam_pkcs11/crls.

  crl_policy={none, online, offline, auto}
      Sets the CRL verification policy. None performs no verification at all, online downloads the CRL from the location given by the CRL distribution point extension of the certificate and offline uses the locally stored CRLs. Auto is a combination of online and offline; it first tries to download the CRL from a possibly given CRL distribution point and if this fails, uses the local CRLs. The default setting is none.

Example:

    auth sufficient pam_pkcs11.so config_file=/etc/pam_pkcs11/pam_pkcs11.conf

or (avoid if possible)

    auth sufficient pam_pkcs11.so nullok debug try_first_pass \
         pkcs11_module=/usr/lib/pkcs11/pkcs11_module.so \
         ca_dir=/etc/cacerts/ crl_dir=/etc/cacerts/ crl_policy=auto

Contact
----------------------------------------------------------------------

Any comments, suggestions and bug reports are welcome. Please mention the keywords 'pkcs' and 'pam' in the subject.

  Mario Strasser
  Juan Antonio Martinez

References
----------------------------------------------------------------------

[1] The Linux-PAM System Administrators' Guide
    http://www.kernel.org/pub/linux/libs/pam/Linux-PAM-html/pam.html
[2] The Linux-PAM Module Writers' Guide
    http://www.kernel.org/pub/linux/libs/pam/Linux-PAM-html/pam_modules.html
[3] The Linux-PAM Application Developers' Guide
    http://www.kernel.org/pub/linux/libs/pam/Linux-PAM-html/pam_appl.html
[4] PKCS #11 - Cryptographic Token Interface Standard
    http://www.rsasecurity.com/rsalabs/pkcs/pkcs-11/
[5] PKCS #11: Conformance Profile Specification
    http://www.rsasecurity.com/rsalabs/pkcs/pkcs-11/

==> OpenSC-pam_pkcs11-87edc72/README.md <==
PAM-PKCS\#11 Login Tools
========================

Description
-----------

This Linux-PAM login module allows an X.509 certificate based user login. The certificate and its dedicated private key are thereby accessed by means of an appropriate PKCS\#11 module. For the verification of the users' certificates, locally stored CA certificates as well as either online or locally accessible CRLs are used.
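To give a feel for how the module is typically wired into a PAM stack, here is a rough, hedged sketch of an `auth` section. It is not taken from the project's generated `etc/pam.d_login.example`; module paths, control flags and the surrounding stack vary by distribution:

```
# Illustrative /etc/pam.d fragment -- adapt to your distribution
auth    required    pam_securetty.so
# Try certificate-based login first; fall back to password authentication.
auth    sufficient  pam_pkcs11.so config_file=/etc/pam_pkcs11/pam_pkcs11.conf
auth    required    pam_unix.so try_first_pass
```

The `sufficient` control lets a successful smart-card authentication short-circuit the stack, while an absent or failing token falls through to `pam_unix.so`; the module's `card_only` and `wait_for_card` options (described in the plain-text README above) can tighten this to require the token.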
Detailed information about the Linux-PAM system can be found in [The Linux-PAM System Administrators' Guide](http://www.linux-pam.org/Linux-PAM-html/Linux-PAM_SAG.html), [The Linux-PAM Module Writers' Guide](http://www.linux-pam.org/Linux-PAM-html/Linux-PAM_MWG.html) and [The Linux-PAM Application Developers' Guide](http://www.linux-pam.org/Linux-PAM-html/Linux-PAM_ADG.html). The specification of the Cryptographic Token Interface Standard (PKCS\#11) is available at [PKCS\#11 - Cryptographic Token Interface Standard](https://docs.oasis-open.org/pkcs11/pkcs11-base/v2.40/os/pkcs11-base-v2.40-os.html).

The PAM-PKCS\#11 package provides:

* A PAM module able to:
  * Use certificates to get user credentials
  * Deduce a login based on the provided certificate
* Several tools:
  * Standalone cert-to-login finder tool
  * Certificate contents viewer
  * Card Event status monitor, to trigger actions on card insert/removal

You can read the online [PAM-PKCS\#11 User Manual](http://opensc.github.io/pam_pkcs11/doc/pam_pkcs11.html) to learn how to install, configure and use this software.

### PKCS\#11 Module Requirements

The PKCS\#11 modules must fulfill the requirements given by the RSA Asymmetric Client Signing Profile, which has been specified in the [PKCS\#11: Conformance Profile Specification](http://www.rsa.com/rsalabs/node.asp?id=2133) by RSA Laboratories.

### User Matching

To map the ownership of a certificate into a user login, pam-pkcs11 uses the concept of a *mapper*, that is, a configurable, stackable list of dynamic modules, each one trying to do a specific cert-to-login mapping.

Several mappers are provided:

* the common name of the subject matches the login name
* the unique identifier of the subject matches the login name
* the user part of an e-mail subject alternative name extension matches the login name
* the Microsoft universal principal name extension matches the login name
* etc... (see documentation on provided mappers)

Many mappers may also use a *mapfile* to translate certificate contents to a login name.

Download
--------

* [pam\_pkcs11-x.y.z.tar.gz](http://sourceforge.net/projects/opensc/files/pam_pkcs11/)

Packages for [various Linux distributions](https://repology.org/metapackage/pam-pkcs11) are available through their standard package management system.

Installation
------------

Unpack the archive, configure, compile and install it:

```sh
tar xvzf pkcs11_login-X.Y.Z.tar.gz
cd pkcs11_login-X.Y.Z
./configure
make
sudo make install
```

If you want to use [cURL](http://curl.haxx.se/libcurl/) instead of our native URI-functions for downloading CRLs, use `./configure --with-curl`. However, up to now cURL is not able to handle binary LDAP replies and thus CRL download might not work for all LDAP URIs.

Next, you have to create the needed openssl-hash-links.

```sh
make_hash_link.sh ${path to the directory with the CA certificates}
make_hash_link.sh ${path to the directory with the CRLs}
```

Configuration
-------------

See the [PAM-PKCS\#11 User Manual](http://opensc.github.io/pam_pkcs11/doc/pam_pkcs11.html) to configure and set up pam\_pkcs11. See the [PAM-PKCS\#11 Mappers API](http://opensc.github.io/pam_pkcs11/doc/mappers_api.html) to get advanced information on mappers (mainly for developers).
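For quick orientation, here is a minimal sketch of a PAM stack entry, based on the example shown in the plain-text README above; the control flag, service file and configuration path are placeholders to adapt to your own system:

```
# in /etc/pam.d/<service> (placeholder service name)
auth    sufficient    pam_pkcs11.so config_file=/etc/pam_pkcs11/pam_pkcs11.conf
```

Whether `sufficient`, `required` or `requisite` is the right control flag depends on how the rest of your PAM stack is arranged; see the User Manual above for complete examples.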
Documentation
-------------

* Online Manuals
  * [PAM-PKCS\#11 User Manual](http://opensc.github.io/pam_pkcs11/doc/pam_pkcs11.html)
  * [PAM-PKCS\#11 Mappers API Reference](http://opensc.github.io/pam_pkcs11/doc/mappers_api.html)
  * [TODO](https://raw.github.com/OpenSC/pam_pkcs11/master/TODO) file (outdated)
* Man pages
  * [`pam_pkcs11(8)`](https://linux.die.net/man/8/pam_pkcs11)
  * [`card_eventmgr(1)`](https://linux.die.net/man/1/card_eventmgr)
  * [`pkcs11_eventmgr(1)`](https://linux.die.net/man/1/pkcs11_eventmgr)
  * [`pklogin_finder(1)`](https://linux.die.net/man/1/pklogin_finder)
  * [`pkcs11_inspect(1)`](https://linux.die.net/man/1/pkcs11_inspect)

Contact
-------

[Get involved](https://github.com/OpenSC/pam_pkcs11/issues) in development! All comments, suggestions and bug reports are welcome.

OpenSC-pam_pkcs11-87edc72/TODO000066400000000000000000000043701475044520600157260ustar00rootroot000000000000000.6 will be a finish-code release. Fixing the source tree structure, defining the devel API and coding all to-be-written mappers are the tasks to do.

Expected things to be done in the 0.6 release:
- Create and define a pam-pkcs11 mapper API & library. This is mostly done as of 0.5.3, but some cleaning is needed.
  * Create a mapper "devel" package
  * Use the OpenSC libp11 pkcs11 library
- Add remote CA and CRL lookups. Currently, CAs and local CRLs are stored as hash dirs; this needs recoding to use URLs as data sources
- Finish mapper coding
  * opensc:
    - Generic mapping files. 0.5.3 searches in ${HOME}/.eid/authorized_certificates. Needs an additional tool to manage a "global" certificate file with user mappings
  * openssh:
    - Same as opensc. Hint: use the "comment" field of ssh public keys to store the login name
  * ldap mapper:
    - Allow use of any certificate content to make queries
    - The find() function is too expensive when navigating databases of thousands of users. Need to optimize search filters.
  * database mapper:
    - Define and create a UnixODBC-based database mapper
  * Compile as static all mappers that do not depend on extra libraries

0.7 is an attempt at real-life deployment: MS Active Directory configuration, NSS-aware configurations, LDAP settings, many samples and docs, general cleanups, etc.

Things to be done in the 0.7 release:
- Review all mappers that depend on remote connections.
  * conditional queries instead of a getpwent() query loop
- Allow pam-pkcs11 login against MS Active Directory
  * Changes to MS_mapper to make real use of the UPN domain
  * Documentation and samples
- Manuals on LDAP, NSS and similar installations
- ncurses (gtk?) tool to create/edit mapfiles

0.8 will be a major cleanup: bugfixes, optimizations, pam-session handling. Most important: a PKINIT-aware pam module is scheduled here.

Things to be done in the 0.8 release:
- Ask for the PIN only when needed
- Use the certificate for authentication only if it is available
- Implement the Kerberos PKINIT specification. Rewrite the kpn mapper
- Check the content type of cert fields instead of assuming utf-8
- Proper handling of free() calls when needed

0.9 will be a preview version. No more items are expected to be added, just bugfixes and feedback from users. Perhaps it's time for i18n issues.

1.0 That's all folks!
OpenSC-pam_pkcs11-87edc72/aclocal/000077500000000000000000000000001475044520600166305ustar00rootroot00000000000000OpenSC-pam_pkcs11-87edc72/aclocal/ax_pthread.m4000066400000000000000000000326761475044520600212270ustar00rootroot00000000000000# =========================================================================== # http://www.gnu.org/software/autoconf-archive/ax_pthread.html # =========================================================================== # # SYNOPSIS # # AX_PTHREAD([ACTION-IF-FOUND[, ACTION-IF-NOT-FOUND]]) # # DESCRIPTION # # This macro figures out how to build C programs using POSIX threads. It # sets the PTHREAD_LIBS output variable to the threads library and linker # flags, and the PTHREAD_CFLAGS output variable to any special C compiler # flags that are needed. (The user can also force certain compiler # flags/libs to be tested by setting these environment variables.) # # Also sets PTHREAD_CC to any special C compiler that is needed for # multi-threaded programs (defaults to the value of CC otherwise). (This # is necessary on AIX to use the special cc_r compiler alias.) # # NOTE: You are assumed to not only compile your program with these flags, # but also link it with them as well. e.g. you should link with # $PTHREAD_CC $CFLAGS $PTHREAD_CFLAGS $LDFLAGS ... $PTHREAD_LIBS $LIBS # # If you are only building threads programs, you may wish to use these # variables in your default LIBS, CFLAGS, and CC: # # LIBS="$PTHREAD_LIBS $LIBS" # CFLAGS="$CFLAGS $PTHREAD_CFLAGS" # CC="$PTHREAD_CC" # # In addition, if the PTHREAD_CREATE_JOINABLE thread-attribute constant # has a nonstandard name, defines PTHREAD_CREATE_JOINABLE to that name # (e.g. PTHREAD_CREATE_UNDETACHED on AIX). # # Also HAVE_PTHREAD_PRIO_INHERIT is defined if pthread is found and the # PTHREAD_PRIO_INHERIT symbol is defined when compiling with # PTHREAD_CFLAGS. # # ACTION-IF-FOUND is a list of shell commands to run if a threads library # is found, and ACTION-IF-NOT-FOUND is a list of commands to run it if it # is not found. If ACTION-IF-FOUND is not specified, the default action # will define HAVE_PTHREAD. # # Please let the authors know if this macro fails on any platform, or if # you have any other suggestions or comments. This macro was based on work # by SGJ on autoconf scripts for FFTW (http://www.fftw.org/) (with help # from M. Frigo), as well as ac_pthread and hb_pthread macros posted by # Alejandro Forero Cuervo to the autoconf macro repository. We are also # grateful for the helpful feedback of numerous users. # # Updated for Autoconf 2.68 by Daniel Richard G. # # LICENSE # # Copyright (c) 2008 Steven G. Johnson # Copyright (c) 2011 Daniel Richard G. # # This program is free software: you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by the # Free Software Foundation, either version 3 of the License, or (at your # option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program. If not, see . # # As a special exception, the respective Autoconf Macro's copyright owner # gives unlimited permission to copy, distribute and modify the configure # scripts that are the output of Autoconf when processing the Macro. 
You # need not follow the terms of the GNU General Public License when using # or distributing such scripts, even though portions of the text of the # Macro appear in them. The GNU General Public License (GPL) does govern # all other use of the material that constitutes the Autoconf Macro. # # This special exception to the GPL applies to versions of the Autoconf # Macro released by the Autoconf Archive. When you make and distribute a # modified version of the Autoconf Macro, you may extend this special # exception to the GPL to apply to your modified version as well. #serial 21 AU_ALIAS([ACX_PTHREAD], [AX_PTHREAD]) AC_DEFUN([AX_PTHREAD], [ AC_REQUIRE([AC_CANONICAL_HOST]) AC_LANG_PUSH([C]) ax_pthread_ok=no # We used to check for pthread.h first, but this fails if pthread.h # requires special compiler flags (e.g. on True64 or Sequent). # It gets checked for in the link test anyway. # First of all, check if the user has set any of the PTHREAD_LIBS, # etcetera environment variables, and if threads linking works using # them: if test x"$PTHREAD_LIBS$PTHREAD_CFLAGS" != x; then save_CFLAGS="$CFLAGS" CFLAGS="$CFLAGS $PTHREAD_CFLAGS" save_LIBS="$LIBS" LIBS="$PTHREAD_LIBS $LIBS" AC_MSG_CHECKING([for pthread_join in LIBS=$PTHREAD_LIBS with CFLAGS=$PTHREAD_CFLAGS]) AC_TRY_LINK_FUNC([pthread_join], [ax_pthread_ok=yes]) AC_MSG_RESULT([$ax_pthread_ok]) if test x"$ax_pthread_ok" = xno; then PTHREAD_LIBS="" PTHREAD_CFLAGS="" fi LIBS="$save_LIBS" CFLAGS="$save_CFLAGS" fi # We must check for the threads library under a number of different # names; the ordering is very important because some systems # (e.g. DEC) have both -lpthread and -lpthreads, where one of the # libraries is broken (non-POSIX). # Create a list of thread flags to try. Items starting with a "-" are # C compiler flags, and other items are library names, except for "none" # which indicates that we try without any flags at all, and "pthread-config" # which is a program returning the flags for the Pth emulation library. ax_pthread_flags="pthreads none -Kthread -kthread lthread -pthread -pthreads -mthreads pthread --thread-safe -mt pthread-config" # The ordering *is* (sometimes) important. Some notes on the # individual items follow: # pthreads: AIX (must check this before -lpthread) # none: in case threads are in libc; should be tried before -Kthread and # other compiler flags to prevent continual compiler warnings # -Kthread: Sequent (threads in libc, but -Kthread needed for pthread.h) # -kthread: FreeBSD kernel threads (preferred to -pthread since SMP-able) # lthread: LinuxThreads port on FreeBSD (also preferred to -pthread) # -pthread: Linux/gcc (kernel threads), BSD/gcc (userland threads) # -pthreads: Solaris/gcc # -mthreads: Mingw32/gcc, Lynx/gcc # -mt: Sun Workshop C (may only link SunOS threads [-lthread], but it # doesn't hurt to check since this sometimes defines pthreads too; # also defines -D_REENTRANT) # ... -mt is also the pthreads flag for HP/aCC # pthread: Linux, etcetera # --thread-safe: KAI C++ # pthread-config: use pthread-config program (for GNU Pth library) case ${host_os} in solaris*) # On Solaris (at least, for some versions), libc contains stubbed # (non-functional) versions of the pthreads routines, so link-based # tests will erroneously succeed. (We need to link with -pthreads/-mt/ # -lpthread.) (The stubs are missing pthread_cleanup_push, or rather # a function called by this macro, so we could check for that, but # who knows whether they'll stub that too in a future libc.) 
So, # we'll just look for -pthreads and -lpthread first: ax_pthread_flags="-pthreads pthread -mt -pthread $ax_pthread_flags" ;; darwin*) ax_pthread_flags="-pthread $ax_pthread_flags" ;; esac # Clang doesn't consider unrecognized options an error unless we specify # -Werror. We throw in some extra Clang-specific options to ensure that # this doesn't happen for GCC, which also accepts -Werror. AC_MSG_CHECKING([if compiler needs -Werror to reject unknown flags]) save_CFLAGS="$CFLAGS" ax_pthread_extra_flags="-Werror" CFLAGS="$CFLAGS $ax_pthread_extra_flags -Wunknown-warning-option -Wsizeof-array-argument" AC_COMPILE_IFELSE([AC_LANG_PROGRAM([int foo(void);],[foo()])], [AC_MSG_RESULT([yes])], [ax_pthread_extra_flags= AC_MSG_RESULT([no])]) CFLAGS="$save_CFLAGS" if test x"$ax_pthread_ok" = xno; then for flag in $ax_pthread_flags; do case $flag in none) AC_MSG_CHECKING([whether pthreads work without any flags]) ;; -*) AC_MSG_CHECKING([whether pthreads work with $flag]) PTHREAD_CFLAGS="$flag" ;; pthread-config) AC_CHECK_PROG([ax_pthread_config], [pthread-config], [yes], [no]) if test x"$ax_pthread_config" = xno; then continue; fi PTHREAD_CFLAGS="`pthread-config --cflags`" PTHREAD_LIBS="`pthread-config --ldflags` `pthread-config --libs`" ;; *) AC_MSG_CHECKING([for the pthreads library -l$flag]) PTHREAD_LIBS="-l$flag" ;; esac save_LIBS="$LIBS" save_CFLAGS="$CFLAGS" LIBS="$PTHREAD_LIBS $LIBS" CFLAGS="$CFLAGS $PTHREAD_CFLAGS $ax_pthread_extra_flags" # Check for various functions. We must include pthread.h, # since some functions may be macros. (On the Sequent, we # need a special flag -Kthread to make this header compile.) # We check for pthread_join because it is in -lpthread on IRIX # while pthread_create is in libc. We check for pthread_attr_init # due to DEC craziness with -lpthreads. We check for # pthread_cleanup_push because it is one of the few pthread # functions on Solaris that doesn't have a non-functional libc stub. # We try pthread_create on general principles. AC_LINK_IFELSE([AC_LANG_PROGRAM([#include static void routine(void *a) { a = 0; } static void *start_routine(void *a) { return a; }], [pthread_t th; pthread_attr_t attr; pthread_create(&th, 0, start_routine, 0); pthread_join(th, 0); pthread_attr_init(&attr); pthread_cleanup_push(routine, 0); pthread_cleanup_pop(0) /* ; */])], [ax_pthread_ok=yes], []) LIBS="$save_LIBS" CFLAGS="$save_CFLAGS" AC_MSG_RESULT([$ax_pthread_ok]) if test "x$ax_pthread_ok" = xyes; then break; fi PTHREAD_LIBS="" PTHREAD_CFLAGS="" done fi # Various other checks: if test "x$ax_pthread_ok" = xyes; then save_LIBS="$LIBS" LIBS="$PTHREAD_LIBS $LIBS" save_CFLAGS="$CFLAGS" CFLAGS="$CFLAGS $PTHREAD_CFLAGS" # Detect AIX lossage: JOINABLE attribute is called UNDETACHED. AC_MSG_CHECKING([for joinable pthread attribute]) attr_name=unknown for attr in PTHREAD_CREATE_JOINABLE PTHREAD_CREATE_UNDETACHED; do AC_LINK_IFELSE([AC_LANG_PROGRAM([#include ], [int attr = $attr; return attr /* ; */])], [attr_name=$attr; break], []) done AC_MSG_RESULT([$attr_name]) if test "$attr_name" != PTHREAD_CREATE_JOINABLE; then AC_DEFINE_UNQUOTED([PTHREAD_CREATE_JOINABLE], [$attr_name], [Define to necessary symbol if this constant uses a non-standard name on your system.]) fi AC_MSG_CHECKING([if more special flags are required for pthreads]) flag=no case ${host_os} in aix* | freebsd* | darwin*) flag="-D_THREAD_SAFE";; osf* | hpux*) flag="-D_REENTRANT";; solaris*) if test "$GCC" = "yes"; then flag="-D_REENTRANT" else # TODO: What about Clang on Solaris? 
flag="-mt -D_REENTRANT" fi ;; esac AC_MSG_RESULT([$flag]) if test "x$flag" != xno; then PTHREAD_CFLAGS="$flag $PTHREAD_CFLAGS" fi AC_CACHE_CHECK([for PTHREAD_PRIO_INHERIT], [ax_cv_PTHREAD_PRIO_INHERIT], [ AC_LINK_IFELSE([AC_LANG_PROGRAM([[#include ]], [[int i = PTHREAD_PRIO_INHERIT;]])], [ax_cv_PTHREAD_PRIO_INHERIT=yes], [ax_cv_PTHREAD_PRIO_INHERIT=no]) ]) AS_IF([test "x$ax_cv_PTHREAD_PRIO_INHERIT" = "xyes"], [AC_DEFINE([HAVE_PTHREAD_PRIO_INHERIT], [1], [Have PTHREAD_PRIO_INHERIT.])]) LIBS="$save_LIBS" CFLAGS="$save_CFLAGS" # More AIX lossage: compile with *_r variant if test "x$GCC" != xyes; then case $host_os in aix*) AS_CASE(["x/$CC"], [x*/c89|x*/c89_128|x*/c99|x*/c99_128|x*/cc|x*/cc128|x*/xlc|x*/xlc_v6|x*/xlc128|x*/xlc128_v6], [#handle absolute path differently from PATH based program lookup AS_CASE(["x$CC"], [x/*], [AS_IF([AS_EXECUTABLE_P([${CC}_r])],[PTHREAD_CC="${CC}_r"])], [AC_CHECK_PROGS([PTHREAD_CC],[${CC}_r],[$CC])])]) ;; esac fi fi test -n "$PTHREAD_CC" || PTHREAD_CC="$CC" AC_SUBST([PTHREAD_LIBS]) AC_SUBST([PTHREAD_CFLAGS]) AC_SUBST([PTHREAD_CC]) # Finally, execute ACTION-IF-FOUND/ACTION-IF-NOT-FOUND: if test x"$ax_pthread_ok" = xyes; then ifelse([$1],,[AC_DEFINE([HAVE_PTHREAD],[1],[Define if you have POSIX threads libraries and header files.])],[$1]) : else ax_pthread_ok=no $2 fi AC_LANG_POP ])dnl AX_PTHREAD OpenSC-pam_pkcs11-87edc72/aclocal/gettext.m4000066400000000000000000000356151475044520600205700ustar00rootroot00000000000000# gettext.m4 serial 66 (gettext-0.18.2) dnl Copyright (C) 1995-2014 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl dnl This file can can be used in projects which are not available under dnl the GNU General Public License or the GNU Library General Public dnl License but which still want to provide support for the GNU gettext dnl functionality. dnl Please note that the actual code of the GNU gettext library is covered dnl by the GNU Library General Public License, and the rest of the GNU dnl gettext package package is covered by the GNU General Public License. dnl They are *not* in the public domain. dnl Authors: dnl Ulrich Drepper , 1995-2000. dnl Bruno Haible , 2000-2006, 2008-2010. dnl Macro to add for using GNU gettext. dnl Usage: AM_GNU_GETTEXT([INTLSYMBOL], [NEEDSYMBOL], [INTLDIR]). dnl INTLSYMBOL can be one of 'external', 'no-libtool', 'use-libtool'. The dnl default (if it is not specified or empty) is 'no-libtool'. dnl INTLSYMBOL should be 'external' for packages with no intl directory, dnl and 'no-libtool' or 'use-libtool' for packages with an intl directory. dnl If INTLSYMBOL is 'use-libtool', then a libtool library dnl $(top_builddir)/intl/libintl.la will be created (shared and/or static, dnl depending on --{enable,disable}-{shared,static} and on the presence of dnl AM-DISABLE-SHARED). If INTLSYMBOL is 'no-libtool', a static library dnl $(top_builddir)/intl/libintl.a will be created. dnl If NEEDSYMBOL is specified and is 'need-ngettext', then GNU gettext dnl implementations (in libc or libintl) without the ngettext() function dnl will be ignored. If NEEDSYMBOL is specified and is dnl 'need-formatstring-macros', then GNU gettext implementations that don't dnl support the ISO C 99 formatstring macros will be ignored. dnl INTLDIR is used to find the intl libraries. If empty, dnl the value '$(top_builddir)/intl/' is used. 
dnl dnl The result of the configuration is one of three cases: dnl 1) GNU gettext, as included in the intl subdirectory, will be compiled dnl and used. dnl Catalog format: GNU --> install in $(datadir) dnl Catalog extension: .mo after installation, .gmo in source tree dnl 2) GNU gettext has been found in the system's C library. dnl Catalog format: GNU --> install in $(datadir) dnl Catalog extension: .mo after installation, .gmo in source tree dnl 3) No internationalization, always use English msgid. dnl Catalog format: none dnl Catalog extension: none dnl If INTLSYMBOL is 'external', only cases 2 and 3 can occur. dnl The use of .gmo is historical (it was needed to avoid overwriting the dnl GNU format catalogs when building on a platform with an X/Open gettext), dnl but we keep it in order not to force irrelevant filename changes on the dnl maintainers. dnl AC_DEFUN([AM_GNU_GETTEXT], [ dnl Argument checking. ifelse([$1], [], , [ifelse([$1], [external], , [ifelse([$1], [no-libtool], , [ifelse([$1], [use-libtool], , [errprint([ERROR: invalid first argument to AM_GNU_GETTEXT ])])])])]) ifelse(ifelse([$1], [], [old])[]ifelse([$1], [no-libtool], [old]), [old], [AC_DIAGNOSE([obsolete], [Use of AM_GNU_GETTEXT without [external] argument is deprecated.])]) ifelse([$2], [], , [ifelse([$2], [need-ngettext], , [ifelse([$2], [need-formatstring-macros], , [errprint([ERROR: invalid second argument to AM_GNU_GETTEXT ])])])]) define([gt_included_intl], ifelse([$1], [external], ifdef([AM_GNU_GETTEXT_][INTL_SUBDIR], [yes], [no]), [yes])) define([gt_libtool_suffix_prefix], ifelse([$1], [use-libtool], [l], [])) gt_NEEDS_INIT AM_GNU_GETTEXT_NEED([$2]) AC_REQUIRE([AM_PO_SUBDIRS])dnl ifelse(gt_included_intl, yes, [ AC_REQUIRE([AM_INTL_SUBDIR])dnl ]) dnl Prerequisites of AC_LIB_LINKFLAGS_BODY. AC_REQUIRE([AC_LIB_PREPARE_PREFIX]) AC_REQUIRE([AC_LIB_RPATH]) dnl Sometimes libintl requires libiconv, so first search for libiconv. dnl Ideally we would do this search only after the dnl if test "$USE_NLS" = "yes"; then dnl if { eval "gt_val=\$$gt_func_gnugettext_libc"; test "$gt_val" != "yes"; }; then dnl tests. But if configure.in invokes AM_ICONV after AM_GNU_GETTEXT dnl the configure script would need to contain the same shell code dnl again, outside any 'if'. There are two solutions: dnl - Invoke AM_ICONV_LINKFLAGS_BODY here, outside any 'if'. dnl - Control the expansions in more detail using AC_PROVIDE_IFELSE. dnl Since AC_PROVIDE_IFELSE is only in autoconf >= 2.52 and not dnl documented, we avoid it. ifelse(gt_included_intl, yes, , [ AC_REQUIRE([AM_ICONV_LINKFLAGS_BODY]) ]) dnl Sometimes, on Mac OS X, libintl requires linking with CoreFoundation. gt_INTL_MACOSX dnl Set USE_NLS. AC_REQUIRE([AM_NLS]) ifelse(gt_included_intl, yes, [ BUILD_INCLUDED_LIBINTL=no USE_INCLUDED_LIBINTL=no ]) LIBINTL= LTLIBINTL= POSUB= dnl Add a version number to the cache macros. 
case " $gt_needs " in *" need-formatstring-macros "*) gt_api_version=3 ;; *" need-ngettext "*) gt_api_version=2 ;; *) gt_api_version=1 ;; esac gt_func_gnugettext_libc="gt_cv_func_gnugettext${gt_api_version}_libc" gt_func_gnugettext_libintl="gt_cv_func_gnugettext${gt_api_version}_libintl" dnl If we use NLS figure out what method if test "$USE_NLS" = "yes"; then gt_use_preinstalled_gnugettext=no ifelse(gt_included_intl, yes, [ AC_MSG_CHECKING([whether included gettext is requested]) AC_ARG_WITH([included-gettext], [ --with-included-gettext use the GNU gettext library included here], nls_cv_force_use_gnu_gettext=$withval, nls_cv_force_use_gnu_gettext=no) AC_MSG_RESULT([$nls_cv_force_use_gnu_gettext]) nls_cv_use_gnu_gettext="$nls_cv_force_use_gnu_gettext" if test "$nls_cv_force_use_gnu_gettext" != "yes"; then ]) dnl User does not insist on using GNU NLS library. Figure out what dnl to use. If GNU gettext is available we use this. Else we have dnl to fall back to GNU NLS library. if test $gt_api_version -ge 3; then gt_revision_test_code=' #ifndef __GNU_GETTEXT_SUPPORTED_REVISION #define __GNU_GETTEXT_SUPPORTED_REVISION(major) ((major) == 0 ? 0 : -1) #endif changequote(,)dnl typedef int array [2 * (__GNU_GETTEXT_SUPPORTED_REVISION(0) >= 1) - 1]; changequote([,])dnl ' else gt_revision_test_code= fi if test $gt_api_version -ge 2; then gt_expression_test_code=' + * ngettext ("", "", 0)' else gt_expression_test_code= fi AC_CACHE_CHECK([for GNU gettext in libc], [$gt_func_gnugettext_libc], [AC_LINK_IFELSE( [AC_LANG_PROGRAM( [[ #include $gt_revision_test_code extern int _nl_msg_cat_cntr; extern int *_nl_domain_bindings; ]], [[ bindtextdomain ("", ""); return * gettext ("")$gt_expression_test_code + _nl_msg_cat_cntr + *_nl_domain_bindings ]])], [eval "$gt_func_gnugettext_libc=yes"], [eval "$gt_func_gnugettext_libc=no"])]) if { eval "gt_val=\$$gt_func_gnugettext_libc"; test "$gt_val" != "yes"; }; then dnl Sometimes libintl requires libiconv, so first search for libiconv. ifelse(gt_included_intl, yes, , [ AM_ICONV_LINK ]) dnl Search for libintl and define LIBINTL, LTLIBINTL and INCINTL dnl accordingly. Don't use AC_LIB_LINKFLAGS_BODY([intl],[iconv]) dnl because that would add "-liconv" to LIBINTL and LTLIBINTL dnl even if libiconv doesn't exist. AC_LIB_LINKFLAGS_BODY([intl]) AC_CACHE_CHECK([for GNU gettext in libintl], [$gt_func_gnugettext_libintl], [gt_save_CPPFLAGS="$CPPFLAGS" CPPFLAGS="$CPPFLAGS $INCINTL" gt_save_LIBS="$LIBS" LIBS="$LIBS $LIBINTL" dnl Now see whether libintl exists and does not depend on libiconv. AC_LINK_IFELSE( [AC_LANG_PROGRAM( [[ #include $gt_revision_test_code extern int _nl_msg_cat_cntr; extern #ifdef __cplusplus "C" #endif const char *_nl_expand_alias (const char *); ]], [[ bindtextdomain ("", ""); return * gettext ("")$gt_expression_test_code + _nl_msg_cat_cntr + *_nl_expand_alias ("") ]])], [eval "$gt_func_gnugettext_libintl=yes"], [eval "$gt_func_gnugettext_libintl=no"]) dnl Now see whether libintl exists and depends on libiconv. 
if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" != yes; } && test -n "$LIBICONV"; then LIBS="$LIBS $LIBICONV" AC_LINK_IFELSE( [AC_LANG_PROGRAM( [[ #include $gt_revision_test_code extern int _nl_msg_cat_cntr; extern #ifdef __cplusplus "C" #endif const char *_nl_expand_alias (const char *); ]], [[ bindtextdomain ("", ""); return * gettext ("")$gt_expression_test_code + _nl_msg_cat_cntr + *_nl_expand_alias ("") ]])], [LIBINTL="$LIBINTL $LIBICONV" LTLIBINTL="$LTLIBINTL $LTLIBICONV" eval "$gt_func_gnugettext_libintl=yes" ]) fi CPPFLAGS="$gt_save_CPPFLAGS" LIBS="$gt_save_LIBS"]) fi dnl If an already present or preinstalled GNU gettext() is found, dnl use it. But if this macro is used in GNU gettext, and GNU dnl gettext is already preinstalled in libintl, we update this dnl libintl. (Cf. the install rule in intl/Makefile.in.) if { eval "gt_val=\$$gt_func_gnugettext_libc"; test "$gt_val" = "yes"; } \ || { { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; } \ && test "$PACKAGE" != gettext-runtime \ && test "$PACKAGE" != gettext-tools; }; then gt_use_preinstalled_gnugettext=yes else dnl Reset the values set by searching for libintl. LIBINTL= LTLIBINTL= INCINTL= fi ifelse(gt_included_intl, yes, [ if test "$gt_use_preinstalled_gnugettext" != "yes"; then dnl GNU gettext is not found in the C library. dnl Fall back on included GNU gettext library. nls_cv_use_gnu_gettext=yes fi fi if test "$nls_cv_use_gnu_gettext" = "yes"; then dnl Mark actions used to generate GNU NLS library. BUILD_INCLUDED_LIBINTL=yes USE_INCLUDED_LIBINTL=yes LIBINTL="ifelse([$3],[],\${top_builddir}/intl,[$3])/libintl.[]gt_libtool_suffix_prefix[]a $LIBICONV $LIBTHREAD" LTLIBINTL="ifelse([$3],[],\${top_builddir}/intl,[$3])/libintl.[]gt_libtool_suffix_prefix[]a $LTLIBICONV $LTLIBTHREAD" LIBS=`echo " $LIBS " | sed -e 's/ -lintl / /' -e 's/^ //' -e 's/ $//'` fi CATOBJEXT= if test "$gt_use_preinstalled_gnugettext" = "yes" \ || test "$nls_cv_use_gnu_gettext" = "yes"; then dnl Mark actions to use GNU gettext tools. CATOBJEXT=.gmo fi ]) if test -n "$INTL_MACOSX_LIBS"; then if test "$gt_use_preinstalled_gnugettext" = "yes" \ || test "$nls_cv_use_gnu_gettext" = "yes"; then dnl Some extra flags are needed during linking. LIBINTL="$LIBINTL $INTL_MACOSX_LIBS" LTLIBINTL="$LTLIBINTL $INTL_MACOSX_LIBS" fi fi if test "$gt_use_preinstalled_gnugettext" = "yes" \ || test "$nls_cv_use_gnu_gettext" = "yes"; then AC_DEFINE([ENABLE_NLS], [1], [Define to 1 if translation of program messages to the user's native language is requested.]) else USE_NLS=no fi fi AC_MSG_CHECKING([whether to use NLS]) AC_MSG_RESULT([$USE_NLS]) if test "$USE_NLS" = "yes"; then AC_MSG_CHECKING([where the gettext function comes from]) if test "$gt_use_preinstalled_gnugettext" = "yes"; then if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; }; then gt_source="external libintl" else gt_source="libc" fi else gt_source="included intl directory" fi AC_MSG_RESULT([$gt_source]) fi if test "$USE_NLS" = "yes"; then if test "$gt_use_preinstalled_gnugettext" = "yes"; then if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; }; then AC_MSG_CHECKING([how to link with libintl]) AC_MSG_RESULT([$LIBINTL]) AC_LIB_APPENDTOVAR([CPPFLAGS], [$INCINTL]) fi dnl For backward compatibility. Some packages may be using this. 
AC_DEFINE([HAVE_GETTEXT], [1], [Define if the GNU gettext() function is already present or preinstalled.]) AC_DEFINE([HAVE_DCGETTEXT], [1], [Define if the GNU dcgettext() function is already present or preinstalled.]) fi dnl We need to process the po/ directory. POSUB=po fi ifelse(gt_included_intl, yes, [ dnl If this is used in GNU gettext we have to set BUILD_INCLUDED_LIBINTL dnl to 'yes' because some of the testsuite requires it. if test "$PACKAGE" = gettext-runtime || test "$PACKAGE" = gettext-tools; then BUILD_INCLUDED_LIBINTL=yes fi dnl Make all variables we use known to autoconf. AC_SUBST([BUILD_INCLUDED_LIBINTL]) AC_SUBST([USE_INCLUDED_LIBINTL]) AC_SUBST([CATOBJEXT]) dnl For backward compatibility. Some configure.ins may be using this. nls_cv_header_intl= nls_cv_header_libgt= dnl For backward compatibility. Some Makefiles may be using this. DATADIRNAME=share AC_SUBST([DATADIRNAME]) dnl For backward compatibility. Some Makefiles may be using this. INSTOBJEXT=.mo AC_SUBST([INSTOBJEXT]) dnl For backward compatibility. Some Makefiles may be using this. GENCAT=gencat AC_SUBST([GENCAT]) dnl For backward compatibility. Some Makefiles may be using this. INTLOBJS= if test "$USE_INCLUDED_LIBINTL" = yes; then INTLOBJS="\$(GETTOBJS)" fi AC_SUBST([INTLOBJS]) dnl Enable libtool support if the surrounding package wishes it. INTL_LIBTOOL_SUFFIX_PREFIX=gt_libtool_suffix_prefix AC_SUBST([INTL_LIBTOOL_SUFFIX_PREFIX]) ]) dnl For backward compatibility. Some Makefiles may be using this. INTLLIBS="$LIBINTL" AC_SUBST([INTLLIBS]) dnl Make all documented variables known to autoconf. AC_SUBST([LIBINTL]) AC_SUBST([LTLIBINTL]) AC_SUBST([POSUB]) ]) dnl gt_NEEDS_INIT ensures that the gt_needs variable is initialized. m4_define([gt_NEEDS_INIT], [ m4_divert_text([DEFAULTS], [gt_needs=]) m4_define([gt_NEEDS_INIT], []) ]) dnl Usage: AM_GNU_GETTEXT_NEED([NEEDSYMBOL]) AC_DEFUN([AM_GNU_GETTEXT_NEED], [ m4_divert_text([INIT_PREPARE], [gt_needs="$gt_needs $1"]) ]) dnl Usage: AM_GNU_GETTEXT_VERSION([gettext-version]) AC_DEFUN([AM_GNU_GETTEXT_VERSION], []) OpenSC-pam_pkcs11-87edc72/aclocal/iconv.m4000066400000000000000000000216201475044520600202110ustar00rootroot00000000000000# iconv.m4 serial 18 (gettext-0.18.2) dnl Copyright (C) 2000-2002, 2007-2014 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl From Bruno Haible. AC_DEFUN([AM_ICONV_LINKFLAGS_BODY], [ dnl Prerequisites of AC_LIB_LINKFLAGS_BODY. AC_REQUIRE([AC_LIB_PREPARE_PREFIX]) AC_REQUIRE([AC_LIB_RPATH]) dnl Search for libiconv and define LIBICONV, LTLIBICONV and INCICONV dnl accordingly. AC_LIB_LINKFLAGS_BODY([iconv]) ]) AC_DEFUN([AM_ICONV_LINK], [ dnl Some systems have iconv in libc, some have it in libiconv (OSF/1 and dnl those with the standalone portable GNU libiconv installed). AC_REQUIRE([AC_CANONICAL_HOST]) dnl for cross-compiles dnl Search for libiconv and define LIBICONV, LTLIBICONV and INCICONV dnl accordingly. AC_REQUIRE([AM_ICONV_LINKFLAGS_BODY]) dnl Add $INCICONV to CPPFLAGS before performing the following checks, dnl because if the user has installed libiconv and not disabled its use dnl via --without-libiconv-prefix, he wants to use it. The first dnl AC_LINK_IFELSE will then fail, the second AC_LINK_IFELSE will succeed. 
am_save_CPPFLAGS="$CPPFLAGS" AC_LIB_APPENDTOVAR([CPPFLAGS], [$INCICONV]) AC_CACHE_CHECK([for iconv], [am_cv_func_iconv], [ am_cv_func_iconv="no, consider installing GNU libiconv" am_cv_lib_iconv=no AC_LINK_IFELSE( [AC_LANG_PROGRAM( [[ #include #include ]], [[iconv_t cd = iconv_open("",""); iconv(cd,NULL,NULL,NULL,NULL); iconv_close(cd);]])], [am_cv_func_iconv=yes]) if test "$am_cv_func_iconv" != yes; then am_save_LIBS="$LIBS" LIBS="$LIBS $LIBICONV" AC_LINK_IFELSE( [AC_LANG_PROGRAM( [[ #include #include ]], [[iconv_t cd = iconv_open("",""); iconv(cd,NULL,NULL,NULL,NULL); iconv_close(cd);]])], [am_cv_lib_iconv=yes] [am_cv_func_iconv=yes]) LIBS="$am_save_LIBS" fi ]) if test "$am_cv_func_iconv" = yes; then AC_CACHE_CHECK([for working iconv], [am_cv_func_iconv_works], [ dnl This tests against bugs in AIX 5.1, AIX 6.1..7.1, HP-UX 11.11, dnl Solaris 10. am_save_LIBS="$LIBS" if test $am_cv_lib_iconv = yes; then LIBS="$LIBS $LIBICONV" fi AC_RUN_IFELSE( [AC_LANG_SOURCE([[ #include #include int main () { int result = 0; /* Test against AIX 5.1 bug: Failures are not distinguishable from successful returns. */ { iconv_t cd_utf8_to_88591 = iconv_open ("ISO8859-1", "UTF-8"); if (cd_utf8_to_88591 != (iconv_t)(-1)) { static const char input[] = "\342\202\254"; /* EURO SIGN */ char buf[10]; const char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_utf8_to_88591, (char **) &inptr, &inbytesleft, &outptr, &outbytesleft); if (res == 0) result |= 1; iconv_close (cd_utf8_to_88591); } } /* Test against Solaris 10 bug: Failures are not distinguishable from successful returns. */ { iconv_t cd_ascii_to_88591 = iconv_open ("ISO8859-1", "646"); if (cd_ascii_to_88591 != (iconv_t)(-1)) { static const char input[] = "\263"; char buf[10]; const char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_ascii_to_88591, (char **) &inptr, &inbytesleft, &outptr, &outbytesleft); if (res == 0) result |= 2; iconv_close (cd_ascii_to_88591); } } /* Test against AIX 6.1..7.1 bug: Buffer overrun. */ { iconv_t cd_88591_to_utf8 = iconv_open ("UTF-8", "ISO-8859-1"); if (cd_88591_to_utf8 != (iconv_t)(-1)) { static const char input[] = "\304"; static char buf[2] = { (char)0xDE, (char)0xAD }; const char *inptr = input; size_t inbytesleft = 1; char *outptr = buf; size_t outbytesleft = 1; size_t res = iconv (cd_88591_to_utf8, (char **) &inptr, &inbytesleft, &outptr, &outbytesleft); if (res != (size_t)(-1) || outptr - buf > 1 || buf[1] != (char)0xAD) result |= 4; iconv_close (cd_88591_to_utf8); } } #if 0 /* This bug could be worked around by the caller. */ /* Test against HP-UX 11.11 bug: Positive return value instead of 0. */ { iconv_t cd_88591_to_utf8 = iconv_open ("utf8", "iso88591"); if (cd_88591_to_utf8 != (iconv_t)(-1)) { static const char input[] = "\304rger mit b\366sen B\374bchen ohne Augenma\337"; char buf[50]; const char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_88591_to_utf8, (char **) &inptr, &inbytesleft, &outptr, &outbytesleft); if ((int)res > 0) result |= 8; iconv_close (cd_88591_to_utf8); } } #endif /* Test against HP-UX 11.11 bug: No converter from EUC-JP to UTF-8 is provided. */ if (/* Try standardized names. */ iconv_open ("UTF-8", "EUC-JP") == (iconv_t)(-1) /* Try IRIX, OSF/1 names. */ && iconv_open ("UTF-8", "eucJP") == (iconv_t)(-1) /* Try AIX names. 
*/ && iconv_open ("UTF-8", "IBM-eucJP") == (iconv_t)(-1) /* Try HP-UX names. */ && iconv_open ("utf8", "eucJP") == (iconv_t)(-1)) result |= 16; return result; }]])], [am_cv_func_iconv_works=yes], [am_cv_func_iconv_works=no], [ changequote(,)dnl case "$host_os" in aix* | hpux*) am_cv_func_iconv_works="guessing no" ;; *) am_cv_func_iconv_works="guessing yes" ;; esac changequote([,])dnl ]) LIBS="$am_save_LIBS" ]) case "$am_cv_func_iconv_works" in *no) am_func_iconv=no am_cv_lib_iconv=no ;; *) am_func_iconv=yes ;; esac else am_func_iconv=no am_cv_lib_iconv=no fi if test "$am_func_iconv" = yes; then AC_DEFINE([HAVE_ICONV], [1], [Define if you have the iconv() function and it works.]) fi if test "$am_cv_lib_iconv" = yes; then AC_MSG_CHECKING([how to link with libiconv]) AC_MSG_RESULT([$LIBICONV]) else dnl If $LIBICONV didn't lead to a usable library, we don't need $INCICONV dnl either. CPPFLAGS="$am_save_CPPFLAGS" LIBICONV= LTLIBICONV= fi AC_SUBST([LIBICONV]) AC_SUBST([LTLIBICONV]) ]) dnl Define AM_ICONV using AC_DEFUN_ONCE for Autoconf >= 2.64, in order to dnl avoid warnings like dnl "warning: AC_REQUIRE: `AM_ICONV' was expanded before it was required". dnl This is tricky because of the way 'aclocal' is implemented: dnl - It requires defining an auxiliary macro whose name ends in AC_DEFUN. dnl Otherwise aclocal's initial scan pass would miss the macro definition. dnl - It requires a line break inside the AC_DEFUN_ONCE and AC_DEFUN expansions. dnl Otherwise aclocal would emit many "Use of uninitialized value $1" dnl warnings. m4_define([gl_iconv_AC_DEFUN], m4_version_prereq([2.64], [[AC_DEFUN_ONCE( [$1], [$2])]], [m4_ifdef([gl_00GNULIB], [[AC_DEFUN_ONCE( [$1], [$2])]], [[AC_DEFUN( [$1], [$2])]])])) gl_iconv_AC_DEFUN([AM_ICONV], [ AM_ICONV_LINK if test "$am_cv_func_iconv" = yes; then AC_MSG_CHECKING([for iconv declaration]) AC_CACHE_VAL([am_cv_proto_iconv], [ AC_COMPILE_IFELSE( [AC_LANG_PROGRAM( [[ #include #include extern #ifdef __cplusplus "C" #endif #if defined(__STDC__) || defined(_MSC_VER) || defined(__cplusplus) size_t iconv (iconv_t cd, char * *inbuf, size_t *inbytesleft, char * *outbuf, size_t *outbytesleft); #else size_t iconv(); #endif ]], [[]])], [am_cv_proto_iconv_arg1=""], [am_cv_proto_iconv_arg1="const"]) am_cv_proto_iconv="extern size_t iconv (iconv_t cd, $am_cv_proto_iconv_arg1 char * *inbuf, size_t *inbytesleft, char * *outbuf, size_t *outbytesleft);"]) am_cv_proto_iconv=`echo "[$]am_cv_proto_iconv" | tr -s ' ' | sed -e 's/( /(/'` AC_MSG_RESULT([ $am_cv_proto_iconv]) AC_DEFINE_UNQUOTED([ICONV_CONST], [$am_cv_proto_iconv_arg1], [Define as const if the declaration of iconv() needs const.]) dnl Also substitute ICONV_CONST in the gnulib generated . m4_ifdef([gl_ICONV_H_DEFAULTS], [AC_REQUIRE([gl_ICONV_H_DEFAULTS]) if test -n "$am_cv_proto_iconv_arg1"; then ICONV_CONST="const" fi ]) fi ]) OpenSC-pam_pkcs11-87edc72/aclocal/lib-ld.m4000066400000000000000000000071431475044520600202420ustar00rootroot00000000000000# lib-ld.m4 serial 6 dnl Copyright (C) 1996-2003, 2009-2014 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl Subroutines of libtool.m4, dnl with replacements s/_*LT_PATH/AC_LIB_PROG/ and s/lt_/acl_/ to avoid dnl collision with libtool.m4. dnl From libtool-2.4. Sets the variable with_gnu_ld to yes or no. 
AC_DEFUN([AC_LIB_PROG_LD_GNU], [AC_CACHE_CHECK([if the linker ($LD) is GNU ld], [acl_cv_prog_gnu_ld], [# I'd rather use --version here, but apparently some GNU lds only accept -v. case `$LD -v 2>&1 /dev/null 2>&1 \ && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 \ || PATH_SEPARATOR=';' } fi ac_prog=ld if test "$GCC" = yes; then # Check if gcc -print-prog-name=ld gives a path. AC_MSG_CHECKING([for ld used by $CC]) case $host in *-*-mingw*) # gcc leaves a trailing carriage return which upsets mingw ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; *) ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; esac case $ac_prog in # Accept absolute paths. [[\\/]]* | ?:[[\\/]]*) re_direlt='/[[^/]][[^/]]*/\.\./' # Canonicalize the pathname of ld ac_prog=`echo "$ac_prog"| sed 's%\\\\%/%g'` while echo "$ac_prog" | grep "$re_direlt" > /dev/null 2>&1; do ac_prog=`echo $ac_prog| sed "s%$re_direlt%/%"` done test -z "$LD" && LD="$ac_prog" ;; "") # If it fails, then pretend we aren't using GCC. ac_prog=ld ;; *) # If it is relative, then search for the first ld in PATH. with_gnu_ld=unknown ;; esac elif test "$with_gnu_ld" = yes; then AC_MSG_CHECKING([for GNU ld]) else AC_MSG_CHECKING([for non-GNU ld]) fi AC_CACHE_VAL([acl_cv_path_LD], [if test -z "$LD"; then acl_save_ifs="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS="$acl_save_ifs" test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then acl_cv_path_LD="$ac_dir/$ac_prog" # Check to see if the program is GNU ld. I'd rather use --version, # but apparently some variants of GNU ld only accept -v. # Break only if it was the GNU/non-GNU ld that we prefer. case `"$acl_cv_path_LD" -v 2>&1 = 1.10 to complain if config.rpath is missing. m4_ifdef([AC_REQUIRE_AUX_FILE], [AC_REQUIRE_AUX_FILE([config.rpath])]) AC_REQUIRE([AC_PROG_CC]) dnl we use $CC, $GCC, $LDFLAGS AC_REQUIRE([AC_LIB_PROG_LD]) dnl we use $LD, $with_gnu_ld AC_REQUIRE([AC_CANONICAL_HOST]) dnl we use $host AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT]) dnl we use $ac_aux_dir AC_CACHE_CHECK([for shared library run path origin], [acl_cv_rpath], [ CC="$CC" GCC="$GCC" LDFLAGS="$LDFLAGS" LD="$LD" with_gnu_ld="$with_gnu_ld" \ ${CONFIG_SHELL-/bin/sh} "$ac_aux_dir/config.rpath" "$host" > conftest.sh . ./conftest.sh rm -f ./conftest.sh acl_cv_rpath=done ]) wl="$acl_cv_wl" acl_libext="$acl_cv_libext" acl_shlibext="$acl_cv_shlibext" acl_libname_spec="$acl_cv_libname_spec" acl_library_names_spec="$acl_cv_library_names_spec" acl_hardcode_libdir_flag_spec="$acl_cv_hardcode_libdir_flag_spec" acl_hardcode_libdir_separator="$acl_cv_hardcode_libdir_separator" acl_hardcode_direct="$acl_cv_hardcode_direct" acl_hardcode_minus_L="$acl_cv_hardcode_minus_L" dnl Determine whether the user wants rpath handling at all. AC_ARG_ENABLE([rpath], [ --disable-rpath do not hardcode runtime library paths], :, enable_rpath=yes) ]) dnl AC_LIB_FROMPACKAGE(name, package) dnl declares that libname comes from the given package. The configure file dnl will then not have a --with-libname-prefix option but a dnl --with-package-prefix option. Several libraries can come from the same dnl package. This declaration must occur before an AC_LIB_LINKFLAGS or similar dnl macro call that searches for libname. 
AC_DEFUN([AC_LIB_FROMPACKAGE], [ pushdef([NAME],[m4_translit([$1],[abcdefghijklmnopqrstuvwxyz./+-], [ABCDEFGHIJKLMNOPQRSTUVWXYZ____])]) define([acl_frompackage_]NAME, [$2]) popdef([NAME]) pushdef([PACK],[$2]) pushdef([PACKUP],[m4_translit(PACK,[abcdefghijklmnopqrstuvwxyz./+-], [ABCDEFGHIJKLMNOPQRSTUVWXYZ____])]) define([acl_libsinpackage_]PACKUP, m4_ifdef([acl_libsinpackage_]PACKUP, [m4_defn([acl_libsinpackage_]PACKUP)[, ]],)[lib$1]) popdef([PACKUP]) popdef([PACK]) ]) dnl AC_LIB_LINKFLAGS_BODY(name [, dependencies]) searches for libname and dnl the libraries corresponding to explicit and implicit dependencies. dnl Sets the LIB${NAME}, LTLIB${NAME} and INC${NAME} variables. dnl Also, sets the LIB${NAME}_PREFIX variable to nonempty if libname was found dnl in ${LIB${NAME}_PREFIX}/$acl_libdirstem. AC_DEFUN([AC_LIB_LINKFLAGS_BODY], [ AC_REQUIRE([AC_LIB_PREPARE_MULTILIB]) pushdef([NAME],[m4_translit([$1],[abcdefghijklmnopqrstuvwxyz./+-], [ABCDEFGHIJKLMNOPQRSTUVWXYZ____])]) pushdef([PACK],[m4_ifdef([acl_frompackage_]NAME, [acl_frompackage_]NAME, lib[$1])]) pushdef([PACKUP],[m4_translit(PACK,[abcdefghijklmnopqrstuvwxyz./+-], [ABCDEFGHIJKLMNOPQRSTUVWXYZ____])]) pushdef([PACKLIBS],[m4_ifdef([acl_frompackage_]NAME, [acl_libsinpackage_]PACKUP, lib[$1])]) dnl Autoconf >= 2.61 supports dots in --with options. pushdef([P_A_C_K],[m4_if(m4_version_compare(m4_defn([m4_PACKAGE_VERSION]),[2.61]),[-1],[m4_translit(PACK,[.],[_])],PACK)]) dnl By default, look in $includedir and $libdir. use_additional=yes AC_LIB_WITH_FINAL_PREFIX([ eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" ]) AC_ARG_WITH(P_A_C_K[-prefix], [[ --with-]]P_A_C_K[[-prefix[=DIR] search for ]PACKLIBS[ in DIR/include and DIR/lib --without-]]P_A_C_K[[-prefix don't search for ]PACKLIBS[ in includedir and libdir]], [ if test "X$withval" = "Xno"; then use_additional=no else if test "X$withval" = "X"; then AC_LIB_WITH_FINAL_PREFIX([ eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" ]) else additional_includedir="$withval/include" additional_libdir="$withval/$acl_libdirstem" if test "$acl_libdirstem2" != "$acl_libdirstem" \ && ! test -d "$withval/$acl_libdirstem"; then additional_libdir="$withval/$acl_libdirstem2" fi fi fi ]) dnl Search the library and its dependencies in $additional_libdir and dnl $LDFLAGS. Using breadth-first-seach. LIB[]NAME= LTLIB[]NAME= INC[]NAME= LIB[]NAME[]_PREFIX= dnl HAVE_LIB${NAME} is an indicator that LIB${NAME}, LTLIB${NAME} have been dnl computed. So it has to be reset here. HAVE_LIB[]NAME= rpathdirs= ltrpathdirs= names_already_handled= names_next_round='$1 $2' while test -n "$names_next_round"; do names_this_round="$names_next_round" names_next_round= for name in $names_this_round; do already_handled= for n in $names_already_handled; do if test "$n" = "$name"; then already_handled=yes break fi done if test -z "$already_handled"; then names_already_handled="$names_already_handled $name" dnl See if it was already located by an earlier AC_LIB_LINKFLAGS dnl or AC_LIB_HAVE_LINKFLAGS call. 
uppername=`echo "$name" | sed -e 'y|abcdefghijklmnopqrstuvwxyz./+-|ABCDEFGHIJKLMNOPQRSTUVWXYZ____|'` eval value=\"\$HAVE_LIB$uppername\" if test -n "$value"; then if test "$value" = yes; then eval value=\"\$LIB$uppername\" test -z "$value" || LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$value" eval value=\"\$LTLIB$uppername\" test -z "$value" || LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }$value" else dnl An earlier call to AC_LIB_HAVE_LINKFLAGS has determined dnl that this library doesn't exist. So just drop it. : fi else dnl Search the library lib$name in $additional_libdir and $LDFLAGS dnl and the already constructed $LIBNAME/$LTLIBNAME. found_dir= found_la= found_so= found_a= eval libname=\"$acl_libname_spec\" # typically: libname=lib$name if test -n "$acl_shlibext"; then shrext=".$acl_shlibext" # typically: shrext=.so else shrext= fi if test $use_additional = yes; then dir="$additional_libdir" dnl The same code as in the loop below: dnl First look for a shared library. if test -n "$acl_shlibext"; then if test -f "$dir/$libname$shrext"; then found_dir="$dir" found_so="$dir/$libname$shrext" else if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then ver=`(cd "$dir" && \ for f in "$libname$shrext".*; do echo "$f"; done \ | sed -e "s,^$libname$shrext\\\\.,," \ | sort -t '.' -n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ | sed 1q ) 2>/dev/null` if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then found_dir="$dir" found_so="$dir/$libname$shrext.$ver" fi else eval library_names=\"$acl_library_names_spec\" for f in $library_names; do if test -f "$dir/$f"; then found_dir="$dir" found_so="$dir/$f" break fi done fi fi fi dnl Then look for a static library. if test "X$found_dir" = "X"; then if test -f "$dir/$libname.$acl_libext"; then found_dir="$dir" found_a="$dir/$libname.$acl_libext" fi fi if test "X$found_dir" != "X"; then if test -f "$dir/$libname.la"; then found_la="$dir/$libname.la" fi fi fi if test "X$found_dir" = "X"; then for x in $LDFLAGS $LTLIB[]NAME; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) case "$x" in -L*) dir=`echo "X$x" | sed -e 's/^X-L//'` dnl First look for a shared library. if test -n "$acl_shlibext"; then if test -f "$dir/$libname$shrext"; then found_dir="$dir" found_so="$dir/$libname$shrext" else if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then ver=`(cd "$dir" && \ for f in "$libname$shrext".*; do echo "$f"; done \ | sed -e "s,^$libname$shrext\\\\.,," \ | sort -t '.' -n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ | sed 1q ) 2>/dev/null` if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then found_dir="$dir" found_so="$dir/$libname$shrext.$ver" fi else eval library_names=\"$acl_library_names_spec\" for f in $library_names; do if test -f "$dir/$f"; then found_dir="$dir" found_so="$dir/$f" break fi done fi fi fi dnl Then look for a static library. if test "X$found_dir" = "X"; then if test -f "$dir/$libname.$acl_libext"; then found_dir="$dir" found_a="$dir/$libname.$acl_libext" fi fi if test "X$found_dir" != "X"; then if test -f "$dir/$libname.la"; then found_la="$dir/$libname.la" fi fi ;; esac if test "X$found_dir" != "X"; then break fi done fi if test "X$found_dir" != "X"; then dnl Found the library. LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-L$found_dir -l$name" if test "X$found_so" != "X"; then dnl Linking with a shared library. We attempt to hardcode its dnl directory into the executable's runpath, unless it's the dnl standard /usr/lib. 
if test "$enable_rpath" = no \ || test "X$found_dir" = "X/usr/$acl_libdirstem" \ || test "X$found_dir" = "X/usr/$acl_libdirstem2"; then dnl No hardcoding is needed. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" else dnl Use an explicit option to hardcode DIR into the resulting dnl binary. dnl Potentially add DIR to ltrpathdirs. dnl The ltrpathdirs will be appended to $LTLIBNAME at the end. haveit= for x in $ltrpathdirs; do if test "X$x" = "X$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then ltrpathdirs="$ltrpathdirs $found_dir" fi dnl The hardcoding into $LIBNAME is system dependent. if test "$acl_hardcode_direct" = yes; then dnl Using DIR/libNAME.so during linking hardcodes DIR into the dnl resulting binary. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" else if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then dnl Use an explicit option to hardcode DIR into the resulting dnl binary. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" dnl Potentially add DIR to rpathdirs. dnl The rpathdirs will be appended to $LIBNAME at the end. haveit= for x in $rpathdirs; do if test "X$x" = "X$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then rpathdirs="$rpathdirs $found_dir" fi else dnl Rely on "-L$found_dir". dnl But don't add it if it's already contained in the LDFLAGS dnl or the already constructed $LIBNAME haveit= for x in $LDFLAGS $LIB[]NAME; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-L$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-L$found_dir" fi if test "$acl_hardcode_minus_L" != no; then dnl FIXME: Not sure whether we should use dnl "-L$found_dir -l$name" or "-L$found_dir $found_so" dnl here. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" else dnl We cannot use $acl_hardcode_runpath_var and LD_RUN_PATH dnl here, because this doesn't fit in flags passed to the dnl compiler. So give up. No hardcoding. This affects only dnl very old systems. dnl FIXME: Not sure whether we should use dnl "-L$found_dir -l$name" or "-L$found_dir $found_so" dnl here. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-l$name" fi fi fi fi else if test "X$found_a" != "X"; then dnl Linking with a static library. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_a" else dnl We shouldn't come here, but anyway it's good to have a dnl fallback. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-L$found_dir -l$name" fi fi dnl Assume the include files are nearby. additional_includedir= case "$found_dir" in */$acl_libdirstem | */$acl_libdirstem/) basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem/"'*$,,'` if test "$name" = '$1'; then LIB[]NAME[]_PREFIX="$basedir" fi additional_includedir="$basedir/include" ;; */$acl_libdirstem2 | */$acl_libdirstem2/) basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem2/"'*$,,'` if test "$name" = '$1'; then LIB[]NAME[]_PREFIX="$basedir" fi additional_includedir="$basedir/include" ;; esac if test "X$additional_includedir" != "X"; then dnl Potentially add $additional_includedir to $INCNAME. dnl But don't add it dnl 1. if it's the standard /usr/include, dnl 2. if it's /usr/local/include and we are using GCC on Linux, dnl 3. if it's already present in $CPPFLAGS or the already dnl constructed $INCNAME, dnl 4. if it doesn't exist as a directory. 
if test "X$additional_includedir" != "X/usr/include"; then haveit= if test "X$additional_includedir" = "X/usr/local/include"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then for x in $CPPFLAGS $INC[]NAME; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-I$additional_includedir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_includedir"; then dnl Really add $additional_includedir to $INCNAME. INC[]NAME="${INC[]NAME}${INC[]NAME:+ }-I$additional_includedir" fi fi fi fi fi dnl Look for dependencies. if test -n "$found_la"; then dnl Read the .la file. It defines the variables dnl dlname, library_names, old_library, dependency_libs, current, dnl age, revision, installed, dlopen, dlpreopen, libdir. save_libdir="$libdir" case "$found_la" in */* | *\\*) . "$found_la" ;; *) . "./$found_la" ;; esac libdir="$save_libdir" dnl We use only dependency_libs. for dep in $dependency_libs; do case "$dep" in -L*) additional_libdir=`echo "X$dep" | sed -e 's/^X-L//'` dnl Potentially add $additional_libdir to $LIBNAME and $LTLIBNAME. dnl But don't add it dnl 1. if it's the standard /usr/lib, dnl 2. if it's /usr/local/lib and we are using GCC on Linux, dnl 3. if it's already present in $LDFLAGS or the already dnl constructed $LIBNAME, dnl 4. if it doesn't exist as a directory. if test "X$additional_libdir" != "X/usr/$acl_libdirstem" \ && test "X$additional_libdir" != "X/usr/$acl_libdirstem2"; then haveit= if test "X$additional_libdir" = "X/usr/local/$acl_libdirstem" \ || test "X$additional_libdir" = "X/usr/local/$acl_libdirstem2"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then haveit= for x in $LDFLAGS $LIB[]NAME; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_libdir"; then dnl Really add $additional_libdir to $LIBNAME. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-L$additional_libdir" fi fi haveit= for x in $LDFLAGS $LTLIB[]NAME; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_libdir"; then dnl Really add $additional_libdir to $LTLIBNAME. LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-L$additional_libdir" fi fi fi fi ;; -R*) dir=`echo "X$dep" | sed -e 's/^X-R//'` if test "$enable_rpath" != no; then dnl Potentially add DIR to rpathdirs. dnl The rpathdirs will be appended to $LIBNAME at the end. haveit= for x in $rpathdirs; do if test "X$x" = "X$dir"; then haveit=yes break fi done if test -z "$haveit"; then rpathdirs="$rpathdirs $dir" fi dnl Potentially add DIR to ltrpathdirs. dnl The ltrpathdirs will be appended to $LTLIBNAME at the end. haveit= for x in $ltrpathdirs; do if test "X$x" = "X$dir"; then haveit=yes break fi done if test -z "$haveit"; then ltrpathdirs="$ltrpathdirs $dir" fi fi ;; -l*) dnl Handle this in the next round. names_next_round="$names_next_round "`echo "X$dep" | sed -e 's/^X-l//'` ;; *.la) dnl Handle this in the next round. Throw away the .la's dnl directory; it is already contained in a preceding -L dnl option. names_next_round="$names_next_round "`echo "X$dep" | sed -e 's,^X.*/,,' -e 's,^lib,,' -e 's,\.la$,,'` ;; *) dnl Most likely an immediate library name. 
LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$dep" LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }$dep" ;; esac done fi else dnl Didn't find the library; assume it is in the system directories dnl known to the linker and runtime loader. (All the system dnl directories known to the linker should also be known to the dnl runtime loader, otherwise the system is severely misconfigured.) LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-l$name" LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-l$name" fi fi fi done done if test "X$rpathdirs" != "X"; then if test -n "$acl_hardcode_libdir_separator"; then dnl Weird platform: only the last -rpath option counts, the user must dnl pass all path elements in one option. We can arrange that for a dnl single library, but not when more than one $LIBNAMEs are used. alldirs= for found_dir in $rpathdirs; do alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$found_dir" done dnl Note: acl_hardcode_libdir_flag_spec uses $libdir and $wl. acl_save_libdir="$libdir" libdir="$alldirs" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$flag" else dnl The -rpath options are cumulative. for found_dir in $rpathdirs; do acl_save_libdir="$libdir" libdir="$found_dir" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$flag" done fi fi if test "X$ltrpathdirs" != "X"; then dnl When using libtool, the option that works for both libraries and dnl executables is -R. The -R options are cumulative. for found_dir in $ltrpathdirs; do LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-R$found_dir" done fi popdef([P_A_C_K]) popdef([PACKLIBS]) popdef([PACKUP]) popdef([PACK]) popdef([NAME]) ]) dnl AC_LIB_APPENDTOVAR(VAR, CONTENTS) appends the elements of CONTENTS to VAR, dnl unless already present in VAR. dnl Works only for CPPFLAGS, not for LIB* variables because that sometimes dnl contains two or three consecutive elements that belong together. AC_DEFUN([AC_LIB_APPENDTOVAR], [ for element in [$2]; do haveit= for x in $[$1]; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X$element"; then haveit=yes break fi done if test -z "$haveit"; then [$1]="${[$1]}${[$1]:+ }$element" fi done ]) dnl For those cases where a variable contains several -L and -l options dnl referring to unknown libraries and directories, this macro determines the dnl necessary additional linker options for the runtime path. dnl AC_LIB_LINKFLAGS_FROM_LIBS([LDADDVAR], [LIBSVALUE], [USE-LIBTOOL]) dnl sets LDADDVAR to linker options needed together with LIBSVALUE. dnl If USE-LIBTOOL evaluates to non-empty, linking with libtool is assumed, dnl otherwise linking without libtool is assumed. AC_DEFUN([AC_LIB_LINKFLAGS_FROM_LIBS], [ AC_REQUIRE([AC_LIB_RPATH]) AC_REQUIRE([AC_LIB_PREPARE_MULTILIB]) $1= if test "$enable_rpath" != no; then if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then dnl Use an explicit option to hardcode directories into the resulting dnl binary. rpathdirs= next= for opt in $2; do if test -n "$next"; then dir="$next" dnl No need to hardcode the standard /usr/lib. if test "X$dir" != "X/usr/$acl_libdirstem" \ && test "X$dir" != "X/usr/$acl_libdirstem2"; then rpathdirs="$rpathdirs $dir" fi next= else case $opt in -L) next=yes ;; -L*) dir=`echo "X$opt" | sed -e 's,^X-L,,'` dnl No need to hardcode the standard /usr/lib. 
if test "X$dir" != "X/usr/$acl_libdirstem" \ && test "X$dir" != "X/usr/$acl_libdirstem2"; then rpathdirs="$rpathdirs $dir" fi next= ;; *) next= ;; esac fi done if test "X$rpathdirs" != "X"; then if test -n ""$3""; then dnl libtool is used for linking. Use -R options. for dir in $rpathdirs; do $1="${$1}${$1:+ }-R$dir" done else dnl The linker is used for linking directly. if test -n "$acl_hardcode_libdir_separator"; then dnl Weird platform: only the last -rpath option counts, the user dnl must pass all path elements in one option. alldirs= for dir in $rpathdirs; do alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$dir" done acl_save_libdir="$libdir" libdir="$alldirs" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" $1="$flag" else dnl The -rpath options are cumulative. for dir in $rpathdirs; do acl_save_libdir="$libdir" libdir="$dir" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" $1="${$1}${$1:+ }$flag" done fi fi fi fi fi AC_SUBST([$1]) ]) OpenSC-pam_pkcs11-87edc72/aclocal/lib-prefix.m4000066400000000000000000000204221475044520600211330ustar00rootroot00000000000000# lib-prefix.m4 serial 7 (gettext-0.18) dnl Copyright (C) 2001-2005, 2008-2014 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl From Bruno Haible. dnl AC_LIB_ARG_WITH is synonymous to AC_ARG_WITH in autoconf-2.13, and dnl similar to AC_ARG_WITH in autoconf 2.52...2.57 except that is doesn't dnl require excessive bracketing. ifdef([AC_HELP_STRING], [AC_DEFUN([AC_LIB_ARG_WITH], [AC_ARG_WITH([$1],[[$2]],[$3],[$4])])], [AC_DEFUN([AC_][LIB_ARG_WITH], [AC_ARG_WITH([$1],[$2],[$3],[$4])])]) dnl AC_LIB_PREFIX adds to the CPPFLAGS and LDFLAGS the flags that are needed dnl to access previously installed libraries. The basic assumption is that dnl a user will want packages to use other packages he previously installed dnl with the same --prefix option. dnl This macro is not needed if only AC_LIB_LINKFLAGS is used to locate dnl libraries, but is otherwise very convenient. AC_DEFUN([AC_LIB_PREFIX], [ AC_BEFORE([$0], [AC_LIB_LINKFLAGS]) AC_REQUIRE([AC_PROG_CC]) AC_REQUIRE([AC_CANONICAL_HOST]) AC_REQUIRE([AC_LIB_PREPARE_MULTILIB]) AC_REQUIRE([AC_LIB_PREPARE_PREFIX]) dnl By default, look in $includedir and $libdir. use_additional=yes AC_LIB_WITH_FINAL_PREFIX([ eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" ]) AC_LIB_ARG_WITH([lib-prefix], [ --with-lib-prefix[=DIR] search for libraries in DIR/include and DIR/lib --without-lib-prefix don't search for libraries in includedir and libdir], [ if test "X$withval" = "Xno"; then use_additional=no else if test "X$withval" = "X"; then AC_LIB_WITH_FINAL_PREFIX([ eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" ]) else additional_includedir="$withval/include" additional_libdir="$withval/$acl_libdirstem" fi fi ]) if test $use_additional = yes; then dnl Potentially add $additional_includedir to $CPPFLAGS. dnl But don't add it dnl 1. if it's the standard /usr/include, dnl 2. if it's already present in $CPPFLAGS, dnl 3. if it's /usr/local/include and we are using GCC on Linux, dnl 4. if it doesn't exist as a directory. 
if test "X$additional_includedir" != "X/usr/include"; then haveit= for x in $CPPFLAGS; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-I$additional_includedir"; then haveit=yes break fi done if test -z "$haveit"; then if test "X$additional_includedir" = "X/usr/local/include"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then if test -d "$additional_includedir"; then dnl Really add $additional_includedir to $CPPFLAGS. CPPFLAGS="${CPPFLAGS}${CPPFLAGS:+ }-I$additional_includedir" fi fi fi fi dnl Potentially add $additional_libdir to $LDFLAGS. dnl But don't add it dnl 1. if it's the standard /usr/lib, dnl 2. if it's already present in $LDFLAGS, dnl 3. if it's /usr/local/lib and we are using GCC on Linux, dnl 4. if it doesn't exist as a directory. if test "X$additional_libdir" != "X/usr/$acl_libdirstem"; then haveit= for x in $LDFLAGS; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test "X$additional_libdir" = "X/usr/local/$acl_libdirstem"; then if test -n "$GCC"; then case $host_os in linux*) haveit=yes;; esac fi fi if test -z "$haveit"; then if test -d "$additional_libdir"; then dnl Really add $additional_libdir to $LDFLAGS. LDFLAGS="${LDFLAGS}${LDFLAGS:+ }-L$additional_libdir" fi fi fi fi fi ]) dnl AC_LIB_PREPARE_PREFIX creates variables acl_final_prefix, dnl acl_final_exec_prefix, containing the values to which $prefix and dnl $exec_prefix will expand at the end of the configure script. AC_DEFUN([AC_LIB_PREPARE_PREFIX], [ dnl Unfortunately, prefix and exec_prefix get only finally determined dnl at the end of configure. if test "X$prefix" = "XNONE"; then acl_final_prefix="$ac_default_prefix" else acl_final_prefix="$prefix" fi if test "X$exec_prefix" = "XNONE"; then acl_final_exec_prefix='${prefix}' else acl_final_exec_prefix="$exec_prefix" fi acl_save_prefix="$prefix" prefix="$acl_final_prefix" eval acl_final_exec_prefix=\"$acl_final_exec_prefix\" prefix="$acl_save_prefix" ]) dnl AC_LIB_WITH_FINAL_PREFIX([statement]) evaluates statement, with the dnl variables prefix and exec_prefix bound to the values they will have dnl at the end of the configure script. AC_DEFUN([AC_LIB_WITH_FINAL_PREFIX], [ acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" $1 exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" ]) dnl AC_LIB_PREPARE_MULTILIB creates dnl - a variable acl_libdirstem, containing the basename of the libdir, either dnl "lib" or "lib64" or "lib/64", dnl - a variable acl_libdirstem2, as a secondary possible value for dnl acl_libdirstem, either the same as acl_libdirstem or "lib/sparcv9" or dnl "lib/amd64". AC_DEFUN([AC_LIB_PREPARE_MULTILIB], [ dnl There is no formal standard regarding lib and lib64. dnl On glibc systems, the current practice is that on a system supporting dnl 32-bit and 64-bit instruction sets or ABIs, 64-bit libraries go under dnl $prefix/lib64 and 32-bit libraries go under $prefix/lib. We determine dnl the compiler's default mode by looking at the compiler's library search dnl path. If at least one of its elements ends in /lib64 or points to a dnl directory whose absolute pathname ends in /lib64, we assume a 64-bit ABI. dnl Otherwise we use the default, namely "lib". 
dnl On Solaris systems, the current practice is that on a system supporting dnl 32-bit and 64-bit instruction sets or ABIs, 64-bit libraries go under dnl $prefix/lib/64 (which is a symlink to either $prefix/lib/sparcv9 or dnl $prefix/lib/amd64) and 32-bit libraries go under $prefix/lib. AC_REQUIRE([AC_CANONICAL_HOST]) acl_libdirstem=lib acl_libdirstem2= case "$host_os" in solaris*) dnl See Solaris 10 Software Developer Collection > Solaris 64-bit Developer's Guide > The Development Environment dnl . dnl "Portable Makefiles should refer to any library directories using the 64 symbolic link." dnl But we want to recognize the sparcv9 or amd64 subdirectory also if the dnl symlink is missing, so we set acl_libdirstem2 too. AC_CACHE_CHECK([for 64-bit host], [gl_cv_solaris_64bit], [AC_EGREP_CPP([sixtyfour bits], [ #ifdef _LP64 sixtyfour bits #endif ], [gl_cv_solaris_64bit=yes], [gl_cv_solaris_64bit=no]) ]) if test $gl_cv_solaris_64bit = yes; then acl_libdirstem=lib/64 case "$host_cpu" in sparc*) acl_libdirstem2=lib/sparcv9 ;; i*86 | x86_64) acl_libdirstem2=lib/amd64 ;; esac fi ;; *) searchpath=`(LC_ALL=C $CC -print-search-dirs) 2>/dev/null | sed -n -e 's,^libraries: ,,p' | sed -e 's,^=,,'` if test -n "$searchpath"; then acl_save_IFS="${IFS= }"; IFS=":" for searchdir in $searchpath; do if test -d "$searchdir"; then case "$searchdir" in */lib64/ | */lib64 ) acl_libdirstem=lib64 ;; */../ | */.. ) # Better ignore directories of this form. They are misleading. ;; *) searchdir=`cd "$searchdir" && pwd` case "$searchdir" in */lib64 ) acl_libdirstem=lib64 ;; esac ;; esac fi done IFS="$acl_save_IFS" fi ;; esac test -n "$acl_libdirstem2" || acl_libdirstem2="$acl_libdirstem" ]) OpenSC-pam_pkcs11-87edc72/aclocal/nls.m4000066400000000000000000000023151475044520600176670ustar00rootroot00000000000000# nls.m4 serial 5 (gettext-0.18) dnl Copyright (C) 1995-2003, 2005-2006, 2008-2014 Free Software Foundation, dnl Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl dnl This file can can be used in projects which are not available under dnl the GNU General Public License or the GNU Library General Public dnl License but which still want to provide support for the GNU gettext dnl functionality. dnl Please note that the actual code of the GNU gettext library is covered dnl by the GNU Library General Public License, and the rest of the GNU dnl gettext package package is covered by the GNU General Public License. dnl They are *not* in the public domain. dnl Authors: dnl Ulrich Drepper , 1995-2000. dnl Bruno Haible , 2000-2003. AC_PREREQ([2.50]) AC_DEFUN([AM_NLS], [ AC_MSG_CHECKING([whether NLS is requested]) dnl Default is enabled NLS AC_ARG_ENABLE([nls], [ --disable-nls do not use Native Language Support], USE_NLS=$enableval, USE_NLS=yes) AC_MSG_RESULT([$USE_NLS]) AC_SUBST([USE_NLS]) ]) OpenSC-pam_pkcs11-87edc72/aclocal/po.m4000066400000000000000000000450371475044520600175210ustar00rootroot00000000000000# po.m4 serial 22 (gettext-0.19) dnl Copyright (C) 1995-2014 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. 
dnl dnl This file can can be used in projects which are not available under dnl the GNU General Public License or the GNU Library General Public dnl License but which still want to provide support for the GNU gettext dnl functionality. dnl Please note that the actual code of the GNU gettext library is covered dnl by the GNU Library General Public License, and the rest of the GNU dnl gettext package package is covered by the GNU General Public License. dnl They are *not* in the public domain. dnl Authors: dnl Ulrich Drepper , 1995-2000. dnl Bruno Haible , 2000-2003. AC_PREREQ([2.60]) dnl Checks for all prerequisites of the po subdirectory. AC_DEFUN([AM_PO_SUBDIRS], [ AC_REQUIRE([AC_PROG_MAKE_SET])dnl AC_REQUIRE([AC_PROG_INSTALL])dnl AC_REQUIRE([AC_PROG_MKDIR_P])dnl AC_REQUIRE([AC_PROG_SED])dnl AC_REQUIRE([AM_NLS])dnl dnl Release version of the gettext macros. This is used to ensure that dnl the gettext macros and po/Makefile.in.in are in sync. AC_SUBST([GETTEXT_MACRO_VERSION], [0.19]) dnl Perform the following tests also if --disable-nls has been given, dnl because they are needed for "make dist" to work. dnl Search for GNU msgfmt in the PATH. dnl The first test excludes Solaris msgfmt and early GNU msgfmt versions. dnl The second test excludes FreeBSD msgfmt. AM_PATH_PROG_WITH_TEST(MSGFMT, msgfmt, [$ac_dir/$ac_word --statistics /dev/null >&]AS_MESSAGE_LOG_FD[ 2>&1 && (if $ac_dir/$ac_word --statistics /dev/null 2>&1 >/dev/null | grep usage >/dev/null; then exit 1; else exit 0; fi)], :) AC_PATH_PROG([GMSGFMT], [gmsgfmt], [$MSGFMT]) dnl Test whether it is GNU msgfmt >= 0.15. changequote(,)dnl case `$MSGFMT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) MSGFMT_015=: ;; *) MSGFMT_015=$MSGFMT ;; esac changequote([,])dnl AC_SUBST([MSGFMT_015]) changequote(,)dnl case `$GMSGFMT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) GMSGFMT_015=: ;; *) GMSGFMT_015=$GMSGFMT ;; esac changequote([,])dnl AC_SUBST([GMSGFMT_015]) dnl Search for GNU xgettext 0.12 or newer in the PATH. dnl The first test excludes Solaris xgettext and early GNU xgettext versions. dnl The second test excludes FreeBSD xgettext. AM_PATH_PROG_WITH_TEST(XGETTEXT, xgettext, [$ac_dir/$ac_word --omit-header --copyright-holder= --msgid-bugs-address= /dev/null >&]AS_MESSAGE_LOG_FD[ 2>&1 && (if $ac_dir/$ac_word --omit-header --copyright-holder= --msgid-bugs-address= /dev/null 2>&1 >/dev/null | grep usage >/dev/null; then exit 1; else exit 0; fi)], :) dnl Remove leftover from FreeBSD xgettext call. rm -f messages.po dnl Test whether it is GNU xgettext >= 0.15. changequote(,)dnl case `$XGETTEXT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) XGETTEXT_015=: ;; *) XGETTEXT_015=$XGETTEXT ;; esac changequote([,])dnl AC_SUBST([XGETTEXT_015]) dnl Search for GNU msgmerge 0.11 or newer in the PATH. AM_PATH_PROG_WITH_TEST(MSGMERGE, msgmerge, [$ac_dir/$ac_word --update -q /dev/null /dev/null >&]AS_MESSAGE_LOG_FD[ 2>&1], :) dnl Installation directories. dnl Autoconf >= 2.60 defines localedir. For older versions of autoconf, we dnl have to define it here, so that it can be used in po/Makefile. test -n "$localedir" || localedir='${datadir}/locale' AC_SUBST([localedir]) dnl Support for AM_XGETTEXT_OPTION. 
test -n "${XGETTEXT_EXTRA_OPTIONS+set}" || XGETTEXT_EXTRA_OPTIONS= AC_SUBST([XGETTEXT_EXTRA_OPTIONS]) AC_CONFIG_COMMANDS([po-directories], [[ for ac_file in $CONFIG_FILES; do # Support "outfile[:infile[:infile...]]" case "$ac_file" in *:*) ac_file=`echo "$ac_file"|sed 's%:.*%%'` ;; esac # PO directories have a Makefile.in generated from Makefile.in.in. case "$ac_file" in */Makefile.in) # Adjust a relative srcdir. ac_dir=`echo "$ac_file"|sed 's%/[^/][^/]*$%%'` ac_dir_suffix=/`echo "$ac_dir"|sed 's%^\./%%'` ac_dots=`echo "$ac_dir_suffix"|sed 's%/[^/]*%../%g'` # In autoconf-2.13 it is called $ac_given_srcdir. # In autoconf-2.50 it is called $srcdir. test -n "$ac_given_srcdir" || ac_given_srcdir="$srcdir" case "$ac_given_srcdir" in .) top_srcdir=`echo $ac_dots|sed 's%/$%%'` ;; /*) top_srcdir="$ac_given_srcdir" ;; *) top_srcdir="$ac_dots$ac_given_srcdir" ;; esac # Treat a directory as a PO directory if and only if it has a # POTFILES.in file. This allows packages to have multiple PO # directories under different names or in different locations. if test -f "$ac_given_srcdir/$ac_dir/POTFILES.in"; then rm -f "$ac_dir/POTFILES" test -n "$as_me" && echo "$as_me: creating $ac_dir/POTFILES" || echo "creating $ac_dir/POTFILES" gt_tab=`printf '\t'` cat "$ac_given_srcdir/$ac_dir/POTFILES.in" | sed -e "/^#/d" -e "/^[ ${gt_tab}]*\$/d" -e "s,.*, $top_srcdir/& \\\\," | sed -e "\$s/\(.*\) \\\\/\1/" > "$ac_dir/POTFILES" POMAKEFILEDEPS="POTFILES.in" # ALL_LINGUAS, POFILES, UPDATEPOFILES, DUMMYPOFILES, GMOFILES depend # on $ac_dir but don't depend on user-specified configuration # parameters. if test -f "$ac_given_srcdir/$ac_dir/LINGUAS"; then # The LINGUAS file contains the set of available languages. if test -n "$OBSOLETE_ALL_LINGUAS"; then test -n "$as_me" && echo "$as_me: setting ALL_LINGUAS in configure.in is obsolete" || echo "setting ALL_LINGUAS in configure.in is obsolete" fi ALL_LINGUAS_=`sed -e "/^#/d" -e "s/#.*//" "$ac_given_srcdir/$ac_dir/LINGUAS"` # Hide the ALL_LINGUAS assignment from automake < 1.5. eval 'ALL_LINGUAS''=$ALL_LINGUAS_' POMAKEFILEDEPS="$POMAKEFILEDEPS LINGUAS" else # The set of available languages was given in configure.in. # Hide the ALL_LINGUAS assignment from automake < 1.5. eval 'ALL_LINGUAS''=$OBSOLETE_ALL_LINGUAS' fi # Compute POFILES # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(lang).po) # Compute UPDATEPOFILES # as $(foreach lang, $(ALL_LINGUAS), $(lang).po-update) # Compute DUMMYPOFILES # as $(foreach lang, $(ALL_LINGUAS), $(lang).nop) # Compute GMOFILES # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(lang).gmo) case "$ac_given_srcdir" in .) srcdirpre= ;; *) srcdirpre='$(srcdir)/' ;; esac POFILES= UPDATEPOFILES= DUMMYPOFILES= GMOFILES= for lang in $ALL_LINGUAS; do POFILES="$POFILES $srcdirpre$lang.po" UPDATEPOFILES="$UPDATEPOFILES $lang.po-update" DUMMYPOFILES="$DUMMYPOFILES $lang.nop" GMOFILES="$GMOFILES $srcdirpre$lang.gmo" done # CATALOGS depends on both $ac_dir and the user's LINGUAS # environment variable. INST_LINGUAS= if test -n "$ALL_LINGUAS"; then for presentlang in $ALL_LINGUAS; do useit=no if test "%UNSET%" != "$LINGUAS"; then desiredlanguages="$LINGUAS" else desiredlanguages="$ALL_LINGUAS" fi for desiredlang in $desiredlanguages; do # Use the presentlang catalog if desiredlang is # a. equal to presentlang, or # b. a variant of presentlang (because in this case, # presentlang can be used as a fallback for messages # which are not translated in the desiredlang catalog). 
case "$desiredlang" in "$presentlang"*) useit=yes;; esac done if test $useit = yes; then INST_LINGUAS="$INST_LINGUAS $presentlang" fi done fi CATALOGS= if test -n "$INST_LINGUAS"; then for lang in $INST_LINGUAS; do CATALOGS="$CATALOGS $lang.gmo" done fi test -n "$as_me" && echo "$as_me: creating $ac_dir/Makefile" || echo "creating $ac_dir/Makefile" sed -e "/^POTFILES =/r $ac_dir/POTFILES" -e "/^# Makevars/r $ac_given_srcdir/$ac_dir/Makevars" -e "s|@POFILES@|$POFILES|g" -e "s|@UPDATEPOFILES@|$UPDATEPOFILES|g" -e "s|@DUMMYPOFILES@|$DUMMYPOFILES|g" -e "s|@GMOFILES@|$GMOFILES|g" -e "s|@CATALOGS@|$CATALOGS|g" -e "s|@POMAKEFILEDEPS@|$POMAKEFILEDEPS|g" "$ac_dir/Makefile.in" > "$ac_dir/Makefile" for f in "$ac_given_srcdir/$ac_dir"/Rules-*; do if test -f "$f"; then case "$f" in *.orig | *.bak | *~) ;; *) cat "$f" >> "$ac_dir/Makefile" ;; esac fi done fi ;; esac done]], [# Capture the value of obsolete ALL_LINGUAS because we need it to compute # POFILES, UPDATEPOFILES, DUMMYPOFILES, GMOFILES, CATALOGS. But hide it # from automake < 1.5. eval 'OBSOLETE_ALL_LINGUAS''="$ALL_LINGUAS"' # Capture the value of LINGUAS because we need it to compute CATALOGS. LINGUAS="${LINGUAS-%UNSET%}" ]) ]) dnl Postprocesses a Makefile in a directory containing PO files. AC_DEFUN([AM_POSTPROCESS_PO_MAKEFILE], [ # When this code is run, in config.status, two variables have already been # set: # - OBSOLETE_ALL_LINGUAS is the value of LINGUAS set in configure.in, # - LINGUAS is the value of the environment variable LINGUAS at configure # time. changequote(,)dnl # Adjust a relative srcdir. ac_dir=`echo "$ac_file"|sed 's%/[^/][^/]*$%%'` ac_dir_suffix=/`echo "$ac_dir"|sed 's%^\./%%'` ac_dots=`echo "$ac_dir_suffix"|sed 's%/[^/]*%../%g'` # In autoconf-2.13 it is called $ac_given_srcdir. # In autoconf-2.50 it is called $srcdir. test -n "$ac_given_srcdir" || ac_given_srcdir="$srcdir" case "$ac_given_srcdir" in .) top_srcdir=`echo $ac_dots|sed 's%/$%%'` ;; /*) top_srcdir="$ac_given_srcdir" ;; *) top_srcdir="$ac_dots$ac_given_srcdir" ;; esac # Find a way to echo strings without interpreting backslash. if test "X`(echo '\t') 2>/dev/null`" = 'X\t'; then gt_echo='echo' else if test "X`(printf '%s\n' '\t') 2>/dev/null`" = 'X\t'; then gt_echo='printf %s\n' else echo_func () { cat < "$ac_file.tmp" tab=`printf '\t'` if grep -l '@TCLCATALOGS@' "$ac_file" > /dev/null; then # Add dependencies that cannot be formulated as a simple suffix rule. for lang in $ALL_LINGUAS; do frobbedlang=`echo $lang | sed -e 's/\..*$//' -e 'y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/'` cat >> "$ac_file.tmp" < /dev/null; then # Add dependencies that cannot be formulated as a simple suffix rule. for lang in $ALL_LINGUAS; do frobbedlang=`echo $lang | sed -e 's/_/-/g' -e 's/^sr-CS/sr-SP/' -e 's/@latin$/-Latn/' -e 's/@cyrillic$/-Cyrl/' -e 's/^sr-SP$/sr-SP-Latn/' -e 's/^uz-UZ$/uz-UZ-Latn/'` cat >> "$ac_file.tmp" <> "$ac_file.tmp" <, 1996. AC_PREREQ([2.50]) # Search path for a program which passes the given test. dnl AM_PATH_PROG_WITH_TEST(VARIABLE, PROG-TO-CHECK-FOR, dnl TEST-PERFORMED-ON-FOUND_PROGRAM [, VALUE-IF-NOT-FOUND [, PATH]]) AC_DEFUN([AM_PATH_PROG_WITH_TEST], [ # Prepare PATH_SEPARATOR. # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then # Determine PATH_SEPARATOR by trying to find /bin/sh in a PATH which # contains only /bin. Note that ksh looks also at the FPATH variable, # so we have to set that as well for the test. 
PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 \ && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 \ || PATH_SEPARATOR=';' } fi # Find out how to test for executable files. Don't use a zero-byte file, # as systems may use methods other than mode bits to determine executability. cat >conf$$.file <<_ASEOF #! /bin/sh exit 0 _ASEOF chmod +x conf$$.file if test -x conf$$.file >/dev/null 2>&1; then ac_executable_p="test -x" else ac_executable_p="test -f" fi rm -f conf$$.file # Extract the first word of "$2", so it can be a program name with args. set dummy $2; ac_word=[$]2 AC_MSG_CHECKING([for $ac_word]) AC_CACHE_VAL([ac_cv_path_$1], [case "[$]$1" in [[\\/]]* | ?:[[\\/]]*) ac_cv_path_$1="[$]$1" # Let the user override the test with a path. ;; *) ac_save_IFS="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in ifelse([$5], , $PATH, [$5]); do IFS="$ac_save_IFS" test -z "$ac_dir" && ac_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $ac_executable_p "$ac_dir/$ac_word$ac_exec_ext"; then echo "$as_me: trying $ac_dir/$ac_word..." >&AS_MESSAGE_LOG_FD if [$3]; then ac_cv_path_$1="$ac_dir/$ac_word$ac_exec_ext" break 2 fi fi done done IFS="$ac_save_IFS" dnl If no 4th arg is given, leave the cache variable unset, dnl so AC_PATH_PROGS will keep looking. ifelse([$4], , , [ test -z "[$]ac_cv_path_$1" && ac_cv_path_$1="$4" ])dnl ;; esac])dnl $1="$ac_cv_path_$1" if test ifelse([$4], , [-n "[$]$1"], ["[$]$1" != "$4"]); then AC_MSG_RESULT([$][$1]) else AC_MSG_RESULT([no]) fi AC_SUBST([$1])dnl ]) OpenSC-pam_pkcs11-87edc72/bootstrap000077500000000000000000000004271475044520600172000ustar00rootroot00000000000000#!/bin/sh set -e set -x if test -f Makefile; then make distclean fi rm -rf *.cache *.m4 config.guess config.log \ config.status config.sub depcomp ltmain.sh #gettextize --force aclocal -I aclocal libtoolize --force --copy autoheader automake --add-missing --foreign autoconf OpenSC-pam_pkcs11-87edc72/codespell_ignore_words.txt000066400000000000000000000000301475044520600225170ustar00rootroot00000000000000ba gord parm parms pres OpenSC-pam_pkcs11-87edc72/config.rpath000077500000000000000000000442161475044520600175510ustar00rootroot00000000000000#! /bin/sh # Output a system dependent set of variables, describing how to set the # run time search path of shared libraries in an executable. # # Copyright 1996-2014 Free Software Foundation, Inc. # Taken from GNU libtool, 2001 # Originally by Gordon Matzigkeit , 1996 # # This file is free software; the Free Software Foundation gives # unlimited permission to copy and/or distribute it, with or without # modifications, as long as this notice is preserved. # # The first argument passed to this file is the canonical host specification, # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM # or # CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM # The environment variables CC, GCC, LDFLAGS, LD, with_gnu_ld # should be set by the caller. # # The set of defined variables is at the end of this script. # Known limitations: # - On IRIX 6.5 with CC="cc", the run time search patch must not be longer # than 256 bytes, otherwise the compiler driver will dump core. The only # known workaround is to choose shorter directory names for the build # directory and/or the installation directory. # All known linkers require a '.a' archive for static linking (except MSVC, # which needs '.lib'). 
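# Illustrative invocation (the host triplet, variable values and output file
# name below are hypothetical examples, not requirements): the caller exports
# the variables named above, passes the canonical host specification as the
# only argument, and captures the emitted "acl_cv_*" shell assignments, e.g.
#
#   CC=gcc GCC=yes LD=ld with_gnu_ld=yes LDFLAGS= \
#     sh ./config.rpath x86_64-pc-linux-gnu > rpath.vars
#   . ./rpath.vars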
libext=a shrext=.so host="$1" host_cpu=`echo "$host" | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\1/'` host_vendor=`echo "$host" | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\2/'` host_os=`echo "$host" | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\3/'` # Code taken from libtool.m4's _LT_CC_BASENAME. for cc_temp in $CC""; do case $cc_temp in compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; \-*) ;; *) break;; esac done cc_basename=`echo "$cc_temp" | sed -e 's%^.*/%%'` # Code taken from libtool.m4's _LT_COMPILER_PIC. wl= if test "$GCC" = yes; then wl='-Wl,' else case "$host_os" in aix*) wl='-Wl,' ;; mingw* | cygwin* | pw32* | os2* | cegcc*) ;; hpux9* | hpux10* | hpux11*) wl='-Wl,' ;; irix5* | irix6* | nonstopux*) wl='-Wl,' ;; linux* | k*bsd*-gnu | kopensolaris*-gnu) case $cc_basename in ecc*) wl='-Wl,' ;; icc* | ifort*) wl='-Wl,' ;; lf95*) wl='-Wl,' ;; nagfor*) wl='-Wl,-Wl,,' ;; pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) wl='-Wl,' ;; ccc*) wl='-Wl,' ;; xl* | bgxl* | bgf* | mpixl*) wl='-Wl,' ;; como) wl='-lopt=' ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ F* | *Sun*Fortran*) wl= ;; *Sun\ C*) wl='-Wl,' ;; esac ;; esac ;; newsos6) ;; *nto* | *qnx*) ;; osf3* | osf4* | osf5*) wl='-Wl,' ;; rdos*) ;; solaris*) case $cc_basename in f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) wl='-Qoption ld ' ;; *) wl='-Wl,' ;; esac ;; sunos4*) wl='-Qoption ld ' ;; sysv4 | sysv4.2uw2* | sysv4.3*) wl='-Wl,' ;; sysv4*MP*) ;; sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) wl='-Wl,' ;; unicos*) wl='-Wl,' ;; uts4*) ;; esac fi # Code taken from libtool.m4's _LT_LINKER_SHLIBS. hardcode_libdir_flag_spec= hardcode_libdir_separator= hardcode_direct=no hardcode_minus_L=no case "$host_os" in cygwin* | mingw* | pw32* | cegcc*) # FIXME: the MSVC++ port hasn't been tested in a loooong time # When not using gcc, we currently assume that we are using # Microsoft Visual C++. if test "$GCC" != yes; then with_gnu_ld=no fi ;; interix*) # we just hope/assume this is gcc and not c89 (= MSVC++) with_gnu_ld=yes ;; openbsd*) with_gnu_ld=no ;; esac ld_shlibs=yes if test "$with_gnu_ld" = yes; then # Set some defaults for GNU ld with shared library support. These # are reset later if shared libraries are not supported. Putting them # here allows them to be overridden if necessary. # Unlike libtool, we use -rpath here, not --rpath, since the documented # option of GNU ld is called -rpath, not --rpath. hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' case "$host_os" in aix[3-9]*) # On AIX/PPC, the GNU linker is very broken if test "$host_cpu" != ia64; then ld_shlibs=no fi ;; amigaos*) case "$host_cpu" in powerpc) ;; m68k) hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes ;; esac ;; beos*) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then : else ld_shlibs=no fi ;; cygwin* | mingw* | pw32* | cegcc*) # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. 
hardcode_libdir_flag_spec='-L$libdir' if $LD --help 2>&1 | grep 'auto-import' > /dev/null; then : else ld_shlibs=no fi ;; haiku*) ;; interix[3-9]*) hardcode_direct=no hardcode_libdir_flag_spec='${wl}-rpath,$libdir' ;; gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then : else ld_shlibs=no fi ;; netbsd*) ;; solaris*) if $LD -v 2>&1 | grep 'BFD 2\.8' > /dev/null; then ld_shlibs=no elif $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then : else ld_shlibs=no fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*) case `$LD -v 2>&1` in *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*) ld_shlibs=no ;; *) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then hardcode_libdir_flag_spec='`test -z "$SCOABSPATH" && echo ${wl}-rpath,$libdir`' else ld_shlibs=no fi ;; esac ;; sunos4*) hardcode_direct=yes ;; *) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then : else ld_shlibs=no fi ;; esac if test "$ld_shlibs" = no; then hardcode_libdir_flag_spec= fi else case "$host_os" in aix3*) # Note: this linker hardcodes the directories in LIBPATH if there # are no directories specified by -L. hardcode_minus_L=yes if test "$GCC" = yes; then # Neither direct hardcoding nor static linking is supported with a # broken collect2. hardcode_direct=unsupported fi ;; aix[4-9]*) if test "$host_cpu" = ia64; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no else aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # need to do runtime linking. case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*) for ld_flag in $LDFLAGS; do if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then aix_use_runtimelinking=yes break fi done ;; esac fi hardcode_direct=yes hardcode_libdir_separator=':' if test "$GCC" = yes; then case $host_os in aix4.[012]|aix4.[012].*) collect2name=`${CC} -print-prog-name=collect2` if test -f "$collect2name" && \ strings "$collect2name" | grep resolve_lib_name >/dev/null then # We have reworked collect2 : else # We have old collect2 hardcode_direct=unsupported hardcode_minus_L=yes hardcode_libdir_flag_spec='-L$libdir' hardcode_libdir_separator= fi ;; esac fi # Begin _LT_AC_SYS_LIBPATH_AIX. echo 'int main () { return 0; }' > conftest.c ${CC} ${LDFLAGS} conftest.c -o conftest aix_libpath=`dump -H conftest 2>/dev/null | sed -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'` if test -z "$aix_libpath"; then aix_libpath=`dump -HX64 conftest 2>/dev/null | sed -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'` fi if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib" fi rm -f conftest.c conftest # End _LT_AC_SYS_LIBPATH_AIX. if test "$aix_use_runtimelinking" = yes; then hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" else if test "$host_cpu" = ia64; then hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib' else hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" fi fi ;; amigaos*) case "$host_cpu" in powerpc) ;; m68k) hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes ;; esac ;; bsdi[45]*) ;; cygwin* | mingw* | pw32* | cegcc*) # When not using gcc, we currently assume that we are using # Microsoft Visual C++. # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. 
hardcode_libdir_flag_spec=' ' libext=lib ;; darwin* | rhapsody*) hardcode_direct=no if { case $cc_basename in ifort*) true;; *) test "$GCC" = yes;; esac; }; then : else ld_shlibs=no fi ;; dgux*) hardcode_libdir_flag_spec='-L$libdir' ;; freebsd2.[01]*) hardcode_direct=yes hardcode_minus_L=yes ;; freebsd* | dragonfly*) hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes ;; hpux9*) hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' hardcode_libdir_separator=: hardcode_direct=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L=yes ;; hpux10*) if test "$with_gnu_ld" = no; then hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' hardcode_libdir_separator=: hardcode_direct=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L=yes fi ;; hpux11*) if test "$with_gnu_ld" = no; then hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' hardcode_libdir_separator=: case $host_cpu in hppa*64*|ia64*) hardcode_direct=no ;; *) hardcode_direct=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L=yes ;; esac fi ;; irix5* | irix6* | nonstopux*) hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator=: ;; netbsd*) hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes ;; newsos6) hardcode_direct=yes hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator=: ;; *nto* | *qnx*) ;; openbsd*) if test -f /usr/libexec/ld.so; then hardcode_direct=yes if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then hardcode_libdir_flag_spec='${wl}-rpath,$libdir' else case "$host_os" in openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) hardcode_libdir_flag_spec='-R$libdir' ;; *) hardcode_libdir_flag_spec='${wl}-rpath,$libdir' ;; esac fi else ld_shlibs=no fi ;; os2*) hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes ;; osf3*) hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator=: ;; osf4* | osf5*) if test "$GCC" = yes; then hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' else # Both cc and cxx compiler support -rpath directly hardcode_libdir_flag_spec='-rpath $libdir' fi hardcode_libdir_separator=: ;; solaris*) hardcode_libdir_flag_spec='-R$libdir' ;; sunos4*) hardcode_libdir_flag_spec='-L$libdir' hardcode_direct=yes hardcode_minus_L=yes ;; sysv4) case $host_vendor in sni) hardcode_direct=yes # is this really true??? ;; siemens) hardcode_direct=no ;; motorola) hardcode_direct=no #Motorola manual says yes, but my tests say they lie ;; esac ;; sysv4.3*) ;; sysv4*MP*) if test -d /usr/nec; then ld_shlibs=yes fi ;; sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) ;; sysv5* | sco3.2v5* | sco5v6*) hardcode_libdir_flag_spec='`test -z "$SCOABSPATH" && echo ${wl}-R,$libdir`' hardcode_libdir_separator=':' ;; uts4*) hardcode_libdir_flag_spec='-L$libdir' ;; *) ld_shlibs=no ;; esac fi # Check dynamic linker characteristics # Code taken from libtool.m4's _LT_SYS_DYNAMIC_LINKER. # Unlike libtool.m4, here we don't care about _all_ names of the library, but # only about the one the linker finds when passed -lNAME. This is the last # element of library_names_spec in libtool.m4, or possibly two of them if the # linker has special search rules. 
library_names_spec= # the last element of library_names_spec in libtool.m4 libname_spec='lib$name' case "$host_os" in aix3*) library_names_spec='$libname.a' ;; aix[4-9]*) library_names_spec='$libname$shrext' ;; amigaos*) case "$host_cpu" in powerpc*) library_names_spec='$libname$shrext' ;; m68k) library_names_spec='$libname.a' ;; esac ;; beos*) library_names_spec='$libname$shrext' ;; bsdi[45]*) library_names_spec='$libname$shrext' ;; cygwin* | mingw* | pw32* | cegcc*) shrext=.dll library_names_spec='$libname.dll.a $libname.lib' ;; darwin* | rhapsody*) shrext=.dylib library_names_spec='$libname$shrext' ;; dgux*) library_names_spec='$libname$shrext' ;; freebsd[23].*) library_names_spec='$libname$shrext$versuffix' ;; freebsd* | dragonfly*) library_names_spec='$libname$shrext' ;; gnu*) library_names_spec='$libname$shrext' ;; haiku*) library_names_spec='$libname$shrext' ;; hpux9* | hpux10* | hpux11*) case $host_cpu in ia64*) shrext=.so ;; hppa*64*) shrext=.sl ;; *) shrext=.sl ;; esac library_names_spec='$libname$shrext' ;; interix[3-9]*) library_names_spec='$libname$shrext' ;; irix5* | irix6* | nonstopux*) library_names_spec='$libname$shrext' case "$host_os" in irix5* | nonstopux*) libsuff= shlibsuff= ;; *) case $LD in *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") libsuff= shlibsuff= ;; *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") libsuff=32 shlibsuff=N32 ;; *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") libsuff=64 shlibsuff=64 ;; *) libsuff= shlibsuff= ;; esac ;; esac ;; linux*oldld* | linux*aout* | linux*coff*) ;; linux* | k*bsd*-gnu | kopensolaris*-gnu) library_names_spec='$libname$shrext' ;; knetbsd*-gnu) library_names_spec='$libname$shrext' ;; netbsd*) library_names_spec='$libname$shrext' ;; newsos6) library_names_spec='$libname$shrext' ;; *nto* | *qnx*) library_names_spec='$libname$shrext' ;; openbsd*) library_names_spec='$libname$shrext$versuffix' ;; os2*) libname_spec='$name' shrext=.dll library_names_spec='$libname.a' ;; osf3* | osf4* | osf5*) library_names_spec='$libname$shrext' ;; rdos*) ;; solaris*) library_names_spec='$libname$shrext' ;; sunos4*) library_names_spec='$libname$shrext$versuffix' ;; sysv4 | sysv4.3*) library_names_spec='$libname$shrext' ;; sysv4*MP*) library_names_spec='$libname$shrext' ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) library_names_spec='$libname$shrext' ;; tpf*) library_names_spec='$libname$shrext' ;; uts4*) library_names_spec='$libname$shrext' ;; esac sed_quote_subst='s/\(["`$\\]\)/\\\1/g' escaped_wl=`echo "X$wl" | sed -e 's/^X//' -e "$sed_quote_subst"` shlibext=`echo "$shrext" | sed -e 's,^\.,,'` escaped_libname_spec=`echo "X$libname_spec" | sed -e 's/^X//' -e "$sed_quote_subst"` escaped_library_names_spec=`echo "X$library_names_spec" | sed -e 's/^X//' -e "$sed_quote_subst"` escaped_hardcode_libdir_flag_spec=`echo "X$hardcode_libdir_flag_spec" | sed -e 's/^X//' -e "$sed_quote_subst"` LC_ALL=C sed -e 's/^\([a-zA-Z0-9_]*\)=/acl_cv_\1=/' < ]) LIBS="$LDLIBS $PCSC_LIBS" AC_CHECK_FUNC(SCardEstablishContext, , [AC_MSG_ERROR([SCardEstablishContext() not found, install pcsc-lite or later,or use PCSC_LIBS=... 
./configure])]) LIBS="$OLD_LIBS" CFLAGS="$OLD_CFLAGS" else AC_MSG_WARN([pcsc-lite disabled]) fi AC_SUBST(PCSC_CFLAGS) AC_SUBST(PCSC_LIBS) AM_CONDITIONAL(HAVE_PCSC, test "x$with_pcsclite" = "xyes") # Check for SGML processor AC_ARG_WITH(docbook, AS_HELP_STRING([--without-docbook],[do not generate html manual (needs docbook)]), with_docbook=$withval) if test "$with_docbook" = "no" -o "$with_docbook" = "false" then with_docbook=no else AC_CHECK_PROG(XSLTPROC, xsltproc, xsltproc) if test "x$XSLTPROC" = "x"; then if test "x$with_docbook" = "xyes"; then AC_MSG_ERROR([Docbook support requested, but cannot find xsltproc]) fi with_docbook=no else with_docbook=yes fi fi AM_CONDITIONAL(HAVE_DOCBOOK, test "x$with_docbook" != "xno") # Checks for libraries. AC_CHECK_LIB(pam, pam_get_item, , AC_MSG_ERROR(could not locate pam libraries)) # Checks for header files. AC_HEADER_STDC AC_CHECK_HEADERS([string.h syslog.h fcntl.h unistd.h error.h]) if test "x$with_ldap" = "xyes"; then AC_CHECK_HEADERS([ldap.h]) fi if test "x$with_curl" = "xyes"; then AC_CHECK_HEADERS([curl/curl.h]) fi if test "x$with_libp11" = "xyes"; then AC_CHECK_HEADERS([libp11.h]) fi AC_CHECK_HEADERS([security/pam_ext.h]) # Checks for typedefs, structures, and compiler characteristics. AC_C_CONST AC_TYPE_SIZE_T # Checks for library functions. AC_FUNC_MALLOC AC_FUNC_REALLOC AC_FUNC_STAT AC_FUNC_VPRINTF AC_CHECK_FUNCS([memset strdup strerror strndup daemon]) AC_CONFIG_FILES([ Makefile po/Makefile.in doc/Makefile doc/pam_pkcs11.8 doc/doxygen.conf etc/Makefile etc/pam.d_login.example etc/pam_pkcs11.conf.example src/Makefile src/scconf/Makefile src/common/Makefile src/common/rsaref/Makefile src/tools/Makefile src/mappers/Makefile src/pam_pkcs11/Makefile tools/Makefile ]) AC_OUTPUT A=`eval echo ${prefix}` ; A=`eval echo ${A}` B=`eval echo ${bindir}` ; B=`eval echo ${B}` C=`eval echo ${sysconfdir}` ; C=`eval echo ${C}` D=`eval echo ${libdir}` ; D=`eval echo ${D}` echo "" echo "PAM-PKCS#11 has been configured with the following options" echo "" echo "Version: ${PACKAGE_VERSION}" echo "User binaries: ${B}" echo "Configuration files: ${C}" echo "libdir: ${D}" echo "" echo "Host: ${host}" echo "Compiler: ${CC}" echo "Compiler flags: ${CFLAGS}" echo "Preprocessor flags: ${CPPFLAGS}" echo "Linker flags: ${LDFLAGS}" echo "Libraries: ${LIBS}" echo "" echo "Debugging: ${with_debug}" echo "DocBook support: ${with_docbook}" echo "PC/SC support: ${with_pcsclite}" echo "CURL support: ${with_curl}" echo "LDAP support: ${with_ldap}" echo "NSS support: ${with_nss}" echo "OPENSSL support: ${with_ssl}" echo "confdir: ${confdir}" #echo "LIBP11 support: ${with_libp11}" echo "" OpenSC-pam_pkcs11-87edc72/doc/000077500000000000000000000000001475044520600157775ustar00rootroot00000000000000OpenSC-pam_pkcs11-87edc72/doc/Makefile.am000066400000000000000000000021331475044520600200320ustar00rootroot00000000000000# Process this file with automake to create Makefile.in MAINTAINERCLEANFILES = Makefile.in $(HTMLFILES) api/* DISTCLEANFILES = doxygen.conf XSLTPROC = @XSLTPROC@ HTMLFILES = pam_pkcs11.html mappers_api.html XMLFILES = pam_pkcs11.xml mappers_api.xml \ pam_pkcs11.xsl export-wiki.xsl \ pam_pkcs11.css MANSRC = \ pam_pkcs11.8 card_eventmgr.1 pklogin_finder.1 \ pkcs11_eventmgr.1 pkcs11_inspect.1 \ pkcs11_setup.1 pkcs11_listcerts.1 pkcs11_make_hash_link.1 man_MANS = $(MANSRC) noinst_DATA = $(HTMLFILES) doxygen.conf EXTRA_DIST = $(MANSRC) $(XMLFILES) $(HTMLFILES) doxygen.conf.in \ README.mappers README.autologin README.eventmgr \ README.ldap_mapper export-wiki.sh 
generate-api.sh \ api/index.html $(shell ls api/*) STYLESHEET = $(srcdir)/pam_pkcs11.xsl %.html: %.xml $(STYLESHEET) if HAVE_DOCBOOK $(XSLTPROC) \ --stringparam section.autolabel 1 \ --stringparam section.label.includes.component.label 1 \ -o $@ $(STYLESHEET) $< #tidy -im -utf8 -xml $@ || true else @echo "Docbook support disabled, not building $@" >&2 endif api/index.html: doxygen.conf sh $(srcdir)/generate-api.sh $(srcdir) OpenSC-pam_pkcs11-87edc72/doc/README.autologin000066400000000000000000000050241475044520600206600ustar00rootroot00000000000000EXTRACTING LOGIN FROM CERTIFICATE HOWTO --------------------------------------- Starting at pam_pkcs11-0.4.2 a new feature is provided: pam-pkcs11 can deduce user name from certificate, without login prompt. This is done when pam_get_user() call returns null or empty string. In this case, pam-pcks11 use the module mapper "find" feature instead of normal "match". If the finder list returns ok, evaluated user is set to pam via pam_set_item(PAM_USER) call, and PAM_AUTH_OK is returned. So there are no longer need to enter user name if a certificate is provided and can be mapped to an user. There are to ways to use this feature: a) Patch "gdm" and "login" programs to detect card presence and return null as user name, without prompt for an user login. This is a work to be done :-( b) Use unpatched versions, and do the following procedures: b.1) When login from console, just enter " " (space) + Enter. b.2) When login from gdm, just key Enter at login prompt. In both cases the procedure continues as: - If a card is not present, login will ask for password, and gdm will prompt again for user login - If a card is present, pam-pkcs11 will ask for the PIN, and then invoke finder in module mapper list. When a user is found, this user becomes the logged user This feature can be used with pam-mkhomedir.so PAM Session module. In this case, you can create on-the-fly accounts. This scenario is ideal for centralized auth services (Winbind, ldap, kerberos, RDBMS auth...) As example, here comes my tested /etc/pam.d/gdm file: #%PAM-1.0 auth sufficient pam_pkcs11.so debug config_file=/etc/pam_pkcs11/pam_pkcs11.conf auth required pam_env.so auth required pam_stack.so service=system-auth auth required pam_nologin.so account required pam_stack.so service=system-auth password required pam_stack.so service=system-auth session required pam_stack.so service=system-auth session optional pam_mkhomedir.so skel=/etc/skel umask=0022 session optional pam_console.so IMPORTANT NOTES: For pam_set_item(PAM_USER) success, application using pam must have enough permissions. If this condition is not met, setting user process will fail and proper log message registered. So this feature is mainly provided for logging processes running as root. Improper mapper chain configurations with unauthorized certificates can lead in the creation of fake accounts in the system if pam_mkhomedir.so module is used. So be really careful when authenticating users directly from certificates. Enjoy! OpenSC-pam_pkcs11-87edc72/doc/README.eventmgr000066400000000000000000000056771475044520600205240ustar00rootroot00000000000000Using the Card Event Manager ---------------------------- PAM-PKCS11 includes a tool "card_eventmgr" that can be used to monitor the status of the card reader and dispatch actions on several events. This program can be used for several actions, like screen lock on card removal. Note that this program has no interaction with pam-pkcs11: is just a card status monitor. 
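A quick way to try it out is to run it in the foreground with debugging
enabled, using the default configuration file and the options described
below:

  card_eventmgr debug config_file=/etc/pam_pkcs11/card_eventmgr.conf

Stop it with Ctrl-C once you have checked that the configured actions fire
on card insertion and removal.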
It's up to the sysadmin to define and configure actions to take on events. To invoke the program, just type "card_eventmgr". Several command lines are recognized: - debug - to enable debugging. Defaults to unset - daemon - to run as daemon. If debug is unset, also detach from tty. Default to unset - timeout= - time in msec between two consecutive status poll. Defaults to 1000 (1 second) - config_file= - configuration file to use. Defaults to /etc/pam_pkcs11/card_eventmgr.conf Structure of configuration file is described below: card_eventmgr { # Run in background. daemon = false; # show debug messages debug = false; # polling time in mili-seconds timeout = 1000; # # list of events and actions # Card inserted event card_insert { # what to do if an action fail? # ignore : continue to next action # return : end action sequence # quit : end program on_error = ignore ; # You can enter several, comma-separated action entries # they will be executed in turn action = "/usr/bin/play /usr/share/sounds/warning.wav", "/usr/X11R6/bin/xscreensaver-command -deactivate"; } # Card has been removed event card_remove { on_error = ignore; action = "/usr/bin/play /usr/share/sounds/error.wav", "/usr/X11R6/bin/xscreensaver-command -lock"; } # Too much time locked session event timeout { } } As you can see, on each event you can define a list of actions, and what to do if an action fails. SECURITY ISSUES: The best way to start card monitoring is at user login into the system. If so, note that all event commands will be executed with user privileges. So is up to the user to take care that he has the rights to execute the desired actions. EXAMPLE: use xscreensaver to lock the screen at card removal you can use the provided configuration sample file. Just add to your .xsession or KDE/GNOME Autostart directory an invocation to card_eventmgr in daemon mode. Additionally you can add this entry to /etc/pam.d/xscreensaver configuration: #%PAM-1.0 # Red Hat says this is right for them, as of 7.3: auth sufficient pam_pkcs11.so debug config_file=/etc/pam_pkcs11/pam_pkcs11.conf auth required pam_stack.so service=system-auth # This is what we were using before: # auth required pam_pwdb.so shadow nullok At pam-pkcs11-0.4.3 handling of timeout event is not managed yet In this case, when the card is removed the X screen will be locked. When the card is re-inserted, screen will prompt for the card PIN, check it and if access is granted the screen will unlock. OpenSC-pam_pkcs11-87edc72/doc/README.ldap_mapper000066400000000000000000000066221475044520600211500ustar00rootroot00000000000000Sample config ============= pam_pkcs11.conf: ---------------- (...) # Directory ( ldap style ) mapper mapper ldap { debug = false; module = /usr/lib/pam_pkcs11/ldap_mapper.so; # where base directory resides basedir = /etc/pam_pkcs11/mapdir; # hostname of ldap server ldaphost = "localhost"; # Port on ldap server to connect ldapport = 389; # Scope of search: 0 = x, 1 = y, 2 = z scope = 2; # DN to bind with. Must have read-access for user entries under "base" binddn = "cn=pam,o=example,c=com"; # Password for above DN passwd = "test"; # Searchbase for user entries base = "ou=People,o=example,c=com"; # Attribute of user entry which contains the certificate attribute = "userCertificate"; # Searchfilter for user entry. Must only let pass user entry for the login user. 
filter = "(&(objectClass=posixAccount)(uid=%s))" # Attribute of user entry which contains the user's login name (optional) uid_attribute = "uid"; # List of sets of ldap attribute / cert attribute pairs (optional) attribute_map = "uid=uid&mail=email", "krbprincipalname=upn", "userCertificate;binary=cert"; } (...) Sample structure of the LDAP entries ==================================== /etc/openldap/slapd.conf: ------------------------- include /etc/openldap/schema/core.schema include /etc/openldap/schema/cosine.schema include /etc/openldap/schema/inetorgperson.schema include /etc/openldap/schema/nis.schema allow bind_v2 pidfile /var/run/slapd.pid argsfile /var/run/slapd.args access to dn.base="" by * read access to dn.base="ou=People" by dn=cn=pam,o=example,c=com read access to * by self write by users read by anonymous auth database bdb suffix "o=example,c=com" rootdn "cn=root,o=example,c=com" rootpw {SSHA}**** directory /var/lib/ldap index objectClass eq,pres index ou,cn,mail,surname,givenname eq,pres,sub index uidNumber,gidNumber,loginShell eq,pres index uid,memberUid eq,pres,sub index nisMapName,nisMapEntry eq,pres,sub initial.ldif: ------------- dn: o=example,c=com objectClass: dcObject objectClass: organization o: example dc: example dn: ou=People,o=example,c=com ou: People objectclass: organizationalUnit dn: ou=Groups,o=example,c=com ou: Groups objectclass: organizationalUnit pam.user.ldif: -------------- dn: uid=pam,o=example,c=com uid: pam givenName: Pamela sn: Anderson userPassword: pam loginShell: /bin/false uidNumber: 999999 gidNumber: 999999 homeDirectory: /tmp shadowMin: -1 shadowMax: 999999 shadowWarning: 7 shadowInactive: -1 shadowExpire: -1 shadowFlag: 0 objectClass: top objectClass: person objectClass: posixAccount objectClass: shadowAccount objectClass: inetOrgPerson cn: pam sample.user.ldif: ----------------- dn: uid=testuser,ou=People,o=example,c=com uid: testuser givenName: Test sn: User cn: Test User userPassword: abcde loginShell: /bin/bash uidNumber: 1000 gidNumber: 1000 homeDirectory: /home/testuser shadowMin: -1 shadowMax: 999999 shadowWarning: 7 shadowInactive: -1 shadowExpire: -1 shadowFlag: 0 objectClass: top objectClass: person objectClass: posixAccount objectClass: shadowAccount objectClass: inetOrgPerson userCertificate;binary:: MIIELD (...) Bg== OpenSC-pam_pkcs11-87edc72/doc/README.mappers000066400000000000000000000151031475044520600203250ustar00rootroot00000000000000What is a cert mapper? ---------------------- When an X509 Certificate is provided, there are no direct way to map a cert to a login. With a certificate we can check validity and revocation, but user mapping depends entirely on the certificate content. So we need a configurable, stackable, and definable way to specify cert-to-user mapping. pam-pkcs11 cert mappers provides several functions: 1- Deduce a login from certificate 2- Test if a login and a certificate match 3- Look into the certificate for an specific data Normal pam-pkcs11 login process involves the following procedures - Enter login - Ask for PIN - Open and validate certificate - Map certificate into an user (*) - Check if login and user matches (**) An alternate way of working is by mean of not providing user name: - Detect if a card is inserted - Ask for PIN - Open and validate certificate - Map certificate into an user (*) - open session for deduced login Last way needs an additional pam-mkhomedir.so PAM module, that can dynamically create an account. Operations (*) and (**) are the reason for cert-mappers to exist. 
Implementation of cert mappers in pam-pkcs11 -------------------------------------------- pam-pkcs11 implements cert mapper in form of several stackable modules. Most of them are statically linked; those that depends on external libraries are provided as dynamic loadable ones You can add as many modules as desired, and the system will try all of them in turn, until a match succeed, or end of list is reached. the mapper list is defined in the configuration file: pam-pkcs11 { .... mapper_list = mapper1 [ [[,] mapper2 ] ... ] ; mapper mapper1 { module = /path/to/module.so; [ additional mapper dependent options ] } } Unless you are going to use an internal (static) module with default values, you should provide a entry for every declared mapper "module" entry is mandatory: is tells pam_pkcs11 where to find the dynamic library (or equals to "internal" if static module is used). Additional entries can be defined but are module dependent. Provided Mappers ---------------- Actually pam_opensc provides the following mapper modules: cn - Assumes CN field on certificate to be the login name * When used as finder, the module returns the first CN field found or NULL * When used as matcher, it parses the certificate and compare all CN fields found against the provided login name, returning OK if a match is found file - Parse a file to get a list of CN -> login pairs * When used as finder, retrieve first CN field on certificate, and use it to read provided file to get a CN to login mapping * When used as matcher, maps every CN entry on the certificate to a login, and compare this login with provided by PAM pw - Compare CN against getpwent() "login" or "gecos" fields to match user login * When used as finder use getpwent() system call to retrieve every users on the system. if pw_name or pw_gecos fields match with CN pw_name is returned as login name * When used as matcher, maps CN to an user with via the finder and matches result with login name provided by PAM, returning the result (match or not) Note: newer implementations of getpwent() libraries, use an additional Name Service Switch (NSS) infrastructure, that allows admins to specify how to obtain the requested data. This means you can setup /etc/nsswitch.conf password entries to lookup in to /etc/passwd, or ldap/kerberos/NIS+/YP services ldap - Uses an ldap server to retrieve user name. An additional file tells module the mapping between Cert fields and LDAP entries This mapper is still under development. Provided one just search for certificates, incoming one will ask for "any" certificate content opensc - Search the certificate ${HOME}/.ssh/autorized_certificates in a similar way as OpenSC does. openssh - Search the certificate public key in ${HOME}/.ssh/autorized_keys in a similar way as OpenSSH does. mail - Try to extract an e-mail from the certificate. If found, analyze it against an "aliases" (email to login) list if "use_alias" is set an aliases file is provided, the module tries to map the email field from the certificate to a user (or alternate email). if use_alias is not set, just use email address from certificate to perform find/match. * When used as finder, just return email or mapped email/user (see above) * When used as matcher, compare found email/user against provided by pam. Additionally you can set "ignorecase" or "ignoredomain" flags domain check (if set) is done by testing if provided email domain part (@ie.this.domain) matches host domain. 
Eg "user@my.company.com" email in host "ahost.in.my.company.com" host matches domain (NOTE: at version 0.4.1 use_alias is still under development) ms - Try to find and use Microsoft Universal Principal Name extension as login name * When used as finder, returns UPN as login name * when used as matcher compares UPN against PAM provided login krb - Try to find and use Kerberos Principal Name as login name uid - Use Unique ID field (if found) as login name Similar to CN mapper, but using UID as field to find/match null - blind access/deny mapper. If "always_match" is set to true: * When used as finder always returns "nobody" * When used as matcher always returns OK If "always_match" is set to false: * When used as finder always returns NULL * When used as matcher always returns FAIL See the provided pam_pkcs11.conf.example file to see module flags Adding new mappers ------------------ Creating new mappers is easy: just read provided Mapper API file, edit skeleton sample files and follow instructions on how to compile and link Mapper.h provides default implementation for required some functions. They should be overridden by user code, but can be used for testing purposes Wish list --------- - Implement PKINIT draft protocol for talking to a kerberos server - Use MS Universal Principal Name to authenticate against an MS Active directory server - Implement mail_aliases parsing for mail mapper module Further information ------------------- Please, send mail with patches, comments and suggestions to Juan Antonio Martinez or even better, to OpenSC development mailing list opensc-devel@list.opensc-project.org OpenSC-pam_pkcs11-87edc72/doc/card_eventmgr.1000066400000000000000000000044171475044520600207070ustar00rootroot00000000000000.TH card_eventmgr 1 "Aug 2005" "Juan Antonio Martinez" PAM-PKCS#11 .SH NAME card_eventmgr \- PCSC\-Lite Event Manager .SH SYNTAX .B card_eventmgr .RB [ debug ] .RB [[ no ] daemon ] .RB [ timeout=\fI\fP ] .RB [ config_file=\fI\fP ] .RB [ kill ] .RB [ pidfile=\fI\fP ] .SH DESCRIPTION .B card_eventmgr is a smart card monitoring tool that listen to the status of the card reader and dispatch actions on several events. .B card_eventmgr can be used for several actions: like screen lock on card removal. .P Three events are supported: card insertion, card removal and timeout on removed card. Actions are specified in a configuration file. .SH OPTIONS .TP .B debug Enable debugging output. .TP .RB [ no ] daemon Runs in background if daemon or in foreground if nodaemon (default). If debug is unset, the program detaches itself from the tty. .TP .BI timeout= Set polling timeout in milliseconds. Defaults to 1000 (1 second). .TP .BI config_file= "" Sets de configuration file. Default value is .IR /etc/pam_pkcs11/card_eventmgr.conf . .TP .BI pidfile= Store the .B card_eventmgr process ID (pid) in the file .IR pidfile . .TP .B kill Read a process id from .I pidfile and kill that process. You must use the argument .BI pidfile= to use .BR kill . 
.SH FILES
\fI/etc/pam_pkcs11/card_eventmgr.conf\fP
.SH EXAMPLES
To run this program the standard way, type:
.P
card_eventmgr
.P
Alternatively you can specify options:
.P
card_eventmgr debug nodaemon timeout=500 config_file=$HOME/.card_eventmgr.conf
.P
If you want to start and stop card_eventmgr automatically in an X11 session
you can create a \fI~/.xsession\fR file containing:

# start the card autolock
card_eventmgr pidfile=$HOME/.card_eventmgr.pid
# start Gnome or something else
/usr/bin/x-session-manager
# kill the card autolock
card_eventmgr kill pidfile=$HOME/.card_eventmgr.pid

.SH BUGS
Some applications like
.B xscreensaver\-command
may fail due to external events (e.g. trying to unlock an unlocked session).
In this case, the command incorrectly returns an error code.
.SH AUTHORS
Juan Antonio Martinez
.br
Ludovic Rousseau
.SH "SEE ALSO"
.BR pam_pkcs11 (8),
.BR pkcs11_eventmgr (1),
README.eventmgr, PAM\-PKCS11 User Manual

OpenSC-pam_pkcs11-87edc72/doc/doxygen.conf.in000066400000000000000000003501101475044520600207300ustar00rootroot00000000000000# Doxyfile 1.9.4 # This file describes the settings to be used by the documentation system # doxygen (www.doxygen.org) for a project. # # All text after a double hash (##) is considered a comment and is placed in # front of the TAG it is preceding. # # All text after a single hash (#) is considered a comment and will be ignored. # The format is: # TAG = value [value, ...] # For lists, items can also be appended using: # TAG += value [value, ...] # Values that contain spaces should be placed between quotes (\" \"). # # Note: # # Use doxygen to compare the used configuration file with the template # configuration file: # doxygen -x [configFile] # Use doxygen to compare the used configuration file with the template # configuration file without replacing the environment variables: # doxygen -x_noenv [configFile] #--------------------------------------------------------------------------- # Project related configuration options #--------------------------------------------------------------------------- # This tag specifies the encoding used for all characters in the configuration # file that follow. The default is UTF-8 which is also the encoding used for all # text before the first occurrence of this tag. Doxygen uses libiconv (or the # iconv built into libc) for the transcoding. See # https://www.gnu.org/software/libiconv/ for the list of possible encodings. # The default value is: UTF-8. DOXYFILE_ENCODING = UTF-8 # The PROJECT_NAME tag is a single word (or a sequence of words surrounded by # double-quotes, unless you are using Doxywizard) that should identify the # project for which the documentation is generated. This name is used in the # title of most generated pages and in a few other places. # The default value is: My Project. PROJECT_NAME = pam_pkcs11 # The PROJECT_NUMBER tag can be used to enter a project or revision number. This # could be handy for archiving the generated documentation or if some version # control system is used. PROJECT_NUMBER = @PACKAGE_VERSION@ # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a # quick idea about the purpose of the project. Keep the description short. PROJECT_BRIEF = # With the PROJECT_LOGO tag one can specify a logo or an icon that is included # in the documentation. The maximum height of the logo should not exceed 55 # pixels and the maximum width should not exceed 200 pixels.
Doxygen will copy # the logo to the output directory. PROJECT_LOGO = # The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path # into which the generated documentation will be written. If a relative path is # entered, it will be relative to the location where doxygen was started. If # left blank the current directory will be used. OUTPUT_DIRECTORY = doc # If the CREATE_SUBDIRS tag is set to YES then doxygen will create up to 4096 # sub-directories (in 2 levels) under the output directory of each output format # and will distribute the generated files over these directories. Enabling this # option can be useful when feeding doxygen a huge amount of source files, where # putting all generated files in the same directory would otherwise causes # performance problems for the file system. Adapt CREATE_SUBDIRS_LEVEL to # control the number of sub-directories. # The default value is: NO. CREATE_SUBDIRS = NO # Controls the number of sub-directories that will be created when # CREATE_SUBDIRS tag is set to YES. Level 0 represents 16 directories, and every # level increment doubles the number of directories, resulting in 4096 # directories at level 8 which is the default and also the maximum value. The # sub-directories are organized in 2 levels, the first level always has a fixed # number of 16 directories. # Minimum value: 0, maximum value: 8, default value: 8. # This tag requires that the tag CREATE_SUBDIRS is set to YES. CREATE_SUBDIRS_LEVEL = 8 # If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII # characters to appear in the names of generated files. If set to NO, non-ASCII # characters will be escaped, for example _xE3_x81_x84 will be used for Unicode # U+3044. # The default value is: NO. ALLOW_UNICODE_NAMES = NO # The OUTPUT_LANGUAGE tag is used to specify the language in which all # documentation generated by doxygen is written. Doxygen will use this # information to generate all constant output in the proper language. # Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Bulgarian, # Catalan, Chinese, Chinese-Traditional, Croatian, Czech, Danish, Dutch, English # (United States), Esperanto, Farsi (Persian), Finnish, French, German, Greek, # Hindi, Hungarian, Indonesian, Italian, Japanese, Japanese-en (Japanese with # English messages), Korean, Korean-en (Korean with English messages), Latvian, # Lithuanian, Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, # Romanian, Russian, Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, # Swedish, Turkish, Ukrainian and Vietnamese. # The default value is: English. OUTPUT_LANGUAGE = English # If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member # descriptions after the members that are listed in the file and class # documentation (similar to Javadoc). Set to NO to disable this. # The default value is: YES. BRIEF_MEMBER_DESC = YES # If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief # description of a member or function before the detailed description # # Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the # brief descriptions will be completely suppressed. # The default value is: YES. REPEAT_BRIEF = YES # This tag implements a quasi-intelligent brief description abbreviator that is # used to form the text in various listings. Each string in this list, if found # as the leading text of the brief description, will be stripped from the text # and the result, after processing the whole list, is used as the annotated # text. 
Otherwise, the brief description is used as-is. If left blank, the # following values are used ($name is automatically replaced with the name of # the entity):The $name class, The $name widget, The $name file, is, provides, # specifies, contains, represents, a, an and the. ABBREVIATE_BRIEF = # If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then # doxygen will generate a detailed section even if there is only a brief # description. # The default value is: NO. ALWAYS_DETAILED_SEC = NO # If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all # inherited members of a class in the documentation of that class as if those # members were ordinary class members. Constructors, destructors and assignment # operators of the base classes will not be shown. # The default value is: NO. INLINE_INHERITED_MEMB = NO # If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path # before files name in the file list and in the header files. If set to NO the # shortest path that makes the file name unique will be used # The default value is: YES. FULL_PATH_NAMES = NO # The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. # Stripping is only done if one of the specified strings matches the left-hand # part of the path. The tag can be used to show relative paths in the file list. # If left blank the directory from which doxygen is run is used as the path to # strip. # # Note that you can specify absolute paths here, but also relative paths, which # will be relative from the directory where doxygen is started. # This tag requires that the tag FULL_PATH_NAMES is set to YES. STRIP_FROM_PATH = # The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the # path mentioned in the documentation of a class, which tells the reader which # header file to include in order to use a class. If left blank only the name of # the header file containing the class definition is used. Otherwise one should # specify the list of include paths that are normally passed to the compiler # using the -I flag. STRIP_FROM_INC_PATH = # If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but # less readable) file names. This can be useful is your file systems doesn't # support long names like on DOS, Mac, or CD-ROM. # The default value is: NO. SHORT_NAMES = NO # If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the # first line (until the first dot) of a Javadoc-style comment as the brief # description. If set to NO, the Javadoc-style will behave just like regular Qt- # style comments (thus requiring an explicit @brief command for a brief # description.) # The default value is: NO. JAVADOC_AUTOBRIEF = YES # If the JAVADOC_BANNER tag is set to YES then doxygen will interpret a line # such as # /*************** # as being the beginning of a Javadoc-style comment "banner". If set to NO, the # Javadoc-style will behave just like regular comments and it will not be # interpreted by doxygen. # The default value is: NO. JAVADOC_BANNER = NO # If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first # line (until the first dot) of a Qt-style comment as the brief description. If # set to NO, the Qt-style will behave just like regular Qt-style comments (thus # requiring an explicit \brief command for a brief description.) # The default value is: NO. QT_AUTOBRIEF = NO # The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a # multi-line C++ special comment block (i.e. a block of //! 
or /// comments) as # a brief description. This used to be the default behavior. The new default is # to treat a multi-line C++ comment block as a detailed description. Set this # tag to YES if you prefer the old behavior instead. # # Note that setting this tag to YES also means that rational rose comments are # not recognized any more. # The default value is: NO. MULTILINE_CPP_IS_BRIEF = NO # By default Python docstrings are displayed as preformatted text and doxygen's # special commands cannot be used. By setting PYTHON_DOCSTRING to NO the # doxygen's special commands can be used and the contents of the docstring # documentation blocks is shown as doxygen documentation. # The default value is: YES. PYTHON_DOCSTRING = YES # If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the # documentation from any documented member that it re-implements. # The default value is: YES. INHERIT_DOCS = YES # If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new # page for each member. If set to NO, the documentation of a member will be part # of the file/class/namespace that contains it. # The default value is: NO. SEPARATE_MEMBER_PAGES = NO # The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen # uses this value to replace tabs by spaces in code fragments. # Minimum value: 1, maximum value: 16, default value: 4. TAB_SIZE = 8 # This tag can be used to specify a number of aliases that act as commands in # the documentation. An alias has the form: # name=value # For example adding # "sideeffect=@par Side Effects:^^" # will allow you to put the command \sideeffect (or @sideeffect) in the # documentation, which will result in a user-defined paragraph with heading # "Side Effects:". Note that you cannot put \n's in the value part of an alias # to insert newlines (in the resulting output). You can put ^^ in the value part # of an alias to insert a newline as if a physical newline was in the original # file. When you need a literal { or } or , in the value part of an alias you # have to escape them by means of a backslash (\), this can lead to conflicts # with the commands \{ and \} for these it is advised to use the version @{ and # @} or use a double escape (\\{ and \\}) ALIASES = # Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources # only. Doxygen will then generate output that is more tailored for C. For # instance, some of the names that are used will be different. The list of all # members will be omitted, etc. # The default value is: NO. OPTIMIZE_OUTPUT_FOR_C = YES # Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or # Python sources only. Doxygen will then generate output that is more tailored # for that language. For instance, namespaces will be presented as packages, # qualified scopes will look different, etc. # The default value is: NO. OPTIMIZE_OUTPUT_JAVA = NO # Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran # sources. Doxygen will then generate output that is tailored for Fortran. # The default value is: NO. OPTIMIZE_FOR_FORTRAN = NO # Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL # sources. Doxygen will then generate output that is tailored for VHDL. # The default value is: NO. OPTIMIZE_OUTPUT_VHDL = NO # Set the OPTIMIZE_OUTPUT_SLICE tag to YES if your project consists of Slice # sources only. Doxygen will then generate output that is more tailored for that # language. 
For instance, namespaces will be presented as modules, types will be # separated into more groups, etc. # The default value is: NO. OPTIMIZE_OUTPUT_SLICE = NO # Doxygen selects the parser to use depending on the extension of the files it # parses. With this tag you can assign which parser to use for a given # extension. Doxygen has a built-in mapping, but you can override or extend it # using this tag. The format is ext=language, where ext is a file extension, and # language is one of the parsers supported by doxygen: IDL, Java, JavaScript, # Csharp (C#), C, C++, Lex, D, PHP, md (Markdown), Objective-C, Python, Slice, # VHDL, Fortran (fixed format Fortran: FortranFixed, free formatted Fortran: # FortranFree, unknown formatted Fortran: Fortran. In the later case the parser # tries to guess whether the code is fixed or free formatted code, this is the # default for Fortran type files). For instance to make doxygen treat .inc files # as Fortran files (default is PHP), and .f files as C (default is Fortran), # use: inc=Fortran f=C. # # Note: For files without extension you can use no_extension as a placeholder. # # Note that for custom extensions you also need to set FILE_PATTERNS otherwise # the files are not read by doxygen. When specifying no_extension you should add # * to the FILE_PATTERNS. # # Note see also the list of default file extension mappings. EXTENSION_MAPPING = # If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments # according to the Markdown format, which allows for more readable # documentation. See https://daringfireball.net/projects/markdown/ for details. # The output of markdown processing is further processed by doxygen, so you can # mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in # case of backward compatibilities issues. # The default value is: YES. MARKDOWN_SUPPORT = YES # When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up # to that level are automatically included in the table of contents, even if # they do not have an id attribute. # Note: This feature currently applies only to Markdown headings. # Minimum value: 0, maximum value: 99, default value: 5. # This tag requires that the tag MARKDOWN_SUPPORT is set to YES. TOC_INCLUDE_HEADINGS = 5 # When enabled doxygen tries to link words that correspond to documented # classes, or namespaces to their corresponding documentation. Such a link can # be prevented in individual cases by putting a % sign in front of the word or # globally by setting AUTOLINK_SUPPORT to NO. # The default value is: YES. AUTOLINK_SUPPORT = YES # If you use STL classes (i.e. std::string, std::vector, etc.) but do not want # to include (a tag file for) the STL sources as input, then you should set this # tag to YES in order to let doxygen match functions declarations and # definitions whose arguments contain STL classes (e.g. func(std::string); # versus func(std::string) {}). This also make the inheritance and collaboration # diagrams that involve STL classes more complete and accurate. # The default value is: NO. BUILTIN_STL_SUPPORT = NO # If you use Microsoft's C++/CLI language, you should set this option to YES to # enable parsing support. # The default value is: NO. CPP_CLI_SUPPORT = NO # Set the SIP_SUPPORT tag to YES if your project consists of sip (see: # https://www.riverbankcomputing.com/software/sip/intro) sources only. 
Doxygen # will parse them like normal C++ but will assume all classes use public instead # of private inheritance when no explicit protection keyword is present. # The default value is: NO. SIP_SUPPORT = NO # For Microsoft's IDL there are propget and propput attributes to indicate # getter and setter methods for a property. Setting this option to YES will make # doxygen to replace the get and set methods by a property in the documentation. # This will only work if the methods are indeed getting or setting a simple # type. If this is not the case, or you want to show the methods anyway, you # should set this option to NO. # The default value is: YES. IDL_PROPERTY_SUPPORT = YES # If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC # tag is set to YES then doxygen will reuse the documentation of the first # member in the group (if any) for the other members of the group. By default # all members of a group must be documented explicitly. # The default value is: NO. DISTRIBUTE_GROUP_DOC = NO # If one adds a struct or class to a group and this option is enabled, then also # any nested class or struct is added to the same group. By default this option # is disabled and one has to add nested compounds explicitly via \ingroup. # The default value is: NO. GROUP_NESTED_COMPOUNDS = NO # Set the SUBGROUPING tag to YES to allow class member groups of the same type # (for instance a group of public functions) to be put as a subgroup of that # type (e.g. under the Public Functions section). Set it to NO to prevent # subgrouping. Alternatively, this can be done per class using the # \nosubgrouping command. # The default value is: YES. SUBGROUPING = YES # When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions # are shown inside the group in which they are included (e.g. using \ingroup) # instead of on a separate page (for HTML and Man pages) or section (for LaTeX # and RTF). # # Note that this feature does not work in combination with # SEPARATE_MEMBER_PAGES. # The default value is: NO. INLINE_GROUPED_CLASSES = NO # When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions # with only public data fields or simple typedef fields will be shown inline in # the documentation of the scope in which they are defined (i.e. file, # namespace, or group documentation), provided this scope is documented. If set # to NO, structs, classes, and unions are shown on a separate page (for HTML and # Man pages) or section (for LaTeX and RTF). # The default value is: NO. INLINE_SIMPLE_STRUCTS = NO # When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or # enum is documented as struct, union, or enum with the name of the typedef. So # typedef struct TypeS {} TypeT, will appear in the documentation as a struct # with name TypeT. When disabled the typedef will appear as a member of a file, # namespace, or class. And the struct will be named TypeS. This can typically be # useful for C code in case the coding convention dictates that all compound # types are typedef'ed and only the typedef is referenced, never the tag name. # The default value is: NO. TYPEDEF_HIDES_STRUCT = NO # The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This # cache is used to resolve symbols given their name and scope. Since this can be # an expensive process and often the same symbol appears multiple times in the # code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small # doxygen will become slower. 
If the cache is too large, memory is wasted. The # cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range # is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 # symbols. At the end of a run doxygen will report the cache usage and suggest # the optimal cache size from a speed point of view. # Minimum value: 0, maximum value: 9, default value: 0. LOOKUP_CACHE_SIZE = 0 # The NUM_PROC_THREADS specifies the number of threads doxygen is allowed to use # during processing. When set to 0 doxygen will based this on the number of # cores available in the system. You can set it explicitly to a value larger # than 0 to get more control over the balance between CPU load and processing # speed. At this moment only the input processing can be done using multiple # threads. Since this is still an experimental feature the default is set to 1, # which effectively disables parallel processing. Please report any issues you # encounter. Generating dot graphs in parallel is controlled by the # DOT_NUM_THREADS setting. # Minimum value: 0, maximum value: 32, default value: 1. NUM_PROC_THREADS = 1 #--------------------------------------------------------------------------- # Build related configuration options #--------------------------------------------------------------------------- # If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in # documentation are documented, even if no documentation was available. Private # class members and static file members will be hidden unless the # EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. # Note: This will also disable the warnings about undocumented members that are # normally produced when WARNINGS is set to YES. # The default value is: NO. EXTRACT_ALL = YES # If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will # be included in the documentation. # The default value is: NO. EXTRACT_PRIVATE = NO # If the EXTRACT_PRIV_VIRTUAL tag is set to YES, documented private virtual # methods of a class will be included in the documentation. # The default value is: NO. EXTRACT_PRIV_VIRTUAL = NO # If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal # scope will be included in the documentation. # The default value is: NO. EXTRACT_PACKAGE = NO # If the EXTRACT_STATIC tag is set to YES, all static members of a file will be # included in the documentation. # The default value is: NO. EXTRACT_STATIC = YES # If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined # locally in source files will be included in the documentation. If set to NO, # only classes defined in header files are included. Does not have any effect # for Java sources. # The default value is: YES. EXTRACT_LOCAL_CLASSES = YES # This flag is only useful for Objective-C code. If set to YES, local methods, # which are defined in the implementation section but not in the interface are # included in the documentation. If set to NO, only methods in the interface are # included. # The default value is: NO. EXTRACT_LOCAL_METHODS = NO # If this flag is set to YES, the members of anonymous namespaces will be # extracted and appear in the documentation as a namespace called # 'anonymous_namespace{file}', where file will be replaced with the base name of # the file that contains the anonymous namespace. By default anonymous namespace # are hidden. # The default value is: NO. 
EXTRACT_ANON_NSPACES = NO # If this flag is set to YES, the name of an unnamed parameter in a declaration # will be determined by the corresponding definition. By default unnamed # parameters remain unnamed in the output. # The default value is: YES. RESOLVE_UNNAMED_PARAMS = YES # If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all # undocumented members inside documented classes or files. If set to NO these # members will be included in the various overviews, but no documentation # section is generated. This option has no effect if EXTRACT_ALL is enabled. # The default value is: NO. HIDE_UNDOC_MEMBERS = NO # If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all # undocumented classes that are normally visible in the class hierarchy. If set # to NO, these classes will be included in the various overviews. This option # has no effect if EXTRACT_ALL is enabled. # The default value is: NO. HIDE_UNDOC_CLASSES = NO # If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend # declarations. If set to NO, these declarations will be included in the # documentation. # The default value is: NO. HIDE_FRIEND_COMPOUNDS = NO # If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any # documentation blocks found inside the body of a function. If set to NO, these # blocks will be appended to the function's detailed documentation block. # The default value is: NO. HIDE_IN_BODY_DOCS = NO # The INTERNAL_DOCS tag determines if documentation that is typed after a # \internal command is included. If the tag is set to NO then the documentation # will be excluded. Set it to YES to include the internal documentation. # The default value is: NO. INTERNAL_DOCS = NO # With the correct setting of option CASE_SENSE_NAMES doxygen will better be # able to match the capabilities of the underlying filesystem. In case the # filesystem is case sensitive (i.e. it supports files in the same directory # whose names only differ in casing), the option must be set to YES to properly # deal with such files in case they appear in the input. For filesystems that # are not case sensitive the option should be set to NO to properly deal with # output files written for symbols that only differ in casing, such as for two # classes, one named CLASS and the other named Class, and to also support # references to files without having to specify the exact matching casing. On # Windows (including Cygwin) and MacOS, users should typically set this option # to NO, whereas on Linux or other Unix flavors it should typically be set to # YES. # The default value is: system dependent. CASE_SENSE_NAMES = YES # If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with # their full class and namespace scopes in the documentation. If set to YES, the # scope will be hidden. # The default value is: NO. HIDE_SCOPE_NAMES = YES # If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will # append additional text to a page's title, such as Class Reference. If set to # YES the compound reference will be hidden. # The default value is: NO. HIDE_COMPOUND_REFERENCE= NO # If the SHOW_HEADERFILE tag is set to YES then the documentation for a class # will show which file needs to be included to use the class. # The default value is: YES. SHOW_HEADERFILE = YES # If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of # the files that are included by a file in the documentation of that file. # The default value is: YES. 
SHOW_INCLUDE_FILES = YES # If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each # grouped member an include statement to the documentation, telling the reader # which file to include in order to use the member. # The default value is: NO. SHOW_GROUPED_MEMB_INC = NO # If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include # files with double quotes in the documentation rather than with sharp brackets. # The default value is: NO. FORCE_LOCAL_INCLUDES = NO # If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the # documentation for inline members. # The default value is: YES. INLINE_INFO = YES # If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the # (detailed) documentation of file and class members alphabetically by member # name. If set to NO, the members will appear in declaration order. # The default value is: YES. SORT_MEMBER_DOCS = YES # If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief # descriptions of file, namespace and class members alphabetically by member # name. If set to NO, the members will appear in declaration order. Note that # this will also influence the order of the classes in the class list. # The default value is: NO. SORT_BRIEF_DOCS = NO # If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the # (brief and detailed) documentation of class members so that constructors and # destructors are listed first. If set to NO the constructors will appear in the # respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. # Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief # member documentation. # Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting # detailed member documentation. # The default value is: NO. SORT_MEMBERS_CTORS_1ST = NO # If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy # of group names into alphabetical order. If set to NO the group names will # appear in their defined order. # The default value is: NO. SORT_GROUP_NAMES = NO # If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by # fully-qualified names, including namespaces. If set to NO, the class list will # be sorted only by class name, not including the namespace part. # Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. # Note: This option applies only to the class list, not to the alphabetical # list. # The default value is: NO. SORT_BY_SCOPE_NAME = NO # If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper # type resolution of all parameters of a function it will reject a match between # the prototype and the implementation of a member function even if there is # only one candidate or it is obvious which candidate to choose by doing a # simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still # accept a match between prototype and implementation in such cases. # The default value is: NO. STRICT_PROTO_MATCHING = NO # The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo # list. This list is created by putting \todo commands in the documentation. # The default value is: YES. GENERATE_TODOLIST = YES # The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test # list. This list is created by putting \test commands in the documentation. # The default value is: YES. 
GENERATE_TESTLIST = YES # The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug # list. This list is created by putting \bug commands in the documentation. # The default value is: YES. GENERATE_BUGLIST = YES # The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO) # the deprecated list. This list is created by putting \deprecated commands in # the documentation. # The default value is: YES. GENERATE_DEPRECATEDLIST= YES # The ENABLED_SECTIONS tag can be used to enable conditional documentation # sections, marked by \if ... \endif and \cond # ... \endcond blocks. ENABLED_SECTIONS = # The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the # initial value of a variable or macro / define can have for it to appear in the # documentation. If the initializer consists of more lines than specified here # it will be hidden. Use a value of 0 to hide initializers completely. The # appearance of the value of individual variables and macros / defines can be # controlled using \showinitializer or \hideinitializer command in the # documentation regardless of this setting. # Minimum value: 0, maximum value: 10000, default value: 30. MAX_INITIALIZER_LINES = 30 # Set the SHOW_USED_FILES tag to NO to disable the list of files generated at # the bottom of the documentation of classes and structs. If set to YES, the # list will mention the files that were used to generate the documentation. # The default value is: YES. SHOW_USED_FILES = YES # Set the SHOW_FILES tag to NO to disable the generation of the Files page. This # will remove the Files entry from the Quick Index and from the Folder Tree View # (if specified). # The default value is: YES. SHOW_FILES = YES # Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces # page. This will remove the Namespaces entry from the Quick Index and from the # Folder Tree View (if specified). # The default value is: YES. SHOW_NAMESPACES = YES # The FILE_VERSION_FILTER tag can be used to specify a program or script that # doxygen should invoke to get the current version for each file (typically from # the version control system). Doxygen will invoke the program by executing (via # popen()) the command command input-file, where command is the value of the # FILE_VERSION_FILTER tag, and input-file is the name of an input file provided # by doxygen. Whatever the program writes to standard output is used as the file # version. For an example see the documentation. FILE_VERSION_FILTER = # The LAYOUT_FILE tag can be used to specify a layout file which will be parsed # by doxygen. The layout file controls the global structure of the generated # output files in an output format independent way. To create the layout file # that represents doxygen's defaults, run doxygen with the -l option. You can # optionally specify a file name after the option, if omitted DoxygenLayout.xml # will be used as the name of the layout file. See also section "Changing the # layout of pages" for information. # # Note that if you run doxygen from a directory containing a file called # DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE # tag is left empty. LAYOUT_FILE = # The CITE_BIB_FILES tag can be used to specify one or more bib files containing # the reference definitions. This must be a list of .bib files. The .bib # extension is automatically appended if omitted. This requires the bibtex tool # to be installed. See also https://en.wikipedia.org/wiki/BibTeX for more info. 
# For LaTeX the style of the bibliography can be controlled using # LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the # search path. See also \cite for info how to create references. CITE_BIB_FILES = #--------------------------------------------------------------------------- # Configuration options related to warning and progress messages #--------------------------------------------------------------------------- # The QUIET tag can be used to turn on/off the messages that are generated to # standard output by doxygen. If QUIET is set to YES this implies that the # messages are off. # The default value is: NO. QUIET = NO # The WARNINGS tag can be used to turn on/off the warning messages that are # generated to standard error (stderr) by doxygen. If WARNINGS is set to YES # this implies that the warnings are on. # # Tip: Turn warnings on while writing the documentation. # The default value is: YES. WARNINGS = YES # If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate # warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag # will automatically be disabled. # The default value is: YES. WARN_IF_UNDOCUMENTED = NO # If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for # potential errors in the documentation, such as documenting some parameters in # a documented function twice, or documenting parameters that don't exist or # using markup commands wrongly. # The default value is: YES. WARN_IF_DOC_ERROR = YES # If WARN_IF_INCOMPLETE_DOC is set to YES, doxygen will warn about incomplete # function parameter documentation. If set to NO, doxygen will accept that some # parameters have no documentation without warning. # The default value is: YES. WARN_IF_INCOMPLETE_DOC = YES # This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that # are documented, but have no documentation for their parameters or return # value. If set to NO, doxygen will only warn about wrong parameter # documentation, but not about the absence of documentation. If EXTRACT_ALL is # set to YES then this flag will automatically be disabled. See also # WARN_IF_INCOMPLETE_DOC # The default value is: NO. WARN_NO_PARAMDOC = NO # If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when # a warning is encountered. If the WARN_AS_ERROR tag is set to FAIL_ON_WARNINGS # then doxygen will continue running as if WARN_AS_ERROR tag is set to NO, but # at the end of the doxygen process doxygen will return with a non-zero status. # Possible values are: NO, YES and FAIL_ON_WARNINGS. # The default value is: NO. WARN_AS_ERROR = NO # The WARN_FORMAT tag determines the format of the warning messages that doxygen # can produce. The string should contain the $file, $line, and $text tags, which # will be replaced by the file and line number from which the warning originated # and the warning text. Optionally the format may contain $version, which will # be replaced by the version of the file (if it could be obtained via # FILE_VERSION_FILTER) # See also: WARN_LINE_FORMAT # The default value is: $file:$line: $text. WARN_FORMAT = "$file:$line: $text" # In the $text part of the WARN_FORMAT command it is possible that a reference # to a more specific place is given. To make it easier to jump to this place # (outside of doxygen) the user can define a custom "cut" / "paste" string. # Example: # WARN_LINE_FORMAT = "'vi $file +$line'" # See also: WARN_FORMAT # The default value is: at line $line of file $file. 
WARN_LINE_FORMAT = "at line $line of file $file" # The WARN_LOGFILE tag can be used to specify a file to which warning and error # messages should be written. If left blank the output is written to standard # error (stderr). In case the file specified cannot be opened for writing the # warning and error messages are written to standard error. When as file - is # specified the warning and error messages are written to standard output # (stdout). WARN_LOGFILE = #--------------------------------------------------------------------------- # Configuration options related to the input files #--------------------------------------------------------------------------- # The INPUT tag is used to specify the files and/or directories that contain # documented source files. You may enter file names like myfile.cpp or # directories like /usr/src/myproject. Separate the files or directories with # spaces. See also FILE_PATTERNS and EXTENSION_MAPPING # Note: If this tag is empty the current directory is searched. INPUT = src # This tag can be used to specify the character encoding of the source files # that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses # libiconv (or the iconv built into libc) for the transcoding. See the libiconv # documentation (see: # https://www.gnu.org/software/libiconv/) for the list of possible encodings. # The default value is: UTF-8. INPUT_ENCODING = UTF-8 # If the value of the INPUT tag contains directories, you can use the # FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and # *.h) to filter out the source-files in the directories. # # Note that for custom extensions or not directly supported extensions you also # need to set EXTENSION_MAPPING for the extension otherwise the files are not # read by doxygen. # # Note the list of default checked file patterns might differ from the list of # default file extension mappings. # # If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp, # *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, # *.hh, *.hxx, *.hpp, *.h++, *.l, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, # *.inc, *.m, *.markdown, *.md, *.mm, *.dox (to be provided as doxygen C # comment), *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, *.f18, *.f, *.for, *.vhd, # *.vhdl, *.ucf, *.qsf and *.ice. FILE_PATTERNS = *.h # The RECURSIVE tag can be used to specify whether or not subdirectories should # be searched for input files as well. # The default value is: NO. RECURSIVE = YES # The EXCLUDE tag can be used to specify files and/or directories that should be # excluded from the INPUT source files. This way you can easily exclude a # subdirectory from a directory tree whose root is specified with the INPUT tag. # # Note that relative paths are relative to the directory from which doxygen is # run. EXCLUDE = src/common/pkcs11f.h \ src/common/pkcs11t.h # The EXCLUDE_SYMLINKS tag can be used to select whether or not files or # directories that are symbolic links (a Unix file system feature) are excluded # from the input. # The default value is: NO. EXCLUDE_SYMLINKS = NO # If the value of the INPUT tag contains directories, you can use the # EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude # certain files from those directories. 
# # Note that the wildcards are matched against the file with absolute path, so to # exclude all test directories for example use the pattern */test/* EXCLUDE_PATTERNS = # The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names # (namespaces, classes, functions, etc.) that should be excluded from the # output. The symbol name can be a fully qualified name, a word, or if the # wildcard * is used, a substring. Examples: ANamespace, AClass, # ANamespace::AClass, ANamespace::*Test # # Note that the wildcards are matched against the file with absolute path, so to # exclude all test directories use the pattern */test/* EXCLUDE_SYMBOLS = # The EXAMPLE_PATH tag can be used to specify one or more files or directories # that contain example code fragments that are included (see the \include # command). EXAMPLE_PATH = # If the value of the EXAMPLE_PATH tag contains directories, you can use the # EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and # *.h) to filter out the source-files in the directories. If left blank all # files are included. EXAMPLE_PATTERNS = # If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be # searched for input files to be used with the \include or \dontinclude commands # irrespective of the value of the RECURSIVE tag. # The default value is: NO. EXAMPLE_RECURSIVE = NO # The IMAGE_PATH tag can be used to specify one or more files or directories # that contain images that are to be included in the documentation (see the # \image command). IMAGE_PATH = # The INPUT_FILTER tag can be used to specify a program that doxygen should # invoke to filter for each input file. Doxygen will invoke the filter program # by executing (via popen()) the command: # # # # where is the value of the INPUT_FILTER tag, and is the # name of an input file. Doxygen will then use the output that the filter # program writes to standard output. If FILTER_PATTERNS is specified, this tag # will be ignored. # # Note that the filter must not add or remove lines; it is applied before the # code is scanned, but not when the output code is generated. If lines are added # or removed, the anchors will not be placed correctly. # # Note that for custom extensions or not directly supported extensions you also # need to set EXTENSION_MAPPING for the extension otherwise the files are not # properly processed by doxygen. INPUT_FILTER = # The FILTER_PATTERNS tag can be used to specify filters on a per file pattern # basis. Doxygen will compare the file name with each pattern and apply the # filter if there is a match. The filters are a list of the form: pattern=filter # (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how # filters are used. If the FILTER_PATTERNS tag is empty or if none of the # patterns match the file name, INPUT_FILTER is applied. # # Note that for custom extensions or not directly supported extensions you also # need to set EXTENSION_MAPPING for the extension otherwise the files are not # properly processed by doxygen. FILTER_PATTERNS = # If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using # INPUT_FILTER) will also be used to filter the input files that are used for # producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES). # The default value is: NO. FILTER_SOURCE_FILES = NO # The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file # pattern. 
A pattern will override the setting for FILTER_PATTERN (if any) and # it is also possible to disable source filtering for a specific pattern using # *.ext= (so without naming a filter). # This tag requires that the tag FILTER_SOURCE_FILES is set to YES. FILTER_SOURCE_PATTERNS = # If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that # is part of the input, its contents will be placed on the main page # (index.html). This can be useful if you have a project on for instance GitHub # and want to reuse the introduction page also for the doxygen output. USE_MDFILE_AS_MAINPAGE = #--------------------------------------------------------------------------- # Configuration options related to source browsing #--------------------------------------------------------------------------- # If the SOURCE_BROWSER tag is set to YES then a list of source files will be # generated. Documented entities will be cross-referenced with these sources. # # Note: To get rid of all source code in the generated output, make sure that # also VERBATIM_HEADERS is set to NO. # The default value is: NO. SOURCE_BROWSER = YES # Setting the INLINE_SOURCES tag to YES will include the body of functions, # classes and enums directly into the documentation. # The default value is: NO. INLINE_SOURCES = NO # Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any # special comment blocks from generated source code fragments. Normal C, C++ and # Fortran comments will always remain visible. # The default value is: YES. STRIP_CODE_COMMENTS = YES # If the REFERENCED_BY_RELATION tag is set to YES then for each documented # entity all documented functions referencing it will be listed. # The default value is: NO. REFERENCED_BY_RELATION = YES # If the REFERENCES_RELATION tag is set to YES then for each documented function # all documented entities called/used by that function will be listed. # The default value is: NO. REFERENCES_RELATION = YES # If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set # to YES then the hyperlinks from functions in REFERENCES_RELATION and # REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will # link to the documentation. # The default value is: YES. REFERENCES_LINK_SOURCE = YES # If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the # source code will show a tooltip with additional information such as prototype, # brief description and links to the definition and documentation. Since this # will make the HTML file larger and loading of large files a bit slower, you # can opt to disable this feature. # The default value is: YES. # This tag requires that the tag SOURCE_BROWSER is set to YES. SOURCE_TOOLTIPS = YES # If the USE_HTAGS tag is set to YES then the references to source code will # point to the HTML generated by the htags(1) tool instead of doxygen built-in # source browser. The htags tool is part of GNU's global source tagging system # (see https://www.gnu.org/software/global/global.html). You will need version # 4.8.6 or higher. # # To use it do the following: # - Install the latest version of global # - Enable SOURCE_BROWSER and USE_HTAGS in the configuration file # - Make sure the INPUT points to the root of the source tree # - Run doxygen as normal # # Doxygen will invoke htags (and that will in turn invoke gtags), so these # tools must be available from the command line (i.e. in the search path). 
# # The result: instead of the source browser generated by doxygen, the links to # source code will now point to the output of htags. # The default value is: NO. # This tag requires that the tag SOURCE_BROWSER is set to YES. USE_HTAGS = NO # If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a # verbatim copy of the header file for each class for which an include is # specified. Set to NO to disable this. # See also: Section \class. # The default value is: YES. VERBATIM_HEADERS = YES # If the CLANG_ASSISTED_PARSING tag is set to YES then doxygen will use the # clang parser (see: # http://clang.llvm.org/) for more accurate parsing at the cost of reduced # performance. This can be particularly helpful with template rich C++ code for # which doxygen's built-in parser lacks the necessary type information. # Note: The availability of this option depends on whether or not doxygen was # generated with the -Duse_libclang=ON option for CMake. # The default value is: NO. CLANG_ASSISTED_PARSING = NO # If the CLANG_ASSISTED_PARSING tag is set to YES and the CLANG_ADD_INC_PATHS # tag is set to YES then doxygen will add the directory of each input to the # include path. # The default value is: YES. # This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES. CLANG_ADD_INC_PATHS = YES # If clang assisted parsing is enabled you can provide the compiler with command # line options that you would normally use when invoking the compiler. Note that # the include paths will already be set by doxygen for the files and directories # specified with INPUT and INCLUDE_PATH. # This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES. CLANG_OPTIONS = # If clang assisted parsing is enabled you can provide the clang parser with the # path to the directory containing a file called compile_commands.json. This # file is the compilation database (see: # http://clang.llvm.org/docs/HowToSetupToolingForLLVM.html) containing the # options used when the source files were built. This is equivalent to # specifying the -p option to a clang tool, such as clang-check. These options # will then be passed to the parser. Any options specified with CLANG_OPTIONS # will be added as well. # Note: The availability of this option depends on whether or not doxygen was # generated with the -Duse_libclang=ON option for CMake. CLANG_DATABASE_PATH = #--------------------------------------------------------------------------- # Configuration options related to the alphabetical class index #--------------------------------------------------------------------------- # If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all # compounds will be generated. Enable this if the project contains a lot of # classes, structs, unions or interfaces. # The default value is: YES. ALPHABETICAL_INDEX = NO # In case all classes in a project start with a common prefix, all classes will # be put under the same header in the alphabetical index. The IGNORE_PREFIX tag # can be used to specify a prefix (or a list of prefixes) that should be ignored # while generating the index headers. # This tag requires that the tag ALPHABETICAL_INDEX is set to YES. IGNORE_PREFIX = #--------------------------------------------------------------------------- # Configuration options related to the HTML output #--------------------------------------------------------------------------- # If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output # The default value is: YES. 
GENERATE_HTML = YES # The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a # relative path is entered the value of OUTPUT_DIRECTORY will be put in front of # it. # The default directory is: html. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_OUTPUT = api # The HTML_FILE_EXTENSION tag can be used to specify the file extension for each # generated HTML page (for example: .htm, .php, .asp). # The default value is: .html. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_FILE_EXTENSION = .html # The HTML_HEADER tag can be used to specify a user-defined HTML header file for # each generated HTML page. If the tag is left blank doxygen will generate a # standard header. # # To get valid HTML the header file that includes any scripts and style sheets # that doxygen needs, which is dependent on the configuration options used (e.g. # the setting GENERATE_TREEVIEW). It is highly recommended to start with a # default header using # doxygen -w html new_header.html new_footer.html new_stylesheet.css # YourConfigFile # and then modify the file new_header.html. See also section "Doxygen usage" # for information on how to generate the default header that doxygen normally # uses. # Note: The header is subject to change so you typically have to regenerate the # default header when upgrading to a newer version of doxygen. For a description # of the possible markers and block names see the documentation. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_HEADER = # The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each # generated HTML page. If the tag is left blank doxygen will generate a standard # footer. See HTML_HEADER for more information on how to generate a default # footer and what special commands can be used inside the footer. See also # section "Doxygen usage" for information on how to generate the default footer # that doxygen normally uses. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_FOOTER = # The HTML_STYLESHEET tag can be used to specify a user-defined cascading style # sheet that is used by each HTML page. It can be used to fine-tune the look of # the HTML output. If left blank doxygen will generate a default style sheet. # See also section "Doxygen usage" for information on how to generate the style # sheet that doxygen normally uses. # Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as # it is more robust and this tag (HTML_STYLESHEET) will in the future become # obsolete. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_STYLESHEET = # The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined # cascading style sheets that are included after the standard style sheets # created by doxygen. Using this option one can overrule certain style aspects. # This is preferred over using HTML_STYLESHEET since it does not replace the # standard style sheet and is therefore more robust against future updates. # Doxygen will copy the style sheet files to the output directory. # Note: The order of the extra style sheet files is of importance (e.g. the last # style sheet in the list overrules the setting of the previous ones in the # list). For an example see the documentation. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_EXTRA_STYLESHEET = # The HTML_EXTRA_FILES tag can be used to specify one or more extra images or # other source files which should be copied to the HTML output directory. 
Note # that these files will be copied to the base HTML output directory. Use the # $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these # files. In the HTML_STYLESHEET file, use the file name only. Also note that the # files will be copied as-is; there are no commands or markers available. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_EXTRA_FILES = # The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen # will adjust the colors in the style sheet and background images according to # this color. Hue is specified as an angle on a color-wheel, see # https://en.wikipedia.org/wiki/Hue for more information. For instance the value # 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 # purple, and 360 is red again. # Minimum value: 0, maximum value: 359, default value: 220. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_HUE = 220 # The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors # in the HTML output. For a value of 0 the output will use gray-scales only. A # value of 255 will produce the most vivid colors. # Minimum value: 0, maximum value: 255, default value: 100. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_SAT = 100 # The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the # luminance component of the colors in the HTML output. Values below 100 # gradually make the output lighter, whereas values above 100 make the output # darker. The value divided by 100 is the actual gamma applied, so 80 represents # a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not # change the gamma. # Minimum value: 40, maximum value: 240, default value: 80. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_GAMMA = 80 # If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML # page will contain the date and time when the page was generated. Setting this # to YES can help to show when doxygen was last run and thus if the # documentation is up to date. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_TIMESTAMP = YES # If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML # documentation will contain a main index with vertical navigation menus that # are dynamically created via JavaScript. If disabled, the navigation index will # consists of multiple levels of tabs that are statically embedded in every HTML # page. Disable this option to support browsers that do not have JavaScript, # like the Qt help browser. # The default value is: YES. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_DYNAMIC_MENUS = YES # If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML # documentation will contain sections that can be hidden and shown after the # page has loaded. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_DYNAMIC_SECTIONS = NO # With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries # shown in the various tree structured indices initially; the user can expand # and collapse entries dynamically later on. Doxygen will expand the tree to # such a level that at most the specified number of entries are visible (unless # a fully collapsed tree already exceeds this amount). So setting the number of # entries 1 will produce a full collapsed tree by default. 
0 is a special value # representing an infinite number of entries and will result in a fully expanded # tree by default. # Minimum value: 0, maximum value: 9999, default value: 100. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_INDEX_NUM_ENTRIES = 100 # If the GENERATE_DOCSET tag is set to YES, additional index files will be # generated that can be used as input for Apple's Xcode 3 integrated development # environment (see: # https://developer.apple.com/xcode/), introduced with OSX 10.5 (Leopard). To # create a documentation set, doxygen will generate a Makefile in the HTML # output directory. Running make will produce the docset in that directory and # running make install will install the docset in # ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at # startup. See https://developer.apple.com/library/archive/featuredarticles/Doxy # genXcode/_index.html for more information. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_DOCSET = NO # This tag determines the name of the docset feed. A documentation feed provides # an umbrella under which multiple documentation sets from a single provider # (such as a company or product suite) can be grouped. # The default value is: Doxygen generated docs. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_FEEDNAME = "Doxygen generated docs" # This tag determines the URL of the docset feed. A documentation feed provides # an umbrella under which multiple documentation sets from a single provider # (such as a company or product suite) can be grouped. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_FEEDURL = # This tag specifies a string that should uniquely identify the documentation # set bundle. This should be a reverse domain-name style string, e.g. # com.mycompany.MyDocSet. Doxygen will append .docset to the name. # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_BUNDLE_ID = org.doxygen.Project # The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify # the documentation publisher. This should be a reverse domain-name style # string, e.g. com.mycompany.MyDocSet.documentation. # The default value is: org.doxygen.Publisher. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_PUBLISHER_ID = org.doxygen.Publisher # The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. # The default value is: Publisher. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_PUBLISHER_NAME = Publisher # If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three # additional HTML index files: index.hhp, index.hhc, and index.hhk. The # index.hhp is a project file that can be read by Microsoft's HTML Help Workshop # on Windows. In the beginning of 2021 Microsoft took the original page, with # among others the download links, offline (the HTML Help Workshop had already # been in maintenance mode for many years). You can download the HTML Help # Workshop from the web archives at Installation executable (see: # http://web.archive.org/web/20160201063255/http://download.microsoft.com/downlo # ad/0/A/9/0A939EF6-E31C-430F-A3DF-DFAE7960D564/htmlhelp.exe). # # The HTML Help Workshop contains a compiler that can convert all HTML output # generated by doxygen into a single compiled HTML file (.chm).
Compiled HTML # files are now used as the Windows 98 help format, and will replace the old # Windows help format (.hlp) on all Windows platforms in the future. Compressed # HTML files also contain an index, a table of contents, and you can search for # words in the documentation. The HTML workshop also contains a viewer for # compressed HTML files. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_HTMLHELP = NO # The CHM_FILE tag can be used to specify the file name of the resulting .chm # file. You can add a path in front of the file if the result should not be # written to the html output directory. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. CHM_FILE = # The HHC_LOCATION tag can be used to specify the location (absolute path # including file name) of the HTML help compiler (hhc.exe). If non-empty, # doxygen will try to run the HTML help compiler on the generated index.hhp. # The file has to be specified with full path. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. HHC_LOCATION = # The GENERATE_CHI flag controls if a separate .chi index file is generated # (YES) or that it should be included in the main .chm file (NO). # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. GENERATE_CHI = NO # The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc) # and project file content. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. CHM_INDEX_ENCODING = # The BINARY_TOC flag controls whether a binary table of contents is generated # (YES) or a normal table of contents (NO) in the .chm file. Furthermore it # enables the Previous and Next buttons. # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. BINARY_TOC = NO # The TOC_EXPAND flag can be set to YES to add extra items for group members to # the table of contents of the HTML help documentation and to the tree view. # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. TOC_EXPAND = NO # If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and # QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that # can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help # (.qch) of the generated HTML documentation. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_QHP = NO # If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify # the file name of the resulting .qch file. The path specified is relative to # the HTML output folder. # This tag requires that the tag GENERATE_QHP is set to YES. QCH_FILE = # The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help # Project output. For more information please see Qt Help Project / Namespace # (see: # https://doc.qt.io/archives/qt-4.8/qthelpproject.html#namespace). # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_QHP is set to YES. QHP_NAMESPACE = org.doxygen.Project # The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt # Help Project output. For more information please see Qt Help Project / Virtual # Folders (see: # https://doc.qt.io/archives/qt-4.8/qthelpproject.html#virtual-folders). # The default value is: doc. # This tag requires that the tag GENERATE_QHP is set to YES. 
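# As a rough illustration (not part of this project's configuration), a working
# Qt Help setup would typically combine GENERATE_QHP with a namespace, a virtual
# folder and the location of qhelpgenerator; the namespace, output file name and
# tool path below are assumptions and would need to be adapted:
#   GENERATE_QHP       = YES
#   QCH_FILE           = pam_pkcs11.qch
#   QHP_NAMESPACE      = org.opensc.pam_pkcs11
#   QHP_VIRTUAL_FOLDER = doc
#   QHG_LOCATION       = /usr/bin/qhelpgenerator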
QHP_VIRTUAL_FOLDER = doc # If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom # filter to add. For more information please see Qt Help Project / Custom # Filters (see: # https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_CUST_FILTER_NAME = # The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the # custom filter to add. For more information please see Qt Help Project / Custom # Filters (see: # https://doc.qt.io/archives/qt-4.8/qthelpproject.html#custom-filters). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_CUST_FILTER_ATTRS = # The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this # project's filter section matches. Qt Help Project / Filter Attributes (see: # https://doc.qt.io/archives/qt-4.8/qthelpproject.html#filter-attributes). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_SECT_FILTER_ATTRS = # The QHG_LOCATION tag can be used to specify the location (absolute path # including file name) of Qt's qhelpgenerator. If non-empty doxygen will try to # run qhelpgenerator on the generated .qhp file. # This tag requires that the tag GENERATE_QHP is set to YES. QHG_LOCATION = # If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be # generated, together with the HTML files, they form an Eclipse help plugin. To # install this plugin and make it available under the help contents menu in # Eclipse, the contents of the directory containing the HTML and XML files needs # to be copied into the plugins directory of eclipse. The name of the directory # within the plugins directory should be the same as the ECLIPSE_DOC_ID value. # After copying Eclipse needs to be restarted before the help appears. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_ECLIPSEHELP = NO # A unique identifier for the Eclipse help plugin. When installing the plugin # the directory name containing the HTML and XML files should also have this # name. Each documentation set should have its own identifier. # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES. ECLIPSE_DOC_ID = org.doxygen.Project # If you want full control over the layout of the generated HTML pages it might # be necessary to disable the index and replace it with your own. The # DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top # of each HTML page. A value of NO enables the index and the value YES disables # it. Since the tabs in the index contain the same information as the navigation # tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. DISABLE_INDEX = NO # The GENERATE_TREEVIEW tag is used to specify whether a tree-like index # structure should be generated to display hierarchical information. If the tag # value is set to YES, a side panel will be generated containing a tree-like # index structure (just like the one that is generated for HTML Help). For this # to work a browser that supports JavaScript, DHTML, CSS and frames is required # (i.e. any modern browser). Windows users are probably better off using the # HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can # further fine tune the look of the index (see "Fine-tuning the output"). 
As an # example, the default style sheet generated by doxygen has an example that # shows how to put an image at the root of the tree instead of the PROJECT_NAME. # Since the tree basically has the same information as the tab index, you could # consider setting DISABLE_INDEX to YES when enabling this option. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_TREEVIEW = NO # When both GENERATE_TREEVIEW and DISABLE_INDEX are set to YES, then the # FULL_SIDEBAR option determines if the side bar is limited to only the treeview # area (value NO) or if it should extend to the full height of the window (value # YES). Setting this to YES gives a layout similar to # https://docs.readthedocs.io with more room for contents, but less room for the # project logo, title, and description. If either GENERATE_TREEVIEW or # DISABLE_INDEX is set to NO, this option has no effect. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. FULL_SIDEBAR = NO # The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that # doxygen will group on one line in the generated HTML documentation. # # Note that a value of 0 will completely suppress the enum values from appearing # in the overview section. # Minimum value: 0, maximum value: 20, default value: 4. # This tag requires that the tag GENERATE_HTML is set to YES. ENUM_VALUES_PER_LINE = 4 # If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used # to set the initial width (in pixels) of the frame in which the tree is shown. # Minimum value: 0, maximum value: 1500, default value: 250. # This tag requires that the tag GENERATE_HTML is set to YES. TREEVIEW_WIDTH = 250 # If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to # external symbols imported via tag files in a separate window. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. EXT_LINKS_IN_WINDOW = NO # If the OBFUSCATE_EMAILS tag is set to YES, doxygen will obfuscate email # addresses. # The default value is: YES. # This tag requires that the tag GENERATE_HTML is set to YES. OBFUSCATE_EMAILS = YES # If the HTML_FORMULA_FORMAT option is set to svg, doxygen will use the pdf2svg # tool (see https://github.com/dawbarton/pdf2svg) or inkscape (see # https://inkscape.org) to generate formulas as SVG images instead of PNGs for # the HTML output. These images will generally look nicer at scaled resolutions. # Possible values are: png (the default) and svg (looks nicer but requires the # pdf2svg or inkscape tool). # The default value is: png. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_FORMULA_FORMAT = png # Use this tag to change the font size of LaTeX formulas included as images in # the HTML documentation. When you change the font size after a successful # doxygen run you need to manually remove any form_*.png images from the HTML # output directory to force them to be regenerated. # Minimum value: 8, maximum value: 50, default value: 10. # This tag requires that the tag GENERATE_HTML is set to YES. FORMULA_FONTSIZE = 10 # Use the FORMULA_TRANSPARENT tag to determine whether or not the images # generated for formulas are transparent PNGs. Transparent PNGs are not # supported properly for IE 6.0, but are supported on all modern browsers. # # Note that when changing this option you need to delete any form_*.png files in # the HTML output directory before the changes have effect. # The default value is: YES. 
# This tag requires that the tag GENERATE_HTML is set to YES. FORMULA_TRANSPARENT = YES # The FORMULA_MACROFILE can contain LaTeX \newcommand and \renewcommand commands # to create new LaTeX commands to be used in formulas as building blocks. See # the section "Including formulas" for details. FORMULA_MACROFILE = # Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see # https://www.mathjax.org) which uses client side JavaScript for the rendering # instead of using pre-rendered bitmaps. Use this if you do not have LaTeX # installed or if you want the formulas to look prettier in the HTML output. When # enabled you may also need to install MathJax separately and configure the path # to it using the MATHJAX_RELPATH option. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. USE_MATHJAX = NO # With MATHJAX_VERSION it is possible to specify the MathJax version to be used. # Note that the different versions of MathJax have different requirements with # regards to the different settings, so it is possible that also other MathJax # settings have to be changed when switching between the different MathJax # versions. # Possible values are: MathJax_2 and MathJax_3. # The default value is: MathJax_2. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_VERSION = MathJax_2 # When MathJax is enabled you can set the default output format to be used for # the MathJax output. For more details about the output format see MathJax # version 2 (see: # http://docs.mathjax.org/en/v2.7-latest/output.html) and MathJax version 3 # (see: # http://docs.mathjax.org/en/latest/web/components/output.html). # Possible values are: HTML-CSS (which is slower, but has the best # compatibility. This is the name for MathJax version 2; for MathJax version 3 # this will be translated into chtml), NativeMML (i.e. MathML. Only supported # for MathJax 2. For MathJax version 3 chtml will be used instead.), chtml (This # is the name for MathJax version 3; for MathJax version 2 this will be # translated into HTML-CSS) and SVG. # The default value is: HTML-CSS. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_FORMAT = HTML-CSS # When MathJax is enabled you need to specify the location relative to the HTML # output directory using the MATHJAX_RELPATH option. The destination directory # should contain the MathJax.js script. For instance, if the mathjax directory # is located at the same level as the HTML output directory, then # MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax # Content Delivery Network so you can quickly see the result without installing # MathJax. However, it is strongly recommended to install a local copy of # MathJax from https://www.mathjax.org before deployment. The default value is: # - in case of MathJax version 2: https://cdn.jsdelivr.net/npm/mathjax@2 # - in case of MathJax version 3: https://cdn.jsdelivr.net/npm/mathjax@3 # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest # The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax # extension names that should be enabled during MathJax rendering.
For example # for MathJax version 2 (see # https://docs.mathjax.org/en/v2.7-latest/tex.html#tex-and-latex-extensions): # MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols # For example for MathJax version 3 (see # http://docs.mathjax.org/en/latest/input/tex/extensions/index.html): # MATHJAX_EXTENSIONS = ams # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_EXTENSIONS = # The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces # of code that will be used on startup of the MathJax code. See the MathJax site # (see: # http://docs.mathjax.org/en/v2.7-latest/output.html) for more details. For an # example see the documentation. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_CODEFILE = # When the SEARCHENGINE tag is enabled doxygen will generate a search box for # the HTML output. The underlying search engine uses javascript and DHTML and # should work on any modern browser. Note that when using HTML help # (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET) # there is already a search function so this one should typically be disabled. # For large projects the javascript based search engine can be slow, then # enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to # search using the keyboard; to jump to the search box use <access key> + S # (what the <access key> is depends on the OS and browser, but it is typically # <CTRL>, <ALT>/