pax_global_header00006660000000000000000000000064144046774470014533gustar00rootroot0000000000000052 comment=bdfe4b61eb7e05f56f659135d10ac1fe87ee8781 borgmatic-1.7.9/000077500000000000000000000000001440467744700135205ustar00rootroot00000000000000borgmatic-1.7.9/.dockerignore000066400000000000000000000000121440467744700161650ustar00rootroot00000000000000.git .tox borgmatic-1.7.9/.drone.yml000066400000000000000000000017771440467744700154440ustar00rootroot00000000000000kind: pipeline name: python-3-8-alpine-3-13 services: - name: postgresql image: postgres:13.1-alpine environment: POSTGRES_PASSWORD: test POSTGRES_DB: test - name: mysql image: mariadb:10.5 environment: MYSQL_ROOT_PASSWORD: test MYSQL_DATABASE: test - name: mongodb image: mongo:5.0.5 environment: MONGO_INITDB_ROOT_USERNAME: root MONGO_INITDB_ROOT_PASSWORD: test clone: skip_verify: true steps: - name: build image: alpine:3.13 pull: always commands: - scripts/run-full-tests --- kind: pipeline name: documentation clone: skip_verify: true steps: - name: build image: plugins/docker settings: username: from_secret: docker_username password: from_secret: docker_password registry: projects.torsion.org repo: projects.torsion.org/borgmatic-collective/borgmatic tags: docs dockerfile: docs/Dockerfile trigger: repo: - borgmatic-collective/borgmatic branch: - master event: - push borgmatic-1.7.9/.eleventy.js000066400000000000000000000026651440467744700160000ustar00rootroot00000000000000const pluginSyntaxHighlight = require("@11ty/eleventy-plugin-syntaxhighlight"); const inclusiveLangPlugin = require("@11ty/eleventy-plugin-inclusive-language"); const navigationPlugin = require("@11ty/eleventy-navigation"); module.exports = function(eleventyConfig) { eleventyConfig.addPlugin(pluginSyntaxHighlight); eleventyConfig.addPlugin(inclusiveLangPlugin); eleventyConfig.addPlugin(navigationPlugin); let markdownIt = require("markdown-it"); let markdownItAnchor = require("markdown-it-anchor"); let markdownItReplaceLink = 
require("markdown-it-replace-link"); let markdownItOptions = { html: true, breaks: false, linkify: true, replaceLink: function (link, env) { if (process.env.NODE_ENV == "production") { return link; } return link.replace('https://torsion.org/borgmatic/', 'http://localhost:8080/'); } }; let markdownItAnchorOptions = { permalink: markdownItAnchor.permalink.headerLink() }; eleventyConfig.setLibrary( "md", markdownIt(markdownItOptions) .use(markdownItAnchor, markdownItAnchorOptions) .use(markdownItReplaceLink) ); eleventyConfig.addPassthroughCopy({"docs/static": "static"}); eleventyConfig.setLiquidOptions({dynamicPartials: false}); return { templateFormats: [ "md", "txt" ] } }; borgmatic-1.7.9/.flake8000066400000000000000000000000141440467744700146660ustar00rootroot00000000000000select = Q0 borgmatic-1.7.9/.gitea/000077500000000000000000000000001440467744700146675ustar00rootroot00000000000000borgmatic-1.7.9/.gitea/issue_template.md000066400000000000000000000014501440467744700202340ustar00rootroot00000000000000#### What I'm trying to do and why #### Steps to reproduce (if a bug) Include (sanitized) borgmatic configuration files if applicable. #### Actual behavior (if a bug) Include (sanitized) `--verbosity 2` output if applicable. #### Expected behavior (if a bug) #### Other notes / implementation ideas #### Environment **borgmatic version:** [version here] Use `sudo borgmatic --version` or `sudo pip show borgmatic | grep ^Version` **borgmatic installation method:** [e.g., Debian package, Docker container, etc.] **Borg version:** [version here] Use `sudo borg --version` **Python version:** [version here] Use `python3 --version` **Database version (if applicable):** [version here] Use `psql --version` or `mysql --version` on client and server. 
**operating system and version:** [OS here] borgmatic-1.7.9/.gitignore000066400000000000000000000001511440467744700155050ustar00rootroot00000000000000*.egg-info *.pyc *.swp .cache .coverage* .pytest_cache .tox __pycache__ build/ dist/ pip-wheel-metadata/ borgmatic-1.7.9/AUTHORS000066400000000000000000000011671440467744700145750ustar00rootroot00000000000000Dan Helfman : Main developer Alexander Görtz: Python 3 compatibility Florian Lindner: Logging rewrite Henning Schroeder: Copy editing Johannes Feichtner: Support for user hooks Michele Lazzeri: Custom archive names Nick Whyte: Support prefix filtering for archive consistency checks newtonne: Read encryption password from external file Robin `ypid` Schneider: Support additional options of Borg and add validate-borgmatic-config command Scott Squires: Custom archive names Thomas LÉVEIL: Support for a keep_minutely prune option. Support for the --json option And many others! See the output of "git log". borgmatic-1.7.9/LICENSE000066400000000000000000001044621440467744700145340ustar00rootroot00000000000000GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. 
Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. 
If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. 
Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. 
However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. 
Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. 
b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. 
b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. 
A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. 
But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. 
If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. 
Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. 
The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. 
"Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. 
If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. 
If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. 
If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. {one line to give the program's name and a brief idea of what it does.} Copyright (C) {year} {name of author} This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see . Also add information on how to contact you by electronic and paper mail. If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: {project} Copyright (C) {year} {fullname} This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 
This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box". You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see . The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read . borgmatic-1.7.9/MANIFEST.in000066400000000000000000000000721440467744700152550ustar00rootroot00000000000000include borgmatic/config/schema.yaml graft sample/systemd borgmatic-1.7.9/NEWS000066400000000000000000001507521440467744700142310ustar00rootroot000000000000001.7.9 * #295: Add a SQLite database dump/restore hook. * #304: Change the default action order when no actions are specified on the command-line to: "create", "prune", "compact", "check". If you'd like to retain the old ordering ("prune" and "compact" first), then specify actions explicitly on the command-line. * #304: Run any command-line actions in the order specified instead of using a fixed ordering. * #564: Add "--repository" flag to all actions where it makes sense, so you can run borgmatic on a single configured repository instead of all of them. * #628: Add a Healthchecks "log" state to send borgmatic logs to Healthchecks without signalling success or failure. * #647: Add "--strip-components all" feature on the "extract" action to remove leading path components of files you extract. 
Must be used with the "--path" flag. * Add support for Python 3.11. 1.7.8 * #620: With the "create" action and the "--list" ("--files") flag, only show excluded files at verbosity 2. * #621: Add optional authentication to the ntfy monitoring hook. * With the "create" action, only one of "--list" ("--files") and "--progress" flags can be used. This lines up with the new behavior in Borg 2.0.0b5. * Internally support new Borg 2.0.0b5 "--filter" status characters / item flags for the "create" action. * Fix the "create" action with the "--dry-run" flag querying for databases when a PostgreSQL/MySQL "all" database is configured. Now, these queries are skipped due to the dry run. * Add "--repository" flag to the "rcreate" action to optionally select one configured repository to create. * Add "--progress" flag to the "transfer" action, new in Borg 2.0.0b5. * Add "checkpoint_volume" configuration option to creates checkpoints every specified number of bytes during a long-running backup, new in Borg 2.0.0b5. 1.7.7 * #642: Add MySQL database hook "add_drop_database" configuration option to control whether dumped MySQL databases get dropped right before restore. * #643: Fix for potential data loss (data not getting backed up) when dumping large "directory" format PostgreSQL/MongoDB databases. Prior to the fix, these dumps would not finish writing to disk before Borg consumed them. Now, the dumping process completes before Borg starts. This only applies to "directory" format databases; other formats still stream to Borg without using temporary disk space. * Fix MongoDB "directory" format to work with mongodump/mongorestore without error. Prior to this fix, only the "archive" format worked. 1.7.6 * #393, #438, #560: Optionally dump "all" PostgreSQL/MySQL databases to separate files instead of one combined dump file, allowing more convenient restores of individual databases. You can enable this by specifying the database dump "format" option when the database is named "all". 
* #602: Fix logs that interfere with JSON output by making warnings go to stderr instead of stdout. * #622: Fix traceback when include merging configuration files on ARM64. * #629: Skip warning about excluded special files when no special files have been excluded. * #630: Add configuration options for database command customization: "list_options", "restore_options", and "analyze_options" for PostgreSQL, "restore_options" for MySQL, and "restore_options" for MongoDB. 1.7.5 * #311: Override PostgreSQL dump/restore commands via configuration options. * #604: Fix traceback when a configuration section is present but lacking any options. * #607: Clarify documentation examples for include merging and deep merging. * #611: Fix "data" consistency check to support "check_last" and consistency "prefix" options. * #613: Clarify documentation about multiple repositories and separate configuration files. 1.7.4 * #596: Fix special file detection erroring when broken symlinks are encountered. * #597, #598: Fix regression in which "check" action errored on certain systems ("Cannot determine Borg repository ID"). 1.7.3 * #357: Add "break-lock" action for removing any repository and cache locks leftover from Borg aborting. * #360: To prevent Borg hangs, unconditionally delete stale named pipes before dumping databases. * #587: When database hooks are enabled, auto-exclude special files from a "create" action to prevent Borg from hanging. You can override/prevent this behavior by explicitly setting the "read_special" option to true. * #587: Warn when ignoring a configured "read_special" value of false, as true is needed when database hooks are enabled. * #589: Update sample systemd service file to allow system "idle" (e.g. a video monitor turning off) while borgmatic is running. 
* #590: Fix for potential data loss (data not getting backed up) when the "patterns_from" option was used with "source_directories" (or the "~/.borgmatic" path existed, which got injected into "source_directories" implicitly). The fix is for borgmatic to convert "source_directories" into patterns whenever "patterns_from" is used, working around a Borg bug: https://github.com/borgbackup/borg/issues/6994 * #590: In "borgmatic create --list" output, display which files get excluded from the backup due to patterns or excludes. * #591: Add support for Borg 2's "--match-archives" flag. This replaces "--glob-archives", which borgmatic now treats as an alias for "--match-archives". But note that the two flags have slightly different syntax. See the Borg 2 changelog for more information: https://borgbackup.readthedocs.io/en/2.0.0b3/changes.html#version-2-0-0b3-2022-10-02 * Fix for "borgmatic --archive latest" not finding the latest archive when a verbosity is set. 1.7.2 * #577: Fix regression in which "borgmatic info --archive ..." showed repository info instead of archive info with Borg 1. * #582: Fix hang when database hooks are enabled and "patterns" contains a parent directory of "~/.borgmatic". 1.7.1 * #542: Make the "source_directories" option optional. This is useful for "check"-only setups or using "patterns" exclusively. * #574: Fix for potential data loss (data not getting backed up) when the "patterns" option was used with "source_directories" (or the "~/.borgmatic" path existed, which got injected into "source_directories" implicitly). The fix is for borgmatic to convert "source_directories" into patterns whenever "patterns" is used, working around a Borg bug: https://github.com/borgbackup/borg/issues/6994 1.7.0 * #463: Add "before_actions" and "after_actions" command hooks that run before/after all the actions for each repository. These new hooks are a good place to run per-repository steps like mounting/unmounting a remote filesystem. 
* #463: Update documentation to cover per-repository configurations: https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/ * #557: Support for Borg 2 while still working with Borg 1. This includes new borgmatic actions like "rcreate" (replaces "init"), "rlist" (list archives in repository), "rinfo" (show repository info), and "transfer" (for upgrading Borg repositories). For the most part, borgmatic tries to smooth over differences between Borg 1 and 2 to make your upgrade process easier. However, there are still a few cases where Borg made breaking changes. See the Borg 2.0 changelog for more information: https://www.borgbackup.org/releases/borg-2.0.html * #557: If you install Borg 2, you'll need to manually upgrade your existing Borg 1 repositories before use. Note that Borg 2 stable is not yet released as of this borgmatic release, so don't use Borg 2 for production until it is! See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/upgrade/#upgrading-borg * #557: Rename several configuration options to match Borg 2: "remote_rate_limit" is now "upload_rate_limit", "numeric_owner" is "numeric_ids", and "bsd_flags" is "flags". borgmatic still works with the old options. * #557: Remote repository paths without the "ssh://" syntax are deprecated but still supported for now. Remote repository paths containing "~" are deprecated in borgmatic and no longer work in Borg 2. * #557: Omitting the "--archive" flag on the "list" action is deprecated when using Borg 2. Use the new "rlist" action instead. * #557: The "--dry-run" flag can now be used with the "rcreate"/"init" action. * #565: Fix handling of "repository" and "data" consistency checks to prevent invalid Borg flags. * #566: Modify "mount" and "extract" actions to require the "--repository" flag when multiple repositories are configured. * #571: BREAKING: Remove old-style command-line action flags like "--create, "--list", etc. 
If you're already using actions like "create" and "list" instead, this change should not affect you. * #571: BREAKING: Rename "--files" flag on "prune" action to "--list", as it lists archives, not files. * #571: Add "--list" as alias for "--files" flag on "create" and "export-tar" actions. * Add support for disabling TLS verification in Healthchecks monitoring hook with "verify_tls" option. 1.6.6 * #559: Update documentation about configuring multiple consistency checks or multiple databases. * #560: Fix all database hooks to error when the requested database to restore isn't present in the Borg archive. * #561: Fix command-line "--override" flag to continue supporting old configuration file formats. * #563: Fix traceback with "create" action and "--json" flag when a database hook is configured. 1.6.5 * #553: Fix logging to include the full traceback when Borg experiences an internal error, not just the first few lines. * #554: Fix all monitoring hooks to warn if the server returns an HTTP 4xx error. This can happen with Healthchecks, for instance, when using an invalid ping URL. * #555: Fix environment variable plumbing so options like "encryption_passphrase" and "encryption_passcommand" in one configuration file aren't used for other configuration files. 1.6.4 * #546, #382: Keep your repository passphrases and database passwords outside of borgmatic's configuration file with environment variable interpolation. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/provide-your-passwords/ 1.6.3 * #541: Add "borgmatic list --find" flag for searching for files across multiple archives, useful for hunting down that file you accidentally deleted so you can extract it. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/inspect-your-backups/#searching-for-a-file * #543: Add a monitoring hook for sending push notifications via ntfy. 
See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#ntfy-hook * Fix Bash completion script to no longer alter your shell's settings (complain about unset variables or error on pipe failures). * Deprecate "borgmatic list --successful" flag, as listing only non-checkpoint (successful) archives is now the default in newer versions of Borg. 1.6.2 * #523: Reduce the default consistency check frequency and support configuring the frequency independently for each check. Also add "borgmatic check --force" flag to ignore configured frequencies. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/deal-with-very-large-backups/#check-frequency * #536: Fix generate-borgmatic-config to support more complex schema changes like the new Healthchecks configuration options when the "--source" flag is used. * #538: Add support for "borgmatic borg debug" command. * #539: Add "generate-borgmatic-config --overwrite" flag to replace an existing destination file. * Add Bash completion script so you can tab-complete the borgmatic command-line. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/set-up-backups/#shell-completion 1.6.1 * #294: Add Healthchecks monitoring hook "ping_body_limit" option to configure how many bytes of logs to send to the Healthchecks server. * #402: Remove the error when "archive_name_format" is specified but a retention prefix isn't. * #420: Warn when an unsupported variable is used in a hook command. * #439: Change connection failures for monitoring hooks (Healthchecks, Cronitor, PagerDuty, and Cronhub) to be warnings instead of errors. This way, the monitoring system failing does not block backups. * #460: Add Healthchecks monitoring hook "send_logs" option to enable/disable sending borgmatic logs to the Healthchecks server. 
* #525: Add Healthchecks monitoring hook "states" option to only enable pinging for particular monitoring states (start, finish, fail). * #528: Improve the error message when a configuration override contains an invalid value. * #531: BREAKING: When deep merging common configuration, merge colliding list values by appending them. Previously, one list replaced the other. * #532: When a configuration include is a relative path, load it from either the current working directory or from the directory containing the file doing the including. Previously, only the working directory was used. * Add a randomized delay to the sample systemd timer to spread out the load on a server. * Change the configuration format for borgmatic monitoring hooks (Healthchecks, Cronitor, PagerDuty, and Cronhub) to specify the ping URL / integration key as a named option. The intent is to support additional options (some in this release). This change is backwards-compatible. * Add emojis to documentation table of contents to make it easier to find particular how-to and reference guides at a glance. 1.6.0 * #381: BREAKING: Greatly simplify configuration file reuse by deep merging when including common configuration. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#include-merging * #473: BREAKING: Instead of executing "before" command hooks before all borgmatic actions run (and "after" hooks after), execute these hooks right before/after the corresponding action. E.g., "before_check" now runs immediately before the "check" action. This better supports running timing-sensitive tasks like pausing containers. Side effect: before/after command hooks now run once for each configured repository instead of once per configuration file. Additionally, the "repositories" interpolated variable has been changed to "repository", containing the path to the current repository for the hook. 
See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/ * #513: Add mention of sudo's "secure_path" option to borgmatic installation documentation. * #515: Fix "borgmatic borg key ..." to pass parameters to Borg in the correct order. * #516: Fix handling of TERM signal to exit borgmatic, not just forward the signal to Borg. * #517: Fix borgmatic exit code (so it's zero) when initial Borg calls fail but later retries succeed. * Change Healthchecks logs truncation size from 10k bytes to 100k bytes, corresponding to that same change on Healthchecks.io. 1.5.24 * #431: Add "working_directory" option to support source directories with relative paths. * #444: When loading a configuration file that is unreadable due to file permissions, warn instead of erroring. This supports running borgmatic as a non-root user with configuration in ~/.config even if there is an unreadable global configuration file in /etc. * #469: Add "repositories" context to "before_*" and "after_*" command action hooks. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/ * #486: Fix handling of "patterns_from" and "exclude_from" options to error instead of warning when referencing unreadable files and "create" action is run. * #507: Fix Borg usage error in the "compact" action when running "borgmatic --dry-run". Now, skip "compact" entirely during a dry run. 1.5.23 * #394: Compact repository segments and free space with new "borgmatic compact" action. Borg 1.2+ only. Also run "compact" by default when no actions are specified, as "prune" in Borg 1.2 no longer frees up space unless "compact" is run. * #394: When using the "atime", "bsd_flags", "numeric_owner", or "remote_rate_limit" options, tailor the flags passed to Borg depending on the Borg version. * #480, #482: Fix traceback when a YAML validation error occurs. 
1.5.22 * #288: Add database dump hook for MongoDB. * #470: Move mysqldump options to the beginning of the command due to MySQL bug 30994. * #471: When command-line configuration override produces a parse error, error cleanly instead of tracebacking. * #476: Fix unicode error when restoring particular MySQL databases. * Drop support for Python 3.6, which has been end-of-lifed. * Add support for Python 3.10. 1.5.21 * #28: Optionally retry failing backups via "retries" and "retry_wait" configuration options. * #306: Add "list_options" MySQL configuration option for passing additional arguments to MySQL list command. * #459: Add support for old version (2.x) of jsonschema library. 1.5.20 * Re-release with correct version without dev0 tag. 1.5.19 * #387: Fix error when configured source directories are not present on the filesystem at the time of backup. Now, Borg will complain, but the backup will still continue. * #455: Mention changing borgmatic path in cron documentation. * Update sample systemd service file with more granular read-only filesystem settings. * Move Gitea and GitHub hosting from a personal namespace to an organization for better collaboration with related projects. * 1k ★s on GitHub! 1.5.18 * #389: Fix "message too long" error when logging to rsyslog. * #440: Fix traceback that can occur when dumping a database. 1.5.17 * #437: Fix error when configuration file contains "umask" option. * Remove test dependency on vim and /dev/urandom. 1.5.16 * #379: Suppress console output in sample crontab and systemd service files. * #407: Fix syslog logging on FreeBSD. * #430: Fix hang when restoring a PostgreSQL "tar" format database dump. * Better error messages! Switch the library used for validating configuration files (from pykwalify to jsonschema). 
* Link borgmatic Ansible role from installation documentation: https://torsion.org/borgmatic/docs/how-to/set-up-backups/#other-ways-to-install 1.5.15 * #419: Document use case of running backups conditionally based on laptop power level: https://torsion.org/borgmatic/docs/how-to/backup-to-a-removable-drive-or-an-intermittent-server/ * #425: Run arbitrary Borg commands with new "borgmatic borg" action. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/run-arbitrary-borg-commands/ 1.5.14 * #390: Add link to Hetzner storage offering from the documentation. * #398: Clarify canonical home of borgmatic in documentation. * #406: Clarify that spaces in path names should not be backslashed in path names. * #423: Fix error handling to error loudly when Borg gets killed due to running out of memory! * Fix build so as not to attempt to build and push documentation for a non-master branch. * "Fix" build failure with Alpine Edge by switching from Edge to Alpine 3.13. * Move #borgmatic IRC channel from Freenode to Libera Chat due to Freenode takeover drama. IRC connection info: https://torsion.org/borgmatic/#issues 1.5.13 * #373: Document that passphrase is used for Borg keyfile encryption, not just repokey encryption. * #404: Add support for ruamel.yaml 0.17.x YAML parsing library. * Update systemd service example to return a permission error when a system call isn't permitted (instead of terminating borgmatic outright). * Drop support for Python 3.5, which has been end-of-lifed. * Add support for Python 3.9. * Update versions of test dependencies (test_requirements.txt and test containers). * Only support black code formatter on Python 3.8+. New black dependencies make installation difficult on older versions of Python. * Replace "improve this documentation" form with link to support and ticket tracker. 1.5.12 * Fix for previous release with incorrect version suffix in setup.py. No other changes. 
1.5.11 * #341: Add "temporary_directory" option for changing Borg's temporary directory. * #352: Lock down systemd security settings in sample systemd service file. * #355: Fix traceback when a database hook value is null in a configuration file. * #361: Merge override values when specifying the "--override" flag multiple times. The previous behavior was to take the value of the last "--override" flag only. * #367: Fix traceback when upgrading old INI-style configuration with upgrade-borgmatic-config. * #368: Fix signal forwarding from borgmatic to Borg resulting in recursion traceback. * #369: Document support for Borg placeholders in repository names. 1.5.10 * #347: Add hooks that run for the "extract" action: "before_extract" and "after_extract". * #350: Fix traceback when a configuration directory is non-readable due to directory permissions. * Add documentation navigation links on left side of all documentation pages. * Clarify documentation on configuration overrides, specifically the portion about list syntax: http://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#configuration-overrides * Clarify documentation overview of monitoring options: http://torsion.org/borgmatic/docs/how-to/monitor-your-backups/ 1.5.9 * #300: Add "borgmatic export-tar" action to export an archive to a tar-formatted file or stream. * #339: Fix for intermittent timing-related test failure of logging function. * Clarify database documentation about excluding named pipes and character/block devices to prevent hangs. * Add documentation on how to make backups redundant with multiple repositories: https://torsion.org/borgmatic/docs/how-to/make-backups-redundant/ 1.5.8 * #336: Fix for traceback when running Cronitor, Cronhub, and PagerDuty monitor hooks. 1.5.7 * #327: Fix broken pass-through of BORG_* environment variables to Borg. * #328: Fix duplicate logging to Healthchecks and send "after_*" hooks output to Healthchecks. 
* #331: Add SSL support to PostgreSQL database configuration. * #333: Fix for potential data loss (data not getting backed up) when borgmatic omitted configured source directories in certain situations. Specifically, this occurred when two source directories on different filesystems were related by parentage (e.g. "/foo" and "/foo/bar/baz") and the one_file_system option was enabled. * Update documentation code fragments theme to better match the rest of the page. * Improve configuration reference documentation readability via more aggressive word-wrapping in configuration schema descriptions. 1.5.6 * #292: Allow before_backup and similiar hooks to exit with a soft failure without altering the monitoring status on Healthchecks or other providers. Support this by waiting to ping monitoring services with a "start" status until after before_* hooks finish. Failures in before_* hooks still trigger a monitoring "fail" status. * #316: Fix hang when a stale database dump named pipe from an aborted borgmatic run remains on disk. * #323: Fix for certain configuration options like ssh_command impacting Borg invocations for separate configuration files. * #324: Add "borgmatic extract --strip-components" flag to remove leading path components when extracting an archive. * Tweak comment indentation in generated configuration file for clarity. * Link to Borgmacator GNOME AppIndicator from monitoring documentation. 1.5.5 * #314: Fix regression in support for PostgreSQL's "directory" dump format. Unlike other dump formats, the "directory" dump format does not stream directly to/from Borg. * #315: Fix enabled database hooks to implicitly set one_file_system configuration option to true. This prevents Borg from reading devices like /dev/zero and hanging. * #316: Fix hang when streaming a database dump to Borg with implicit duplicate source directories by deduplicating them first. * #319: Fix error message when there are no MySQL databases to dump for "all" databases. 
* Improve documentation around the installation process. Specifically, making borgmatic commands runnable via the system PATH and offering a global install option. 1.5.4 * #310: Fix legitimate database dump command errors (exit code 1) not being treated as errors by borgmatic. * For database dumps, replace the named pipe on every borgmatic run. This prevent hangs on stale pipes left over from previous runs. * Fix error handling to handle more edge cases when executing commands. 1.5.3 * #258: Stream database dumps and restores directly to/from Borg without using any additional filesystem space. This feature is automatic, and works even on restores from archives made with previous versions of borgmatic. * #293: Documentation on macOS launchd permissions issues with work-around for Full Disk Access. * Remove "borgmatic restore --progress" flag, as it now conflicts with streaming database restores. 1.5.2 * #301: Fix MySQL restore error on "all" database dump by excluding system tables. * Fix PostgreSQL restore error on "all" database dump by using "psql" for the restore instead of "pg_restore". 1.5.1 * #289: Tired of looking up the latest successful archive name in order to pass it to borgmatic actions? Me too. Now you can specify "--archive latest" to all actions that accept an archive flag. * #290: Fix the "--stats" and "--files" flags so that they yield output at verbosity 0. * Reduce the default verbosity of borgmatic logs sent to Healthchecks monitoring hook. Now, it's warnings and errors only. You can increase the verbosity via the "--monitoring-verbosity" flag. * Add security policy documentation in SECURITY.md. 1.5.0 * #245: Monitor backups with PagerDuty hook integration. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#pagerduty-hook * #255: Add per-action hooks: "before_prune", "after_prune", "before_check", and "after_check". 
* #274: Add ~/.config/borgmatic.d as another configuration directory default. * #277: Customize Healthchecks log level via borgmatic "--monitoring-verbosity" flag. * #280: Change "exclude_if_present" option to support multiple filenames that indicate a directory should be excluded from backups, rather than just a single filename. * #284: Backup to a removable drive or intermittent server via "soft failure" feature. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/backup-to-a-removable-drive-or-an-intermittent-server/ * #287: View consistency check progress via "--progress" flag for "check" action. * For "create" and "prune" actions, no longer list files or show detailed stats at any verbosities by default. You can opt back in with "--files" or "--stats" flags. * For "list" and "info" actions, show repository names even at verbosity 0. 1.4.22 * #276, #285: Disable colored output when "--json" flag is used, so as to produce valid JSON ouput. * After a backup of a database dump in directory format, properly remove the dump directory. * In "borgmatic --help", don't expand $HOME in listing of default "--config" paths. 1.4.21 * #268: Override particular configuration options from the command-line via "--override" flag. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#configuration-overrides * #270: Only trigger "on_error" hooks and monitoring failures for "prune", "create", and "check" actions, and not for other actions. * When pruning with verbosity level 1, list pruned and kept archives. Previously, this information was only shown at verbosity level 2. 1.4.20 * Fix repository probing during "borgmatic init" to respect verbosity flag and remote_path option. * #249: Update Healthchecks/Cronitor/Cronhub monitoring integrations to fire for "check" and "prune" actions, not just "create". 
1.4.19 * #259: Optionally change the internal database dump path via "borgmatic_source_directory" option in location configuration section. * #271: Support piping "borgmatic list" output to grep by logging certain log levels to console stdout and others to stderr. * Retain colored output when piping or redirecting in an interactive terminal. * Add end-to-end tests for database dump and restore. These are run on developer machines with Docker Compose for approximate parity with continuous integration tests. 1.4.18 * Fix "--repository" flag to accept relative paths. * Fix "borgmatic umount" so it only runs Borg once instead of once per repository / configuration file. * #253: Mount whole repositories via "borgmatic mount" without any "--archive" flag. * #269: Filter listed paths via "borgmatic list --path" flag. 1.4.17 * #235: Pass extra options directly to particular Borg commands, handy for Borg options that borgmatic does not yet support natively. Use "extra_borg_options" in the storage configuration section. * #266: Attempt to repair any inconsistencies found during a consistency check via "borgmatic check --repair" flag. 1.4.16 * #256: Fix for "before_backup" hook not triggering an error when the command contains "borg" and has an exit code of 1. * #257: Fix for garbled Borg file listing when using "borgmatic create --progress" with verbosity level 1 or 2. * #260: Fix for missing Healthchecks monitoring payload or HTTP 500 due to incorrect unicode encoding. 1.4.15 * Fix for database dump removal incorrectly skipping some database dumps. * #123: Support for mounting an archive as a FUSE filesystem via "borgmatic mount" action, and unmounting via "borgmatic umount". See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/extract-a-backup/#mount-a-filesystem 1.4.14 * Show summary log errors regardless of verbosity level, and log the "summary:" header with a log level based on the contained summary logs. 
1.4.13 * Show full error logs at "--verbosity 0" so you can see command output without upping the verbosity level. 1.4.12 * #247: With "borgmatic check", consider Borg warnings as errors. * Dial back the display of inline error logs a bit, so failed command output doesn't appear multiple times in the logs (well, except for the summary). 1.4.11 * #241: When using the Healthchecks monitoring hook, include borgmatic logs in the payloads for completion and failure pings. * With --verbosity level 1 or 2, show error logs both inline when they occur and in the summary logs at the bottom. With lower verbosity levels, suppress the summary and show error logs when they occur. 1.4.10 * #246: Fix for "borgmatic restore" showing success and incorrectly extracting archive files, even when no databases are configured to restore. As this can overwrite files from the archive and lead to data loss, please upgrade to get the fix before using "borgmatic restore". * Reopen the file given by "--log-file" flag if an external program rotates the log file while borgmatic is running. 1.4.9 * #228: Database dump hooks for MySQL/MariaDB, so you can easily dump your databases before backups run. * #243: Fix repository does not exist error with "borgmatic extract" when repository is remote. 1.4.8 * Monitor backups with Cronhub hook integration. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#cronhub-hook * Fix Healthchecks/Cronitor hooks to skip actions when the borgmatic "--dry-run" flag is used. 1.4.7 * #238: In documentation, clarify when Healthchecks/Cronitor hooks fire in relation to other hooks. * #239: Upgrade your borgmatic configuration to get new options and comments via "generate-borgmatic-config --source". See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/upgrade/#upgrading-your-configuration 1.4.6 * Verbosity level "-1" for even quieter output: Errors only (#236). 
1.4.5 * Log to file instead of syslog via command-line "--log-file" flag (#233). 1.4.4 * #234: Support for Borg --keep-exclude-tags and --exclude-nodump options. 1.4.3 * Monitor backups with Cronitor hook integration. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#cronitor-hook 1.4.2 * Extract files to a particular directory via "borgmatic extract --destination" flag. * Rename "borgmatic extract --restore-path" flag to "--path" to reduce confusion with the separate "borgmatic restore" action. Any uses of "--restore-path" will continue working. 1.4.1 * #229: Restore backed up PostgreSQL databases via "borgmatic restore" action. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/backup-your-databases/ * Documentation on how to develop borgmatic's documentation: https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/#documentation-development 1.4.0 * #225: Database dump hooks for PostgreSQL, so you can easily dump your databases before backups run. * #230: Rename "borgmatic list --pattern-from" flag to "--patterns-from" to match Borg. 1.3.26 * #224: Fix "borgmatic list --successful" with a slightly better heuristic for listing successful (non-checkpoint) archives. 1.3.25 * #223: Dead man's switch to detect when backups start failing silently, implemented via healthchecks.io hook integration. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#healthchecks-hook * Documentation on monitoring and alerting options for borgmatic backups: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/ * Automatically rewrite links when developing on documentation locally. 1.3.24 * #86: Add "borgmatic list --successful" flag to only list successful (non-checkpoint) archives. * Add a suggestion form to all documentation pages, so users can submit ideas for improving the documentation. 
* Update documentation link to community Arch Linux borgmatic package. 1.3.23 * #174: More detailed error alerting via runtime context available in "on_error" hook. 1.3.22 * #144: When backups to one of several repositories fails, keep backing up to the other repositories and report errors afterwards. 1.3.21 * #192: User-defined hooks for global setup or cleanup that run before/after all actions. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/ 1.3.20 * #205: More robust sample systemd service: boot delay, network dependency, lowered CPU/IO priority, etc. * #221: Fix "borgmatic create --progress" output so that it updates on the console in real-time. 1.3.19 * #219: Fix visibility of "borgmatic prune --stats" output. 1.3.18 * #220: Fix regression of argument parsing for default actions. 1.3.17 * #217: Fix error with "borgmatic check --only" command-line flag with "extract" consistency check. 1.3.16 * #210: Support for Borg check --verify-data flag via borgmatic "data" consistency check. * #210: Override configured consistency checks via "borgmatic check --only" command-line flag. * When generating sample configuration with generate-borgmatic-config, add a space after each "#" comment indicator. 1.3.15 * #208: Fix for traceback when the "checks" option has an empty value. * #209: Bypass Borg error about a moved repository via "relocated_repo_access_is_ok" option in borgmatic storage configuration section. * #213: Reorder arguments passed to Borg to fix duplicate directories when using Borg patterns. * #214: Fix for hook erroring with exit code 1 not being interpreted as an error. 1.3.14 * #204: Do not treat Borg warnings (exit code 1) as failures. * When validating configuration files, require strings instead of allowing any scalar type. 1.3.13 * #199: Add note to documentation about using spaces instead of tabs for indentation, as YAML does not allow tabs. 
* #203: Fix compatibility with ruamel.yaml 0.16.x. * If a "prefix" option in borgmatic's configuration has an empty value (blank or ""), then disable default prefix. 1.3.12 * Only log to syslog when run from a non-interactive console (e.g. a cron job). * Remove unicode byte order mark from syslog output so it doesn't show up as a literal in rsyslog output. See discussion on #197. 1.3.11 * #193: Pass through several "borg list" and "borg info" flags like --short, --format, --sort-by, --first, --last, etc. via borgmatic command-line flags. * Add borgmatic info --repository and --archive command-line flags to display info for individual repositories or archives. * Support for Borg --noatime, --noctime, and --nobirthtime flags via corresponding options in borgmatic configuration location section. 1.3.10 * #198: Fix for Borg create error output not showing up at borgmatic verbosity level zero. 1.3.9 * #195: Switch to command-line actions as more traditional sub-commands, e.g. "borgmatic create", "borgmatic prune", etc. However, the classic dashed options like "--create" still work! 1.3.8 * #191: Disable console color via "color" option in borgmatic configuration output section. 1.3.7 * #196: Fix for unclear error message for invalid YAML merge include. * #197: Don't color syslog output. * Change default syslog verbosity to show errors only. 1.3.6 * #53: Log to syslog in addition to existing console logging. Add --syslog-verbosity flag to customize the log level. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/inspect-your-backups/ * #178: Look for .yml configuration file extension in addition to .yaml. * #189: Set umask used when executing hooks via "umask" option in borgmatic hooks section. * Remove Python cache files before each Tox run. * Add #borgmatic Freenode IRC channel to documentation. * Add Borg/borgmatic hosting providers section to documentation. * Add files for building documentation into a Docker image for web serving. 
* Upgrade project build server from Drone 0.8 to 1.1. * Build borgmatic documentation during continuous integration. * We're nearly at 500 ★s on GitHub. We can do this! 1.3.5 * #153: Support for various Borg directory environment variables (BORG_CONFIG_DIR, BORG_CACHE_DIR, etc.) via options in borgmatic's storage configuration. * #177: Fix for regression with missing verbose log entries. 1.3.4 * Part of #125: Color borgmatic (but not Borg) output when using an interactive terminal. * #166: Run tests for all installed versions of Python. * #168: Update README with continuous integration badge. * #169: Automatically sort Python imports in code. * Document installing borgmatic with pip install --user instead of a system Python install. * Get more reproducible builds by pinning the versions of pip and tox used to run tests. * Factor out build/test configuration from tox.ini file. 1.3.3 * Add validate-borgmatic-config command, useful for validating borgmatic config generated by configuration management or even edited by hand. 1.3.2 * #160: Fix for hooks executing when using --dry-run. Now hooks are skipped during a dry run. 1.3.1 * #155: Fix for invalid JSON output when using multiple borgmatic configuration files. * #157: Fix for seemingly random filename ordering when running through a directory of configuration files. * Fix for empty JSON output when using --create --json. * Now capturing Borg output only when --json flag is used. Previously, borgmatic delayed Borg output even without the --json flag. 1.3.0 * #148: Configuration file includes and merging via "!include" tag to support reuse of common options across configuration files. 1.2.18 * #147: Support for Borg create/extract --numeric-owner flag via "numeric_owner" option in borgmatic's location section. 1.2.17 * #140: List the files within an archive via --list --archive option. 1.2.16 * #119: Include a sample borgmatic configuration file in the documentation. 
* #123: Support for Borg archive restoration via borgmatic --extract command-line flag. * Refactor documentation into multiple separate pages for clarity and findability. * Organize options within command-line help into logical groups. * Exclude tests from distribution packages. 1.2.15 * #127: Remove date echo from schema example, as it's not a substitute for real logging. * #132: Leave exclude_patterns glob expansion to Borg, since doing it in borgmatic leads to confusing behavior. * #136: Handle and format validation errors raised during argument parsing. * #138: Allow use of --stats flag when --create or --prune flags are implied. 1.2.14 * #103: When generating sample configuration with generate-borgmatic-config, document the defaults for each option. * #116: When running multiple configuration files, attempt all configuration files even if one of them errors. Log a summary of results at the end. * Add borgmatic --version command-line flag to get the current installed version number. 1.2.13 * #100: Support for --stats command-line flag independent of --verbosity. * #117: With borgmatic --init command-line flag, proceed without erroring if a repository already exists. 1.2.12 * #110: Support for Borg repository initialization via borgmatic --init command-line flag. * #111: Update Borg create --filter values so a dry run lists files to back up. * #113: Update README with link to a new/forked Docker image. * Prevent deprecated --excludes command-line option from being used. * Refactor README a bit to flow better for first-time users. * Update README with a few additional borgmatic packages (Debian and Ubuntu). 1.2.11 * #108: Support for Borg create --progress via borgmatic command-line flag. 1.2.10 * #105: Support for Borg --chunker-params create option via "chunker_params" option in borgmatic's storage section. 1.2.9 * #102: Fix for syntax error that occurred in Python 3.5 and below. * Make automated tests support running in Python 3.5. 
1.2.8 * #73: Enable consistency checks for only certain repositories via "check_repositories" option in borgmatic's consistency configuration. Handy for large repositories that take forever to check. * Include link to issue tracker within various command output. * Run continuous integration tests on a matrix of Python and Borg versions. 1.2.7 * #98: Support for Borg --keep-secondly prune option. * Use Black code formatter and Flake8 code checker as part of running automated tests. * Add an end-to-end automated test that actually integrates with Borg. * Set up continuous integration for borgmatic automated tests on projects.evoworx.org. 1.2.6 * Fix generated configuration to also include a "keep_daily" value so pruning works out of the box. 1.2.5 * #57: When generating sample configuration with generate-borgmatic-config, comment out all optional configuration so as to streamline the initial configuration process. 1.2.4 * Fix for archive checking traceback due to parameter mismatch. 1.2.3 * #64, #90, #92: Rewrite of logging system. Now verbosity flags passed to Borg are derived from borgmatic's log level. Note that the output of borgmatic might slightly change. * Part of #80: Support for Borg create --read-special via "read_special" option in borgmatic's location configuration. * #87: Support for Borg create --checkpoint-interval via "checkpoint_interval" option in borgmatic's storage configuration. * #88: Fix declared pykwalify compatibility version range in setup.py to prevent use of ancient versions of pykwalify with large version numbers. * #89: Pass --show-rc option to Borg when at highest verbosity level. * #94: Support for Borg --json option via borgmatic command-line to --create archives. 1.2.2 * #85: Fix compatibility issue between pykwalify and ruamel.yaml 0.15.52, which manifested in borgmatic as a pykwalify RuleError. 1.2.1 * Skip before/after backup hooks when only doing --prune, --check, --list, and/or --info. 
* #71: Support for XDG_CONFIG_HOME environment variable for specifying alternate user ~/.config/ path. * #74, #83: Support for Borg --json option via borgmatic command-line to --list archives or show archive --info in JSON format, ideal for programmatic consumption. * #38, #76: Upgrade ruamel.yaml compatibility version range and fix support for Python 3.7. * #77: Skip non-"*.yaml" config filenames in /etc/borgmatic.d/ so as not to parse backup files, editor swap files, etc. * #81: Document user-defined hooks run before/after backup, or on error. * Add code style guidelines to the documention. 1.2.0 * #61: Support for Borg --list option via borgmatic command-line to list all archives. * #61: Support for Borg --info option via borgmatic command-line to display summary information. * #62: Update README to mention other ways of installing borgmatic. * Support for Borg --prefix option for consistency checks via "prefix" option in borgmatic's consistency configuration. * Add introductory screencast link to documentation. * #59: Ignore "check_last" and consistency "prefix" when "archives" not in consistency checks. * #60: Add "Persistent" flag to systemd timer example. * #63: Support for Borg --nobsdflags option to skip recording bsdflags (e.g. NODUMP, IMMUTABLE) in archive. * #69: Support for Borg prune --umask option using value of existing "umask" option in borgmatic's storage configuration. * Update tox.ini to only assume Python 3.x instead of Python 3.4 specifically. * Add ~/.config/borgmatic/config.yaml to default configuration path probing. * Document how to develop on and contribute to borgmatic. 1.1.15 * Support for Borg BORG_PASSCOMMAND environment variable to read a password from an external file. * Fix for Borg create error when using borgmatic's --dry-run and --verbosity options together. Work-around for behavior introduced in Borg 1.1.3: https://github.com/borgbackup/borg/issues/3298 * #55: Fix for missing tags/releases on Gitea and GitHub project hosting. 
* #56: Support for Borg --lock-wait option for the maximum wait for a repository/cache lock. * #58: Support for using tilde in exclude_patterns to reference home directory. 1.1.14 * #49: Fix for typo in --patterns-from option. * #47: Support for Borg --dry-run option via borgmatic command-line. 1.1.13 * #54: Fix for incorrect consistency check flags passed to Borg when all three checks ("repository", "archives", and "extract") are specified in borgmatic configuration. * #48: Add "local_path" to configuration for specifying an alternative Borg executable path. * #49: Support for Borg experimental --patterns-from and --patterns options for specifying mixed includes/excludes. * Moved issue tracker from Taiga to integrated Gitea tracker at https://projects.torsion.org/borgmatic-collective/borgmatic/issues 1.1.12 * #46: Declare dependency on pykwalify 1.6 or above, as older versions yield "Unknown key: version" rule errors. * Support for Borg --keep-minutely prune option. 1.1.11 * #26: Add "ssh_command" to configuration for specifying a custom SSH command or options. * Fix for incorrect /etc/borgmatic.d/ configuration path probing on macOS. This problem manifested as an error on startup: "[Errno 2] No such file or directory: '/etc/borgmatic.d'". 1.1.10 * Pass several Unix signals through to child processes like Borg. This means that Borg now properly shuts down if borgmatic is terminated (e.g. due to a system suspend). * #30: Support for using tilde in repository paths to reference home directory. * #43: Support for Borg --files-cache option for setting the files cache operation mode. * #45: Support for Borg --remote-ratelimit option for limiting upload rate. * Log invoked Borg commands when at highest verbosity level. 1.1.9 * #17, #39: Support for user-defined hooks before/after backup, or on error. * #34: Improve clarity of logging spew at high verbosity levels. * #30: Support for using tilde in source directory path to reference home directory. 
* Require "prefix" in retention section when "archive_name_format" is set. This is to avoid accidental pruning of archives with a different archive name format. For similar reasons, default "prefix" to "{hostname}-" if not specified. * Convert main source repository from Mercurial to Git. * Update dead links to Borg documentation. 1.1.8 * #40: Fix to make /etc/borgmatic/config.yaml optional rather than required when using the default config paths. 1.1.7 * #29: Add "archive_name_format" to configuration for customizing archive names. * Fix for traceback when "exclude_from" value is empty in configuration file. * When pruning, make highest verbosity level list archives kept and pruned. * Clarification of Python 3 pip usage in documentation. 1.1.6 * #13, #36: Support for Borg --exclude-from, --exclude-caches, and --exclude-if-present options. 1.1.5 * #35: New "extract" consistency check that performs a dry-run extraction of the most recent archive. 1.1.4 * #18: Added command-line flags for performing a borgmatic run with only pruning, creating, or checking enabled. This supports use cases like running consistency checks from a different cron job with a different frequency, or running pruning with a different verbosity level. 1.1.3 * #15: Support for running multiple config files in /etc/borgmatic.d/ from a single borgmatic run. * Fix for generate-borgmatic-config writing config with invalid one_file_system value. 1.1.2 * #33: Fix for passing check_last as integer to subprocess when calling Borg. 1.1.1 * Part of #33: Fix for upgrade-borgmatic-config converting check_last option as a string instead of an integer. * Fix for upgrade-borgmatic-config erroring when consistency checks option is not present. 1.1.0 * Switched config file format to YAML. Run upgrade-borgmatic-config to upgrade. * Added generate-borgmatic-config command for initial config creation. * Dropped Python 2 support. Now Python 3 only. 
* #19: Fix for README mention of sample files not included in package. * #23: Sample files for triggering borgmatic from a systemd timer. * Support for backing up to multiple repositories. * To free up space, now pruning backups prior to creating a new backup. * Enabled test coverage output during tox runs. * Added logo. 1.0.3 * #22: Fix for verbosity flag not actually causing verbose output. 1.0.2 * #21: Fix for traceback when remote_path option is missing. 1.0.1 * #20: Support for Borg's --remote-path option to use an alternate Borg executable. See sample/config. 1.0.0 * Attic is no longer supported, as there hasn't been any recent development on it. Dropping Attic support will allow faster iteration on Borg-specific features. If you're still using Attic, this is a good time to switch to Borg! * Project renamed from atticmatic to borgmatic. See the borgmatic README for information on upgrading. 0.1.8 * Fix for handling of spaces in source_directories which resulted in backup up everything. * Fix for broken links to Borg documentation. * At verbosity zero, suppressing Borg check stderr spew about "Checking segments". * Support for Borg --one-file-system. * Support for Borg create --umask. * Support for file globs in source_directories. 0.1.7 * #12: Fixed parsing of punctuation in configuration file. * Better error message when configuration file is missing. 0.1.6 * #10: New configuration option for the encryption passphrase. * #11: Support for Borg's new archive compression feature. 0.1.5 * Changes to support release on PyPI. Now pip installable by name! 0.1.4 * Adding test that setup.py version matches release version. 0.1.3 * #2: Add support for "borg check --last N" to Borg backend. 0.1.2 * As a convenience to new users, allow a missing default excludes file. * New issue tracker, linked from documentation. 0.1.1 * Adding borgmatic cron example, and updating documentation to refer to it. 
0.1.0 * New "borgmatic" command to support Borg backup software, a fork of Attic. 0.0.7 * Flag for multiple levels of verbosity: some, and lots. * Improved mocking of Python builtins in unit tests. 0.0.6 * New configuration section for customizing which Attic consistency checks run, if any. 0.0.5 * Fixed regression with --verbose output being buffered. This means dropping the helpful error message introduced in 0.0.4. 0.0.4 * Now using tox to run tests against multiple versions of Python in one go. * Helpful error message about how to create a repository if one is missing. * Troubleshooting section with steps to deal with broken pipes. * Nosetests config file (setup.cfg) with defaults. 0.0.3 * After pruning, run attic's consistency checks on all archives. * Integration tests for argument parsing. * Documentation updates about repository encryption. 0.0.2 * Configuration support for additional attic prune flags: keep_within, keep_hourly, keep_yearly, and prefix. 0.0.1 * Initial release. borgmatic-1.7.9/README.md000066400000000000000000000160731440467744700150060ustar00rootroot00000000000000--- title: borgmatic permalink: index.html --- ## It's your data. Keep it that way. borgmatic logo borgmatic is simple, configuration-driven backup software for servers and workstations. Protect your files with client-side encryption. Backup your databases too. Monitor it all with integrated third-party services. The canonical home of borgmatic is at https://torsion.org/borgmatic. Here's an example configuration file: ```yaml location: # List of source directories to backup. source_directories: - /home - /etc # Paths of local or remote repositories to backup to. repositories: - ssh://1234@usw-s001.rsync.net/./backups.borg - ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo - /var/lib/backups/local.borg retention: # Retention policy for how many backups to keep. keep_daily: 7 keep_weekly: 4 keep_monthly: 6 consistency: # List of checks to run to validate your backups. 
checks: - name: repository - name: archives frequency: 2 weeks hooks: # Custom preparation scripts to run. before_backup: - prepare-for-backup.sh # Databases to dump and include in backups. postgresql_databases: - name: users # Third-party services to notify you if backups aren't happening. healthchecks: https://hc-ping.com/be067061-cf96-4412-8eae-62b0c50d6a8c ``` Want to see borgmatic in action? Check out the screencast. borgmatic is powered by [Borg Backup](https://www.borgbackup.org/). ## Integrations PostgreSQL      MySQL      MariaDB      MongoDB      SQLite      Healthchecks      Cronitor      Cronhub      PagerDuty      ntfy      BorgBase      ## Getting started Your first step is to [install and configure borgmatic](https://torsion.org/borgmatic/docs/how-to/set-up-backups/). For additional documentation, check out the links above (left panel on wide screens) for borgmatic how-to and reference guides. ## Hosting providers Need somewhere to store your encrypted off-site backups? The following hosting providers include specific support for Borg/borgmatic—and fund borgmatic development and hosting when you use these links to sign up. (These are referral links, but without any tracking scripts or cookies.)
  • BorgBase: Borg hosting service with support for monitoring, 2FA, and append-only repos
Additionally, [rsync.net](https://www.rsync.net/products/borg.html) and [Hetzner](https://www.hetzner.com/storage/storage-box) have compatible storage offerings, but do not currently fund borgmatic development or hosting. ## Support and contributing ### Issues Are you experiencing an issue with borgmatic? Or do you have an idea for a feature enhancement? Head on over to our [issue tracker](https://projects.torsion.org/borgmatic-collective/borgmatic/issues). In order to create a new issue or add a comment, you'll need to [register](https://projects.torsion.org/user/sign_up?invite_code=borgmatic) first. If you prefer to use an existing GitHub account, you can skip account creation and [login directly](https://projects.torsion.org/user/login). Also see the [security policy](https://torsion.org/borgmatic/docs/security-policy/) for any security issues. ### Social Check out the [Borg subreddit](https://www.reddit.com/r/BorgBackup/) for general Borg and borgmatic discussion and support. Also follow [borgmatic on Mastodon](https://fosstodon.org/@borgmatic). ### Chat To chat with borgmatic developers or users, check out the `#borgmatic` IRC channel on Libera Chat, either via web chat or a native IRC client. If you don't get a response right away, please hang around a while—or file a ticket instead. ### Other Other questions or comments? Contact [witten@torsion.org](mailto:witten@torsion.org). ### Contributing borgmatic [source code is available](https://projects.torsion.org/borgmatic-collective/borgmatic) and is also mirrored on [GitHub](https://github.com/borgmatic-collective/borgmatic) for convenience. borgmatic is licensed under the GNU General Public License version 3 or any later version. If you'd like to contribute to borgmatic development, please feel free to submit a [Pull Request](https://projects.torsion.org/borgmatic-collective/borgmatic/pulls) or open an [issue](https://projects.torsion.org/borgmatic-collective/borgmatic/issues) to discuss your idea. 
Note that you'll need to [register](https://projects.torsion.org/user/sign_up?invite_code=borgmatic) first. We also accept Pull Requests on GitHub, if that's more your thing. In general, contributions are very welcome. We don't bite! Also, please check out the [borgmatic development how-to](https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/) for info on cloning source code, running tests, etc. ![Build Status](https://build.torsion.org/api/badges/borgmatic-collective/borgmatic/status.svg?ref=refs/heads/master) borgmatic-1.7.9/SECURITY.md000066400000000000000000000012311440467744700153060ustar00rootroot00000000000000--- title: Security policy permalink: security-policy/index.html --- ## Supported versions While we want to hear about security vulnerabilities in all versions of borgmatic, security fixes are only made to the most recently released version. It's simply not practical for our small volunteer effort to maintain multiple release branches and put out separate security patches for each. ## Reporting a vulnerability If you find a security vulnerability, please [file a ticket](https://torsion.org/borgmatic/#issues) or [send email directly](mailto:witten@torsion.org) as appropriate. You should expect to hear back within a few days at most and generally sooner. 
borgmatic-1.7.9/borgmatic/000077500000000000000000000000001440467744700154675ustar00rootroot00000000000000borgmatic-1.7.9/borgmatic/__init__.py000066400000000000000000000000001440467744700175660ustar00rootroot00000000000000borgmatic-1.7.9/borgmatic/actions/000077500000000000000000000000001440467744700171275ustar00rootroot00000000000000borgmatic-1.7.9/borgmatic/actions/__init__.py000066400000000000000000000000001440467744700212260ustar00rootroot00000000000000borgmatic-1.7.9/borgmatic/actions/borg.py000066400000000000000000000020421440467744700204300ustar00rootroot00000000000000import logging import borgmatic.borg.borg import borgmatic.borg.rlist import borgmatic.config.validate logger = logging.getLogger(__name__) def run_borg( repository, storage, local_borg_version, borg_arguments, local_path, remote_path, ): ''' Run the "borg" action for the given repository. ''' if borg_arguments.repository is None or borgmatic.config.validate.repositories_match( repository, borg_arguments.repository ): logger.info('{}: Running arbitrary Borg command'.format(repository)) archive_name = borgmatic.borg.rlist.resolve_archive_name( repository, borg_arguments.archive, storage, local_borg_version, local_path, remote_path, ) borgmatic.borg.borg.run_arbitrary_borg( repository, storage, local_borg_version, options=borg_arguments.options, archive=archive_name, local_path=local_path, remote_path=remote_path, ) borgmatic-1.7.9/borgmatic/actions/break_lock.py000066400000000000000000000012711440467744700215760ustar00rootroot00000000000000import logging import borgmatic.borg.break_lock import borgmatic.config.validate logger = logging.getLogger(__name__) def run_break_lock( repository, storage, local_borg_version, break_lock_arguments, local_path, remote_path, ): ''' Run the "break-lock" action for the given repository. 
''' if break_lock_arguments.repository is None or borgmatic.config.validate.repositories_match( repository, break_lock_arguments.repository ): logger.info(f'{repository}: Breaking repository and cache locks') borgmatic.borg.break_lock.break_lock( repository, storage, local_borg_version, local_path=local_path, remote_path=remote_path, ) borgmatic-1.7.9/borgmatic/actions/check.py000066400000000000000000000027061440467744700205630ustar00rootroot00000000000000import logging import borgmatic.borg.check import borgmatic.config.validate import borgmatic.hooks.command logger = logging.getLogger(__name__) def run_check( config_filename, repository, location, storage, consistency, hooks, hook_context, local_borg_version, check_arguments, global_arguments, local_path, remote_path, ): ''' Run the "check" action for the given repository. ''' if check_arguments.repository and not borgmatic.config.validate.repositories_match( repository, check_arguments.repository ): return borgmatic.hooks.command.execute_hook( hooks.get('before_check'), hooks.get('umask'), config_filename, 'pre-check', global_arguments.dry_run, **hook_context, ) logger.info('{}: Running consistency checks'.format(repository)) borgmatic.borg.check.check_archives( repository, location, storage, consistency, local_borg_version, local_path=local_path, remote_path=remote_path, progress=check_arguments.progress, repair=check_arguments.repair, only_checks=check_arguments.only, force=check_arguments.force, ) borgmatic.hooks.command.execute_hook( hooks.get('after_check'), hooks.get('umask'), config_filename, 'post-check', global_arguments.dry_run, **hook_context, ) borgmatic-1.7.9/borgmatic/actions/compact.py000066400000000000000000000034201440467744700211260ustar00rootroot00000000000000import logging import borgmatic.borg.compact import borgmatic.borg.feature import borgmatic.config.validate import borgmatic.hooks.command logger = logging.getLogger(__name__) def run_compact( config_filename, repository, storage, 
retention, hooks, hook_context, local_borg_version, compact_arguments, global_arguments, dry_run_label, local_path, remote_path, ): ''' Run the "compact" action for the given repository. ''' if compact_arguments.repository and not borgmatic.config.validate.repositories_match( repository, compact_arguments.repository ): return borgmatic.hooks.command.execute_hook( hooks.get('before_compact'), hooks.get('umask'), config_filename, 'pre-compact', global_arguments.dry_run, **hook_context, ) if borgmatic.borg.feature.available(borgmatic.borg.feature.Feature.COMPACT, local_borg_version): logger.info('{}: Compacting segments{}'.format(repository, dry_run_label)) borgmatic.borg.compact.compact_segments( global_arguments.dry_run, repository, storage, local_borg_version, local_path=local_path, remote_path=remote_path, progress=compact_arguments.progress, cleanup_commits=compact_arguments.cleanup_commits, threshold=compact_arguments.threshold, ) else: # pragma: nocover logger.info('{}: Skipping compact (only available/needed in Borg 1.2+)'.format(repository)) borgmatic.hooks.command.execute_hook( hooks.get('after_compact'), hooks.get('umask'), config_filename, 'post-compact', global_arguments.dry_run, **hook_context, ) borgmatic-1.7.9/borgmatic/actions/create.py000066400000000000000000000050671440467744700207540ustar00rootroot00000000000000import json import logging import borgmatic.borg.create import borgmatic.config.validate import borgmatic.hooks.command import borgmatic.hooks.dispatch import borgmatic.hooks.dump logger = logging.getLogger(__name__) def run_create( config_filename, repository, location, storage, hooks, hook_context, local_borg_version, create_arguments, global_arguments, dry_run_label, local_path, remote_path, ): ''' Run the "create" action for the given repository. If create_arguments.json is True, yield the JSON output from creating the archive. 
''' if create_arguments.repository and not borgmatic.config.validate.repositories_match( repository, create_arguments.repository ): return borgmatic.hooks.command.execute_hook( hooks.get('before_backup'), hooks.get('umask'), config_filename, 'pre-backup', global_arguments.dry_run, **hook_context, ) logger.info('{}: Creating archive{}'.format(repository, dry_run_label)) borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured( 'remove_database_dumps', hooks, repository, borgmatic.hooks.dump.DATABASE_HOOK_NAMES, location, global_arguments.dry_run, ) active_dumps = borgmatic.hooks.dispatch.call_hooks( 'dump_databases', hooks, repository, borgmatic.hooks.dump.DATABASE_HOOK_NAMES, location, global_arguments.dry_run, ) stream_processes = [process for processes in active_dumps.values() for process in processes] json_output = borgmatic.borg.create.create_archive( global_arguments.dry_run, repository, location, storage, local_borg_version, local_path=local_path, remote_path=remote_path, progress=create_arguments.progress, stats=create_arguments.stats, json=create_arguments.json, list_files=create_arguments.list_files, stream_processes=stream_processes, ) if json_output: # pragma: nocover yield json.loads(json_output) borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured( 'remove_database_dumps', hooks, config_filename, borgmatic.hooks.dump.DATABASE_HOOK_NAMES, location, global_arguments.dry_run, ) borgmatic.hooks.command.execute_hook( hooks.get('after_backup'), hooks.get('umask'), config_filename, 'post-backup', global_arguments.dry_run, **hook_context, ) borgmatic-1.7.9/borgmatic/actions/export_tar.py000066400000000000000000000026461440467744700217000ustar00rootroot00000000000000import logging import borgmatic.borg.export_tar import borgmatic.borg.rlist import borgmatic.config.validate logger = logging.getLogger(__name__) def run_export_tar( repository, storage, local_borg_version, export_tar_arguments, global_arguments, local_path, remote_path, ): ''' Run the 
"export-tar" action for the given repository. ''' if export_tar_arguments.repository is None or borgmatic.config.validate.repositories_match( repository, export_tar_arguments.repository ): logger.info( '{}: Exporting archive {} as tar file'.format(repository, export_tar_arguments.archive) ) borgmatic.borg.export_tar.export_tar_archive( global_arguments.dry_run, repository, borgmatic.borg.rlist.resolve_archive_name( repository, export_tar_arguments.archive, storage, local_borg_version, local_path, remote_path, ), export_tar_arguments.paths, export_tar_arguments.destination, storage, local_borg_version, local_path=local_path, remote_path=remote_path, tar_filter=export_tar_arguments.tar_filter, list_files=export_tar_arguments.list_files, strip_components=export_tar_arguments.strip_components, ) borgmatic-1.7.9/borgmatic/actions/extract.py000066400000000000000000000035331440467744700211570ustar00rootroot00000000000000import logging import borgmatic.borg.extract import borgmatic.borg.rlist import borgmatic.config.validate import borgmatic.hooks.command logger = logging.getLogger(__name__) def run_extract( config_filename, repository, location, storage, hooks, hook_context, local_borg_version, extract_arguments, global_arguments, local_path, remote_path, ): ''' Run the "extract" action for the given repository. 
''' borgmatic.hooks.command.execute_hook( hooks.get('before_extract'), hooks.get('umask'), config_filename, 'pre-extract', global_arguments.dry_run, **hook_context, ) if extract_arguments.repository is None or borgmatic.config.validate.repositories_match( repository, extract_arguments.repository ): logger.info('{}: Extracting archive {}'.format(repository, extract_arguments.archive)) borgmatic.borg.extract.extract_archive( global_arguments.dry_run, repository, borgmatic.borg.rlist.resolve_archive_name( repository, extract_arguments.archive, storage, local_borg_version, local_path, remote_path, ), extract_arguments.paths, location, storage, local_borg_version, local_path=local_path, remote_path=remote_path, destination_path=extract_arguments.destination, strip_components=extract_arguments.strip_components, progress=extract_arguments.progress, ) borgmatic.hooks.command.execute_hook( hooks.get('after_extract'), hooks.get('umask'), config_filename, 'post-extract', global_arguments.dry_run, **hook_context, ) borgmatic-1.7.9/borgmatic/actions/info.py000066400000000000000000000024361440467744700204410ustar00rootroot00000000000000import json import logging import borgmatic.borg.info import borgmatic.borg.rlist import borgmatic.config.validate logger = logging.getLogger(__name__) def run_info( repository, storage, local_borg_version, info_arguments, local_path, remote_path, ): ''' Run the "info" action for the given repository and archive. If info_arguments.json is True, yield the JSON output from the info for the archive. 
''' if info_arguments.repository is None or borgmatic.config.validate.repositories_match( repository, info_arguments.repository ): if not info_arguments.json: # pragma: nocover logger.answer(f'{repository}: Displaying archive summary information') info_arguments.archive = borgmatic.borg.rlist.resolve_archive_name( repository, info_arguments.archive, storage, local_borg_version, local_path, remote_path, ) json_output = borgmatic.borg.info.display_archives_info( repository, storage, local_borg_version, info_arguments=info_arguments, local_path=local_path, remote_path=remote_path, ) if json_output: # pragma: nocover yield json.loads(json_output) borgmatic-1.7.9/borgmatic/actions/list.py000066400000000000000000000025741440467744700204640ustar00rootroot00000000000000import json import logging import borgmatic.borg.list import borgmatic.config.validate logger = logging.getLogger(__name__) def run_list( repository, storage, local_borg_version, list_arguments, local_path, remote_path, ): ''' Run the "list" action for the given repository and archive. If list_arguments.json is True, yield the JSON output from listing the archive. 
''' if list_arguments.repository is None or borgmatic.config.validate.repositories_match( repository, list_arguments.repository ): if not list_arguments.json: # pragma: nocover if list_arguments.find_paths: logger.answer(f'{repository}: Searching archives') elif not list_arguments.archive: logger.answer(f'{repository}: Listing archives') list_arguments.archive = borgmatic.borg.rlist.resolve_archive_name( repository, list_arguments.archive, storage, local_borg_version, local_path, remote_path, ) json_output = borgmatic.borg.list.list_archive( repository, storage, local_borg_version, list_arguments=list_arguments, local_path=local_path, remote_path=remote_path, ) if json_output: # pragma: nocover yield json.loads(json_output) borgmatic-1.7.9/borgmatic/actions/mount.py000066400000000000000000000024401440467744700206430ustar00rootroot00000000000000import logging import borgmatic.borg.mount import borgmatic.borg.rlist import borgmatic.config.validate logger = logging.getLogger(__name__) def run_mount( repository, storage, local_borg_version, mount_arguments, local_path, remote_path, ): ''' Run the "mount" action for the given repository. 
''' if mount_arguments.repository is None or borgmatic.config.validate.repositories_match( repository, mount_arguments.repository ): if mount_arguments.archive: logger.info('{}: Mounting archive {}'.format(repository, mount_arguments.archive)) else: # pragma: nocover logger.info('{}: Mounting repository'.format(repository)) borgmatic.borg.mount.mount_archive( repository, borgmatic.borg.rlist.resolve_archive_name( repository, mount_arguments.archive, storage, local_borg_version, local_path, remote_path, ), mount_arguments.mount_point, mount_arguments.paths, mount_arguments.foreground, mount_arguments.options, storage, local_borg_version, local_path=local_path, remote_path=remote_path, ) borgmatic-1.7.9/borgmatic/actions/prune.py000066400000000000000000000026271440467744700206410ustar00rootroot00000000000000import logging import borgmatic.borg.prune import borgmatic.config.validate import borgmatic.hooks.command logger = logging.getLogger(__name__) def run_prune( config_filename, repository, storage, retention, hooks, hook_context, local_borg_version, prune_arguments, global_arguments, dry_run_label, local_path, remote_path, ): ''' Run the "prune" action for the given repository. 
''' if prune_arguments.repository and not borgmatic.config.validate.repositories_match( repository, prune_arguments.repository ): return borgmatic.hooks.command.execute_hook( hooks.get('before_prune'), hooks.get('umask'), config_filename, 'pre-prune', global_arguments.dry_run, **hook_context, ) logger.info('{}: Pruning archives{}'.format(repository, dry_run_label)) borgmatic.borg.prune.prune_archives( global_arguments.dry_run, repository, storage, retention, local_borg_version, local_path=local_path, remote_path=remote_path, stats=prune_arguments.stats, list_archives=prune_arguments.list_archives, ) borgmatic.hooks.command.execute_hook( hooks.get('after_prune'), hooks.get('umask'), config_filename, 'post-prune', global_arguments.dry_run, **hook_context, ) borgmatic-1.7.9/borgmatic/actions/rcreate.py000066400000000000000000000020121440467744700211210ustar00rootroot00000000000000import logging import borgmatic.borg.rcreate import borgmatic.config.validate logger = logging.getLogger(__name__) def run_rcreate( repository, storage, local_borg_version, rcreate_arguments, global_arguments, local_path, remote_path, ): ''' Run the "rcreate" action for the given repository. 
''' if rcreate_arguments.repository and not borgmatic.config.validate.repositories_match( repository, rcreate_arguments.repository ): return logger.info('{}: Creating repository'.format(repository)) borgmatic.borg.rcreate.create_repository( global_arguments.dry_run, repository, storage, local_borg_version, rcreate_arguments.encryption_mode, rcreate_arguments.source_repository, rcreate_arguments.copy_crypt_key, rcreate_arguments.append_only, rcreate_arguments.storage_quota, rcreate_arguments.make_parent_dirs, local_path=local_path, remote_path=remote_path, ) borgmatic-1.7.9/borgmatic/actions/restore.py000066400000000000000000000300051440467744700211620ustar00rootroot00000000000000import copy import logging import os import borgmatic.borg.extract import borgmatic.borg.list import borgmatic.borg.mount import borgmatic.borg.rlist import borgmatic.borg.state import borgmatic.config.validate import borgmatic.hooks.dispatch import borgmatic.hooks.dump logger = logging.getLogger(__name__) UNSPECIFIED_HOOK = object() def get_configured_database( hooks, archive_database_names, hook_name, database_name, configuration_database_name=None ): ''' Find the first database with the given hook name and database name in the configured hooks dict and the given archive database names dict (from hook name to database names contained in a particular backup archive). If UNSPECIFIED_HOOK is given as the hook name, search all database hooks for the named database. If a configuration database name is given, use that instead of the database name to lookup the database in the given hooks configuration. Return the found database as a tuple of (found hook name, database configuration dict). 
''' if not configuration_database_name: configuration_database_name = database_name if hook_name == UNSPECIFIED_HOOK: hooks_to_search = hooks else: hooks_to_search = {hook_name: hooks[hook_name]} return next( ( (name, hook_database) for (name, hook) in hooks_to_search.items() for hook_database in hook if hook_database['name'] == configuration_database_name and database_name in archive_database_names.get(name, []) ), (None, None), ) def get_configured_hook_name_and_database(hooks, database_name): ''' Find the hook name and first database dict with the given database name in the configured hooks dict. This searches across all database hooks. ''' def restore_single_database( repository, location, storage, hooks, local_borg_version, global_arguments, local_path, remote_path, archive_name, hook_name, database, ): # pragma: no cover ''' Given (among other things) an archive name, a database hook name, and a configured database configuration dict, restore that database from the archive. ''' logger.info(f'{repository}: Restoring database {database["name"]}') dump_pattern = borgmatic.hooks.dispatch.call_hooks( 'make_database_dump_pattern', hooks, repository, borgmatic.hooks.dump.DATABASE_HOOK_NAMES, location, database['name'], )[hook_name] # Kick off a single database extract to stdout. extract_process = borgmatic.borg.extract.extract_archive( dry_run=global_arguments.dry_run, repository=repository, archive=archive_name, paths=borgmatic.hooks.dump.convert_glob_patterns_to_borg_patterns([dump_pattern]), location_config=location, storage_config=storage, local_borg_version=local_borg_version, local_path=local_path, remote_path=remote_path, destination_path='/', # A directory format dump isn't a single file, and therefore can't extract # to stdout. In this case, the extract_process return value is None. extract_to_stdout=bool(database.get('format') != 'directory'), ) # Run a single database restore, consuming the extract stdout (if any). 
borgmatic.hooks.dispatch.call_hooks( 'restore_database_dump', {hook_name: [database]}, repository, borgmatic.hooks.dump.DATABASE_HOOK_NAMES, location, global_arguments.dry_run, extract_process, ) def collect_archive_database_names( repository, archive, location, storage, local_borg_version, local_path, remote_path, ): ''' Given a local or remote repository path, a resolved archive name, a location configuration dict, a storage configuration dict, the local Borg version, and local and remote Borg paths, query the archive for the names of databases it contains and return them as a dict from hook name to a sequence of database names. ''' borgmatic_source_directory = os.path.expanduser( location.get( 'borgmatic_source_directory', borgmatic.borg.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY ) ).lstrip('/') parent_dump_path = os.path.expanduser( borgmatic.hooks.dump.make_database_dump_path(borgmatic_source_directory, '*_databases/*/*') ) dump_paths = borgmatic.borg.list.capture_archive_listing( repository, archive, storage, local_borg_version, list_path=parent_dump_path, local_path=local_path, remote_path=remote_path, ) # Determine the database names corresponding to the dumps found in the archive and # add them to restore_names. 
archive_database_names = {} for dump_path in dump_paths: try: (hook_name, _, database_name) = dump_path.split( borgmatic_source_directory + os.path.sep, 1 )[1].split(os.path.sep)[0:3] except (ValueError, IndexError): logger.warning( f'{repository}: Ignoring invalid database dump path "{dump_path}" in archive {archive}' ) else: if database_name not in archive_database_names.get(hook_name, []): archive_database_names.setdefault(hook_name, []).extend([database_name]) return archive_database_names def find_databases_to_restore(requested_database_names, archive_database_names): ''' Given a sequence of requested database names to restore and a dict of hook name to the names of databases found in an archive, return an expanded sequence of database names to restore, replacing "all" with actual database names as appropriate. Raise ValueError if any of the requested database names cannot be found in the archive. ''' # A map from database hook name to the database names to restore for that hook. restore_names = ( {UNSPECIFIED_HOOK: requested_database_names} if requested_database_names else {UNSPECIFIED_HOOK: ['all']} ) # If "all" is in restore_names, then replace it with the names of dumps found within the # archive. if 'all' in restore_names[UNSPECIFIED_HOOK]: restore_names[UNSPECIFIED_HOOK].remove('all') for (hook_name, database_names) in archive_database_names.items(): restore_names.setdefault(hook_name, []).extend(database_names) # If a database is to be restored as part of "all", then remove it from restore names so # it doesn't get restored twice. 
for database_name in database_names: if database_name in restore_names[UNSPECIFIED_HOOK]: restore_names[UNSPECIFIED_HOOK].remove(database_name) if not restore_names[UNSPECIFIED_HOOK]: restore_names.pop(UNSPECIFIED_HOOK) combined_restore_names = set( name for database_names in restore_names.values() for name in database_names ) combined_archive_database_names = set( name for database_names in archive_database_names.values() for name in database_names ) missing_names = sorted(set(combined_restore_names) - combined_archive_database_names) if missing_names: joined_names = ', '.join(f'"{name}"' for name in missing_names) raise ValueError( f"Cannot restore database{'s' if len(missing_names) > 1 else ''} {joined_names} missing from archive" ) return restore_names def ensure_databases_found(restore_names, remaining_restore_names, found_names): ''' Given a dict from hook name to database names to restore, a dict from hook name to remaining database names to restore, and a sequence of found (actually restored) database names, raise ValueError if requested databases to restore were missing from the archive and/or configuration. ''' combined_restore_names = set( name for database_names in tuple(restore_names.values()) + tuple(remaining_restore_names.values()) for name in database_names ) if not combined_restore_names and not found_names: raise ValueError('No databases were found to restore') missing_names = sorted(set(combined_restore_names) - set(found_names)) if missing_names: joined_names = ', '.join(f'"{name}"' for name in missing_names) raise ValueError( f"Cannot restore database{'s' if len(missing_names) > 1 else ''} {joined_names} missing from borgmatic's configuration" ) def run_restore( repository, location, storage, hooks, local_borg_version, restore_arguments, global_arguments, local_path, remote_path, ): ''' Run the "restore" action for the given repository, but only if the repository matches the requested repository in restore arguments. 
Raise ValueError if a configured database could not be found to restore. ''' if restore_arguments.repository and not borgmatic.config.validate.repositories_match( repository, restore_arguments.repository ): return logger.info( '{}: Restoring databases from archive {}'.format(repository, restore_arguments.archive) ) borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured( 'remove_database_dumps', hooks, repository, borgmatic.hooks.dump.DATABASE_HOOK_NAMES, location, global_arguments.dry_run, ) archive_name = borgmatic.borg.rlist.resolve_archive_name( repository, restore_arguments.archive, storage, local_borg_version, local_path, remote_path, ) archive_database_names = collect_archive_database_names( repository, archive_name, location, storage, local_borg_version, local_path, remote_path, ) restore_names = find_databases_to_restore(restore_arguments.databases, archive_database_names) found_names = set() remaining_restore_names = {} for hook_name, database_names in restore_names.items(): for database_name in database_names: found_hook_name, found_database = get_configured_database( hooks, archive_database_names, hook_name, database_name ) if not found_database: remaining_restore_names.setdefault(found_hook_name or hook_name, []).append( database_name ) continue found_names.add(database_name) restore_single_database( repository, location, storage, hooks, local_borg_version, global_arguments, local_path, remote_path, archive_name, found_hook_name or hook_name, found_database, ) # For any database that weren't found via exact matches in the hooks configuration, try to # fallback to "all" entries. 
for hook_name, database_names in remaining_restore_names.items(): for database_name in database_names: found_hook_name, found_database = get_configured_database( hooks, archive_database_names, hook_name, database_name, 'all' ) if not found_database: continue found_names.add(database_name) database = copy.copy(found_database) database['name'] = database_name restore_single_database( repository, location, storage, hooks, local_borg_version, global_arguments, local_path, remote_path, archive_name, found_hook_name or hook_name, database, ) borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured( 'remove_database_dumps', hooks, repository, borgmatic.hooks.dump.DATABASE_HOOK_NAMES, location, global_arguments.dry_run, ) ensure_databases_found(restore_names, remaining_restore_names, found_names) borgmatic-1.7.9/borgmatic/actions/rinfo.py000066400000000000000000000020311440467744700206120ustar00rootroot00000000000000import json import logging import borgmatic.borg.rinfo import borgmatic.config.validate logger = logging.getLogger(__name__) def run_rinfo( repository, storage, local_borg_version, rinfo_arguments, local_path, remote_path, ): ''' Run the "rinfo" action for the given repository. If rinfo_arguments.json is True, yield the JSON output from the info for the repository. 
''' if rinfo_arguments.repository is None or borgmatic.config.validate.repositories_match( repository, rinfo_arguments.repository ): if not rinfo_arguments.json: # pragma: nocover logger.answer('{}: Displaying repository summary information'.format(repository)) json_output = borgmatic.borg.rinfo.display_repository_info( repository, storage, local_borg_version, rinfo_arguments=rinfo_arguments, local_path=local_path, remote_path=remote_path, ) if json_output: # pragma: nocover yield json.loads(json_output) borgmatic-1.7.9/borgmatic/actions/rlist.py000066400000000000000000000017651440467744700206470ustar00rootroot00000000000000import json import logging import borgmatic.borg.rlist import borgmatic.config.validate logger = logging.getLogger(__name__) def run_rlist( repository, storage, local_borg_version, rlist_arguments, local_path, remote_path, ): ''' Run the "rlist" action for the given repository. If rlist_arguments.json is True, yield the JSON output from listing the repository. ''' if rlist_arguments.repository is None or borgmatic.config.validate.repositories_match( repository, rlist_arguments.repository ): if not rlist_arguments.json: # pragma: nocover logger.answer('{}: Listing repository'.format(repository)) json_output = borgmatic.borg.rlist.list_repository( repository, storage, local_borg_version, rlist_arguments=rlist_arguments, local_path=local_path, remote_path=remote_path, ) if json_output: # pragma: nocover yield json.loads(json_output) borgmatic-1.7.9/borgmatic/actions/transfer.py000066400000000000000000000011621440467744700213250ustar00rootroot00000000000000import logging import borgmatic.borg.transfer logger = logging.getLogger(__name__) def run_transfer( repository, storage, local_borg_version, transfer_arguments, global_arguments, local_path, remote_path, ): ''' Run the "transfer" action for the given repository. 
''' logger.info(f'{repository}: Transferring archives to repository') borgmatic.borg.transfer.transfer_archives( global_arguments.dry_run, repository, storage, local_borg_version, transfer_arguments, local_path=local_path, remote_path=remote_path, ) borgmatic-1.7.9/borgmatic/borg/000077500000000000000000000000001440467744700164205ustar00rootroot00000000000000borgmatic-1.7.9/borgmatic/borg/__init__.py000066400000000000000000000000001440467744700205170ustar00rootroot00000000000000borgmatic-1.7.9/borgmatic/borg/borg.py000066400000000000000000000044361440467744700177320ustar00rootroot00000000000000import logging import borgmatic.logger from borgmatic.borg import environment, flags from borgmatic.execute import execute_command logger = logging.getLogger(__name__) REPOSITORYLESS_BORG_COMMANDS = {'serve', None} BORG_SUBCOMMANDS_WITH_SUBCOMMANDS = {'key', 'debug'} BORG_SUBCOMMANDS_WITHOUT_REPOSITORY = (('debug', 'info'), ('debug', 'convert-profile'), ()) def run_arbitrary_borg( repository, storage_config, local_borg_version, options, archive=None, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, a storage config dict, the local Borg version, a sequence of arbitrary command-line Borg options, and an optional archive name, run an arbitrary Borg command on the given repository/archive. ''' borgmatic.logger.add_custom_log_levels() lock_wait = storage_config.get('lock_wait', None) try: options = options[1:] if options[0] == '--' else options # Borg commands like "key" have a sub-command ("export", etc.) that must follow it. 
command_options_start_index = 2 if options[0] in BORG_SUBCOMMANDS_WITH_SUBCOMMANDS else 1 borg_command = tuple(options[:command_options_start_index]) command_options = tuple(options[command_options_start_index:]) except IndexError: borg_command = () command_options = () if borg_command in BORG_SUBCOMMANDS_WITHOUT_REPOSITORY: repository_archive_flags = () elif archive: repository_archive_flags = flags.make_repository_archive_flags( repository, archive, local_borg_version ) else: repository_archive_flags = flags.make_repository_flags(repository, local_borg_version) full_command = ( (local_path,) + borg_command + repository_archive_flags + command_options + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + flags.make_flags('remote-path', remote_path) + flags.make_flags('lock-wait', lock_wait) ) return execute_command( full_command, output_log_level=logging.ANSWER, borg_local_path=local_path, extra_environment=environment.make_environment(storage_config), ) borgmatic-1.7.9/borgmatic/borg/break_lock.py000066400000000000000000000023201440467744700210630ustar00rootroot00000000000000import logging from borgmatic.borg import environment, flags from borgmatic.execute import execute_command logger = logging.getLogger(__name__) def break_lock( repository, storage_config, local_borg_version, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, a storage configuration dict, the local Borg version, and optional local and remote Borg paths, break any repository and cache locks leftover from Borg aborting. 
''' umask = storage_config.get('umask', None) lock_wait = storage_config.get('lock_wait', None) full_command = ( (local_path, 'break-lock') + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + flags.make_repository_flags(repository, local_borg_version) ) borg_environment = environment.make_environment(storage_config) execute_command(full_command, borg_local_path=local_path, extra_environment=borg_environment) borgmatic-1.7.9/borgmatic/borg/check.py000066400000000000000000000260771440467744700200630ustar00rootroot00000000000000import argparse import datetime import json import logging import os import pathlib from borgmatic.borg import environment, extract, feature, flags, rinfo, state from borgmatic.execute import DO_NOT_CAPTURE, execute_command DEFAULT_CHECKS = ( {'name': 'repository', 'frequency': '1 month'}, {'name': 'archives', 'frequency': '1 month'}, ) DEFAULT_PREFIX = '{hostname}-' logger = logging.getLogger(__name__) def parse_checks(consistency_config, only_checks=None): ''' Given a consistency config with a "checks" sequence of dicts and an optional list of override checks, return a tuple of named checks to run. For example, given a retention config of: {'checks': ({'name': 'repository'}, {'name': 'archives'})} This will be returned as: ('repository', 'archives') If no "checks" option is present in the config, return the DEFAULT_CHECKS. If a checks value has a name of "disabled", return an empty tuple, meaning that no checks should be run. 
''' checks = only_checks or tuple( check_config['name'] for check_config in (consistency_config.get('checks', None) or DEFAULT_CHECKS) ) checks = tuple(check.lower() for check in checks) if 'disabled' in checks: if len(checks) > 1: logger.warning( 'Multiple checks are configured, but one of them is "disabled"; not running any checks' ) return () return checks def parse_frequency(frequency): ''' Given a frequency string with a number and a unit of time, return a corresponding datetime.timedelta instance or None if the frequency is None or "always". For instance, given "3 weeks", return datetime.timedelta(weeks=3) Raise ValueError if the given frequency cannot be parsed. ''' if not frequency: return None frequency = frequency.strip().lower() if frequency == 'always': return None try: number, time_unit = frequency.split(' ') number = int(number) except ValueError: raise ValueError(f"Could not parse consistency check frequency '{frequency}'") if not time_unit.endswith('s'): time_unit += 's' if time_unit == 'months': number *= 30 time_unit = 'days' elif time_unit == 'years': number *= 365 time_unit = 'days' try: return datetime.timedelta(**{time_unit: number}) except TypeError: raise ValueError(f"Could not parse consistency check frequency '{frequency}'") def filter_checks_on_frequency( location_config, consistency_config, borg_repository_id, checks, force ): ''' Given a location config, a consistency config with a "checks" sequence of dicts, a Borg repository ID, a sequence of checks, and whether to force checks to run, filter down those checks based on the configured "frequency" for each check as compared to its check time file. In other words, a check whose check time file's timestamp is too new (based on the configured frequency) will get cut from the returned sequence of checks. 
Example: consistency_config = { 'checks': [ { 'name': 'archives', 'frequency': '2 weeks', }, ] } When this function is called with that consistency_config and "archives" in checks, "archives" will get filtered out of the returned result if its check time file is newer than 2 weeks old, indicating that it's not yet time to run that check again. Raise ValueError if a frequency cannot be parsed. ''' filtered_checks = list(checks) if force: return tuple(filtered_checks) for check_config in consistency_config.get('checks', DEFAULT_CHECKS): check = check_config['name'] if checks and check not in checks: continue frequency_delta = parse_frequency(check_config.get('frequency')) if not frequency_delta: continue check_time = read_check_time( make_check_time_path(location_config, borg_repository_id, check) ) if not check_time: continue # If we've not yet reached the time when the frequency dictates we're ready for another # check, skip this check. if datetime.datetime.now() < check_time + frequency_delta: remaining = check_time + frequency_delta - datetime.datetime.now() logger.info( f'Skipping {check} check due to configured frequency; {remaining} until next check' ) filtered_checks.remove(check) return tuple(filtered_checks) def make_check_flags(local_borg_version, checks, check_last=None, prefix=None): ''' Given the local Borg version and a parsed sequence of checks, transform the checks into tuple of command-line flags. For example, given parsed checks of: ('repository',) This will be returned as: ('--repository-only',) However, if both "repository" and "archives" are in checks, then omit them from the returned flags because Borg does both checks by default. If "data" is in checks, that implies "archives". Additionally, if a check_last value is given and "archives" is in checks, then include a "--last" flag. And if a prefix value is given and "archives" is in checks, then include a "--match-archives" flag. 
''' if 'data' in checks: data_flags = ('--verify-data',) checks += ('archives',) else: data_flags = () if 'archives' in checks: last_flags = ('--last', str(check_last)) if check_last else () if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version): match_archives_flags = ('--match-archives', f'sh:{prefix}*') if prefix else () else: match_archives_flags = ('--glob-archives', f'{prefix}*') if prefix else () else: last_flags = () match_archives_flags = () if check_last: logger.warning( 'Ignoring check_last option, as "archives" or "data" are not in consistency checks' ) if prefix: logger.warning( 'Ignoring consistency prefix option, as "archives" or "data" are not in consistency checks' ) common_flags = last_flags + match_archives_flags + data_flags if {'repository', 'archives'}.issubset(set(checks)): return common_flags return ( tuple('--{}-only'.format(check) for check in checks if check in ('repository', 'archives')) + common_flags ) def make_check_time_path(location_config, borg_repository_id, check_type): ''' Given a location configuration dict, a Borg repository ID, and the name of a check type ("repository", "archives", etc.), return a path for recording that check's time (the time of that check last occurring). ''' return os.path.join( os.path.expanduser( location_config.get( 'borgmatic_source_directory', state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY ) ), 'checks', borg_repository_id, check_type, ) def write_check_time(path): # pragma: no cover ''' Record a check time of now as the modification time of the given path. ''' logger.debug(f'Writing check time at {path}') os.makedirs(os.path.dirname(path), mode=0o700, exist_ok=True) pathlib.Path(path, mode=0o600).touch() def read_check_time(path): ''' Return the check time based on the modification time of the given path. Return None if the path doesn't exist. 
''' logger.debug(f'Reading check time from {path}') try: return datetime.datetime.fromtimestamp(os.stat(path).st_mtime) except FileNotFoundError: return None def check_archives( repository, location_config, storage_config, consistency_config, local_borg_version, local_path='borg', remote_path=None, progress=None, repair=None, only_checks=None, force=None, ): ''' Given a local or remote repository path, a storage config dict, a consistency config dict, local/remote commands to run, whether to include progress information, whether to attempt a repair, and an optional list of checks to use instead of configured checks, check the contained Borg archives for consistency. If there are no consistency checks to run, skip running them. Raises ValueError if the Borg repository ID cannot be determined. ''' try: borg_repository_id = json.loads( rinfo.display_repository_info( repository, storage_config, local_borg_version, argparse.Namespace(json=True), local_path, remote_path, ) )['repository']['id'] except (json.JSONDecodeError, KeyError): raise ValueError(f'Cannot determine Borg repository ID for {repository}') checks = filter_checks_on_frequency( location_config, consistency_config, borg_repository_id, parse_checks(consistency_config, only_checks), force, ) check_last = consistency_config.get('check_last', None) lock_wait = None extra_borg_options = storage_config.get('extra_borg_options', {}).get('check', '') if set(checks).intersection({'repository', 'archives', 'data'}): lock_wait = storage_config.get('lock_wait', None) verbosity_flags = () if logger.isEnabledFor(logging.INFO): verbosity_flags = ('--info',) if logger.isEnabledFor(logging.DEBUG): verbosity_flags = ('--debug', '--show-rc') prefix = consistency_config.get('prefix', DEFAULT_PREFIX) full_command = ( (local_path, 'check') + (('--repair',) if repair else ()) + make_check_flags(local_borg_version, checks, check_last, prefix) + (('--remote-path', remote_path) if remote_path else ()) + (('--lock-wait', 
str(lock_wait)) if lock_wait else ()) + verbosity_flags + (('--progress',) if progress else ()) + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ()) + flags.make_repository_flags(repository, local_borg_version) ) borg_environment = environment.make_environment(storage_config) # The Borg repair option triggers an interactive prompt, which won't work when output is # captured. And progress messes with the terminal directly. if repair or progress: execute_command( full_command, output_file=DO_NOT_CAPTURE, extra_environment=borg_environment ) else: execute_command(full_command, extra_environment=borg_environment) for check in checks: write_check_time(make_check_time_path(location_config, borg_repository_id, check)) if 'extract' in checks: extract.extract_last_archive_dry_run( storage_config, local_borg_version, repository, lock_wait, local_path, remote_path ) write_check_time(make_check_time_path(location_config, borg_repository_id, 'extract')) borgmatic-1.7.9/borgmatic/borg/compact.py000066400000000000000000000033131440467744700204200ustar00rootroot00000000000000import logging from borgmatic.borg import environment, flags from borgmatic.execute import execute_command logger = logging.getLogger(__name__) def compact_segments( dry_run, repository, storage_config, local_borg_version, local_path='borg', remote_path=None, progress=False, cleanup_commits=False, threshold=None, ): ''' Given dry-run flag, a local or remote repository path, a storage config dict, and the local Borg version, compact the segments in a repository. 
''' umask = storage_config.get('umask', None) lock_wait = storage_config.get('lock_wait', None) extra_borg_options = storage_config.get('extra_borg_options', {}).get('compact', '') full_command = ( (local_path, 'compact') + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + (('--progress',) if progress else ()) + (('--cleanup-commits',) if cleanup_commits else ()) + (('--threshold', str(threshold)) if threshold else ()) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ()) + flags.make_repository_flags(repository, local_borg_version) ) if dry_run: logging.info(f'{repository}: Skipping compact (dry run)') return execute_command( full_command, output_log_level=logging.INFO, borg_local_path=local_path, extra_environment=environment.make_environment(storage_config), ) borgmatic-1.7.9/borgmatic/borg/create.py000066400000000000000000000451421440467744700202430ustar00rootroot00000000000000import glob import itertools import logging import os import pathlib import stat import tempfile import borgmatic.logger from borgmatic.borg import environment, feature, flags, state from borgmatic.execute import ( DO_NOT_CAPTURE, execute_command, execute_command_and_capture_output, execute_command_with_processes, ) logger = logging.getLogger(__name__) def expand_directory(directory): ''' Given a directory path, expand any tilde (representing a user's home directory) and any globs therein. Return a list of one or more resulting paths. ''' expanded_directory = os.path.expanduser(directory) return glob.glob(expanded_directory) or [expanded_directory] def expand_directories(directories): ''' Given a sequence of directory paths, expand tildes and globs in each one. 
Return all the resulting directories as a single flattened tuple. ''' if directories is None: return () return tuple( itertools.chain.from_iterable(expand_directory(directory) for directory in directories) ) def expand_home_directories(directories): ''' Given a sequence of directory paths, expand tildes in each one. Do not perform any globbing. Return the results as a tuple. ''' if directories is None: return () return tuple(os.path.expanduser(directory) for directory in directories) def map_directories_to_devices(directories): ''' Given a sequence of directories, return a map from directory to an identifier for the device on which that directory resides or None if the path doesn't exist. This is handy for determining whether two different directories are on the same filesystem (have the same device identifier). ''' return { directory: os.stat(directory).st_dev if os.path.exists(directory) else None for directory in directories } def deduplicate_directories(directory_devices, additional_directory_devices): ''' Given a map from directory to the identifier for the device on which that directory resides, return the directories as a sorted tuple with all duplicate child directories removed. For instance, if paths is ('/foo', '/foo/bar'), return just: ('/foo',) The one exception to this rule is if two paths are on different filesystems (devices). In that case, they won't get de-duplicated in case they both need to be passed to Borg (e.g. the location.one_file_system option is true). The idea is that if Borg is given a parent directory, then it doesn't also need to be given child directories, because it will naturally spider the contents of the parent directory. And there are cases where Borg coming across the same file twice will result in duplicate reads and even hangs, e.g. when a database hook is using a named pipe for streaming database dumps to Borg. 
If any additional directory devices are given, also deduplicate against them, but don't include them in the returned directories. ''' deduplicated = set() directories = sorted(directory_devices.keys()) additional_directories = sorted(additional_directory_devices.keys()) all_devices = {**directory_devices, **additional_directory_devices} for directory in directories: deduplicated.add(directory) parents = pathlib.PurePath(directory).parents # If another directory in the given list (or the additional list) is a parent of current # directory (even n levels up) and both are on the same filesystem, then the current # directory is a duplicate. for other_directory in directories + additional_directories: for parent in parents: if ( pathlib.PurePath(other_directory) == parent and all_devices[directory] is not None and all_devices[other_directory] == all_devices[directory] ): if directory in deduplicated: deduplicated.remove(directory) break return tuple(sorted(deduplicated)) def write_pattern_file(patterns=None, sources=None, pattern_file=None): ''' Given a sequence of patterns and an optional sequence of source directories, write them to a named temporary file (with the source directories as additional roots) and return the file. If an optional open pattern file is given, overwrite it instead of making a new temporary file. Return None if no patterns are provided. ''' if not patterns and not sources: return None if pattern_file is None: pattern_file = tempfile.NamedTemporaryFile('w') else: pattern_file.seek(0) pattern_file.write( '\n'.join(tuple(patterns or ()) + tuple(f'R {source}' for source in (sources or []))) ) pattern_file.flush() return pattern_file def ensure_files_readable(*filename_lists): ''' Given a sequence of filename sequences, ensure that each filename is openable. This prevents unreadable files from being passed to Borg, which in certain situations only warns instead of erroring. 
''' for file_object in itertools.chain.from_iterable( filename_list for filename_list in filename_lists if filename_list ): open(file_object).close() def make_pattern_flags(location_config, pattern_filename=None): ''' Given a location config dict with a potential patterns_from option, and a filename containing any additional patterns, return the corresponding Borg flags for those files as a tuple. ''' pattern_filenames = tuple(location_config.get('patterns_from') or ()) + ( (pattern_filename,) if pattern_filename else () ) return tuple( itertools.chain.from_iterable( ('--patterns-from', pattern_filename) for pattern_filename in pattern_filenames ) ) def make_exclude_flags(location_config, exclude_filename=None): ''' Given a location config dict with various exclude options, and a filename containing any exclude patterns, return the corresponding Borg flags as a tuple. ''' exclude_filenames = tuple(location_config.get('exclude_from') or ()) + ( (exclude_filename,) if exclude_filename else () ) exclude_from_flags = tuple( itertools.chain.from_iterable( ('--exclude-from', exclude_filename) for exclude_filename in exclude_filenames ) ) caches_flag = ('--exclude-caches',) if location_config.get('exclude_caches') else () if_present_flags = tuple( itertools.chain.from_iterable( ('--exclude-if-present', if_present) for if_present in location_config.get('exclude_if_present', ()) ) ) keep_exclude_tags_flags = ( ('--keep-exclude-tags',) if location_config.get('keep_exclude_tags') else () ) exclude_nodump_flags = ('--exclude-nodump',) if location_config.get('exclude_nodump') else () return ( exclude_from_flags + caches_flag + if_present_flags + keep_exclude_tags_flags + exclude_nodump_flags ) def make_list_filter_flags(local_borg_version, dry_run): ''' Given the local Borg version and whether this is a dry run, return the corresponding flags for passing to "--list --filter". The general idea is that excludes are shown for a dry run or when the verbosity is debug. 
''' base_flags = 'AME' show_excludes = logger.isEnabledFor(logging.DEBUG) if feature.available(feature.Feature.EXCLUDED_FILES_MINUS, local_borg_version): if show_excludes or dry_run: return f'{base_flags}+-' else: return base_flags if show_excludes: return f'{base_flags}x-' else: return f'{base_flags}-' DEFAULT_ARCHIVE_NAME_FORMAT = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}' def collect_borgmatic_source_directories(borgmatic_source_directory): ''' Return a list of borgmatic-specific source directories used for state like database backups. ''' if not borgmatic_source_directory: borgmatic_source_directory = state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY return ( [borgmatic_source_directory] if os.path.exists(os.path.expanduser(borgmatic_source_directory)) else [] ) ROOT_PATTERN_PREFIX = 'R ' def pattern_root_directories(patterns=None): ''' Given a sequence of patterns, parse out and return just the root directories. ''' if not patterns: return [] return [ pattern.split(ROOT_PATTERN_PREFIX, maxsplit=1)[1] for pattern in patterns if pattern.startswith(ROOT_PATTERN_PREFIX) ] def special_file(path): ''' Return whether the given path is a special file (character device, block device, or named pipe / FIFO). ''' try: mode = os.stat(path).st_mode except (FileNotFoundError, OSError): return False return stat.S_ISCHR(mode) or stat.S_ISBLK(mode) or stat.S_ISFIFO(mode) def any_parent_directories(path, candidate_parents): ''' Return whether any of the given candidate parent directories are an actual parent of the given path. This includes grandparents, etc. 
''' for parent in candidate_parents: if pathlib.PurePosixPath(parent) in pathlib.PurePath(path).parents: return True return False def collect_special_file_paths( create_command, local_path, working_directory, borg_environment, skip_directories ): ''' Given a Borg create command as a tuple, a local Borg path, a working directory, and a dict of environment variables to pass to Borg, and a sequence of parent directories to skip, collect the paths for any special files (character devices, block devices, and named pipes / FIFOs) that Borg would encounter during a create. These are all paths that could cause Borg to hang if its --read-special flag is used. ''' paths_output = execute_command_and_capture_output( create_command + ('--dry-run', '--list'), capture_stderr=True, working_directory=working_directory, extra_environment=borg_environment, ) paths = tuple( path_line.split(' ', 1)[1] for path_line in paths_output.split('\n') if path_line and path_line.startswith('- ') or path_line.startswith('+ ') ) return tuple( path for path in paths if special_file(path) and not any_parent_directories(path, skip_directories) ) def create_archive( dry_run, repository, location_config, storage_config, local_borg_version, local_path='borg', remote_path=None, progress=False, stats=False, json=False, list_files=False, stream_processes=None, ): ''' Given vebosity/dry-run flags, a local or remote repository path, a location config dict, and a storage config dict, create a Borg archive and return Borg's JSON output (if any). If a sequence of stream processes is given (instances of subprocess.Popen), then execute the create command while also triggering the given processes to produce output. 
''' borgmatic.logger.add_custom_log_levels() borgmatic_source_directories = expand_directories( collect_borgmatic_source_directories(location_config.get('borgmatic_source_directory')) ) sources = deduplicate_directories( map_directories_to_devices( expand_directories( tuple(location_config.get('source_directories', ())) + borgmatic_source_directories ) ), additional_directory_devices=map_directories_to_devices( expand_directories(pattern_root_directories(location_config.get('patterns'))) ), ) ensure_files_readable(location_config.get('patterns_from'), location_config.get('exclude_from')) try: working_directory = os.path.expanduser(location_config.get('working_directory')) except TypeError: working_directory = None pattern_file = ( write_pattern_file(location_config.get('patterns'), sources) if location_config.get('patterns') or location_config.get('patterns_from') else None ) exclude_file = write_pattern_file( expand_home_directories(location_config.get('exclude_patterns')) ) checkpoint_interval = storage_config.get('checkpoint_interval', None) checkpoint_volume = storage_config.get('checkpoint_volume', None) chunker_params = storage_config.get('chunker_params', None) compression = storage_config.get('compression', None) upload_rate_limit = storage_config.get('upload_rate_limit', None) umask = storage_config.get('umask', None) lock_wait = storage_config.get('lock_wait', None) list_filter_flags = make_list_filter_flags(local_borg_version, dry_run) files_cache = location_config.get('files_cache') archive_name_format = storage_config.get('archive_name_format', DEFAULT_ARCHIVE_NAME_FORMAT) extra_borg_options = storage_config.get('extra_borg_options', {}).get('create', '') if feature.available(feature.Feature.ATIME, local_borg_version): atime_flags = ('--atime',) if location_config.get('atime') is True else () else: atime_flags = ('--noatime',) if location_config.get('atime') is False else () if feature.available(feature.Feature.NOFLAGS, local_borg_version): 
noflags_flags = ('--noflags',) if location_config.get('flags') is False else () else: noflags_flags = ('--nobsdflags',) if location_config.get('flags') is False else () if feature.available(feature.Feature.NUMERIC_IDS, local_borg_version): numeric_ids_flags = ('--numeric-ids',) if location_config.get('numeric_ids') else () else: numeric_ids_flags = ('--numeric-owner',) if location_config.get('numeric_ids') else () if feature.available(feature.Feature.UPLOAD_RATELIMIT, local_borg_version): upload_ratelimit_flags = ( ('--upload-ratelimit', str(upload_rate_limit)) if upload_rate_limit else () ) else: upload_ratelimit_flags = ( ('--remote-ratelimit', str(upload_rate_limit)) if upload_rate_limit else () ) if stream_processes and location_config.get('read_special') is False: logger.warning( f'{repository}: Ignoring configured "read_special" value of false, as true is needed for database hooks.' ) create_command = ( tuple(local_path.split(' ')) + ('create',) + make_pattern_flags(location_config, pattern_file.name if pattern_file else None) + make_exclude_flags(location_config, exclude_file.name if exclude_file else None) + (('--checkpoint-interval', str(checkpoint_interval)) if checkpoint_interval else ()) + (('--checkpoint-volume', str(checkpoint_volume)) if checkpoint_volume else ()) + (('--chunker-params', chunker_params) if chunker_params else ()) + (('--compression', compression) if compression else ()) + upload_ratelimit_flags + ( ('--one-file-system',) if location_config.get('one_file_system') or stream_processes else () ) + numeric_ids_flags + atime_flags + (('--noctime',) if location_config.get('ctime') is False else ()) + (('--nobirthtime',) if location_config.get('birthtime') is False else ()) + (('--read-special',) if location_config.get('read_special') or stream_processes else ()) + noflags_flags + (('--files-cache', files_cache) if files_cache else ()) + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + 
(('--lock-wait', str(lock_wait)) if lock_wait else ()) + ( ('--list', '--filter', list_filter_flags) if list_files and not json and not progress else () ) + (('--dry-run',) if dry_run else ()) + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ()) + flags.make_repository_archive_flags(repository, archive_name_format, local_borg_version) + (sources if not pattern_file else ()) ) if json: output_log_level = None elif list_files or (stats and not dry_run): output_log_level = logging.ANSWER else: output_log_level = logging.INFO # The progress output isn't compatible with captured and logged output, as progress messes with # the terminal directly. output_file = DO_NOT_CAPTURE if progress else None borg_environment = environment.make_environment(storage_config) # If database hooks are enabled (as indicated by streaming processes), exclude files that might # cause Borg to hang. But skip this if the user has explicitly set the "read_special" to True. if stream_processes and not location_config.get('read_special'): logger.debug(f'{repository}: Collecting special file paths') special_file_paths = collect_special_file_paths( create_command, local_path, working_directory, borg_environment, skip_directories=borgmatic_source_directories, ) if special_file_paths: logger.warning( f'{repository}: Excluding special files to prevent Borg from hanging: {", ".join(special_file_paths)}' ) exclude_file = write_pattern_file( expand_home_directories( tuple(location_config.get('exclude_patterns') or ()) + special_file_paths ), pattern_file=exclude_file, ) create_command += make_exclude_flags(location_config, exclude_file.name) create_command += ( (('--info',) if logger.getEffectiveLevel() == logging.INFO and not json else ()) + (('--stats',) if stats and not json and not dry_run else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) and not json else ()) + (('--progress',) if progress else ()) + (('--json',) if json else ()) ) if stream_processes: return 
execute_command_with_processes( create_command, stream_processes, output_log_level, output_file, borg_local_path=local_path, working_directory=working_directory, extra_environment=borg_environment, ) elif output_log_level is None: return execute_command_and_capture_output( create_command, working_directory=working_directory, extra_environment=borg_environment, ) else: execute_command( create_command, output_log_level, output_file, borg_local_path=local_path, working_directory=working_directory, extra_environment=borg_environment, ) borgmatic-1.7.9/borgmatic/borg/environment.py000066400000000000000000000025021440467744700213350ustar00rootroot00000000000000OPTION_TO_ENVIRONMENT_VARIABLE = { 'borg_base_directory': 'BORG_BASE_DIR', 'borg_config_directory': 'BORG_CONFIG_DIR', 'borg_cache_directory': 'BORG_CACHE_DIR', 'borg_security_directory': 'BORG_SECURITY_DIR', 'borg_keys_directory': 'BORG_KEYS_DIR', 'encryption_passcommand': 'BORG_PASSCOMMAND', 'encryption_passphrase': 'BORG_PASSPHRASE', 'ssh_command': 'BORG_RSH', 'temporary_directory': 'TMPDIR', } DEFAULT_BOOL_OPTION_TO_ENVIRONMENT_VARIABLE = { 'relocated_repo_access_is_ok': 'BORG_RELOCATED_REPO_ACCESS_IS_OK', 'unknown_unencrypted_repo_access_is_ok': 'BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK', } def make_environment(storage_config): ''' Given a borgmatic storage configuration dict, return its options converted to a Borg environment variable dict. 
''' environment = {} for option_name, environment_variable_name in OPTION_TO_ENVIRONMENT_VARIABLE.items(): value = storage_config.get(option_name) if value: environment[environment_variable_name] = value for ( option_name, environment_variable_name, ) in DEFAULT_BOOL_OPTION_TO_ENVIRONMENT_VARIABLE.items(): value = storage_config.get(option_name, False) environment[environment_variable_name] = 'yes' if value else 'no' return environment borgmatic-1.7.9/borgmatic/borg/export_tar.py000066400000000000000000000050271440467744700211650ustar00rootroot00000000000000import logging import os import borgmatic.logger from borgmatic.borg import environment, flags from borgmatic.execute import DO_NOT_CAPTURE, execute_command logger = logging.getLogger(__name__) def export_tar_archive( dry_run, repository, archive, paths, destination_path, storage_config, local_borg_version, local_path='borg', remote_path=None, tar_filter=None, list_files=False, strip_components=None, ): ''' Given a dry-run flag, a local or remote repository path, an archive name, zero or more paths to export from the archive, a destination path to export to, a storage configuration dict, the local Borg version, optional local and remote Borg paths, an optional filter program, whether to include per-file details, and an optional number of path components to strip, export the archive into the given destination path as a tar-formatted file. If the destination path is "-", then stream the output to stdout instead of to a file. 
''' borgmatic.logger.add_custom_log_levels() umask = storage_config.get('umask', None) lock_wait = storage_config.get('lock_wait', None) full_command = ( (local_path, 'export-tar') + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--list',) if list_files else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + (('--dry-run',) if dry_run else ()) + (('--tar-filter', tar_filter) if tar_filter else ()) + (('--strip-components', str(strip_components)) if strip_components else ()) + flags.make_repository_archive_flags( repository if ':' in repository else os.path.abspath(repository), archive, local_borg_version, ) + (destination_path,) + (tuple(paths) if paths else ()) ) if list_files: output_log_level = logging.ANSWER else: output_log_level = logging.INFO if dry_run: logging.info('{}: Skipping export to tar file (dry run)'.format(repository)) return execute_command( full_command, output_file=DO_NOT_CAPTURE if destination_path == '-' else None, output_log_level=output_log_level, borg_local_path=local_path, extra_environment=environment.make_environment(storage_config), ) borgmatic-1.7.9/borgmatic/borg/extract.py000066400000000000000000000121151440467744700204440ustar00rootroot00000000000000import logging import os import subprocess from borgmatic.borg import environment, feature, flags, rlist from borgmatic.execute import DO_NOT_CAPTURE, execute_command logger = logging.getLogger(__name__) def extract_last_archive_dry_run( storage_config, local_borg_version, repository, lock_wait=None, local_path='borg', remote_path=None, ): ''' Perform an extraction dry-run of the most recent archive. If there are no archives, skip the dry-run. 
''' remote_path_flags = ('--remote-path', remote_path) if remote_path else () lock_wait_flags = ('--lock-wait', str(lock_wait)) if lock_wait else () verbosity_flags = () if logger.isEnabledFor(logging.DEBUG): verbosity_flags = ('--debug', '--show-rc') elif logger.isEnabledFor(logging.INFO): verbosity_flags = ('--info',) try: last_archive_name = rlist.resolve_archive_name( repository, 'latest', storage_config, local_borg_version, local_path, remote_path ) except ValueError: logger.warning('No archives found. Skipping extract consistency check.') return list_flag = ('--list',) if logger.isEnabledFor(logging.DEBUG) else () borg_environment = environment.make_environment(storage_config) full_extract_command = ( (local_path, 'extract', '--dry-run') + remote_path_flags + lock_wait_flags + verbosity_flags + list_flag + flags.make_repository_archive_flags(repository, last_archive_name, local_borg_version) ) execute_command( full_extract_command, working_directory=None, extra_environment=borg_environment ) def extract_archive( dry_run, repository, archive, paths, location_config, storage_config, local_borg_version, local_path='borg', remote_path=None, destination_path=None, strip_components=None, progress=False, extract_to_stdout=False, ): ''' Given a dry-run flag, a local or remote repository path, an archive name, zero or more paths to restore from the archive, the local Borg version string, location/storage configuration dicts, optional local and remote Borg paths, and an optional destination path to extract to, extract the archive into the current directory. If extract to stdout is True, then start the extraction streaming to stdout, and return that extract process as an instance of subprocess.Popen. 
''' umask = storage_config.get('umask', None) lock_wait = storage_config.get('lock_wait', None) if progress and extract_to_stdout: raise ValueError('progress and extract_to_stdout cannot both be set') if feature.available(feature.Feature.NUMERIC_IDS, local_borg_version): numeric_ids_flags = ('--numeric-ids',) if location_config.get('numeric_ids') else () else: numeric_ids_flags = ('--numeric-owner',) if location_config.get('numeric_ids') else () if strip_components == 'all': if not paths: raise ValueError('The --strip-components flag with "all" requires at least one --path') # Calculate the maximum number of leading path components of the given paths. strip_components = max(0, *(len(path.split(os.path.sep)) - 1 for path in paths)) full_command = ( (local_path, 'extract') + (('--remote-path', remote_path) if remote_path else ()) + numeric_ids_flags + (('--umask', str(umask)) if umask else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--list', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + (('--dry-run',) if dry_run else ()) + (('--strip-components', str(strip_components)) if strip_components else ()) + (('--progress',) if progress else ()) + (('--stdout',) if extract_to_stdout else ()) + flags.make_repository_archive_flags( repository if ':' in repository else os.path.abspath(repository), archive, local_borg_version, ) + (tuple(paths) if paths else ()) ) borg_environment = environment.make_environment(storage_config) # The progress output isn't compatible with captured and logged output, as progress messes with # the terminal directly. 
if progress: return execute_command( full_command, output_file=DO_NOT_CAPTURE, working_directory=destination_path, extra_environment=borg_environment, ) return None if extract_to_stdout: return execute_command( full_command, output_file=subprocess.PIPE, working_directory=destination_path, run_to_completion=False, extra_environment=borg_environment, ) # Don't give Borg local path so as to error on warnings, as "borg extract" only gives a warning # if the restore paths don't exist in the archive. execute_command( full_command, working_directory=destination_path, extra_environment=borg_environment ) borgmatic-1.7.9/borgmatic/borg/feature.py000066400000000000000000000026741440467744700204360ustar00rootroot00000000000000from enum import Enum from pkg_resources import parse_version class Feature(Enum): COMPACT = 1 ATIME = 2 NOFLAGS = 3 NUMERIC_IDS = 4 UPLOAD_RATELIMIT = 5 SEPARATE_REPOSITORY_ARCHIVE = 6 RCREATE = 7 RLIST = 8 RINFO = 9 MATCH_ARCHIVES = 10 EXCLUDED_FILES_MINUS = 11 FEATURE_TO_MINIMUM_BORG_VERSION = { Feature.COMPACT: parse_version('1.2.0a2'), # borg compact Feature.ATIME: parse_version('1.2.0a7'), # borg create --atime Feature.NOFLAGS: parse_version('1.2.0a8'), # borg create --noflags Feature.NUMERIC_IDS: parse_version('1.2.0b3'), # borg create/extract/mount --numeric-ids Feature.UPLOAD_RATELIMIT: parse_version('1.2.0b3'), # borg create --upload-ratelimit Feature.SEPARATE_REPOSITORY_ARCHIVE: parse_version('2.0.0a2'), # --repo with separate archive Feature.RCREATE: parse_version('2.0.0a2'), # borg rcreate Feature.RLIST: parse_version('2.0.0a2'), # borg rlist Feature.RINFO: parse_version('2.0.0a2'), # borg rinfo Feature.MATCH_ARCHIVES: parse_version('2.0.0b3'), # borg --match-archives Feature.EXCLUDED_FILES_MINUS: parse_version('2.0.0b5'), # --list --filter uses "-" for excludes } def available(feature, borg_version): ''' Given a Borg Feature constant and a Borg version string, return whether that feature is available in that version of Borg. 
''' return FEATURE_TO_MINIMUM_BORG_VERSION[feature] <= parse_version(borg_version) borgmatic-1.7.9/borgmatic/borg/flags.py000066400000000000000000000034011440467744700200640ustar00rootroot00000000000000import itertools from borgmatic.borg import feature def make_flags(name, value): ''' Given a flag name and its value, return it formatted as Borg-compatible flags. ''' if not value: return () flag = '--{}'.format(name.replace('_', '-')) if value is True: return (flag,) return (flag, str(value)) def make_flags_from_arguments(arguments, excludes=()): ''' Given borgmatic command-line arguments as an instance of argparse.Namespace, and optionally a list of named arguments to exclude, generate and return the corresponding Borg command-line flags as a tuple. ''' return tuple( itertools.chain.from_iterable( make_flags(name, value=getattr(arguments, name)) for name in sorted(vars(arguments)) if name not in excludes and not name.startswith('_') ) ) def make_repository_flags(repository, local_borg_version): ''' Given the path of a Borg repository and the local Borg version, return Borg-version-appropriate command-line flags (as a tuple) for selecting that repository. ''' return ( ('--repo',) if feature.available(feature.Feature.SEPARATE_REPOSITORY_ARCHIVE, local_borg_version) else () ) + (repository,) def make_repository_archive_flags(repository, archive, local_borg_version): ''' Given the path of a Borg repository, an archive name or pattern, and the local Borg version, return Borg-version-appropriate command-line flags (as a tuple) for selecting that repository and archive. 
''' return ( ('--repo', repository, archive) if feature.available(feature.Feature.SEPARATE_REPOSITORY_ARCHIVE, local_borg_version) else (f'{repository}::{archive}',) ) borgmatic-1.7.9/borgmatic/borg/info.py000066400000000000000000000045651440467744700177370ustar00rootroot00000000000000import logging import borgmatic.logger from borgmatic.borg import environment, feature, flags from borgmatic.execute import execute_command, execute_command_and_capture_output logger = logging.getLogger(__name__) def display_archives_info( repository, storage_config, local_borg_version, info_arguments, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, a storage config dict, the local Borg version, and the arguments to the info action, display summary information for Borg archives in the repository or return JSON summary information. ''' borgmatic.logger.add_custom_log_levels() lock_wait = storage_config.get('lock_wait', None) full_command = ( (local_path, 'info') + ( ('--info',) if logger.getEffectiveLevel() == logging.INFO and not info_arguments.json else () ) + ( ('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) and not info_arguments.json else () ) + flags.make_flags('remote-path', remote_path) + flags.make_flags('lock-wait', lock_wait) + ( ( flags.make_flags('match-archives', f'sh:{info_arguments.prefix}*') if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version) else flags.make_flags('glob-archives', f'{info_arguments.prefix}*') ) if info_arguments.prefix else () ) + flags.make_flags_from_arguments( info_arguments, excludes=('repository', 'archive', 'prefix') ) + flags.make_repository_flags(repository, local_borg_version) + ( flags.make_flags('match-archives', info_arguments.archive) if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version) else flags.make_flags('glob-archives', info_arguments.archive) ) ) if info_arguments.json: return execute_command_and_capture_output( full_command, 
extra_environment=environment.make_environment(storage_config), ) else: execute_command( full_command, output_log_level=logging.ANSWER, borg_local_path=local_path, extra_environment=environment.make_environment(storage_config), ) borgmatic-1.7.9/borgmatic/borg/list.py000066400000000000000000000174331440467744700177550ustar00rootroot00000000000000import argparse import copy import logging import re import borgmatic.logger from borgmatic.borg import environment, feature, flags, rlist from borgmatic.execute import execute_command, execute_command_and_capture_output logger = logging.getLogger(__name__) ARCHIVE_FILTER_FLAGS_MOVED_TO_RLIST = ('prefix', 'match_archives', 'sort_by', 'first', 'last') MAKE_FLAGS_EXCLUDES = ( 'repository', 'archive', 'successful', 'paths', 'find_paths', ) + ARCHIVE_FILTER_FLAGS_MOVED_TO_RLIST def make_list_command( repository, storage_config, local_borg_version, list_arguments, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, a storage config dict, the arguments to the list action, and local and remote Borg paths, return a command as a tuple to list archives or paths within an archive. 
''' lock_wait = storage_config.get('lock_wait', None) return ( (local_path, 'list') + ( ('--info',) if logger.getEffectiveLevel() == logging.INFO and not list_arguments.json else () ) + ( ('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) and not list_arguments.json else () ) + flags.make_flags('remote-path', remote_path) + flags.make_flags('lock-wait', lock_wait) + flags.make_flags_from_arguments(list_arguments, excludes=MAKE_FLAGS_EXCLUDES) + ( flags.make_repository_archive_flags( repository, list_arguments.archive, local_borg_version ) if list_arguments.archive else flags.make_repository_flags(repository, local_borg_version) ) + (tuple(list_arguments.paths) if list_arguments.paths else ()) ) def make_find_paths(find_paths): ''' Given a sequence of path fragments or patterns as passed to `--find`, transform all path fragments into glob patterns. Pass through existing patterns untouched. For example, given find_paths of: ['foo.txt', 'pp:root/somedir'] ... transform that into: ['sh:**/*foo.txt*/**', 'pp:root/somedir'] ''' if not find_paths: return () return tuple( find_path if re.compile(r'([-!+RrPp] )|(\w\w:)').match(find_path) else f'sh:**/*{find_path}*/**' for find_path in find_paths ) def capture_archive_listing( repository, archive, storage_config, local_borg_version, list_path=None, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, an archive name, a storage config dict, the local Borg version, the archive path in which to list files, and local and remote Borg paths, capture the output of listing that archive and return it as a list of file paths. 
''' borg_environment = environment.make_environment(storage_config) return tuple( execute_command_and_capture_output( make_list_command( repository, storage_config, local_borg_version, argparse.Namespace( repository=repository, archive=archive, paths=[f'sh:{list_path}'], find_paths=None, json=None, format='{path}{NL}', ), local_path, remote_path, ), extra_environment=borg_environment, ) .strip('\n') .split('\n') ) def list_archive( repository, storage_config, local_borg_version, list_arguments, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, a storage config dict, the local Borg version, the arguments to the list action, and local and remote Borg paths, display the output of listing the files of a Borg archive (or return JSON output). If list_arguments.find_paths are given, list the files by searching across multiple archives. If neither find_paths nor archive name are given, instead list the archives in the given repository. ''' borgmatic.logger.add_custom_log_levels() if not list_arguments.archive and not list_arguments.find_paths: if feature.available(feature.Feature.RLIST, local_borg_version): logger.warning( 'Omitting the --archive flag on the list action is deprecated when using Borg 2.x+. Use the rlist action instead.' ) rlist_arguments = argparse.Namespace( repository=repository, short=list_arguments.short, format=list_arguments.format, json=list_arguments.json, prefix=list_arguments.prefix, match_archives=list_arguments.match_archives, sort_by=list_arguments.sort_by, first=list_arguments.first, last=list_arguments.last, ) return rlist.list_repository( repository, storage_config, local_borg_version, rlist_arguments, local_path, remote_path ) if list_arguments.archive: for name in ARCHIVE_FILTER_FLAGS_MOVED_TO_RLIST: if getattr(list_arguments, name, None): logger.warning( f"The --{name.replace('_', '-')} flag on the list action is ignored when using the --archive flag." 
) if list_arguments.json: raise ValueError( 'The --json flag on the list action is not supported when using the --archive/--find flags.' ) borg_environment = environment.make_environment(storage_config) # If there are any paths to find (and there's not a single archive already selected), start by # getting a list of archives to search. if list_arguments.find_paths and not list_arguments.archive: rlist_arguments = argparse.Namespace( repository=repository, short=True, format=None, json=None, prefix=list_arguments.prefix, match_archives=list_arguments.match_archives, sort_by=list_arguments.sort_by, first=list_arguments.first, last=list_arguments.last, ) # Ask Borg to list archives. Capture its output for use below. archive_lines = tuple( execute_command_and_capture_output( rlist.make_rlist_command( repository, storage_config, local_borg_version, rlist_arguments, local_path, remote_path, ), extra_environment=borg_environment, ) .strip('\n') .split('\n') ) else: archive_lines = (list_arguments.archive,) # For each archive listed by Borg, run list on the contents of that archive. for archive in archive_lines: logger.answer(f'{repository}: Listing archive {archive}') archive_arguments = copy.copy(list_arguments) archive_arguments.archive = archive # This list call is to show the files in a single archive, not list multiple archives. So # blank out any archive filtering flags. They'll break anyway in Borg 2. 
for name in ARCHIVE_FILTER_FLAGS_MOVED_TO_RLIST: setattr(archive_arguments, name, None) main_command = make_list_command( repository, storage_config, local_borg_version, archive_arguments, local_path, remote_path, ) + make_find_paths(list_arguments.find_paths) execute_command( main_command, output_log_level=logging.ANSWER, borg_local_path=local_path, extra_environment=borg_environment, ) borgmatic-1.7.9/borgmatic/borg/mount.py000066400000000000000000000047671440467744700201520ustar00rootroot00000000000000import logging from borgmatic.borg import environment, feature, flags from borgmatic.execute import DO_NOT_CAPTURE, execute_command logger = logging.getLogger(__name__) def mount_archive( repository, archive, mount_point, paths, foreground, options, storage_config, local_borg_version, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, an optional archive name, a filesystem mount point, zero or more paths to mount from the archive, extra Borg mount options, a storage configuration dict, the local Borg version, and optional local and remote Borg paths, mount the archive onto the mount point. 
''' umask = storage_config.get('umask', None) lock_wait = storage_config.get('lock_wait', None) full_command = ( (local_path, 'mount') + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + (('--foreground',) if foreground else ()) + (('-o', options) if options else ()) + ( ( flags.make_repository_flags(repository, local_borg_version) + ( ('--match-archives', archive) if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version) else ('--glob-archives', archive) ) ) if feature.available(feature.Feature.SEPARATE_REPOSITORY_ARCHIVE, local_borg_version) else ( flags.make_repository_archive_flags(repository, archive, local_borg_version) if archive else flags.make_repository_flags(repository, local_borg_version) ) ) + (mount_point,) + (tuple(paths) if paths else ()) ) borg_environment = environment.make_environment(storage_config) # Don't capture the output when foreground mode is used so that ctrl-C can work properly. if foreground: execute_command( full_command, output_file=DO_NOT_CAPTURE, borg_local_path=local_path, extra_environment=borg_environment, ) return execute_command(full_command, borg_local_path=local_path, extra_environment=borg_environment) borgmatic-1.7.9/borgmatic/borg/prune.py000066400000000000000000000055311440467744700201270ustar00rootroot00000000000000import logging import borgmatic.logger from borgmatic.borg import environment, feature, flags from borgmatic.execute import execute_command logger = logging.getLogger(__name__) def make_prune_flags(retention_config, local_borg_version): ''' Given a retention config dict mapping from option name to value, tranform it into an iterable of command-line name-value flag pairs. 
For example, given a retention config of: {'keep_weekly': 4, 'keep_monthly': 6} This will be returned as an iterable of: ( ('--keep-weekly', '4'), ('--keep-monthly', '6'), ) ''' config = retention_config.copy() prefix = config.pop('prefix', '{hostname}-') if prefix: if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version): config['match_archives'] = f'sh:{prefix}*' else: config['glob_archives'] = f'{prefix}*' return ( ('--' + option_name.replace('_', '-'), str(value)) for option_name, value in config.items() ) def prune_archives( dry_run, repository, storage_config, retention_config, local_borg_version, local_path='borg', remote_path=None, stats=False, list_archives=False, ): ''' Given dry-run flag, a local or remote repository path, a storage config dict, and a retention config dict, prune Borg archives according to the retention policy specified in that configuration. ''' borgmatic.logger.add_custom_log_levels() umask = storage_config.get('umask', None) lock_wait = storage_config.get('lock_wait', None) extra_borg_options = storage_config.get('extra_borg_options', {}).get('prune', '') full_command = ( (local_path, 'prune') + tuple( element for pair in make_prune_flags(retention_config, local_borg_version) for element in pair ) + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + (('--stats',) if stats and not dry_run else ()) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--list',) if list_archives else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + (('--dry-run',) if dry_run else ()) + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ()) + flags.make_repository_flags(repository, local_borg_version) ) if stats or list_archives: output_log_level = logging.ANSWER else: output_log_level = logging.INFO execute_command( full_command, output_log_level=output_log_level, 
borg_local_path=local_path, extra_environment=environment.make_environment(storage_config), ) borgmatic-1.7.9/borgmatic/borg/rcreate.py000066400000000000000000000054411440467744700204230ustar00rootroot00000000000000import argparse import logging import subprocess from borgmatic.borg import environment, feature, flags, rinfo from borgmatic.execute import DO_NOT_CAPTURE, execute_command logger = logging.getLogger(__name__) RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE = 2 def create_repository( dry_run, repository, storage_config, local_borg_version, encryption_mode, source_repository=None, copy_crypt_key=False, append_only=None, storage_quota=None, make_parent_dirs=False, local_path='borg', remote_path=None, ): ''' Given a dry-run flag, a local or remote repository path, a storage configuration dict, the local Borg version, a Borg encryption mode, the path to another repo whose key material should be reused, whether the repository should be append-only, and the storage quota to use, create the repository. If the repository already exists, then log and skip creation. ''' try: rinfo.display_repository_info( repository, storage_config, local_borg_version, argparse.Namespace(json=True), local_path, remote_path, ) logger.info(f'{repository}: Repository already exists. 
Skipping creation.') return except subprocess.CalledProcessError as error: if error.returncode != RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE: raise extra_borg_options = storage_config.get('extra_borg_options', {}).get('rcreate', '') rcreate_command = ( (local_path,) + ( ('rcreate',) if feature.available(feature.Feature.RCREATE, local_borg_version) else ('init',) ) + (('--encryption', encryption_mode) if encryption_mode else ()) + (('--other-repo', source_repository) if source_repository else ()) + (('--copy-crypt-key',) if copy_crypt_key else ()) + (('--append-only',) if append_only else ()) + (('--storage-quota', storage_quota) if storage_quota else ()) + (('--make-parent-dirs',) if make_parent_dirs else ()) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug',) if logger.isEnabledFor(logging.DEBUG) else ()) + (('--remote-path', remote_path) if remote_path else ()) + (tuple(extra_borg_options.split(' ')) if extra_borg_options else ()) + flags.make_repository_flags(repository, local_borg_version) ) if dry_run: logging.info(f'{repository}: Skipping repository creation (dry run)') return # Do not capture output here, so as to support interactive prompts. 
execute_command( rcreate_command, output_file=DO_NOT_CAPTURE, borg_local_path=local_path, extra_environment=environment.make_environment(storage_config), ) borgmatic-1.7.9/borgmatic/borg/rinfo.py000066400000000000000000000035371440467744700201170ustar00rootroot00000000000000import logging import borgmatic.logger from borgmatic.borg import environment, feature, flags from borgmatic.execute import execute_command, execute_command_and_capture_output logger = logging.getLogger(__name__) def display_repository_info( repository, storage_config, local_borg_version, rinfo_arguments, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, a storage config dict, the local Borg version, and the arguments to the rinfo action, display summary information for the Borg repository or return JSON summary information. ''' borgmatic.logger.add_custom_log_levels() lock_wait = storage_config.get('lock_wait', None) full_command = ( (local_path,) + ( ('rinfo',) if feature.available(feature.Feature.RINFO, local_borg_version) else ('info',) ) + ( ('--info',) if logger.getEffectiveLevel() == logging.INFO and not rinfo_arguments.json else () ) + ( ('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) and not rinfo_arguments.json else () ) + flags.make_flags('remote-path', remote_path) + flags.make_flags('lock-wait', lock_wait) + (('--json',) if rinfo_arguments.json else ()) + flags.make_repository_flags(repository, local_borg_version) ) extra_environment = environment.make_environment(storage_config) if rinfo_arguments.json: return execute_command_and_capture_output( full_command, extra_environment=extra_environment, ) else: execute_command( full_command, output_log_level=logging.ANSWER, borg_local_path=local_path, extra_environment=extra_environment, ) borgmatic-1.7.9/borgmatic/borg/rlist.py000066400000000000000000000102661440467744700201340ustar00rootroot00000000000000import logging import borgmatic.logger from borgmatic.borg import environment, 
feature, flags from borgmatic.execute import execute_command, execute_command_and_capture_output logger = logging.getLogger(__name__) def resolve_archive_name( repository, archive, storage_config, local_borg_version, local_path='borg', remote_path=None ): ''' Given a local or remote repository path, an archive name, a storage config dict, a local Borg path, and a remote Borg path, simply return the archive name. But if the archive name is "latest", then instead introspect the repository for the latest archive and return its name. Raise ValueError if "latest" is given but there are no archives in the repository. ''' if archive != 'latest': return archive lock_wait = storage_config.get('lock_wait', None) full_command = ( ( local_path, 'rlist' if feature.available(feature.Feature.RLIST, local_borg_version) else 'list', ) + flags.make_flags('remote-path', remote_path) + flags.make_flags('lock-wait', lock_wait) + flags.make_flags('last', 1) + ('--short',) + flags.make_repository_flags(repository, local_borg_version) ) output = execute_command_and_capture_output( full_command, extra_environment=environment.make_environment(storage_config), ) try: latest_archive = output.strip().splitlines()[-1] except IndexError: raise ValueError('No archives found in the repository') logger.debug('{}: Latest archive is {}'.format(repository, latest_archive)) return latest_archive MAKE_FLAGS_EXCLUDES = ('repository', 'prefix') def make_rlist_command( repository, storage_config, local_borg_version, rlist_arguments, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, a storage config dict, the local Borg version, the arguments to the rlist action, and local and remote Borg paths, return a command as a tuple to list archives with a repository. 
''' lock_wait = storage_config.get('lock_wait', None) return ( ( local_path, 'rlist' if feature.available(feature.Feature.RLIST, local_borg_version) else 'list', ) + ( ('--info',) if logger.getEffectiveLevel() == logging.INFO and not rlist_arguments.json else () ) + ( ('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) and not rlist_arguments.json else () ) + flags.make_flags('remote-path', remote_path) + flags.make_flags('lock-wait', lock_wait) + ( ( flags.make_flags('match-archives', f'sh:{rlist_arguments.prefix}*') if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version) else flags.make_flags('glob-archives', f'{rlist_arguments.prefix}*') ) if rlist_arguments.prefix else () ) + flags.make_flags_from_arguments(rlist_arguments, excludes=MAKE_FLAGS_EXCLUDES) + flags.make_repository_flags(repository, local_borg_version) ) def list_repository( repository, storage_config, local_borg_version, rlist_arguments, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, a storage config dict, the local Borg version, the arguments to the list action, and local and remote Borg paths, display the output of listing Borg archives in the given repository (or return JSON output). 
''' borgmatic.logger.add_custom_log_levels() borg_environment = environment.make_environment(storage_config) main_command = make_rlist_command( repository, storage_config, local_borg_version, rlist_arguments, local_path, remote_path ) if rlist_arguments.json: return execute_command_and_capture_output(main_command, extra_environment=borg_environment,) else: execute_command( main_command, output_log_level=logging.ANSWER, borg_local_path=local_path, extra_environment=borg_environment, ) borgmatic-1.7.9/borgmatic/borg/state.py000066400000000000000000000000641440467744700201120ustar00rootroot00000000000000DEFAULT_BORGMATIC_SOURCE_DIRECTORY = '~/.borgmatic' borgmatic-1.7.9/borgmatic/borg/transfer.py000066400000000000000000000034741440467744700206260ustar00rootroot00000000000000import logging import borgmatic.logger from borgmatic.borg import environment, flags from borgmatic.execute import DO_NOT_CAPTURE, execute_command logger = logging.getLogger(__name__) def transfer_archives( dry_run, repository, storage_config, local_borg_version, transfer_arguments, local_path='borg', remote_path=None, ): ''' Given a dry-run flag, a local or remote repository path, a storage config dict, the local Borg version, and the arguments to the transfer action, transfer archives to the given repository. 
''' borgmatic.logger.add_custom_log_levels() full_command = ( (local_path, 'transfer') + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + flags.make_flags('remote-path', remote_path) + flags.make_flags('lock-wait', storage_config.get('lock_wait', None)) + (('--progress',) if transfer_arguments.progress else ()) + ( flags.make_flags( 'match-archives', transfer_arguments.match_archives or transfer_arguments.archive ) ) + flags.make_flags_from_arguments( transfer_arguments, excludes=('repository', 'source_repository', 'archive', 'match_archives'), ) + flags.make_repository_flags(repository, local_borg_version) + flags.make_flags('other-repo', transfer_arguments.source_repository) + flags.make_flags('dry-run', dry_run) ) return execute_command( full_command, output_log_level=logging.ANSWER, output_file=DO_NOT_CAPTURE if transfer_arguments.progress else None, borg_local_path=local_path, extra_environment=environment.make_environment(storage_config), ) borgmatic-1.7.9/borgmatic/borg/umount.py000066400000000000000000000011001440467744700203110ustar00rootroot00000000000000import logging from borgmatic.execute import execute_command logger = logging.getLogger(__name__) def unmount_archive(mount_point, local_path='borg'): ''' Given a mounted filesystem mount point, and an optional local Borg paths, umount the filesystem from the mount point. 
''' full_command = ( (local_path, 'umount') + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + (mount_point,) ) execute_command(full_command) borgmatic-1.7.9/borgmatic/borg/version.py000066400000000000000000000017131440467744700204610ustar00rootroot00000000000000import logging from borgmatic.borg import environment from borgmatic.execute import execute_command_and_capture_output logger = logging.getLogger(__name__) def local_borg_version(storage_config, local_path='borg'): ''' Given a storage configuration dict and a local Borg binary path, return a version string for it. Raise OSError or CalledProcessError if there is a problem running Borg. Raise ValueError if the version cannot be parsed. ''' full_command = ( (local_path, '--version') + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) ) output = execute_command_and_capture_output( full_command, extra_environment=environment.make_environment(storage_config), ) try: return output.split(' ')[1].strip() except IndexError: raise ValueError('Could not parse Borg version string') borgmatic-1.7.9/borgmatic/commands/000077500000000000000000000000001440467744700172705ustar00rootroot00000000000000borgmatic-1.7.9/borgmatic/commands/__init__.py000066400000000000000000000000001440467744700213670ustar00rootroot00000000000000borgmatic-1.7.9/borgmatic/commands/arguments.py000066400000000000000000001031761440467744700216570ustar00rootroot00000000000000import collections from argparse import Action, ArgumentParser from borgmatic.config import collect SUBPARSER_ALIASES = { 'rcreate': ['init', '-I'], 'prune': ['-p'], 'compact': [], 'create': ['-C'], 'check': ['-k'], 'extract': ['-x'], 'export-tar': [], 'mount': ['-m'], 'umount': ['-u'], 'restore': ['-r'], 'rlist': [], 'list': ['-l'], 'rinfo': [], 'info': ['-i'], 'transfer': [], 'break-lock': [], 
'borg': [], } def parse_subparser_arguments(unparsed_arguments, subparsers): ''' Given a sequence of arguments and a dict from subparser name to argparse.ArgumentParser instance, give each requested action's subparser a shot at parsing all arguments. This allows common arguments like "--repository" to be shared across multiple subparsers. Return the result as a tuple of (a dict mapping from subparser name to a parsed namespace of arguments, a list of remaining arguments not claimed by any subparser). ''' arguments = collections.OrderedDict() remaining_arguments = list(unparsed_arguments) alias_to_subparser_name = { alias: subparser_name for subparser_name, aliases in SUBPARSER_ALIASES.items() for alias in aliases } # If the "borg" action is used, skip all other subparsers. This avoids confusion like # "borg list" triggering borgmatic's own list action. if 'borg' in unparsed_arguments: subparsers = {'borg': subparsers['borg']} for argument in remaining_arguments: canonical_name = alias_to_subparser_name.get(argument, argument) subparser = subparsers.get(canonical_name) if not subparser: continue # If a parsed value happens to be the same as the name of a subparser, remove it from the # remaining arguments. This prevents, for instance, "check --only extract" from triggering # the "extract" subparser. parsed, unused_remaining = subparser.parse_known_args(unparsed_arguments) for value in vars(parsed).values(): if isinstance(value, str): if value in subparsers: remaining_arguments.remove(value) elif isinstance(value, list): for item in value: if item in subparsers: remaining_arguments.remove(item) arguments[canonical_name] = parsed # If no actions are explicitly requested, assume defaults. 
if not arguments and '--help' not in unparsed_arguments and '-h' not in unparsed_arguments: for subparser_name in ('create', 'prune', 'compact', 'check'): subparser = subparsers[subparser_name] parsed, unused_remaining = subparser.parse_known_args(unparsed_arguments) arguments[subparser_name] = parsed remaining_arguments = list(unparsed_arguments) # Now ask each subparser, one by one, to greedily consume arguments. for subparser_name, subparser in subparsers.items(): if subparser_name not in arguments.keys(): continue subparser = subparsers[subparser_name] unused_parsed, remaining_arguments = subparser.parse_known_args(remaining_arguments) # Special case: If "borg" is present in the arguments, consume all arguments after (+1) the # "borg" action. if 'borg' in arguments: borg_options_index = remaining_arguments.index('borg') + 1 arguments['borg'].options = remaining_arguments[borg_options_index:] remaining_arguments = remaining_arguments[:borg_options_index] # Remove the subparser names themselves. for subparser_name, subparser in subparsers.items(): if subparser_name in remaining_arguments: remaining_arguments.remove(subparser_name) return (arguments, remaining_arguments) class Extend_action(Action): ''' An argparse action to support Python 3.8's "extend" action in older versions of Python. ''' def __call__(self, parser, namespace, values, option_string=None): items = getattr(namespace, self.dest, None) if items: items.extend(values) else: setattr(namespace, self.dest, list(values)) def make_parsers(): ''' Build a top-level parser and its subparsers and return them as a tuple. 
''' config_paths = collect.get_default_config_paths(expand_home=True) unexpanded_config_paths = collect.get_default_config_paths(expand_home=False) global_parser = ArgumentParser(add_help=False) global_parser.register('action', 'extend', Extend_action) global_group = global_parser.add_argument_group('global arguments') global_group.add_argument( '-c', '--config', nargs='*', dest='config_paths', default=config_paths, help='Configuration filenames or directories, defaults to: {}'.format( ' '.join(unexpanded_config_paths) ), ) global_group.add_argument( '--excludes', dest='excludes_filename', help='Deprecated in favor of exclude_patterns within configuration', ) global_group.add_argument( '-n', '--dry-run', dest='dry_run', action='store_true', help='Go through the motions, but do not actually write to any repositories', ) global_group.add_argument( '-nc', '--no-color', dest='no_color', action='store_true', help='Disable colored output' ) global_group.add_argument( '-v', '--verbosity', type=int, choices=range(-1, 3), default=0, help='Display verbose progress to the console (from only errors to very verbose: -1, 0, 1, or 2)', ) global_group.add_argument( '--syslog-verbosity', type=int, choices=range(-1, 3), default=0, help='Log verbose progress to syslog (from only errors to very verbose: -1, 0, 1, or 2). Ignored when console is interactive or --log-file is given', ) global_group.add_argument( '--log-file-verbosity', type=int, choices=range(-1, 3), default=0, help='Log verbose progress to log file (from only errors to very verbose: -1, 0, 1, or 2). 
Only used when --log-file is given', ) global_group.add_argument( '--monitoring-verbosity', type=int, choices=range(-1, 3), default=0, help='Log verbose progress to monitoring integrations that support logging (from only errors to very verbose: -1, 0, 1, or 2)', ) global_group.add_argument( '--log-file', type=str, default=None, help='Write log messages to this file instead of syslog', ) global_group.add_argument( '--override', metavar='SECTION.OPTION=VALUE', nargs='+', dest='overrides', action='extend', help='One or more configuration file options to override with specified values', ) global_group.add_argument( '--no-environment-interpolation', dest='resolve_env', action='store_false', help='Do not resolve environment variables in configuration file', ) global_group.add_argument( '--bash-completion', default=False, action='store_true', help='Show bash completion script and exit', ) global_group.add_argument( '--version', dest='version', default=False, action='store_true', help='Display installed version number of borgmatic and exit', ) top_level_parser = ArgumentParser( description=''' Simple, configuration-driven backup software for servers and workstations. If none of the action options are given, then borgmatic defaults to: create, prune, compact, and check. ''', parents=[global_parser], ) subparsers = top_level_parser.add_subparsers( title='actions', metavar='', help='Specify zero or more actions. Defaults to creat, prune, compact, and check. 
Use --help with action for details:', ) rcreate_parser = subparsers.add_parser( 'rcreate', aliases=SUBPARSER_ALIASES['rcreate'], help='Create a new, empty Borg repository', description='Create a new, empty Borg repository', add_help=False, ) rcreate_group = rcreate_parser.add_argument_group('rcreate arguments') rcreate_group.add_argument( '-e', '--encryption', dest='encryption_mode', help='Borg repository encryption mode', required=True, ) rcreate_group.add_argument( '--source-repository', '--other-repo', metavar='KEY_REPOSITORY', help='Path to an existing Borg repository whose key material should be reused (Borg 2.x+ only)', ) rcreate_group.add_argument( '--repository', help='Path of the new repository to create (must be already specified in a borgmatic configuration file), defaults to the configured repository if there is only one', ) rcreate_group.add_argument( '--copy-crypt-key', action='store_true', help='Copy the crypt key used for authenticated encryption from the source repository, defaults to a new random key (Borg 2.x+ only)', ) rcreate_group.add_argument( '--append-only', action='store_true', help='Create an append-only repository', ) rcreate_group.add_argument( '--storage-quota', help='Create a repository with a fixed storage quota', ) rcreate_group.add_argument( '--make-parent-dirs', action='store_true', help='Create any missing parent directories of the repository directory', ) rcreate_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit' ) transfer_parser = subparsers.add_parser( 'transfer', aliases=SUBPARSER_ALIASES['transfer'], help='Transfer archives from one repository to another, optionally upgrading the transferred data (Borg 2.0+ only)', description='Transfer archives from one repository to another, optionally upgrading the transferred data (Borg 2.0+ only)', add_help=False, ) transfer_group = transfer_parser.add_argument_group('transfer arguments') transfer_group.add_argument( '--repository', help='Path of 
existing destination repository to transfer archives to, defaults to the configured repository if there is only one', ) transfer_group.add_argument( '--source-repository', help='Path of existing source repository to transfer archives from', required=True, ) transfer_group.add_argument( '--archive', help='Name of single archive to transfer (or "latest"), defaults to transferring all archives', ) transfer_group.add_argument( '--upgrader', help='Upgrader type used to convert the transfered data, e.g. "From12To20" to upgrade data from Borg 1.2 to 2.0 format, defaults to no conversion', ) transfer_group.add_argument( '--progress', default=False, action='store_true', help='Display progress as each archive is transferred', ) transfer_group.add_argument( '-a', '--match-archives', '--glob-archives', metavar='PATTERN', help='Only transfer archives with names matching this pattern', ) transfer_group.add_argument( '--sort-by', metavar='KEYS', help='Comma-separated list of sorting keys' ) transfer_group.add_argument( '--first', metavar='N', help='Only transfer first N archives after other filters are applied', ) transfer_group.add_argument( '--last', metavar='N', help='Only transfer last N archives after other filters are applied' ) transfer_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit' ) prune_parser = subparsers.add_parser( 'prune', aliases=SUBPARSER_ALIASES['prune'], help='Prune archives according to the retention policy (with Borg 1.2+, run compact afterwards to actually free space)', description='Prune archives according to the retention policy (with Borg 1.2+, run compact afterwards to actually free space)', add_help=False, ) prune_group = prune_parser.add_argument_group('prune arguments') prune_group.add_argument( '--repository', help='Path of specific existing repository to prune (must be already specified in a borgmatic configuration file)', ) prune_group.add_argument( '--stats', dest='stats', default=False, 
action='store_true', help='Display statistics of archive', ) prune_group.add_argument( '--list', dest='list_archives', action='store_true', help='List archives kept/pruned' ) prune_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') compact_parser = subparsers.add_parser( 'compact', aliases=SUBPARSER_ALIASES['compact'], help='Compact segments to free space (Borg 1.2+, borgmatic 1.5.23+ only)', description='Compact segments to free space (Borg 1.2+, borgmatic 1.5.23+ only)', add_help=False, ) compact_group = compact_parser.add_argument_group('compact arguments') compact_group.add_argument( '--repository', help='Path of specific existing repository to compact (must be already specified in a borgmatic configuration file)', ) compact_group.add_argument( '--progress', dest='progress', default=False, action='store_true', help='Display progress as each segment is compacted', ) compact_group.add_argument( '--cleanup-commits', dest='cleanup_commits', default=False, action='store_true', help='Cleanup commit-only 17-byte segment files left behind by Borg 1.1 (flag in Borg 1.2 only)', ) compact_group.add_argument( '--threshold', type=int, dest='threshold', help='Minimum saved space percentage threshold for compacting a segment, defaults to 10', ) compact_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit' ) create_parser = subparsers.add_parser( 'create', aliases=SUBPARSER_ALIASES['create'], help='Create an archive (actually perform a backup)', description='Create an archive (actually perform a backup)', add_help=False, ) create_group = create_parser.add_argument_group('create arguments') create_group.add_argument( '--repository', help='Path of specific existing repository to backup to (must be already specified in a borgmatic configuration file)', ) create_group.add_argument( '--progress', dest='progress', default=False, action='store_true', help='Display progress for each file as it is backed up', ) 
create_group.add_argument( '--stats', dest='stats', default=False, action='store_true', help='Display statistics of archive', ) create_group.add_argument( '--list', '--files', dest='list_files', action='store_true', help='Show per-file details' ) create_group.add_argument( '--json', dest='json', default=False, action='store_true', help='Output results as JSON' ) create_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') check_parser = subparsers.add_parser( 'check', aliases=SUBPARSER_ALIASES['check'], help='Check archives for consistency', description='Check archives for consistency', add_help=False, ) check_group = check_parser.add_argument_group('check arguments') check_group.add_argument( '--repository', help='Path of specific existing repository to check (must be already specified in a borgmatic configuration file)', ) check_group.add_argument( '--progress', dest='progress', default=False, action='store_true', help='Display progress for each file as it is checked', ) check_group.add_argument( '--repair', dest='repair', default=False, action='store_true', help='Attempt to repair any inconsistencies found (for interactive use)', ) check_group.add_argument( '--only', metavar='CHECK', choices=('repository', 'archives', 'data', 'extract'), dest='only', action='append', help='Run a particular consistency check (repository, archives, data, or extract) instead of configured checks (subject to configured frequency, can specify flag multiple times)', ) check_group.add_argument( '--force', default=False, action='store_true', help='Ignore configured check frequencies and run checks unconditionally', ) check_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') extract_parser = subparsers.add_parser( 'extract', aliases=SUBPARSER_ALIASES['extract'], help='Extract files from a named archive to the current directory', description='Extract a named archive to the current directory', add_help=False, ) 
extract_group = extract_parser.add_argument_group('extract arguments') extract_group.add_argument( '--repository', help='Path of repository to extract, defaults to the configured repository if there is only one', ) extract_group.add_argument( '--archive', help='Name of archive to extract (or "latest")', required=True ) extract_group.add_argument( '--path', '--restore-path', metavar='PATH', nargs='+', dest='paths', help='Paths to extract from archive, defaults to the entire archive', ) extract_group.add_argument( '--destination', metavar='PATH', dest='destination', help='Directory to extract files into, defaults to the current directory', ) extract_group.add_argument( '--strip-components', type=lambda number: number if number == 'all' else int(number), metavar='NUMBER', help='Number of leading path components to remove from each extracted path or "all" to strip all leading path components. Skip paths with fewer elements', ) extract_group.add_argument( '--progress', dest='progress', default=False, action='store_true', help='Display progress for each file as it is extracted', ) extract_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit' ) export_tar_parser = subparsers.add_parser( 'export-tar', aliases=SUBPARSER_ALIASES['export-tar'], help='Export an archive to a tar-formatted file or stream', description='Export an archive to a tar-formatted file or stream', add_help=False, ) export_tar_group = export_tar_parser.add_argument_group('export-tar arguments') export_tar_group.add_argument( '--repository', help='Path of repository to export from, defaults to the configured repository if there is only one', ) export_tar_group.add_argument( '--archive', help='Name of archive to export (or "latest")', required=True ) export_tar_group.add_argument( '--path', metavar='PATH', nargs='+', dest='paths', help='Paths to export from archive, defaults to the entire archive', ) export_tar_group.add_argument( '--destination', metavar='PATH', 
dest='destination', help='Path to destination export tar file, or "-" for stdout (but be careful about dirtying output with --verbosity or --list)', required=True, ) export_tar_group.add_argument( '--tar-filter', help='Name of filter program to pipe data through' ) export_tar_group.add_argument( '--list', '--files', dest='list_files', action='store_true', help='Show per-file details' ) export_tar_group.add_argument( '--strip-components', type=int, metavar='NUMBER', dest='strip_components', help='Number of leading path components to remove from each exported path. Skip paths with fewer elements', ) export_tar_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit' ) mount_parser = subparsers.add_parser( 'mount', aliases=SUBPARSER_ALIASES['mount'], help='Mount files from a named archive as a FUSE filesystem', description='Mount a named archive as a FUSE filesystem', add_help=False, ) mount_group = mount_parser.add_argument_group('mount arguments') mount_group.add_argument( '--repository', help='Path of repository to use, defaults to the configured repository if there is only one', ) mount_group.add_argument('--archive', help='Name of archive to mount (or "latest")') mount_group.add_argument( '--mount-point', metavar='PATH', dest='mount_point', help='Path where filesystem is to be mounted', required=True, ) mount_group.add_argument( '--path', metavar='PATH', nargs='+', dest='paths', help='Paths to mount from archive, defaults to the entire archive', ) mount_group.add_argument( '--foreground', dest='foreground', default=False, action='store_true', help='Stay in foreground until ctrl-C is pressed', ) mount_group.add_argument('--options', dest='options', help='Extra Borg mount options') mount_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') umount_parser = subparsers.add_parser( 'umount', aliases=SUBPARSER_ALIASES['umount'], help='Unmount a FUSE filesystem that was mounted with "borgmatic mount"', 
description='Unmount a mounted FUSE filesystem', add_help=False, ) umount_group = umount_parser.add_argument_group('umount arguments') umount_group.add_argument( '--mount-point', metavar='PATH', dest='mount_point', help='Path of filesystem to unmount', required=True, ) umount_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') restore_parser = subparsers.add_parser( 'restore', aliases=SUBPARSER_ALIASES['restore'], help='Restore database dumps from a named archive', description='Restore database dumps from a named archive. (To extract files instead, use "borgmatic extract".)', add_help=False, ) restore_group = restore_parser.add_argument_group('restore arguments') restore_group.add_argument( '--repository', help='Path of repository to restore from, defaults to the configured repository if there is only one', ) restore_group.add_argument( '--archive', help='Name of archive to restore from (or "latest")', required=True ) restore_group.add_argument( '--database', metavar='NAME', nargs='+', dest='databases', help="Names of databases to restore from archive, defaults to all databases. 
Note that any databases to restore must be defined in borgmatic's configuration", ) restore_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit' ) rlist_parser = subparsers.add_parser( 'rlist', aliases=SUBPARSER_ALIASES['rlist'], help='List repository', description='List the archives in a repository', add_help=False, ) rlist_group = rlist_parser.add_argument_group('rlist arguments') rlist_group.add_argument( '--repository', help='Path of repository to list, defaults to the configured repositories', ) rlist_group.add_argument( '--short', default=False, action='store_true', help='Output only archive names' ) rlist_group.add_argument('--format', help='Format for archive listing') rlist_group.add_argument( '--json', default=False, action='store_true', help='Output results as JSON' ) rlist_group.add_argument( '-P', '--prefix', help='Only list archive names starting with this prefix' ) rlist_group.add_argument( '-a', '--match-archives', '--glob-archives', metavar='PATTERN', help='Only list archive names matching this pattern', ) rlist_group.add_argument( '--sort-by', metavar='KEYS', help='Comma-separated list of sorting keys' ) rlist_group.add_argument( '--first', metavar='N', help='List first N archives after other filters are applied' ) rlist_group.add_argument( '--last', metavar='N', help='List last N archives after other filters are applied' ) rlist_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') list_parser = subparsers.add_parser( 'list', aliases=SUBPARSER_ALIASES['list'], help='List archive', description='List the files in an archive or search for a file across archives', add_help=False, ) list_group = list_parser.add_argument_group('list arguments') list_group.add_argument( '--repository', help='Path of repository containing archive to list, defaults to the configured repositories', ) list_group.add_argument('--archive', help='Name of the archive to list (or "latest")') 
list_group.add_argument( '--path', metavar='PATH', nargs='+', dest='paths', help='Paths or patterns to list from a single selected archive (via "--archive"), defaults to listing the entire archive', ) list_group.add_argument( '--find', metavar='PATH', nargs='+', dest='find_paths', help='Partial paths or patterns to search for and list across multiple archives', ) list_group.add_argument( '--short', default=False, action='store_true', help='Output only path names' ) list_group.add_argument('--format', help='Format for file listing') list_group.add_argument( '--json', default=False, action='store_true', help='Output results as JSON' ) list_group.add_argument( '-P', '--prefix', help='Only list archive names starting with this prefix' ) list_group.add_argument( '-a', '--match-archives', '--glob-archives', metavar='PATTERN', help='Only list archive names matching this pattern', ) list_group.add_argument( '--successful', default=True, action='store_true', help='Deprecated; no effect. Newer versions of Borg shows successful (non-checkpoint) archives by default.', ) list_group.add_argument( '--sort-by', metavar='KEYS', help='Comma-separated list of sorting keys' ) list_group.add_argument( '--first', metavar='N', help='List first N archives after other filters are applied' ) list_group.add_argument( '--last', metavar='N', help='List last N archives after other filters are applied' ) list_group.add_argument( '-e', '--exclude', metavar='PATTERN', help='Exclude paths matching the pattern' ) list_group.add_argument( '--exclude-from', metavar='FILENAME', help='Exclude paths from exclude file, one per line' ) list_group.add_argument('--pattern', help='Include or exclude paths matching a pattern') list_group.add_argument( '--patterns-from', metavar='FILENAME', help='Include or exclude paths matching patterns from pattern file, one per line', ) list_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') rinfo_parser = subparsers.add_parser( 
'rinfo', aliases=SUBPARSER_ALIASES['rinfo'], help='Show repository summary information such as disk space used', description='Show repository summary information such as disk space used', add_help=False, ) rinfo_group = rinfo_parser.add_argument_group('rinfo arguments') rinfo_group.add_argument( '--repository', help='Path of repository to show info for, defaults to the configured repository if there is only one', ) rinfo_group.add_argument( '--json', dest='json', default=False, action='store_true', help='Output results as JSON' ) rinfo_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') info_parser = subparsers.add_parser( 'info', aliases=SUBPARSER_ALIASES['info'], help='Show archive summary information such as disk space used', description='Show archive summary information such as disk space used', add_help=False, ) info_group = info_parser.add_argument_group('info arguments') info_group.add_argument( '--repository', help='Path of repository containing archive to show info for, defaults to the configured repository if there is only one', ) info_group.add_argument('--archive', help='Name of archive to show info for (or "latest")') info_group.add_argument( '--json', dest='json', default=False, action='store_true', help='Output results as JSON' ) info_group.add_argument( '-P', '--prefix', help='Only show info for archive names starting with this prefix' ) info_group.add_argument( '-a', '--match-archives', '--glob-archives', metavar='PATTERN', help='Only show info for archive names matching this pattern', ) info_group.add_argument( '--sort-by', metavar='KEYS', help='Comma-separated list of sorting keys' ) info_group.add_argument( '--first', metavar='N', help='Show info for first N archives after other filters are applied', ) info_group.add_argument( '--last', metavar='N', help='Show info for last N archives after other filters are applied' ) info_group.add_argument('-h', '--help', action='help', help='Show this help message and 
exit') break_lock_parser = subparsers.add_parser( 'break-lock', aliases=SUBPARSER_ALIASES['break-lock'], help='Break the repository and cache locks left behind by Borg aborting', description='Break Borg repository and cache locks left behind by Borg aborting', add_help=False, ) break_lock_group = break_lock_parser.add_argument_group('break-lock arguments') break_lock_group.add_argument( '--repository', help='Path of repository to break the lock for, defaults to the configured repository if there is only one', ) break_lock_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit' ) borg_parser = subparsers.add_parser( 'borg', aliases=SUBPARSER_ALIASES['borg'], help='Run an arbitrary Borg command', description="Run an arbitrary Borg command based on borgmatic's configuration", add_help=False, ) borg_group = borg_parser.add_argument_group('borg arguments') borg_group.add_argument( '--repository', help='Path of repository to pass to Borg, defaults to the configured repositories', ) borg_group.add_argument('--archive', help='Name of archive to pass to Borg (or "latest")') borg_group.add_argument( '--', metavar='OPTION', dest='options', nargs='+', help='Options to pass to Borg, command first ("create", "list", etc). "--" is optional. To specify the repository or the archive, you must use --repository or --archive instead of providing them here.', ) borg_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') return top_level_parser, subparsers def parse_arguments(*unparsed_arguments): ''' Given command-line arguments with which this script was invoked, parse the arguments and return them as a dict mapping from subparser name (or "global") to an argparse.Namespace instance. 
''' top_level_parser, subparsers = make_parsers() arguments, remaining_arguments = parse_subparser_arguments( unparsed_arguments, subparsers.choices ) arguments['global'] = top_level_parser.parse_args(remaining_arguments) if arguments['global'].excludes_filename: raise ValueError( 'The --excludes flag has been replaced with exclude_patterns in configuration.' ) if 'create' in arguments and arguments['create'].list_files and arguments['create'].progress: raise ValueError( 'With the create action, only one of --list (--files) and --progress flags can be used.' ) if ( ('list' in arguments and 'rinfo' in arguments and arguments['list'].json) or ('list' in arguments and 'info' in arguments and arguments['list'].json) or ('rinfo' in arguments and 'info' in arguments and arguments['rinfo'].json) ): raise ValueError('With the --json flag, multiple actions cannot be used together.') if ( 'transfer' in arguments and arguments['transfer'].archive and arguments['transfer'].match_archives ): raise ValueError( 'With the transfer action, only one of --archive and --glob-archives flags can be used.' ) if 'info' in arguments and ( (arguments['info'].archive and arguments['info'].prefix) or (arguments['info'].archive and arguments['info'].match_archives) or (arguments['info'].prefix and arguments['info'].match_archives) ): raise ValueError( 'With the info action, only one of --archive, --prefix, or --match-archives flags can be used.' 
) return arguments borgmatic-1.7.9/borgmatic/commands/borgmatic.py000066400000000000000000000637461440467744700216310ustar00rootroot00000000000000import collections import json import logging import os import sys import time from queue import Queue from subprocess import CalledProcessError import colorama import pkg_resources import borgmatic.actions.borg import borgmatic.actions.break_lock import borgmatic.actions.check import borgmatic.actions.compact import borgmatic.actions.create import borgmatic.actions.export_tar import borgmatic.actions.extract import borgmatic.actions.info import borgmatic.actions.list import borgmatic.actions.mount import borgmatic.actions.prune import borgmatic.actions.rcreate import borgmatic.actions.restore import borgmatic.actions.rinfo import borgmatic.actions.rlist import borgmatic.actions.transfer import borgmatic.commands.completion from borgmatic.borg import umount as borg_umount from borgmatic.borg import version as borg_version from borgmatic.commands.arguments import parse_arguments from borgmatic.config import checks, collect, convert, validate from borgmatic.hooks import command, dispatch, monitor from borgmatic.logger import add_custom_log_levels, configure_logging, should_do_markup from borgmatic.signals import configure_signals from borgmatic.verbosity import verbosity_to_log_level logger = logging.getLogger(__name__) LEGACY_CONFIG_PATH = '/etc/borgmatic/config' def run_configuration(config_filename, config, arguments): ''' Given a config filename, the corresponding parsed config dict, and command-line arguments as a dict from subparser name to a namespace of parsed arguments, execute the defined create, prune, compact, check, and/or other actions. 
Yield a combination of: * JSON output strings from successfully executing any actions that produce JSON * logging.LogRecord instances containing errors from any actions or backup hooks that fail ''' (location, storage, retention, consistency, hooks) = ( config.get(section_name, {}) for section_name in ('location', 'storage', 'retention', 'consistency', 'hooks') ) global_arguments = arguments['global'] local_path = location.get('local_path', 'borg') remote_path = location.get('remote_path') retries = storage.get('retries', 0) retry_wait = storage.get('retry_wait', 0) encountered_error = None error_repository = '' using_primary_action = {'create', 'prune', 'compact', 'check'}.intersection(arguments) monitoring_log_level = verbosity_to_log_level(global_arguments.monitoring_verbosity) try: local_borg_version = borg_version.local_borg_version(storage, local_path) except (OSError, CalledProcessError, ValueError) as error: yield from log_error_records( '{}: Error getting local Borg version'.format(config_filename), error ) return try: if using_primary_action: dispatch.call_hooks( 'initialize_monitor', hooks, config_filename, monitor.MONITOR_HOOK_NAMES, monitoring_log_level, global_arguments.dry_run, ) if using_primary_action: dispatch.call_hooks( 'ping_monitor', hooks, config_filename, monitor.MONITOR_HOOK_NAMES, monitor.State.START, monitoring_log_level, global_arguments.dry_run, ) except (OSError, CalledProcessError) as error: if command.considered_soft_failure(config_filename, error): return encountered_error = error yield from log_error_records('{}: Error pinging monitor'.format(config_filename), error) if not encountered_error: repo_queue = Queue() for repo in location['repositories']: repo_queue.put((repo, 0),) while not repo_queue.empty(): repository_path, retry_num = repo_queue.get() timeout = retry_num * retry_wait if timeout: logger.warning(f'{config_filename}: Sleeping {timeout}s before next retry') time.sleep(timeout) try: yield from run_actions( 
arguments=arguments, config_filename=config_filename, location=location, storage=storage, retention=retention, consistency=consistency, hooks=hooks, local_path=local_path, remote_path=remote_path, local_borg_version=local_borg_version, repository_path=repository_path, ) except (OSError, CalledProcessError, ValueError) as error: if retry_num < retries: repo_queue.put((repository_path, retry_num + 1),) tuple( # Consume the generator so as to trigger logging. log_error_records( '{}: Error running actions for repository'.format(repository_path), error, levelno=logging.WARNING, log_command_error_output=True, ) ) logger.warning( f'{config_filename}: Retrying... attempt {retry_num + 1}/{retries}' ) continue if command.considered_soft_failure(config_filename, error): return yield from log_error_records( '{}: Error running actions for repository'.format(repository_path), error ) encountered_error = error error_repository = repository_path try: if using_primary_action: # send logs irrespective of error dispatch.call_hooks( 'ping_monitor', hooks, config_filename, monitor.MONITOR_HOOK_NAMES, monitor.State.LOG, monitoring_log_level, global_arguments.dry_run, ) except (OSError, CalledProcessError) as error: if command.considered_soft_failure(config_filename, error): return encountered_error = error yield from log_error_records('{}: Error pinging monitor'.format(config_filename), error) if not encountered_error: try: if using_primary_action: dispatch.call_hooks( 'ping_monitor', hooks, config_filename, monitor.MONITOR_HOOK_NAMES, monitor.State.FINISH, monitoring_log_level, global_arguments.dry_run, ) dispatch.call_hooks( 'destroy_monitor', hooks, config_filename, monitor.MONITOR_HOOK_NAMES, monitoring_log_level, global_arguments.dry_run, ) except (OSError, CalledProcessError) as error: if command.considered_soft_failure(config_filename, error): return encountered_error = error yield from log_error_records('{}: Error pinging monitor'.format(config_filename), error) if 
encountered_error and using_primary_action: try: command.execute_hook( hooks.get('on_error'), hooks.get('umask'), config_filename, 'on-error', global_arguments.dry_run, repository=error_repository, error=encountered_error, output=getattr(encountered_error, 'output', ''), ) dispatch.call_hooks( 'ping_monitor', hooks, config_filename, monitor.MONITOR_HOOK_NAMES, monitor.State.FAIL, monitoring_log_level, global_arguments.dry_run, ) dispatch.call_hooks( 'destroy_monitor', hooks, config_filename, monitor.MONITOR_HOOK_NAMES, monitoring_log_level, global_arguments.dry_run, ) except (OSError, CalledProcessError) as error: if command.considered_soft_failure(config_filename, error): return yield from log_error_records( '{}: Error running on-error hook'.format(config_filename), error ) def run_actions( *, arguments, config_filename, location, storage, retention, consistency, hooks, local_path, remote_path, local_borg_version, repository_path, ): ''' Given parsed command-line arguments as an argparse.ArgumentParser instance, the configuration filename, several different configuration dicts, local and remote paths to Borg, a local Borg version string, and a repository name, run all actions from the command-line arguments on the given repository. Yield JSON output strings from executing any actions that produce JSON. Raise OSError or subprocess.CalledProcessError if an error occurs running a command for an action or a hook. Raise ValueError if the arguments or configuration passed to action are invalid. ''' add_custom_log_levels() repository = os.path.expanduser(repository_path) global_arguments = arguments['global'] dry_run_label = ' (dry run; not making any changes)' if global_arguments.dry_run else '' hook_context = { 'repository': repository_path, # Deprecated: For backwards compatibility with borgmatic < 1.6.0. 
'repositories': ','.join(location['repositories']), } command.execute_hook( hooks.get('before_actions'), hooks.get('umask'), config_filename, 'pre-actions', global_arguments.dry_run, **hook_context, ) for (action_name, action_arguments) in arguments.items(): if action_name == 'rcreate': borgmatic.actions.rcreate.run_rcreate( repository, storage, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) elif action_name == 'transfer': borgmatic.actions.transfer.run_transfer( repository, storage, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) elif action_name == 'create': yield from borgmatic.actions.create.run_create( config_filename, repository, location, storage, hooks, hook_context, local_borg_version, action_arguments, global_arguments, dry_run_label, local_path, remote_path, ) elif action_name == 'prune': borgmatic.actions.prune.run_prune( config_filename, repository, storage, retention, hooks, hook_context, local_borg_version, action_arguments, global_arguments, dry_run_label, local_path, remote_path, ) elif action_name == 'compact': borgmatic.actions.compact.run_compact( config_filename, repository, storage, retention, hooks, hook_context, local_borg_version, action_arguments, global_arguments, dry_run_label, local_path, remote_path, ) elif action_name == 'check': if checks.repository_enabled_for_checks(repository, consistency): borgmatic.actions.check.run_check( config_filename, repository, location, storage, consistency, hooks, hook_context, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) elif action_name == 'extract': borgmatic.actions.extract.run_extract( config_filename, repository, location, storage, hooks, hook_context, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) elif action_name == 'export-tar': borgmatic.actions.export_tar.run_export_tar( repository, storage, local_borg_version, action_arguments, global_arguments, 
local_path, remote_path, ) elif action_name == 'mount': borgmatic.actions.mount.run_mount( repository, storage, local_borg_version, arguments['mount'], local_path, remote_path, ) elif action_name == 'restore': borgmatic.actions.restore.run_restore( repository, location, storage, hooks, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) elif action_name == 'rlist': yield from borgmatic.actions.rlist.run_rlist( repository, storage, local_borg_version, action_arguments, local_path, remote_path, ) elif action_name == 'list': yield from borgmatic.actions.list.run_list( repository, storage, local_borg_version, action_arguments, local_path, remote_path, ) elif action_name == 'rinfo': yield from borgmatic.actions.rinfo.run_rinfo( repository, storage, local_borg_version, action_arguments, local_path, remote_path, ) elif action_name == 'info': yield from borgmatic.actions.info.run_info( repository, storage, local_borg_version, action_arguments, local_path, remote_path, ) elif action_name == 'break-lock': borgmatic.actions.break_lock.run_break_lock( repository, storage, local_borg_version, arguments['break-lock'], local_path, remote_path, ) elif action_name == 'borg': borgmatic.actions.borg.run_borg( repository, storage, local_borg_version, action_arguments, local_path, remote_path, ) command.execute_hook( hooks.get('after_actions'), hooks.get('umask'), config_filename, 'post-actions', global_arguments.dry_run, **hook_context, ) def load_configurations(config_filenames, overrides=None, resolve_env=True): ''' Given a sequence of configuration filenames, load and validate each configuration file. Return the results as a tuple of: dict of configuration filename to corresponding parsed configuration, and sequence of logging.LogRecord instances containing any parse errors. ''' # Dict mapping from config filename to corresponding parsed config dict. configs = collections.OrderedDict() logs = [] # Parse and load each configuration file. 
for config_filename in config_filenames: try: configs[config_filename], parse_logs = validate.parse_configuration( config_filename, validate.schema_filename(), overrides, resolve_env ) logs.extend(parse_logs) except PermissionError: logs.extend( [ logging.makeLogRecord( dict( levelno=logging.WARNING, levelname='WARNING', msg='{}: Insufficient permissions to read configuration file'.format( config_filename ), ) ), ] ) except (ValueError, OSError, validate.Validation_error) as error: logs.extend( [ logging.makeLogRecord( dict( levelno=logging.CRITICAL, levelname='CRITICAL', msg='{}: Error parsing configuration file'.format(config_filename), ) ), logging.makeLogRecord( dict(levelno=logging.CRITICAL, levelname='CRITICAL', msg=error) ), ] ) return (configs, logs) def log_record(suppress_log=False, **kwargs): ''' Create a log record based on the given makeLogRecord() arguments, one of which must be named "levelno". Log the record (unless suppress log is set) and return it. ''' record = logging.makeLogRecord(kwargs) if suppress_log: return record logger.handle(record) return record def log_error_records( message, error=None, levelno=logging.CRITICAL, log_command_error_output=False ): ''' Given error message text, an optional exception object, an optional log level, and whether to log the error output of a CalledProcessError (if any), log error summary information and also yield it as a series of logging.LogRecord instances. Note that because the logs are yielded as a generator, logs won't get logged unless you consume the generator output. ''' level_name = logging._levelToName[levelno] if not error: yield log_record(levelno=levelno, levelname=level_name, msg=message) return try: raise error except CalledProcessError as error: yield log_record(levelno=levelno, levelname=level_name, msg=message) if error.output: # Suppress these logs for now and save full error output for the log summary at the end. 
yield log_record( levelno=levelno, levelname=level_name, msg=error.output, suppress_log=not log_command_error_output, ) yield log_record(levelno=levelno, levelname=level_name, msg=error) except (ValueError, OSError) as error: yield log_record(levelno=levelno, levelname=level_name, msg=message) yield log_record(levelno=levelno, levelname=level_name, msg=error) except: # noqa: E722 # Raising above only as a means of determining the error type. Swallow the exception here # because we don't want the exception to propagate out of this function. pass def get_local_path(configs): ''' Arbitrarily return the local path from the first configuration dict. Default to "borg" if not set. ''' return next(iter(configs.values())).get('location', {}).get('local_path', 'borg') def collect_configuration_run_summary_logs(configs, arguments): ''' Given a dict of configuration filename to corresponding parsed configuration, and parsed command-line arguments as a dict from subparser name to a parsed namespace of arguments, run each configuration file and yield a series of logging.LogRecord instances containing summary information about each run. As a side effect of running through these configuration files, output their JSON results, if any, to stdout. ''' # Run cross-file validation checks. 
repository = None for action_name, action_arguments in arguments.items(): if hasattr(action_arguments, 'repository'): repository = getattr(action_arguments, 'repository') break try: if 'extract' in arguments or 'mount' in arguments: validate.guard_single_repository_selected(repository, configs) validate.guard_configuration_contains_repository(repository, configs) except ValueError as error: yield from log_error_records(str(error)) return if not configs: yield from log_error_records( '{}: No valid configuration files found'.format( ' '.join(arguments['global'].config_paths) ) ) return if 'create' in arguments: try: for config_filename, config in configs.items(): hooks = config.get('hooks', {}) command.execute_hook( hooks.get('before_everything'), hooks.get('umask'), config_filename, 'pre-everything', arguments['global'].dry_run, ) except (CalledProcessError, ValueError, OSError) as error: yield from log_error_records('Error running pre-everything hook', error) return # Execute the actions corresponding to each configuration file. 
json_results = [] for config_filename, config in configs.items(): results = list(run_configuration(config_filename, config, arguments)) error_logs = tuple(result for result in results if isinstance(result, logging.LogRecord)) if error_logs: yield from log_error_records( '{}: Error running configuration file'.format(config_filename) ) yield from error_logs else: yield logging.makeLogRecord( dict( levelno=logging.INFO, levelname='INFO', msg='{}: Successfully ran configuration file'.format(config_filename), ) ) if results: json_results.extend(results) if 'umount' in arguments: logger.info('Unmounting mount point {}'.format(arguments['umount'].mount_point)) try: borg_umount.unmount_archive( mount_point=arguments['umount'].mount_point, local_path=get_local_path(configs), ) except (CalledProcessError, OSError) as error: yield from log_error_records('Error unmounting mount point', error) if json_results: sys.stdout.write(json.dumps(json_results)) if 'create' in arguments: try: for config_filename, config in configs.items(): hooks = config.get('hooks', {}) command.execute_hook( hooks.get('after_everything'), hooks.get('umask'), config_filename, 'post-everything', arguments['global'].dry_run, ) except (CalledProcessError, ValueError, OSError) as error: yield from log_error_records('Error running post-everything hook', error) def exit_with_help_link(): # pragma: no cover ''' Display a link to get help and exit with an error code. ''' logger.critical('') logger.critical('Need some help? 
https://torsion.org/borgmatic/#issues') sys.exit(1) def main(): # pragma: no cover configure_signals() try: arguments = parse_arguments(*sys.argv[1:]) except ValueError as error: configure_logging(logging.CRITICAL) logger.critical(error) exit_with_help_link() except SystemExit as error: if error.code == 0: raise error configure_logging(logging.CRITICAL) logger.critical('Error parsing arguments: {}'.format(' '.join(sys.argv))) exit_with_help_link() global_arguments = arguments['global'] if global_arguments.version: print(pkg_resources.require('borgmatic')[0].version) sys.exit(0) if global_arguments.bash_completion: print(borgmatic.commands.completion.bash_completion()) sys.exit(0) config_filenames = tuple(collect.collect_config_filenames(global_arguments.config_paths)) configs, parse_logs = load_configurations( config_filenames, global_arguments.overrides, global_arguments.resolve_env ) any_json_flags = any( getattr(sub_arguments, 'json', False) for sub_arguments in arguments.values() ) colorama.init( autoreset=True, strip=not should_do_markup(global_arguments.no_color or any_json_flags, configs), ) try: configure_logging( verbosity_to_log_level(global_arguments.verbosity), verbosity_to_log_level(global_arguments.syslog_verbosity), verbosity_to_log_level(global_arguments.log_file_verbosity), verbosity_to_log_level(global_arguments.monitoring_verbosity), global_arguments.log_file, ) except (FileNotFoundError, PermissionError) as error: configure_logging(logging.CRITICAL) logger.critical('Error configuring logging: {}'.format(error)) exit_with_help_link() logger.debug('Ensuring legacy configuration is upgraded') convert.guard_configuration_upgraded(LEGACY_CONFIG_PATH, config_filenames) summary_logs = parse_logs + list(collect_configuration_run_summary_logs(configs, arguments)) summary_logs_max_level = max(log.levelno for log in summary_logs) for message in ('', 'summary:'): log_record( levelno=summary_logs_max_level, 
levelname=logging.getLevelName(summary_logs_max_level), msg=message, ) for log in summary_logs: logger.handle(log) if summary_logs_max_level >= logging.CRITICAL: exit_with_help_link() borgmatic-1.7.9/borgmatic/commands/completion.py000066400000000000000000000040421440467744700220130ustar00rootroot00000000000000from borgmatic.commands import arguments UPGRADE_MESSAGE = ''' Your bash completions script is from a different version of borgmatic than is currently installed. Please upgrade your script so your completions match the command-line flags in your installed borgmatic! Try this to upgrade: sudo sh -c "borgmatic --bash-completion > $BASH_SOURCE" source $BASH_SOURCE ''' def parser_flags(parser): ''' Given an argparse.ArgumentParser instance, return its argument flags in a space-separated string. ''' return ' '.join(option for action in parser._actions for option in action.option_strings) def bash_completion(): ''' Return a bash completion script for the borgmatic command. Produce this by introspecting borgmatic's command-line argument parsers. ''' top_level_parser, subparsers = arguments.make_parsers() global_flags = parser_flags(top_level_parser) actions = ' '.join(subparsers.choices.keys()) # Avert your eyes. 
return '\n'.join( ( 'check_version() {', ' local this_script="$(cat "$BASH_SOURCE" 2> /dev/null)"', ' local installed_script="$(borgmatic --bash-completion 2> /dev/null)"', ' if [ "$this_script" != "$installed_script" ] && [ "$installed_script" != "" ];' ' then cat << EOF\n%s\nEOF' % UPGRADE_MESSAGE, ' fi', '}', 'complete_borgmatic() {', ) + tuple( ''' if [[ " ${COMP_WORDS[*]} " =~ " %s " ]]; then COMPREPLY=($(compgen -W "%s %s %s" -- "${COMP_WORDS[COMP_CWORD]}")) return 0 fi''' % (action, parser_flags(subparser), actions, global_flags) for action, subparser in subparsers.choices.items() ) + ( ' COMPREPLY=($(compgen -W "%s %s" -- "${COMP_WORDS[COMP_CWORD]}"))' % (actions, global_flags), ' (check_version &)', '}', '\ncomplete -o bashdefault -o default -F complete_borgmatic borgmatic', ) ) borgmatic-1.7.9/borgmatic/commands/convert_config.py000066400000000000000000000065351440467744700226600ustar00rootroot00000000000000import os import sys import textwrap from argparse import ArgumentParser from ruamel import yaml from borgmatic.config import convert, generate, legacy, validate DEFAULT_SOURCE_CONFIG_FILENAME = '/etc/borgmatic/config' DEFAULT_SOURCE_EXCLUDES_FILENAME = '/etc/borgmatic/excludes' DEFAULT_DESTINATION_CONFIG_FILENAME = '/etc/borgmatic/config.yaml' def parse_arguments(*arguments): ''' Given command-line arguments with which this script was invoked, parse the arguments and return them as an ArgumentParser instance. ''' parser = ArgumentParser( description=''' Convert legacy INI-style borgmatic configuration and excludes files to a single YAML configuration file. Note that this replaces any comments from the source files. ''' ) parser.add_argument( '-s', '--source-config', dest='source_config_filename', default=DEFAULT_SOURCE_CONFIG_FILENAME, help='Source INI-style configuration filename. 
Default: {}'.format( DEFAULT_SOURCE_CONFIG_FILENAME ), ) parser.add_argument( '-e', '--source-excludes', dest='source_excludes_filename', default=DEFAULT_SOURCE_EXCLUDES_FILENAME if os.path.exists(DEFAULT_SOURCE_EXCLUDES_FILENAME) else None, help='Excludes filename', ) parser.add_argument( '-d', '--destination-config', dest='destination_config_filename', default=DEFAULT_DESTINATION_CONFIG_FILENAME, help='Destination YAML configuration filename. Default: {}'.format( DEFAULT_DESTINATION_CONFIG_FILENAME ), ) return parser.parse_args(arguments) TEXT_WRAP_CHARACTERS = 80 def display_result(args): # pragma: no cover result_lines = textwrap.wrap( 'Your borgmatic configuration has been upgraded. Please review the result in {}.'.format( args.destination_config_filename ), TEXT_WRAP_CHARACTERS, ) delete_lines = textwrap.wrap( 'Once you are satisfied, you can safely delete {}{}.'.format( args.source_config_filename, ' and {}'.format(args.source_excludes_filename) if args.source_excludes_filename else '', ), TEXT_WRAP_CHARACTERS, ) print('\n'.join(result_lines)) print() print('\n'.join(delete_lines)) def main(): # pragma: no cover try: args = parse_arguments(*sys.argv[1:]) schema = yaml.round_trip_load(open(validate.schema_filename()).read()) source_config = legacy.parse_configuration( args.source_config_filename, legacy.CONFIG_FORMAT ) source_config_file_mode = os.stat(args.source_config_filename).st_mode source_excludes = ( open(args.source_excludes_filename).read().splitlines() if args.source_excludes_filename else [] ) destination_config = convert.convert_legacy_parsed_config( source_config, source_excludes, schema ) generate.write_configuration( args.destination_config_filename, generate.render_configuration(destination_config), mode=source_config_file_mode, ) display_result(args) except (ValueError, OSError) as error: print(error, file=sys.stderr) sys.exit(1) 
borgmatic-1.7.9/borgmatic/commands/generate_config.py000066400000000000000000000045241440467744700227660ustar00rootroot00000000000000import sys from argparse import ArgumentParser from borgmatic.config import generate, validate DEFAULT_DESTINATION_CONFIG_FILENAME = '/etc/borgmatic/config.yaml' def parse_arguments(*arguments): ''' Given command-line arguments with which this script was invoked, parse the arguments and return them as an ArgumentParser instance. ''' parser = ArgumentParser(description='Generate a sample borgmatic YAML configuration file.') parser.add_argument( '-s', '--source', dest='source_filename', help='Optional YAML configuration file to merge into the generated configuration, useful for upgrading your configuration', ) parser.add_argument( '-d', '--destination', dest='destination_filename', default=DEFAULT_DESTINATION_CONFIG_FILENAME, help='Destination YAML configuration file, default: {}'.format( DEFAULT_DESTINATION_CONFIG_FILENAME ), ) parser.add_argument( '--overwrite', default=False, action='store_true', help='Whether to overwrite any existing destination file, defaults to false', ) return parser.parse_args(arguments) def main(): # pragma: no cover try: args = parse_arguments(*sys.argv[1:]) generate.generate_sample_configuration( args.source_filename, args.destination_filename, validate.schema_filename(), overwrite=args.overwrite, ) print('Generated a sample configuration file at {}.'.format(args.destination_filename)) print() if args.source_filename: print( 'Merged in the contents of configuration file at {}.'.format(args.source_filename) ) print('To review the changes made, run:') print() print( ' diff --unified {} {}'.format(args.source_filename, args.destination_filename) ) print() print('This includes all available configuration options with example values. The few') print('required options are indicated. 
Please edit the file to suit your needs.') print() print('If you ever need help: https://torsion.org/borgmatic/#issues') except (ValueError, OSError) as error: print(error, file=sys.stderr) sys.exit(1) borgmatic-1.7.9/borgmatic/commands/validate_config.py000066400000000000000000000032511440467744700227610ustar00rootroot00000000000000import logging import sys from argparse import ArgumentParser from borgmatic.config import collect, validate logger = logging.getLogger(__name__) def parse_arguments(*arguments): ''' Given command-line arguments with which this script was invoked, parse the arguments and return them as an ArgumentParser instance. ''' config_paths = collect.get_default_config_paths() parser = ArgumentParser(description='Validate borgmatic configuration file(s).') parser.add_argument( '-c', '--config', nargs='+', dest='config_paths', default=config_paths, help='Configuration filenames or directories, defaults to: {}'.format( ' '.join(config_paths) ), ) return parser.parse_args(arguments) def main(): # pragma: no cover args = parse_arguments(*sys.argv[1:]) logging.basicConfig(level=logging.INFO, format='%(message)s') config_filenames = tuple(collect.collect_config_filenames(args.config_paths)) if len(config_filenames) == 0: logger.critical('No files to validate found') sys.exit(1) found_issues = False for config_filename in config_filenames: try: validate.parse_configuration(config_filename, validate.schema_filename()) except (ValueError, OSError, validate.Validation_error) as error: logging.critical('{}: Error parsing configuration file'.format(config_filename)) logging.critical(error) found_issues = True if found_issues: sys.exit(1) else: logger.info( 'All given configuration files are valid: {}'.format(', '.join(config_filenames)) ) 
borgmatic-1.7.9/borgmatic/config/000077500000000000000000000000001440467744700167345ustar00rootroot00000000000000borgmatic-1.7.9/borgmatic/config/__init__.py000066400000000000000000000000001440467744700210330ustar00rootroot00000000000000borgmatic-1.7.9/borgmatic/config/checks.py000066400000000000000000000005351440467744700205510ustar00rootroot00000000000000def repository_enabled_for_checks(repository, consistency): ''' Given a repository name and a consistency configuration dict, return whether the repository is enabled to have consistency checks run. ''' if not consistency.get('check_repositories'): return True return repository in consistency['check_repositories'] borgmatic-1.7.9/borgmatic/config/collect.py000066400000000000000000000041431440467744700207350ustar00rootroot00000000000000import os def get_default_config_paths(expand_home=True): ''' Based on the value of the XDG_CONFIG_HOME and HOME environment variables, return a list of default configuration paths. This includes both system-wide configuration and configuration in the current user's home directory. Don't expand the home directory ($HOME) if the expand home flag is False. ''' user_config_directory = os.getenv('XDG_CONFIG_HOME') or os.path.join('$HOME', '.config') if expand_home: user_config_directory = os.path.expandvars(user_config_directory) return [ '/etc/borgmatic/config.yaml', '/etc/borgmatic.d', '%s/borgmatic/config.yaml' % user_config_directory, '%s/borgmatic.d' % user_config_directory, ] def collect_config_filenames(config_paths): ''' Given a sequence of config paths, both filenames and directories, resolve that to an iterable of files. Accomplish this by listing any given directories looking for contained config files (ending with the ".yaml" or ".yml" extension). This is non-recursive, so any directories within the given directories are ignored. Return paths even if they don't exist on disk, so the user can find out about missing configuration paths. 
However, skip a default config path if it's missing, so the user doesn't have to create a default config path unless they need it. ''' real_default_config_paths = set(map(os.path.realpath, get_default_config_paths())) for path in config_paths: exists = os.path.exists(path) if os.path.realpath(path) in real_default_config_paths and not exists: continue if not os.path.isdir(path) or not exists: yield path continue if not os.access(path, os.R_OK): continue for filename in sorted(os.listdir(path)): full_filename = os.path.join(path, filename) matching_filetype = full_filename.endswith('.yaml') or full_filename.endswith('.yml') if matching_filetype and not os.path.isdir(full_filename): yield full_filename borgmatic-1.7.9/borgmatic/config/convert.py000066400000000000000000000073351440467744700207760ustar00rootroot00000000000000import os from ruamel import yaml from borgmatic.config import generate def _convert_section(source_section_config, section_schema): ''' Given a legacy Parsed_config instance for a single section, convert it to its corresponding yaml.comments.CommentedMap representation in preparation for actual serialization to YAML. Where integer types exist in the given section schema, convert their values to integers. ''' destination_section_config = yaml.comments.CommentedMap( [ ( option_name, int(option_value) if section_schema['properties'].get(option_name, {}).get('type') == 'integer' else option_value, ) for option_name, option_value in source_section_config.items() ] ) return destination_section_config def convert_legacy_parsed_config(source_config, source_excludes, schema): ''' Given a legacy Parsed_config instance loaded from an INI-style config file and a list of exclude patterns, convert them to a corresponding yaml.comments.CommentedMap representation in preparation for serialization to a single YAML config file. Additionally, use the given schema as a source of helpful comments to include within the returned CommentedMap. 
''' destination_config = yaml.comments.CommentedMap( [ (section_name, _convert_section(section_config, schema['properties'][section_name])) for section_name, section_config in source_config._asdict().items() ] ) # Split space-seperated values into actual lists, make "repository" into a list, and merge in # excludes. location = destination_config['location'] location['source_directories'] = source_config.location['source_directories'].split(' ') location['repositories'] = [location.pop('repository')] location['exclude_patterns'] = source_excludes if source_config.consistency.get('checks'): destination_config['consistency']['checks'] = source_config.consistency['checks'].split(' ') # Add comments to each section, and then add comments to the fields in each section. generate.add_comments_to_configuration_object(destination_config, schema) for section_name, section_config in destination_config.items(): generate.add_comments_to_configuration_object( section_config, schema['properties'][section_name], indent=generate.INDENT ) return destination_config class Legacy_configuration_not_upgraded(FileNotFoundError): def __init__(self): super(Legacy_configuration_not_upgraded, self).__init__( '''borgmatic changed its configuration file format in version 1.1.0 from INI-style to YAML. This better supports validation, and has a more natural way to express lists of values. To upgrade your existing configuration, run: sudo upgrade-borgmatic-config That will generate a new YAML configuration file at /etc/borgmatic/config.yaml (by default) using the values from both your existing configuration and excludes files. The new version of borgmatic will consume the YAML configuration file instead of the old one.''' ) def guard_configuration_upgraded(source_config_filename, destination_config_filenames): ''' If legacy source configuration exists but no destination upgraded configs do, raise Legacy_configuration_not_upgraded. 
The idea is that we want to alert the user about upgrading their config if they haven't already. ''' destination_config_exists = any( os.path.exists(filename) for filename in destination_config_filenames ) if os.path.exists(source_config_filename) and not destination_config_exists: raise Legacy_configuration_not_upgraded() borgmatic-1.7.9/borgmatic/config/environment.py000066400000000000000000000030251440467744700216520ustar00rootroot00000000000000import os import re _VARIABLE_PATTERN = re.compile( r'(?P\\)?(?P\$\{(?P[A-Za-z0-9_]+)((:?-)(?P[^}]+))?\})' ) def _resolve_string(matcher): ''' Get the value from environment given a matcher containing a name and an optional default value. If the variable is not defined in environment and no default value is provided, an Error is raised. ''' if matcher.group('escape') is not None: # in case of escaped envvar, unescape it return matcher.group('variable') # resolve the env var name, default = matcher.group('name'), matcher.group('default') out = os.getenv(name, default=default) if out is None: raise ValueError('Cannot find variable ${name} in environment'.format(name=name)) return out def resolve_env_variables(item): ''' Resolves variables like or ${FOO} from given configuration with values from process environment Supported formats: - ${FOO} will return FOO env variable - ${FOO-bar} or ${FOO:-bar} will return FOO env variable if it exists, else "bar" If any variable is missing in environment and no default value is provided, an Error is raised. 
''' if isinstance(item, str): return _VARIABLE_PATTERN.sub(_resolve_string, item) if isinstance(item, list): for i, subitem in enumerate(item): item[i] = resolve_env_variables(subitem) if isinstance(item, dict): for key, value in item.items(): item[key] = resolve_env_variables(value) return item borgmatic-1.7.9/borgmatic/config/generate.py000066400000000000000000000254141440467744700211060ustar00rootroot00000000000000import collections import io import os import re from ruamel import yaml from borgmatic.config import load, normalize INDENT = 4 SEQUENCE_INDENT = 2 def _insert_newline_before_comment(config, field_name): ''' Using some ruamel.yaml black magic, insert a blank line in the config right before the given field and its comments. ''' config.ca.items[field_name][1].insert( 0, yaml.tokens.CommentToken('\n', yaml.error.CommentMark(0), None) ) def _schema_to_sample_configuration(schema, level=0, parent_is_sequence=False): ''' Given a loaded configuration schema, generate and return sample config for it. Include comments for each section based on the schema "description". 
''' schema_type = schema.get('type') example = schema.get('example') if example is not None: return example if schema_type == 'array': config = yaml.comments.CommentedSeq( [_schema_to_sample_configuration(schema['items'], level, parent_is_sequence=True)] ) add_comments_to_configuration_sequence(config, schema, indent=(level * INDENT)) elif schema_type == 'object': config = yaml.comments.CommentedMap( [ (field_name, _schema_to_sample_configuration(sub_schema, level + 1)) for field_name, sub_schema in schema['properties'].items() ] ) indent = (level * INDENT) + (SEQUENCE_INDENT if parent_is_sequence else 0) add_comments_to_configuration_object( config, schema, indent=indent, skip_first=parent_is_sequence ) else: raise ValueError('Schema at level {} is unsupported: {}'.format(level, schema)) return config def _comment_out_line(line): # If it's already is commented out (or empty), there's nothing further to do! stripped_line = line.lstrip() if not stripped_line or stripped_line.startswith('#'): return line # Comment out the names of optional sections, inserting the '#' after any indent for aesthetics. matches = re.match(r'(\s*)', line) indent_spaces = matches.group(0) if matches else '' count_indent_spaces = len(indent_spaces) return '# '.join((indent_spaces, line[count_indent_spaces:])) def _comment_out_optional_configuration(rendered_config): ''' Post-process a rendered configuration string to comment out optional key/values, as determined by a sentinel in the comment before each key. The idea is that the pre-commented configuration prevents the user from having to comment out a bunch of configuration they don't care about to get to a minimal viable configuration file. Ideally ruamel.yaml would support commenting out keys during configuration generation, but it's not terribly easy to accomplish that way. 
''' lines = [] optional = False for line in rendered_config.split('\n'): # Upon encountering an optional configuration option, comment out lines until the next blank # line. if line.strip().startswith('# {}'.format(COMMENTED_OUT_SENTINEL)): optional = True continue # Hit a blank line, so reset commenting. if not line.strip(): optional = False lines.append(_comment_out_line(line) if optional else line) return '\n'.join(lines) def render_configuration(config): ''' Given a config data structure of nested OrderedDicts, render the config as YAML and return it. ''' dumper = yaml.YAML() dumper.indent(mapping=INDENT, sequence=INDENT + SEQUENCE_INDENT, offset=INDENT) rendered = io.StringIO() dumper.dump(config, rendered) return rendered.getvalue() def write_configuration(config_filename, rendered_config, mode=0o600, overwrite=False): ''' Given a target config filename and rendered config YAML, write it out to file. Create any containing directories as needed. But if the file already exists and overwrite is False, abort before writing anything. ''' if not overwrite and os.path.exists(config_filename): raise FileExistsError( '{} already exists. Aborting. Use --overwrite to replace the file.'.format( config_filename ) ) try: os.makedirs(os.path.dirname(config_filename), mode=0o700) except (FileExistsError, FileNotFoundError): pass with open(config_filename, 'w') as config_file: config_file.write(rendered_config) os.chmod(config_filename, mode) def add_comments_to_configuration_sequence(config, schema, indent=0): ''' If the given config sequence's items are object, then mine the schema for the description of the object's first item, and slap that atop the sequence. Indent the comment the given number of characters. Doing this for sequences of maps results in nice comments that look like: ``` things: # First key description. Added by this function. - key: foo # Second key description. Added by add_comments_to_configuration_object(). 
other: bar ``` ''' if schema['items'].get('type') != 'object': return for field_name in config[0].keys(): field_schema = schema['items']['properties'].get(field_name, {}) description = field_schema.get('description') # No description to use? Skip it. if not field_schema or not description: return config[0].yaml_set_start_comment(description, indent=indent) # We only want the first key's description here, as the rest of the keys get commented by # add_comments_to_configuration_object(). return REQUIRED_SECTION_NAMES = {'location', 'retention'} REQUIRED_KEYS = {'source_directories', 'repositories', 'keep_daily'} COMMENTED_OUT_SENTINEL = 'COMMENT_OUT' def add_comments_to_configuration_object(config, schema, indent=0, skip_first=False): ''' Using descriptions from a schema as a source, add those descriptions as comments to the given config mapping, before each field. Indent the comment the given number of characters. ''' for index, field_name in enumerate(config.keys()): if skip_first and index == 0: continue field_schema = schema['properties'].get(field_name, {}) description = field_schema.get('description', '').strip() # If this is an optional key, add an indicator to the comment flagging it to be commented # out from the sample configuration. This sentinel is consumed by downstream processing that # does the actual commenting out. if field_name not in REQUIRED_SECTION_NAMES and field_name not in REQUIRED_KEYS: description = ( '\n'.join((description, COMMENTED_OUT_SENTINEL)) if description else COMMENTED_OUT_SENTINEL ) # No description to use? Skip it. 
if not field_schema or not description: # pragma: no cover continue config.yaml_set_comment_before_after_key(key=field_name, before=description, indent=indent) if index > 0: _insert_newline_before_comment(config, field_name) RUAMEL_YAML_COMMENTS_INDEX = 1 def remove_commented_out_sentinel(config, field_name): ''' Given a configuration CommentedMap and a top-level field name in it, remove any "commented out" sentinel found at the end of its YAML comments. This prevents the given field name from getting commented out by downstream processing that consumes the sentinel. ''' try: last_comment_value = config.ca.items[field_name][RUAMEL_YAML_COMMENTS_INDEX][-1].value except KeyError: return if last_comment_value == '# {}\n'.format(COMMENTED_OUT_SENTINEL): config.ca.items[field_name][RUAMEL_YAML_COMMENTS_INDEX].pop() def merge_source_configuration_into_destination(destination_config, source_config): ''' Deep merge the given source configuration dict into the destination configuration CommentedMap, favoring values from the source when there are collisions. The purpose of this is to upgrade configuration files from old versions of borgmatic by adding new configuration keys and comments. ''' if not source_config: return destination_config if not destination_config or not isinstance(source_config, collections.abc.Mapping): return source_config for field_name, source_value in source_config.items(): # Since this key/value is from the source configuration, leave it uncommented and remove any # sentinel that would cause it to get commented out. remove_commented_out_sentinel(destination_config, field_name) # This is a mapping. Recurse for this key/value. if isinstance(source_value, collections.abc.Mapping): destination_config[field_name] = merge_source_configuration_into_destination( destination_config[field_name], source_value ) continue # This is a sequence. Recurse for each item in it. 
if isinstance(source_value, collections.abc.Sequence) and not isinstance(source_value, str): destination_value = destination_config[field_name] destination_config[field_name] = yaml.comments.CommentedSeq( [ merge_source_configuration_into_destination( destination_value[index] if index < len(destination_value) else None, source_item, ) for index, source_item in enumerate(source_value) ] ) continue # This is some sort of scalar. Simply set it into the destination. destination_config[field_name] = source_config[field_name] return destination_config def generate_sample_configuration( source_filename, destination_filename, schema_filename, overwrite=False ): ''' Given an optional source configuration filename, and a required destination configuration filename, the path to a schema filename in a YAML rendition of the JSON Schema format, and whether to overwrite a destination file, write out a sample configuration file based on that schema. If a source filename is provided, merge the parsed contents of that configuration into the generated configuration. 
''' schema = yaml.round_trip_load(open(schema_filename)) source_config = None if source_filename: source_config = load.load_configuration(source_filename) normalize.normalize(source_filename, source_config) destination_config = merge_source_configuration_into_destination( _schema_to_sample_configuration(schema), source_config ) write_configuration( destination_filename, _comment_out_optional_configuration(render_configuration(destination_config)), overwrite=overwrite, ) borgmatic-1.7.9/borgmatic/config/legacy.py000066400000000000000000000124701440467744700205560ustar00rootroot00000000000000from collections import OrderedDict, namedtuple from configparser import RawConfigParser Section_format = namedtuple('Section_format', ('name', 'options')) Config_option = namedtuple('Config_option', ('name', 'value_type', 'required')) def option(name, value_type=str, required=True): ''' Given a config file option name, an expected type for its value, and whether it's required, return a Config_option capturing that information. 
''' return Config_option(name, value_type, required) CONFIG_FORMAT = ( Section_format( 'location', ( option('source_directories'), option('one_file_system', value_type=bool, required=False), option('remote_path', required=False), option('repository'), ), ), Section_format( 'storage', ( option('encryption_passphrase', required=False), option('compression', required=False), option('umask', required=False), ), ), Section_format( 'retention', ( option('keep_within', required=False), option('keep_hourly', int, required=False), option('keep_daily', int, required=False), option('keep_weekly', int, required=False), option('keep_monthly', int, required=False), option('keep_yearly', int, required=False), option('prefix', required=False), ), ), Section_format( 'consistency', (option('checks', required=False), option('check_last', required=False)) ), ) def validate_configuration_format(parser, config_format): ''' Given an open RawConfigParser and an expected config file format, validate that the parsed configuration file has the expected sections, that any required options are present in those sections, and that there aren't any unexpected options. A section is required if any of its contained options are required. Raise ValueError if anything is awry. 
''' section_names = set(parser.sections()) required_section_names = tuple( section.name for section in config_format if any(option.required for option in section.options) ) unknown_section_names = section_names - set( section_format.name for section_format in config_format ) if unknown_section_names: raise ValueError( 'Unknown config sections found: {}'.format(', '.join(unknown_section_names)) ) missing_section_names = set(required_section_names) - section_names if missing_section_names: raise ValueError('Missing config sections: {}'.format(', '.join(missing_section_names))) for section_format in config_format: if section_format.name not in section_names: continue option_names = parser.options(section_format.name) expected_options = section_format.options unexpected_option_names = set(option_names) - set( option.name for option in expected_options ) if unexpected_option_names: raise ValueError( 'Unexpected options found in config section {}: {}'.format( section_format.name, ', '.join(sorted(unexpected_option_names)) ) ) missing_option_names = tuple( option.name for option in expected_options if option.required if option.name not in option_names ) if missing_option_names: raise ValueError( 'Required options missing from config section {}: {}'.format( section_format.name, ', '.join(missing_option_names) ) ) def parse_section_options(parser, section_format): ''' Given an open RawConfigParser and an expected section format, return the option values from that section as a dict mapping from option name to value. Omit those options that are not present in the parsed options. Raise ValueError if any option values cannot be coerced to the expected Python data type. 
''' type_getter = {str: parser.get, int: parser.getint, bool: parser.getboolean} return OrderedDict( (option.name, type_getter[option.value_type](section_format.name, option.name)) for option in section_format.options if parser.has_option(section_format.name, option.name) ) def parse_configuration(config_filename, config_format): ''' Given a config filename and an expected config file format, return the parsed configuration as a namedtuple with one attribute for each parsed section. Raise IOError if the file cannot be read, or ValueError if the format is not as expected. ''' parser = RawConfigParser() if not parser.read(config_filename): raise ValueError('Configuration file cannot be opened: {}'.format(config_filename)) validate_configuration_format(parser, config_format) # Describes a parsed configuration, where each attribute is the name of a configuration file # section and each value is a dict of that section's parsed options. Parsed_config = namedtuple( 'Parsed_config', (section_format.name for section_format in config_format) ) return Parsed_config( *(parse_section_options(parser, section_format) for section_format in config_format) ) borgmatic-1.7.9/borgmatic/config/load.py000066400000000000000000000215471440467744700202360ustar00rootroot00000000000000import functools import logging import os import ruamel.yaml logger = logging.getLogger(__name__) def include_configuration(loader, filename_node, include_directory): ''' Given a ruamel.yaml.loader.Loader, a ruamel.yaml.serializer.ScalarNode containing the included filename, and an include directory path to search for matching files, load the given YAML filename (ignoring the given loader so we can use our own) and return its contents as a data structure of nested dicts and lists. If the filename is relative, probe for it within 1. the current working directory and 2. the given include directory. Raise FileNotFoundError if an included file was not found. 
''' include_directories = [os.getcwd(), os.path.abspath(include_directory)] include_filename = os.path.expanduser(filename_node.value) if not os.path.isabs(include_filename): candidate_filenames = [ os.path.join(directory, include_filename) for directory in include_directories ] for candidate_filename in candidate_filenames: if os.path.exists(candidate_filename): include_filename = candidate_filename break else: raise FileNotFoundError( f'Could not find include {filename_node.value} at {" or ".join(candidate_filenames)}' ) return load_configuration(include_filename) class Include_constructor(ruamel.yaml.SafeConstructor): ''' A YAML "constructor" (a ruamel.yaml concept) that supports a custom "!include" tag for including separate YAML configuration files. Example syntax: `retention: !include common.yaml` ''' def __init__(self, preserve_quotes=None, loader=None, include_directory=None): super(Include_constructor, self).__init__(preserve_quotes, loader) self.add_constructor( '!include', functools.partial(include_configuration, include_directory=include_directory), ) def flatten_mapping(self, node): ''' Support the special case of deep merging included configuration into an existing mapping using the YAML '<<' merge key. Example syntax: ``` retention: keep_daily: 1 <<: !include common.yaml ``` These includes are deep merged into the current configuration file. For instance, in this example, any "retention" options in common.yaml will get merged into the "retention" section in the example configuration file. 
''' representer = ruamel.yaml.representer.SafeRepresenter() for index, (key_node, value_node) in enumerate(node.value): if key_node.tag == u'tag:yaml.org,2002:merge' and value_node.tag == '!include': included_value = representer.represent_data(self.construct_object(value_node)) node.value[index] = (key_node, included_value) super(Include_constructor, self).flatten_mapping(node) node.value = deep_merge_nodes(node.value) def load_configuration(filename): ''' Load the given configuration file and return its contents as a data structure of nested dicts and lists. Raise ruamel.yaml.error.YAMLError if something goes wrong parsing the YAML, or RecursionError if there are too many recursive includes. ''' # Use an embedded derived class for the include constructor so as to capture the filename # value. (functools.partial doesn't work for this use case because yaml.Constructor has to be # an actual class.) class Include_constructor_with_include_directory(Include_constructor): def __init__(self, preserve_quotes=None, loader=None): super(Include_constructor_with_include_directory, self).__init__( preserve_quotes, loader, include_directory=os.path.dirname(filename) ) yaml = ruamel.yaml.YAML(typ='safe') yaml.Constructor = Include_constructor_with_include_directory return yaml.load(open(filename)) DELETED_NODE = object() def deep_merge_nodes(nodes): ''' Given a nested borgmatic configuration data structure as a list of tuples in the form of: ( ruamel.yaml.nodes.ScalarNode as a key, ruamel.yaml.nodes.MappingNode or other Node as a value, ), ... deep merge any node values corresponding to duplicate keys and return the result. If there are colliding keys with non-MappingNode values (e.g., integers or strings), the last of the values wins. 
For instance, given node values of: [ ( ScalarNode(tag='tag:yaml.org,2002:str', value='retention'), MappingNode(tag='tag:yaml.org,2002:map', value=[ ( ScalarNode(tag='tag:yaml.org,2002:str', value='keep_hourly'), ScalarNode(tag='tag:yaml.org,2002:int', value='24') ), ( ScalarNode(tag='tag:yaml.org,2002:str', value='keep_daily'), ScalarNode(tag='tag:yaml.org,2002:int', value='7') ), ]), ), ( ScalarNode(tag='tag:yaml.org,2002:str', value='retention'), MappingNode(tag='tag:yaml.org,2002:map', value=[ ( ScalarNode(tag='tag:yaml.org,2002:str', value='keep_daily'), ScalarNode(tag='tag:yaml.org,2002:int', value='5') ), ]), ), ] ... the returned result would be: [ ( ScalarNode(tag='tag:yaml.org,2002:str', value='retention'), MappingNode(tag='tag:yaml.org,2002:map', value=[ ( ScalarNode(tag='tag:yaml.org,2002:str', value='keep_hourly'), ScalarNode(tag='tag:yaml.org,2002:int', value='24') ), ( ScalarNode(tag='tag:yaml.org,2002:str', value='keep_daily'), ScalarNode(tag='tag:yaml.org,2002:int', value='5') ), ]), ), ] The purpose of deep merging like this is to support, for instance, merging one borgmatic configuration file into another for reuse, such that a configuration section ("retention", etc.) does not completely replace the corresponding section in a merged file. ''' # Map from original node key/value to the replacement merged node. DELETED_NODE as a replacement # node indications deletion. replaced_nodes = {} # To find nodes that require merging, compare each node with each other node. for a_key, a_value in nodes: for b_key, b_value in nodes: # If we've already considered one of the nodes for merging, skip it. if (a_key, a_value) in replaced_nodes or (b_key, b_value) in replaced_nodes: continue # If the keys match and the values are different, we need to merge these two A and B nodes. if a_key.tag == b_key.tag and a_key.value == b_key.value and a_value != b_value: # Since we're merging into the B node, consider the A node a duplicate and remove it. 
replaced_nodes[(a_key, a_value)] = DELETED_NODE # If we're dealing with MappingNodes, recurse and merge its values as well. if isinstance(b_value, ruamel.yaml.nodes.MappingNode): replaced_nodes[(b_key, b_value)] = ( b_key, ruamel.yaml.nodes.MappingNode( tag=b_value.tag, value=deep_merge_nodes(a_value.value + b_value.value), start_mark=b_value.start_mark, end_mark=b_value.end_mark, flow_style=b_value.flow_style, comment=b_value.comment, anchor=b_value.anchor, ), ) # If we're dealing with SequenceNodes, merge by appending one sequence to the other. elif isinstance(b_value, ruamel.yaml.nodes.SequenceNode): replaced_nodes[(b_key, b_value)] = ( b_key, ruamel.yaml.nodes.SequenceNode( tag=b_value.tag, value=a_value.value + b_value.value, start_mark=b_value.start_mark, end_mark=b_value.end_mark, flow_style=b_value.flow_style, comment=b_value.comment, anchor=b_value.anchor, ), ) return [ replaced_nodes.get(node, node) for node in nodes if replaced_nodes.get(node) != DELETED_NODE ] borgmatic-1.7.9/borgmatic/config/normalize.py000066400000000000000000000071731440467744700213160ustar00rootroot00000000000000import logging def normalize(config_filename, config): ''' Given a configuration filename and a configuration dict of its loaded contents, apply particular hard-coded rules to normalize the configuration to adhere to the current schema. Return any log message warnings produced based on the normalization performed. ''' logs = [] location = config.get('location') or {} storage = config.get('storage') or {} consistency = config.get('consistency') or {} hooks = config.get('hooks') or {} # Upgrade exclude_if_present from a string to a list. exclude_if_present = location.get('exclude_if_present') if isinstance(exclude_if_present, str): config['location']['exclude_if_present'] = [exclude_if_present] # Upgrade various monitoring hooks from a string to a dict. 
healthchecks = hooks.get('healthchecks') if isinstance(healthchecks, str): config['hooks']['healthchecks'] = {'ping_url': healthchecks} cronitor = hooks.get('cronitor') if isinstance(cronitor, str): config['hooks']['cronitor'] = {'ping_url': cronitor} pagerduty = hooks.get('pagerduty') if isinstance(pagerduty, str): config['hooks']['pagerduty'] = {'integration_key': pagerduty} cronhub = hooks.get('cronhub') if isinstance(cronhub, str): config['hooks']['cronhub'] = {'ping_url': cronhub} # Upgrade consistency checks from a list of strings to a list of dicts. checks = consistency.get('checks') if isinstance(checks, list) and len(checks) and isinstance(checks[0], str): config['consistency']['checks'] = [{'name': check_type} for check_type in checks] # Rename various configuration options. numeric_owner = location.pop('numeric_owner', None) if numeric_owner is not None: config['location']['numeric_ids'] = numeric_owner bsd_flags = location.pop('bsd_flags', None) if bsd_flags is not None: config['location']['flags'] = bsd_flags remote_rate_limit = storage.pop('remote_rate_limit', None) if remote_rate_limit is not None: config['storage']['upload_rate_limit'] = remote_rate_limit # Upgrade remote repositories to ssh:// syntax, required in Borg 2. repositories = location.get('repositories') if repositories: config['location']['repositories'] = [] for repository in repositories: if '~' in repository: logs.append( logging.makeLogRecord( dict( levelno=logging.WARNING, levelname='WARNING', msg=f'{config_filename}: Repository paths containing "~" are deprecated in borgmatic and no longer work in Borg 2.x+.', ) ) ) if ':' in repository and not repository.startswith('ssh://'): rewritten_repository = ( f"ssh://{repository.replace(':~', '/~').replace(':/', '/').replace(':', '/./')}" ) logs.append( logging.makeLogRecord( dict( levelno=logging.WARNING, levelname='WARNING', msg=f'{config_filename}: Remote repository paths without ssh:// syntax are deprecated. 
Interpreting "{repository}" as "{rewritten_repository}"', ) ) ) config['location']['repositories'].append(rewritten_repository) else: config['location']['repositories'].append(repository) return logs borgmatic-1.7.9/borgmatic/config/override.py000066400000000000000000000045411440467744700211310ustar00rootroot00000000000000import io import ruamel.yaml def set_values(config, keys, value): ''' Given a hierarchy of configuration dicts, a sequence of parsed key strings, and a string value, descend into the hierarchy based on the keys to set the value into the right place. ''' if not keys: return first_key = keys[0] if len(keys) == 1: config[first_key] = value return if first_key not in config: config[first_key] = {} set_values(config[first_key], keys[1:], value) def convert_value_type(value): ''' Given a string value, determine its logical type (string, boolean, integer, etc.), and return it converted to that type. Raise ruamel.yaml.error.YAMLError if there's a parse issue with the YAML. ''' return ruamel.yaml.YAML(typ='safe').load(io.StringIO(value)) def parse_overrides(raw_overrides): ''' Given a sequence of configuration file override strings in the form of "section.option=value", parse and return a sequence of tuples (keys, values), where keys is a sequence of strings. For instance, given the following raw overrides: ['section.my_option=value1', 'section.other_option=value2'] ... return this: ( (('section', 'my_option'), 'value1'), (('section', 'other_option'), 'value2'), ) Raise ValueError if an override can't be parsed. ''' if not raw_overrides: return () parsed_overrides = [] for raw_override in raw_overrides: try: raw_keys, value = raw_override.split('=', 1) parsed_overrides.append((tuple(raw_keys.split('.')), convert_value_type(value),)) except ValueError: raise ValueError( f"Invalid override '{raw_override}'. 
Make sure you use the form: SECTION.OPTION=VALUE" ) except ruamel.yaml.error.YAMLError as error: raise ValueError(f"Invalid override '{raw_override}': {error.problem}") return tuple(parsed_overrides) def apply_overrides(config, raw_overrides): ''' Given a configuration dict and a sequence of configuration file override strings in the form of "section.option=value", parse each override and set it the configuration dict. ''' overrides = parse_overrides(raw_overrides) for (keys, value) in overrides: set_values(config, keys, value) borgmatic-1.7.9/borgmatic/config/schema.yaml000066400000000000000000001704231440467744700210670ustar00rootroot00000000000000type: object required: - location additionalProperties: false properties: location: type: object description: | Where to look for files to backup, and where to store those backups. See https://borgbackup.readthedocs.io/en/stable/quickstart.html and https://borgbackup.readthedocs.io/en/stable/usage/create.html for details. required: - repositories additionalProperties: false properties: source_directories: type: array items: type: string description: | List of source directories to backup. Globs and tildes are expanded. Do not backslash spaces in path names. example: - /home - /etc - /var/log/syslog* - /home/user/path with spaces repositories: type: array items: type: string description: | Paths to local or remote repositories (required). Tildes are expanded. Multiple repositories are backed up to in sequence. Borg placeholders can be used. See the output of "borg help placeholders" for details. See ssh_command for SSH options like identity file or port. If systemd service is used, then add local repository paths in the systemd service file to the ReadWritePaths list. example: - ssh://user@backupserver/./sourcehostname.borg - ssh://user@backupserver/./{fqdn} - /var/local/backups/local.borg working_directory: type: string description: | Working directory for the "borg create" command. Tildes are expanded. 
Useful for backing up using relative paths. See http://borgbackup.readthedocs.io/en/stable/usage/create.html for details. Defaults to not set. example: /path/to/working/directory one_file_system: type: boolean description: | Stay in same file system: do not cross mount points beyond the given source directories. Defaults to false. But when a database hook is used, the setting here is ignored and one_file_system is considered true. example: true numeric_ids: type: boolean description: | Only store/extract numeric user and group identifiers. Defaults to false. example: true atime: type: boolean description: | Store atime into archive. Defaults to true in Borg < 1.2, false in Borg 1.2+. example: false ctime: type: boolean description: Store ctime into archive. Defaults to true. example: false birthtime: type: boolean description: | Store birthtime (creation date) into archive. Defaults to true. example: false read_special: type: boolean description: | Use Borg's --read-special flag to allow backup of block and other special devices. Use with caution, as it will lead to problems if used when backing up special devices such as /dev/zero. Defaults to false. But when a database hook is used, the setting here is ignored and read_special is considered true. example: false flags: type: boolean description: | Record filesystem flags (e.g. NODUMP, IMMUTABLE) in archive. Defaults to true. example: true files_cache: type: string description: | Mode in which to operate the files cache. See http://borgbackup.readthedocs.io/en/stable/usage/create.html for details. Defaults to "ctime,size,inode". example: ctime,size,inode local_path: type: string description: | Alternate Borg local executable. Defaults to "borg". example: borg1 remote_path: type: string description: | Alternate Borg remote executable. Defaults to "borg". example: borg1 patterns: type: array items: type: string description: | Any paths matching these patterns are included/excluded from backups. Globs are expanded. 
(Tildes are not.) See the output of "borg help patterns" for more details. Quote any value if it contains leading punctuation, so it parses correctly. Note that only one of "patterns" and "source_directories" may be used. example: - 'R /' - '- /home/*/.cache' - '+ /home/susan' - '- /home/*' patterns_from: type: array items: type: string description: | Read include/exclude patterns from one or more separate named files, one pattern per line. Note that Borg considers this option experimental. See the output of "borg help patterns" for more details. example: - /etc/borgmatic/patterns exclude_patterns: type: array items: type: string description: | Any paths matching these patterns are excluded from backups. Globs and tildes are expanded. Note that a glob pattern must either start with a glob or be an absolute path. Do not backslash spaces in path names. See the output of "borg help patterns" for more details. example: - '*.pyc' - /home/*/.cache - '*/.vim*.tmp' - /etc/ssl - /home/user/path with spaces exclude_from: type: array items: type: string description: | Read exclude patterns from one or more separate named files, one pattern per line. See the output of "borg help patterns" for more details. example: - /etc/borgmatic/excludes exclude_caches: type: boolean description: | Exclude directories that contain a CACHEDIR.TAG file. See http://www.brynosaurus.com/cachedir/spec.html for details. Defaults to false. example: true exclude_if_present: type: array items: type: string description: | Exclude directories that contain a file with the given filenames. Defaults to not set. example: - .nobackup keep_exclude_tags: type: boolean description: | If true, the exclude_if_present filename is included in backups. Defaults to false, meaning that the exclude_if_present filename is omitted from backups. example: true exclude_nodump: type: boolean description: | Exclude files with the NODUMP flag. Defaults to false. 
example: true borgmatic_source_directory: type: string description: | Path for additional source files used for temporary internal state like borgmatic database dumps. Note that changing this path prevents "borgmatic restore" from finding any database dumps created before the change. Defaults to ~/.borgmatic example: /tmp/borgmatic storage: type: object description: | Repository storage options. See https://borgbackup.readthedocs.io/en/stable/usage/create.html and https://borgbackup.readthedocs.io/en/stable/usage/general.html for details. additionalProperties: false properties: encryption_passcommand: type: string description: | The standard output of this command is used to unlock the encryption key. Only use on repositories that were initialized with passcommand/repokey/keyfile encryption. Note that if both encryption_passcommand and encryption_passphrase are set, then encryption_passphrase takes precedence. Defaults to not set. example: "secret-tool lookup borg-repository repo-name" encryption_passphrase: type: string description: | Passphrase to unlock the encryption key with. Only use on repositories that were initialized with passphrase/repokey/keyfile encryption. Quote the value if it contains punctuation, so it parses correctly. And backslash any quote or backslash literals as well. Defaults to not set. example: "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~" checkpoint_interval: type: integer description: | Number of seconds between each checkpoint during a long-running backup. See https://borgbackup.readthedocs.io/en/stable/faq.html for details. Defaults to checkpoints every 1800 seconds (30 minutes). example: 1800 checkpoint_volume: type: integer description: | Number of backed up bytes between each checkpoint during a long-running backup. Only supported with Borg 2+. See https://borgbackup.readthedocs.io/en/stable/faq.html for details. Defaults to only time-based checkpointing (see "checkpoint_interval") instead of volume-based checkpointing. 
example: 1048576 chunker_params: type: string description: | Specify the parameters passed to then chunker (CHUNK_MIN_EXP, CHUNK_MAX_EXP, HASH_MASK_BITS, HASH_WINDOW_SIZE). See https://borgbackup.readthedocs.io/en/stable/internals.html for details. Defaults to "19,23,21,4095". example: 19,23,21,4095 compression: type: string description: | Type of compression to use when creating archives. See http://borgbackup.readthedocs.io/en/stable/usage/create.html for details. Defaults to "lz4". example: lz4 upload_rate_limit: type: integer description: | Remote network upload rate limit in kiBytes/second. Defaults to unlimited. example: 100 retries: type: integer description: | Number of times to retry a failing backup before giving up. Defaults to 0 (i.e., does not attempt retry). example: 3 retry_wait: type: integer description: | Wait time between retries (in seconds) to allow transient issues to pass. Increases after each retry as a form of backoff. Defaults to 0 (no wait). example: 10 temporary_directory: type: string description: | Directory where temporary files are stored. Defaults to $TMPDIR example: /path/to/tmpdir ssh_command: type: string description: | Command to use instead of "ssh". This can be used to specify ssh options. Defaults to not set. example: ssh -i /path/to/private/key borg_base_directory: type: string description: | Base path used for various Borg directories. Defaults to $HOME, ~$USER, or ~. example: /path/to/base borg_config_directory: type: string description: | Path for Borg configuration files. Defaults to $borg_base_directory/.config/borg example: /path/to/base/config borg_cache_directory: type: string description: | Path for Borg cache files. Defaults to $borg_base_directory/.cache/borg example: /path/to/base/cache borg_security_directory: type: string description: | Path for Borg security and encryption nonce files. 
Defaults to $borg_base_directory/.config/borg/security example: /path/to/base/config/security borg_keys_directory: type: string description: | Path for Borg encryption key files. Defaults to $borg_base_directory/.config/borg/keys example: /path/to/base/config/keys umask: type: integer description: Umask to be used for borg create. Defaults to 0077. example: 0077 lock_wait: type: integer description: | Maximum seconds to wait for acquiring a repository/cache lock. Defaults to 1. example: 5 archive_name_format: type: string description: | Name of the archive. Borg placeholders can be used. See the output of "borg help placeholders" for details. Defaults to "{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}". If you specify this option, consider also specifying a prefix in the retention and consistency sections to avoid accidental pruning/checking of archives with different archive name formats. example: "{hostname}-documents-{now}" relocated_repo_access_is_ok: type: boolean description: | Bypass Borg error about a repository that has been moved. Defaults to false. example: true unknown_unencrypted_repo_access_is_ok: type: boolean description: | Bypass Borg error about a previously unknown unencrypted repository. Defaults to false. example: true extra_borg_options: type: object additionalProperties: false properties: init: type: string description: | Extra command-line options to pass to "borg init". example: "--extra-option" create: type: string description: | Extra command-line options to pass to "borg create". example: "--extra-option" prune: type: string description: | Extra command-line options to pass to "borg prune". example: "--extra-option" compact: type: string description: | Extra command-line options to pass to "borg compact". example: "--extra-option" check: type: string description: | Extra command-line options to pass to "borg check". 
example: "--extra-option" description: | Additional options to pass directly to particular Borg commands, handy for Borg options that borgmatic does not yet support natively. Note that borgmatic does not perform any validation on these options. Running borgmatic with "--verbosity 2" shows the exact Borg command-line invocation. retention: type: object description: | Retention policy for how many backups to keep in each category. See https://borgbackup.readthedocs.io/en/stable/usage/prune.html for details. At least one of the "keep" options is required for pruning to work. To skip pruning entirely, run "borgmatic create" or "check" without the "prune" action. See borgmatic documentation for details. additionalProperties: false properties: keep_within: type: string description: Keep all archives within this time interval. example: 3H keep_secondly: type: integer description: Number of secondly archives to keep. example: 60 keep_minutely: type: integer description: Number of minutely archives to keep. example: 60 keep_hourly: type: integer description: Number of hourly archives to keep. example: 24 keep_daily: type: integer description: Number of daily archives to keep. example: 7 keep_weekly: type: integer description: Number of weekly archives to keep. example: 4 keep_monthly: type: integer description: Number of monthly archives to keep. example: 6 keep_yearly: type: integer description: Number of yearly archives to keep. example: 1 prefix: type: string description: | When pruning, only consider archive names starting with this prefix. Borg placeholders can be used. See the output of "borg help placeholders" for details. Defaults to "{hostname}-". Use an empty value to disable the default. example: sourcehostname consistency: type: object description: | Consistency checks to run after backups. See https://borgbackup.readthedocs.io/en/stable/usage/check.html and https://borgbackup.readthedocs.io/en/stable/usage/extract.html for details. 
additionalProperties: false properties: checks: type: array items: type: object required: ['name'] additionalProperties: false properties: name: type: string enum: - repository - archives - data - extract - disabled description: | Name of consistency check to run: "repository", "archives", "data", and/or "extract". Set to "disabled" to disable all consistency checks. "repository" checks the consistency of the repository, "archives" checks all of the archives, "data" verifies the integrity of the data within the archives, and "extract" does an extraction dry-run of the most recent archive. Note that "data" implies "archives". example: repository frequency: type: string description: | How frequently to run this type of consistency check (as a best effort). The value is a number followed by a unit of time. E.g., "2 weeks" to run this consistency check no more than every two weeks for a given repository or "1 month" to run it no more than monthly. Defaults to "always": running this check every time checks are run. example: 2 weeks description: | List of one or more consistency checks to run on a periodic basis (if "frequency" is set) or every time borgmatic runs checks (if "frequency" is omitted). check_repositories: type: array items: type: string description: | Paths to a subset of the repositories in the location section on which to run consistency checks. Handy in case some of your repositories are very large, and so running consistency checks on them would take too long. Defaults to running consistency checks on all repositories configured in the location section. example: - user@backupserver:sourcehostname.borg check_last: type: integer description: | Restrict the number of checked archives to the last n. Applies only to the "archives" check. Defaults to checking all archives. example: 3 prefix: type: string description: | When performing the "archives" check, only consider archive names starting with this prefix. Borg placeholders can be used. 
See the output of "borg help placeholders" for details. Defaults to "{hostname}-". Use an empty value to disable the default. example: sourcehostname output: type: object description: | Options for customizing borgmatic's own output and logging. additionalProperties: false properties: color: type: boolean description: | Apply color to console output. Can be overridden with --no-color command-line flag. Defaults to true. example: false hooks: type: object description: | Shell commands, scripts, or integrations to execute at various points during a borgmatic run. IMPORTANT: All provided commands and scripts are executed with user permissions of borgmatic. Do not forget to set secure permissions on this configuration file (chmod 0600) as well as on any script called from a hook (chmod 0700) to prevent potential shell injection or privilege escalation. additionalProperties: false properties: before_actions: type: array items: type: string description: | List of one or more shell commands or scripts to execute before all the actions for each repository. example: - echo "Starting actions." before_backup: type: array items: type: string description: | List of one or more shell commands or scripts to execute before creating a backup, run once per repository. example: - echo "Starting a backup." before_prune: type: array items: type: string description: | List of one or more shell commands or scripts to execute before pruning, run once per repository. example: - echo "Starting pruning." before_compact: type: array items: type: string description: | List of one or more shell commands or scripts to execute before compaction, run once per repository. example: - echo "Starting compaction." before_check: type: array items: type: string description: | List of one or more shell commands or scripts to execute before consistency checks, run once per repository. example: - echo "Starting checks." 
before_extract: type: array items: type: string description: | List of one or more shell commands or scripts to execute before extracting a backup, run once per repository. example: - echo "Starting extracting." after_backup: type: array items: type: string description: | List of one or more shell commands or scripts to execute after creating a backup, run once per repository. example: - echo "Finished a backup." after_compact: type: array items: type: string description: | List of one or more shell commands or scripts to execute after compaction, run once per repository. example: - echo "Finished compaction." after_prune: type: array items: type: string description: | List of one or more shell commands or scripts to execute after pruning, run once per repository. example: - echo "Finished pruning." after_check: type: array items: type: string description: | List of one or more shell commands or scripts to execute after consistency checks, run once per repository. example: - echo "Finished checks." after_extract: type: array items: type: string description: | List of one or more shell commands or scripts to execute after extracting a backup, run once per repository. example: - echo "Finished extracting." after_actions: type: array items: type: string description: | List of one or more shell commands or scripts to execute after all actions for each repository. example: - echo "Finished actions." on_error: type: array items: type: string description: | List of one or more shell commands or scripts to execute when an exception occurs during a "create", "prune", "compact", or "check" action or an associated before/after hook. example: - echo "Error during create/prune/compact/check." before_everything: type: array items: type: string description: | List of one or more shell commands or scripts to execute before running all actions (if one of them is "create"). These are collected from all configuration files and then run once before all of them (prior to all actions). 
example: - echo "Starting actions." after_everything: type: array items: type: string description: | List of one or more shell commands or scripts to execute after running all actions (if one of them is "create"). These are collected from all configuration files and then run once after all of them (after any action). example: - echo "Completed actions." postgresql_databases: type: array items: type: object required: ['name'] additionalProperties: false properties: name: type: string description: | Database name (required if using this hook). Or "all" to dump all databases on the host. (Also set the "format" to dump each database to a separate file instead of one combined file.) Note that using this database hook implicitly enables both read_special and one_file_system (see above) to support dump and restore streaming. example: users hostname: type: string description: | Database hostname to connect to. Defaults to connecting via local Unix socket. example: database.example.org port: type: integer description: Port to connect to. Defaults to 5432. example: 5433 username: type: string description: | Username with which to connect to the database. Defaults to the username of the current user. You probably want to specify the "postgres" superuser here when the database name is "all". example: dbuser password: type: string description: | Password with which to connect to the database. Omitting a password will only work if PostgreSQL is configured to trust the configured username without a password or you create a ~/.pgpass file. example: trustsome1 format: type: string enum: ['plain', 'custom', 'directory', 'tar'] description: | Database dump output format. One of "plain", "custom", "directory", or "tar". Defaults to "custom" (unlike raw pg_dump) for a single database. Or, when database name is "all" and format is blank, dumps all databases to a single file. 
But if a format is specified with an "all" database name, dumps each database to a separate file of that format, allowing more convenient restores of individual databases. See the pg_dump documentation for more about formats. example: directory ssl_mode: type: string enum: ['disable', 'allow', 'prefer', 'require', 'verify-ca', 'verify-full'] description: | SSL mode to use to connect to the database server. One of "disable", "allow", "prefer", "require", "verify-ca" or "verify-full". Defaults to "disable". example: require ssl_cert: type: string description: | Path to a client certificate. example: "/root/.postgresql/postgresql.crt" ssl_key: type: string description: | Path to a private client key. example: "/root/.postgresql/postgresql.key" ssl_root_cert: type: string description: | Path to a root certificate containing a list of trusted certificate authorities. example: "/root/.postgresql/root.crt" ssl_crl: type: string description: | Path to a certificate revocation list. example: "/root/.postgresql/root.crl" pg_dump_command: type: string description: | Command to use instead of "pg_dump" or "pg_dumpall". This can be used to run a specific pg_dump version (e.g., one inside a running docker container). Defaults to "pg_dump" for single database dump or "pg_dumpall" to dump all databases. example: docker exec my_pg_container pg_dump pg_restore_command: type: string description: | Command to use instead of "pg_restore". This can be used to run a specific pg_restore version (e.g., one inside a running docker container). Defaults to "pg_restore". example: docker exec my_pg_container pg_restore psql_command: type: string description: | Command to use instead of "psql". This can be used to run a specific psql version (e.g., one inside a running docker container). Defaults to "psql". 
example: docker exec my_pg_container psql options: type: string description: | Additional pg_dump/pg_dumpall options to pass directly to the dump command, without performing any validation on them. See pg_dump documentation for details. example: --role=someone list_options: type: string description: | Additional psql options to pass directly to the psql command that lists available databases, without performing any validation on them. See psql documentation for details. example: --role=someone restore_options: type: string description: | Additional pg_restore/psql options to pass directly to the restore command, without performing any validation on them. See pg_restore/psql documentation for details. example: --role=someone analyze_options: type: string description: | Additional psql options to pass directly to the analyze command run after a restore, without performing any validation on them. See psql documentation for details. example: --role=someone description: | List of one or more PostgreSQL databases to dump before creating a backup, run once per configuration file. The database dumps are added to your source directories at runtime, backed up, and removed afterwards. Requires pg_dump/pg_dumpall/pg_restore commands. See https://www.postgresql.org/docs/current/app-pgdump.html and https://www.postgresql.org/docs/current/libpq-ssl.html for details. mysql_databases: type: array items: type: object required: ['name'] additionalProperties: false properties: name: type: string description: | Database name (required if using this hook). Or "all" to dump all databases on the host. Note that using this database hook implicitly enables both read_special and one_file_system (see above) to support dump and restore streaming. example: users hostname: type: string description: | Database hostname to connect to. Defaults to connecting via local Unix socket. example: database.example.org port: type: integer description: Port to connect to. Defaults to 3306. 
example: 3307 username: type: string description: | Username with which to connect to the database. Defaults to the username of the current user. example: dbuser password: type: string description: | Password with which to connect to the database. Omitting a password will only work if MySQL is configured to trust the configured username without a password. example: trustsome1 format: type: string enum: ['sql'] description: | Database dump output format. Currently only "sql" is supported. Defaults to "sql" for a single database. Or, when database name is "all" and format is blank, dumps all databases to a single file. But if a format is specified with an "all" database name, dumps each database to a separate file of that format, allowing more convenient restores of individual databases. example: sql add_drop_database: type: boolean description: | Use the "--add-drop-database" flag with mysqldump, causing the database to be dropped right before restore. Defaults to true. example: false options: type: string description: | Additional mysqldump options to pass directly to the dump command, without performing any validation on them. See mysqldump documentation for details. example: --skip-comments list_options: type: string description: | Additional mysql options to pass directly to the mysql command that lists available databases, without performing any validation on them. See mysql documentation for details. example: --defaults-extra-file=my.cnf restore_options: type: string description: | Additional mysql options to pass directly to the mysql command that restores database dumps, without performing any validation on them. See mysql documentation for details. example: --defaults-extra-file=my.cnf description: | List of one or more MySQL/MariaDB databases to dump before creating a backup, run once per configuration file. The database dumps are added to your source directories at runtime, backed up, and removed afterwards. 
Requires mysqldump/mysql commands (from either MySQL or MariaDB). See https://dev.mysql.com/doc/refman/8.0/en/mysqldump.html or https://mariadb.com/kb/en/library/mysqldump/ for details. sqlite_databases: type: array items: type: object required: ['path','name'] additionalProperties: false properties: name: type: string description: | This is used to tag the database dump file with a name. It is not the path to the database file itself. The name "all" has no special meaning for SQLite databases. example: users path: type: string description: | Path to the SQLite database file to dump. If relative, it is relative to the current working directory. Note that using this database hook implicitly enables both read_special and one_file_system (see above) to support dump and restore streaming. example: /var/lib/sqlite/users.db mongodb_databases: type: array items: type: object required: ['name'] additionalProperties: false properties: name: type: string description: | Database name (required if using this hook). Or "all" to dump all databases on the host. Note that using this database hook implicitly enables both read_special and one_file_system (see above) to support dump and restore streaming. example: users hostname: type: string description: | Database hostname to connect to. Defaults to connecting to localhost. example: database.example.org port: type: integer description: Port to connect to. Defaults to 27017. example: 27018 username: type: string description: | Username with which to connect to the database. Skip it if no authentication is needed. example: dbuser password: type: string description: | Password with which to connect to the database. Skip it if no authentication is needed. example: trustsome1 authentication_database: type: string description: | Authentication database where the specified username exists. If no authentication database is specified, the database provided in "name" is used. If "name" is "all", the "admin" database is used. 
example: admin format: type: string enum: ['archive', 'directory'] description: | Database dump output format. One of "archive", or "directory". Defaults to "archive". See mongodump documentation for details. Note that format is ignored when the database name is "all". example: directory options: type: string description: | Additional mongodump options to pass directly to the dump command, without performing any validation on them. See mongodump documentation for details. example: --dumpDbUsersAndRoles restore_options: type: string description: | Additional mongorestore options to pass directly to the dump command, without performing any validation on them. See mongorestore documentation for details. example: --restoreDbUsersAndRoles description: | List of one or more MongoDB databases to dump before creating a backup, run once per configuration file. The database dumps are added to your source directories at runtime, backed up, and removed afterwards. Requires mongodump/mongorestore commands. See https://docs.mongodb.com/database-tools/mongodump/ and https://docs.mongodb.com/database-tools/mongorestore/ for details. ntfy: type: object required: ['topic'] additionalProperties: false properties: topic: type: string description: | The topic to publish to. (https://ntfy.sh/docs/publish/) example: topic server: type: string description: | The address of your self-hosted ntfy.sh instance. example: https://ntfy.your-domain.com username: type: string description: | The username used for authentication. example: testuser password: type: string description: | The password used for authentication. example: fakepassword start: type: object properties: title: type: string description: | The title of the message example: Ping! message: type: string description: | The message body to publish. example: Your backups have failed. priority: type: string description: | The priority to set. example: urgent tags: type: string description: | Tags to attach to the message. 
example: incoming_envelope finish: type: object properties: title: type: string description: | The title of the message. example: Ping! message: type: string description: | The message body to publish. example: Your backups have failed. priority: type: string description: | The priority to set. example: urgent tags: type: string description: | Tags to attach to the message. example: incoming_envelope fail: type: object properties: title: type: string description: | The title of the message. example: Ping! message: type: string description: | The message body to publish. example: Your backups have failed. priority: type: string description: | The priority to set. example: urgent tags: type: string description: | Tags to attach to the message. example: incoming_envelope states: type: array items: type: string enum: - start - finish - fail uniqueItems: true description: | List of one or more monitoring states to ping for: "start", "finish", and/or "fail". Defaults to pinging for failure only. example: - start - finish healthchecks: type: object required: ['ping_url'] additionalProperties: false properties: ping_url: type: string description: | Healthchecks ping URL or UUID to notify when a backup begins, ends, errors or just to send logs. example: https://hc-ping.com/your-uuid-here verify_tls: type: boolean description: | Verify the TLS certificate of the ping URL host. Defaults to true. example: false send_logs: type: boolean description: | Send borgmatic logs to Healthchecks as part the "finish", "fail", and "log" states. Defaults to true. example: false ping_body_limit: type: integer description: | Number of bytes of borgmatic logs to send to Healthchecks, ideally the same as PING_BODY_LIMIT configured on the Healthchecks server. Set to 0 to send all logs and disable this truncation. Defaults to 100000. 
example: 200000 states: type: array items: type: string enum: - start - finish - fail - log uniqueItems: true description: | List of one or more monitoring states to ping for: "start", "finish", "fail", and/or "log". Defaults to pinging for all states. example: - finish description: | Configuration for a monitoring integration with Healthchecks. Create an account at https://healthchecks.io (or self-host Healthchecks) if you'd like to use this service. See borgmatic monitoring documentation for details. cronitor: type: object required: ['ping_url'] additionalProperties: false properties: ping_url: type: string description: | Cronitor ping URL to notify when a backup begins, ends, or errors. example: https://cronitor.link/d3x0c1 description: | Configuration for a monitoring integration with Cronitor. Create an account at https://cronitor.io if you'd like to use this service. See borgmatic monitoring documentation for details. pagerduty: type: object required: ['integration_key'] additionalProperties: false properties: integration_key: type: string description: | PagerDuty integration key used to notify PagerDuty when a backup errors. example: a177cad45bd374409f78906a810a3074 description: | Configuration for a monitoring integration with PagerDuty. Create an account at https://www.pagerduty.com/ if you'd like to use this service. See borgmatic monitoring documentation for details. cronhub: type: object required: ['ping_url'] additionalProperties: false properties: ping_url: type: string description: | Cronhub ping URL to notify when a backup begins, ends, or errors. example: https://cronhub.io/ping/1f5e3410-254c-5587 description: | Configuration for a monitoring integration with Cronhub. Create an account at https://cronhub.io if you'd like to use this service. See borgmatic monitoring documentation for details. umask: type: integer description: | Umask used when executing hooks. Defaults to the umask that borgmatic is run with. 
example: 0077 borgmatic-1.7.9/borgmatic/config/validate.py000066400000000000000000000150701440467744700211020ustar00rootroot00000000000000import os import jsonschema import pkg_resources import ruamel.yaml from borgmatic.config import environment, load, normalize, override def schema_filename(): ''' Path to the installed YAML configuration schema file, used to validate and parse the configuration. ''' return pkg_resources.resource_filename('borgmatic', 'config/schema.yaml') def format_json_error_path_element(path_element): ''' Given a path element into a JSON data structure, format it for display as a string. ''' if isinstance(path_element, int): return str('[{}]'.format(path_element)) return str('.{}'.format(path_element)) def format_json_error(error): ''' Given an instance of jsonschema.exceptions.ValidationError, format it for display as a string. ''' if not error.path: return 'At the top level: {}'.format(error.message) formatted_path = ''.join(format_json_error_path_element(element) for element in error.path) return "At '{}': {}".format(formatted_path.lstrip('.'), error.message) class Validation_error(ValueError): ''' A collection of error messages generated when attempting to validate a particular configuration file. ''' def __init__(self, config_filename, errors): ''' Given a configuration filename path and a sequence of string error messages, create a Validation_error. ''' self.config_filename = config_filename self.errors = errors def __str__(self): ''' Render a validation error as a user-facing string. ''' return 'An error occurred while parsing a configuration file at {}:\n'.format( self.config_filename ) + '\n'.join(error for error in self.errors) def apply_logical_validation(config_filename, parsed_configuration): ''' Given a parsed and schematically valid configuration as a data structure of nested dicts (see below), run through any additional logical validation checks. If there are any such validation problems, raise a Validation_error. 
''' location_repositories = parsed_configuration.get('location', {}).get('repositories') check_repositories = parsed_configuration.get('consistency', {}).get('check_repositories', []) for repository in check_repositories: if repository not in location_repositories: raise Validation_error( config_filename, ( 'Unknown repository in the "consistency" section\'s "check_repositories": {}'.format( repository ), ), ) def parse_configuration(config_filename, schema_filename, overrides=None, resolve_env=True): ''' Given the path to a config filename in YAML format, the path to a schema filename in a YAML rendition of JSON Schema format, a sequence of configuration file override strings in the form of "section.option=value", return the parsed configuration as a data structure of nested dicts and lists corresponding to the schema. Example return value: {'location': {'source_directories': ['/home', '/etc'], 'repository': 'hostname.borg'}, 'retention': {'keep_daily': 7}, 'consistency': {'checks': ['repository', 'archives']}} Also return a sequence of logging.LogRecord instances containing any warnings about the configuration. Raise FileNotFoundError if the file does not exist, PermissionError if the user does not have permissions to read the file, or Validation_error if the config does not match the schema. 
''' try: config = load.load_configuration(config_filename) schema = load.load_configuration(schema_filename) except (ruamel.yaml.error.YAMLError, RecursionError) as error: raise Validation_error(config_filename, (str(error),)) override.apply_overrides(config, overrides) logs = normalize.normalize(config_filename, config) if resolve_env: environment.resolve_env_variables(config) try: validator = jsonschema.Draft7Validator(schema) except AttributeError: # pragma: no cover validator = jsonschema.Draft4Validator(schema) validation_errors = tuple(validator.iter_errors(config)) if validation_errors: raise Validation_error( config_filename, tuple(format_json_error(error) for error in validation_errors) ) apply_logical_validation(config_filename, config) return config, logs def normalize_repository_path(repository): ''' Given a repository path, return the absolute path of it (for local repositories). ''' # A colon in the repository indicates it's a remote repository. Bail. if ':' in repository: return repository return os.path.abspath(repository) def repositories_match(first, second): ''' Given two repository paths (relative and/or absolute), return whether they match. ''' return normalize_repository_path(first) == normalize_repository_path(second) def guard_configuration_contains_repository(repository, configurations): ''' Given a repository path and a dict mapping from config filename to corresponding parsed config dict, ensure that the repository is declared exactly once in all of the configurations. If no repository is given, skip this check. Raise ValueError if the repository is not found in a configuration, or is declared multiple times. 
''' if not repository: return count = len( tuple( config_repository for config in configurations.values() for config_repository in config['location']['repositories'] if repositories_match(repository, config_repository) ) ) if count == 0: raise ValueError('Repository {} not found in configuration files'.format(repository)) if count > 1: raise ValueError('Repository {} found in multiple configuration files'.format(repository)) def guard_single_repository_selected(repository, configurations): ''' Given a repository path and a dict mapping from config filename to corresponding parsed config dict, ensure either a single repository exists across all configuration files or a repository path was given. ''' if repository: return count = len( tuple( config_repository for config in configurations.values() for config_repository in config['location']['repositories'] ) ) if count != 1: raise ValueError( "Can't determine which repository to use. Use --repository to disambiguate" ) borgmatic-1.7.9/borgmatic/execute.py000066400000000000000000000306551440467744700175140ustar00rootroot00000000000000import collections import logging import os import select import subprocess logger = logging.getLogger(__name__) ERROR_OUTPUT_MAX_LINE_COUNT = 25 BORG_ERROR_EXIT_CODE = 2 def exit_code_indicates_error(process, exit_code, borg_local_path=None): ''' Return True if the given exit code from running a command corresponds to an error. If a Borg local path is given and matches the process' command, then treat exit code 1 as a warning instead of an error. ''' if exit_code is None: return False command = process.args.split(' ') if isinstance(process.args, str) else process.args if borg_local_path and command[0] == borg_local_path: return bool(exit_code < 0 or exit_code >= BORG_ERROR_EXIT_CODE) return bool(exit_code != 0) def command_for_process(process): ''' Given a process as an instance of subprocess.Popen, return the command string that was used to invoke it. 
''' return process.args if isinstance(process.args, str) else ' '.join(process.args) def output_buffer_for_process(process, exclude_stdouts): ''' Given a process as an instance of subprocess.Popen and a sequence of stdouts to exclude, return either the process's stdout or stderr. The idea is that if stdout is excluded for a process, we still have stderr to log. ''' return process.stderr if process.stdout in exclude_stdouts else process.stdout def log_outputs(processes, exclude_stdouts, output_log_level, borg_local_path): ''' Given a sequence of subprocess.Popen() instances for multiple processes, log the output for each process with the requested log level. Additionally, raise a CalledProcessError if a process exits with an error (or a warning for exit code 1, if that process does not match the Borg local path). If output log level is None, then instead of logging, capture output for each process and return it as a dict from the process to its output. For simplicity, it's assumed that the output buffer for each process is its stdout. But if any stdouts are given to exclude, then for any matching processes, log from their stderr instead. Note that stdout for a process can be None if output is intentionally not captured. In which case it won't be logged. ''' # Map from output buffer to sequence of last lines. buffer_last_lines = collections.defaultdict(list) process_for_output_buffer = { output_buffer_for_process(process, exclude_stdouts): process for process in processes if process.stdout or process.stderr } output_buffers = list(process_for_output_buffer.keys()) captured_outputs = collections.defaultdict(list) still_running = True # Log output for each process until they all exit. 
while True: if output_buffers: (ready_buffers, _, _) = select.select(output_buffers, [], []) for ready_buffer in ready_buffers: ready_process = process_for_output_buffer.get(ready_buffer) # The "ready" process has exited, but it might be a pipe destination with other # processes (pipe sources) waiting to be read from. So as a measure to prevent # hangs, vent all processes when one exits. if ready_process and ready_process.poll() is not None: for other_process in processes: if ( other_process.poll() is None and other_process.stdout and other_process.stdout not in output_buffers ): # Add the process's output to output_buffers to ensure it'll get read. output_buffers.append(other_process.stdout) while True: line = ready_buffer.readline().rstrip().decode() if not line or not ready_process: break # Keep the last few lines of output in case the process errors, and we need the output for # the exception below. last_lines = buffer_last_lines[ready_buffer] last_lines.append(line) if len(last_lines) > ERROR_OUTPUT_MAX_LINE_COUNT: last_lines.pop(0) if output_log_level is None: captured_outputs[ready_process].append(line) else: logger.log(output_log_level, line) if not still_running: break still_running = False for process in processes: exit_code = process.poll() if output_buffers else process.wait() if exit_code is None: still_running = True # If any process errors, then raise accordingly. if exit_code_indicates_error(process, exit_code, borg_local_path): # If an error occurs, include its output in the raised exception so that we don't # inadvertently hide error output. output_buffer = output_buffer_for_process(process, exclude_stdouts) last_lines = buffer_last_lines[output_buffer] if output_buffer else [] if len(last_lines) == ERROR_OUTPUT_MAX_LINE_COUNT: last_lines.insert(0, '...') # Something has gone wrong. So vent each process' output buffer to prevent it from # hanging. And then kill the process. 
for other_process in processes: if other_process.poll() is None: other_process.stdout.read(0) other_process.kill() raise subprocess.CalledProcessError( exit_code, command_for_process(process), '\n'.join(last_lines) ) if captured_outputs: return { process: '\n'.join(output_lines) for process, output_lines in captured_outputs.items() } def log_command(full_command, input_file=None, output_file=None): ''' Log the given command (a sequence of command/argument strings), along with its input/output file paths. ''' logger.debug( ' '.join(full_command) + (' < {}'.format(getattr(input_file, 'name', '')) if input_file else '') + (' > {}'.format(getattr(output_file, 'name', '')) if output_file else '') ) # An sentinel passed as an output file to execute_command() to indicate that the command's output # should be allowed to flow through to stdout without being captured for logging. Useful for # commands with interactive prompts or those that mess directly with the console. DO_NOT_CAPTURE = object() def execute_command( full_command, output_log_level=logging.INFO, output_file=None, input_file=None, shell=False, extra_environment=None, working_directory=None, borg_local_path=None, run_to_completion=True, ): ''' Execute the given command (a sequence of command/argument strings) and log its output at the given log level. If an open output file object is given, then write stdout to the file and only log stderr. If an open input file object is given, then read stdin from the file. If shell is True, execute the command within a shell. If an extra environment dict is given, then use it to augment the current environment, and pass the result into the command. If a working directory is given, use that as the present working directory when running the command. If a Borg local path is given, and the command matches it (regardless of arguments), treat exit code 1 as a warning instead of an error. 
If run to completion is False, then return the process for the command without executing it to completion. Raise subprocesses.CalledProcessError if an error occurs while running the command. ''' log_command(full_command, input_file, output_file) environment = {**os.environ, **extra_environment} if extra_environment else None do_not_capture = bool(output_file is DO_NOT_CAPTURE) command = ' '.join(full_command) if shell else full_command process = subprocess.Popen( command, stdin=input_file, stdout=None if do_not_capture else (output_file or subprocess.PIPE), stderr=None if do_not_capture else (subprocess.PIPE if output_file else subprocess.STDOUT), shell=shell, env=environment, cwd=working_directory, ) if not run_to_completion: return process log_outputs( (process,), (input_file, output_file), output_log_level, borg_local_path=borg_local_path ) def execute_command_and_capture_output( full_command, capture_stderr=False, shell=False, extra_environment=None, working_directory=None, ): ''' Execute the given command (a sequence of command/argument strings), capturing and returning its output (stdout). If capture stderr is True, then capture and return stderr in addition to stdout. If shell is True, execute the command within a shell. If an extra environment dict is given, then use it to augment the current environment, and pass the result into the command. If a working directory is given, use that as the present working directory when running the command. Raise subprocesses.CalledProcessError if an error occurs while running the command. 
''' log_command(full_command) environment = {**os.environ, **extra_environment} if extra_environment else None command = ' '.join(full_command) if shell else full_command output = subprocess.check_output( command, stderr=subprocess.STDOUT if capture_stderr else None, shell=shell, env=environment, cwd=working_directory, ) return output.decode() if output is not None else None def execute_command_with_processes( full_command, processes, output_log_level=logging.INFO, output_file=None, input_file=None, shell=False, extra_environment=None, working_directory=None, borg_local_path=None, ): ''' Execute the given command (a sequence of command/argument strings) and log its output at the given log level. Simultaneously, continue to poll one or more active processes so that they run as well. This is useful, for instance, for processes that are streaming output to a named pipe that the given command is consuming from. If an open output file object is given, then write stdout to the file and only log stderr. But if output log level is None, instead suppress logging and return the captured output for (only) the given command. If an open input file object is given, then read stdin from the file. If shell is True, execute the command within a shell. If an extra environment dict is given, then use it to augment the current environment, and pass the result into the command. If a working directory is given, use that as the present working directory when running the command. If a Borg local path is given, then for any matching command or process (regardless of arguments), treat exit code 1 as a warning instead of an error. Raise subprocesses.CalledProcessError if an error occurs while running the command or in the upstream process. 
''' log_command(full_command, input_file, output_file) environment = {**os.environ, **extra_environment} if extra_environment else None do_not_capture = bool(output_file is DO_NOT_CAPTURE) command = ' '.join(full_command) if shell else full_command try: command_process = subprocess.Popen( command, stdin=input_file, stdout=None if do_not_capture else (output_file or subprocess.PIPE), stderr=None if do_not_capture else (subprocess.PIPE if output_file else subprocess.STDOUT), shell=shell, env=environment, cwd=working_directory, ) except (subprocess.CalledProcessError, OSError): # Something has gone wrong. So vent each process' output buffer to prevent it from hanging. # And then kill the process. for process in processes: if process.poll() is None: process.stdout.read(0) process.kill() raise captured_outputs = log_outputs( tuple(processes) + (command_process,), (input_file, output_file), output_log_level, borg_local_path=borg_local_path, ) if output_log_level is None: return captured_outputs.get(command_process) borgmatic-1.7.9/borgmatic/hooks/000077500000000000000000000000001440467744700166125ustar00rootroot00000000000000borgmatic-1.7.9/borgmatic/hooks/__init__.py000066400000000000000000000000001440467744700207110ustar00rootroot00000000000000borgmatic-1.7.9/borgmatic/hooks/command.py000066400000000000000000000065241440467744700206110ustar00rootroot00000000000000import logging import os import re from borgmatic import execute logger = logging.getLogger(__name__) SOFT_FAIL_EXIT_CODE = 75 def interpolate_context(config_filename, hook_description, command, context): ''' Given a config filename, a hook description, a single hook command, and a dict of context names/values, interpolate the values by "{name}" into the command and return the result. 
''' for name, value in context.items(): command = command.replace('{%s}' % name, str(value)) for unsupported_variable in re.findall(r'{\w+}', command): logger.warning( f"{config_filename}: Variable '{unsupported_variable}' is not supported in {hook_description} hook" ) return command def execute_hook(commands, umask, config_filename, description, dry_run, **context): ''' Given a list of hook commands to execute, a umask to execute with (or None), a config filename, a hook description, and whether this is a dry run, run the given commands. Or, don't run them if this is a dry run. The context contains optional values interpolated by name into the hook commands. Raise ValueError if the umask cannot be parsed. Raise subprocesses.CalledProcessError if an error occurs in a hook. ''' if not commands: logger.debug('{}: No commands to run for {} hook'.format(config_filename, description)) return dry_run_label = ' (dry run; not actually running hooks)' if dry_run else '' context['configuration_filename'] = config_filename commands = [ interpolate_context(config_filename, description, command, context) for command in commands ] if len(commands) == 1: logger.info( '{}: Running command for {} hook{}'.format(config_filename, description, dry_run_label) ) else: logger.info( '{}: Running {} commands for {} hook{}'.format( config_filename, len(commands), description, dry_run_label ) ) if umask: parsed_umask = int(str(umask), 8) logger.debug('{}: Set hook umask to {}'.format(config_filename, oct(parsed_umask))) original_umask = os.umask(parsed_umask) else: original_umask = None try: for command in commands: if not dry_run: execute.execute_command( [command], output_log_level=logging.ERROR if description == 'on-error' else logging.WARNING, shell=True, ) finally: if original_umask: os.umask(original_umask) def considered_soft_failure(config_filename, error): ''' Given a configuration filename and an exception object, return whether the exception object represents a 
subprocess.CalledProcessError with a return code of SOFT_FAIL_EXIT_CODE. If so, that indicates that the error is a "soft failure", and should not result in an error. ''' exit_code = getattr(error, 'returncode', None) if exit_code is None: return False if exit_code == SOFT_FAIL_EXIT_CODE: logger.info( '{}: Command hook exited with soft failure exit code ({}); skipping remaining actions'.format( config_filename, SOFT_FAIL_EXIT_CODE ) ) return True return False borgmatic-1.7.9/borgmatic/hooks/cronhub.py000066400000000000000000000036351440467744700206330ustar00rootroot00000000000000import logging import requests from borgmatic.hooks import monitor logger = logging.getLogger(__name__) MONITOR_STATE_TO_CRONHUB = { monitor.State.START: 'start', monitor.State.FINISH: 'finish', monitor.State.FAIL: 'fail', } def initialize_monitor( ping_url, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No initialization is necessary for this monitor. ''' pass def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run): ''' Ping the configured Cronhub URL, modified with the monitor.State. Use the given configuration filename in any log entries. If this is a dry run, then don't actually ping anything. 
''' if state not in MONITOR_STATE_TO_CRONHUB: logger.debug( f'{config_filename}: Ignoring unsupported monitoring {state.name.lower()} in Cronhub hook' ) return dry_run_label = ' (dry run; not actually pinging)' if dry_run else '' formatted_state = '/{}/'.format(MONITOR_STATE_TO_CRONHUB[state]) ping_url = ( hook_config['ping_url'] .replace('/start/', formatted_state) .replace('/ping/', formatted_state) ) logger.info( '{}: Pinging Cronhub {}{}'.format(config_filename, state.name.lower(), dry_run_label) ) logger.debug('{}: Using Cronhub ping URL {}'.format(config_filename, ping_url)) if not dry_run: logging.getLogger('urllib3').setLevel(logging.ERROR) try: response = requests.get(ping_url) if not response.ok: response.raise_for_status() except requests.exceptions.RequestException as error: logger.warning(f'{config_filename}: Cronhub error: {error}') def destroy_monitor( ping_url_or_uuid, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No destruction is necessary for this monitor. ''' pass borgmatic-1.7.9/borgmatic/hooks/cronitor.py000066400000000000000000000034501440467744700210250ustar00rootroot00000000000000import logging import requests from borgmatic.hooks import monitor logger = logging.getLogger(__name__) MONITOR_STATE_TO_CRONITOR = { monitor.State.START: 'run', monitor.State.FINISH: 'complete', monitor.State.FAIL: 'fail', } def initialize_monitor( ping_url, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No initialization is necessary for this monitor. ''' pass def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run): ''' Ping the configured Cronitor URL, modified with the monitor.State. Use the given configuration filename in any log entries. If this is a dry run, then don't actually ping anything. 
''' if state not in MONITOR_STATE_TO_CRONITOR: logger.debug( f'{config_filename}: Ignoring unsupported monitoring {state.name.lower()} in Cronitor hook' ) return dry_run_label = ' (dry run; not actually pinging)' if dry_run else '' ping_url = '{}/{}'.format(hook_config['ping_url'], MONITOR_STATE_TO_CRONITOR[state]) logger.info( '{}: Pinging Cronitor {}{}'.format(config_filename, state.name.lower(), dry_run_label) ) logger.debug('{}: Using Cronitor ping URL {}'.format(config_filename, ping_url)) if not dry_run: logging.getLogger('urllib3').setLevel(logging.ERROR) try: response = requests.get(ping_url) if not response.ok: response.raise_for_status() except requests.exceptions.RequestException as error: logger.warning(f'{config_filename}: Cronitor error: {error}') def destroy_monitor( ping_url_or_uuid, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No destruction is necessary for this monitor. ''' pass borgmatic-1.7.9/borgmatic/hooks/dispatch.py000066400000000000000000000062321440467744700207660ustar00rootroot00000000000000import logging from borgmatic.hooks import ( cronhub, cronitor, healthchecks, mongodb, mysql, ntfy, pagerduty, postgresql, sqlite, ) logger = logging.getLogger(__name__) HOOK_NAME_TO_MODULE = { 'cronhub': cronhub, 'cronitor': cronitor, 'healthchecks': healthchecks, 'mongodb_databases': mongodb, 'mysql_databases': mysql, 'ntfy': ntfy, 'pagerduty': pagerduty, 'postgresql_databases': postgresql, 'sqlite_databases': sqlite, } def call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs): ''' Given the hooks configuration dict and a prefix to use in log entries, call the requested function of the Python module corresponding to the given hook name. Supply that call with the configuration for this hook (if any), the log prefix, and any given args and kwargs. Return any return value. Raise ValueError if the hook name is unknown. Raise AttributeError if the function name is not found in the module. 
Raise anything else that the called function raises. ''' config = hooks.get(hook_name, {}) try: module = HOOK_NAME_TO_MODULE[hook_name] except KeyError: raise ValueError('Unknown hook name: {}'.format(hook_name)) logger.debug('{}: Calling {} hook function {}'.format(log_prefix, hook_name, function_name)) return getattr(module, function_name)(config, log_prefix, *args, **kwargs) def call_hooks(function_name, hooks, log_prefix, hook_names, *args, **kwargs): ''' Given the hooks configuration dict and a prefix to use in log entries, call the requested function of the Python module corresponding to each given hook name. Supply each call with the configuration for that hook, the log prefix, and any given args and kwargs. Collect any return values into a dict from hook name to return value. If the hook name is not present in the hooks configuration, then don't call the function for it and omit it from the return values. Raise ValueError if the hook name is unknown. Raise AttributeError if the function name is not found in the module. Raise anything else that a called function raises. An error stops calls to subsequent functions. ''' return { hook_name: call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs) for hook_name in hook_names if hooks.get(hook_name) } def call_hooks_even_if_unconfigured(function_name, hooks, log_prefix, hook_names, *args, **kwargs): ''' Given the hooks configuration dict and a prefix to use in log entries, call the requested function of the Python module corresponding to each given hook name. Supply each call with the configuration for that hook, the log prefix, and any given args and kwargs. Collect any return values into a dict from hook name to return value. Raise AttributeError if the function name is not found in the module. Raise anything else that a called function raises. An error stops calls to subsequent functions. 
''' return { hook_name: call_hook(function_name, hooks, log_prefix, hook_name, *args, **kwargs) for hook_name in hook_names } borgmatic-1.7.9/borgmatic/hooks/dump.py000066400000000000000000000045751440467744700201440ustar00rootroot00000000000000import logging import os import shutil from borgmatic.borg.state import DEFAULT_BORGMATIC_SOURCE_DIRECTORY logger = logging.getLogger(__name__) DATABASE_HOOK_NAMES = ( 'postgresql_databases', 'mysql_databases', 'mongodb_databases', 'sqlite_databases', ) def make_database_dump_path(borgmatic_source_directory, database_hook_name): ''' Given a borgmatic source directory (or None) and a database hook name, construct a database dump path. ''' if not borgmatic_source_directory: borgmatic_source_directory = DEFAULT_BORGMATIC_SOURCE_DIRECTORY return os.path.join(borgmatic_source_directory, database_hook_name) def make_database_dump_filename(dump_path, name, hostname=None): ''' Based on the given dump directory path, database name, and hostname, return a filename to use for the database dump. The hostname defaults to localhost. Raise ValueError if the database name is invalid. ''' if os.path.sep in name: raise ValueError('Invalid database name {}'.format(name)) return os.path.join(os.path.expanduser(dump_path), hostname or 'localhost', name) def create_parent_directory_for_dump(dump_path): ''' Create a directory to contain the given dump path. ''' os.makedirs(os.path.dirname(dump_path), mode=0o700, exist_ok=True) def create_named_pipe_for_dump(dump_path): ''' Create a named pipe at the given dump path. ''' create_parent_directory_for_dump(dump_path) os.mkfifo(dump_path, mode=0o600) def remove_database_dumps(dump_path, database_type_name, log_prefix, dry_run): ''' Remove all database dumps in the given dump directory path (including the directory itself). If this is a dry run, then don't actually remove anything. 
''' dry_run_label = ' (dry run; not actually removing anything)' if dry_run else '' logger.debug( '{}: Removing {} database dumps{}'.format(log_prefix, database_type_name, dry_run_label) ) expanded_path = os.path.expanduser(dump_path) if dry_run: return if os.path.exists(expanded_path): shutil.rmtree(expanded_path) def convert_glob_patterns_to_borg_patterns(patterns): ''' Convert a sequence of shell glob patterns like "/etc/*" to the corresponding Borg archive patterns like "sh:etc/*". ''' return ['sh:{}'.format(pattern.lstrip(os.path.sep)) for pattern in patterns] borgmatic-1.7.9/borgmatic/hooks/healthchecks.py000066400000000000000000000114761440467744700216230ustar00rootroot00000000000000import logging import requests from borgmatic.hooks import monitor logger = logging.getLogger(__name__) MONITOR_STATE_TO_HEALTHCHECKS = { monitor.State.START: 'start', monitor.State.FINISH: None, # Healthchecks doesn't append to the URL for the finished state. monitor.State.FAIL: 'fail', monitor.State.LOG: 'log', } PAYLOAD_TRUNCATION_INDICATOR = '...\n' DEFAULT_PING_BODY_LIMIT_BYTES = 100000 class Forgetful_buffering_handler(logging.Handler): ''' A buffering log handler that stores log messages in memory, and throws away messages (oldest first) once a particular capacity in bytes is reached. But if the given byte capacity is zero, don't throw away any messages. 
''' def __init__(self, byte_capacity, log_level): super().__init__() self.byte_capacity = byte_capacity self.byte_count = 0 self.buffer = [] self.forgot = False self.setLevel(log_level) def emit(self, record): message = record.getMessage() + '\n' self.byte_count += len(message) self.buffer.append(message) if not self.byte_capacity: return while self.byte_count > self.byte_capacity and self.buffer: self.byte_count -= len(self.buffer[0]) self.buffer.pop(0) self.forgot = True def format_buffered_logs_for_payload(): ''' Get the handler previously added to the root logger, and slurp buffered logs out of it to send to Healthchecks. ''' try: buffering_handler = next( handler for handler in logging.getLogger().handlers if isinstance(handler, Forgetful_buffering_handler) ) except StopIteration: # No handler means no payload. return '' payload = ''.join(message for message in buffering_handler.buffer) if buffering_handler.forgot: return PAYLOAD_TRUNCATION_INDICATOR + payload return payload def initialize_monitor(hook_config, config_filename, monitoring_log_level, dry_run): ''' Add a handler to the root logger that stores in memory the most recent logs emitted. That way, we can send them all to Healthchecks upon a finish or failure state. But skip this if the "send_logs" option is false. ''' if hook_config.get('send_logs') is False: return ping_body_limit = max( hook_config.get('ping_body_limit', DEFAULT_PING_BODY_LIMIT_BYTES) - len(PAYLOAD_TRUNCATION_INDICATOR), 0, ) logging.getLogger().addHandler( Forgetful_buffering_handler(ping_body_limit, monitoring_log_level) ) def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run): ''' Ping the configured Healthchecks URL or UUID, modified with the monitor.State. Use the given configuration filename in any log entries, and log to Healthchecks with the giving log level. If this is a dry run, then don't actually ping anything. 
''' ping_url = ( hook_config['ping_url'] if hook_config['ping_url'].startswith('http') else 'https://hc-ping.com/{}'.format(hook_config['ping_url']) ) dry_run_label = ' (dry run; not actually pinging)' if dry_run else '' if 'states' in hook_config and state.name.lower() not in hook_config['states']: logger.info( f'{config_filename}: Skipping Healthchecks {state.name.lower()} ping due to configured states' ) return healthchecks_state = MONITOR_STATE_TO_HEALTHCHECKS.get(state) if healthchecks_state: ping_url = '{}/{}'.format(ping_url, healthchecks_state) logger.info( '{}: Pinging Healthchecks {}{}'.format(config_filename, state.name.lower(), dry_run_label) ) logger.debug('{}: Using Healthchecks ping URL {}'.format(config_filename, ping_url)) if state in (monitor.State.FINISH, monitor.State.FAIL, monitor.State.LOG): payload = format_buffered_logs_for_payload() else: payload = '' if not dry_run: logging.getLogger('urllib3').setLevel(logging.ERROR) try: response = requests.post( ping_url, data=payload.encode('utf-8'), verify=hook_config.get('verify_tls', True) ) if not response.ok: response.raise_for_status() except requests.exceptions.RequestException as error: logger.warning(f'{config_filename}: Healthchecks error: {error}') def destroy_monitor(hook_config, config_filename, monitoring_log_level, dry_run): ''' Remove the monitor handler that was added to the root logger. This prevents the handler from getting reused by other instances of this monitor. 
''' logger = logging.getLogger() for handler in tuple(logger.handlers): if isinstance(handler, Forgetful_buffering_handler): logger.removeHandler(handler) borgmatic-1.7.9/borgmatic/hooks/mongodb.py000066400000000000000000000152141440467744700206140ustar00rootroot00000000000000import logging from borgmatic.execute import execute_command, execute_command_with_processes from borgmatic.hooks import dump logger = logging.getLogger(__name__) def make_dump_path(location_config): # pragma: no cover ''' Make the dump path from the given location configuration and the name of this hook. ''' return dump.make_database_dump_path( location_config.get('borgmatic_source_directory'), 'mongodb_databases' ) def dump_databases(databases, log_prefix, location_config, dry_run): ''' Dump the given MongoDB databases to a named pipe. The databases are supplied as a sequence of dicts, one dict describing each database as per the configuration schema. Use the given log prefix in any log entries. Use the given location configuration dict to construct the destination path. Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence. 
''' dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else '' logger.info('{}: Dumping MongoDB databases{}'.format(log_prefix, dry_run_label)) processes = [] for database in databases: name = database['name'] dump_filename = dump.make_database_dump_filename( make_dump_path(location_config), name, database.get('hostname') ) dump_format = database.get('format', 'archive') logger.debug( '{}: Dumping MongoDB database {} to {}{}'.format( log_prefix, name, dump_filename, dry_run_label ) ) if dry_run: continue command = build_dump_command(database, dump_filename, dump_format) if dump_format == 'directory': dump.create_parent_directory_for_dump(dump_filename) execute_command(command, shell=True) else: dump.create_named_pipe_for_dump(dump_filename) processes.append(execute_command(command, shell=True, run_to_completion=False)) return processes def build_dump_command(database, dump_filename, dump_format): ''' Return the mongodump command from a single database configuration. ''' all_databases = database['name'] == 'all' command = ['mongodump'] if dump_format == 'directory': command.extend(('--out', dump_filename)) if 'hostname' in database: command.extend(('--host', database['hostname'])) if 'port' in database: command.extend(('--port', str(database['port']))) if 'username' in database: command.extend(('--username', database['username'])) if 'password' in database: command.extend(('--password', database['password'])) if 'authentication_database' in database: command.extend(('--authenticationDatabase', database['authentication_database'])) if not all_databases: command.extend(('--db', database['name'])) if 'options' in database: command.extend(database['options'].split(' ')) if dump_format != 'directory': command.extend(('--archive', '>', dump_filename)) return command def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover ''' Remove all database dump files for this hook regardless of the given databases. 
Use the log prefix in any log entries. Use the given location configuration dict to construct the destination path. If this is a dry run, then don't actually remove anything. ''' dump.remove_database_dumps(make_dump_path(location_config), 'MongoDB', log_prefix, dry_run) def make_database_dump_pattern( databases, log_prefix, location_config, name=None ): # pragma: no cover ''' Given a sequence of configurations dicts, a prefix to log with, a location configuration dict, and a database name to match, return the corresponding glob patterns to match the database dump in an archive. ''' return dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*') def restore_database_dump(database_config, log_prefix, location_config, dry_run, extract_process): ''' Restore the given MongoDB database from an extract stream. The database is supplied as a one-element sequence containing a dict describing the database, as per the configuration schema. Use the given log prefix in any log entries. If this is a dry run, then don't actually restore anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce output to consume. If the extract process is None, then restore the dump from the filesystem rather than from an extract stream. ''' dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else '' if len(database_config) != 1: raise ValueError('The database configuration value is invalid') database = database_config[0] dump_filename = dump.make_database_dump_filename( make_dump_path(location_config), database['name'], database.get('hostname') ) restore_command = build_restore_command(extract_process, database, dump_filename) logger.debug( '{}: Restoring MongoDB database {}{}'.format(log_prefix, database['name'], dry_run_label) ) if dry_run: return # Don't give Borg local path so as to error on warnings, as "borg extract" only gives a warning # if the restore paths don't exist in the archive. 
execute_command_with_processes( restore_command, [extract_process] if extract_process else [], output_log_level=logging.DEBUG, input_file=extract_process.stdout if extract_process else None, ) def build_restore_command(extract_process, database, dump_filename): ''' Return the mongorestore command from a single database configuration. ''' command = ['mongorestore'] if extract_process: command.append('--archive') else: command.extend(('--dir', dump_filename)) if database['name'] != 'all': command.extend(('--drop', '--db', database['name'])) if 'hostname' in database: command.extend(('--host', database['hostname'])) if 'port' in database: command.extend(('--port', str(database['port']))) if 'username' in database: command.extend(('--username', database['username'])) if 'password' in database: command.extend(('--password', database['password'])) if 'authentication_database' in database: command.extend(('--authenticationDatabase', database['authentication_database'])) if 'restore_options' in database: command.extend(database['restore_options'].split(' ')) return command borgmatic-1.7.9/borgmatic/hooks/monitor.py000066400000000000000000000002641440467744700206550ustar00rootroot00000000000000from enum import Enum MONITOR_HOOK_NAMES = ('healthchecks', 'cronitor', 'cronhub', 'pagerduty', 'ntfy') class State(Enum): START = 1 FINISH = 2 FAIL = 3 LOG = 4 borgmatic-1.7.9/borgmatic/hooks/mysql.py000066400000000000000000000215771440467744700203450ustar00rootroot00000000000000import copy import logging import os from borgmatic.execute import ( execute_command, execute_command_and_capture_output, execute_command_with_processes, ) from borgmatic.hooks import dump logger = logging.getLogger(__name__) def make_dump_path(location_config): # pragma: no cover ''' Make the dump path from the given location configuration and the name of this hook. 
''' return dump.make_database_dump_path( location_config.get('borgmatic_source_directory'), 'mysql_databases' ) SYSTEM_DATABASE_NAMES = ('information_schema', 'mysql', 'performance_schema', 'sys') def database_names_to_dump(database, extra_environment, log_prefix, dry_run): ''' Given a requested database config, return the corresponding sequence of database names to dump. In the case of "all", query for the names of databases on the configured host and return them, excluding any system databases that will cause problems during restore. ''' if database['name'] != 'all': return (database['name'],) if dry_run: return () show_command = ( ('mysql',) + (tuple(database['list_options'].split(' ')) if 'list_options' in database else ()) + (('--host', database['hostname']) if 'hostname' in database else ()) + (('--port', str(database['port'])) if 'port' in database else ()) + (('--protocol', 'tcp') if 'hostname' in database or 'port' in database else ()) + (('--user', database['username']) if 'username' in database else ()) + ('--skip-column-names', '--batch') + ('--execute', 'show schemas') ) logger.debug(f'{log_prefix}: Querying for "all" MySQL databases to dump') show_output = execute_command_and_capture_output( show_command, extra_environment=extra_environment ) return tuple( show_name for show_name in show_output.strip().splitlines() if show_name not in SYSTEM_DATABASE_NAMES ) def execute_dump_command( database, log_prefix, dump_path, database_names, extra_environment, dry_run, dry_run_label ): ''' Kick off a dump for the given MySQL/MariaDB database (provided as a configuration dict) to a named pipe constructed from the given dump path and database names. Use the given log prefix in any log entries. Return a subprocess.Popen instance for the dump process ready to spew to a named pipe. But if this is a dry run, then don't actually dump anything and return None. 
''' database_name = database['name'] dump_filename = dump.make_database_dump_filename( dump_path, database['name'], database.get('hostname') ) if os.path.exists(dump_filename): logger.warning( f'{log_prefix}: Skipping duplicate dump of MySQL database "{database_name}" to {dump_filename}' ) return None dump_command = ( ('mysqldump',) + (tuple(database['options'].split(' ')) if 'options' in database else ()) + (('--add-drop-database',) if database.get('add_drop_database', True) else ()) + (('--host', database['hostname']) if 'hostname' in database else ()) + (('--port', str(database['port'])) if 'port' in database else ()) + (('--protocol', 'tcp') if 'hostname' in database or 'port' in database else ()) + (('--user', database['username']) if 'username' in database else ()) + ('--databases',) + database_names # Use shell redirection rather than execute_command(output_file=open(...)) to prevent # the open() call on a named pipe from hanging the main borgmatic process. + ('>', dump_filename) ) logger.debug( f'{log_prefix}: Dumping MySQL database "{database_name}" to {dump_filename}{dry_run_label}' ) if dry_run: return None dump.create_named_pipe_for_dump(dump_filename) return execute_command( dump_command, shell=True, extra_environment=extra_environment, run_to_completion=False, ) def dump_databases(databases, log_prefix, location_config, dry_run): ''' Dump the given MySQL/MariaDB databases to a named pipe. The databases are supplied as a sequence of dicts, one dict describing each database as per the configuration schema. Use the given log prefix in any log entries. Use the given location configuration dict to construct the destination path. Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence. 
''' dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else '' processes = [] logger.info('{}: Dumping MySQL databases{}'.format(log_prefix, dry_run_label)) for database in databases: dump_path = make_dump_path(location_config) extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None dump_database_names = database_names_to_dump( database, extra_environment, log_prefix, dry_run ) if not dump_database_names: if dry_run: continue raise ValueError('Cannot find any MySQL databases to dump.') if database['name'] == 'all' and database.get('format'): for dump_name in dump_database_names: renamed_database = copy.copy(database) renamed_database['name'] = dump_name processes.append( execute_dump_command( renamed_database, log_prefix, dump_path, (dump_name,), extra_environment, dry_run, dry_run_label, ) ) else: processes.append( execute_dump_command( database, log_prefix, dump_path, dump_database_names, extra_environment, dry_run, dry_run_label, ) ) return [process for process in processes if process] def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover ''' Remove all database dump files for this hook regardless of the given databases. Use the log prefix in any log entries. Use the given location configuration dict to construct the destination path. If this is a dry run, then don't actually remove anything. ''' dump.remove_database_dumps(make_dump_path(location_config), 'MySQL', log_prefix, dry_run) def make_database_dump_pattern( databases, log_prefix, location_config, name=None ): # pragma: no cover ''' Given a sequence of configurations dicts, a prefix to log with, a location configuration dict, and a database name to match, return the corresponding glob patterns to match the database dump in an archive. 
''' return dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*') def restore_database_dump(database_config, log_prefix, location_config, dry_run, extract_process): ''' Restore the given MySQL/MariaDB database from an extract stream. The database is supplied as a one-element sequence containing a dict describing the database, as per the configuration schema. Use the given log prefix in any log entries. If this is a dry run, then don't actually restore anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce output to consume. ''' dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else '' if len(database_config) != 1: raise ValueError('The database configuration value is invalid') database = database_config[0] restore_command = ( ('mysql', '--batch') + (tuple(database['restore_options'].split(' ')) if 'restore_options' in database else ()) + (('--host', database['hostname']) if 'hostname' in database else ()) + (('--port', str(database['port'])) if 'port' in database else ()) + (('--protocol', 'tcp') if 'hostname' in database or 'port' in database else ()) + (('--user', database['username']) if 'username' in database else ()) ) extra_environment = {'MYSQL_PWD': database['password']} if 'password' in database else None logger.debug( '{}: Restoring MySQL database {}{}'.format(log_prefix, database['name'], dry_run_label) ) if dry_run: return # Don't give Borg local path so as to error on warnings, as "borg extract" only gives a warning # if the restore paths don't exist in the archive. 
execute_command_with_processes( restore_command, [extract_process], output_log_level=logging.DEBUG, input_file=extract_process.stdout, extra_environment=extra_environment, ) borgmatic-1.7.9/borgmatic/hooks/ntfy.py000066400000000000000000000054141440467744700201500ustar00rootroot00000000000000import logging import requests logger = logging.getLogger(__name__) def initialize_monitor( ping_url, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No initialization is necessary for this monitor. ''' pass def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run): ''' Ping the configured Ntfy topic. Use the given configuration filename in any log entries. If this is a dry run, then don't actually ping anything. ''' run_states = hook_config.get('states', ['fail']) if state.name.lower() in run_states: dry_run_label = ' (dry run; not actually pinging)' if dry_run else '' state_config = hook_config.get( state.name.lower(), { 'title': f'A Borgmatic {state.name} event happened', 'message': f'A Borgmatic {state.name} event happened', 'priority': 'default', 'tags': 'borgmatic', }, ) base_url = hook_config.get('server', 'https://ntfy.sh') topic = hook_config.get('topic') logger.info(f'{config_filename}: Pinging ntfy topic {topic}{dry_run_label}') logger.debug(f'{config_filename}: Using Ntfy ping URL {base_url}/{topic}') headers = { 'X-Title': state_config.get('title'), 'X-Message': state_config.get('message'), 'X-Priority': state_config.get('priority'), 'X-Tags': state_config.get('tags'), } username = hook_config.get('username') password = hook_config.get('password') auth = None if (username and password) is not None: auth = requests.auth.HTTPBasicAuth(username, password) logger.info(f'{config_filename}: Using basic auth with user {username} for ntfy') elif username is not None: logger.warning( f'{config_filename}: Password missing for ntfy authentication, defaulting to no auth' ) elif password is not None: logger.warning( 
f'{config_filename}: Username missing for ntfy authentication, defaulting to no auth' ) if not dry_run: logging.getLogger('urllib3').setLevel(logging.ERROR) try: response = requests.post(f'{base_url}/{topic}', headers=headers, auth=auth) if not response.ok: response.raise_for_status() except requests.exceptions.RequestException as error: logger.warning(f'{config_filename}: ntfy error: {error}') def destroy_monitor( ping_url_or_uuid, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No destruction is necessary for this monitor. ''' pass borgmatic-1.7.9/borgmatic/hooks/pagerduty.py000066400000000000000000000051311440467744700211700ustar00rootroot00000000000000import datetime import json import logging import platform import requests from borgmatic.hooks import monitor logger = logging.getLogger(__name__) EVENTS_API_URL = 'https://events.pagerduty.com/v2/enqueue' def initialize_monitor( integration_key, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No initialization is necessary for this monitor. ''' pass def ping_monitor(hook_config, config_filename, state, monitoring_log_level, dry_run): ''' If this is an error state, create a PagerDuty event with the configured integration key. Use the given configuration filename in any log entries. If this is a dry run, then don't actually create an event. 
''' if state != monitor.State.FAIL: logger.debug( '{}: Ignoring unsupported monitoring {} in PagerDuty hook'.format( config_filename, state.name.lower() ) ) return dry_run_label = ' (dry run; not actually sending)' if dry_run else '' logger.info('{}: Sending failure event to PagerDuty {}'.format(config_filename, dry_run_label)) if dry_run: return hostname = platform.node() local_timestamp = ( datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).astimezone().isoformat() ) payload = json.dumps( { 'routing_key': hook_config['integration_key'], 'event_action': 'trigger', 'payload': { 'summary': 'backup failed on {}'.format(hostname), 'severity': 'error', 'source': hostname, 'timestamp': local_timestamp, 'component': 'borgmatic', 'group': 'backups', 'class': 'backup failure', 'custom_details': { 'hostname': hostname, 'configuration filename': config_filename, 'server time': local_timestamp, }, }, } ) logger.debug('{}: Using PagerDuty payload: {}'.format(config_filename, payload)) logging.getLogger('urllib3').setLevel(logging.ERROR) try: response = requests.post(EVENTS_API_URL, data=payload.encode('utf-8')) if not response.ok: response.raise_for_status() except requests.exceptions.RequestException as error: logger.warning(f'{config_filename}: PagerDuty error: {error}') def destroy_monitor( ping_url_or_uuid, config_filename, monitoring_log_level, dry_run ): # pragma: no cover ''' No destruction is necessary for this monitor. ''' pass borgmatic-1.7.9/borgmatic/hooks/postgresql.py000066400000000000000000000245531440467744700214000ustar00rootroot00000000000000import csv import logging import os from borgmatic.execute import ( execute_command, execute_command_and_capture_output, execute_command_with_processes, ) from borgmatic.hooks import dump logger = logging.getLogger(__name__) def make_dump_path(location_config): # pragma: no cover ''' Make the dump path from the given location configuration and the name of this hook. 
''' return dump.make_database_dump_path( location_config.get('borgmatic_source_directory'), 'postgresql_databases' ) def make_extra_environment(database): ''' Make the extra_environment dict from the given database configuration. ''' extra = dict() if 'password' in database: extra['PGPASSWORD'] = database['password'] extra['PGSSLMODE'] = database.get('ssl_mode', 'disable') if 'ssl_cert' in database: extra['PGSSLCERT'] = database['ssl_cert'] if 'ssl_key' in database: extra['PGSSLKEY'] = database['ssl_key'] if 'ssl_root_cert' in database: extra['PGSSLROOTCERT'] = database['ssl_root_cert'] if 'ssl_crl' in database: extra['PGSSLCRL'] = database['ssl_crl'] return extra EXCLUDED_DATABASE_NAMES = ('template0', 'template1') def database_names_to_dump(database, extra_environment, log_prefix, dry_run): ''' Given a requested database config, return the corresponding sequence of database names to dump. In the case of "all" when a database format is given, query for the names of databases on the configured host and return them. For "all" without a database format, just return a sequence containing "all". 
''' requested_name = database['name'] if requested_name != 'all': return (requested_name,) if not database.get('format'): return ('all',) if dry_run: return () list_command = ( ('psql', '--list', '--no-password', '--csv', '--tuples-only') + (('--host', database['hostname']) if 'hostname' in database else ()) + (('--port', str(database['port'])) if 'port' in database else ()) + (('--username', database['username']) if 'username' in database else ()) + (tuple(database['list_options'].split(' ')) if 'list_options' in database else ()) ) logger.debug(f'{log_prefix}: Querying for "all" PostgreSQL databases to dump') list_output = execute_command_and_capture_output( list_command, extra_environment=extra_environment ) return tuple( row[0] for row in csv.reader(list_output.splitlines(), delimiter=',', quotechar='"') if row[0] not in EXCLUDED_DATABASE_NAMES ) def dump_databases(databases, log_prefix, location_config, dry_run): ''' Dump the given PostgreSQL databases to a named pipe. The databases are supplied as a sequence of dicts, one dict describing each database as per the configuration schema. Use the given log prefix in any log entries. Use the given location configuration dict to construct the destination path. Return a sequence of subprocess.Popen instances for the dump processes ready to spew to a named pipe. But if this is a dry run, then don't actually dump anything and return an empty sequence. Raise ValueError if the databases to dump cannot be determined. 
''' dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else '' processes = [] logger.info('{}: Dumping PostgreSQL databases{}'.format(log_prefix, dry_run_label)) for database in databases: extra_environment = make_extra_environment(database) dump_path = make_dump_path(location_config) dump_database_names = database_names_to_dump( database, extra_environment, log_prefix, dry_run ) if not dump_database_names: if dry_run: continue raise ValueError('Cannot find any PostgreSQL databases to dump.') for database_name in dump_database_names: dump_format = database.get('format', None if database_name == 'all' else 'custom') default_dump_command = 'pg_dumpall' if database_name == 'all' else 'pg_dump' dump_command = database.get('pg_dump_command') or default_dump_command dump_filename = dump.make_database_dump_filename( dump_path, database_name, database.get('hostname') ) if os.path.exists(dump_filename): logger.warning( f'{log_prefix}: Skipping duplicate dump of PostgreSQL database "{database_name}" to {dump_filename}' ) continue command = ( (dump_command, '--no-password', '--clean', '--if-exists',) + (('--host', database['hostname']) if 'hostname' in database else ()) + (('--port', str(database['port'])) if 'port' in database else ()) + (('--username', database['username']) if 'username' in database else ()) + (('--format', dump_format) if dump_format else ()) + (('--file', dump_filename) if dump_format == 'directory' else ()) + (tuple(database['options'].split(' ')) if 'options' in database else ()) + (() if database_name == 'all' else (database_name,)) # Use shell redirection rather than the --file flag to sidestep synchronization issues # when pg_dump/pg_dumpall tries to write to a named pipe. But for the directory dump # format in a particular, a named destination is required, and redirection doesn't work. 
+ (('>', dump_filename) if dump_format != 'directory' else ()) ) logger.debug( f'{log_prefix}: Dumping PostgreSQL database "{database_name}" to {dump_filename}{dry_run_label}' ) if dry_run: continue if dump_format == 'directory': dump.create_parent_directory_for_dump(dump_filename) execute_command( command, shell=True, extra_environment=extra_environment, ) else: dump.create_named_pipe_for_dump(dump_filename) processes.append( execute_command( command, shell=True, extra_environment=extra_environment, run_to_completion=False, ) ) return processes def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover ''' Remove all database dump files for this hook regardless of the given databases. Use the log prefix in any log entries. Use the given location configuration dict to construct the destination path. If this is a dry run, then don't actually remove anything. ''' dump.remove_database_dumps(make_dump_path(location_config), 'PostgreSQL', log_prefix, dry_run) def make_database_dump_pattern( databases, log_prefix, location_config, name=None ): # pragma: no cover ''' Given a sequence of configurations dicts, a prefix to log with, a location configuration dict, and a database name to match, return the corresponding glob patterns to match the database dump in an archive. ''' return dump.make_database_dump_filename(make_dump_path(location_config), name, hostname='*') def restore_database_dump(database_config, log_prefix, location_config, dry_run, extract_process): ''' Restore the given PostgreSQL database from an extract stream. The database is supplied as a one-element sequence containing a dict describing the database, as per the configuration schema. Use the given log prefix in any log entries. If this is a dry run, then don't actually restore anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce output to consume. 
If the extract process is None, then restore the dump from the filesystem rather than from an extract stream. ''' dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else '' if len(database_config) != 1: raise ValueError('The database configuration value is invalid') database = database_config[0] all_databases = bool(database['name'] == 'all') dump_filename = dump.make_database_dump_filename( make_dump_path(location_config), database['name'], database.get('hostname') ) psql_command = database.get('psql_command') or 'psql' analyze_command = ( (psql_command, '--no-password', '--quiet') + (('--host', database['hostname']) if 'hostname' in database else ()) + (('--port', str(database['port'])) if 'port' in database else ()) + (('--username', database['username']) if 'username' in database else ()) + (('--dbname', database['name']) if not all_databases else ()) + (tuple(database['analyze_options'].split(' ')) if 'analyze_options' in database else ()) + ('--command', 'ANALYZE') ) pg_restore_command = database.get('pg_restore_command') or 'pg_restore' restore_command = ( (psql_command if all_databases else pg_restore_command, '--no-password') + ( ('--if-exists', '--exit-on-error', '--clean', '--dbname', database['name']) if not all_databases else () ) + (('--host', database['hostname']) if 'hostname' in database else ()) + (('--port', str(database['port'])) if 'port' in database else ()) + (('--username', database['username']) if 'username' in database else ()) + (tuple(database['restore_options'].split(' ')) if 'restore_options' in database else ()) + (() if extract_process else (dump_filename,)) ) extra_environment = make_extra_environment(database) logger.debug( '{}: Restoring PostgreSQL database {}{}'.format(log_prefix, database['name'], dry_run_label) ) if dry_run: return # Don't give Borg local path so as to error on warnings, as "borg extract" only gives a warning # if the restore paths don't exist in the archive. 
execute_command_with_processes( restore_command, [extract_process] if extract_process else [], output_log_level=logging.DEBUG, input_file=extract_process.stdout if extract_process else None, extra_environment=extra_environment, ) execute_command(analyze_command, extra_environment=extra_environment) borgmatic-1.7.9/borgmatic/hooks/sqlite.py000066400000000000000000000112541440467744700204700ustar00rootroot00000000000000import logging import os from borgmatic.execute import execute_command, execute_command_with_processes from borgmatic.hooks import dump logger = logging.getLogger(__name__) def make_dump_path(location_config): # pragma: no cover ''' Make the dump path from the given location configuration and the name of this hook. ''' return dump.make_database_dump_path( location_config.get('borgmatic_source_directory'), 'sqlite_databases' ) def dump_databases(databases, log_prefix, location_config, dry_run): ''' Dump the given SQLite3 databases to a file. The databases are supplied as a sequence of configuration dicts, as per the configuration schema. Use the given log prefix in any log entries. Use the given location configuration dict to construct the destination path. If this is a dry run, then don't actually dump anything. 
''' dry_run_label = ' (dry run; not actually dumping anything)' if dry_run else '' processes = [] logger.info('{}: Dumping SQLite databases{}'.format(log_prefix, dry_run_label)) for database in databases: database_path = database['path'] if database['name'] == 'all': logger.warning('The "all" database name has no meaning for SQLite3 databases') if not os.path.exists(database_path): logger.warning( f'{log_prefix}: No SQLite database at {database_path}; An empty database will be created and dumped' ) dump_path = make_dump_path(location_config) dump_filename = dump.make_database_dump_filename(dump_path, database['name']) if os.path.exists(dump_filename): logger.warning( f'{log_prefix}: Skipping duplicate dump of SQLite database at {database_path} to {dump_filename}' ) continue command = ( 'sqlite3', database_path, '.dump', '>', dump_filename, ) logger.debug( f'{log_prefix}: Dumping SQLite database at {database_path} to {dump_filename}{dry_run_label}' ) if dry_run: continue dump.create_parent_directory_for_dump(dump_filename) processes.append(execute_command(command, shell=True, run_to_completion=False)) return processes def remove_database_dumps(databases, log_prefix, location_config, dry_run): # pragma: no cover ''' Remove the given SQLite3 database dumps from the filesystem. The databases are supplied as a sequence of configuration dicts, as per the configuration schema. Use the given log prefix in any log entries. Use the given location configuration dict to construct the destination path. If this is a dry run, then don't actually remove anything. ''' dump.remove_database_dumps(make_dump_path(location_config), 'SQLite', log_prefix, dry_run) def make_database_dump_pattern( databases, log_prefix, location_config, name=None ): # pragma: no cover ''' Make a pattern that matches the given SQLite3 databases. The databases are supplied as a sequence of configuration dicts, as per the configuration schema. 
''' return dump.make_database_dump_filename(make_dump_path(location_config), name) def restore_database_dump(database_config, log_prefix, location_config, dry_run, extract_process): ''' Restore the given SQLite3 database from an extract stream. The database is supplied as a one-element sequence containing a dict describing the database, as per the configuration schema. Use the given log prefix in any log entries. If this is a dry run, then don't actually restore anything. Trigger the given active extract process (an instance of subprocess.Popen) to produce output to consume. ''' dry_run_label = ' (dry run; not actually restoring anything)' if dry_run else '' if len(database_config) != 1: raise ValueError('The database configuration value is invalid') database_path = database_config[0]['path'] logger.debug(f'{log_prefix}: Restoring SQLite database at {database_path}{dry_run_label}') if dry_run: return try: os.remove(database_path) logger.warning(f'{log_prefix}: Removed existing SQLite database at {database_path}') except FileNotFoundError: # pragma: no cover pass restore_command = ( 'sqlite3', database_path, ) # Don't give Borg local path so as to error on warnings, as "borg extract" only gives a warning # if the restore paths don't exist in the archive. execute_command_with_processes( restore_command, [extract_process], output_log_level=logging.DEBUG, input_file=extract_process.stdout, ) borgmatic-1.7.9/borgmatic/logger.py000066400000000000000000000152661440467744700173320ustar00rootroot00000000000000import logging import logging.handlers import os import sys import colorama def to_bool(arg): ''' Return a boolean value based on `arg`. ''' if arg is None or isinstance(arg, bool): return arg if isinstance(arg, str): arg = arg.lower() if arg in ('yes', 'on', '1', 'true', 1): return True return False def interactive_console(): ''' Return whether the current console is "interactive". Meaning: Capable of user input and not just something like a cron job. 
''' return sys.stderr.isatty() and os.environ.get('TERM') != 'dumb' def should_do_markup(no_color, configs): ''' Given the value of the command-line no-color argument, and a dict of configuration filename to corresponding parsed configuration, determine if we should enable colorama marking up. ''' if no_color: return False if any(config.get('output', {}).get('color') is False for config in configs.values()): return False py_colors = os.environ.get('PY_COLORS', None) if py_colors is not None: return to_bool(py_colors) return interactive_console() class Multi_stream_handler(logging.Handler): ''' A logging handler that dispatches each log record to one of multiple stream handlers depending on the record's log level. ''' def __init__(self, log_level_to_stream_handler): super(Multi_stream_handler, self).__init__() self.log_level_to_handler = log_level_to_stream_handler self.handlers = set(self.log_level_to_handler.values()) def flush(self): # pragma: no cover super(Multi_stream_handler, self).flush() for handler in self.handlers: handler.flush() def emit(self, record): ''' Dispatch the log record to the approriate stream handler for the record's log level. ''' self.log_level_to_handler[record.levelno].emit(record) def setFormatter(self, formatter): # pragma: no cover super(Multi_stream_handler, self).setFormatter(formatter) for handler in self.handlers: handler.setFormatter(formatter) def setLevel(self, level): # pragma: no cover super(Multi_stream_handler, self).setLevel(level) for handler in self.handlers: handler.setLevel(level) class Console_color_formatter(logging.Formatter): def format(self, record): add_custom_log_levels() color = { logging.CRITICAL: colorama.Fore.RED, logging.ERROR: colorama.Fore.RED, logging.WARN: colorama.Fore.YELLOW, logging.ANSWER: colorama.Fore.MAGENTA, logging.INFO: colorama.Fore.GREEN, logging.DEBUG: colorama.Fore.CYAN, }.get(record.levelno) return color_text(color, record.msg) def color_text(color, message): ''' Give colored text. 
''' if not color: return message return '{}{}{}'.format(color, message, colorama.Style.RESET_ALL) def add_logging_level(level_name, level_number): ''' Globally add a custom logging level based on the given (all uppercase) level name and number. Do this idempotently. Inspired by https://stackoverflow.com/questions/2183233/how-to-add-a-custom-loglevel-to-pythons-logging-facility/35804945#35804945 ''' method_name = level_name.lower() if not hasattr(logging, level_name): logging.addLevelName(level_number, level_name) setattr(logging, level_name, level_number) if not hasattr(logging, method_name): def log_for_level(self, message, *args, **kwargs): # pragma: no cover if self.isEnabledFor(level_number): self._log(level_number, message, args, **kwargs) setattr(logging.getLoggerClass(), method_name, log_for_level) if not hasattr(logging.getLoggerClass(), method_name): def log_to_root(message, *args, **kwargs): # pragma: no cover logging.log(level_number, message, *args, **kwargs) setattr(logging, method_name, log_to_root) ANSWER = logging.WARN - 5 def add_custom_log_levels(): # pragma: no cover ''' Add a custom log level between WARN and INFO for user-requested answers. ''' add_logging_level('ANSWER', ANSWER) def configure_logging( console_log_level, syslog_log_level=None, log_file_log_level=None, monitoring_log_level=None, log_file=None, ): ''' Configure logging to go to both the console and (syslog or log file). Use the given log levels, respectively. Raise FileNotFoundError or PermissionError if the log file could not be opened for writing. ''' if syslog_log_level is None: syslog_log_level = console_log_level if log_file_log_level is None: log_file_log_level = console_log_level if monitoring_log_level is None: monitoring_log_level = console_log_level add_custom_log_levels() # Log certain log levels to console stderr and others to stdout. This supports use cases like # grepping (non-error) output. 
console_error_handler = logging.StreamHandler(sys.stderr) console_standard_handler = logging.StreamHandler(sys.stdout) console_handler = Multi_stream_handler( { logging.CRITICAL: console_error_handler, logging.ERROR: console_error_handler, logging.WARN: console_error_handler, logging.ANSWER: console_standard_handler, logging.INFO: console_standard_handler, logging.DEBUG: console_standard_handler, } ) console_handler.setFormatter(Console_color_formatter()) console_handler.setLevel(console_log_level) syslog_path = None if log_file is None: if os.path.exists('/dev/log'): syslog_path = '/dev/log' elif os.path.exists('/var/run/syslog'): syslog_path = '/var/run/syslog' elif os.path.exists('/var/run/log'): syslog_path = '/var/run/log' if syslog_path and not interactive_console(): syslog_handler = logging.handlers.SysLogHandler(address=syslog_path) syslog_handler.setFormatter(logging.Formatter('borgmatic: %(levelname)s %(message)s')) syslog_handler.setLevel(syslog_log_level) handlers = (console_handler, syslog_handler) elif log_file: file_handler = logging.handlers.WatchedFileHandler(log_file) file_handler.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)s: %(message)s')) file_handler.setLevel(log_file_log_level) handlers = (console_handler, file_handler) else: handlers = (console_handler,) logging.basicConfig( level=min(console_log_level, syslog_log_level, log_file_log_level, monitoring_log_level), handlers=handlers, ) borgmatic-1.7.9/borgmatic/signals.py000066400000000000000000000020301440467744700174740ustar00rootroot00000000000000import logging import os import signal import sys logger = logging.getLogger(__name__) EXIT_CODE_FROM_SIGNAL = 128 def handle_signal(signal_number, frame): ''' Send the signal to all processes in borgmatic's process group, which includes child processes. ''' # Prevent infinite signal handler recursion. If the parent frame is this very same handler # function, we know we're recursing. 
if frame.f_back.f_code.co_name == handle_signal.__name__: return os.killpg(os.getpgrp(), signal_number) if signal_number == signal.SIGTERM: logger.critical('Exiting due to TERM signal') sys.exit(EXIT_CODE_FROM_SIGNAL + signal.SIGTERM) def configure_signals(): ''' Configure borgmatic's signal handlers to pass relevant signals through to any child processes like Borg. Note that SIGINT gets passed through even without these changes. ''' for signal_number in (signal.SIGHUP, signal.SIGTERM, signal.SIGUSR1, signal.SIGUSR2): signal.signal(signal_number, handle_signal) borgmatic-1.7.9/borgmatic/verbosity.py000066400000000000000000000010041440467744700200620ustar00rootroot00000000000000import logging import borgmatic.logger VERBOSITY_ERROR = -1 VERBOSITY_ANSWER = 0 VERBOSITY_SOME = 1 VERBOSITY_LOTS = 2 def verbosity_to_log_level(verbosity): ''' Given a borgmatic verbosity value, return the corresponding Python log level. ''' borgmatic.logger.add_custom_log_levels() return { VERBOSITY_ERROR: logging.ERROR, VERBOSITY_ANSWER: logging.ANSWER, VERBOSITY_SOME: logging.INFO, VERBOSITY_LOTS: logging.DEBUG, }.get(verbosity, logging.WARNING) borgmatic-1.7.9/docs/000077500000000000000000000000001440467744700144505ustar00rootroot00000000000000borgmatic-1.7.9/docs/Dockerfile000066400000000000000000000026111440467744700164420ustar00rootroot00000000000000FROM alpine:3.17.1 as borgmatic COPY . 
/app RUN apk add --no-cache py3-pip py3-ruamel.yaml py3-ruamel.yaml.clib RUN pip install --no-cache /app && generate-borgmatic-config && chmod +r /etc/borgmatic/config.yaml RUN borgmatic --help > /command-line.txt \ && for action in rcreate transfer create prune compact check extract export-tar mount umount restore rlist list rinfo info break-lock borg; do \ echo -e "\n--------------------------------------------------------------------------------\n" >> /command-line.txt \ && borgmatic "$action" --help >> /command-line.txt; done FROM node:19.5.0-alpine as html ARG ENVIRONMENT=production WORKDIR /source RUN npm install @11ty/eleventy \ @11ty/eleventy-plugin-syntaxhighlight \ @11ty/eleventy-plugin-inclusive-language \ @11ty/eleventy-navigation \ markdown-it \ markdown-it-anchor \ markdown-it-replace-link COPY --from=borgmatic /etc/borgmatic/config.yaml /source/docs/_includes/borgmatic/config.yaml COPY --from=borgmatic /command-line.txt /source/docs/_includes/borgmatic/command-line.txt COPY . /source RUN NODE_ENV=${ENVIRONMENT} npx eleventy --input=/source/docs --output=/output/docs \ && mv /output/docs/index.html /output/index.html FROM nginx:1.22.1-alpine COPY --from=html /output /usr/share/nginx/html COPY --from=borgmatic /etc/borgmatic/config.yaml /usr/share/nginx/html/docs/reference/config.yaml borgmatic-1.7.9/docs/README.md000077700000000000000000000000001440467744700174142../README.mdustar00rootroot00000000000000borgmatic-1.7.9/docs/SECURITY.md000066400000000000000000000012411440467744700162370ustar00rootroot00000000000000--- title: Security policy permalink: security-policy/index.html --- ## Supported versions While we want to hear about security vulnerabilities in all versions of borgmatic, security fixes will only be made to the most recently released version. It's not practical for our small volunteer effort to maintain multiple different release branches and put out separate security patches for each. 
## Reporting a vulnerability If you find a security vulnerability, please [file a ticket](https://torsion.org/borgmatic/#issues) or [send email directly](mailto:witten@torsion.org) as appropriate. You should expect to hear back within a few days at most, and generally sooner. borgmatic-1.7.9/docs/_data/000077500000000000000000000000001440467744700155205ustar00rootroot00000000000000borgmatic-1.7.9/docs/_data/layout.json000066400000000000000000000000231440467744700177230ustar00rootroot00000000000000"layouts/main.njk" borgmatic-1.7.9/docs/_includes/000077500000000000000000000000001440467744700164155ustar00rootroot00000000000000borgmatic-1.7.9/docs/_includes/asciinema.css000066400000000000000000000000641440467744700210600ustar00rootroot00000000000000.asciicast > iframe { width: 100% !important; } borgmatic-1.7.9/docs/_includes/components/000077500000000000000000000000001440467744700206025ustar00rootroot00000000000000borgmatic-1.7.9/docs/_includes/components/external-links.css000066400000000000000000000007541440467744700242620ustar00rootroot00000000000000/* External links */ a[href^="http://"]:not(.minilink):not(.elv-externalexempt), a[href^="https://"]:not(.minilink):not(.elv-externalexempt), a[href^="//"]:not(.minilink):not(.elv-externalexempt) { text-decoration-color: inherit; } /* External link hovers */ a[href^="http://"]:not(.minilink):not(.elv-externalexempt):hover, a[href^="https://"]:not(.minilink):not(.elv-externalexempt):hover, a[href^="//"]:not(.minilink):not(.elv-externalexempt):hover { text-decoration-color: #00bcd4; } borgmatic-1.7.9/docs/_includes/components/info-blocks.css000066400000000000000000000011741440467744700235250ustar00rootroot00000000000000/* Warning */ .elv-info { line-height: 1.5; padding: 0.8125em 1em 0.75em; /* 13px 16px 12px /16 */ margin-left: -1rem; margin-right: -1rem; margin-bottom: 2em; background-color: #dff7ff; } .elv-info:before { content: "ℹ️ "; } .elv-info-warn { background-color: #ffa; } .elv-info-warn:before { content: "⚠️ "; 
} .elv-info:first-child { margin-top: 0; } body > .elv-info { margin-left: 0; margin-right: 0; padding: .5rem 1rem; } @media (min-width: 37.5em) and (min-height: 25em) { /* 600px / 400px */ body > .elv-info-sticky { position: sticky; top: 0; z-index: 2; box-shadow: 0 3px 0 0 rgba(0,0,0,.08); } }borgmatic-1.7.9/docs/_includes/components/lists.css000066400000000000000000000120051440467744700224500ustar00rootroot00000000000000/* Buzzwords */ @keyframes rainbow { 0% { background-position: 0% 50%; } 100% { background-position: 100% 50%; } } .buzzword-list, .inlinelist { padding: 0; } .inlinelist:first-child:last-child { margin: 0; } .buzzword, .buzzword-list li, .inlinelist .inlinelist-item { display: inline; -webkit-box-decoration-break: clone; box-decoration-break: clone; font-family: Georgia, serif; font-size: 116%; white-space: normal; line-height: 1.85; padding: .2em .5em; margin: 4px 4px 4px 0; transition: .15s linear outline; } .inlinelist .inlinelist-item.active { background-color: #222; color: #fff; font-weight: inherit; } .inlinelist .inlinelist-item.active :link, .inlinelist .inlinelist-item.active :visited { color: #fff; } .inlinelist .inlinelist-item code { background-color: transparent; font-size: 80%; margin-left: 6px; padding-left: 6px; display: inline-block; position: relative; } @media (max-width: 26.8125em) { /* 429px */ .inlinelist .inlinelist-item { overflow: hidden; } .inlinelist .inlinelist-item code { float: right; line-height: 1.75; } } @media (min-width: 26.875em) { /* 430px */ .inlinelist .inlinelist-item code { float: none; } .inlinelist .inlinelist-item code:before { content: " "; border-left: 1px solid rgba(255,255,255,.8); position: absolute; left: -2px; top: -2px; bottom: 2px; } } a.buzzword { text-decoration: underline; } .buzzword-list a, .inlinelist a { text-decoration: none; } .inlinelist .inlinelist-item { font-size: 100%; line-height: 2; } @supports not(-webkit-box-decoration-break: clone) { .buzzword, .buzzword-list li, .inlinelist 
.inlinelist-item { display: inline-block; } } .buzzword-list li, .buzzword { background-color: #f7f7f7; } .inlinelist .inlinelist-item { background-color: #e9e9e9; } .inlinelist .inlinelist-item:hover, .inlinelist .inlinelist-item:focus, .buzzword-list li:hover, .buzzword-list li:focus, .buzzword:hover, .buzzword:focus, .rainbow-active:hover, .rainbow-active:focus { position: relative; background-image: linear-gradient(238deg, #ff0000, #ff8000, #ffff00, #80ff00, #00ff00, #00ff80, #00ffff, #0080ff, #0000ff, #8000ff, #ff0080); background-size: 1200% 1200%; background-position: 2% 80%; color: #fff; text-shadow: 0 0 2px rgba(0,0,0,.9); animation: rainbow 4s ease-out alternate infinite; } .rainbow-active-noanim { animation: none !important; } .inlinelist .inlinelist-item:hover a, .inlinelist .inlinelist-item:focus a, .buzzword-list li:hover a, .buzzword-list li:focus a, a.buzzword:hover, a.buzzword:focus, a.rainbow-active:hover, a.rainbow-active:focus { color: #fff; text-decoration: none; } @media (prefers-reduced-motion: reduce) { .inlinelist .inlinelist-item:hover, .inlinelist .inlinelist-item:focus, .buzzword-list li:hover, .buzzword-list li:focus, .buzzword:hover, .buzzword:focus, .rainbow-active:hover, .rainbow-active:focus { animation: none; } } .buzzword-list li:hover:after, .buzzword-list li:focus:after, .buzzword:hover:after, .buzzword:focus:after { font-family: system-ui, -apple-system, sans-serif; content: "Buzzword alert!!!"; position: absolute; left: 0; top: 0; max-width: 8em; color: #f00; font-weight: 700; text-transform: uppercase; transform: rotate(-10deg) translate(-25%, -125%); text-shadow: 1px 1px 5px rgba(0,0,0,.6); line-height: 1.2; pointer-events: none; } main h2 .buzzword, main h3 .buzzword, main p .buzzword { padding: 0px 7px; font-size: 1em; /* 18px /18 */ margin: 0; line-height: 1.444444444444; /* 26px /18 */ font-family: inherit; } main h2 a.buzzword, main h3 a.buzzword, main p a.buzzword { text-decoration: underline; } /* Small viewport */ 
@media (max-width: 26.8125em) { /* 429px */ .inlinelist .inlinelist-item { display: block; width: auto; padding: 0; line-height: 1.4; } .inlinelist .inlinelist-item > a { display: block; padding: .2em .5em; } } @media (min-width: 26.875em) { /* 430px */ .inlinelist .inlinelist-item > a { display: inline-block; white-space: nowrap; } } .numberflag { display: inline-flex; align-items: center; justify-content: center; background-color: #dff7ff; border-radius: 50%; width: 1.75em; height: 1.75em; font-weight: 600; } h1 .numberflag, h2 .numberflag, h3 .numberflag, h4 .numberflag, h5 .numberflag { width: 1.25em; height: 1.25em; } h2 .numberflag { position: relative; margin-right: 0.25em; /* 10px /40 */ } h2 .numberflag:after { content: " "; position: absolute; bottom: -1px; left: 0; height: 1px; background-color: #fff; width: calc(100% + 0.4em); /* 16px /40 */ } /* Super featured list on home page */ .list-superfeatured .avatar { width: calc(30px + 5vw); height: calc(30px + 5vw); max-width: 60px; max-height: 60px; margin-left: 0; } @media (max-width: 26.8125em) { /* 429px */ .list-superfeatured .inlinelist-item > a { white-space: nowrap; overflow: hidden; text-overflow: ellipsis; } } @media (min-width: 26.875em) { /* 430px */ .list-superfeatured .inlinelist-item { font-size: 110%; } } /* Only top level */ .inlinelist-no-nest ul, .inlinelist-no-nest ol { display: none; } borgmatic-1.7.9/docs/_includes/components/minilink.css000066400000000000000000000033741440467744700231350ustar00rootroot00000000000000/* Mini link */ .minilink { display: inline-block; padding: .125em .375em; text-transform: uppercase; font-size: 0.875rem; /* 14px /16 */ text-decoration: none; background-color: #ddd; border-radius: 0.1875em; /* 3px /16 */ font-weight: 500; margin: 0 0.4285714285714em 0.07142857142857em 0; /* 0 6px 1px 0 /14 */ line-height: 1.285714285714; /* 18px /14 */ font-family: system-ui, -apple-system, sans-serif; } table .minilink { margin-top: 6px; } .minilink[href] { box-shadow: 0 
1px 1px 0 rgba(0,0,0,.5); } .minilink[href]:hover, .minilink[href]:focus { background-color: #bbb; } pre + .minilink { color: #fff; border-radius: 0 0 0.2857142857143em 0.2857142857143em; /* 4px /14 */ float: right; background-color: #444; color: #fff; } pre[class*=language-] + .minilink { position: relative; top: -0.7142857142857em; /* -10px /14 */ } p.minilink { float: right; margin-left: 2em; margin-bottom: 2em; } h1 .minilink, h2 .minilink, h3 .minilink, h4 .minilink { font-size: 0.9375rem; /* 15px /16 */ vertical-align: middle; margin-left: 1em; } h3 .minilink, h4 .minilink { font-size: 0.8125rem; /* 13px /16 */ } .minilink + pre[class*=language-] { clear: both; } .minilink-addedin { text-transform: none; box-shadow: 0 0 0 1px rgba(0,0,0,0.3); } .minilink-addedin:not(:first-child) { margin-left: .5em; } .minilink-addedin.minilink-inline { margin: 0 4px; background-color: #fff; } .minilink-lower { text-transform: none; background-color: transparent; } .minilink-lower[href] { box-shadow: 0 0 0 1px rgba(0,0,0,0.5); } .minilink-lower[href]:hover, .minilink-lower[href]:focus { background-color: #eee; } .minilink > .minilink { margin: -.125em .375em -.125em -.375em; box-shadow: none; border-top-right-radius: 0; border-bottom-right-radius: 0; } borgmatic-1.7.9/docs/_includes/components/suggestion-link.html000066400000000000000000000003461440467744700246150ustar00rootroot00000000000000

Improve this documentation

Have an idea on how to make this documentation even better? Use our issue tracker to send your feedback!

borgmatic-1.7.9/docs/_includes/components/toc.css000066400000000000000000000035051440467744700221040ustar00rootroot00000000000000.elv-toc { font-size: 1rem; /* Reset */ } .elv-toc details { --details-force-closed: (max-width: 63.9375em); /* 1023px */ } .elv-toc details > summary { font-size: 1.375rem; /* 22px /16 */ margin-bottom: .5em; } @media (min-width: 64em) { /* 1024px */ .elv-toc { position: absolute; left: 3rem; width: 16rem; z-index: 1; } .elv-toc details > summary { margin-top: 0; } .js .elv-toc details > summary { display: none; } } .elv-toc-list { display: flex; flex-wrap: wrap; justify-content: space-between; padding-left: 0; padding-right: 0; margin: 0 0 2.5em; list-style: none; } .elv-toc-list li { font-size: 0.9375em; /* 15px /16 */ line-height: 1.466666666667; /* 22px /15 */ } /* Nested lists */ .elv-toc-list ul { padding: 0 0 .75em 0; margin: 0; list-style: none; } /* Menus nested 2 or more deep */ .elv-toc-list ul ul { padding-bottom: 0; padding-left: 0.625rem; /* 10px /16 */ } /* Hide inactive menus 3 or more deep */ .elv-toc-list ul ul > li:not(.elv-toc-active) > ul > li:not(.elv-toc-active) { display: none; } /* List items */ .elv-toc summary, .elv-toc-list a { padding: .15em .25em; } .elv-toc-list a { display: block; } .elv-toc-list a:not(:hover) { text-decoration: none; } .elv-toc-list li { margin: 0; padding: 0; } .elv-toc-list > li { flex-grow: 1; flex-basis: 14.375rem; /* 230px /16 */ } /* Top level links */ .elv-toc-list > li > a { color: #222; font-weight: 600; border-bottom: 1px solid #ddd; margin-bottom: 0.25em; /* 4px /16 */ } /* Active links */ .elv-toc-list li.elv-toc-active > a { background-color: #dff7ff; } .elv-toc-list ul .elv-toc-active > a:after { content: ""; } /* Show only active nested lists */ .elv-toc-list ul.elv-toc-active, .elv-toc-list li.elv-toc-active > ul { display: block; } /* Footer catgory navigation */ .elv-cat-list-active { font-weight: 600; } 
borgmatic-1.7.9/docs/_includes/header.njk000066400000000000000000000004111440467744700203450ustar00rootroot00000000000000
{% if page.url != '/' %}

borgmatic

{% endif %}

{{ title | safe }}

borgmatic-1.7.9/docs/_includes/index.css000066400000000000000000000622671440467744700202530ustar00rootroot00000000000000@font-face { font-family: BenchNine; src: url("data:font/woff2;charset=utf-8;base64,d09GMgABAAAAADFYABEAAAAAX2gAADD4AADrxwAAAAAAAAAAAAAAAAAAAAAAAAAAGh4bj2AcIAZWAEQILgmSYhEICoGQSIGBDwE2AiQDgwgLgUYABCAFOAcgDIEGG75VFezYC+A8QAq6uT9B9v+3BE1iKKT2IG91MwzFYrVtDyfavY9ii6qSIJybn7qqPfVk4Jv4IPPDqz8vFV7HmV9WXLRjVL2OAjH0oMfYZod2qMIF73BHXHv4/Ifftah4dMb/iIGvGyHJrM+/P9V7H/zP8jeJLYv8BWiW7SR6IVbskBymqWtgzVjAtacj0Zazd+vp3NO5w94M8HPr36JeLfK9tyi2UQvGNgYMYWMMcPRwRA+QkYKkioR4YGEUFkZdGFj9lbvWS734XkRYl/Dw/X07f2+lVbAEmjDQbTdqQoroJxC+7o868/ValnqbIclHIMcB+ohbYIl/N7mjLDv2IYDhYJhKLJl4wDepkfVmxuhZlZp298zsLCLJC1J+J0qOAaR9T5YLcgQVXlcoemjbv6ifY4f5g28eysziQmieNyjHNp5nrNibQZPNkF07pqVYu/Y/ABlN+P9XV27CPZACz/kBsgO0gJrlanZCUlAeqtOUOeqqKy+bNWzJt0YvmdyXrAdVlxMGurtj5p2hWY112P/v175KT2//rqOS4WIDPnM2JKqERIp41f/fpp/tfc/6/vLyLGoDVBH3XxvGMnVOTj9z3xt6b1gyjS1b1nzUJ3u0JHmd8+ePfVYLCFWAKrK1QBwgqPZ3CXfp0m6XMn2blOmIiq7LwzYShGFUDPe+imPMpII1pV5Lqa6ioiJ0ZxB2k6v/TwECgksCksiIbtU+yO33VMUhL1f+AIA+d93tAEoBgBn7e5y62gEmtXYO4skgeMqJgKrkFEOynt7+/0LUi+sZ1r3+XL58KQFt71M8Bv3+E/L56De+l8P3HaXzZza/tiPwR/OxefD5NB57wixv2OH9NS9ceRsPnZk3QxQ2P8sO5Lwep99bsOE4PMpQgaz+afvKp+75zbs+i/szOO+yya+O6M8cKp/lMN9QVaRZSKZe0U0ONkWYdPTvHLFPgG/NVPFldejfNI3/4K/6OPyhgvmqRXw9O5ob23PMjLPkDxHfUbI55aMMDXPUPNg2zh+iHhwDD++jZvfHR/8nGvKF3791Gra1j7In/ONQrYwf+VmtCybZQeiyh/+XdvFAIaw+Kv4S78j5m8yfjwVxaLimiaULz/TsSPzVpRAnuPLyTfftxcSUo6YVQc8gRiwzCysbhwSJPDL4ZPLLV6BIQFCJUmVIypxIAEECFH22m8VWHC7xpL8PkvDQE6RvYmOEi5cvm1ZmCHdLErenYXAEcm0jpm9CmaFWSKoKWV2haClUfYVmSaFbVRjWFKZ1hWWggDYUyKYCO6AgDimowwpHhsKVqfDkK3yFilBAEQsqEiUKplTBlSmEWle0t7+hPLlxee3M1/lVNJiYDR+D2Dy+d3fuLfodyNu/BkBX+vzX1w0oKT+M8WXaASTFfD/GRyBoBonanvEla4WqbztIxKW9/G6U2BxQvEnFt/W3mXT59/G3mLi4kTEN0O/iZ/h/Sj0Rv1VDxV/xcH5QCoMZUCzTGkJj8a6PSj9Q+WeorHL5shj1qY6Jjuhgl8F7THm5HVN/q0Is+oXot8K7GZoqv21Zney0k8UO+EzXYC1Isag7ENmIAd+axJ/wGgFtCOXtWGj2A9o8lnlXVRGWGgmrrNK46A+vxhmdp41Fld9kKhc/v5+J0m9H5HMh9V07iMFiOmo+h0A4zvk0GrUp
4JjHrinqxKbDVeHwRQeFJ3mSyzsa0BG4oAOLPIJ5mNb3/dbiCCtWy5M2cqWCLmCEAyVspniAEWsE6bf2ulu7jaLbGda57gBCU3jcpdCGwLxK+O/IA/E7a1zoREndb4uEcqUSMCmbgOAGbBEPBeODX0MJ8w5YPW7EldSEexHqxWt4Q/w63DZoSO+HVlBxfitmU86iMjfj1XVDtZq9nx7xuIUPZ0u8Hqrgc1og07YOCa46qjL5Gh8F7cNfwgyDeSVYDxMTFRhun28WrxU8euEeRAI1Z0Qg+x5cKoJHJfCpDAFVIKQqRFQDQHWA1ABEzXcHbv/QW/MSQiRCkQCjEsRUhoQqkFIVMqpBTnXg1ACAFanbuwQlEkU9g0VuGq3bC6bFZdOYWt5x3F9CTRmUgtKipgJpS3KsWztrcKwXwhfC9fGr8nwCt6ksKluvBZALwuZ/ncTQh42yoYYZjNXFQCBefnnYPoAAOLRssyVW4fOaOrSorclHAOVhqmmgubFY02yZghzdHriFIkT0SI1h2+hGCt/Jb7dsy9ohNN81lbR19TmR9tQ2yivWsjhWfjHJXZMW84BX6FTU1E2hzFPE4qZv3HKzF/bF6Tml2xfpHB69N+FSuULZbpsJRV4PkyoCeBYRTGNDsh2LtghbRyJ5omajWhclKz3paGed4XLTaGrH2AwmFcKIbQuRFNbU3qKHaS7ImXTGA54uVjmrIrNGrOmKYms3qBMZscXZo77es+Gal7SJrFHFsr92kK5QzEHxFBotWuWcSVdGt5ad7Su546MbmUc2dBldmsIsRyzeoYSMazUZLZYpGxIPljCKRuiRzUHYTY9NKavgxdHy5bL+oaxQ5rdv2ALYcoKog9HXXmmoL85elD1r96JAFyENsvbd260OVo+7HTzNOP6yg6ym5ARVOLkdhZyiOn0NdCIXHdBxjisPWMxAsaFsyIUBWbMvDlTh0nYUcpmqv26TJBkuWzctStzZJdHTZJ2eIev0LHHQEbn48B6guC337n0kbtk0rhTB5OKLNusq+xqsIqd0PxnQA2RAD5IEeohcRualc4a8f94knIYwpE+9xEkziJP6yAbNJE6aOKjSlCihjLppAUmkhfgERduRABKgJDGPvKtXozQqM1ZrahVslZ74xkWIsjfhTFzQV06soqIPZiQeKn0TJVQVTFBdMMmaHG+hdrkZ6gpmCBXMUF8wQ8NyCzQWLLCwYIGmgiUIxyRpZsMMRZvfj8r7jQUIcG4uQZZ1v5Png3FgC6d2E6QBRUDpEImlJD+84xLQgAB4aOLVkbT8xhxGmV/zhxeVvmCyjGa90PwPGhEOlzOPyqA7eDAMwQUirlRpNUtRmAuJhPbkKLeOLWYp2REIDBOwkcOxRXJKISVbjCA2tihGn2viEWqxOYKDSflarhW2KExig5YwSri4ks01w1rQJEO1oAVCsqNgWALHwWmxPKke0kJafVw0bo6Pljs5CWpU4eonFHyQUhUUZTykahG0uiXU1PMunvFwVishTcoM2ZejtpzC/c3H0HSS4NZ4jDRBRQChs/WLy6eqxgXvSNmzBze6tOwivM8yW4Hmf56n4BdUOSDgJUSPlGFv8tD3lzLG4VWi8KIGwsWUfPVc1JsuUUHgRWoD5FIgQm+EvIsP9QDgY7Idi1KfFVJ23WzKFNZ3M5/B8T8HH7Vwm6BU309566hlXSl7BGIYNU1fE0QIwddPjF4xO8J3Ie7dxvRV1s2mm8eHl1V9bWyqq8t0YKjh4mUJp76ws6LK51n0/dPrZ0Fsdrn6wfljNNGObAQ7LQ5rQTn7kaUzwOnbtv3ubpLy1TtBHLspOgkIlgJZ5b2APiLPE23eoevTjLaYSdSDeFbxHjIQ45gRVWG/UBt8cFyQgw+TlDLBh6N72+iJYbM7m8GIhQehYKeWR7oCAL7iYieuO59jo7rJWg2GDWEROmofgWWoiaWr8bJ+dv7ilB6fOz8htbBPcyQoeVyJ/TjetxyCix/AxwM/mESo
y3j/GgU0wftZEJhr3fC60B7hqjw1CI3G7jcoDFHhYvZ39GteqwXk3nUCQeCogqrEGTyFwiIcuEOdXEOjMLH/wrhipx2JOomIjquAVVKZrstNnaO11Uf9h1slcuqmaDVaWkZGVshl6C+G92F9ursdF6y9XYMpaKRVBa3VuCLab7ViNHf9bB59yKgJG4LFJNFiF8VBQ6QWzNPv9COG1i5tUBAecXHX0QPUzNIHzLRvBFBwfCu23qChiyr1DD66w0yq/N3M3t1R//5uF8131H+YQ+c4htjh8Z74L5h9uVnaTs6dH7tZPabX/Sij467pq2Elp+6Yctld7JLap/f/3Nlpw+v6nEd+MV9ctDV2/wLtQpyVxYShnQ22pxzUrR3y0Nbe79rRTuP+ueWUqUlJ3skl+MAm8aeOSs/4tsVrf2R/cLvslOxHgiAWwJsrRM23Vzs8sNfbi5a/dRWGGae94FMq4+A13XiXWq1pebjZG7WYITNx0GiJGjp0TW7TajyiriHL1Qo0LcMADa6HKCbrRAMIYd1NwpH9P1CYXfcAabmtUDYMgBm9XbXbmx9SLAuPiA6QihxTyDMEIQeTxr7n584UVhYNPIZ7pOTYdIjcG4Q4eXihfaIw0+Vs4xhLkzvYkJ/ggWVFxR8IZmz4RKFMKIVBmtraRw8vI25l9BHM0T1tR22xykJavjbpyuss8NjuOw/o3ZoPoIJiDZ5NzPDuu1Hx0EaJOFoq4psJTrHp9hvXYA6sxTUT4vd2I3bVNsiH7QRKg9YxDkpnlrltbWNdfU1BAHDAfCohBHs77mMI8FrSVln8raK7kad/v9BlZh4unBdNHViTKRkd4ULFBKNXUJ+vu7uoSfyHE41vFYcJhq4xNLJus63K8vR84ZXOSLudibesK4fyyMKtYbfe8ZKlo0V/B3C4lQR4ZlVdm2MR7Y5+nmH+SzbfnxqOg5f0JFQqbqXXEtixMTsvopXpKHz86dvPsrI4MbNYF/XOnWF96+3a6e21g5MzoNwnjejWOlTSQ72xj7P9PhMfMNv21Bjh4lW0Idd8tZV9MbsdtrD1e6leOxPPW9gg2jKn+YVu06mqp7uLNuSVaCGxwCKov/doN7XIg54wB04fVx/vemQHFfderGn2TSjgllx28MRTiZRlVDPG+ROT8X5cuyGjjXdlck7XCOKcodkKGveUTHBJ9sbcMOJJ79dQQ14Gjlo5jVuNC4qjsLRo4+1FjYPDsNCMuZaglIwgkqoJFRrwiNeoVDe91aHjtrtU5+1Zm1MWSle3O8lOTLXaK2QK5iTFSirhhDGNsTkdGc3mKJbkS7z2FperIF/bfF/jDIUigb0b9+xI5Q6D3LiWHK0WutxPErrc8yCpKmur/i4YYCWiVd1g0maww/B0cWbE/uAI8XXddzfOX1VZrCKC0GNwc9V9iQmWIsdOjvewDoz8B9G3GDean4MugfC6MTVUd4t1tKCT6PAVE0aEeGFEFyDGSXJfC4oNWXNR6ADf4dq7/lNPrwDKXn8k2hozARJvfRjl3M6iric+Or37ykGeM0w55ThmQj1uGaIfjH3O+mfQvdfEBxaZmqqTnh7hjc3Mr0wEOwTeob2BdXQvIdzZtoexteOwZbaaQI7ZdRxnKIDzPUsk96j3zvpDIQuJWvTrWC2kGhZ3aUTKxtMXOrRxYCVbXHZVv1OZCFdI00pHuNf5Gwr21u7Gv/UpRKpN0vFStx0s1u8Fa/oK+GERb7uJNFSQxcReA2JrAqacvd7h4OiM3BDHiv6jET7fo5yLxpceR1JLAhjo+Gsmd2blqEH3thW7L0RBCaawLBpuoqGvY7gmZwtUj6vMFn02cYXHLOHqLrvxFs4/PcFAOun1tCCB8Bn58S6IE5iXMjd7HFH5SiarqGKSZYDMFTGhfKBHSOSNiSbqUKoT2hoSjpDoXWuzGhXSI2KgnpjpdyjycaT63KXv6WbcasdObJycG5hkugAZ6667/S6Ix1vG5BHfS+7IYZbycfxLQmli
RgfSjqtsKw0UxBh4xkY8tJVl5RgLzZkC33++tRfXbuCBsSCmBfAeHBFHa1HIbit8j+HE0q1m1r9gatLoIHQ06dOkvSj6vedazLhoO352FQt97j7e9ipv26aLwwM7aZRHAtpYZHY2aTGOo/rMsTQ61Mz/tXmp9Pcr/xtjhHUqjliFzMqnrHjxOwJyJnm2jyX9EuqFPiNg8pl+cibfP/KPgOyN4AA+/iVYB94jWFoO1cv7gk/x6jgs4iswBM7isJZD9vJj10xTi6UR7zolcao4UZa0xS2J08aJC2Pm0M20HIgXMzcRO8dDsy3UKXSJzgOhH0MPpgzEgSdr1ch8kNeJiYvv5/TlE0TLxA6Jl/KMryPH7wl9G3c53tG6yVZbQvYgbHcP4+7A+RJUk1CIg/0dk0OMO+3Zu+4nBORd3uOHPltFXZbGFqFpg4dY1YO7eZrEHAFdk1PZBN0ssr088tehqU9XCJa40eBoJuO4gCFFaatBLx7JWMMHpchDWRyDD5ZF65avmdSNRMfUweQa+KVGd5WWnX/1ldJzZaUl5155tfT8D1ELWidXF/5ekVIV/x/w5kfc/KGYhmrHkcUrui77hD5e976Vr5x8aXtdzDqCRaXlwrvJj0U9uDsUAhbV5q/JfsiW07+lyUdl9u/k6FZFUmyKYisq47/ZFDV3niYGQWP7Q3f6ykzcC7z9rGX6g2dfIe9XNrcbPMlDW0mUBin7h3SgcmtHElY5EDDOFbf/w6cY7BOodE1mYKH/oqoORFWF5jbDZNb3hM98BOMZTN99NL+VedTiI75nyYYngcarzjRfYKoNxm+/M5jUpzeHVRny/Un2YXVzz6veejuqsu4/yaPDojEuLyJ7mSNPc8n+wCzCG/lInqPct83AbmXk6yVTgthYAWlPaAtTMCuKGDgZM751cjXMg/nC2d6Xin4wzMkXodxfpW+gC5ln2HO4Cv+PfYa5EH1R+is3k/Op9EW0jzXD/i/576Y59gyrD31D+inH3J1kIfOjeYpqcpmkOpKgxwqdm5OJbjx1W0C22lg0I+niI8/LrfJZ5DkszmrF5P24dSoOX0XETVkJrOI2CyfRyOiRR25kFYgxyOj5x25IdnVDsYeebE+nHHOTHUnpjMABZ7WupNF1QMs4Psp7S7XNssiRnhGVzo5MqlCUVyXvjoDOfh3zPuYpUN7M5ZMKjDWGboZHSReZAB3lwTV1wAPnpVCpzA3llJ5zXfty9/jKy8vTddnsoSuFKjnXz9WunRvXeeMWoMFIl0fXOd+tXupszmzNW5o5mjrDqyazOEP0HF6c2RTLRydvBgZ41HQyxCDH5ZvKLv8NdJeMBQAt94SwRSLVHKZ8P5LXlt+eazwqoDLJNDeDuSZQaF2T3hJZXGLc1Yw9b6kXNV7bGbZP1RQu0t5MBBZcn00Kr0pYVOYffyk/cwu5UhFEo2s66VtZX211RdRGuLaOxiTMS4gZ/cwVYZxE912PORNjYELOQTrFDSzZffLKuHR56fSzZlq2oCTRozdG/hS9cx1qRheRmVR+B5Iv04AXBaTCKPiQOI68Omvx1MaJx593qo8mN1ekNUnyF57Ye6z6tZQx3YWGlQwq3QcIT24LJhRklHns53dii2JthQPejGHlgcZ21cGEUDCpSug6eSBzwRHTsGrVkhSwGi9I0qWWRff/6r7qvAb+X8xkMA/1R6GzuLEjBp+QWEAa6wWCkcZXKt9JvIZtcz/gfT1SX0f2lMX6YL/KkRpbLd5Qudz9/IWC4qujGnMeP9tmLZavLq82Hnw4ZN8/FlZPO6sCSQsE1mvZ2d1TTl7eicU3iqVXhPheNx6/HXVst334/yhaDqVmQ1HhUqu7PPOZF+wLbGnNawZct9e2Kg86mopTq7DkrrUrspOZ+uycrU913cp2xx/dKH8mIdXkTlv39JqEbb3LidTbqUTJ70cPnnpzqTF9Pg6Fb+QSmb7XHRyiSzeShf2tt6YrnyfPftElDOP+1wEejWxgxKgD
PtFDkUVE/3yJqvIZlG3gJbDk+HLhG1w+8I94gjfxCaL+2HvM3orLWfO4BrmvADkpj5SfQPYhJyQ5iewVfSAnOfnxN13ewJbwHQwYS9Q1a9dYr/I+FfULnvoY0QBHrhmqFGAqT8QGf1AXh/+giSxMOEeyR/imhs9VVv5fjjK075fXg6u0Krfbm2EeDEyTw83d/sXh7ORXaAIfqB3UTFrS7FWSoHNeUD9OCvzKmJ4fa228gULCf4QH5B3yA8Lw2Z+vGt/cvevB6NTnefrn1hgfh95aP3W7eOnI7YKJdWWvGb5b9fPgj0F3bi0tEM+glSnCnW2hkQPf37+765uhC8kfpTBhaLRGy+SYMGqJ1qNGR8d4+eDKYJ/8pYxjrf0HPU0NR/I6xv2X1K93diheyL7UNXYqf1HvYBJAEnxfjHa379XXfbGg6fDeXU3PFFeWnowqPzkXkc6WcREKKowduvnaqSHkXizFBFGl3MOSJu1/EczfGfbIfxJm4LbLtxk8Oo9updDJFBqjiEIn813r0TekgPbK3kDbZesulWRN0HcsK3nQbz5aUdVnyDuwdaikZvNQZ/FGz15U8r0AefaII621cklgWfYwKtoucP2ExvtZpeqfUtoipsQfT/7PyPft6Ao2ibMqrwBxahFtAGWQZnvOeXimNIEnPXIAe2fLdtE7miXzPH+YfXWkEGkxqQ54caV3emlOUbnhwdf3jI8Lyt0rYFG86Llp5IYU0NnYdaSkZNhbu9csenmS+750p615OqgqBF0x65DrBKCz8RsIytxcy8tT3aIFsGoLq5bCx4w5RoxHDYGvNLC+PtnriLXr7j+wVixlhfFLAvAv5BpWtpr2J7WDFUP0358xlmGO7jqJ2AmBdAOnY1FPZWwWVKibS+7QbRPfXzUp/cS4PastO3mhupjljKoT3gZCdsZSlEnyGsbv8W3zBTnppgHZO1vWCl/XDCbMf1ZvfEHeAa6sj2V1sg6t4BUSnyIInrJSHp8/ic7IAZ0qn0E3ynUIWG2UGMpyFl92E9mMNKqwFGtLjyAkhCJBYSgKotevwwZBih1c5svvnFMIlrITUYwobw5vni/q2w0GKb05B6M5h7nR7+0z3Ld3hZPzQdk6zA7oODu2CbdvfrCVh20RitGq3xYuxFNIq2pRMbKJsDI6u+IJ3bqJtegfapdFKBW5huVj1xHlgMRCI98z0TunP7XMBzefpng8VPDgyfVv7et0kB+a6GasXSIkPaDQlUoKVI8M4WmSvZCASdOQqGTKf3Zx1CQyKwe0w4ZPCb8WWteLEpndNqIPswE6ST6DtIILa8Wb33dIydRZ+SwacXsyI6tK9eSXbxQ/ZdZkZmTXKX754ifVk8wa756iLk1lle2oFPhjC++TqP3OcDAlJPEUdGkrKx2HpP+cuUyQbEcrK7o0MeuweEAnyU4hOqObVijclpihqRVtqOjXHU+qrUoIspMmId6JeEy9uT8yebPACzblL+D1EmnuJ7oGUODlWWSlWdlpQBa1MXJBbdwh4u+Zc0pg82Bd1WL1ocXqUIXzoJJ0/i+7qWkyiGVr9hFjkLr4bm0Wl0HkyjJzApH//HZT9yi3NCszt0L/+Kuzgi/Mg+4Y+zpkRgpoj/wGkj+vBKmMTSjQrqufL2yPTte71HSVeTY2XdxDLqbkSOJ4OwEqB2ZAp2oMR56eItZDIq90BiFbtSNH/xgpa2yphQTHiHis58TVponI8tejpGWpt6g7kOcLuaOWF7w88teFS9/siU8vKzfYXxnwNvmMQ8VJ6lfk7IlPEqnwLdlZDDoBjtbw8ogZ1IbMEHnYMfQ5CDsrv4loPkHtOcwi6WZ7iWKQd2qxwQH/NFH3Inux2s/NUbbtplLOqSm1fAbZwhxrEBi6eWcVy+zB0UJpFkPjH7nGPkxwGVGczTiYCt4Vg2NiVioxwNbzMPQhcXyjkrOR5YaKBRgvkrMCts2j/3GOufh0Sircumkt848XGZRXUyLgTk4kYoSEApa7
c8lGlocVOsziHxnq1rkgAsl/Mvzz00EmAd0ktENtaxappH0Iwc76Jv/JR/O5cuQMwd989PAzFsygR87Kn0MXj+WiCs4W6d218auswUp4kKNEUlDPyAkrpm1FlMtL1vzGH9M6QA5Dgxji76b/yYmNhPnP6zA6Sk8mUZi1HZxi7HVEv36ZFAb0/1hSB25ttWKjsCqohEfwxEo77i65A7nR849JNDLOWoOo0SOPKAwJUzY8hMzKl1vxEUhlV0IjeFKrE0+AfkUBfZNJuHuqpDJuN39x0zv/lVblnYfgncY+RCHLFd45/9PRwqXo0sIvj758hTAMDOK9hEnag9/EeghznEVOaDfCjSkBByIFC6EjCqEbESrZWhkUah1aUm4fL/SGYouRVDjD1mJsTom3p/Lcaz5rwhiFjFSsdrC+Z16FwVup3tcdbRUqefcxdRtOVlkoeHl9pW0oI3Bh2M/inWvB6wfqxrtnwH3O1wRkKrtOUodKBSmARpTvQx3Q+8r32KCIyCjJiDcVxHs79W9Ojkfdzupa4CgY7xs9Pe9/CYnZt/3Z96+tBrVH3XiMJVfRwttVO6i6bG/NT8sRml2ZIvMAtZm+JO2mQjbwrXQ+b22oibsBy/m2X2WYZ15HD3Fna2KLQcYcHXgRvyZMoiLfJGPLsGRACz0SmegKviNNnYAvbSsLsq55zrnkJzQ53G0duxTHE6JGj7T/1tG3rteUIyhKc7Zqrw+PaK47WvNTs/ix5M8zsAomBnyWcTBddad0tSkCLRDHdzi/ImMMWbMlbZD2VFzCFO76R8mIh09iLpM4nqEZkT7tCmYrHN5zIWWa1MyaZ4su6S2hU9aJotySfdAEU/mPC6/oeWlZamd9OHvMlZS+ZGEb4wtGKBTIS8uITOti/UoU6hrCTcvGg/kVgeXxLKhI+WxBaTqYMG3UQCbMn7svzIIW/G5Z6tu16eoG/PLGHpBFomzeDBYwhqLSCHPbSyPVV/aseW6Nd/8H1zWBppZtKxo63LGNGUk1ujIkOIrcQt3fi0OY7S82jWKT+8M+u8UX6ayVzQwh8lvwMHJart4ZORaflH9ohJCfhuNXYp6wG1skcYc9Ekb6Ksw+YsP6MNuIHfOrcqFPcJdVotel1Jx2Yx2Y+2QJf1CRnvGnliJ8n+oLWjKKuQDtSsY6seQAmw6Uk9aNc7aIRlkizuSNATsM6t/6uJIjg4ZEc1NHnVQ63XbkwsU6KcmF1Y0HKyvdCRZp/deXHTOPQHcVtcqXIQ32HPepaBolJu7O2mHLNT2FzpBUNlcn6DeOaUqF2xeK0rFuLN0wpW8zWd1rdzAVd6HFeDqxJ7In1mAPiMaC/+4+IS+MwLfkI8mSaJTr5w/iNCrGis95sFbjyNOnVCmODUrkpxF5/NnJuC5qpSWlYN+AXn4LHqxZlrpT4+fu7u/h7tD6d6U8p/Vzp3r7Obu0OS0H3BFpYpY1RxEW7HtZMK0KW/1Mt4iWSksTMa1+ZVgw/bJgnyIcl8NKE8uXB8XeqxliHAO9VCh1Dw2nUgzEot5wuiFT7cmxdEmPNXNWP+m2lvolPiGVjmzzSnbfHew7k/6qM9l3Z+Zs5q01nKA4HvgGodNomK/Yb/2TNRH92vzOEps1va2DWGygUHG6aMsxDBe73lMcgqG7SRm3zp7y3k5ypr58rm/NCHRPUZHD10FX60TjchpVLgsXNdzk4Ji0XjII6r7qsJ1ruKW4BxUoQe9oGpjYVbaw0pmY63QkZ+TaPx22+SRlyT0jO8JDZfWu3oycTJPVHZfg8n9z+6tEv6whdflTG+xNJMOzjwwebCBAEkApmNluwVIggjumFddjtpUEPIxZ7GZsmJGBUBNR/+xrQh8RkRwpq/CCkam8tyOLJBMVWfJbSC98S17fZ6mPT/RNNzui4iLknxvDLgxHSTmidTHMaElDZ2iQL4E9UFEzxtOyl2BrOi9hdFbNpaXSRWw1Mx482Aq7Md23RD/zYDeXqeeGJIPlbwEGKrPy
nVa8TLys4i6TSmYseKcNz+FoESNjtmeCCBNLgmEGXVQaXk2kJjFxyHB+v56HXXnFCRHguNgSAvRUmq6aosA7WRjiODbhA1nOVz7ToQRrArfUQZF0hqGR3TMBR388IFZlezn8KthjkjG17AGU5x85mUSiQg3HXVQkwNWzZJBmr6DLMnBfCRa82h+RLSF7+JX19N1wF37zm7XfKtOjwQYCWssmhpXbPh39Lm050oghevw8A/ocx5X/CTWcfGDbbmApvnKvhWNZjy3p3gns51ZECP9V4fjnUGGO5zjp/yuBdlkgtQaIcuigGrOsltRBB95QgA9Vh69Z7ZNb4a+UgCVTfgVHPcfyxY2j76T6d5dl+Nj+uJZJyQBQ7vifkhwDUqnYXMP+SushsVlaR2oX9gCbVYeuW5O1/MyKRNcLHEFMJpc9/F5RTOxo6sQ8+9D8/maONCJVyD4Fn1ZFNOiLze36lp6fS8Y7lTupoL41emUnkDPd0Ina+uWrr6Jdev0YdMf/W+NL6+538vDI+eD9zX9jEr+8h3/F5KmfjL6alKZkM8yuhwKJs4pAAJMzCBYUTUvTF3xsphIhkZx284mcrRCwK/3eTDPAEw5y0sIN4GwkO5ipSRHZVFhE3LIRcaLjI915noWyih8Jajr5ox99NXWb4XHptwLEg0QrDZ0IwWXTyMznWuQ+2vGpOVWY4eGG3M59GqROj7E4D/7jJ969NzFxYL8Zbt3dXJ4f2zKPXVPmacIwCL2ga4pjH/dOOi3zcQNK/6ngWK5Mmo6k+YR0eWqFzGX3iEpo7Fh03w0FM74DeUUzDt+tnLZl7LssYSQK3IPj702/P/kjG301HY9w8MDtpOzlRb6AvK5uVcyI6A4opc4Da+3r8alMnOUeaZ/q6Vl90BvYpcl+7zxzDgYv+XefuPdjE69d8hxEIvZsXr5cmaM3T22dj3xkJMpAZtmYZJQQhZArTdxSRGX6gXBsyp0U2jKZ+UC86vjbwJvRsOgds3ouiBgOksZiAl2X66lWnHV7AVM4jir6kj8Xk6+98GZ+l+TsxdvqEqqLD122bf+HwTwhaAxbL4zrF3VnjYtPoClvvyNuXD13f/v+kUNbG1fn+zr1TZkllPghRnw39nuTJYQ8JxKsJIoqsVmIdcQAiZNoAS0K3gHI4UlNDilCGEBOdxtnAeJgOllK4f7kD3N0NuEzUl/EGVAgZaBD2l8UVDmg2FoHMTYaMe9TDpSJtsIgDLdVCLgH6UaYsbtKh8Gn+z0cwlXH0CyOIGDzEqjrhkEWxQtCXrhkF7i43EbXxsq/uojotkuesMDDEpfQ5xJXmCBPTgUR5u7BDGctQuIg0YjLnOHQcwt67NqBfHXbvJL+ACDCcXmjlCf2Zku5R4JlSDjsHbI3DzdFCHT5NLbDZxtpGhoQ8yIeM9f2SAyMo90Qb2++mIPSlTB9R2TTdjEoWhoe24vrK0BBEHchdQc5S3uUsAtmOK3k0VNSlKPUrGBgZSL5jRRY6uVp4GFVrOpg/8sFVXACgm5OHXK6uCKgFL4kscICNXZDmUIRl28yQvwu7QdEc9p9H5zWvNxGzCWyRMlYLw4TBYKUSGvzE9G1RZ5QGHpBW7Ngi1tesDezphUSkmenOvrtq46rJc9iiv0gDA3WHWXx1Xrv7ClqghvN5y/JqKdKSE8zgDKQoOB0EJuDy7Nt9uHnhJHtYj9/a27FVVLU+pIqNEHQrtg3mwAmh6GCmfTKr6R5Ee9bd9FfjO1S6dEjN9NNVoNPHXqJJGfbjuUCCG4IdwQClUzIws7dfN2w3Ffr0zXFZt2rz2jh3kQ1UcWOwgttfCXoExx2ztt0CLpQMBP2iUWm8lrLdzd7bJJxK6LTao/uyFMYeQGqrDqOa6YS5Tc9wp6YjpxUmPQBnolh+Zbriu5b9a5tJp+reNKl7fldqXnZU0UJdNjpXuIUs0ufusrJSEQmGfAwNy1McEzzR3qK0rzwDVe4eloWQEhATikgQ5d5+nooRPhs
xvPX+/EpTfnu6t6bxOvG2Wm8ma6nDmzGvgzrFa3hIR2Tr6UPoAw2FCalOlj9zHRq1bUrAmGJYdn0qk51VQvakfCwg6bZv1Zn3theeOhHs9f11lOQV51YBVD0Ivo2ZN4iY4gc1osnI9xgMz+mTcA7IRyy8cXBBONm734jbl4f39m9s3329Mn9w9XzbR76tt6Y7Qg4Hi7zvKXoLvBh3H3BI1/t8ZIllJ1BdgdhyaaK5eLVN2rFVm0xgaFpY54Fp6dwuoLQeYFIv9KmdAXBVFxlBHs3m73bi4zUrDatjcZSNcL8dDyCflZ1H8ShjiTefDxw2jDrEpr2FgLCivR1wu99oWBYBlMzO+155Bn7P4A2eAg7BKzYQX9N2wmB9KxfjWrbQULWLHalTIX5gvpKJxFRXCNRLfOU4ch3C1psjo3dy+FpyqQ4R1EVx1zIf0OvBGFxgkcNryuYJVlWVNisiQ/FUmqiEUDetONHzN7B1TJ1TRo7Xhac5h8tiAEYCSLjnK4yYzbo7tplR2Ku+gVPP3kyVKTFxA5gRjoCS0rTyDILlbW6YMZSOLAmlcpLzWc8Px0lFnUAuBzTu0dkJ7dcrG9yX46xSsmwsjWuJBSERmxArNOsQk6Fyv7gOoEqK3pPR3R6jkI+GMLBtOkmvVk0CIDZsbjrRrW7WhGterrlG4GGRo5ZFc0+T4uQzyiw6wyJ//uQHQZOieK6F6rr3Fy0F3UZV1pibw6P0jdIdlMVV5enbQ3Ony6fIlEBZ7zExWzkGgwuV/QVHvK4R8XgzghuV0tUSjyj2PFqQZ99/iYLyR2uX6Gu9+8ECpzTh/V9tl9dQqXIiOejy67BevgUOJFAoGTboQ4giaWVKyXx1HUVOtIgdDitlcx6K3+yNbxIXnVXWtR6aYvE066xHoUcCBwXFZbPLl6mXGV3ggU6mhLZDs34TeDOplWXT8QlAj0fHbb1ntKYhnk9tINTndMQ5mlDtBP7BU7R6XrpSDGp3f1wypjRGJuwj0wPSHIaDNJjZtLzayDD9FlnK5Qg8QBRhXty1Eug3RGZNAgsXAU9r4h31wSHahksMLfdiG87rZbBt1QR+MjqFRHekOPYzVAHUiEdUcxeHUZzWMvdniayWGBUUlzcr3JtZb8/7tsqjf0QPfY8PsdztOHJdCJ8IRyLYadj6V1yMzQJXqT6syePR6BPv0qQ+l5Vh0Vu9xe5wwBwN7ILjYhmvVnbNWEYhZ6XYaiw4kXwMAMybKPrhjizaa0aRH64OTB/FKUlskL1CVFChXE8FDF0oJRD5ioPV51u3L3vXLVZZLu9qB2uSsxU6ZKjyA+a9eydZ94Zu4T/YNO5JgPCQ2l2RjCJQjWojrndelmfTZUx6eKr5uCetILmEEe9MdLvyrW90B3jZ1a70VZ5gsBuEPfZI5+tj0Pq8jcTJcpqE0qUCwC7wj6jQqraAtJRPiGSzy3dt8o8Tjv+h2V7dOTg1gb9zm6ByE4FqWpgQ2waCmjBbv0OIbtvgl66UkpZFLie2GiwbtvqIctv5ujwKoyHlxkPeSk2ZzOUtyJtOohxNAUjTaP1MbwwWtrqAZTV1WSoiPk8rVnlBzWwyLPhpquU4AAA+o9S/ri2w8y1l++Q62lEbaHiEtoeTC6yz1wG43nSuvEt2elDwONCUiPuzSgoKkF9apjJOUzySRyrtVpPkOU8xfmzhbslGesL8OSma4U6vii3j82MhOkEMzBBTZuXYuzpSxAb6+f7PLaVd7kwCrygKTA8+CtoZKEo2LYC86kmgysv51C5h49uuLvt82jEnOm4ylMEPHPGbbYISfRuxmnMdd4K8zxNAyVq/XWo5DFkSxvKSI2WSkYZylwC4hqzs4nboWFdh7p3ZTztmJnTvTk22Xp/hJnqj+GoipPshh+mR6TSQzhHLSMX0k2Zs4wJkCFb7TocQfK768jcs9VotnbLE4Ki4DtjDUiZf+OVZCDX1k7unrZXETefhxLuluy7qUYuA0MN4GiYjZwnRW4+
VIK224MNmYW7o5OYj0dLM3SNmDEYqIiA2KdSN6DYzM7bZJc2g7R2mnWRxSQyQC+kDrFnZREU9KfF6xGaX8IHWUWu9TwZWstjM7wFAwXLfIsgAkCMsfVKx0XzNGVODREUhnSpCEy4PqPHLs3HBvalbfZCUkytTg1wb3tqzXJ0k241+4anTDgn/HnvUN0TkhXHoNuWJWICIz/AMpeg+QugoNsR5qUXJnTnREH9M3Qdj09NfgzBII/wmBYPsts5jVpdMBwFgmZiyX0aRSpOflJijnlxzmgzA1x+KtsoM53SO6GULe51hpOpjyAxLAy4bqdtu05wfOLtoshIDMJPOzDdOnos9AUrw1RZ5xqXxJOX4QHtiDyhOGXaaoKxGDgWOAiTT6TJmO0YwqjcmHKSQp4RcMSQLErZ/H9w5X9WsL8o3AHQAyWKkSEc9JH95ZBQSVV89jzle1ysvkTAfy+t+PUSr/aTf0U6AODPp4b/CQDwt8UdiuFkzv3NT8ghAADhf1kVOG6K/+v4KXv/g307lTShA1/CFfL2vvGYGmdhvD3a06I11zXllgd9wZ2pdGSDsm5QmgFVjRfYUgtL8Pc9o26TsOWCy7Jofk3NlrkCzauzMAGh1Ks5i50yMz/eBKPafhqUzM62Yd2942YfBfvXGo7dhD2lYmd054SeisTNeAZrn+wS1e5WuN1au84nF8nu7y+mRreT7JL6KUVfM9sixfVD5b5mpz499apspoVHt7tzMlucVeR3nem/HjZDwYTyekFBvtKewtC4mtKiKVuw8o9HadF++Zom9G0AbRiBVMyvn7AGGAoq9Q9Xup1jOzl7mLHbpDRdoKjdZOk++zLvnLwXwNXkK0UC3NaP4SUqoSlBMwBfN3UoBEAooGDZRVOA11HBkaXdGrQLaZIrnKiwjbt1aFWP4v63puGTJ/gbhEL9jaeOMBEStHUh7R8zWaJepsoXwv+9JkDcde57g3Yr9v3V3wcmt9tip2aXvYP1PVy8kmI170tVJ/YC5v/lFSiNx0+UlHYsep3wfyXnf8oFSQcAAAA=") format("woff2"); font-weight: 700; font-display: swap; unicode-range: U+20-7E,U+2014,U+2019; } * { box-sizing: border-box; } body { font-family: system-ui, sans-serif; margin: 0; color: #222; } img { border: 0; } a, a:visited, a[href] { color: #222; } strong, b { font-weight: 600; } hr { margin: 3em 0; border: none; border-top: 1px solid #ddd; } p { max-width: 42em; line-height: 1.5; } /* Blockquotes */ blockquote { font-family: Georgia, serif; font-size: 1.1875em; /* 19px /16 */ color: #666; margin: 1.5em 0; padding: 0 1em; max-width: 31.57894736842em; /* 600px /19 */ border-left: 6px solid #ddd; /*text-indent: -0.3684210526316em;*/ /* 7px /19 */ } blockquote + blockquote { margin-top: 2em; } blockquote img { height: 1.3em; width: 1.3em; border-radius: 50%; vertical-align: text-top; margin-left: 2px; margin-right: 6px; } /* Main */ main { font-size: 1.125em; /* 18px /16 */ } main:not(:empty) { padding-bottom: 3em; margin-bottom: 3em; 
} /* Tables */ table { border-collapse: collapse; margin-bottom: 2em; } table th, table td { text-align: left; border-top: 1px solid #eee; border-bottom: 1px solid #eee; padding: .4em; font-size: 0.8125em; /* 13px /16 */ } table th:first-child, table td:first-child { padding-left: 0; } table th { border-color: #ddd; } h2 + table { margin-top: -0.625em; /* -10px /16 */ } @media (min-width: 37.5em) { /* 600px */ table th, table td { padding: .4em .8em; font-size: 1em; /* 16px /16 */ } } /* Headings */ h1, h2, h3, h4, h5 { font-family: BenchNine, system-ui, sans-serif; } h1 { font-size: 2.666666666667em; /* 48px /18 */ margin: 0 0 .5em; } main .elv-toc + h1 { margin-top: 1em; } main h1:first-child, main .elv-toc + h1 { border-bottom: 2px dotted #666; } @media (min-width: 64em) { /* 1024px */ main .elv-toc + h1, main .elv-toc + h2 { margin-top: 0; } } h2 { font-size: 2.222222222222em; /* 40px /18 */ border-bottom: 1px solid #ddd; margin: 1em 0 .25em; } h3 { font-size: 1.666666666667em; /* 30px /18 */ margin-bottom: .5em; } h4 { font-size: 1.444444444444em; /* 26px /18 */ margin-bottom: .5em; } h5 { font-size: 1.277777777778em; /* 23px /18 */ margin-bottom: .5em; } main h1, main h2, main h3 { text-transform: uppercase; } h1 code, h2 code, h3 code, h4 code, h5 code { font-family: inherit; text-transform: none; } /* Lists */ ul { padding: 0 1em; } li { padding: .25em 0; } li ul { margin: .5em 0; padding-left: 1em; } li li { padding-top: .1em; padding-bottom: .1em; } /* Syntax highlighting and Code blocks */ pre { display: block; padding: .5em; margin: 1em -.5em 2em -.5em; overflow-x: auto; background-color: #fafafa; font-size: 0.75em; /* 12px /16 */ } pre, code { font-family: Monaco, monospace; } code { -ms-word-break: break-all; word-break: break-word; -webkit-hyphens: manual; -moz-hyphens: manual; hyphens: manual; background-color: #fafafa; } pre + pre[class*="language-"] { margin-top: 1em; } pre + .note { font-size: 0.6666666666667em; /* 16px /24 */ margin-top: 
-2.875em; /* 46px /16 */ margin-bottom: 2.5em; /* 40px /16 */ text-align: right; } @media (min-width: 37.5em) { /* 600px */ pre { font-size: 0.75em; /* 16px /16 */ } } #quick-start ~ .language-text { border-top: 2px solid #666; border-bottom: 2px solid #666; } @media (min-width: 42em) { /* 672px */ #quick-start ~ .language-text { border: 2px solid #666; } } #quick-start ~ .language-text, #quick-start ~ .language-text code { background-color: #fafafa; color: #222; } /* Layout */ .elv-layout { padding: 1rem; margin: 0 auto; max-width: 42rem; clear: both; } header.elv-layout { padding: 0 1rem; } footer.elv-layout { margin-bottom: 5em; } .elv-layout-full { max-width: none; } @media (min-width: 64em) { /* 1024px */ .elv-layout-toc { padding-left: 15rem; max-width: 60rem; margin-right: 1rem; position: relative; } } /*.elv-layout-wider { max-width: 60rem; }*/ /* Header */ .elv-header { position: relative; text-align: center; } .elv-header-default { display: flex; flex-direction: column; justify-content: center; align-items: center; padding-top: 0; } .elv-header-c { width: 100%; } .elv-header-docs .elv-header-c { padding: 1rem 0; } .elv-header-docs:before, .elv-header-docs:after { content: " "; display: table; } .elv-header-docs:after { clear: both; } /* Header Hero */ .elv-hero { background-color: #222; } .elv-hero img, .elv-hero svg { width: 42.95774646vh; height: 60vh; } .elv-hero:hover img, .elv-hero:hover svg { background-color: inherit; } .elv-header-default .elv-hero { display: flex; justify-content: center; width: calc(100% + 2rem); margin-left: -1rem; margin-right: -1rem; } .elv-hero:hover { background-color: #333; } .elv-header-docs .elv-hero { float: left; margin-right: .5em; } .elv-header-default .elv-hero img, .elv-header-default .elv-hero svg { position: relative; background-color: transparent; z-index: 1; } .elv-header-docs .elv-hero img, .elv-header-docs .elv-hero svg { width: auto; height: 3em; } @media (min-width: 43.75em) { /* 700px */ .elv-header-docs 
.elv-hero { margin-right: 1em; } .elv-header-docs .elv-hero img, .elv-header-docs .elv-hero svg { width: 4.303125em; /* 68.85px /16 */ height: 6em; } } /* Header Possum */ .elv-possum-anchor { display: block; } .elv-possum { position: absolute; right: .5rem; top: 1rem; transition: .3s opacity ease-out; } .elv-header-docs .elv-possum { width: 15vw; max-width: 6.25rem; /* 100px /16 */ } .elv-header-default { overflow: hidden; } .elv-header-default .elv-possum { pointer-events: none; width: auto; height: calc((60vh - 2rem) / 1.6); top: 36%; left: 1vw; right: auto; animation-duration: 180s; animation-name: balloonFloat; } @media (prefers-reduced-motion: reduce) { .elv-header-default .elv-possum { display: none; } } /* Navigation */ .elv-nav { padding: 0; margin: 1em 0 0 0; clear: both; list-style: none; } .elv-nav-item { float: left; padding-left: .25em; padding-right: .25em; font-size: 0.8125rem; /* 13px /16 */ } .elv-nav-item:first-child { padding-left: 0; } .elv-nav-item:last-child { padding-right: 0; } .elv-nav-item a { font-weight: 600; } .elv-nav-item .elv-nav-light { font-weight: 300; } @media (min-width: 20em) { /* 320px */ .elv-nav-item { font-size: 4vw; } } @media (min-width: 25em) { /* 400px */ .elv-nav-item { font-size: 1rem; /* 16px /16 */ padding-left: .45em; padding-right: .45em; } } @media (min-width: 35.625em) { /* 570px */ .elv-nav { clear: none; width: auto; margin-top: 0; } .elv-nav-item { float: left; padding-left: 0; padding-right: 0; } .elv-nav-item a:not(:hover) { text-decoration: none; } .elv-nav-item:not(:first-child):before { content: ""; border-left: 1px solid #ccc; padding: 0 0 0 .75em; margin-left: .75em; } } /* Version */ .latestversion { font-size: 2em; margin-top: 0; } .latestversion code { font-size: 0.75em; /* 24px /32 */ } .latestversion { font-family: BenchNine, system-ui, sans-serif; } .tmpl-docs .latestversion { position: absolute; top: 1rem; right: 1rem; margin: 0; } /* News */ .news { text-align: center; } /* Direct Links / 
Markdown Headers */ .direct-link { font-family: sans-serif; text-decoration: none; font-style: normal; margin-left: .1em; } a[href].direct-link, a[href].direct-link:visited { color: transparent; } a[href].direct-link:focus, a[href].direct-link:focus:visited, :hover > a[href].direct-link, :hover > a[href].direct-link:visited, :focus > a[href].direct-link, :focus > a[href].direct-link:visited { color: #aaa; } /* don’t use a direct link, should be a link to the page */ main .elv-toc + h1 .direct-link { display: none; } /* Style Guide */ .elv-sg-component { background-color: #f9f9f9; border-top: 1px dotted #ddd; border-bottom: 1px dotted #ddd; margin: 2rem 0; } /* Screen readers only */ .sr-only { position: absolute; height: 1px; width: 1px; overflow: hidden; clip: rect(1px, 1px, 1px, 1px); } /* Language List */ .elv-langlist { font-size: 0.8333333333333em; /* 15px /18 */ background-color: #f7f7f7; padding: .5rem; margin: 2em 0; } .elv-langlist-hed { margin: 0; float: left; border: none; font-size: 1.4em; /* 21px /15 */ } .elv-langlist > .inlinelist { display: inline; margin-left: 1em; } @media (min-width: 37.5em) { /* 600px */ .quicktipstoc { margin: 0 0 3% 3%; float: right; width: 32%; border-radius: .25em; font-size: 0.8125em; /* 13px /16 */ } } /* Breakpoint Overrides */ @media (max-width: 37.4375em) { /* 599px */ .bp-notsm.bp-notsm.bp-notsm.bp-notsm { display: none; } } @media (min-width: 37.5em) { /* 600px */ .bp-sm.bp-sm.bp-sm.bp-sm { display: none ; } } .header-anchor { text-decoration: none; } .header-anchor:hover::after { content: " 🔗"; } borgmatic-1.7.9/docs/_includes/layouts/000077500000000000000000000000001440467744700201155ustar00rootroot00000000000000borgmatic-1.7.9/docs/_includes/layouts/base.njk000066400000000000000000000014551440467744700215400ustar00rootroot00000000000000 {{ subtitle + ' - ' if subtitle}}{{ title }} {%- set css %} {% include 'index.css' %} {% include 'components/lists.css' %} {% include 'components/external-links.css' %} {% include 
'components/minilink.css' %} {% include 'components/toc.css' %} {% include 'components/info-blocks.css' %} {% include 'prism-theme.css' %} {% include 'asciinema.css' %} {% endset %} {% if feedTitle and feedUrl %} {% endif %} {{ content | safe }} borgmatic-1.7.9/docs/_includes/layouts/main.njk000066400000000000000000000021031440467744700215410ustar00rootroot00000000000000--- layout: layouts/base.njk templateClass: elv-default headerClass: elv-header-default --- {% include "header.njk" %}
{% set navPages = collections.all | eleventyNavigation %} {% macro renderNavListItem(entry) -%} {{ entry.title }} {%- if entry.children.length -%}
    {%- for child in entry.children %}{{ renderNavListItem(child) }}{% endfor -%}
{%- endif -%} {%- endmacro %}
    {%- for entry in navPages %}{{ renderNavListItem(entry) }}{%- endfor -%}
{{ content | safe }} {% include 'components/suggestion-link.html' %}
borgmatic-1.7.9/docs/_includes/prism-theme.css000066400000000000000000000057371440467744700213750ustar00rootroot00000000000000/** * prism.js default theme for JavaScript, CSS and HTML * Based on dabblet (http://dabblet.com) * @author Lea Verou */ /* * Modified with an approximation of the One Light syntax highlighting theme. */ code[class*="language-"], pre[class*="language-"] { color: #494b53; background: none; font-family: Consolas, Monaco, 'Andale Mono', 'Ubuntu Mono', monospace; text-align: left; white-space: pre; word-spacing: normal; word-break: normal; word-wrap: normal; line-height: 1.5; -moz-tab-size: 4; -o-tab-size: 4; tab-size: 4; -webkit-hyphens: none; -moz-hyphens: none; -ms-hyphens: none; hyphens: none; } pre[class*="language-"]::-moz-selection, pre[class*="language-"] ::-moz-selection, code[class*="language-"]::-moz-selection, code[class*="language-"] ::-moz-selection { text-shadow: none; color: #232324; background: #dbdbdc; } pre[class*="language-"]::selection, pre[class*="language-"] ::selection, code[class*="language-"]::selection, code[class*="language-"] ::selection { text-shadow: none; color: #232324; background: #dbdbdc; } @media print { code[class*="language-"], pre[class*="language-"] { text-shadow: none; } } /* Code blocks */ pre[class*="language-"] { padding: 1em; margin: .5em 0; overflow: auto; } :not(pre) > code[class*="language-"], pre[class*="language-"] { background: #fafafa; } /* Inline code */ :not(pre) > code[class*="language-"] { padding: .1em; border-radius: .3em; white-space: normal; } .token.comment, .token.prolog, .token.doctype, .token.cdata { color: #505157; } .token.punctuation { color: #526fff; } .token.selector, .token.tag { color: none; } .token.property, .token.boolean, .token.number, .token.constant, .token.symbol, .token.attr-name, .token.deleted { color: #986801; } .token.string, .token.char, .token.attr-value, .token.builtin, .token.inserted { color: #50a14f; } .token.operator, .token.entity, .token.url, 
.language-css .token.string, .style .token.string { color: #526fff; } .token.atrule, .token.keyword { color: #e45649; } .token.function { color: #4078f2; } .token.regex, .token.important, .token.variable { color: #e45649; } .token.important, .token.bold { font-weight: bold; } .token.italic { font-style: italic; } .token.entity { cursor: help; } pre.line-numbers { position: relative; padding-left: 3.8em; counter-reset: linenumber; } pre.line-numbers > code { position: relative; } .line-numbers .line-numbers-rows { position: absolute; pointer-events: none; top: 0; font-size: 100%; left: -3.8em; width: 3em; /* works for line-numbers below 1000 lines */ letter-spacing: -1px; border-right: 0; -webkit-user-select: none; -moz-user-select: none; -ms-user-select: none; user-select: none; } .line-numbers-rows > span { pointer-events: none; display: block; counter-increment: linenumber; } .line-numbers-rows > span:before { content: counter(linenumber); color: #5C6370; display: block; padding-right: 0.8em; text-align: right; } borgmatic-1.7.9/docs/how-to/000077500000000000000000000000001440467744700156655ustar00rootroot00000000000000borgmatic-1.7.9/docs/how-to/add-preparation-and-cleanup-steps-to-backups.md000066400000000000000000000106521440467744700266540ustar00rootroot00000000000000--- title: How to add preparation and cleanup steps to backups eleventyNavigation: key: 🧹 Add preparation and cleanup steps parent: How-to guides order: 9 --- ## Preparation and cleanup hooks If you find yourself performing preparation tasks before your backup runs, or cleanup work afterwards, borgmatic hooks may be of interest. Hooks are shell commands that borgmatic executes for you at various points as it runs, and they're configured in the `hooks` section of your configuration file. But if you're looking to backup a database, it's probably easier to use the [database backup feature](https://torsion.org/borgmatic/docs/how-to/backup-your-databases/) instead. 
You can specify `before_backup` hooks to perform preparation steps before running backups, and specify `after_backup` hooks to perform cleanup steps afterwards. Here's an example: ```yaml hooks: before_backup: - mount /some/filesystem after_backup: - umount /some/filesystem ``` New in version 1.6.0 The `before_backup` and `after_backup` hooks each run once per repository in a configuration file. `before_backup` hooks runs right before the `create` action for a particular repository, and `after_backup` hooks run afterwards, but not if an error occurs in a previous hook or in the backups themselves. (Prior to borgmatic 1.6.0, these hooks instead ran once per configuration file rather than once per repository.) There are additional hooks that run before/after other actions as well. For instance, `before_prune` runs before a `prune` action for a repository, while `after_prune` runs after it. New in version 1.7.0 The `before_actions` and `after_actions` hooks run before/after all the actions (like `create`, `prune`, etc.) for each repository. These hooks are a good place to run per-repository steps like mounting/unmounting a remote filesystem. ## Variable interpolation The before and after action hooks support interpolating particular runtime variables into the hook command. Here's an example that assumes you provide a separate shell script: ```yaml hooks: after_prune: - record-prune.sh "{configuration_filename}" "{repository}" ``` In this example, when the hook is triggered, borgmatic interpolates runtime values into the hook command: the borgmatic configuration filename and the paths of the current Borg repository. 
Here's the full set of supported variables you can use here: * `configuration_filename`: borgmatic configuration filename in which the hook was defined * `repository`: path of the current repository as configured in the current borgmatic configuration file Note that you can also interpolate in [arbitrary environment variables](https://torsion.org/borgmatic/docs/how-to/provide-your-passwords/). ## Global hooks You can also use `before_everything` and `after_everything` hooks to perform global setup or cleanup: ```yaml hooks: before_everything: - set-up-stuff-globally after_everything: - clean-up-stuff-globally ``` `before_everything` hooks collected from all borgmatic configuration files run once before all configuration files (prior to all actions), but only if there is a `create` action. An error encountered during a `before_everything` hook causes borgmatic to exit without creating backups. `after_everything` hooks run once after all configuration files and actions, but only if there is a `create` action. It runs even if an error occurs during a backup or a backup hook, but not if an error occurs during a `before_everything` hook. ## Error hooks borgmatic also runs `on_error` hooks if an error occurs, either when creating a backup or running a backup hook. See the [monitoring and alerting documentation](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/) for more information. ## Hook output Any output produced by your hooks shows up both at the console and in syslog (when run in a non-interactive console). For more information, read about inspecting your backups. ## Security An important security note about hooks: borgmatic executes all hook commands with the user permissions of borgmatic itself. So to prevent potential shell injection or privilege escalation, do not forget to set secure permissions on borgmatic configuration files (`chmod 0600`) and scripts (`chmod 0700`) invoked by hooks. 
borgmatic-1.7.9/docs/how-to/backup-to-a-removable-drive-or-an-intermittent-server.md000066400000000000000000000117321440467744700304410ustar00rootroot00000000000000--- title: How to backup to a removable drive or an intermittent server eleventyNavigation: key: 💾 Backup to a removable drive/server parent: How-to guides order: 10 --- ## Occasional backups A common situation is backing up to a repository that's only sometimes online. For instance, you might send most of your backups to the cloud, but occasionally you want to plug in an external hard drive or backup to your buddy's sometimes-online server for that extra level of redundancy. But if you run borgmatic and your hard drive isn't plugged in, or your buddy's server is offline, then you'll get an annoying error message and the overall borgmatic run will fail (even if individual repositories still complete). Another variant is when the source machine is only sometimes available for backups, e.g. a laptop where you want to skip backups when the battery falls below a certain level. So what if you want borgmatic to swallow the error of a missing drive or an offline server or a low battery—and exit gracefully? That's where the concept of "soft failure" come in. ## Soft failure command hooks This feature leverages [borgmatic command hooks](https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/), so first familiarize yourself with them. The idea is that you write a simple test in the form of a borgmatic hook to see if backups should proceed or not. The way the test works is that if any of your hook commands return a special exit status of 75, that indicates to borgmatic that it's a temporary failure, and borgmatic should skip all subsequent actions for that configuration file. If you return any other status, then it's a standard success or error. (Zero is success; anything else other than 75 is an error). 
So for instance, if you have an external drive that's only sometimes mounted, declare its repository in its own [separate configuration file](https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/), say at `/etc/borgmatic.d/removable.yaml`: ```yaml location: source_directories: - /home repositories: - /mnt/removable/backup.borg ``` Then, write a `before_backup` hook in that same configuration file that uses the external `findmnt` utility to see whether the drive is mounted before proceeding. ```yaml hooks: before_backup: - findmnt /mnt/removable > /dev/null || exit 75 ``` What this does is check if the `findmnt` command errors when probing for a particular mount point. If it does error, then it returns exit code 75 to borgmatic. borgmatic logs the soft failure, skips all further actions in that configurable file, and proceeds onward to any other borgmatic configuration files you may have. Note that `before_backup` only runs on the `create` action. See below about optionally using `before_actions` instead. You can imagine a similar check for the sometimes-online server case: ```yaml location: source_directories: - /home repositories: - ssh://me@buddys-server.org/./backup.borg hooks: before_backup: - ping -q -c 1 buddys-server.org > /dev/null || exit 75 ``` Or to only run backups if the battery level is high enough: ```yaml hooks: before_backup: - is_battery_percent_at_least.sh 25 ``` (Writing the battery script is left as an exercise to the reader.) New in version 1.7.0 The `before_actions` and `after_actions` hooks run before/after all the actions (like `create`, `prune`, etc.) for each repository. So if you'd like your soft failure command hook to run regardless of action, consider using `before_actions` instead of `before_backup`. ## Caveats and details There are some caveats you should be aware of with this feature. * You'll generally want to put a soft failure command in the `before_backup` hook, so as to gate whether the backup action occurs. 
While a soft failure is also supported in the `after_backup` hook, returning a soft failure there won't prevent any actions from occuring, because they've already occurred! Similiarly, you can return a soft failure from an `on_error` hook, but at that point it's too late to prevent the error. * Returning a soft failure does prevent further commands in the same hook from executing. So, like a standard error, it is an "early out". Unlike a standard error, borgmatic does not display it in angry red text or consider it a failure. * The soft failure only applies to the scope of a single borgmatic configuration file. So put anything that you don't want soft-failed, like always-online cloud backups, in separate configuration files from your soft-failing repositories. * The soft failure doesn't have to apply to a repository. You can even perform a test to make sure that individual source directories are mounted and available. Use your imagination! * The soft failure feature also works for before/after hooks for other actions as well. But it is not implemented for `before_everything` or `after_everything`. borgmatic-1.7.9/docs/how-to/backup-your-databases.md000066400000000000000000000303351440467744700224010ustar00rootroot00000000000000--- title: How to backup your databases eleventyNavigation: key: 🗄️ Backup your databases parent: How-to guides order: 8 --- ## Database dump hooks If you want to backup a database, it's best practice with most database systems to backup an exported database dump, rather than backing up your database's internal file storage. That's because the internal storage can change while you're reading from it. In contrast, a database dump creates a consistent snapshot that is more suited for backups. Fortunately, borgmatic includes built-in support for creating database dumps prior to running backups. For example, here is everything you need to dump and backup a couple of local PostgreSQL databases and a MySQL/MariaDB database. 
```yaml hooks: postgresql_databases: - name: users - name: orders mysql_databases: - name: posts ``` New in version 1.5.22 You can also dump MongoDB databases. For example: ```yaml hooks: mongodb_databases: - name: messages ``` New in version 1.7.9 Additionally, you can dump SQLite databases. For example: ```yaml hooks: sqlite_databases: - name: mydb path: /var/lib/sqlite3/mydb.sqlite ``` As part of each backup, borgmatic streams a database dump for each configured database directly to Borg, so it's included in the backup without consuming additional disk space. (The exceptions are the PostgreSQL/MongoDB "directory" dump formats, which can't stream and therefore do consume temporary disk space. Additionally, prior to borgmatic 1.5.3, all database dumps consumed temporary disk space.) To support this, borgmatic creates temporary named pipes in `~/.borgmatic` by default. To customize this path, set the `borgmatic_source_directory` option in the `location` section of borgmatic's configuration. Also note that using a database hook implicitly enables both the `read_special` and `one_file_system` configuration settings (even if they're disabled in your configuration) to support this dump and restore streaming. See Limitations below for more on this. 
Here's a more involved example that connects to remote databases: ```yaml hooks: postgresql_databases: - name: users hostname: database1.example.org - name: orders hostname: database2.example.org port: 5433 username: postgres password: trustsome1 format: tar options: "--role=someone" mysql_databases: - name: posts hostname: database3.example.org port: 3307 username: root password: trustsome1 options: "--skip-comments" mongodb_databases: - name: messages hostname: database4.example.org port: 27018 username: dbuser password: trustsome1 authentication_database: mongousers options: "--ssl" sqlite_databases: - name: mydb path: /var/lib/sqlite3/mydb.sqlite ``` See your [borgmatic configuration file](https://torsion.org/borgmatic/docs/reference/configuration/) for additional customization of the options passed to database commands (when listing databases, restoring databases, etc.). ### All databases If you want to dump all databases on a host, use `all` for the database name: ```yaml hooks: postgresql_databases: - name: all mysql_databases: - name: all mongodb_databases: - name: all ``` Note that you may need to use a `username` of the `postgres` superuser for this to work with PostgreSQL. The SQLite hook in particular does not consider "all" a special database name. New in version 1.7.6 With PostgreSQL and MySQL, you can optionally dump "all" databases to separate files instead of one combined dump file, allowing more convenient restores of individual databases. Enable this by specifying your desired database dump `format`: ```yaml hooks: postgresql_databases: - name: all format: custom mysql_databases: - name: all format: sql ``` ### No source directories New in version 1.7.1 If you would like to backup databases only and not source directories, you can omit `source_directories` entirely. 
In older versions of borgmatic, instead specify an empty `source_directories` value, as it is a mandatory option prior to version 1.7.1: ```yaml location: source_directories: [] hooks: mysql_databases: - name: all ``` ### External passwords If you don't want to keep your database passwords in your borgmatic configuration file, you can instead pass them in via [environment variables](https://torsion.org/borgmatic/docs/how-to/provide-your-passwords/) or command-line [configuration overrides](https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#configuration-overrides). ### Configuration backups An important note about this database configuration: You'll need the configuration to be present in order for borgmatic to restore a database. So to prepare for this situation, it's a good idea to include borgmatic's own configuration files as part of your regular backups. That way, you can always bring back any missing configuration files in order to restore a database. ## Supported databases As of now, borgmatic supports PostgreSQL, MySQL/MariaDB, MongoDB, and SQLite databases directly. But see below about general-purpose preparation and cleanup hooks as a work-around with other database systems. Also, please [file a ticket](https://torsion.org/borgmatic/#issues) for additional database systems that you'd like supported. ## Database restoration To restore a database dump from an archive, use the `borgmatic restore` action. But the first step is to figure out which archive to restore from. A good way to do that is to use the `rlist` action: ```bash borgmatic rlist ``` (No borgmatic `rlist` action? Try `list` instead or upgrade borgmatic!) That should yield output looking something like: ```text host-2023-01-01T04:05:06.070809 Tue, 2023-01-01 04:05:06 [...] host-2023-01-02T04:06:07.080910 Wed, 2023-01-02 04:06:07 [...] 
``` Assuming that you want to restore all database dumps from the archive with the most up-to-date files and therefore the latest timestamp, run a command like: ```bash borgmatic restore --archive host-2023-01-02T04:06:07.080910 ``` (No borgmatic `restore` action? Upgrade borgmatic!) With newer versions of borgmatic, you can simplify this to: ```bash borgmatic restore --archive latest ``` The `--archive` value is the name of the archive to restore from. This restores all databases dumps that borgmatic originally backed up to that archive. This is a destructive action! `borgmatic restore` replaces live databases by restoring dumps from the selected archive. So be very careful when and where you run it. ### Repository selection If you have a single repository in your borgmatic configuration file(s), no problem: the `restore` action figures out which repository to use. But if you have multiple repositories configured, then you'll need to specify the repository path containing the archive to restore. Here's an example: ```bash borgmatic restore --repository repo.borg --archive host-2023-... ``` ### Restore particular databases If you've backed up multiple databases into an archive, and you'd only like to restore one of them, use the `--database` flag to select one or more databases. For instance: ```bash borgmatic restore --archive host-2023-... --database users ``` New in version 1.7.6 You can also restore individual databases even if you dumped them as "all"—as long as you dumped them into separate files via use of the "format" option. See above for more information. ### Restore all databases To restore all databases: ```bash borgmatic restore --archive host-2023-... --database all ``` Or omit the `--database` flag entirely: ```bash borgmatic restore --archive host-2023-... ``` Prior to borgmatic version 1.7.6, this restores a combined "all" database dump from the archive. 
New in version 1.7.6 Restoring "all" databases restores each database found in the selected archive. That includes any combined dump file named "all" and any other individual database dumps found in the archive. ### Limitations There are a few important limitations with borgmatic's current database restoration feature that you should know about: 1. You must restore as the same Unix user that created the archive containing the database dump. That's because the user's home directory path is encoded into the path of the database dump within the archive. 2. As mentioned above, borgmatic can only restore a database that's defined in borgmatic's own configuration file. So include your configuration file in backups to avoid getting caught without a way to restore a database. 3. borgmatic does not currently support backing up or restoring multiple databases that share the exact same name on different hosts. 4. Because database hooks implicitly enable the `read_special` configuration setting to support dump and restore streaming, you'll need to ensure that any special files are excluded from backups (named pipes, block devices, character devices, and sockets) to prevent hanging. Try a command like `find /your/source/path -type b -or -type c -or -type p -or -type s` to find such files. Common directories to exclude are `/dev` and `/run`, but that may not be exhaustive. New in version 1.7.3 When database hooks are enabled, borgmatic automatically excludes special files that may cause Borg to hang, so you no longer need to manually exclude them. (This includes symlinks with special files as a destination.) You can override/prevent this behavior by explicitly setting `read_special` to true. ### Manual restoration If you prefer to restore a database without the help of borgmatic, first [extract](https://torsion.org/borgmatic/docs/how-to/extract-a-backup/) an archive containing a database dump. 
borgmatic extracts the dump file into the *`username`*`/.borgmatic/` directory within the extraction destination path, where *`username`* is the user that created the backup. For example, if you created the backup with the `root` user and you're extracting to `/tmp`, then the dump will be in `/tmp/root/.borgmatic`. After extraction, you can manually restore the dump file using native database commands like `pg_restore`, `mysql`, `mongorestore`, `sqlite`, or similar. Also see the documentation on [listing database dumps](https://torsion.org/borgmatic/docs/how-to/inspect-your-backups/#listing-database-dumps). ## Preparation and cleanup hooks If this database integration is too limited for needs, borgmatic also supports general-purpose [preparation and cleanup hooks](https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/). These hooks allows you to trigger arbitrary commands or scripts before and after backups. So if necessary, you can use these hooks to create database dumps with any database system. ## Troubleshooting ### MySQL table lock errors If you encounter table lock errors during a database dump with MySQL/MariaDB, you may need to [use a transaction](https://dev.mysql.com/doc/refman/8.0/en/mysqldump.html#option_mysqldump_single-transaction). You can add any additional flags to the `options:` in your database configuration. Here's an example: ```yaml hooks: mysql_databases: - name: posts options: "--single-transaction --quick" ``` ### borgmatic hangs during backup See Limitations above about `read_special`. You may need to exclude certain paths with named pipes, block devices, character devices, or sockets on which borgmatic is hanging. Alternatively, if excluding special files is too onerous, you can create two separate borgmatic configuration files—one for your source files and a separate one for backing up databases. That way, the database `read_special` option will not be active when backing up special files. 
New in version 1.7.3 See Limitations above about borgmatic's automatic exclusion of special files to prevent Borg hangs. borgmatic-1.7.9/docs/how-to/deal-with-very-large-backups.md000066400000000000000000000151501440467744700235700ustar00rootroot00000000000000--- title: How to deal with very large backups eleventyNavigation: key: 📏 Deal with very large backups parent: How-to guides order: 4 --- ## Biggish data Borg itself is great for efficiently de-duplicating data across successive backup archives, even when dealing with very large repositories. But you may find that while borgmatic's default actions of `create`, `prune`, `compact`, and `check` works well on small repositories, it's not so great on larger ones. That's because running the default pruning, compact, and consistency checks take a long time on large repositories. Prior to version 1.7.9 The default action ordering was `prune`, `compact`, `create`, and `check`. ### A la carte actions If you find yourself wanting to customize the actions, you have some options. First, you can run borgmatic's `prune`, `compact`, `create`, or `check` actions separately. For instance, the following optional actions are available (among others): ```bash borgmatic create borgmatic prune borgmatic compact borgmatic check ``` You can run borgmatic with only one of these actions provided, or you can mix and match any number of them in a single borgmatic run. This supports approaches like skipping certain actions while running others. For instance, this skips `prune` and `compact` and only runs `create` and `check`: ```bash borgmatic create check ``` New in version 1.7.9 borgmatic now respects your specified command-line action order, running actions in the order you specify. In previous versions, borgmatic ran your specified actions in a fixed ordering regardless of the order they appeared on the command-line. But instead of running actions together, another option is to run backups with `create` on a frequent schedule (e.g. 
with `borgmatic create` called from one cron job), while only running expensive consistency checks with `check` on a much less frequent basis (e.g. with `borgmatic check` called from a separate cron job). ### Consistency check configuration Another option is to customize your consistency checks. By default, if you omit consistency checks from configuration, borgmatic runs full-repository checks (`repository`) and per-archive checks (`archives`) within each repository, no more than once a month. This is equivalent to what `borg check` does if run without options. But if you find that archive checks are too slow, for example, you can configure borgmatic to run repository checks only. Configure this in the `consistency` section of borgmatic configuration: ```yaml consistency: checks: - name: repository ``` Prior to version 1.6.2 `checks` was a plain list of strings without the `name:` part. For example: ```yaml consistency: checks: - repository ``` Here are the available checks from fastest to slowest: * `repository`: Checks the consistency of the repository itself. * `archives`: Checks all of the archives in the repository. * `extract`: Performs an extraction dry-run of the most recent archive. * `data`: Verifies the data integrity of all archives contents, decrypting and decompressing all data. Note that the `data` check is a more thorough version of the `archives` check, so enabling the `data` check implicitly enables the `archives` check as well. See [Borg's check documentation](https://borgbackup.readthedocs.io/en/stable/usage/check.html) for more information. ### Check frequency New in version 1.6.2 You can optionally configure checks to run on a periodic basis rather than every time borgmatic runs checks. 
For instance: ```yaml consistency: checks: - name: repository frequency: 2 weeks - name: archives frequency: 1 month ``` This tells borgmatic to run the `repository` consistency check at most once every two weeks for a given repository and the `archives` check at most once a month. The `frequency` value is a number followed by a unit of time, e.g. "3 days", "1 week", "2 months", etc. The `frequency` defaults to `always`, which means run this check every time checks run. Unlike a real scheduler like cron, borgmatic only makes a best effort to run checks on the configured frequency. It compares that frequency with how long it's been since the last check for a given repository (as recorded in a file within `~/.borgmatic/checks`). If it hasn't been long enough, the check is skipped. And you still have to run `borgmatic check` (or `borgmatic` without actions) in order for checks to run, even when a `frequency` is configured! This also applies *across* configuration files that have the same repository configured. Make sure you have the same check frequency configured in each though—or the most frequently configured check will apply. If you want to temporarily ignore your configured frequencies, you can invoke `borgmatic check --force` to run checks unconditionally. ### Disabling checks If that's still too slow, you can disable consistency checks entirely, either for a single repository or for all repositories. Disabling all consistency checks looks like this: ```yaml consistency: checks: - name: disabled ``` Prior to version 1.6.2 `checks` was a plain list of strings without the `name:` part. 
For instance: ```yaml consistency: checks: - disabled ``` If you have multiple repositories in your borgmatic configuration file, you can keep running consistency checks, but only against a subset of the repositories: ```yaml consistency: check_repositories: - path/of/repository_to_check.borg ``` Finally, you can override your configuration file's consistency checks, and run particular checks via the command-line. For instance: ```bash borgmatic check --only data --only extract ``` This is useful for running slow consistency checks on an infrequent basis, separate from your regular checks. It is still subject to any configured check frequencies unless the `--force` flag is used. ## Troubleshooting ### Broken pipe with remote repository When running borgmatic on a large remote repository, you may receive errors like the following, particularly while "borg check" is validating backups for consistency: ```text Write failed: Broken pipe borg: Error: Connection closed by remote host ``` This error can be caused by an ssh timeout, which you can rectify by adding the following to the `~/.ssh/config` file on the client: ```text Host * ServerAliveInterval 120 ``` This should make the client keep the connection alive while validating backups. borgmatic-1.7.9/docs/how-to/develop-on-borgmatic.md000066400000000000000000000110731440467744700222260ustar00rootroot00000000000000--- title: How to develop on borgmatic eleventyNavigation: key: 🏗️ Develop on borgmatic parent: How-to guides order: 13 --- ## Source code To get set up to hack on borgmatic, first clone master via HTTPS or SSH: ```bash git clone https://projects.torsion.org/borgmatic-collective/borgmatic.git ``` Or: ```bash git clone ssh://git@projects.torsion.org:3022/borgmatic-collective/borgmatic.git ``` Then, install borgmatic "[editable](https://pip.pypa.io/en/stable/cli/pip_install/#editable-installs)" so that you can run borgmatic commands while you're hacking on them to make sure your changes work. 
```bash cd borgmatic/ pip3 install --user --editable . ``` Note that this will typically install the borgmatic commands into `~/.local/bin`, which may or may not be on your PATH. There are other ways to install borgmatic editable as well, for instance into the system Python install (so without `--user`, as root), or even into a [virtualenv](https://virtualenv.pypa.io/en/stable/). How or where you install borgmatic is up to you, but generally an editable install makes development and testing easier. ## Automated tests Assuming you've cloned the borgmatic source code as described above, and you're in the `borgmatic/` working copy, install tox, which is used for setting up testing environments: ```bash pip3 install --user tox ``` Finally, to actually run tests, run: ```bash cd borgmatic tox ``` ### Code formatting If when running tests, you get an error from the [Black](https://black.readthedocs.io/en/stable/) code formatter about files that would be reformatted, you can ask Black to format them for you via the following: ```bash tox -e black ``` And if you get a complaint from the [isort](https://github.com/timothycrosley/isort) Python import orderer, you can ask isort to order your imports for you: ```bash tox -e isort ``` ### End-to-end tests borgmatic additionally includes some end-to-end tests that integration test with Borg and supported databases for a few representative scenarios. These tests don't run by default when running `tox`, because they're relatively slow and depend on Docker containers for runtime dependencies. These tests tests do run on the continuous integration (CI) server, and running them on your developer machine is the closest thing to CI test parity. If you would like to run the full test suite, first install Docker and [Docker Compose](https://docs.docker.com/compose/install/). Then run: ```bash scripts/run-full-dev-tests ``` Note that this scripts assumes you have permission to run Docker. 
If you don't, then you may need to run with `sudo`. ## Code style Start with [PEP 8](https://www.python.org/dev/peps/pep-0008/). But then, apply the following deviations from it: * For strings, prefer single quotes over double quotes. * Limit all lines to a maximum of 100 characters. * Use trailing commas within multiline values or argument lists. * For multiline constructs, put opening and closing delimeters on lines separate from their contents. * Within multiline constructs, use standard four-space indentation. Don't align indentation with an opening delimeter. borgmatic code uses the [Black](https://black.readthedocs.io/en/stable/) code formatter, the [Flake8](http://flake8.pycqa.org/en/latest/) code checker, and the [isort](https://github.com/timothycrosley/isort) import orderer, so certain code style requirements will be enforced when running automated tests. See the Black, Flake8, and isort documentation for more information. ## Continuous integration Each pull request triggers a continuous integration build which runs the test suite. You can view these builds on [build.torsion.org](https://build.torsion.org/borgmatic-collective/borgmatic), and they're also linked from the commits list on each pull request. ## Documentation development Updates to borgmatic's documentation are welcome. It's formatted in Markdown and located in the `docs/` directory in borgmatic's source, plus the `README.md` file at the root. To build and view a copy of the documentation with your local changes, run the following from the root of borgmatic's source code: ```bash sudo scripts/dev-docs ``` This requires Docker to be installed on your system. You may not need to use sudo if your non-root user has permissions to run Docker. After you run the script, you can point your web browser at http://localhost:8080 to view the documentation with your changes. To close the documentation server, ctrl-C the script. 
Note that it does not currently auto-reload, so you'll need to stop it and re-run it for any additional documentation changes to take effect. borgmatic-1.7.9/docs/how-to/extract-a-backup.md000066400000000000000000000102471440467744700213460ustar00rootroot00000000000000--- title: How to extract a backup eleventyNavigation: key: 📤 Extract a backup parent: How-to guides order: 7 --- ## Extract When the worst happens—or you want to test your backups—the first step is to figure out which archive to extract. A good way to do that is to use the `rlist` action: ```bash borgmatic rlist ``` (No borgmatic `rlist` action? Try `list` instead or upgrade borgmatic!) That should yield output looking something like: ```text host-2023-01-01T04:05:06.070809 Tue, 2023-01-01 04:05:06 [...] host-2023-01-02T04:06:07.080910 Wed, 2023-01-02 04:06:07 [...] ``` Assuming that you want to extract the archive with the most up-to-date files and therefore the latest timestamp, run a command like: ```bash borgmatic extract --archive host-2023-01-02T04:06:07.080910 ``` (No borgmatic `extract` action? Upgrade borgmatic!) Or simplify this to: ```bash borgmatic extract --archive latest ``` The `--archive` value is the name of the archive to extract. This extracts the entire contents of the archive to the current directory, so make sure you're in the right place before running the command—or see below about the `--destination` flag. ## Repository selection If you have a single repository in your borgmatic configuration file(s), no problem: the `extract` action figures out which repository to use. But if you have multiple repositories configured, then you'll need to specify the repository path containing the archive to extract. Here's an example: ```bash borgmatic extract --repository repo.borg --archive host-2023-... ``` ## Extract particular files Sometimes, you want to extract a single deleted file, rather than extracting everything from an archive. To do that, tack on one or more `--path` values. 
For instance: ```bash borgmatic extract --archive latest --path path/1 path/2 ``` Note that the specified restore paths should not have a leading slash. Like a whole-archive extract, this also extracts into the current directory by default. So for example, if you happen to be in the directory `/var` and you run the `extract` command above, borgmatic will extract `/var/path/1` and `/var/path/2`. ### Searching for files If you're not sure which archive contains the files you're looking for, you can [search across archives](https://torsion.org/borgmatic/docs/how-to/inspect-your-backups/#searching-for-a-file). ## Extract to a particular destination By default, borgmatic extracts files into the current directory. To instead extract files to a particular destination directory, use the `--destination` flag: ```bash borgmatic extract --archive latest --destination /tmp ``` When using the `--destination` flag, be careful not to overwrite your system's files with extracted files unless that is your intent. ## Database restoration The `borgmatic extract` command only extracts files. To restore a database, please see the [documentation on database backups and restores](https://torsion.org/borgmatic/docs/how-to/backup-your-databases/). borgmatic does not perform database restoration as part of `borgmatic extract` so that you can extract files from your archive without impacting your live databases. ## Mount a filesystem If instead of extracting files, you'd like to explore the files from an archive as a [FUSE](https://en.wikipedia.org/wiki/Filesystem_in_Userspace) filesystem, you can use the `borgmatic mount` action. Here's an example: ```bash borgmatic mount --archive latest --mount-point /mnt ``` This mounts the entire archive on the given mount point `/mnt`, so that you can look in there for your files. 
Omit the `--archive` flag to mount all archives (lazy-loaded): ```bash borgmatic mount --mount-point /mnt ``` Or use the "latest" value for the archive to mount the latest archive: ```bash borgmatic mount --archive latest --mount-point /mnt ``` If you'd like to restrict the mounted filesystem to only particular paths from your archive, use the `--path` flag, similar to the `extract` action above. For instance: ```bash borgmatic mount --archive latest --mount-point /mnt --path var/lib ``` When you're all done exploring your files, unmount your mount point. No `--archive` flag is needed: ```bash borgmatic umount --mount-point /mnt ``` borgmatic-1.7.9/docs/how-to/index.md000066400000000000000000000001021440467744700173070ustar00rootroot00000000000000--- eleventyNavigation: key: How-to guides permalink: false --- borgmatic-1.7.9/docs/how-to/inspect-your-backups.md000066400000000000000000000113271440467744700223020ustar00rootroot00000000000000--- title: How to inspect your backups eleventyNavigation: key: 🔎 Inspect your backups parent: How-to guides order: 5 --- ## Backup progress By default, borgmatic runs proceed silently except in the case of errors. But if you'd like to to get additional information about the progress of the backup as it proceeds, use the verbosity option: ```bash borgmatic --verbosity 1 ``` This lists the files that borgmatic is archiving, which are those that are new or changed since the last backup. 
Or, for even more progress and debug spew: ```bash borgmatic --verbosity 2 ``` ## Backup summary If you're less concerned with progress during a backup, and you only want to see the summary of archive statistics at the end, you can use the stats option when performing a backup: ```bash borgmatic --stats ``` ## Existing backups borgmatic provides convenient actions for Borg's [`list`](https://borgbackup.readthedocs.io/en/stable/usage/list.html) and [`info`](https://borgbackup.readthedocs.io/en/stable/usage/info.html) functionality: ```bash borgmatic list borgmatic info ``` You can change the output format of `borgmatic list` by specifying your own with `--format`. Refer to the [borg list --format documentation](https://borgbackup.readthedocs.io/en/stable/usage/list.html#the-format-specifier-syntax) for available values. *(No borgmatic `list` or `info` actions? Upgrade borgmatic!)* New in borgmatic version 1.7.0 There are also `rlist` and `rinfo` actions for displaying repository information with Borg 2.x: ```bash borgmatic rlist borgmatic rinfo ``` See the [borgmatic command-line reference](https://torsion.org/borgmatic/docs/reference/command-line/) for more information. ### Searching for a file New in version 1.6.3 Let's say you've accidentally deleted a file and want to find the backup archive(s) containing it. `borgmatic list` provides a `--find` flag for exactly this purpose. For instance, if you're looking for a `foo.txt`: ```bash borgmatic list --find foo.txt ``` This will list your archives and indicate those with files matching `*foo.txt*` anywhere in the archive. The `--find` parameter can alternatively be a [Borg pattern](https://borgbackup.readthedocs.io/en/stable/usage/help.html#borg-patterns). To limit the archives searched, use the standard `list` parameters for filtering archives such as `--last`, `--archive`, `--match-archives`, etc. 
For example, to search only the last five archives: ```bash borgmatic list --find foo.txt --last 5 ``` ## Listing database dumps If you have enabled borgmatic's [database hooks](https://torsion.org/borgmatic/docs/how-to/backup-your-databases/), you can list backed up database dumps via borgmatic. For example: ```bash borgmatic list --archive latest --find .borgmatic/*_databases ``` This gives you a listing of all database dump files contained in the latest archive, complete with file sizes. ## Logging By default, borgmatic logs to a local syslog-compatible daemon if one is present and borgmatic is running in a non-interactive console. Where those logs show up depends on your particular system. If you're using systemd, try running `journalctl -xe`. Otherwise, try viewing `/var/log/syslog` or similiar. You can customize the log level used for syslog logging with the `--syslog-verbosity` flag, and this is independent from the console logging `--verbosity` flag described above. For instance, to get additional information about the progress of the backup as it proceeds: ```bash borgmatic --syslog-verbosity 1 ``` Or to increase syslog logging to include debug spew: ```bash borgmatic --syslog-verbosity 2 ``` ### Rate limiting If you are using rsyslog or systemd's journal, be aware that by default they both throttle the rate at which logging occurs. So you may need to change either [the global rate limit](https://www.rootusers.com/how-to-change-log-rate-limiting-in-linux/) or [the per-service rate limit](https://www.freedesktop.org/software/systemd/man/journald.conf.html#RateLimitIntervalSec=) if you're finding that borgmatic logs are missing. Note that the [sample borgmatic systemd service file](https://torsion.org/borgmatic/docs/how-to/set-up-backups/#systemd) already has this rate limit disabled for systemd's journal. 
### Logging to file If you don't want to use syslog, and you'd rather borgmatic log to a plain file, use the `--log-file` flag: ```bash borgmatic --log-file /path/to/file.log ``` Note that if you use the `--log-file` flag, you are responsible for rotating the log file so it doesn't grow too large, for example with [logrotate](https://wiki.archlinux.org/index.php/Logrotate). Also, there is a `--log-file-verbosity` flag to customize the log file's log level. borgmatic-1.7.9/docs/how-to/make-backups-redundant.md000066400000000000000000000040111440467744700225300ustar00rootroot00000000000000--- title: How to make backups redundant eleventyNavigation: key: ☁️ Make backups redundant parent: How-to guides order: 3 --- ## Multiple repositories If you really care about your data, you probably want more than one backup of it. borgmatic supports this in its configuration by specifying multiple backup repositories. Here's an example: ```yaml location: # List of source directories to backup. source_directories: - /home - /etc # Paths of local or remote repositories to backup to. repositories: - ssh://1234@usw-s001.rsync.net/./backups.borg - ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo - /var/lib/backups/local.borg ``` When you run borgmatic with this configuration, it invokes Borg once for each configured repository in sequence. (So, not in parallel.) That means—in each repository—borgmatic creates a single new backup archive containing all of your source directories. Here's a way of visualizing what borgmatic does with the above configuration: 1. Backup `/home` and `/etc` to `1234@usw-s001.rsync.net:backups.borg` 2. Backup `/home` and `/etc` to `k8pDxu32@k8pDxu32.repo.borgbase.com:repo` 3. Backup `/home` and `/etc` to `/var/lib/backups/local.borg` This gives you redundancy of your data across repositories and even potentially across providers. 
See [Borg repository URLs documentation](https://borgbackup.readthedocs.io/en/stable/usage/general.html#repository-urls) for more information on how to specify local and remote repository paths. ### Different options per repository What if you want borgmatic to backup to multiple repositories—while also setting different options for each one? In that case, you'll need to use [a separate borgmatic configuration file for each repository](https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/) instead of the multiple repositories in one configuration file as described above. That's because all of the repositories in a particular configuration file get the same options applied. borgmatic-1.7.9/docs/how-to/make-per-application-backups.md000066400000000000000000000212341440467744700236410ustar00rootroot00000000000000--- title: How to make per-application backups eleventyNavigation: key: 🔀 Make per-application backups parent: How-to guides order: 1 --- ## Multiple backup configurations You may find yourself wanting to create different backup policies for different applications on your system or even for different backup repositories. For instance, you might want one backup configuration for your database data directory and a different configuration for your user home directories. Or one backup configuration for your local backups with a different configuration for your remote repository. The way to accomplish that is pretty simple: Create multiple separate configuration files and place each one in a `/etc/borgmatic.d/` directory. 
For instance, for applications: ```bash sudo mkdir /etc/borgmatic.d sudo generate-borgmatic-config --destination /etc/borgmatic.d/app1.yaml sudo generate-borgmatic-config --destination /etc/borgmatic.d/app2.yaml ``` Or, for repositories: ```bash sudo mkdir /etc/borgmatic.d sudo generate-borgmatic-config --destination /etc/borgmatic.d/repo1.yaml sudo generate-borgmatic-config --destination /etc/borgmatic.d/repo2.yaml ``` When you set up multiple configuration files like this, borgmatic will run each one in turn from a single borgmatic invocation. This includes, by default, the traditional `/etc/borgmatic/config.yaml` as well. Each configuration file is interpreted independently, as if you ran borgmatic for each configuration file one at a time. In other words, borgmatic does not perform any merging of configuration files by default. If you'd like borgmatic to merge your configuration files, for instance to avoid duplication of settings, see below about configuration includes. Additionally, the `~/.config/borgmatic.d/` directory works the same way as `/etc/borgmatic.d`. If you need even more customizability, you can specify alternate configuration paths on the command-line with borgmatic's `--config` flag. (See `borgmatic --help` for more information.) For instance, if you want to schedule your various borgmatic backups to run at different times, you'll need multiple entries in your [scheduling software of choice](https://torsion.org/borgmatic/docs/how-to/set-up-backups/#autopilot), each entry using borgmatic's `--config` flag instead of relying on `/etc/borgmatic.d`. ## Configuration includes Once you have multiple different configuration files, you might want to share common configuration options across these files with having to copy and paste them. To achieve this, you can put fragments of common configuration options into a file, and then include or inline that file into one or more borgmatic configuration files. 
Let's say that you want to include common retention configuration across all of your configuration files. You could do that in each configuration file with the following: ```yaml location: ... retention: !include /etc/borgmatic/common_retention.yaml ``` And then the contents of `common_retention.yaml` could be: ```yaml keep_hourly: 24 keep_daily: 7 ``` To prevent borgmatic from trying to load these configuration fragments by themselves and complaining that they are not valid configuration files, you should put them in a directory other than `/etc/borgmatic.d/`. (A subdirectory is fine.) When a configuration include is a relative path, borgmatic loads it from either the current working directory or from the directory containing the file doing the including. Note that this form of include must be a YAML value rather than a key. For example, this will not work: ```yaml location: ... # Don't do this. It won't work! !include /etc/borgmatic/common_retention.yaml ``` But if you do want to merge in a YAML key *and* its values, keep reading! ## Include merging If you need to get even fancier and merge in common configuration options, you can perform a YAML merge of included configuration using the YAML `<<` key. For instance, here's an example of a main configuration file that pulls in retention and consistency options via a single include: ```yaml <<: !include /etc/borgmatic/common.yaml location: ... ``` This is what `common.yaml` might look like: ```yaml retention: keep_hourly: 24 keep_daily: 7 consistency: checks: - name: repository ``` Once this include gets merged in, the resulting configuration would have all of the `location` options from the original configuration file *and* the `retention` and `consistency` options from the include. Prior to borgmatic version 1.6.0, when there's a section collision between the local file and the merged include, the local file's section takes precedence. 
So if the `retention` section appears in both the local file and the include file, the included `retention` is ignored in favor of the local `retention`. But see below about deep merge in version 1.6.0+. Note that this `<<` include merging syntax is only for merging in mappings (configuration options and their values). But if you'd like to include a single value directly, please see the section above about standard includes. Additionally, there is a limitation preventing multiple `<<` include merges per section. So for instance, that means you can do one `<<` merge at the global level, another `<<` within each configuration section, etc. (This is a YAML limitation.) ### Deep merge New in version 1.6.0 borgmatic performs a deep merge of merged include files, meaning that values are merged at all levels in the two configuration files. This allows you to include common configuration—up to full borgmatic configuration files—while overriding only the parts you want to customize. For instance, here's an example of a main configuration file that pulls in two retention options via an include and then overrides one of them locally: ```yaml <<: !include /etc/borgmatic/common.yaml location: ... retention: keep_daily: 5 ``` This is what `common.yaml` might look like: ```yaml retention: keep_hourly: 24 keep_daily: 7 ``` Once this include gets merged in, the resulting configuration would have a `keep_hourly` value of `24` and an overridden `keep_daily` value of `5`. When there's an option collision between the local file and the merged include, the local file's option takes precedence. New in version 1.6.1 Colliding list values are appended together. ## Configuration overrides In more complex multi-application setups, you may want to override particular borgmatic configuration file options at the time you run borgmatic. For instance, you could reuse a common configuration file for multiple applications, but then set the repository for each application at runtime. 
Or you might want to try a variant of an option for testing purposes without actually touching your configuration file. Whatever the reason, you can override borgmatic configuration options at the command-line via the `--override` flag. Here's an example: ```bash borgmatic create --override location.remote_path=/usr/local/bin/borg1 ``` What this does is load your configuration files, and for each one, disregard the configured value for the `remote_path` option in the `location` section, and use the value of `/usr/local/bin/borg1` instead. You can even override multiple values at once. For instance: ```bash borgmatic create --override section.option1=value1 section.option2=value2 ``` This will accomplish the same thing: ```bash borgmatic create --override section.option1=value1 --override section.option2=value2 ``` Note that each value is parsed as an actual YAML string, so you can even set list values by using brackets. For instance: ```bash borgmatic create --override location.repositories=[test1.borg,test2.borg] ``` Or even a single list element: ```bash borgmatic create --override location.repositories=[/root/test.borg] ``` If your override value contains special YAML characters like colons, then you'll need quotes for it to parse correctly: ```bash borgmatic create --override location.repositories="['user@server:test.borg']" ``` There is not currently a way to override a single element of a list without replacing the whole list. Note that if you override an option of the list type (like `location.repositories`), you do need to use the `[ ]` list syntax. See the [configuration reference](https://torsion.org/borgmatic/docs/reference/configuration/) for which options are list types. (YAML list values look like `- this` with an indentation and a leading dash.) Be sure to quote your overrides if they contain spaces or other characters that your shell may interpret. 
An alternative to command-line overrides is passing in your values via [environment variables](https://torsion.org/borgmatic/docs/how-to/provide-your-passwords/).
### Third-party monitoring services borgmatic integrates with monitoring services like [Healthchecks](https://healthchecks.io/), [Cronitor](https://cronitor.io), [Cronhub](https://cronhub.io), [PagerDuty](https://www.pagerduty.com/), and [ntfy](https://ntfy.sh/) and pings these services whenever borgmatic runs. That way, you'll receive an alert when something goes wrong or (for certain hooks) the service doesn't hear from borgmatic for a configured interval. See [Healthchecks hook](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#healthchecks-hook), [Cronitor hook](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#cronitor-hook), [Cronhub hook](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#cronhub-hook), [PagerDuty hook](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#pagerduty-hook), and [ntfy hook](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#ntfy-hook) below for how to configure this. While these services offer different features, you probably only need to use one of them at most. ### Third-party monitoring software You can use traditional monitoring software to consume borgmatic JSON output and track when the last successful backup occurred. See [scripting borgmatic](https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#scripting-borgmatic) below for how to configure this. ### Borg hosting providers Most [Borg hosting providers](https://torsion.org/borgmatic/#hosting-providers) include monitoring and alerting as part of their offering. This gives you a dashboard to check on all of your backups, and can alert you if the service doesn't hear from borgmatic for a configured interval. 
### Consistency checks While not strictly part of monitoring, if you really want confidence that your backups are not only running but are restorable as well, you can configure particular [consistency checks](https://torsion.org/borgmatic/docs/how-to/deal-with-very-large-backups/#consistency-check-configuration) or even script full [extract tests](https://torsion.org/borgmatic/docs/how-to/extract-a-backup/). ## Error hooks When an error occurs during a `create`, `prune`, `compact`, or `check` action, borgmatic can run configurable shell commands to fire off custom error notifications or take other actions, so you can get alerted as soon as something goes wrong. Here's a not-so-useful example: ```yaml hooks: on_error: - echo "Error while creating a backup or running a backup hook." ``` The `on_error` hook supports interpolating particular runtime variables into the hook command. Here's an example that assumes you provide a separate shell script to handle the alerting: ```yaml hooks: on_error: - send-text-message.sh "{configuration_filename}" "{repository}" ``` In this example, when the error occurs, borgmatic interpolates runtime values into the hook command: the borgmatic configuration filename, and the path of the repository. Here's the full set of supported variables you can use here: * `configuration_filename`: borgmatic configuration filename in which the error occurred * `repository`: path of the repository in which the error occurred (may be blank if the error occurs in a hook) * `error`: the error message itself * `output`: output of the command that failed (may be blank if an error occurred without running a command) Note that borgmatic runs the `on_error` hooks only for `create`, `prune`, `compact`, or `check` actions or hooks in which an error occurs, and not other actions. borgmatic does not run `on_error` hooks if an error occurs within a `before_everything` or `after_everything` hook. 
For more about hooks, see the [borgmatic hooks documentation](https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/), especially the security information. ## Healthchecks hook [Healthchecks](https://healthchecks.io/) is a service that provides "instant alerts when your cron jobs fail silently", and borgmatic has built-in integration with it. Once you create a Healthchecks account and project on their site, all you need to do is configure borgmatic with the unique "Ping URL" for your project. Here's an example: ```yaml hooks: healthchecks: ping_url: https://hc-ping.com/addffa72-da17-40ae-be9c-ff591afb942a ``` With this hook in place, borgmatic pings your Healthchecks project when a backup begins, ends, or errors. Specifically, after the `before_backup` hooks run, borgmatic lets Healthchecks know that it has started if any of the `create`, `prune`, `compact`, or `check` actions are run. Then, if the actions complete successfully, borgmatic notifies Healthchecks of the success after the `after_backup` hooks run, and includes borgmatic logs in the payload data sent to Healthchecks. This means that borgmatic logs show up in the Healthchecks UI, although be aware that Healthchecks currently has a 10-kilobyte limit for the logs in each ping. If an error occurs during any action or hook, borgmatic notifies Healthchecks after the `on_error` hooks run, also tacking on logs including the error itself. But the logs are only included for errors that occur when a `create`, `prune`, `compact`, or `check` action is run. You can customize the verbosity of the logs that are sent to Healthchecks with borgmatic's `--monitoring-verbosity` flag. The `--list` and `--stats` flags may also be of use. See `borgmatic create --help` for more information. Additionally, see the [borgmatic configuration file](https://torsion.org/borgmatic/docs/reference/configuration/) for additional Healthchecks options. 
You can configure Healthchecks to notify you by a [variety of mechanisms](https://healthchecks.io/#welcome-integrations) when backups fail or it doesn't hear from borgmatic for a certain period of time. ## Cronitor hook [Cronitor](https://cronitor.io/) provides "Cron monitoring and uptime healthchecks for websites, services and APIs", and borgmatic has built-in integration with it. Once you create a Cronitor account and cron job monitor on their site, all you need to do is configure borgmatic with the unique "Ping API URL" for your monitor. Here's an example: ```yaml hooks: cronitor: ping_url: https://cronitor.link/d3x0c1 ``` With this hook in place, borgmatic pings your Cronitor monitor when a backup begins, ends, or errors. Specifically, after the `before_backup` hooks run, borgmatic lets Cronitor know that it has started if any of the `prune`, `compact`, `create`, or `check` actions are run. Then, if the actions complete successfully, borgmatic notifies Cronitor of the success after the `after_backup` hooks run. And if an error occurs during any action or hook, borgmatic notifies Cronitor after the `on_error` hooks run. You can configure Cronitor to notify you by a [variety of mechanisms](https://cronitor.io/docs/cron-job-notifications) when backups fail or it doesn't hear from borgmatic for a certain period of time. ## Cronhub hook [Cronhub](https://cronhub.io/) provides "instant alerts when any of your background jobs fail silently or run longer than expected", and borgmatic has built-in integration with it. Once you create a Cronhub account and monitor on their site, all you need to do is configure borgmatic with the unique "Ping URL" for your monitor. Here's an example: ```yaml hooks: cronhub: ping_url: https://cronhub.io/start/1f5e3410-254c-11e8-b61d-55875966d031 ``` With this hook in place, borgmatic pings your Cronhub monitor when a backup begins, ends, or errors. 
Specifically, after the `before_backup` hooks run, borgmatic lets Cronhub know that it has started if any of the `prune`, `compact`, `create`, or `check` actions are run. Then, if the actions complete successfully, borgmatic notifies Cronhub of the success after the `after_backup` hooks run. And if an error occurs during any action or hook, borgmatic notifies Cronhub after the `on_error` hooks run. Note that even though you configure borgmatic with the "start" variant of the ping URL, borgmatic substitutes the correct state into the URL when pinging Cronhub ("start", "finish", or "fail"). You can configure Cronhub to notify you by a [variety of mechanisms](https://docs.cronhub.io/integrations.html) when backups fail or it doesn't hear from borgmatic for a certain period of time. ## PagerDuty hook In case you're new here: [borgmatic](https://torsion.org/borgmatic/) is simple, configuration-driven backup software for servers and workstations, powered by [Borg Backup](https://www.borgbackup.org/). [PagerDuty](https://www.pagerduty.com/) provides incident monitoring and alerting. borgmatic has built-in integration that can notify you via PagerDuty as soon as a backup fails, so you can make sure your backups keep working. First, create a PagerDuty account and service on their site. On the service, add an integration and set the Integration Type to "borgmatic". Then, configure borgmatic with the unique "Integration Key" for your service. Here's an example: ```yaml hooks: pagerduty: integration_key: a177cad45bd374409f78906a810a3074 ``` With this hook in place, borgmatic creates a PagerDuty event for your service whenever backups fail. Specifically, if an error occurs during a `create`, `prune`, `compact`, or `check` action, borgmatic sends an event to PagerDuty before the `on_error` hooks run. Note that borgmatic does not contact PagerDuty when a backup starts or ends without error. 
You can configure PagerDuty to notify you by a [variety of mechanisms](https://support.pagerduty.com/docs/notifications) when backups fail. If you have any issues with the integration, [please contact us](https://torsion.org/borgmatic/#support-and-contributing). ## ntfy hook [ntfy](https://ntfy.sh) is a free, simple service (either hosted or self-hosted) which offers simple pub/sub push notifications to multiple platforms including [web](https://ntfy.sh/stats), [Android](https://play.google.com/store/apps/details?id=io.heckel.ntfy) and [iOS](https://apps.apple.com/us/app/ntfy/id1625396347). Since push notifications for regular events might soon become quite annoying, this hook fires only on errors by default in order to instantly alert you to issues. The `states` list can override this. As ntfy is unauthenticated, it isn't a suitable channel for any private information, so the default messages are intentionally generic. These can be overridden, depending on your risk assessment. Each `state` can have its own custom messages, priorities and tags or, if none are provided, will use the default. An example configuration is shown here, with all the available options, including [priorities](https://ntfy.sh/docs/publish/#message-priority) and [tags](https://ntfy.sh/docs/publish/#tags-emojis): ```yaml hooks: ntfy: topic: my-unique-topic server: https://ntfy.my-domain.com start: title: A Borgmatic backup started message: Watch this space... tags: borgmatic priority: min finish: title: A Borgmatic backup completed successfully message: Nice! tags: borgmatic,+1 priority: min fail: title: A Borgmatic backup failed message: You should probably fix it tags: borgmatic,-1,skull priority: max states: - start - finish - fail ``` ## Scripting borgmatic To consume the output of borgmatic in other software, you can include an optional `--json` flag with `create`, `rlist`, `rinfo`, or `info` to get the output formatted as JSON. 
Note that when you specify the `--json` flag, Borg's other non-JSON output is suppressed so as not to interfere with the captured JSON. Also note that JSON output only shows up at the console, and not in syslog. ### Latest backups All borgmatic actions that accept an `--archive` flag allow you to specify an archive name of `latest`. This lets you get the latest archive without having to first run `borgmatic rlist` manually, which can be handy in automated scripts. Here's an example: ```bash borgmatic info --archive latest ``` borgmatic-1.7.9/docs/how-to/provide-your-passwords.md000066400000000000000000000060461440467744700227040ustar00rootroot00000000000000--- title: How to provide your passwords eleventyNavigation: key: 🔒 Provide your passwords parent: How-to guides order: 2 --- ## Environment variable interpolation If you want to use a Borg repository passphrase or database passwords with borgmatic, you can set them directly in your borgmatic configuration file, treating those secrets like any other option value. But if you'd rather store them outside of borgmatic, whether for convenience or security reasons, read on. New in version 1.6.4 borgmatic supports interpolating arbitrary environment variables directly into option values in your configuration file. That means you can instruct borgmatic to pull your repository passphrase, your database passwords, or any other option values from environment variables. For instance: ```yaml storage: encryption_passphrase: ${MY_PASSPHRASE} ``` This uses the `MY_PASSPHRASE` environment variable as your encryption passphrase. Note that the `{` `}` brackets are required. `$MY_PASSPHRASE` by itself will not work. In the case of `encryption_passphrase` in particular, an alternate approach is to use Borg's `BORG_PASSPHRASE` environment variable, which doesn't even require setting an explicit `encryption_passphrase` value in borgmatic's configuration file. 
For [database configuration](https://torsion.org/borgmatic/docs/how-to/backup-your-databases/), the same approach applies. For example: ```yaml hooks: postgresql_databases: - name: users password: ${MY_DATABASE_PASSWORD} ``` This uses the `MY_DATABASE_PASSWORD` environment variable as your database password. ### Interpolation defaults If you'd like to set a default for your environment variables, you can do so with the following syntax: ```yaml storage: encryption_passphrase: ${MY_PASSPHRASE:-defaultpass} ``` Here, "`defaultpass`" is the default passphrase if the `MY_PASSPHRASE` environment variable is not set. Without a default, if the environment variable doesn't exist, borgmatic will error. ### Disabling interpolation To disable this environment variable interpolation feature entirely, you can pass the `--no-environment-interpolation` flag on the command-line. Or if you'd like to disable interpolation within a single option value, you can escape it with a backslash. For instance, if your password is literally `${A}@!`: ```yaml storage: encryption_passphrase: \${A}@! ``` ### Related features Another way to override particular options within a borgmatic configuration file is to use a [configuration override](https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#configuration-overrides) on the command-line. But please be aware of the security implications of specifying secrets on the command-line. Additionally, borgmatic action hooks support their own [variable interpolation](https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/#variable-interpolation), although in that case it's for particular borgmatic runtime values rather than (only) environment variables. 
borgmatic-1.7.9/docs/how-to/restore-a-backup.md000066400000000000000000000001741440467744700213550ustar00rootroot00000000000000 borgmatic-1.7.9/docs/how-to/run-arbitrary-borg-commands.md000066400000000000000000000075241440467744700235460ustar00rootroot00000000000000--- title: How to run arbitrary Borg commands eleventyNavigation: key: 🔧 Run arbitrary Borg commands parent: How-to guides order: 11 --- ## Running Borg with borgmatic Borg has several commands (and options) that borgmatic does not currently support. Sometimes though, as a borgmatic user, you may find yourself wanting to take advantage of these off-the-beaten-path Borg features. You could of course drop down to running Borg directly. But then you'd give up all the niceties of your borgmatic configuration. You could file a [borgmatic ticket](https://torsion.org/borgmatic/#issues) or even a [pull request](https://torsion.org/borgmatic/#contributing) to add the feature. But what if you need it *now*? That's where borgmatic's support for running "arbitrary" Borg commands comes in. Running Borg commands with borgmatic takes advantage of the following, all based on your borgmatic configuration files or command-line arguments: * configured repositories (automatically runs your Borg command once for each one) * local and remote Borg binary paths * SSH settings and Borg environment variables * lock wait settings * verbosity ### borg action New in version 1.5.15 The way you run Borg with borgmatic is via the `borg` action. Here's a simple example: ```bash borgmatic borg break-lock ``` (No `borg` action in borgmatic? Time to upgrade!) This runs Borg's `break-lock` command once on each configured borgmatic repository. Notice how the repository isn't present in the specified Borg options, as that part is provided by borgmatic. You can also specify Borg options for relevant commands: ```bash borgmatic borg rlist --short ``` This runs Borg's `rlist` command once on each configured borgmatic repository. 
(The native `borgmatic rlist` action should be preferred for most use.) What if you only want to run Borg on a single configured borgmatic repository when you've got several configured? Not a problem. ```bash borgmatic borg --repository repo.borg break-lock ``` And what about a single archive? ```bash borgmatic borg --archive your-archive-name rlist ``` ### Limitations borgmatic's `borg` action is not without limitations: * The Borg command you want to run (`create`, `list`, etc.) *must* come first after the `borg` action. If you have any other Borg options to specify, provide them after. For instance, `borgmatic borg list --progress` will work, but `borgmatic borg --progress list` will not. * borgmatic supplies the repository/archive name to Borg for you (based on your borgmatic configuration or the `borgmatic borg --repository`/`--archive` arguments), so do not specify the repository/archive otherwise. * The `borg` action will not currently work for any Borg commands like `borg serve` that do not accept a repository/archive name. * Do not specify any global borgmatic arguments to the right of the `borg` action. (They will be passed to Borg instead of borgmatic.) If you have global borgmatic arguments, specify them *before* the `borg` action. * Unlike other borgmatic actions, you cannot combine the `borg` action with other borgmatic actions. This is to prevent ambiguity in commands like `borgmatic borg list`, in which `list` is both a valid Borg command and a borgmatic action. In this case, only the Borg command is run. * Unlike normal borgmatic actions that support JSON, the `borg` action will not disable certain borgmatic logs to avoid interfering with JSON output. * Unlike other borgmatic actions, the `borg` action captures (and logs) all output, so interactive prompts or flags like `--progress` will not work as expected. In general, this `borgmatic borg` feature should be considered an escape valve—a feature of second resort. 
In the long run, it's preferable to wrap Borg commands with borgmatic actions that can support them fully. borgmatic-1.7.9/docs/how-to/run-preparation-steps-before-backups.md000066400000000000000000000002301440467744700253520ustar00rootroot00000000000000 borgmatic-1.7.9/docs/how-to/set-up-backups.md000066400000000000000000000350251440467744700210570ustar00rootroot00000000000000--- title: How to set up backups eleventyNavigation: key: 📥 Set up backups parent: How-to guides order: 0 --- ## Installation Many users need to backup system files that require privileged access, so these instructions install and run borgmatic as root. If you don't need to backup such files, then you are welcome to install and run borgmatic as a non-root user. First, manually [install Borg](https://borgbackup.readthedocs.io/en/stable/installation.html), at least version 1.1. borgmatic does not install Borg automatically so as to avoid conflicts with existing Borg installations. Then, download and install borgmatic as a [user site installation](https://packaging.python.org/tutorials/installing-packages/#installing-to-the-user-site) by running the following command: ```bash sudo pip3 install --user --upgrade borgmatic ``` This installs borgmatic and its commands at the `/root/.local/bin` path. Your pip binary may have a different name than "pip3". Make sure you're using Python 3.7+, as borgmatic does not support older versions of Python. The next step is to ensure that borgmatic's commands available are on your system `PATH`, so that you can run borgmatic: ```bash echo export 'PATH="$PATH:/root/.local/bin"' >> ~/.bashrc source ~/.bashrc ``` This adds `/root/.local/bin` to your non-root user's system `PATH`. If you're using a command shell other than Bash, you may need to use different commands here. You can check whether all of this worked with: ```bash sudo borgmatic --version ``` If borgmatic is properly installed, that should output your borgmatic version. 
As an alternative to adding the path to the `~/.bashrc` file, if you're using sudo to run borgmatic, you can configure [sudo's `secure_path` option](https://man.archlinux.org/man/sudoers.5) to include borgmatic's path. ### Global install option If you try the user site installation above, and have problems making borgmatic commands runnable on your system `PATH`, an alternate approach is to install borgmatic globally. The following uninstalls borgmatic, and then reinstalls it such that borgmatic commands are on the default system `PATH`: ```bash sudo pip3 uninstall borgmatic sudo pip3 install --upgrade borgmatic ``` The main downside of a global install is that borgmatic is less cleanly separated from the rest of your Python software, and there's the theoretical possibility of library conflicts. But if you're okay with that, for instance on a relatively dedicated system, then a global install can work out fine. ### Other ways to install Besides the approaches described above, there are several other options for installing borgmatic: * [Docker image with scheduled backups](https://hub.docker.com/r/b3vis/borgmatic/) (+ Docker Compose files) * [Docker image with multi-arch and Docker CLI support](https://hub.docker.com/r/modem7/borgmatic-docker/) * [Debian](https://tracker.debian.org/pkg/borgmatic) * [Ubuntu](https://launchpad.net/ubuntu/+source/borgmatic) * [Fedora official](https://bodhi.fedoraproject.org/updates/?search=borgmatic) * [Fedora unofficial](https://copr.fedorainfracloud.org/coprs/heffer/borgmatic/) * [Arch Linux](https://www.archlinux.org/packages/community/any/borgmatic/) * [Alpine Linux](https://pkgs.alpinelinux.org/packages?name=borgmatic) * [OpenBSD](http://ports.su/sysutils/borgmatic) * [openSUSE](https://software.opensuse.org/package/borgmatic) * [macOS (via Homebrew)](https://formulae.brew.sh/formula/borgmatic) * [macOS (via MacPorts)](https://ports.macports.org/port/borgmatic/) * [Ansible role](https://github.com/borgbase/ansible-role-borgbackup) * 
[virtualenv](https://virtualenv.pypa.io/en/stable/) ## Hosting providers Need somewhere to store your encrypted off-site backups? The following hosting providers include specific support for Borg/borgmatic—and fund borgmatic development and hosting when you use these links to sign up. (These are referral links, but without any tracking scripts or cookies.)
  • BorgBase: Borg hosting service with support for monitoring, 2FA, and append-only repos
Additionally, [rsync.net](https://www.rsync.net/products/borg.html) and [Hetzner](https://www.hetzner.com/storage/storage-box) have compatible storage offerings, but do not currently fund borgmatic development or hosting. ## Configuration After you install borgmatic, generate a sample configuration file: ```bash sudo generate-borgmatic-config ``` If that command is not found, then it may be installed in a location that's not in your system `PATH` (see above). Try looking in `~/.local/bin/`. This generates a sample configuration file at `/etc/borgmatic/config.yaml` by default. If you'd like to use another path, use the `--destination` flag, for instance: `--destination ~/.config/borgmatic/config.yaml`. You should edit the configuration file to suit your needs, as the generated values are only representative. All options are optional except where indicated, so feel free to ignore anything you don't need. Note that the configuration file is organized into distinct sections, each with a section name like `location:` or `storage:`. So take care that if you uncomment a particular option, also uncomment its containing section name, or else borgmatic won't recognize the option. Also be sure to use spaces rather than tabs for indentation; YAML does not allow tabs. You can get the same sample configuration file from the [configuration reference](https://torsion.org/borgmatic/docs/reference/configuration/), the authoritative set of all configuration options. This is handy if borgmatic has added new options since you originally created your configuration file. Also check out how to [upgrade your configuration](https://torsion.org/borgmatic/docs/how-to/upgrade/#upgrading-your-configuration). ### Encryption If you encrypt your Borg repository with a passphrase or a key file, you'll either need to set the borgmatic `encryption_passphrase` configuration variable or set the `BORG_PASSPHRASE` environment variable. 
See the [repository encryption section](https://borgbackup.readthedocs.io/en/stable/quickstart.html#repository-encryption) of the Borg Quick Start for more info. Alternatively, you can specify the passphrase programmatically by setting either the borgmatic `encryption_passcommand` configuration variable or the `BORG_PASSCOMMAND` environment variable. See the [Borg Security FAQ](http://borgbackup.readthedocs.io/en/stable/faq.html#how-can-i-specify-the-encryption-passphrase-programmatically) for more info. ### Redundancy If you'd like to configure your backups to go to multiple different repositories, see the documentation on how to [make backups redundant](https://torsion.org/borgmatic/docs/how-to/make-backups-redundant/). ### Validation If you'd like to validate that your borgmatic configuration is valid, the following command is available for that: ```bash sudo validate-borgmatic-config ``` This command's exit status (`$?` in Bash) is zero when configuration is valid and non-zero otherwise. Validating configuration can be useful if you generate your configuration files via configuration management, or you want to double check that your hand edits are valid. ## Repository creation Before you can create backups with borgmatic, you first need to create a Borg repository so you have a destination for your backup archives. (But skip this step if you already have a Borg repository.) To create a repository, run a command like the following with Borg 1.x: ```bash sudo borgmatic init --encryption repokey ``` New in borgmatic version 1.7.0 Or, with Borg 2.x: ```bash sudo borgmatic rcreate --encryption repokey-aes-ocb ``` (Note that `repokey-chacha20-poly1305` may be faster than `repokey-aes-ocb` on certain platforms like ARM64.) This uses the borgmatic configuration file you created above to determine which local or remote repository to create, and encrypts it with the encryption passphrase specified there if one is provided. 
Read about [Borg encryption modes](https://borgbackup.readthedocs.io/en/stable/usage/init.html#encryption-mode-tldr) for the menu of available encryption modes. Also, optionally check out the [Borg Quick Start](https://borgbackup.readthedocs.org/en/stable/quickstart.html) for more background about repository creation. Note that borgmatic skips repository creation if the repository already exists. This supports use cases like ensuring a repository exists prior to performing a backup. If the repository is on a remote host, make sure that your local user has key-based SSH access to the desired user account on the remote host. ## Backups Now that you've configured borgmatic and created a repository, it's a good idea to test that borgmatic is working. So to run borgmatic and start a backup, you can invoke it like this: ```bash sudo borgmatic create --verbosity 1 --list --stats ``` (No borgmatic `--list` flag? Try `--files` instead, leave it out, or upgrade borgmatic!) The `--verbosity` flag makes borgmatic show the steps it's performing. The `--list` flag lists each file that's new or changed since the last backup. And `--stats` shows summary information about the created archive. All of these flags are optional. As the command runs, you should eyeball the output to see if it matches your expectations based on your configuration. If you'd like to specify an alternate configuration file path, use the `--config` flag. See `borgmatic --help` and `borgmatic create --help` for more information. ## Default actions If you omit `create` and other actions, borgmatic runs through a set of default actions: `prune` any old backups as per the configured retention policy, `compact` segments to free up space (with Borg 1.2+, borgmatic 1.5.23+), `create` a backup, *and* `check` backups for consistency problems due to things like file damage. 
For instance: ```bash sudo borgmatic --verbosity 1 --list --stats ``` ## Autopilot Running backups manually is good for validating your configuration, but I'm guessing that you want to run borgmatic automatically, say once a day. To do that, you can configure a separate job runner to invoke it periodically. ### cron If you're using cron, download the [sample cron file](https://projects.torsion.org/borgmatic-collective/borgmatic/src/master/sample/cron/borgmatic). Then, from the directory where you downloaded it: ```bash sudo mv borgmatic /etc/cron.d/borgmatic sudo chmod +x /etc/cron.d/borgmatic ``` If borgmatic is installed at a different location than `/root/.local/bin/borgmatic`, edit the cron file with the correct path. You can also modify the cron file if you'd like to run borgmatic more or less frequently. ### systemd If you're using systemd instead of cron to run jobs, you can still configure borgmatic to run automatically. (If you installed borgmatic from [Other ways to install](https://torsion.org/borgmatic/docs/how-to/set-up-backups/#other-ways-to-install), you may already have borgmatic systemd service and timer files. If so, you may be able to skip some of the steps below.) First, download the [sample systemd service file](https://projects.torsion.org/borgmatic-collective/borgmatic/raw/branch/master/sample/systemd/borgmatic.service) and the [sample systemd timer file](https://projects.torsion.org/borgmatic-collective/borgmatic/raw/branch/master/sample/systemd/borgmatic.timer). Then, from the directory where you downloaded them: ```bash sudo mv borgmatic.service borgmatic.timer /etc/systemd/system/ sudo systemctl enable --now borgmatic.timer ``` Review the security settings in the service file and update them as needed. If `ProtectSystem=strict` is enabled and local repositories are used, then the repository path must be added to the `ReadWritePaths` list. Feel free to modify the timer file based on how frequently you'd like borgmatic to run. 
### launchd in macOS If you run borgmatic in macOS with launchd, you may encounter permissions issues when reading files to backup. If that happens to you, you may be interested in an [unofficial work-around for Full Disk Access](https://projects.torsion.org/borgmatic-collective/borgmatic/issues/293). ## Niceties ### Shell completion borgmatic includes a shell completion script (currently only for Bash) to support tab-completing borgmatic command-line actions and flags. Depending on how you installed borgmatic, this may be enabled by default. But if it's not, start by installing the `bash-completion` Linux package or the [`bash-completion@2`](https://formulae.brew.sh/formula/bash-completion@2) macOS Homebrew formula. Then, install the shell completion script globally: ```bash sudo su -c "borgmatic --bash-completion > $(pkg-config --variable=completionsdir bash-completion)/borgmatic" ``` If you don't have `pkg-config` installed, you can try the following path instead: ```bash sudo su -c "borgmatic --bash-completion > /usr/share/bash-completion/completions/borgmatic" ``` Or, if you'd like to install the script for only the current user: ```bash mkdir --parents ~/.local/share/bash-completion/completions borgmatic --bash-completion > ~/.local/share/bash-completion/completions/borgmatic ``` Finally, restart your shell (`exit` and open a new shell) so the completions take effect. ### Colored output borgmatic produces colored terminal output by default. It is disabled when a non-interactive terminal is detected (like a cron job), or when you use the `--json` flag. Otherwise, you can disable it by passing the `--no-color` flag, setting the environment variable `PY_COLORS=False`, or setting the `color` option to `false` in the `output` section of configuration. 
## Troubleshooting ### "found character that cannot start any token" error If you run borgmatic and see an error looking something like this, it probably means you've used tabs instead of spaces: ``` test.yaml: Error parsing configuration file An error occurred while parsing a configuration file at config.yaml: while scanning for the next token found character that cannot start any token in "config.yaml", line 230, column 1 ``` YAML does not allow tabs. So to fix this, replace any tabs in your configuration file with the requisite number of spaces. ### libyaml compilation errors borgmatic depends on a Python YAML library (ruamel.yaml) that will optionally use a C YAML library (libyaml) if present. But if it's not installed, then when installing or upgrading borgmatic, you may see errors about compiling the YAML library. If so, not to worry. borgmatic should install and function correctly even without the C YAML library. And borgmatic won't be any faster with the C library present, so you don't need to go out of your way to install it. borgmatic-1.7.9/docs/how-to/upgrade.md000066400000000000000000000171511440467744700176430ustar00rootroot00000000000000--- title: How to upgrade borgmatic and Borg eleventyNavigation: key: 📦 Upgrade borgmatic/Borg parent: How-to guides order: 12 --- ## Upgrading borgmatic In general, all you should need to do to upgrade borgmatic is run the following: ```bash sudo pip3 install --user --upgrade borgmatic ``` See below about special cases with old versions of borgmatic. Additionally, if you installed borgmatic [without using `pip3 install --user`](https://torsion.org/borgmatic/docs/how-to/set-up-backups/#other-ways-to-install), then your upgrade process may be different. 
### Upgrading your configuration The borgmatic configuration file format is almost always backwards-compatible from release to release without any changes, but you may still want to update your configuration file when you upgrade to take advantage of new configuration options. This is completely optional. If you prefer, you can add new configuration options manually. If you do want to upgrade your configuration file to include new options, use the `generate-borgmatic-config` script with its optional `--source` flag that takes the path to your original configuration file. If provided with this path, `generate-borgmatic-config` merges your original configuration into the generated configuration file, so you get all the newest options and comments. Here's an example: ```bash generate-borgmatic-config --source config.yaml --destination config-new.yaml ``` New options start as commented out, so you can edit the file and decide whether you want to use each one. There are a few caveats to this process. First, when generating the new configuration file, `generate-borgmatic-config` replaces any comments you've written in your original configuration file with the newest generated comments. Second, the script adds back any options you had originally deleted, although it does so with the options commented out. And finally, any YAML includes you've used in the source configuration get flattened out into a single generated file. As a safety measure, `generate-borgmatic-config` refuses to modify configuration files in-place. So it's up to you to review the generated file and, if desired, replace your original configuration file with it. ### Upgrading from borgmatic 1.0.x borgmatic changed its configuration file format in version 1.1.0 from INI-style to YAML. This better supports validation, and has a more natural way to express lists of values. To upgrade your existing configuration, first upgrade to the new version of borgmatic. 
As of version 1.1.0, borgmatic no longer supports Python 2. If you were already running borgmatic with Python 3, then you can upgrade borgmatic in-place: ```bash sudo pip3 install --user --upgrade borgmatic ``` But if you were running borgmatic with Python 2, uninstall and reinstall instead: ```bash sudo pip uninstall borgmatic sudo pip3 install --user borgmatic ``` The pip binary names for different versions of Python can differ, so the above commands may need some tweaking to work on your machine. Once borgmatic is upgraded, run: ```bash sudo upgrade-borgmatic-config ``` That will generate a new YAML configuration file at /etc/borgmatic/config.yaml (by default) using the values from both your existing configuration and excludes files. The new version of borgmatic will consume the YAML configuration file instead of the old one. ### Upgrading from atticmatic You can ignore this section if you're not an atticmatic user (the former name of borgmatic). borgmatic only supports Borg now and no longer supports Attic. So if you're an Attic user, consider switching to Borg. See the [Borg upgrade command](https://borgbackup.readthedocs.io/en/stable/usage.html#borg-upgrade) for more information. Then, follow the instructions above about setting up your borgmatic configuration files. If you were already using Borg with atticmatic, then you can upgrade from atticmatic to borgmatic by running the following commands: ```bash sudo pip3 uninstall atticmatic sudo pip3 install --user borgmatic ``` That's it! borgmatic will continue using your /etc/borgmatic configuration files. ## Upgrading Borg To upgrade to a new version of Borg, you can generally install a new version the same way you installed the previous version, paying attention to any instructions included with each Borg release changelog linked from the [releases page](https://github.com/borgbackup/borg/releases). Some more major Borg releases require additional steps that borgmatic can help with. 
### Borg 1.2 to 2.0 New in borgmatic version 1.7.0 Upgrading Borg from 1.2 to 2.0 requires manually upgrading your existing Borg 1 repositories before use with Borg or borgmatic. Here's how you can accomplish that. Start by upgrading borgmatic as described above to at least version 1.7.0 and Borg to 2.0. Then, rename your repository in borgmatic's configuration file to a new repository path. The repository upgrade process does not occur in-place; you'll create a new repository with a copy of your old repository's data. Let's say your original borgmatic repository configuration file looks something like this: ```yaml location: repositories: - original.borg ``` Change it to a new (not yet created) repository path: ```yaml location: repositories: - upgraded.borg ``` Then, run the `rcreate` action (formerly `init`) to create that new Borg 2 repository: ```bash borgmatic rcreate --verbosity 1 --encryption repokey-blake2-aes-ocb \ --source-repository original.borg --repository upgraded.borg ``` This creates an empty repository and doesn't actually transfer any data yet. The `--source-repository` flag is necessary to reuse key material from your Borg 1 repository so that the subsequent data transfer can work. The `--encryption` value above selects the same chunk ID algorithm (`blake2`) commonly used in Borg 1, thereby making deduplication work across transferred archives and new archives. If you get an error about "You must keep the same ID hash" from Borg, that means the encryption value you specified doesn't correspond to your source repository's chunk ID algorithm. In that case, try not using `blake2`: ```bash borgmatic rcreate --verbosity 1 --encryption repokey-aes-ocb \ --source-repository original.borg --repository upgraded.borg ``` Read about [Borg encryption modes](https://borgbackup.readthedocs.io/en/2.0.0b5/usage/rcreate.html#encryption-mode-tldr) for more details. 
To transfer data from your original Borg 1 repository to your newly created Borg 2 repository: ```bash borgmatic transfer --verbosity 1 --upgrader From12To20 --source-repository \ original.borg --repository upgraded.borg --dry-run borgmatic transfer --verbosity 1 --upgrader From12To20 --source-repository \ original.borg --repository upgraded.borg borgmatic transfer --verbosity 1 --upgrader From12To20 --source-repository \ original.borg --repository upgraded.borg --dry-run ``` The first command with `--dry-run` tells you what Borg is going to do during the transfer, the second command actually performs the transfer/upgrade (this might take a while), and the final command with `--dry-run` again provides confirmation of success—or tells you if something hasn't been transferred yet. Note that by omitting the `--upgrader` flag, you can also do archive transfers between related Borg 2 repositories without upgrading, even down to individual archives. For more on that functionality, see the [Borg transfer documentation](https://borgbackup.readthedocs.io/en/2.0.0b5/usage/transfer.html). That's it! Now you can use your new Borg 2 repository as normal with borgmatic. If you've got multiple repositories, repeat the above process for each. borgmatic-1.7.9/docs/reference/000077500000000000000000000000001440467744700164065ustar00rootroot00000000000000borgmatic-1.7.9/docs/reference/command-line.md000066400000000000000000000005131440467744700212720ustar00rootroot00000000000000--- title: Command-line reference eleventyNavigation: key: ⌨️ Command-line reference parent: Reference guides order: 1 --- ## borgmatic options Here are all of the available borgmatic command-line options. 
This includes the separate options for each action sub-command: ``` {% include borgmatic/command-line.txt %} ``` borgmatic-1.7.9/docs/reference/configuration.md000066400000000000000000000006521440467744700216020ustar00rootroot00000000000000--- title: Configuration reference eleventyNavigation: key: ⚙️ Configuration reference parent: Reference guides order: 0 --- ## Configuration file Here is a full sample borgmatic configuration file including all available options: ```yaml {% include borgmatic/config.yaml %} ``` Note that you can also [download this configuration file](https://torsion.org/borgmatic/docs/reference/config.yaml) for use locally. borgmatic-1.7.9/docs/reference/index.md000066400000000000000000000001051440467744700200330ustar00rootroot00000000000000--- eleventyNavigation: key: Reference guides permalink: false --- borgmatic-1.7.9/docs/static/000077500000000000000000000000001440467744700157375ustar00rootroot00000000000000borgmatic-1.7.9/docs/static/borgbase.png000066400000000000000000000132761440467744700202420ustar00rootroot00000000000000PNG  IHDR%b/R@iCCPICC profile(}=H@_SU38dNDE EjVL.& IZpc⬫ ~:)HK -b=8ǻ{wP+1tñάJW""fs@u_'ď\W}~XJRfK'"hABggTa{2i#E,AQ()6k2Trȱ2t(~wk&'p |qݏ ԫ} \M|^mj#njp >xRgM`^{kHQWSzwwF? 
r-$D pHYs%&tIME &B !tEXtCommentCreated with GIMPWIDATxy<ǟ0c{"B,V%ERD тnڴȍ67J RiED.T :O=fꊹ?>>ۑqyPO{3egiδcNAxyDETUfϜ3}su!0ЛVwp*-y.[[,ع}nȨں^# RHPp?N+<D4t=n*ִOx'Bo;~SXsiXyQ?FӦZr[۩>*x́>|l""?pdBmmm>>}y~zw7eqh*9HIm{ 5473i(FmjUVU ;1Feu5D*HFhW8`^^^3iiK i4ZyeՓ§Wn|շ4'7Rb_tF+vvv~)IIk s#=ъ$:+ѵoJJDl>Kp8].omk$%$2FvKxyy|BBBBJ f%o :-͙??u'o9B{G\".Fp]<,<;6ٜޞr#ߤRDE4얤MĪ"nWzeU $1vbK#Cŗؘ|'MzBO'I#dCôMmlKyDmlיeke)#-5cTuUNh5JR[emRƆ]%P};CVcI/d=̭rŲI:ڨwcgϘSY}yY9)bFSV;;JKIO4?;]oQ[7sjdYKk` .&6Q{2k#RV^U+tkҷNkpy3VS[gFŞ)8{=.|ZU]:::vfr;6{I\^K,AxX vKs .]z/Ŀb8zޖ ;(1*fsfg3RJ߽ɎPi,:-M@v`9wusn'#8yF)wR3^նC>ɺQ'F~BL4հo`0E`nKF-gʝb/2s/_aJحFm }3yHHoh|`>۠AYukruMƢi験$-͉ >Fqa;JS"ν~;8|RX:TOfύXkimmi$ _ϚWUQbTdj?GyYAfXh~#-Y9#W4Y("DZZZ1L:%*+ Q͌ a:`_tf6`IL✼ vwwgd=(*.qzUT-+#F j7"z T ]/:)G )`ԝbOaNlߒ~># 9wvC\&83 ;~8;}anM^N+g}N!,zGwp#$t:йq;icr&$]Rָ8 #-iq7y?GMVgu9.>fyyyǏ/f9;$]aK&Z۾dl%\嚺:ˍeu֮tX뭢zCXxx5qbw]+!6wy]+Lqpt5Z$G`A,43\una>*zm,H,g hAъ1BAXyfo!AA4&;0we Da(WV]iVPHP# rz[6zmp_sk^Aܛxw~d0ĸ+IOc,zs6&v7*%!>J$%EӍ0A')+8uo #LE7RUQNNy.":n%cߎWT~99:}TԋHI=Omlmke񯶺N#\vc 4^q7)¥^9gdIEEOM7z-s jH#0[l=\$1=s3~e%A^OL]YwOݥVc vOo/0x Mkxy3 ۿiˣ: fݴDȡ޴nq&B_L@q4}Q(׮{7N#th?**֙3lPTgFsZ:JlvRr$ӛmmn^C;F8ػ8[_Y~8`XLn'ys{===7o]h}9DƦ&٬/tttԠU'kXR{{G`hة9f)7ljPL:phiߕ/Z~1nWW٘K^f3WU9E%YԷw _[R~~F&f){l~ńj q1eK4z{{[[ۨMMoJJ ޸utK榳 (==uO;iw3Dž,?o*J FWge_V]Skp>vpۿ\<1#ܺgkea<@yh!Aweg$$]G3CcJi)ɾ~um]F'"̙ab0e}\K~nk[۞sMgMA/@-*.ιz3y(~ a0U~6g$ūsf/^lh8qUQsIQo\*'oOwncFC1,h 9ߏ(~;>.. )9~sSv'IyywddySR:(+p{LEi4Kk7yέ<Qn#;BF:,̅f0LJ.Af~?yiFGe&%E;tt^^I$()ZYܵey\w@ph;`KxA&՛)\Za#&|l(yv{{]ۿMH c6zQI?"~ttt\Lǩʞ7RlwowB012eb,-%O$Rޗ$Lppx0 @o7@oz7z7   ڭIENDB`borgmatic-1.7.9/docs/static/borgmatic.png000066400000000000000000000070111440467744700204130ustar00rootroot00000000000000PNG  IHDR<qbKGD pHYs B(xtIME Dw IDATxyˑ]$ /"@AԠ J%xĨ`Ru FK,( l ,,{#.߷;oOL|Tu%tLO{B@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@( "h;Z@CM V[ 迃݁߮mv3h>>jm)t48 oX LHLOxq p5"T Ti>-+Q"j[_szTVM$x&JݪxjtW$09x`]Xcp~xYe({gs |OƟ6Z~>;b( M(w+c50xe;;ܞ huZEo⛖Ud1 x%#B.Շ ?ɦ[\Az:rꋲ5S)^3"jx6pSJAnW^5n/FySfErI0OhA: [>HX^0T e8ȘTOLByE90r.oݞky? 
,mThV@tmQШNP_K7eK-۝b#U es_&fheR\Z -+j?My02]L٘z et-%~^ty"K]i?wLP9#4;鷵?eׄTk1 LZ@n]|'̣ETLc@n6+0lSf~{-$|v83#E'9G;ruewj=*P"*& x w Y%6da)s ]o H5Sxr`r. S?qLڃޟLf_2HcQBOQ!v,poDWVEij@ro95_k=8[ < [AkX= z="|C)O9n>Hqg6oנ >ؘ)V32&U *6Y0$aGXڬ'z#sCp7B^GzN NX=.x*UtDxL, Y6=0Ik 5$+֗iK19uCQ>`O%qH}Œ097y=p3rKR8JN>aOWobIMF(_ {8fOX35G'jOq*QD戮}&*EJ,SrbOQi6$M뇵=&oaMoƢuLX 9x$%M#O>ʣ+K/u.f;\f'E_Hju,*}.CyuN'=\aGuR;`078fM4T1 ³Q1\גP *Xۧo!>E,/oh^Ga `]RL7ak .j$'Rxh@7U&3"ȢV-˶p[esEqI]$T3 B%X=b(JTcI[! [I0D9_$@[+~ejRp汛1 ļl71E mK<:di C-ˆX9[4'I2j;ZF=</8][A6_fIlϟj|^<$TcPR? BL-̪$*,~"z'܏ 7U<vKXa.av0k\uRi|l||]xHp#¦*[qĘn"P")&9~J(WPzѾ Cx5g'G0`dЮY+h}{Brm+$Kک`+VnPFe8exos?NLU5_o;lG>(}_UQBj c't!oC5/tη<9 rO F`X&noTL( 5(ӏ|${|d<eT_ޢ=&U3QyWT8cbw\Qb[M0qqNU!T13 Y3Wڣȵ!F2YhU(f6;.Ӿ1淔 '19 3;I(GV8ɶ?*//'b],k"Zfkя lx"2gO`ät`^JX,_lz/ L٤1U>I(_b͋P|<"U9*ue@K(!=;)_gXL4*?1ƉO 8 sı*(nɱ^ar@|FT{pPW| 4DtG4m ]Q1"^mo ]9%: wg kz,)^ y! W MP#ch"0䩒fQⰘ胼ͧXJXXf5yaPc^(;t~Oj8t\w;Nһ&) [ ,n;@{lv;jz P>j0ϪROYUPӜF? wD>ef;fjıU78cpMikb91ɵ x8 12T%6 ;]A|ԭ(̈́ {E%Y 'c'F}Sg Y 1\\Db߻[93;Yf;YUi ԼR~ wU 9zs+?W=5v!z,+CABA >Qz6TX$C"qi&U|{}Js,YR9f*R@ UXLtVCF*Xv O6r7b=G*K^ *Dʧk!PS~gDIENDB`borgmatic-1.7.9/docs/static/borgmatic.svg000066400000000000000000000032121440467744700204250ustar00rootroot00000000000000borgmatic-1.7.9/docs/static/cronhub.png000066400000000000000000000557601440467744700201220ustar00rootroot00000000000000PNG  IHDRD4zTXtRaw profile type exifxY$ Dy $.!AL7=#3%SU.\#*6j(zFe'z|{y}ۧxsQ.z49?.hs|}=}韁>?|P|̇Yg F~?|=C|?$yL*d5=s$>G_>mE!?m[8}'}(qE)N?~Dzo!5~~yw7 8~6m'N\S*Ɵ=w#𛭮_FDv[2Xɍ-sK#ts dcK'WFZ33N2н<ʎ/W"!~bO|SxOA}bi-y,QB>1]RI4[-%d/24hެ2Jrzܬ= *P.$-z@h-ZiסJ-Uk:O&4mhK/]{>y4aFcɤLƚ?9UkiNJUkmyˆ®Ǟ'tSO;3/Xr[o;gկY9s:k铵$k?־ NsFrIdytŞJɞ9YYfVcﵶ(2 JNyD*{P(}<zukR9=!{.Dԩ-QL\k֤i6Zɦn_({[#eD]q8O'X@-D4g@b2I5DsX+sr d6VKuw+$?c+ (Je%?ʺ*`<[ ό8 `fXY+T.hLe6# {hg<q,0]FL֠W+vhtBֹLV(1K<@Vx> I<⎞SaCc:FKj_Ĩ8۬k6_:%!5pWVt_&b`d֓Q_k]wLSw'ZcqZW> XK&u,qVBQ=ЙGCʒJǖ$!g]WjᘦB$0h䧯=ЍWq5 qr(H]Ա@m?2?[}D= 6вV 9k1c$tF' {ZGÜ\X$dLu_K UTp PشF k|ƂuWi:Yp>29cGg Ėβ|()&K9%pz̙oFvxb[ؿ#Œc:OCHxSyRVBpR@:QTy=ܷbxXX&eJ 
Eæu)2?B0%ܰ2N%tAyF3oݘt$- 8m|kac1)̃{ Qg'2Xw.9u` Є b}¡C욀0J݇Lg ѥS%%' L5ژ dK#P߭:n̔-LuhR8 M.V(_p_EQ 946[)y]+e2o*.5&Q#9E_AI:vkQ~B{q~ruCHhqW dec@cpt"B! []=.lCSZ"n~3*^sY}hrQV<~;>p&}Esu-TݠcɃ]Q频}u Ӈ4`&R= (!Uю"JnW];ˠ}Ymޱ+H,E \zG?c>K0Xz, k}<ü"ܤ̑ 'Hr?~陠Y<1~efEЦ?-Ay q+=tݻF~h$M;=">N6*s#34KH@fx` 4yt m]w験:3K6#dYLĈ*,'¹a-l$=X.hqXz' ɍ9Fp-V_tة|nYE?=_XVC`wrU1U[jloC:nB,!XcȠa&Tۂ| Ã؃ψ~~@@#bfzTXtRaw profile type iptcx=A@'%7e[{ M]%oC`@j3~s'r> +iTXtXML:com.adobe.xmp J=&> pHYs  tIME / IDATx{|\Wy/Glj%@!i ~ȱ-dpIuRyK@{i:oiҗC{ R %04HXۺM  [[z?ޣ,HAy^k?RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ-Pjql$qmlGCA q XkCɮ5 ltdUc=IrMQV[Y^)V J)QJI D)V J)uChR'V@RJiRJ+RZ(C'ѕRJD'ѕR`j*@R"*@RJiRJiԪs J+RZ(RZ(*RJ]ϟ;TZEu_VTpb@6:H/yyP=)Ndy;ҽMZy\Ɨ/vJB+jU}*mGTB˨OOOfyQ2djZiӟaSR1N'UUn%R2rVTmSYIS 7߬JҜs3[mg|P!en2+ qz۩cDZZ揑bԔ1 8R<#־hÆ $;B>H;)N8Q;\!.c^+yB>GwB-XOD&exx\SĚ ўjIyNB}}}v ?ŤrQ I!SB Bh? B'%nno>ZDCǃD粓>&<7F$=Y/ !~@O&x֩, J*+\.`" =sn`$$H;YvcI<O$ET_%Z@$ BK]|>ZLLͥ|i{FXccFҩEAϨT*\< mZ>,r"R?F`zz[:̳_›X "$A@EY ޿n&S{GFFfjs\uB럮ղ4uΫ6e4 MdZr>?^ lj#ft* jE뮻RTzZ+s>E9x:_X5"LPT[L"A~qy 03L[rj_T-4SSU"xefK }*s-` "> {ș7c^{gn5Ŏo@,b~BJ[wm]C: _槽0 fZy@xvcփspi΍ey$"Ad}239M_*(I)"[+Z D| M!#4똨2~/`f8{B0eR?AQ#km|bWm?VZڎPJjYt6g(<,d1 ?_67fL)Rc".Os9C6ƘS1͒fi>Woi^B㭽 $ST ") /3B_J%i"s-0FЍ[kϗ>S+V>1ffRW/ 2{DĹ4MI~Z#wýE$̼!k peǶ*~0a}s>Ӧ JB\ꌱR9~#s+X87'B@WgatygѢUM9ZiP_43#7diwI Q01izB _"ryxj/$+˗/߰9 !kt+iTCB-/Dt6|{Zy4 'T|=oFǩ|GwN(ß.9C%"L|Nb+t_B @>?tYZ +H%4KiELHeBǦR"ө cVvygml޼XX2iLin޼8I5@'}ƘSM :{ '>Q*{X B]X=VGSvA[جgF8|Zۓ(k$D!Ykz$q}}} sn1Gk p D ݞN`$4{ⵎbTcǎ.xQwȖFrl"⭵]μ۷f~` ~+3(9/ӽCq7w x[X!&g౱C l{&[5ЉxüNnf_ˆes捙 |Ul}Q^:Tgo)N3"d{ݻ`O;7o_ BA:X$d]zN@9O1gĊ:!!Oˀ쌉a ,` C 5!Kԍ1 oܱcGW86Zm߾f3CWyffc1Zc5Y^BFcm7sCX+[vu0/^"/!Ck 3?݊i(KQ.^Z+夛|92K: ǡĭTmBKgn_ޘ782GlQG=M>T`r 8Н_bC'K@+suΥ#λ[%c i-7{؎"늠U$y|R8 /Oyޡ5Y8>:ޥ1W$`ѫ} YlfK :٭ +L4c\QO02xZ41Y`H/"a#ٳ,ŮHE2 Dװ>Vf1xG?=Mw]yS9xuYmPȆ~0햗CNw U s%3]Љއcf}"YE߿"k!c̦Lv^ !8k޹W|"qhclvA~ lF\YoSqq\'`U8O_4.@'\ EƘ󂈕l-`\._066v-q 
;1!x`7c7M]wu`h\R5F'Ӎ󮿨@Q MAv5{O ڟ FLDH~)X erlG01qT7SƘv)6C4LŞyCVT!錹k1=#&6N;Zff:nrΞle<,@rcs6g~EpF3޷o }*<?96zqW'{m6f]Zdރ<6*f#!?σ[ ^ſqȧnҲ,5p___{H2M[ QWmp ewHʩ1Bn`{N$/kN3fa׹t/`f_ޓPAنP>T*|c/!ne^^d<v-w [nQ?kCgM-6Zd=3N9pN!D&Ir|_+Yp )Py6!Ϭv 7g<W #DFo0M 2 !"H=> ےgÆ # LOO=3==]8&6 P6[m>gr,ŤybQ'"_^%RuO<{) ٰE3UyQj(zQ~ܽk\~Zf>CA*oh /^]I>Bxdd$=ZTlҕ$/RWwgE$b6}z),ֈ$ ye[Oy{?83}+]K>X;R/tddccv3f?G|f6i6$Xk9Mݗ'Dz# gU{qdd|p藌1opI{Q\4҄bwW&wgDZ\{-3olՒe&vQzmϊVxs ]TZ$9Z""+ />3ӟ c9IWT2,9ky4StDc udOMNN@8s'/6¹i Mf%|n5xEJ`ΝNevq3;5t`);{[y/< .[-\T&'{c(GY鼧go M2/孍 [ߞs.B6Щ瘊 Fƒ[`dSEo9m|::.qrttj)}/ {;=wfE''ձFv0Hs-E~$ND>\zEmēc^$<"{ EK˻ds17w6S! aÆ)vv<| މ_cӞ1|6ey @5Z^\x!GrO6$ԑ-k"`m-<rO>H8m&_ F;S|}14jh@D$OiäMa!v>D}9a[~E?n~[;dfC:6-MAnm.9Bo}X{%BD$laLVZYeg s,l,W-[h4LNN 2P;CXi'Zj1>%{h`wn5e B333f9,Ɔm;eۋ{b/[/B"lpO ~!PWtZ#C;4mcZ?:k [a,j*,pf @y)P'k\ [\Ȧ|(u?+ IDAT3`n̥*m=h7 ~hT'E%R*"R#~q*C+!U{S;͕N >/˒G۹bᡇ:%[_4[uj,U.(F qxG usp"݁n{/NC'lIk62Eδ7R66t5Mk$ jCj`]y"#{]C+1dU|;I:DݽY hd Kp&a"9.:fvgthiE:Z}*ZxסZ:s.H{/{X}0c5̟[)xt6 md1IȆvR\~n!9ؑGɁx@(1$:ؽ:F;DNpZyh}ă!3pcnHvQtM?dim;#zsZV⧚gsqȷM:S:A;9{dM:R-Y#z 3XKJGX)M@Gt֮'/*k\^侼mg~E1B"1O^w!74( UPȚțUvT r8TJB1uo馹@L,so[U̮gR0jd[\ޭ|zyn{[[} g5ӏTnE^O۪86@yo{@XE<.ʇl_; @[ؽ{{5 "G"HYc;B!H83sZ-V!x!O>ZbC!.inXHG =P>޻N:K"@OK$&/#mq$ɽ}}}Q}}W> $YGD$BdI ÄU58*{AG|iDKރ ?=~퍦"Chiy!A6 ʖD!y>5d#V\V!+J@JS%vFy$oQTriIh%6;mn>f>;lyƘ^**Z8ڶT.u?0ɜb}CѥYa'BߘuZL>V*7== z5-V du6B켜gLLLZme8l^_*oHZ=VvQVo{sϴ $Ifq-Hi@/ޔ7:hWR1}}}vdddv c#|YL$"ya)qy%r DZ㘧-q,"1Mou^j55wZX)cZ{s8_6222GʼʃGV8~.@dZ)CiYoW,"0-HH>dQf>-Pt1[yз jorb Ttw/D&[;皮F3XkxW2Rq"F/$ozFWWby|>}g1{[=Di]U@+B0Dp:1=#{Qդ ]1y;r&@BD {p;38*O$'{c<r_߻kO :~>53ƜݔѭqChCo}0p y$.Z}0{g9{O6yx\=cSs6?s-PfٓU䭾ܸ{===' '@} ݴM^l%<Mv T.f:Yyɖ {c"b\< &ރK4C.`/fc.*j^Ȏ'FBӵZ}kID@d[~/Xk" ýq"O$31gJv2q2"3OyZ(%DW0t-cSRJOa?o:84EȘ(σ,ݝ=Y) }˭86Β#{w6![Ieؘ_`_v$_le_8{/;qi,?5W@nt0"*1&f>N8^jӅl%}Ebˋ$I~OBY/h BٺwA mftQ0=眀9{KdzV޹a_D/[_|y@z#KS/zZqꓭ8;@nG˱b btisxP)! 
!}I+{:~AV_`λ[5Œ)2mu'V;X]IUxZxদJb&GGoѩgq~ay^H u|{keıo>ʗ#I rm Vy[#4a"p^iP<ٽ{xDZ{B{0S ْNCz@Utzzֽ2>佟2Rt;ԍK/WVho8}Hn ރ wGZ'` $kw?4E/M~ O[˙bZi߮5CMo?&3spV |3G*o xV-'^X6: "rg򚏯Q#(xC}}}QI#Wjf'\=zs锵[ݙN1: ?|ݣBCjL7z"`c,316 a?$D/vZس"s8Vv c`jjA01ʒUD(yA$/Irw1aVkb dBV|N Q)bB:μGkDT8ݒsgps9ֹ(X뜻RFPań迃u޻G5%iZg5d>eEƉO-Y˅ʺ4ME,[MAT\"Sק{q'vyC$fdsGI_V8OYԙ;ޜ$ɽ m`GjI^Z[ʗ QA<>{@cjO%I:u\9k-BP_$XE&M1{p____-WmY;8Q||s7JSX7E8S`lt'oWSrQ+RV*^pFQAx"="xr{0d0OF_ iwDZ7%ocuW4c+NT*~+MӻldK! e^Gw=CWJkw3\PAZS !x\muQC*Jٽ{/yAZO5`nH =u0Q)*y8LJw+p6""f;5?F+HQT !p޿gbw+7<ƇVGwH$T*fϮ]wP׸ڨ tG4R"HT2!q>AT\!|=<`|S"(JQK~nd= dRBvbb&F_]OlE2Ӣ"ex@ulMwݻ>]$Xk 4W"cgX6_B6~E+Wy }Pw.hry:ƴRtV JŌﺅap.nDL0NFsd5x "ݶ50|%al>v=E cK+KMd5ΥxPZ7X@&ym>BxFQd)΍HzYBWy d#޹޿muWLNU*srMs#}%k@ooﲷ&$qVRO}5H^|aKQUG-2J6Jr{-œoG~x"{MDpPX%Gʏ簿 ӫ@|Q~?ݍA^QzϑL><8,OFe[d_XO5̌"^b](f8"2Q)*wٺ^yȣpdPzݢ}qA~%LXssn||& 4hA6݈~yw`fEQ$?cKyrrYRV;6˟_˘B6&+>(tY+o spsKI h9zJ눩[ bvf.iT}jj*͛nJW4}۬ ۭ417j~,1HY\\}npgW*uo[ QTŒKOmܦbv tZT*iZng (۩q X3HX_Z REplh%6J(-4 YN^sZcD#Ɵx}?KHlf6Q!k 23\t.G ]4Ǻl5#A$tz?fGJ]%8}͢%"]^7٘0qqPfl_;u1 %m[|V"JC 1&ilT1;o A/|>>991.)ކeyȮ+YJȔx}Ɋg}(Ijjm0%s5H`j-N@lZ$4|!|@B$7>sKqZZ$IG6}v9ćaO7٪,)䝼Q,s4=@3(J%LMMAW~M\E!iΟ[Jr |s{s8<%c='kiq_>$9F1o9{W/z}s"a>pVy@#ne6ߺyH {aǎ]###|K!Mc,c;d5h|vF<~Xrg$`B,tӟr]^V6 nE%*ځM16{!L1Uձ.kA$ _ Eg%DTp:'`=@b w^aSd|Ԕ===2۟*EK@t` g|Z(@?v&>01==]A !D!ȷ#4~)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)RJ)Rk~E SIENDB`borgmatic-1.7.9/docs/static/cronitor.png000066400000000000000000000241741440467744700203140ustar00rootroot00000000000000PNG  IHDRT24zTXtRaw profile type exifx[# D //r|_33?]T` 㧌Cm)~~>d^=u&Bw ǿ.T w+mt{?i7S-皣j9V%o̵͒Bw:5SJ|'(|-Oe7sWZ~O)RǓ|->vwFU>{Y料zxhlvcK'x4R7tL7&Ě-+9sSStRS: @p4%=6:!ĩ9ϞxHsqeGQDSORI?^BIsg3YbIS‰맗wRĥ`hbKER#"YS"MB4ԢI$̵Fqaw4=fɟÐaHiE Hj𣵃)E4Q2dҼZ:hUѦ=K]z{}< +~}1'<9W^e%-]}>nm{y)>>giɀUkmؼ@[v5~ǝ?wPV-?G8Dr:5QpjT `{bOf,LWH&HR~\ O[}jy?'5Ꝺkw'/ךkYH9-w1tijC66UB"A 
RZy搨Rm*ݭx*A=萓Nz*"*۩ڥ}l&5v{fLk7kYFH)`eA6(6r))$Cz; `x棝4gQPڴzi=;M8;D,BZUzݛ]2 qW LOaa(=Oo ܈*{aNTm!V F-h!r7Mj ;{@},bjcRmRXX4dž5Wi{7B=n,2![e85AP/|; R:6Kl$67\rv.gɨ &)mm# fޛbYlRЀM*:-4Ü`ַ>PB , yhkؿ2l0BPd%4Ij$YSM?[cdÚt _u(*|N"s_Z+-oՐ0K ؠ ~55"Uwî*FXP)Daxt?8;"<Бa/iFH4X y\05#H%d\Fԓ}r ℣3 ؙOZ|ve@zrGY;O gb@(iCd*U$gdiPdA: }XchF+ '-ׯs[TL5ㅳDCߩhېw̛9rFT\e Pu!5CC\bϞ[;0oే qEJG<.J3vqia -D)f g1$h5<G&vϙG$ 1LM.;XjeCBS)p8t72 *w<@{kNW*S+hR p~"trt)d'x(`S3Lثzm"D8 V&`bV?l  D|? UZ1!L`GAwd!fS~ ۥ79mN㇀h ` ޔmv;0X\x'zht`Pח㗦#p^PS'dg=~w hFU˗.a LplӯNF\B_-JbP f FQk)uiF/ң怍qaT{Tpx~1H;+L-ɢaSxI*"t;v0%kxM}ztM/.ya;j7D/KmZw#0(P}[l\ݿF9mmy0G 0 3PLA\K9N`uDa⒋,e"m an6` `nv6Ă `XVӹ TB^ t\ٚ6P+j~+B!'̸1h9EK w4@pu$n!xQ i bOqOm?BK \Fɤ-l c5Ol圄5aFOKb(3GNc@6:N"-RM;&.-Q<$a&|Yi:kv~ǨL5| #Lohe~4ȅ6 Ij |b=lڎBIy _FȤ.e&4g=_FZQ"Yy&JE tխA{ R|3r~#陭LFi:gw>h> *h(*5{giy >VFcd5` gRٳ f6uհt%DZpb FO'34V$޼Ndܱq~ϔ6C<B6r4lo9 #lH $qDw̶e0 b}4RZ2'DZI1&S A` XhT//WL"><"0gzTXtRaw profile type iptcx=A @ }Ohs]{112u?öwdtʿ2cxPH8-2*Yt^b n;M{dھEiCCPICC profilex}=H@_[ ␡:Y*U(BP+`r4iHR\ׂUg]\AIEJ_Rhq?{ܽ2S͎ @,# |E0$fswQ}ѫLYs'tAG.q.8a#' 6ۘ x83.+8*kޓ0VNs ,b "Ȩ2,DiH1U#*P!9~?ݭI8b@phlض'@ZJ$"G@6pq=rz%CrM>7e[{ M]%oC`@j3~s'r> DiTXtXML:com.adobe.xmp 3 pHYs%&tIME ,,KtEXtCommentCreated with GIMPWIDATx=PgVZV_B^⋑= dB1E8&¤ⰋӘPk&G2'EC B "KZؕJp$yr+f?}Y>ԯ\)" )"RD!ERD!EB(BHQH" )" )"ERD!ERD(B(BHQuYZN$V9O 2dt !$;8:2nFJ<ȴ(]Ge=k''^`_f4v7nF?FdD1{Ao޹j,v,{:[ӟj5uu(.#ҋyN؄MS\L U)X>Y^&5Suu~_ZIM$B:5<|_ c8K7y/5X+inLKO Y~j!sFc:#!Ӫiwd-fo*'Ȍd/ҩ5 v*'C6SF^?3 Kuc) pGqè~ЎU%|Wg_xKnǧW)-NPn}xEB,mfldmY ،/ݻ[[U:GU.i ?٬0<ɽR_5vbl^,\?=k+?=vQeTW<*eGak箃w2/B_FamSJdۼ|x-/Zb&U(xWT d UehR<͋tx" LxVzη4Q_,/Å/ ۱TGMd3(oc{eZ笳ïZ5P(F[-R['.tZ-$R<~QCڛg}P-sayq*TZ4HЃ([rYjū@*m-Ȧn.ėZػLpj9S˿bQCG:Ug-$4[ U)dI~fpߔ[1 3r}!e?) F&MjZ6!-O<w@UKb5 B$m;]ozGR}ESo`Nk[ c `O+T-ZP >"W/LwY;\I@_N%I5g8HfUǑO)L9`ykq> K)C/ 8 _e~p;&Xsd5VDٍOWtRNSI3 w0Ug)'" `f 6",F]€% IbHܐ!  
D>" ȥ|73" R9ZW;fw$#켻>ߝgp)y[שh;=c&o]v 1cնIf>}ȐtyȡCrOOz|Ռ'5޳lӂV7n9؈MhUfLtunXqI9-qu،zuuWk 꾡В2SozFY1" WOi8G ~Wfn f,zû-Hb$svA"܃6g_ZnΞD H:E_^]G\^o{jfvmmoFs$t@1kk׋{]t@cfT^AMUW: ,uuVދ>ޠh{uv: 0kijzw킍+T@Yi"93 Q7J1z!f3uY$Ŀ "9:G_b|g)Sbw[k+Oi_g3$ i+ 5J_qʰ[m^YH l(` 42 ںɌ%_ 1 ]$xJ]/7 #M ,h٠ F'׭ T[`mr٫.2) !vz?d5~?8LʴrȁTl1YobE u4iêRѢgUW ]љӲP1q (6k~g e6R~gh\r @:,Z G +d@Ha sŤBk)_H+`M6(ŽT5HY`^LR -3zB/@ʈlNzQ#WDPYbtbyr Ű?4^-Ӈzm[耬wLj\KY#Ol̜ l U̳e@pIx>H {0Oxd';][R$p搘2ݩy/L sf>?Z4#z|Tܖ% )O ݙp -ĕ9};@fj!'CEi<~v?0} *#Er*^vB+Jn#sʉ|ٸ] i%z97ہ_:ǵ{ݝ"V( *sB AYt@bO ANG0ѾxFnp!:@"!3w ȎU5mLrXDL9NyT@t(Wz e$kv  aχ˅ڬBxj8Q `ZHv ZN5 |C@桍52uBr_N]>],TJ/- >_?CRD U`8&URYxPj9t(Nym;sp:CM#*吭Mx QbJdX,r`Ag] p=ln)@ ^mWaJo89wvvI^bc`B)w|Q<ޣZf. ;a1c[b@OԖ40_jz~id,ԲKL uv9U"2?I5y] }[̆y(u2t9G4^rqxF  qĕgB8b5 *^<7Q́E$)~rJ⒩H 6)bVh2:UQ\Ŝ*U8 j@.zO101K^)܈|C3$$x%ϜrdhoGTR 6^|1to,KNdˁpN#s4xaㅱzf,aUغg \qܐ:Lș/,:) s;/򆂖\^m*@ '<z!ev)y@BYV65g4TB w(;SXMaQIRfvP8$c'HZ89+xOhdk~m̱ȁĤR|hy.&TbjeU9āo]sLƀod{_3e@td;@|u4fvm4<"<^("^xrfvmx,(u}Ax|yz#38Y2B8^qxLyƧd

#^T|W@WktLIۀtK+@%2MfIƿ^Ǭ9pHr[Iugޔx.zfvmͰ- 'wF2h% iޛuh ȋkܙ:d ~IfuE[Gh^_tk^^0ft= f|i+2jVogIos(շA':89~Pdj4 ~z=C{hxѹ)OU k@}bdh XPMX.4DdZG^P6MuaLorv<:Y5bV65IQ G(RY1'uJdjKKK<-1oKK('䴦-SRg'O L:D^Me(Դ"RIENDB`borgmatic-1.7.9/docs/static/mariadb.png000066400000000000000000000224021440467744700200440ustar00rootroot00000000000000PNG  IHDRx'b zTXtRaw profile type exifxڵi >Ip=A>?({zf[fU)K)q=7^"?WW]u͇~{s._m}}]8#_#t|8'.[ѩ__;];aM}x ÅJ佭6>xϴ5]S'\$5'xaXK9EsSFYH,5ː%W9D>R%w'3̥%3X?}?uO"F9G0I}$C0J2+o Cb 7̓ !߰%/򸑵2wc1YHAYZ9RFU$AE@n\YH'717^[ZTO.,$~F`h7ih.{=jnus:s9ܫ,$kڛ9w}vܽbo-*ZiסS|N=3<[zwyݖ (=VYaӖmk.^yӗY꿼寬7Sq5ΎC9#cf2>"D̵\,BU"[洟Ia\-sXUʯd2WYا #I>eC>q}&>jTBnVM)rhLAٓu葻GS99)4flG~ z)R.f$@vw;y0%rmOo52>o z1EXX׺\iCN_VY$IA%<aV7UZ\u 07.GC=MUfI:V'\S&@WEȑ47GO& ge$}d0`7{b\_1[S.,HMGϤRn>;'#] F̽fE{W- 06c+/.wSA }>ד^]GbRg'[Ct6-uzb[\Xkqo$!gzC 8!Q'>NvH_,X^(?\ⷷM,IZtX(Y cфdWVN·EL`ֶ̄x}\`Оk3imk)T2@!%OR< GOnjiudrW,eYi{a ]Ƈ`u2Ѭ#@4 [+ ʡ%,C/S)f7oHj:Uژz7Ff `(wLyAJb.b5@HLځɣ&X(fߎy1<xJ{CB- ]Q@aeQ-wJSO&vSwp5ryN':d`$bPȨIP$&i0lSI.aX';;zcP?%0GAAlIA78J!~F*p.I*Mٔpq:rMAqJ5s"vD[ԉa,:6ڪ1D%A D aV ۖy><&%Ǹ-W*~j(PwnAdF)>2rIwa}k=U܆m |AiQ.qohļO2ƔyfŦپDļp_PF"̉e@5i P]DGE5!֒5O [xB+O]@1e+~QYwPխRo$ZE`z0X H\$|j{M/~0@flp*@^`muxr@'W=Ճy><pAipȌZop21ß2Ӽ]p(9$1QH)8`i *h;1V"G/:=S'=]j/H.&Q"C`{:׸ !m(2핂F5hpWAC0ub3`FpQ<;7EBi`k!05z^-hvخ5+{3BGyRǣ_s[߄U>w-q^QŊ?hg|;$dvC[e+ßܮ<1AOTՀLӂo^|Cq* (,QOx4 ,EWEmo +hR"#xl/ۅ탳>ʣH3a/&4MXD:`ķ(OK<xKc|9J@b/ ,O&,2r!X`z%( NqS`SH V4o5ĽguLg7Zr#Hp)_.6:5RTksl )OQ H@dx75Bb,:BԢcq -kHqb@Codz tDJ\]@@6mn+3.(ԓE7}SIX$j%jp"Hp3j{a@(i'Ǝqa@bx]oPyV>)9p pQGFq+DQ"OtFQ *:Zld쥋Ft=GzdT?P wY |(=>$1IaHmLz򱋟蠂7@㟛ވ;zF5B0}l2Go;"7pPʤhQA*^yxű+EZj;N;=!B Jͳ'FN;˃l0ja<*l V bphgJBKq&P7#j;6.s2uQV"}nL G;k`fYӠ x\>\ ( ,=0c62L֍TDJ1u i,rr{$&Ax) ̠` 7j -J!̉04PNMR^|S :+OCi O?hu !80"+ hJ" B:A]Ai\g'h2F :<pW:;qx YpiCCPICC profilex}=H@_ӊU;dNDE EjVL.& IZpc⬫ ~:)HK -b=8ǻ{wP-2 nXTLWWt cPf1'Iq_.³Zst>xMArB PLTE0_# )4_-$$-=i39^7*';Js==^@/+GB^IW}J5/QF]T:2Wd[J]^@6eO]eqhE9pS]rK=s~zW\{PA\\UD`\[He[`LbMi[fOlXìkSm[vcnqVrZz;vZĝʧбպļ*bKGDH pHYs.#.#x?vIDATxnJqpȘdpxŃ1b!$$$K]0Yz]]%^\U]]= |@B !B !B 
!B !B !B !^= @  Rw;Z*A .@d%% ?BTO S/Q^ ez tq>w,S+&qWAچڻli^{˧Z8鳵bz[ጟ5=^~t ow_+y(-W[3>ro;!jb;a}޿Sh9cE߱߾~.5#Y:B%n<y%u) 왿R3o~wo {oV>.3oo{mHC~+O~20O_/A`Zm@{ߞ=J,B˻V`_2+G>v{$&P}!z8 yx~3?7Yٓק`6'mIm AnP#kyF>9๼#{'cB O:y/يd56JGi~wHo$8q;@rbk6#2>nC9&YK WdJ_d~@xdǿj`k Rx4!@سLdUu6-M.i@-3E Tw,Cr 䱴`UXrek_HC5=0Gk)Y%i9jlUFi@Gž=F$!F{V;TcgG} RWy$nJZ`<ܟe0vmn>F'k,bRS^i J,9V݋![*n@&ҜHX47?/ٮ5.ia]5 U,1M(3K=iaAvo$Jp:^$`# -IM?lq{:i2&VK:V*|,A̎DY8rttfB^V\i^wRC[0z "PWyQ۟`?tTX͹,f9@ ܌Ao y8Q'5oܷ0C f:)#hQ Ώw%6Se!0'0F}7!=@Ț"_7Hk.oe)1|-6E\9/@Kxi#Nɝc{u|W)˻oSi䵲 1KHȯT=0%IxH1pRmF)0K EtB:<]\K8 `dޒMVG_:kյDAY.~fc5ä.Ȯ@h+dm_YUl ^ͤ,pup;Wj7*z%D`z LE+ǾXO@`27ՀnćͶ*h BS Į&}úNe{:NnUf&hJH~Y%V|O;LQ(0I\s;U? @va;~wP8U)ͭQNPH-Zܥ. `u `N"Zq1x5LE7kOMpBZ]&M#n+L\@&uЍ'mwmYY@Aqx&tU:Һ+@{$e7J9x ;J~v?̃8&T39&H ˯;Хw|?ޑɴZ6gS0-6$LԺ8*1e_+_;ZupY2LX~¯;OJ| :/u!iIs<{P bvP/MX&xTTT@wLi(`>l_їM @0Շk@2M%{ 4?~^mޔ/4IXtX{6*&:g;~\ej|U[W AujkrUE7+]vNy5w4+Y}QeyuSc`QzD[d7^2ax-NL?R 6V0_ta,nn] jРxzт?m@tʚ5E @Ǹv>ȊetVg1i_ra|Rlye$A"{~~*ײַT}á9<.GAwPyU6SG8s;LjA/k閺Z/yz8;4xd1Tp^XP`3LqfThT&d©zǯ@wgBT0TU9k*U`%μrz fo J.kqKc 'nmEM`(7`@K' (D"]4 f "HN7t:/ Qa@n_4n u6%RNr#ځIUo?z!^ bbND[-lW1EN"ޜߺ/b}t28\?s Sҟ&-] rɹ zBԜU## ~K9*ڛfw*~6M$SXIM-4P ~_9/XM.,p @PbkɕqusE۷bi6 `d)٪Eպԡ,YZDȜ~8<ZtMv^sG69Dfzfw3vrotBtUEaߡʆf0]h5瑺W $eϭn|f=@f1RF[j.xض)j\u;XeJ8Ej {bK|p3-:E}Wf,5 `Z㛬JY/~;?xfi6>1m4&^G/ c֙oEA%ONlL7Q풳Av:ɘm+IoF ! 
ˏ /q$+@Psؼ>/XNwNu뽻{Psj90P&-ܠL&mǶ)w~ߐ G!Ŭ[Aλ}Ш2463˯WЋ,cV{ꦺK,?+,Df6:Ԧmp'tAG+q.,̘LjR+ʦF{sY= :_j4FAQ_P Ϯh4-p /k1؍5F#a[qwEh4d_dIre uaݫk4fGMsx*h4-p'cY5 ͡h43{ӫ1=oc|B'&~]7Fh#VczAa(U3&&h4(@e#6,Fl䶇^\`yl94FLd[UWa..:h㓺y4Fc Dp7.3h4Bv{rk6VbxDF d.S@2efGtF 23):oIm4gn*F3%7Q1Dh4MI9: qkظM\F)V'5$*F &h4M1Ig(l*ͥh4&J@4<_7Fh+ @&C;Pi4f @h?>._:Q7Fh+7F0ӫNh4CkOyNh4C-\|5FP[]E3 Oͧh4f!kqn>F rdhYnFF;stj45ܙ^-")V0&]^7FhG |tL [rynFF5崢pnFF:7 ۹Vdu0eusj45ܒËP֡XݔF)d>I,r tx඼~9ŠBYDï\o~]iiRM.cü enDF5%,n&4j1Ql>d96~PhW~ 1g2~koΣb}ȑYv ?\ݏ Wi J 1rlH5X7<{W"҇YhM` B0,,O *pjv-N3FP쏨+P?؊?tcM3d6=x; ތoڀ$>Yj#bClX7/o_nѠZ-@ #Wlh`ȊQ ~ Y`Jcúq(6>(*`/a]oU9TB/\boq!#Ǖh6DCp1Yʫo/$On>#oiT|1e?u VF?u!aU1yp)؈A3YC2|[2m > 4[4zK7ms|7f i^ r\dNibmno.n 3x>4l:<؛tslsD#i_Ý2:DaH%TT`6].kn/Iv ( يRxT:Y0e gBa=zazP(3ۧ"%N(*zwXtch^p.*0|Rb %n;Uq'ؖTMVL1D"HKuT[֝1?wbpqu<"M=DJcH7+yM %v:y>MZ^n~`p·B `V̠5@RaWKXX2OVi<ԃY aglw忘RCkv;͢q-7Anpm|6>be{imzh^(4ϝ@,PVv '%`:i L!r-7lZfsC}9B,7 )MۻCi: Chyw5sIơL>ՑRMRH*В2mP1J@]BknFw rͱUxa|CfN@͍6H4| Ɉ\gh8-oDk-qKh{6&Sl_[zңm<-= bch ޛlAv;^`Hחr2F#a_B.C3UPXw1lR .SFs(F1/rUmnyDϠ0s)UwىIRnڰ'Eͯ"x#j+ؤ_`+-}CϚu䨬SU+K-Ue9IbE@UAY?RHPwۧCyX/E(:?%ׂv$X _ qk[>ȁ)Z;J _:vE@*0"2/;I8 ӷYG%Y{RN8nR0}M'h vigSUwSVNzWu#+4K _=D̗VDߧq+r_3rQ5;L&J~GP[|=z:Zs{x lf)pUsbCPdlM&ROuYL|Wa*Ͽ=6 2X6GvS<;M"Mt_L [0fWۧq)D+p1,,Z.Xzz^t=j^RO"Eksƞ^ԟ-:I/qH[̑\j yc9TMϳF&vp=Q:d%-ٙT`{g5Tu'k7_-pC>''* cS7oFvǤlܢ6d[7N XO秉bZkv&:v4ц 1IUi۝XL)``yyp3^"*uT80 ug8ѰJX^x N@qVOn[ _ȵE%6ʡ.v>!p3R=_{7BU|1Yqϝ`Rryie#]x|u.*[qŲ+Kb0Ӏ_=~O.+߲eX2.-%|r/Xf% PJĩm.(M27H=ygx]{:alDfU\H@,)+N[\ux\Sk5eujz,$ջf'V{Ӆs*LP/ΕڒC "sjKz^Mc1co E-pㆽ~TF66wigmDKӇ(r1Ҕb!-MomilB{ΤҥD^H{(=\mYtWiNP|u`#V,JOSRb5Κ'ZZ!R̘hs+c 1z+o(bM|͛O>osx⑛ s3AfrħPɼִ9\#t(y6G9;&ŀJqD Z;fjO))(fl jqQԣJAN7? 
D^seu:iR^IعքG>Xe(%fa +-$Z^ʪL'Ol#ur}::}d-FO \jlh2BV(BuW@(ϾAӚksI[hy1Pw4QI328KY6\N6!1 6rx>j%3yVm9s_=h3~*(P:w!-bX }ױs͔v]͗'qmЗ^I7863Ȭ Yhȯc.YosO<evX\1_słxWUxO`g˅ROUL;uHEE]dUr%˗w\ÉDmZ?W^رtX.CRQGeܖs굪}{ R#reco&=&\wOps0=oa^#9~ B< R1߭]ɪh N9L lpe:*R#mj1;*mt7Pt?H_"pYGroZiP)6<\Lq \S=C:Y–>Nwguq-lm 2_( h+`:} k+印#@,QHՌ =wq_)l|H–bbcU޾_cԥD1n F̾l"6JN/mMN7)h4b$RrGVwJ #'{UQf)^c=y( e%_Ĉ-&!Z>jr|x"awNI7ZP+޵W{O_2%wT\rݑ-L{:Ħ3fsK_U9[+V`><IA?DGweiGv*X9XVΊM"TIqtȵx;[ ':_pu j : nm( :C> +ObEgQ" o6@b^ Όۑgya^}䤔:ovٲMQ4ߜ$KΛ=8w}Fa cZ|yƓ@9oKE(6FFppNo_y/X|DnL.ܕ/BƱT-=-şp (T {爴% N"ovIϷ1efDU -i0~(ȡ.ZIDAT!A4r# H,~ N - y/]sF?+>a?;,}1r2@Ց}I9+p0唥߃}Za"t~7;H௛J=?^V|S SALƔh ױNO+cPoa p5& Nk$u wX,$/b4`LRJ9|#X'ok@8fN?,n08cͨp+Q3@'ѤE47G<&֒9-7h(eNm{m+u]}U[iktީd|PYII0~-)-55Tj@~Z>db`V- Yy@Y%U%[=?Yu%S*CѴ8 b1/"7(Ęcp/=ػ(:hx~ $5խz\ধ?AK㟋&&P{~F|?m78/Ml .U/Kվ񕽝)n4/R3M?E}۞ZH4eo{w(IqQ}_F^vV=%X _ѺluX6j>!.Gzі/g:6ZN`?6&׏'i ߟu_m6$%Z\1@1HSCPo ( po[ԧƿ8-} s,*cښגVz{+x[u̵^(5@DT$kNtڭz%$m ^~`Sb` 3oFG̑O݈?W+;BlU-}㦘T IVQ hm|Ӂ7;4dJ'y Qrܢl/ H}x.J̋Fj]T&OYHizy){p *x>̅qΙr\0ہR |ox Ak[Y j"(*TE[5%fE47w9Xf`lWQFgVSpB*EKuV[w>&R?8HCBbeLPj%wda!rg2<媥Ȝ*5$ c9,N"Ԏx[cs(X^ksL: p>ls?Yl\pkY'1|oQ_r8}W)m֠M#7J~#f{ \)6Pjq[q(jk \}6>K w|TlkgB^?28# 0[(Q72o"\au,kѬq`jrhWXM*(C6TOƤbєB+2NWm6`?t:==a-J͢(mվ娍Je ^u ~ )^K4|yMFXGڊi>?36&X0USm* J11JkS߉"^КҰ-gPKٌ)TF*&KKf{$ѱ?(ң?)ߊy2Df֙e.-ag64`'𠜊}U(<UK|OD7Lc)9_H+nqվB3@NMqIldGF I$Lhބ){YXN\N@ bzmcr6̍6 j YLsxM^`pUk'VxT3Q&0nC1b'j˒x~D4\ՉH!ވ Q2Cf%bЉvEjOE]cی\Z]ȯ@s>ڃrwOWM$0FHyRʹEV12|g"DA5BH2?lPG8Y!`a-3%Z5zM%;S~61KD?!aghkduM2a(_HODr,M#>5$Ui"CP:D#aPVG|H-pS=i4fH^n͍/^% g+sn')5\͊h GH5 nP-pSaz\S5C,2ȪA[^CnJIYãFSpɩ/<;8ڛiI[4Zত؈ҟ])8 *aER }^.tM^eM}b2Pv\Rg5W эמ FShXdQ Ի0/1FhkrVˀ25B UKWbn_p _L)j79x0&qq7_`t;ޠlK}$?FT+AqH*FφvgҜU R<Q_ahhc+^'*}f9p *2ugQ5c;Rυ/n ,hFD6wCq֚msvBԷH SGhmj W b Hf]`9 ["LIsːsY|nf6R:0OY5Zj4o~Ԟ_]z (u-5ۚ5-p5f_(NF8@ɣbhh4S]] z9Ta `=Nc yBbo ç(IENDB`borgmatic-1.7.9/docs/static/mysql.png000066400000000000000000000072611440467744700176200ustar00rootroot00000000000000PNG  IHDRwZRtEXtSoftwareAdobe ImageReadyqe<SIDATx]z8SJ.]t 'UH%JBR\neI'0 FUWA H=-9881@ 
c|G-=9?F#=6D`B3[~<㑓y1 M1ϼs mCnžQ}E"/9_1 Ah0 ~{ED`)vE>7% ?BSH0?MKbIyB LhB_/b 0z⺎%tXdفsu,[L }, 7=L T BIi4-yD`BU?͕E!np2PY * ʜ UUq&nNr{r0_"Df1M*~i^LB|ga`bY^Ӄ}S_SźU8b͉\QqXTSjD o.r&$XJܟ.éoS=Ҕ7M0o- dJEbƠ &N0CP㎍U-Jw\)UH՚MsЍ0*Էh-,<5D SK Ey4 ZV!Y\#H }7a!-/2Ԭgf37fS;D~rqs6c#S٘tUDɓNrok9 IӴIpA|O m :}lD;&!ąGBT&_%XtExqWe@aNaY69#dMq.T0IUBNHH jGTOy]egN78'$I<fS"08#$u+vW* @FmCef7,/L("Ԓ'5 ӟMRщ3O8>h d - ɛ rS=xQfLX-XoS@S@\ 1NE^xc껉Sa(0'={,ts5&Bd4/P*#Dlލ@d%.YJb"pO7ߣSHxILj;T:nL*eE9ZHXgjE  7T[Gc0|<+;)(jV$y.jP/Ra`4i|| .F,5Ϳ%#mBd_ws$Ӑv8xe;akH><e/իp<N>MOeUOiFɻb F.+R)f-P`AY_-+ߝ2A 6a?'I Ynt>kL\Z`Ae쉤Zs&:!hzUZ`p7k$_tg<0-D-zvR>M„lml\U,/DrLBຈhkD+߮Mz$ B5˚JEw{^5}h?=MqC|Z^T^*ơ(zH6t^٪лf`% XP]1ΆDm.%-E Mz @ӦK K jl ;0!@%Jrx1WxGx^(#Nabm{RZ=CGoaMZo!C3#%F*źu=EVrȮq=]*E9 \ЎQFj>ЍԭP%=c?NF͢BwZ3'֪_ A6Ux)9 y*w檈-"jF&M,v ^c@2 0 D3x(SDJMhbUTÐw *(vjNՋlD΁hIh.9@ 52f؅V A:ėWF% 86 $]U `Moa<]iTFV6BmEJ 1?`̠Y0A}֫y9TG~qi󭮔a:M`dnnH&cXxh# KP_o8ܠdm$J`cb#<$O"lfGc8}K <*HSEuSnd Bn FyU5- Cvu|*^t3$TQ9+C5j16+VaMU{X2%m#e_Y؇*_XNkVҸfb/n^9*[`vvԪgJt)Y;Tҵ!y귺C#{*/di(?uiι.QH>>km-v*>¹ sބkx%U6-!.e fS>t{Wxګa6f( ˜}k"z iY6۔꺬>p=:3AQ,aأ*/Q~&|עw{ Z%^+$o;fM_.4؈Eѭne uτ5<)7fEX"A"umx¸y[ yL=ǧwIs#d&6_Y Miu^`&uvCEI ðjsEaOaڅM "ogHt}"/Fo@x BGT\؂YIENDB`borgmatic-1.7.9/docs/static/ntfy.png000066400000000000000000000240321440467744700174260ustar00rootroot00000000000000PNG  IHDRIN!iCCPICC profile(}=H@_[*3:dNDE EjVL.4$).kŪ "%/)=ZiV鶙JLvE ] bPf1+II{zY=jb@@$aiOm}bexԤ ?r]s tjX  -0+$qTtfv z\?I6л \\75e ٔ])D3gMY^zkHSW)Pϻ;[{LZ rHMzVbKGD pHYsLL7tIME #tEXtCommentCreated with GIMPW IDATx}yt[u;"E")j-[lǶbq444L3'6s=g3tNҙi̴紧dId{mŖ%dHq@N{(Hc{߽߽xw~8v J#"}+@d׀ařE 3Q|l9-9{,:c8Cr[ufޤxrAT(u` LyNM723T `[6^{R7z=8YnR 4-ּH"F A6M#03TCsZE s,o:d9zb=+D$!9Eg&^w]5v ꥨ vo7-:sQ2= "* Fo]6hNS|w'o9@WʮG9[ j8 !?_BXqs"m=Sg-zl5AH@?FoM;Dlp[qMNtObfMw.fיa 4u74xd\`0S(nƗʢQPXۥ_XYH}2-JAD[uZѮ7dujì&ɾI ;m@*B ]nB`<2$ϊbn F4m<"!)Kr>a!A|c?&63RCN` E{Ml/Tl9a6auZuZ <{‰xR(HlrP=\LއhEQ*2%`$+Tԃ)z8pPI[ko-B b,7u3n[:jܥPFsFU7֡HJisq@gE/(4>S}D'_) BLo bli*$ ˁ1lqJ=OnWZ+w^JFN~wˏ ͇XDfKN!^Txm&79N-$2i,6 ʈ*ĖZnx2Grא2_pȿLVD/-A 
%DLu1w5I.ŨͰ>zHEV(U SGن4#tXCCI| בGO1:C[s"dÛNСLKVr댊S;}GRĦhtSx:)`o ȕm՘gܮ =ڕ> d5(g.W&vy EKNXINLAurJgHU~fTwNC_Fh Tۘ"pẒXř`"'y8 '[M? iPB6G[wbͰ=W?4$rEj]:=Ϸu5I{q)A9Ş1,hANlQy ȩ5phB0?bt{/x -ֺFc.r+k\\chjfW<`2Ȥ;UYR%,#.+)25w]g>Y'I~||F*Oa:R@Q=mjw>:?l`-%J(%ʹ&UV`Kt)$ϙ];{#AD}BPgUxH?XsFkpԍZY{bVJyuL9E ĩgR 1]ܿv.p7 ːƓdrF QHuZ]`iڝkv>[!Y%B`*R51`^BaC'yÑ/Ϋ6С0.Gcgmז_u/3 D,hҦi.mtL㭰mֿlW槗'Ī V\ٯjM:{sAѵO$>(^JL8RTu[wTvh9mCN^:0noP4<L&hLꝓ^&lL)FM귢L/cw Nma#&h0noe-wk9dvݎ:gif& c jTpKTxSmF5tݻ`U:t>Lsז^ţLF^BZ;(ŵq嫠HlFJń o"?vp}S 5k9LZ!Ggy{RيtsPHP+hZSw2Ma(JBH*qea:/uv[hr>¬ӛdnڬ !j$JI7-إⴢ2 )rN t/зlm>)-j$tseFk5Xڼ!"Mkoﺺ4'NA$^4P N'$~&6ZzͦOT$^ 'ܞx|r2z21q- [:pb5gpF':54OnqrN)FU&6li8ڃ=ҟ6H,0 H "qqu|ϝ_|);'o^'M֝{Yo>z5$:_71Υ ߇?~/>ۘs_SERk#h^jN}2h=ו?<2v NɈb$ZX^8}6hj5P+g@ KEUkKz9#&}P_ z&_|5ן~mu ;2vgf-H$]<6$gXzXĵE1{y67oxnf$M3Z?y+i;7lh>q!M%6vq:UqӟiXxFbߜؕŐ?%dJ~ nmPmytq7ޛui=ʀ䅯gc.e+;_Ӛ{>IS՟yf¨/#t9w7L2=c_muCKiх>Z#QO?|mjtI[<ԁ^q,Mld5s>: 4ѥ2d 0=3=s_sǬ;qu.0uHxstv{wq%5%:GQqvQgko}*F-ĨT/2QPwس 囮Ԕ''SCɁ-.ǡr_yqyKٹ {B'~<\sOKLӭYi<0͹w!58-]ZY?jݱH(ξIxGm56佀]S98"&z4d>\Ң=jzF-5ZΆ!#'Ξ?AA7(y~M_C. |9ՓZ7DOȴ5/--+ 2ZNgu}@p2m>Uq5P|7.,=ߡE =7oEڞZ}WqRFC\s^w$Lg2V' twmm]g;?zk gFF|FaF3c #W$fۑ-|]jo6eB2D xppe=LH PHG5ZbL D4ZEb52=r <ބhh?U5L;2dk43C4ŢHh,OiBHḯ-9 "1}[LoC 9N8;<כ*FG_I6R-oPEB20_Yykv" zѦx&KoiG Fgͥ@ dL6!@r؊ŝM)M P">^YFB ^ԥځ՚.  qgv]ykN hOozsҕM>W{}d.{9Y"Eo,/SU_,sHoҝ Rd2t^Ϝu՝KL Ym++1bZPTkS#;^퉮+Ry %:9̓Nlsyr/]|ApM}y{JL'ΕV 7veWaVKUki9/y_<*ђPʗRhNť*BcP$R@UG5ua:=.rN=l=T?nX dm YO&@DzU[ *rbTim5GWa 8A'Y =M\R׮5350QbhBD1NmSJnjf(aOg]Fb5 vk}h\#v \{3!㊊|@^wSHhyҠX~B]#JD2)Hͼ 1$Yk9wiqȝ0rjPwHxy2k+>TX] +L( J4Yqezrs@d޼W3897?[%o[Up`wF߾Иr}Ip]6cV]Z+VkI"Nd2xT2pP2Ke 5L/@\F2\zrEچS=Mݭ1zh,&fǵ/w tj7Dzkd*P 90T iI_j"(P$*RUmŦ VG~Ǟţ\xd3OwH;s]TE;bCX<;T$6 J-YyMZ+ ʈ_O/HҌ3uZgke ۳0UY˼zi^4u>J=<,(֋ow~sFg籦Q! 
EkK*/^ ix5>5wÂJ)M$QI&%ANl[w%Mđf)h2(#ʐC|=Yõ:27G]UHقo[.*n34ULa(5s֨S[jb2&[CufSmى_$5uq"$NJX~MT}~J@/@Nk36`V2P,J0TSO5:\'Z,Ħ:Q%/,_S+ G> r: o-:Iw(@Qc 'ij8ۤt?¾,916{Af˰Ldayic5/p܆L8kZc3Ko⎋Up;oZͮcGg(ikp+7D&/VLo~Uf{|y@b: -/2s;5Xl:x!D$$-dRL$97}'DB1GlpE6\-< NVۻ6sf i}x_ K%[ 3"D0㲮xvg+c[q>jST&EWEڼד"!(2X2 ^w(H o4 ~Ƽ&^n3sۨq[HHF$X2FbD$X ݮ)_V2-eelvf79ܴܦf8.VB2]/Ɨ޼5DSI@ 8'Z:,ys)|Ϋ mVgʡ!$NbEڝi+$ҩX2<`,X`ĸEnE:f|5NdUnM:KǞʤx Y P<6] >f+%iEYrKL:0Ͱ[ܮXl 2"f1-dt*c~kԪY/Z\YQa A>Ir_Cd{vsMC-sFeőٱ(2LQs`3& /n[y>s6f3sP!nR9[^w0%h  \>oL7q[! 16Alg`n{QҼo= p\~e\\r1;k:31[Pn#BB(Bpr%e@[gvr?&f`<!g !!guۛwA,G1] 1W@|MTR9٠ A @ KgzYPYn#ADB{ܱte-#@ 8M̘P>yd&Z3aq[bB50aoq[X6 *]mmmmm1?# IENDB`borgmatic-1.7.9/docs/static/pagerduty.png000066400000000000000000000472131440467744700204600ustar00rootroot00000000000000PNG  IHDRNhiCCPICC profile(}=HPOSEEE2T' "Z"TB&/& IZpg⬫ 89:)H%1^xy޻j%Ym〦f2әU }29IJ.ʳn5k1 2ô77m>qdx̤?r]segTr8L,[XiaV05)∪/=V9oqJ8'a(,s0X$PPA%؈_'Bc>A/K!W (Cl䄗/1t|;N>Wz_3WZ.\Olʮ%r=Sּ5qhVSϽ;[oOc~?#Ir=A pHYs%&tIME y_tEXtCommentCreated with GIMPW IDATxw\TXzҥ {Qc ĒDXr$Fc,5&zc4WcĎ5bDD/eܓ-gE@9sf>33S( .c F#   j4 j4  F#F#   j4   F#F#  j4 j4   F#   j4 j4  F#F#   j4   F#F#  j4 j4   F#   j4 j4  F#F#   j4   F#F#  j4 j4   F#   j4 j4   F#   j4 j4  F#F#   j4 !KPxe2z/IF5ZP={K$>r99r9W"H;'O۪N}A&8::ZYY:::v\$&J%GnF äƃ 9rMMMaB뇍i&Bill #('O55uɒ% 8;;/ZhРAĚ~z%Si.Wk2Q(NJMM]nݤI? K ߿ĉ'L ``ĀF_z>\nݺ(X6xavyyyoT*}E"4@/8p+WnݺX0@ U` ly]χ'{ݑ6k֬d2C:FLii-[NzuvoVj켗׎xBa333rT*#:E0Ǎw޽`)g5u B$+**؋1:ɫL&IIIϟ߿?1xFw}ܸqfffP(ASSSUUU]]]ee'O4MI&ݺuZ8"+iP(111W4O>P^^^UUUZZZPPpR\.:tLJ'_T° :99z?s۶muBM Q dffO-"s:lHh//={r\./+++((-(({n|||MM G& Ǐ?rȸq㘿oC^:#Hb1L8ۛa#G7ܹsqqq^4c?=zL&9+H`F*j:s^y7SRRsz 7Q8qSFn \$"? 
d'{]\\bbb-[_E|>0wft!s֩q:<1r}z{РAfڿ={LMM_tѣG_|YehlAgmJ8q";9ydaaaKB!Ht'dsy@@k_Zia6nܘ 2J-8@G 62 ]ri,ƪQ("cɒ%laFkܬ6{Ox`̘1ϧ>))ai vl6Tv´Ͱjd aD"Q~222&Ou*95~95r''~uԉd2YVV֓'OtZf48 v>|}/ػ9VIbڃ43_uP /Q٭$@&_~\LO0WGEz< {VTTD,//ɉT(p" IHXIWMFBH$  Ðy+9& _(̝daZp֟{dH3R]`cc ~ ~)0;w3CSrԯG_)H5@q>[ t>^.]J-P榦owER%%%-nU#ZDfdd@rrrHb1̀rhsRihhF'O!Kӻ166@&LÝVGF?Ԑ*jL}˗R-|p5D0l0/DEEq|d%/=h"j裏fFDѣ~~~;9r.sΌ3(n5\UUUaaa\ZKffr(idU>iĈ]t܎-ѣGr?zСC+++n4[r9} }O=zAg=p1..>شiN>;բ%f۶m ,ؾ};2 CnkrQk֬Yp?Os|W~W_][[ J(G$h]INNN5k޽{ 7|+III0ހ7/SdlslzڵkZ/~a?afe/[IR<~۷|x~eK@uDv ӧ/_޽{J yΝy-Z#4uA=(xٳg:p!%%eӧO/**'Ep"{}ݎxW^cǎ|/Jվ#5ІUԪfܹ3DJ̙3D )O?!!!`v#;ISU[$1|;w""" )gkO 8~"nS\zz: c6L@'MOP!۶m7oA~fssѣG_~U:xǏ=|kny;;e˖1jv i&&&0O?ȡC`FQ6J? 2 k#655A5de<~8I.\rڅMF!!:?~Υ:t,++sF뷈#߿?{줤$̴N) '۷o߲eK~Zñ =C-c@```FF[[.]WH52O Tۤ-fYzY"(""BKJJRSScoLJJ"E,/^xǎvvv-9NبVٟB___H"!Hz{6377Y+o߾Mt Q655mhhH$ȨNQ~[[[766J$hА|>UJkmm=lذ^u֭X\Clwޅ8Y#v_ .qqqjMMM%%%عs'_>}ؘv ꐐ~z.|kΜ9cǎܹqbqee̙3:믿2 !ƚ@ qرc?55 ]hQn<<<5...(arС ?g컰UVٙ^UUebbPwmǁ!!!D||X,(˗a<;& Ϟ=gQ~a'N411QyB yѣ՞cb7o2DkXOw!֫W/h:CbΝ <ƆJ?#Cu]O>`Щ=fjllЧO3gjZփO/^BdBUXXH6)Pɓ}}}t?**js̡$/޽;""b$~@_2djǃۡF5i4yފ ++ X,/xyy[լ:u1b&&~!CD`J \)NϞ=6|b'OHsss(:III7oޤhBغu41 uʔ)dw{?Ѥ\ ۷o4Y1tPgggbV(zxx3ܹssᒢ;ؾ}{bb"ė9r䣏> S\LسO񹸸L>bӭo݅VꚘ8{젠 ;;;A)0)ӎç) Ν;kP5W.堜QͻvJI >}ӧ:'7ndddhRGEEYZZ3ۤI)RӧUOJǠA˝;w=zDQ%WW?b0SP餲ɸqkMnٲE*.J֭[~-Ä`ҥ|󍝝;{ӳK.֭[dI˧;wFo+"ӷo߇j-նmݻǴ8+ٿtt0W{l}JRqv6pRBh=ێ?&NH/Rvvvbb"2<==cbb 4%$'O[SL'233(^ySk#>ngee5jԨ1cP VqF K&}ׯkZv"%gge˖͜98}%$$aqM%wqqYvɘӭI$؀8 L&Z8_umǑV]=C&nnnpOϟ?0#>z 3p@ځ)S.1677w…oj'={6!$C &;g]C22_>rY\P 2/022oj|Qxbr?`") L28Xt:"On$$ɀ P }0o1x`v;C⺥!cBE"vF k!=`S.{7TmO}Zեmܸ2ȃ]ewwwBzj0 3hРݻXw5;wQ }K pW_}eaa_;}!!!޶!(`CCÝ;wRSS5R( 裏);Y\.3gN=5v9aJx<>}ҡ̓‹D"ʮs;> =$$k׮ ۣGCʄ>bJCcV umMMMO<ÍRi]]]~~~ZZٳgw .2O>f".P;9 eԹs={Raccc]r.ٰaIvfi;k$ӣG`>scccȷ|0FE)ڵkyKKK=@)ݻGFFrWLMM!6z?Kmr$**СCZ`AAAjjNmx#sss_ib m޼y:B]3g>|(V&Zjs0 lݺښb^ؘK:,4r*}}} ԐNNNC7r( -@䀣CS2eJtt48 C vk:yM"$$1Dֱ : dG3 3|p߿?33300"pKvv Ė6l@mmZ#SGDD"(HDzwlMWy3 sEFۑř3gt!ctttpppjj3ܲ2SZyAa0YY[M>ї.]0`;Ov҅a%_v5 2֭[^@* }J3,zzjW^}#G*I㣚U-IDT666j5| `Q4uPGZѐk5*?9}4}"~Cv IDAT~BP~ر?H'H3 
DZh%0 3iҤ////lll$g8:H;M}jɚ^%_G =n;vx7/*ˈ&j!O6$IrOi k: S)..]'<} 슃쭅gQ9.91 sׯ'&&nE$^Vى\O~jll4335 宾>7/Wk$ۇ. `{_YTTDqtH$#&Lj ,-- BuHoq /^zwN=$zk:d9)ơdfxKrY>{n4J E[r%DBBBdd$D!W2p{.I(VUJM)DUaT*ݲeKll _ӯ+}BG^vL3=TomIrz_\tiBB{L[2www''';;;N:ǔ݂t$. )5y^8ZЯqww۷/\e]]RV}V ^BS EwU1ZS,2đKۥK rZGMXX )NGhˋKHHi ԑu߾}}||}||mll4_Jaa!DI4E ﬤΰhѣGNHa/-E{e[[[:o%hMo| .1:Y"255ڵ=;wJNP5 EPPЀnG@:Ff߾}w 4СCG 6G8fmmM7o3=}XPm:Bm799Y ۰;FL&ve(-Z$`ީ~֖Љjjj My<3;wn$*uVV/T@P TFa|݃L@@&>wRS Ro͚5aaa>>>DV [pk@@Tٹ$炡d7Get̐~pH¢B؟G% sa/^qjCQ ؒnKR'O& k׮}ɸqzڡ٪vIo=iɒ%۔)SufffFlv$ͺnFFFNbbbQQa=02+v'\͎8>>LSVK.z˅U'oa{#$$dذaȑ#΃ʷo.--Um}􉊊*7n+ix - cbbBܻw/** t AÇӪ_~%V;M2HD͖4HR`H,]v =w9???\TTKW + E:4g82$Z*._ޮ~n!>M?ȧM< \Kt>**ۛ2\pw PabM<  \(Cٴi5 Ԙ4Ҝ ;,X$IKqz$xSNjz*D52iv*M|!yv06lp1ih⼼T_i$T9faJJJ u/ \q+nNj XEiiiR)W{WJ+v?yUy)鮡B#""\]]@vMMM02tW=,/'= % OwbqРAylHlf(QmmmffffffVVVVVǏ?~|CݴCX$/hѢf>w`MT9ŏ xdKC7oPlx Ì;`^Ӥ$=u8f]55M!zÃH$j|7##RQ122ZlJ0~oܸs ɟ9Z"5K#g577$1b;/H$DS(L^ӧOwi!CFM2Ψe{{{&wUbG8FYPpXGFR ݑ^__): }\'H)؇)W tWVV&%%Q ىT?~xx~HKR##̘1ݻD_J݃$1kjjN>p“'OD41kZzǎcϻ[2#|?>C^*J/ެT*MNNްa{ァU`9w޽;ezׯڗ7oބHa@m bcc)$&&<}TrlG<>O뙌<>CE2.V^Ms TVVuBaLLL~mYbM?/]Կ={ܿ6-7ի۶m7n͛7rqtt{, ,v.У@ LvSٳa) q/b޽{v9s&֣gx ~ uM-[S{Ÿyպ:{`t@ܼo߾Xp1 6P3c_zBJ;5kJPxcǎUVVUCח.%Ʒ#200pZYWW7w˗o޼ӏ?Jnjjjd2Ễ>[e2 ü$;Q{u] C̳5[H#}ҥ͛7ͻjKUad{MMML@>>>d§iҡ؉O83-)/by@ϲ_bǏÞWMNDQǏiVSBh:q755m۶mϞ=\޴B'.Ff͚5kp/TM9E9;;kI T*U([nߍͥc58r\"455y'O*fF[\+R?^{uuujk>dggܽWHb[[XX3QT`0h":::--M`F>ƛ7oKRTJ6}+K.]r_~\rR2'OTСCz[EFFƫJe ,kMMMUUUŷn_7n[oEEEU\WUtIRtಌ |EEEpL", ;w 2D'|2@W(gΜSׯ_WNZ">h}@zv4P(ӧltرc`)-|>_(/X0lr֑M<'r{Ŋuuu#|ٿOݺuꫯ9J[har'>WOֲev矏3F$鹪CWXX?YaE ͛{Ng>]/^\d ᲭK>_WW1iԆ'*GQ.{yy]|ع\4hѣG :t(111>>>""V ; SNe },ujvttJ_tkVX={~bGPٳgkCYbE~~~CC/bdd? !&; L[ $11Q*Ν;W;ZrS :a6lpں:8Z___WWWSSSXXx…KrSk2ډݒk|groc :ܷz/))!oǏ믐E;? 
ݎ& P_e-tO>HKR%z-[ yv4kmm=k֬]vQ2b}_~ʕ+G gϞꫯ&{њ666uqe2Y```\\\Ν׮]<%b'\2""c3Mc@ JZyyO7^UPgQdddppMEEE}}}JJJRRAGw҅؉|rlޚ^oGG%K[`sttkN.2dĈ%/wϞ=@6lmcccEEEީj8p(&>4wǃj]lr7CPteƌ;w֭ҜamUJGziGٵ{ȑ#!4F~j* E4WPϘ1СCdLaz- BLfnn~޽C!/UTjHmذѣGXkBznvJS]r%**PN$;;kqJ=PwW(jBJ9MGֵk~a Z}얤VAHΝ yI:+HRpUs.)f̘a؅+o&55ucƌquue1S(Z|Bz.^y7n@3ze2: ^-JOO޽;^&'Hm۶Ei[ꎽ… ߘD"њ#?6`iGFFB 5A411 tqqR=zt֭%ݘaiɓ'׮] +~d6 Lgg9H+HS .Ǎ5lhdd$""",YRQQ +** H۩S'[[[@@F/M9)v4ζy<^ddѣGwرb XJҚ{Í><|p8/ `#d ,8qbxx9Yr,{We[ 7XXZ2JL^5볰1cFYY٦M]6oܝr9N= Ѫ V-- W_}uժUE$CHӷ0aŠ+ ~ IQ655D"](GSN:uCi.jEptt[T+P˗/0a–-[I)%<̙3gΜ 8 \%gg) zSSSkkݻʊ]l6ţP(n8duݺusε3nԓm999\~˖-] - AڤSxx֢v f- d166$jseIRr :[.^xĉeeei5H ~@ɱW?R]]]RR~ro;j}ҝӬ ]IDATobtb^}ҥK㓒?~ )9"!itt!̱F\(:;;(L["p|XMM[ZX! BH$ pssS5ziNr죏>|_XXXRRCL@@Gn"""LMMgpu5HUUUiiiTjkk%Y;MMM% '~Ā=ܽ{֭[J_trr rww󋎎ڵX___)cK|.]9vUݻwSRR򊋋kkkvVܹs =O>+KKK5^;"uGꢱӧ555bX"+@mmm"#8Dۯt,d- =:qDJ&O>dzySSSiiiyy9X422277wtttrr"[C˖V):3sZg%fʦ& [{{N:g0`j“*͍****++# jii A8p`ڴijҥ7oV;]c?v9Nٛ:ZP();͛7ݻwЪc >3///$@_h*7׻IhӓY\ܓ{B¥rZ86PUo 0=3%򘦦 TZm%U!%oCCCeǶ&<r믿k;6zh2$dp}6}A#**ꫯׯ_gaK$v a& UkӴvVbׯ_=7nC$ -Lk5n O2~tqq!ZߓaL?j2^z||⨹(1Ïtp Z Ia[ud2Χtp=e>qę3gM|>رc /]xx8>Ɨ ˗/_jtk7z̙3SSSrD4")?ٔdpNKfee_oN*6jt48.:w>>$X(a9!{ڵkۧR7s{||vU>l' pPH1q>C'g'îcGm;ȴ/ 'bG˓'O/Xm;QH$%%%N֭޽{)nvZWWW8 ATa*Ŭ/,,_SRR &:u*3d!_ȽwgxK.=r%T o3f666}GgbqeeeeeeFFF||6 Rڵkwi)oYYR/--KOOg88!n00!jh .>G_sXXȑ#ͅB!onnnnn*//իW/F[-AݻPfǎ[f mmllԩS7mDFw0{ƪP!uJJ d#C˽Kߠ;wׯ5N =3f̨ךUmF?[[w>(r<88ƍ'1pڄ :8iSDѨѭnJO2l'N%1hZx'O޷o_޽a#ǔpep.ógφCd3ϗJcǎvo +xcb^GGG_ d1"S`Gݙ4iҍ7&N(J[?9WBL&ҥw}wԩgJxU3bFA]{RPAM8166qL"1kstҤI#ƍ'NwQ]A䊆ي /=dȐW_}_~@kՄ-:⼼;w\~$Mttt޽}||\\\<<<`H7My;޽{z-e!!!Ǐׯ);dkw4kZ,744TTTWWW2KX[[;;;D"+++tJ7[Ο?Y;BiiiiiiiccÎܺjt_@3"l EK7jO|P( UA?4fEy v7ϳOlyZİִ3| 5dTdi6۶F#_ j4   F#F#  j4 j4   F#   j4 j4  F#F#   j4   F#F#  j4 j4   F#   j4 j4  F#F#   j4   F#F#  j4 j4   F#   j4 j4  F#F#   j4   F#XIENDB`borgmatic-1.7.9/docs/static/postgresql.png000066400000000000000000000754501440467744700206630ustar00rootroot00000000000000PNG  IHDRxi=zTXtRaw profile type exifxڭi$7r:6rf!&g(SYKVd[uſ[qj+Ͽso| >¯M_r>_u__wu}Ӿ' 
f=QgQt;^o뷟s8_.JU9пWbJ|Z?]0_#r|N :}_{UB'oZ{w|ndp ?n{\zo+|T7mnu:?Hoan8 %xbk+ZK5^S>эXt.rc-]z\q82NFpD !Z uN9&:h̍bZ93/ X6sB XC V*cNМumS;6Z K%UZøЬ-,Y6b՚u%\RxjTsZj:ZjY+z=zcpё?xaƙf6ˬ>>+/[eV_cǝ6vu8riqM7_n㏮}k~ܿZv-Fk\Sщgt,@ǫ:z[9s1,JSsvP`>! ge}9tΩu_7]mz RS21~n뫴)nim;7AH}0'KN3[vu2 yf~asu)ʖ.$8.#uk4*AcyZRbck9F3O֎M]Ӆ4!m)O:nROj>W?XniA mnufa֝+@zF.WH6 WkOj.Sl9IJK\\jkxm0;t@xr=?pODX=i1..Puռg0PY=t+\,RX0#U@#x,}}i"87rИ-*[v VCGwQ?;'ĸmnbɥ @g4ƪQ܊nSm<{0*3]3VH=u]cb{ sep{~@ܹnJ,6Ix bCJ1 o3P͚w +Ka]PyA0E:cu.}:G=gZ],J .,z"5డAF6GC}Nd>l=JoCweX7 7KF%U*MS%])8-DP )Van4.\0a?0h Ӗ<d V)v@ 0B!.P;iK "枀p( P%miմ挧tz*Ǯ; RⶓFaJV7kNQd8٘L'\rf.u\ Q䭱, .X6p1f̗6*r5o"egpoFG-D( hV ]c(0:0mEꩽO˼ٸJhFٛ۽+\z7TR  I~BBr#FQzt;@? 7`g* Mm3<7EǃL+;2*Lx~a`?=d5`:YcHecߘKckn,cwvk<甮iHAubDI@m6vύ"afjL%pmo6 S,qF7L+YR~tz)Ps7d9-Z-*%lfdC@p@yq1G9c+ C8@gq0Y؝<wĐ@e0@m3A';BQqp^& K! f7[זΪƷ;s}C ITwEmQ+_MIv+`ERD>FLix3e qHf 79&} Q;@(l|6Y4 x|5{ y4yNtrQPI(fSL␃͹5 A{nÕBD;b51F4 24]r/$ _; M4,j0\$=ETwe ?|"ݢ+݀\?e sѮ}=HFu@oQEFü, yEmUQ 0䯓2!{] g)CP.3;B;-\sVFњd9 `b"q):M"69fQ!M/?循[;*LSdT$RH0Ŵ5 SL_@4`ch^0 0L"@mNfquCبzn=''Ƃ_^DT53܀SǚB't$P+RRJ QC;B3Saz$zbĔѐghASHnȭB^ucpD3RA1`(ӮDH G(CläkSƸci j&mݶ6 vj6 dbiע c﬑֦f Z6]) HgH`wopEA+Hc mIWȢ !=[2|\ðiO'sіAz500 X1*r hgaS "X<ץFedXp2#6ȑ`)82'~[(d$87mb0 0Dz^Qykcԓj *LK,@ꎓQ$&@UdUYF[gDYPHT[oax*X*X,^vtn4NaO .dLfy㚽<&ɿ_oQb0$+"`L3.ɱhFAW;`G? /c#@\H(1Jo/q -mٌohbӡ) \>xyY)/-h"P^sHw6C('1R9E۵"`X&c*eR'b;a)K֥ɿ2wL3.aƽ6qܶTx@|H:Yѓ~y6DZtWK[t3( vo`p'ܴ1jN2ͦ&8M 1g$Xjq'="4‹z7tfN1V<\LZ9ֹw*:1 XDRI}'F`BMsKUW- t0m'LE-.FZNPExl=>4zBKa}s$$KM<˒l89 ГƝiqNOO)9m 2rSdhFl<=vDt=A'R%@YM<;Ď& P1m/׭h䝂'i=[X"܃fC(:*f\nOQF=700&jqә֛b)H#8.gB)!4fU'\p|QnAXb1(śb,^9Ugy0~)룍Q2.5aDU&o Cbq;rro!McӃƚ'baђH^¦6?1fVk+x(1P*=%jV¼FہmSpw]R>$AVN#^Yhy5p1٨1ڌ'tH[z^M޹LB)9"lGH*3K#? |ͦ?j2U9mIsRMmn=d<5(.%n4B>.1`Jtɺ iv|<7p&7 fUak$ANN%!LׯRm,=m kxD']8v &U<e.qvM~i="ENrOJ *"+Q ɥk洈׸Nfd> YI$@qF9ٺ"ț\S¯gy(`;+~Z{Pʪ=:c8K *KEOtc,l`_|u)}CyVDPBezTXtRaw profile type iptcx=K@C#1,ܹ!6)rOҊMpGР[c{¡0uF"d nY{UrYO#iCCPICC profilex}=H@_S"vqP,8jP! 
:\!4iHR\ׂUg]\AIEJ_Rhq?{ܽjiV鶙tfE =CB2YIJwQܟ#f-Df6:Ԧmp'tAG+qλ,̰J|+M F W% pHYs'0tIME ;bg![ IDATx]wXT]wzwP{Ekl$31Eb^)S^1cĂWP@A@@: L/LaPx͝r̹u>{MPEPըFEEJKKQYY Bz֢ yyyP(8p OOO @ `: DGF\.L&5=p8RŨJR4=<==\. ˑt߿wnvy{{ǰa ___f42`AW!4Z7o"##HMMűc #F 22[ ˑk׮سgOKbb"uN3gX,ft2`AgZqI]UUU$$%%!22)B^^<BQQQ<<<wwwxxx@(B*uuu5=rrr$ɒ%Xt)"""ʀMhj'NUpf?+PR Bar1o< 0=z@vv6>slڴwBBB777 PTAuu5jjjPUU|Faĉ2dbcc6NJ8q9G봟OϏ 0`ބf0p|ᇴDqaܸq@PP<<5/^ի࣏>BRRR0`Aheeexcknnnx饗 /thpa޽jkk4!W^ػC7@@B(U,u)Bi/Eȿ]py6b{a`D A`aL,YΝz²e ˀ-% ,Xז,Y>δɻ]̛]S5rH?wB7O׻Z2FgΜi\Qo# 2:іhlJswBT/B|7b_Ǻ՝,npmtx!JMϟ?~)̀ڵke:r|6%Srq!5re%Ы7zz!"1~tvJ ~?3P[jStrER0+zY 6#ؘv&Ҋ ̘1.\06l0 cF1 ܍d2fΜi%ϝϾRy3q=з»yڼ_TV܀)C"XG8(?$BrؘOM;Z-?lz-22Cpp030`=B#I+WĻ 1ŋ6}Faņ8Ui / QVSU}o7 5.3GYFgg3Ww}\'">||MSHRR~gxyy14;toݻ/6Y'N4VсVYSDzc͋3lL͟)4FsnxFIYS m.HR}rºG www$'' P^^$|fD3`!4Kcرt9sXWv$/GS5: g}}Lc^H+k>HqQ!eРAh48{,ƈHFѣG) gк[<`\rPSHtۉL|.щbCpzl+2,(v2x2si7n^s #}oUյ`06j@7o߾lٲL5  0vΝ3z{{c٦ԼNm ӓ j>4wTIX.$u=]XtxOtJn Vn>lrr\,[ƍnjj 0f4n:E6 4y/wAT%;(0Ne><] m[9}̙1߿ 0`-;;TbY%OZ֩cXX4lT˰bnP#IzZ|t`vJxxꩧL[dfMh&MBPPx穬{nHFf/oEbarJRrMdzf2=ohh0i\2`Kh)))9g5r%NfvjÚZgU2$7?PP^gzѱ.?{[7kRɌl >'Urs2qReEh  0xMVZg[GAV)u: uV [g9 J330`XhM E}֙BCSH8g$->UVVZČl A4JKMDBl(z ;~NxdDz-2ݻ78lvK:u&M\'1`?ЦOopU̜9µѐB|s9IH:>tUͯ?b*WPWLBX/W1}a zt:(fT3`&ӋW^5=pU"v`Gœ~. 2 QP^zTIv#bCg6~|lNf k[ͲZ|^@gccYUVZff&t:8$$ 0x r"ʲ.êsYh* ~</硼6_ڠF*컐{a㉦2-uńa\p[X~x:EwƨMF[vMl6B̄fnfT3`&4ˉ0??Z֤ Y$ +琜r&Y,[`dl(XiSo`cez_wv@@b\sFw7NF{}}LJBs ˕&Ǫ0`!4h4"''$Rܳт|r5{F+;!PܜxxD4Bb~R?p9l蘾cP?nZX-ޮxj\^Coɻ6Iٙ͵ƎۮMPC`X #CE7nVzT׫!q,],3\\ߎ\U ~Uյ+q j=:\6$|.=*;ƒ|!p۵ U`( pPu!&&`e"ChM_vjnTHjG@k+FDAY-e |G4ADie2)v _γZ-ٟjeI暎; ,<4$(@ARPH sŘ>27mQUHBIn8ù(˶?)P'\b O w1vb46gə j&Ꙣ( $rF0^=~.`~F<=BF>ݱF&Md"4"4:)@L  Y'2 ƼKM!*;,<.jgVu7*˱Y,J{z?22]łO+Wm{/blxFc~#3d > T? 
=ļ~ ,P$ x7F L{-A^i ~2KŎ ls8q&vcnLw<7+=yD9V/ dqM&Ẋj >@H1潛esGvQeRk2c9{.bɢltU8֯qaV%͙3]f'<6o6 I;Ҋ0%.-AF ,Dž['2hG t[yB !ݮÖX4mx,DO.aᆣßzˊf -/ǖ-(\&1:]|u )tAz{;UIQ/W$|ܝc:2nd8}ڜ_w`WS׊1ퟰ9bXX$fݴ4X0%,3 oד~xedF;p6؉]p1߼<6Vtn/))\n.ٳ܎&֘ޖ Bc\dؠu&4G-d1qqq ENwAzDE~:x]Wk^#IA5JtE*=5h $X,LѢvn.敃j~t$j)uHU萭ԡR`Ds"E\9%XBmvadyQEu/O]|%V$Hbu{5-ݍFK A"c6s\ :ڰ{.kl9!.ٸv NvGLށpQנFT%8~u 5M5x;A"9܎:SnBȱb4${bDT0z{YćJGI biىW`Y~:$wМc۳:M]YV[oz{*ͺ">pz9{L*( `0 99tpBp87j$r׽&)*cY)lCtYkxY+SNfAƈ7p.:-0?hYϕ߹ƻ P#WyxcTטX}搕WOdBe.eZŅ`xDM`Ƹy^\C粰|_:"<4]9l|@T-`CˣG,->c'Mq E;D=p) w,?,rM 6#N< Lf:8p`tzhQ.|0~X60UOe13Kk208[XCwlCftta@P,>g'`xôd#WO_pz2k^X:w >jJpp|.lW/lYa#&Էc RvB)̕]; Ng0Z)'R§'4eu O7nhz޽{w6aPogjU;Tz5 +VT%ո]muX5P@T*9{6VnUɐUP5\d#5-) 9RۼXވâZ=NUwBZ4z|D2U'.zE.Mh63iϞfaK.AP FƇw;-Z{XhI<.]} Lژ,`pd d|`grmzx9a;w4k v 4@qԣAעs4:HJ$_?/c*@,@O##ѿww8KD`)4(5+R.6 ,:H:#cz`<2}! ͫTitH-KOL&<6X<1&9~*+v<.v/Iz^Nxr@8 wx`xz,$GbOEk\{26w6 Hz ze3JYI@^?(Tؐ$Ų䡁tugjրX5 ,sq j$C7BsXb>< "vM ޫ9s &LHh{:1ua 2Nh)%kPݩXoM,Og.bF[EnٲZ6]{/Lb>e59ݰgjUxasMB"z fCbp'5 !28@\xOVrlxqxt\+Foݧp85Jh6YܒjdY(4Qw%g)ȪEEGLb"q 33N%)7üR60W J@襐t 坥OVTm=sj#xY])70;^~B#*85ur)N;ӆsWq 4Z .9\<ӦǏ;اCDp7S$E^iv$#ynǞͺf]<8<כϘ1Eܕѫ;ނp80Jom'ΡV M/C`(oV"ͭh*Ɩjx d*-lI) \6t:RKxg)մdf>_hN}Ao0bQXTm:ŃBXZ3oǡbp=xxŊ:7 bv{.re}m~?۟&(ՂߤF3>Zl Rok$)ФۂLKa!@c=!kP5s. ľWZXs #3g ''th"G4GOԝn.,?JI+%<|/)0MkPCK'Xtx;vus:t I \d;nU;t=|y'uOCshLx'E @` ÉkLFoꄡȕi(X{""4K 4כCûu ڃں)rp1t<'!";]jyIȎ` ֭zK5[ IDATv*&ԓ~2Yfr_;F lB|>S4cI  lv"a}:=OmZ:5RkTHQi^Tk$*4~V*^$RhW,sh?(xIĜ*{Sk&Oo$E lwe-k寳v'sl#Ecھ)IlX27M-BJ%ڥ>hĩSC!CCSia+;37&_ƠFWY#P*g pvfXXg2 xpssoxf֑hFK3U‡E5rqX@6n[1v*xhd*z8| .;. 
!!Z%RTHI) ۬6I.'i'6Fٙlmצ6D^NB`,FFY,@mnb_POClEaLd7쟗!"y:>;ipX㝠V^v, wĻ '.Q=jWo^gۿ3ޏg ތb𛸇Y;CW ò9p U0 |j(<6c YE7Y0Kxw#cC&Wuhg(EfA9bH76/JDdu: HgJ?jsI2 &>.OGSQԂn^1`0<3y <]$"P'W}w ~FYQZb,l+7PĄF)4:JpgbӅ|%ϝo>f`Y,~ 7mYCyc7wtynWRSH,Hcv`nb,fۇ䴛V)AhnsZh\XfjBb|]L1*44l'쒙Z<}$EގƠ&cJN?s)0{qĻhI4\Q>EkH#l( ˘P_eD/쒄fy`fp3aZkUޡ?&sܝEV{`d_8iM$M5e """p?6Gfԑ$^׬j#d4l0^<=U,0\=]082>{ =jkz~$4+00*nȷDCfr _ uW x\\lZWE5Z`\6al]3ٴul^iBfMK螺DrVq9B<`yIEIC+z*˦'Wޘ ?`6@d?_?.͵sqpBnw.3:;{l._b{0qjÁOP+k YpquȽp '>ZK7v r-##RYixtp'Z\%BY$$;*Zf5EYW_w'Lb&_7n#~"D5`{pj1f6ak84Z#3 '!W<1MԩƟs.t=/,(UStzZbꙃLfvoF}ZݯwCo៦6`ۛztxb?h .Juށ3,x|\ȥX=VE[E6TfAsYlE@7Nhb$Ib~rEbd{²@"Fa9}S,o0o9 q6.^eJp|#IxMcԨQvA; *-TZԫ)Ԩ)pJe<%CCfFĨnpw?$) ӮÕ<|1:<,>JcRB^nbiCu;(Yt4JN{9KVK.k=#r}X#EaÑtZ/'«I% {5?ki_ g+D7hLݐ)5؝ZW7bvEQ6*&:<;$:%ǼݞDXbx׮]-Juh\ -xInqJ;L>wRoU ..o7ղcP{tX} 澵 sڄGڌ9o+#csf_Q3-RkS@KtC$jYPл.о><*4V\G Sr6i ୮35k4z$ jw o@L3j $F:s0M,r=<{|Y:Ǩ Ļpm~o^rUj3AtXh [͗rcǎ5=?pUPҮnm*JEsg]f+M&jbK Mkaś3{YYgǏZEB U -jU:4h0Ip#I~4f #Jն1`p\x,0?£J,  hnt^/`mNqg>};lS $@7\@;TF*C iĠI+v=Q!f1%L{h_-FJʶw?Q*R2XX̀hQp̘1@=ڵ1%V$㊢ iΕgn<ߩ{G1_8mX$ICqeJBkt9z1}ysN+W g.DOժd hFDv\Ȥ] NYPiwd@y6;ډZ9^xz%j Nn5p`o Np3{Mz#f o}.^vus]-r,[&gsœlJx׶p,B̛[qn\ n&Z倢(kHu#i?M+1lBswwNj/Hv RoB+Rp0FŘty$UfaOg8̫;+M!MF]BRHi 1{deux${zACw5vU1E"p!n~_'ҡi?28f"8xy[!etjGdF aX ٰ}؅IqyL,HA]A\OI;IPQ%皻&N8|޽[jocRSȖV"">»3w7%+el'bB-Gz3f0[233M\]]AQ+;=I[ǘ ~ ^WCtn4G.BOkP]EX04:yy.x xSpy Ok`ڀfnW@3`$A FZY_b+s?mT 4;a.xaNt?/M _'P @ZU{N7'hiƑ`Af$76fQ޻ƚd2;fIS۵Aۏg(ݜ>, h9cQWiۼ_"&mJIX2uUyVa9jJ|&( 3ݺuZXtV*ipgb`=$u4ZK .MPeUbdJ,#e+," Çۭs'OW Bf3rH#\%ǁJgD~uT$Iaj|ĢiB%&O5~³Sb^R\zsMFcPaX}*6޶ W_/W4H, uhKHp( $|OBEaÆFkgnEU*Y0_^:SCq!inswˆsg{(J8yI9=#.DBV}?#\b:^jUIhv,q^tKPnCNHXDcRn׋U$y-7\U EWO!ߣӮES'IpP&UI 7MI5j tٚr}ڦkG,à\%&-JŬcR>B`x߰vo[n}rwڴi&B?fX,DcNZK-a_M.Mz^^d=͇RhwWiq :66Z]]^{5qdd$&Oܥ Z|N]%B6֫T „A5Lr܍bq` C?7oS 7t6$v з~du/NǼտs9,*EZutMxD6V>N^!DT""8J:͎:[/tf$i-^nNxw84k$N`k~}ݬǺ=l'gsp9c ,Y -*I8! /UJYM$E67ېр˹%߻4oUɐRZx7 »l:ٳM[n5=sq>~Uf7XL+@X9`*D1 ˭Z )))V<.\h heT娭\{o7 hۭ:穫hk lM"Qqy8lV'wwQQ(Ǖ ɌʣQ9:\ 3gZ~_m 761^{?MU qLF&::}q^ .۷-K 9sc~!M 0V k3dDy.b+6,3] 
F/ًo?n_=Jxɒ%VyΜ9~:PSSJ#;;s'NĬY>W hy"Dm6)y0e)$Ro"f/)wm ϞX-qX(]PڱwTx(.BqbB"ˍ $E9/,kuJGg$)dn3]|=*BAB,^϶zl:C'ѣMhJM{=C# GQ\)Ó73%" x,1 Bv W +k?]lPR-;NGRCZZ-d̙3}=|D8tB߬BDޅ2ohE,_?A$؄jm3GVSkP%Sber&⽬=Bђ)eg2z@O&Û"۩,PzUZZrcI{ٷmkAomŭ5ϡWtg;Rl~(_hp{zz>30)B};[j\Rϖ<?S_,6i*}*nUZHR+̶`Ν60DެSD _vy~v_<] IJ%@祹h6Y@i+"ٚw/p1,2kpjvͲv9*T4 ^nJX4 W4 !/˾ݦ} V8˴X.=>Z4Nj6JJC Rh3W6j5Ȟ6x X5/ΰJ `(_$|<\\\bܹx ŨQ0o<,_k֬={:aV۳g yZۉ"0}ȽUy6콀Ec]gPZYrFk/؜ʂMVJ60<6E)PԘ&X=Zzo<>9Zvy9x)hb@)LYv_p_C)9+Z0_U׀U;iJTz=DX7odN8y #^ۈ'j?Ti(?4)4VyjaDҥKM&g"_<ڥa{uK,,zG1uTL0ގsu#Vo;?^'7Ჹ0''zR^^^x/_}鸺m#=.colH̗,xcQ_ N4y@7PfMfHkP"}{!v8b}8(s!5﩯*'ZUej=n|Sx=/#Ia0u'D…zl?6g#Fyg6X!*F9:n٫aMЩ j]V+$)Axq^<+߮C.Bv zyJ'ifEk`(yvB߈` 뉘Pfsy6ƵLbPwZ翃/E) H9#6~Xq$_+UiiC -&fBu6R&DxwV :}ŋ<| 0c[ِXDV=شiS@߾}G}=X Xd,Ggbp|ZQPJ~FƄˊVZw{BkPi1m&2FFH(ULv >><0 xxp8x¾>ۛakYT5 !b\d!.."p9l $nUJq0k@H/L ̗N,Wz)&ԫp^̃H"峧lSeWwi( J:Ӈ"$kMG_I WjTQ&@^uq6@tAh-&/w!4*zvw 7pPӠj4 4BKRLĆ9&}S 1a_(ks# ΀[5F2%b\&Wو2Qp:B#h'ߨӀ Z#ԯ_??|嗦0~g$ĆXVTo7 z~6dш)SXP*Nd`€^cr67?nY B-Gp,&$_,AnF㎤(իP%UJ>AVA%V?N">6/Nʮ|@PՍMw?_qbnb އZ~=)öWiWtdFࣧ u:#ZNS8e(|1A=/Sܳq#g_{ \bu-F< 8h~<[6$IabLb'~a<.xرYxF: yղc4.:2 [_yz4+ͻuYUln۷NN/wOw\7ˡPkF+Ww/ŕαҺyXCfYlYw{MwALJUReJI.bM"3\իWwtr7fb誝̷"udGQ}VVM--<9Df\|[zXD*ٳ=c߾}cWLd76dE:17ufW˰T~9éyȼYP0'!]`"J1vX^{饗OP\yovC) x|t{A9ԝǚy =CߙqmߏUWzp0Ѡq1{l<><͉!Zm0/!$[EqL>XiZMhxM_}Uӓ"-A+&!ޢpŋ1w\Z?l6'||j2  u`7̿\fMVu-,!m+ò*] 11/_66x`1DC'pl R9PHꬡ8SsPiCÃZFZߢ HRH :3O8z?|x  GjcM$LBaTrRo7?*HUH[ѓEfN=c;0}p8RT!͡FGji&lx5jKsHSctLv|iw:5֕,rs⍹#p0aP}7G8(oTVV"&&UU~QFĉ]ˡgСCM5k1|pZ%* 7I)5z|%416oٲ7r TxlL^m(0dTUUa֬YX`r| Ra㫥0bSĕxeNxW.Ƀ{[#JȔmPIȇF_6l qZFyG`Xp ͪx3@h0(S1AA޵MK@R"z(q7YX~?ܰ'2 8B-_n?/ٞ3xˍ VFk6{ FF$@Qw~<."B] Fh4+%Yvx;~ ~-_4ifϞmx|Wݻ!(cjOOgSJ%&O Xׯ߿{-/9Mw,]7o-kEfht< vZVk톚2e 6l///3:MrC_ι~4ݺkx q&AW=n l9fseSnH(4k̭1** dg#^t [l'a•HЪp=du())vK8^Zar8X67>V10ϙ-_䠬 ?ܹszoǎؾ};T*dp!c>;^R:AoOmi!w:"{idɱ,b\~^mݺSN!99 l^сkע7`?FA.FٷpUM-w`х?#6+bBlr%޺u uuu?g3gU]7ZK,kaȣC‡D,3#PUw|$vULJA|.Xۡc>ؙ9iRvYMV ѣGdž lʦٳ8hp*--͛QXX&W>{/sK}[ ꫯF۷CA" |> \ 7<;s>v;3۽t'Ɩ6v/W:LX6kLT|?Ncc#-ZNE!!!2 8y$0%jkk",\qC0U"Lg 8_lr=RDxx8bbb2% 
v'\2˚aV3)26<*lØS>Pu^Lff()M>rTO睶["WWt{V<͔ hmmEvv6Fe˖!((Vۣm6,_j׿PVVǏgӧIaK+"w|-ҟ/`0Օ 7n"  !EE!>Qд`|BH\xs0τ>Az/P[ьf5N&Sw> }&l Ɵ+p'hLoPOOO 66zMrr2z-HRjٳ}p"Hj.  … ?GMix1}t:Cx˥ؽr6t>v׺pۇ^uņb6l>ܾnlǻaT7ŚF@>$DGG ΝCQQs,YTBѰ3w܉ӧ[-ى vHs$:f-o9ܨh!vpap!B2 z*|Ld*zvB]'cJ bBRARa޼y#ze !GC.bzgQ]Ӈֻhcb`63p89(@*q4D]"d1=4je@H$B,@; 4:dʙҩ&,r鄀vFˀ&0B6"n- 4777B6,:9c[&F\F=4B6bzzoײv+ooojyBm$mmmm /I Bm,Yie8T@KHHiBmdBBBlc\ hBP6U$ЁXB!F&D一)2P# h#}vaZFB(Jhh(e{yrv ZLL T*:B6r=ܼys蘵`R7!AO?NU !(Jŋ{n3,6\S-..Z!P@X㐌!dozܤRQ# h7PG믿f]lnJ333)#!P@sJKKЧ.yx9u)8R7`߸q#6B66ZjF`{a1bo Wnۻw/ZF!1[rsA$>>kL ƅo ^YUUUhT*EmmB!C@P֭[0 ۻ} ],¦sPF`6ˣ`F!Nz>P^^: t:Ѕ5 01 n݁hw2 V=!sb$hUxW\իٔ?<^xji`̐#П-ߟ1lڴ9Fk͸ց.|( }_Dww7 -- J!;r{fB!:Qﯲ JJJmWÇ!˩B4@RRΜ9n[brss3 ϣ6ըOOOj]Bͱڰ{nV###~zh4hhh@cc#._6\`rssǮE#BẺ~dggi?iiiXf t:ݩEB43LѣG7ڵ :QQQVD!&zTUUϠP(J???`֬Ydt!d?B@EoIENDB`borgmatic-1.7.9/docs/static/sqlite.png000066400000000000000000000110741440467744700177510ustar00rootroot00000000000000PNG  IHDReA}iCCPICC profile(};H@ƿ>";qP Z"TB&IC(|,V\uup"ƃ~|wwYeLfTRW+bӈJD1 u_<ܟ_) ̦s'tAG.q.9#' . x8s.+8:kߓ0\VNs),b "Ȩ*,iH1U#jP!9~?ݭYpIŶ?F.jmN3pu&0IzŎm⺣{}%CrM7偡[o K]oC`D3~Srk˞PLTEJ\DԤDzb ̼,f|,ztZlVt,dl\Ԍ􌪴nVl\~Ĥ,vt\RlDRl^r,lT, ^|t$ ~jt Vlvml,m<]ZX%^#Ӧpf;K{J C+W3~'S8۽e6 M~ ⫸9^8E/-W1p;^vO$h+'B]zJǓ) e]"kψY ؠ!pCW*|3s|ݾּdpĶ(Yuhܽw8n[o4ײ·*В.6ࡃ6xaܖݻwo)hele8н(O }bQZS̽Z\ē=>AjWg^7㕔Զp`Hg333WЦn̼#rJPZWq+rl5&W(5Q0hhS'YMl/NpŜO_^x3 l@-p?| EJtK,]Cv`,HilD?.]zSoJ5xete-`&:RS'1/KmƥWrn{TnTFd:)p>?{p7LK/T:vk?jG--Edn#ssx78bL< t 7N婉ȩ`3Q!p]Qpۿ8dZ?L(3m0jU<yFA<&m^xQ ҔF5,ąQL2LO5(or.Q[. 
0s繝;o>3GaYgu@wLu2p;zeȷb6o&ߪKNJ NL>]l+6Rv1 4ǥWv$s߇ yrvW:H=[;/ΠLj!rPSJ3΍ܩb_?dsE9+G¡<(EsJ{kb.GsTz15:s="s׽[ßVgn~rqMM 63RV]xZlSpC}5J6rlm̩67K/ h&&iDB~OC;ose*r}Ls٧GW'ot=Ikآ2E`CmW8 3A0h=IU̍N&ٻe^SPmJ )plTJ["snxGђѧ+(%0]v5!N[?f3"]I,?qΡ\+J&ѩ8&wJ>^]hkrt92dr'Qtu1ԁ aZV Iia>cdt,R /q6'2#%ߵc=\1K&bt%æ'Hx͛rf+1gN5ۙ7H.)z7'Ip!.TTdC&t X+9r]r`%Jx~ ąof( 0!:}y?Ch2JxZ"0fnVj\Vf‪4a#zW d LCyB]@U4m;d&42W9\'$9Y(hRB%Cs!6=\Fgeu||W]2{$}.ٜ9DxX>V'gҨؠ׹ vG9qzXM21'_k_`fHoY!.Pzvq(;Ed|"m׾q(9Z=Y@_ B{@ǞZJǸ;D+:)~ 7,.ys(twS1>>"9/xQk"H2C^%خQq}j#5m}BRІ0lA6HTh,mԗI႙˞UL5vkApMfDz7mv ȘQUy9#t(9 ʣH@*\AȰ\#t`aylk"9׸t JXzi5w9_el]49D:6 Jq[iuvBRnK,6 NKz~6*s̙Ⲃ[OpĜL2'!66BEp#JQ@蛘T<'Bg㊥qL3F`n`Y} q09pz蘦pT.*E l*X=ڒkp y a<6<)d(+yhu|RHOwc%tK=aVEW/&}0뒹/C<=OF;Zx^̘ P."} 6&~7+0(sW\Eˈ>s R˺.§X,܍dr`W}Chǥm0꣨v["Vu J(T? lu-G}y_lSFo2TfBT\N`eӄN I mQ;D$ݻ*T~mp>Q+[s_ efF325oH-cB9܌gA?^t{.> ,E_f(L?Ubu-~:#\ɒ"wCWKcZ1c)gkƠ-pۊ 1Ƶo g:X#'{kRK temp.yaml.uncommented mv temp.yaml.uncommented temp.yaml # For each sub-command (prune, create, and check), collect the Borg command-line flags that result # from running borgmatic with the generated configuration. Then, collect the full set of available # Borg flags as reported by "borg --help" for that sub-command. Finally, compare the two lists of # flags to determine which Borg flags borgmatic doesn't yet support. for sub_command in prune create check list info; do echo "********** borg $sub_command **********" for line in $(borgmatic --config temp.yaml $sub_command -v 2 2>&1 | grep "borg\w* $sub_command") ; do echo "$line" | grep '^-' >> borgmatic_borg_flags done sort borgmatic_borg_flags > borgmatic_borg_flags.sorted mv borgmatic_borg_flags.sorted borgmatic_borg_flags for word in $(borg $sub_command --help | grep '^ -') ; do # Exclude a bunch of flags that borgmatic actually supports, but don't get exercised by the # generated sample config, and also flags that don't make sense to support. 
echo "$word" | grep ^-- | sed -e 's/,$//' \ | grep -v '^--archives-only$' \ | grep -v '^--critical$' \ | grep -v '^--debug$' \ | grep -v '^--dry-run$' \ | grep -v '^--error$' \ | grep -v '^--help$' \ | grep -v '^--info$' \ | grep -v '^--json$' \ | grep -v '^--keep-last$' \ | grep -v '^--list$' \ | grep -v '^--bsdflags$' \ | grep -v '^--pattern$' \ | grep -v '^--progress$' \ | grep -v '^--stats$' \ | grep -v '^--read-special$' \ | grep -v '^--repository-only$' \ | grep -v '^--show-rc$' \ | grep -v '^--stats$' \ | grep -v '^--verbose$' \ | grep -v '^--warning$' \ | grep -v '^--exclude' \ | grep -v '^--exclude-from' \ | grep -v '^--first' \ | grep -v '^--format' \ | grep -v '^--glob-archives' \ | grep -v '^--match-archives' \ | grep -v '^--last' \ | grep -v '^--format' \ | grep -v '^--patterns-from' \ | grep -v '^--prefix' \ | grep -v '^--short' \ | grep -v '^--sort-by' \ | grep -v '^-h$' \ >> all_borg_flags done sort all_borg_flags > all_borg_flags.sorted mv all_borg_flags.sorted all_borg_flags comm -13 borgmatic_borg_flags all_borg_flags rm ./*_borg_flags done rm temp.yaml borgmatic-1.7.9/scripts/push000077500000000000000000000002111440467744700161060ustar00rootroot00000000000000#!/bin/bash set -e branch_name=$(git rev-parse --abbrev-ref HEAD) git push -u github "$branch_name" git push -u origin "$branch_name" borgmatic-1.7.9/scripts/release000077500000000000000000000034561440467744700165650ustar00rootroot00000000000000#!/bin/bash set -e projects_token=${1:-} github_token=${2:-} if [[ -z $github_token ]]; then echo "Usage: $0 [projects-token] [github-token]" exit 1 fi if [[ ! -f NEWS ]]; then echo "Missing NEWS file. Try running from root of repository." exit 1 fi version=$(head --lines=1 NEWS) if [[ $version =~ .*dev* ]]; then echo "Refusing to release a dev version: $version" exit 1 fi if ! 
git diff-index --quiet HEAD -- ; then echo "Refusing to release with local changes:" git status --porcelain exit 1 fi git tag $version git push origin $version git push github $version # Build borgmatic and publish to pypi. rm -fr dist python3 setup.py bdist_wheel python3 setup.py sdist gpg --detach-sign --armor dist/borgmatic-*.tar.gz gpg --detach-sign --armor dist/borgmatic-*-py3-none-any.whl twine upload -r pypi --username __token__ dist/borgmatic-*.tar.gz dist/borgmatic-*.tar.gz.asc twine upload -r pypi --username __token__ dist/borgmatic-*-py3-none-any.whl dist/borgmatic-*-py3-none-any.whl.asc # Set release changelogs on projects.torsion.org and GitHub. release_changelog="$(cat NEWS | sed '/^$/q' | grep -v '^\S')" escaped_release_changelog="$(echo "$release_changelog" | sed -z 's/\n/\\n/g' | sed -z 's/\"/\\"/g')" curl --silent --request POST \ "https://projects.torsion.org/api/v1/repos/borgmatic-collective/borgmatic/releases" \ --header "Authorization: token $projects_token" \ --header "Accept: application/json" \ --header "Content-Type: application/json" \ --data "{\"body\": \"$escaped_release_changelog\", \"draft\": false, \"name\": \"borgmatic $version\", \"prerelease\": false, \"tag_name\": \"$version\"}" github-release create --token="$github_token" --owner=witten --repo=borgmatic --tag="$version" \ --name="borgmatic $version" --body="$release_changelog" borgmatic-1.7.9/scripts/run-full-dev-tests000077500000000000000000000010051440467744700206110ustar00rootroot00000000000000#!/bin/sh # This script is for running all tests, including end-to-end tests, on a developer machine. It sets # up database containers to run tests against, runs the tests, and then tears down the containers. # # Run this script from the root directory of the borgmatic source. 
# # For more information, see: # https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/ set -e docker-compose --file tests/end-to-end/docker-compose.yaml up --force-recreate \ --renew-anon-volumes --abort-on-container-exit borgmatic-1.7.9/scripts/run-full-tests000077500000000000000000000017371440467744700200510ustar00rootroot00000000000000#!/bin/sh # This script installs test dependencies and runs all tests, including end-to-end tests. It # is designed to run inside a test container, and presumes that other test infrastructure like # databases are already running. Therefore, on a developer machine, you should not run this script # directly. Instead, run scripts/run-full-dev-tests # # For more information, see: # https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/ set -e apk add --no-cache python3 py3-pip borgbackup postgresql-client mariadb-client mongodb-tools \ py3-ruamel.yaml py3-ruamel.yaml.clib bash sqlite # If certain dependencies of black are available in this version of Alpine, install them. 
apk add --no-cache py3-typed-ast py3-regex || true python3 -m pip install --no-cache --upgrade pip==22.2.2 setuptools==64.0.1 pip3 install --ignore-installed tox==3.25.1 export COVERAGE_FILE=/tmp/.coverage tox --workdir /tmp/.tox --sitepackages tox --workdir /tmp/.tox --sitepackages -e end-to-end borgmatic-1.7.9/setup.cfg000066400000000000000000000007331440467744700153440ustar00rootroot00000000000000[metadata] description_file=README.md [tool:pytest] testpaths = tests addopts = --cov-report term-missing:skip-covered --cov=borgmatic --ignore=tests/end-to-end filterwarnings = ignore:Coverage disabled.*:pytest.PytestWarning [flake8] ignore = E501,W503 exclude = *.*/* multiline-quotes = ''' docstring-quotes = ''' [tool:isort] force_single_line = False include_trailing_comma = True known_first_party = borgmatic line_length = 100 multi_line_output = 3 skip = .tox borgmatic-1.7.9/setup.py000066400000000000000000000025341440467744700152360ustar00rootroot00000000000000from setuptools import find_packages, setup VERSION = '1.7.9' setup( name='borgmatic', version=VERSION, description='Simple, configuration-driven backup software for servers and workstations', author='Dan Helfman', author_email='witten@torsion.org', url='https://torsion.org/borgmatic', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', 'Programming Language :: Python', 'Topic :: Security :: Cryptography', 'Topic :: System :: Archiving :: Backup', ], packages=find_packages(exclude=['tests*']), entry_points={ 'console_scripts': [ 'borgmatic = borgmatic.commands.borgmatic:main', 'upgrade-borgmatic-config = borgmatic.commands.convert_config:main', 'generate-borgmatic-config = borgmatic.commands.generate_config:main', 'validate-borgmatic-config = borgmatic.commands.validate_config:main', ] }, obsoletes=['atticmatic'], install_requires=( 'colorama>=0.4.1,<0.5', 
'jsonschema', 'requests', 'ruamel.yaml>0.15.0,<0.18.0', 'setuptools', ), include_package_data=True, python_requires='>=3.7', ) borgmatic-1.7.9/test_requirements.txt000066400000000000000000000010661440467744700200460ustar00rootroot00000000000000appdirs==1.4.4; python_version >= '3.8' attrs==20.3.0; python_version >= '3.8' black==19.10b0; python_version >= '3.8' click==7.1.2; python_version >= '3.8' colorama==0.4.4 coverage==5.3 flake8==4.0.1 flake8-quotes==3.3.2 flexmock==0.10.4 isort==5.9.1 mccabe==0.6.1 pluggy==0.13.1 pathspec==0.8.1; python_version >= '3.8' py==1.10.0 pycodestyle==2.8.0 pyflakes==2.4.0 jsonschema==3.2.0 pytest==7.2.0 pytest-cov==4.0.0 regex; python_version >= '3.8' requests==2.25.0 ruamel.yaml>0.15.0,<0.18.0 toml==0.10.2; python_version >= '3.8' typed-ast; python_version >= '3.8' borgmatic-1.7.9/tests/000077500000000000000000000000001440467744700146625ustar00rootroot00000000000000borgmatic-1.7.9/tests/__init__.py000066400000000000000000000000001440467744700167610ustar00rootroot00000000000000borgmatic-1.7.9/tests/end-to-end/000077500000000000000000000000001440467744700166145ustar00rootroot00000000000000borgmatic-1.7.9/tests/end-to-end/__init__.py000066400000000000000000000000001440467744700207130ustar00rootroot00000000000000borgmatic-1.7.9/tests/end-to-end/docker-compose.yaml000066400000000000000000000011621440467744700224120ustar00rootroot00000000000000version: '3' services: postgresql: image: postgres:13.1-alpine environment: POSTGRES_PASSWORD: test POSTGRES_DB: test mysql: image: mariadb:10.5 environment: MYSQL_ROOT_PASSWORD: test MYSQL_DATABASE: test mongodb: image: mongo:5.0.5 environment: MONGO_INITDB_ROOT_USERNAME: root MONGO_INITDB_ROOT_PASSWORD: test tests: image: alpine:3.13 volumes: - "../..:/app:ro" tmpfs: - "/app/borgmatic.egg-info" tty: true working_dir: /app command: - /app/scripts/run-full-tests depends_on: - postgresql - mysql 
borgmatic-1.7.9/tests/end-to-end/test_borgmatic.py000066400000000000000000000061541440467744700222020ustar00rootroot00000000000000import json import os import shutil import subprocess import sys import tempfile def generate_configuration(config_path, repository_path): ''' Generate borgmatic configuration into a file at the config path, and update the defaults so as to work for testing (including injecting the given repository path and tacking on an encryption passphrase). ''' subprocess.check_call( 'generate-borgmatic-config --destination {}'.format(config_path).split(' ') ) config = ( open(config_path) .read() .replace('ssh://user@backupserver/./sourcehostname.borg', repository_path) .replace('- ssh://user@backupserver/./{fqdn}', '') .replace('- /var/local/backups/local.borg', '') .replace('- /home/user/path with spaces', '') .replace('- /home', '- {}'.format(config_path)) .replace('- /etc', '') .replace('- /var/log/syslog*', '') + 'storage:\n encryption_passphrase: "test"' ) config_file = open(config_path, 'w') config_file.write(config) config_file.close() def test_borgmatic_command(): # Create a Borg repository. temporary_directory = tempfile.mkdtemp() repository_path = os.path.join(temporary_directory, 'test.borg') extract_path = os.path.join(temporary_directory, 'extract') original_working_directory = os.getcwd() os.mkdir(extract_path) os.chdir(extract_path) try: config_path = os.path.join(temporary_directory, 'test.yaml') generate_configuration(config_path, repository_path) subprocess.check_call( 'borgmatic -v 2 --config {} init --encryption repokey'.format(config_path).split(' ') ) # Run borgmatic to generate a backup archive, and then list it to make sure it exists. 
subprocess.check_call('borgmatic --config {}'.format(config_path).split(' ')) output = subprocess.check_output( 'borgmatic --config {} list --json'.format(config_path).split(' ') ).decode(sys.stdout.encoding) parsed_output = json.loads(output) assert len(parsed_output) == 1 assert len(parsed_output[0]['archives']) == 1 archive_name = parsed_output[0]['archives'][0]['archive'] # Extract the created archive into the current (temporary) directory, and confirm that the # extracted file looks right. output = subprocess.check_output( 'borgmatic --config {} extract --archive {}'.format(config_path, archive_name).split( ' ' ) ).decode(sys.stdout.encoding) extracted_config_path = os.path.join(extract_path, config_path) assert open(extracted_config_path).read() == open(config_path).read() # Exercise the info action. output = subprocess.check_output( 'borgmatic --config {} info --json'.format(config_path).split(' ') ).decode(sys.stdout.encoding) parsed_output = json.loads(output) assert len(parsed_output) == 1 assert 'repository' in parsed_output[0] finally: os.chdir(original_working_directory) shutil.rmtree(temporary_directory) borgmatic-1.7.9/tests/end-to-end/test_completion.py000066400000000000000000000002171440467744700223760ustar00rootroot00000000000000import subprocess def test_bash_completion_runs_without_error(): subprocess.check_call('borgmatic --bash-completion | bash', shell=True) borgmatic-1.7.9/tests/end-to-end/test_database.py000066400000000000000000000142021440467744700217700ustar00rootroot00000000000000import json import os import shutil import subprocess import sys import tempfile import pytest def write_configuration( source_directory, config_path, repository_path, borgmatic_source_directory, postgresql_dump_format='custom', mongodb_dump_format='archive', ): ''' Write out borgmatic configuration into a file at the config path. Set the options so as to work for testing. 
This includes injecting the given repository path, borgmatic source directory for storing database dumps, dump format (for PostgreSQL), and encryption passphrase. ''' config = f''' location: source_directories: - {source_directory} repositories: - {repository_path} borgmatic_source_directory: {borgmatic_source_directory} storage: encryption_passphrase: "test" hooks: postgresql_databases: - name: test hostname: postgresql username: postgres password: test format: {postgresql_dump_format} - name: all hostname: postgresql username: postgres password: test - name: all format: custom hostname: postgresql username: postgres password: test mysql_databases: - name: test hostname: mysql username: root password: test - name: all hostname: mysql username: root password: test - name: all format: sql hostname: mysql username: root password: test mongodb_databases: - name: test hostname: mongodb username: root password: test authentication_database: admin format: {mongodb_dump_format} - name: all hostname: mongodb username: root password: test sqlite_databases: - name: sqlite_test path: /tmp/sqlite_test.db ''' with open(config_path, 'w') as config_file: config_file.write(config) def test_database_dump_and_restore(): # Create a Borg repository. temporary_directory = tempfile.mkdtemp() repository_path = os.path.join(temporary_directory, 'test.borg') borgmatic_source_directory = os.path.join(temporary_directory, '.borgmatic') # Write out a special file to ensure that it gets properly excluded and Borg doesn't hang on it. os.mkfifo(os.path.join(temporary_directory, 'special_file')) original_working_directory = os.getcwd() try: config_path = os.path.join(temporary_directory, 'test.yaml') write_configuration( temporary_directory, config_path, repository_path, borgmatic_source_directory ) subprocess.check_call( ['borgmatic', '-v', '2', '--config', config_path, 'init', '--encryption', 'repokey'] ) # Run borgmatic to generate a backup archive including a database dump. 
subprocess.check_call(['borgmatic', 'create', '--config', config_path, '-v', '2']) # Get the created archive name. output = subprocess.check_output( ['borgmatic', '--config', config_path, 'list', '--json'] ).decode(sys.stdout.encoding) parsed_output = json.loads(output) assert len(parsed_output) == 1 assert len(parsed_output[0]['archives']) == 1 archive_name = parsed_output[0]['archives'][0]['archive'] # Restore the database from the archive. subprocess.check_call( ['borgmatic', '--config', config_path, 'restore', '--archive', archive_name] ) finally: os.chdir(original_working_directory) shutil.rmtree(temporary_directory) def test_database_dump_and_restore_with_directory_format(): # Create a Borg repository. temporary_directory = tempfile.mkdtemp() repository_path = os.path.join(temporary_directory, 'test.borg') borgmatic_source_directory = os.path.join(temporary_directory, '.borgmatic') original_working_directory = os.getcwd() try: config_path = os.path.join(temporary_directory, 'test.yaml') write_configuration( temporary_directory, config_path, repository_path, borgmatic_source_directory, postgresql_dump_format='directory', mongodb_dump_format='directory', ) subprocess.check_call( ['borgmatic', '-v', '2', '--config', config_path, 'init', '--encryption', 'repokey'] ) # Run borgmatic to generate a backup archive including a database dump. subprocess.check_call(['borgmatic', 'create', '--config', config_path, '-v', '2']) # Restore the database from the archive. subprocess.check_call( ['borgmatic', '--config', config_path, 'restore', '--archive', 'latest'] ) finally: os.chdir(original_working_directory) shutil.rmtree(temporary_directory) def test_database_dump_with_error_causes_borgmatic_to_exit(): # Create a Borg repository. 
temporary_directory = tempfile.mkdtemp() repository_path = os.path.join(temporary_directory, 'test.borg') borgmatic_source_directory = os.path.join(temporary_directory, '.borgmatic') original_working_directory = os.getcwd() try: config_path = os.path.join(temporary_directory, 'test.yaml') write_configuration( temporary_directory, config_path, repository_path, borgmatic_source_directory ) subprocess.check_call( ['borgmatic', '-v', '2', '--config', config_path, 'init', '--encryption', 'repokey'] ) # Run borgmatic with a config override such that the database dump fails. with pytest.raises(subprocess.CalledProcessError): subprocess.check_call( [ 'borgmatic', 'create', '--config', config_path, '-v', '2', '--override', "hooks.postgresql_databases=[{'name': 'nope'}]", ] ) finally: os.chdir(original_working_directory) shutil.rmtree(temporary_directory) borgmatic-1.7.9/tests/end-to-end/test_generate_config.py000066400000000000000000000011141440467744700233410ustar00rootroot00000000000000import os import subprocess import tempfile def test_generate_borgmatic_config_with_merging_succeeds(): with tempfile.TemporaryDirectory() as temporary_directory: config_path = os.path.join(temporary_directory, 'test.yaml') new_config_path = os.path.join(temporary_directory, 'new.yaml') subprocess.check_call(f'generate-borgmatic-config --destination {config_path}'.split(' ')) subprocess.check_call( f'generate-borgmatic-config --source {config_path} --destination {new_config_path}'.split( ' ' ) ) borgmatic-1.7.9/tests/end-to-end/test_override.py000066400000000000000000000037261440467744700220540ustar00rootroot00000000000000import os import shutil import subprocess import tempfile def generate_configuration(config_path, repository_path): ''' Generate borgmatic configuration into a file at the config path, and update the defaults so as to work for testing (including injecting the given repository path and tacking on an encryption passphrase). 
''' subprocess.check_call( 'generate-borgmatic-config --destination {}'.format(config_path).split(' ') ) config = ( open(config_path) .read() .replace('ssh://user@backupserver/./sourcehostname.borg', repository_path) .replace('- ssh://user@backupserver/./{fqdn}', '') .replace('- /var/local/backups/local.borg', '') .replace('- /home/user/path with spaces', '') .replace('- /home', '- {}'.format(config_path)) .replace('- /etc', '') .replace('- /var/log/syslog*', '') + 'storage:\n encryption_passphrase: "test"' ) config_file = open(config_path, 'w') config_file.write(config) config_file.close() def test_override_get_normalized(): temporary_directory = tempfile.mkdtemp() repository_path = os.path.join(temporary_directory, 'test.borg') original_working_directory = os.getcwd() try: config_path = os.path.join(temporary_directory, 'test.yaml') generate_configuration(config_path, repository_path) subprocess.check_call( f'borgmatic -v 2 --config {config_path} init --encryption repokey'.split(' ') ) # Run borgmatic with an override structured for an outdated config file format. If # normalization is working, it should get normalized and shouldn't error. 
subprocess.check_call( f'borgmatic create --config {config_path} --override hooks.healthchecks=http://localhost:8888/someuuid'.split( ' ' ) ) finally: os.chdir(original_working_directory) shutil.rmtree(temporary_directory) borgmatic-1.7.9/tests/end-to-end/test_validate_config.py000066400000000000000000000023131440467744700233420ustar00rootroot00000000000000import os import subprocess import tempfile def test_validate_config_command_with_valid_configuration_succeeds(): with tempfile.TemporaryDirectory() as temporary_directory: config_path = os.path.join(temporary_directory, 'test.yaml') subprocess.check_call( 'generate-borgmatic-config --destination {}'.format(config_path).split(' ') ) exit_code = subprocess.call( 'validate-borgmatic-config --config {}'.format(config_path).split(' ') ) assert exit_code == 0 def test_validate_config_command_with_invalid_configuration_fails(): with tempfile.TemporaryDirectory() as temporary_directory: config_path = os.path.join(temporary_directory, 'test.yaml') subprocess.check_call( 'generate-borgmatic-config --destination {}'.format(config_path).split(' ') ) config = open(config_path).read().replace('keep_daily: 7', 'keep_daily: "7"') config_file = open(config_path, 'w') config_file.write(config) config_file.close() exit_code = subprocess.call( 'validate-borgmatic-config --config {}'.format(config_path).split(' ') ) assert exit_code == 1 borgmatic-1.7.9/tests/integration/000077500000000000000000000000001440467744700172055ustar00rootroot00000000000000borgmatic-1.7.9/tests/integration/__init__.py000066400000000000000000000000001440467744700213040ustar00rootroot00000000000000borgmatic-1.7.9/tests/integration/borg/000077500000000000000000000000001440467744700201365ustar00rootroot00000000000000borgmatic-1.7.9/tests/integration/borg/test_feature.py000066400000000000000000000010421440467744700231770ustar00rootroot00000000000000from borgmatic.borg import feature as module def test_available_true_for_new_enough_borg_version(): assert 
module.available(module.Feature.COMPACT, '1.3.7') def test_available_true_for_borg_version_introducing_feature(): assert module.available(module.Feature.COMPACT, '1.2.0a2') def test_available_true_for_borg_stable_version_introducing_feature(): assert module.available(module.Feature.COMPACT, '1.2.0') def test_available_false_for_too_old_borg_version(): assert not module.available(module.Feature.COMPACT, '1.1.5') borgmatic-1.7.9/tests/integration/commands/000077500000000000000000000000001440467744700210065ustar00rootroot00000000000000borgmatic-1.7.9/tests/integration/commands/__init__.py000066400000000000000000000000001440467744700231050ustar00rootroot00000000000000borgmatic-1.7.9/tests/integration/commands/test_arguments.py000066400000000000000000000443611440467744700244340ustar00rootroot00000000000000import pytest from flexmock import flexmock from borgmatic.commands import arguments as module def test_parse_arguments_with_no_arguments_uses_defaults(): config_paths = ['default'] flexmock(module.collect).should_receive('get_default_config_paths').and_return(config_paths) arguments = module.parse_arguments() global_arguments = arguments['global'] assert global_arguments.config_paths == config_paths assert global_arguments.excludes_filename is None assert global_arguments.verbosity == 0 assert global_arguments.syslog_verbosity == 0 assert global_arguments.log_file_verbosity == 0 def test_parse_arguments_with_multiple_config_paths_parses_as_list(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) arguments = module.parse_arguments('--config', 'myconfig', 'otherconfig') global_arguments = arguments['global'] assert global_arguments.config_paths == ['myconfig', 'otherconfig'] assert global_arguments.verbosity == 0 assert global_arguments.syslog_verbosity == 0 assert global_arguments.log_file_verbosity == 0 def test_parse_arguments_with_verbosity_overrides_default(): config_paths = ['default'] 
flexmock(module.collect).should_receive('get_default_config_paths').and_return(config_paths) arguments = module.parse_arguments('--verbosity', '1') global_arguments = arguments['global'] assert global_arguments.config_paths == config_paths assert global_arguments.excludes_filename is None assert global_arguments.verbosity == 1 assert global_arguments.syslog_verbosity == 0 assert global_arguments.log_file_verbosity == 0 def test_parse_arguments_with_syslog_verbosity_overrides_default(): config_paths = ['default'] flexmock(module.collect).should_receive('get_default_config_paths').and_return(config_paths) arguments = module.parse_arguments('--syslog-verbosity', '2') global_arguments = arguments['global'] assert global_arguments.config_paths == config_paths assert global_arguments.excludes_filename is None assert global_arguments.verbosity == 0 assert global_arguments.syslog_verbosity == 2 def test_parse_arguments_with_log_file_verbosity_overrides_default(): config_paths = ['default'] flexmock(module.collect).should_receive('get_default_config_paths').and_return(config_paths) arguments = module.parse_arguments('--log-file-verbosity', '-1') global_arguments = arguments['global'] assert global_arguments.config_paths == config_paths assert global_arguments.excludes_filename is None assert global_arguments.verbosity == 0 assert global_arguments.syslog_verbosity == 0 assert global_arguments.log_file_verbosity == -1 def test_parse_arguments_with_single_override_parses(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) arguments = module.parse_arguments('--override', 'foo.bar=baz') global_arguments = arguments['global'] assert global_arguments.overrides == ['foo.bar=baz'] def test_parse_arguments_with_multiple_overrides_parses(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) arguments = module.parse_arguments('--override', 'foo.bar=baz', 'foo.quux=7') global_arguments = 
arguments['global'] assert global_arguments.overrides == ['foo.bar=baz', 'foo.quux=7'] def test_parse_arguments_with_multiple_overrides_and_flags_parses(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) arguments = module.parse_arguments( '--override', 'foo.bar=baz', '--override', 'foo.quux=7', 'this.that=8' ) global_arguments = arguments['global'] assert global_arguments.overrides == ['foo.bar=baz', 'foo.quux=7', 'this.that=8'] def test_parse_arguments_with_list_json_overrides_default(): arguments = module.parse_arguments('list', '--json') assert 'list' in arguments assert arguments['list'].json is True def test_parse_arguments_with_no_actions_defaults_to_all_actions_enabled(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) arguments = module.parse_arguments() assert 'prune' in arguments assert 'create' in arguments assert 'check' in arguments def test_parse_arguments_with_no_actions_passes_argument_to_relevant_actions(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) arguments = module.parse_arguments('--stats', '--list') assert 'prune' in arguments assert arguments['prune'].stats assert arguments['prune'].list_archives assert 'create' in arguments assert arguments['create'].stats assert arguments['create'].list_files assert 'check' in arguments def test_parse_arguments_with_help_and_no_actions_shows_global_help(capsys): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(SystemExit) as exit: module.parse_arguments('--help') assert exit.value.code == 0 captured = capsys.readouterr() assert 'global arguments:' in captured.out assert 'actions:' in captured.out def test_parse_arguments_with_help_and_action_shows_action_help(capsys): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(SystemExit) as exit: 
module.parse_arguments('create', '--help') assert exit.value.code == 0 captured = capsys.readouterr() assert 'global arguments:' not in captured.out assert 'actions:' not in captured.out assert 'create arguments:' in captured.out def test_parse_arguments_with_action_before_global_options_parses_options(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) arguments = module.parse_arguments('prune', '--verbosity', '2') assert 'prune' in arguments assert arguments['global'].verbosity == 2 def test_parse_arguments_with_global_options_before_action_parses_options(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) arguments = module.parse_arguments('--verbosity', '2', 'prune') assert 'prune' in arguments assert arguments['global'].verbosity == 2 def test_parse_arguments_with_prune_action_leaves_other_actions_disabled(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) arguments = module.parse_arguments('prune') assert 'prune' in arguments assert 'create' not in arguments assert 'check' not in arguments def test_parse_arguments_with_multiple_actions_leaves_other_action_disabled(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) arguments = module.parse_arguments('create', 'check') assert 'prune' not in arguments assert 'create' in arguments assert 'check' in arguments def test_parse_arguments_with_invalid_arguments_exits(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(SystemExit): module.parse_arguments('--posix-me-harder') def test_parse_arguments_disallows_deprecated_excludes_option(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(ValueError): module.parse_arguments('--config', 'myconfig', '--excludes', 'myexcludes') def 
test_parse_arguments_disallows_encryption_mode_without_init(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(SystemExit): module.parse_arguments('--config', 'myconfig', '--encryption', 'repokey') def test_parse_arguments_allows_encryption_mode_with_init(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) module.parse_arguments('--config', 'myconfig', 'init', '--encryption', 'repokey') def test_parse_arguments_requires_encryption_mode_with_init(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(SystemExit): module.parse_arguments('--config', 'myconfig', 'init') def test_parse_arguments_disallows_append_only_without_init(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(SystemExit): module.parse_arguments('--config', 'myconfig', '--append-only') def test_parse_arguments_disallows_storage_quota_without_init(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(SystemExit): module.parse_arguments('--config', 'myconfig', '--storage-quota', '5G') def test_parse_arguments_allows_init_and_prune(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) module.parse_arguments('--config', 'myconfig', 'init', '--encryption', 'repokey', 'prune') def test_parse_arguments_allows_init_and_create(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) module.parse_arguments('--config', 'myconfig', 'init', '--encryption', 'repokey', 'create') def test_parse_arguments_allows_repository_with_extract(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) module.parse_arguments( '--config', 'myconfig', 'extract', '--repository', 'test.borg', '--archive', 'test' ) def 
test_parse_arguments_allows_repository_with_mount(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) module.parse_arguments( '--config', 'myconfig', 'mount', '--repository', 'test.borg', '--archive', 'test', '--mount-point', '/mnt', ) def test_parse_arguments_allows_repository_with_list(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) module.parse_arguments('--config', 'myconfig', 'list', '--repository', 'test.borg') def test_parse_arguments_disallows_archive_unless_action_consumes_it(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(SystemExit): module.parse_arguments('--config', 'myconfig', '--archive', 'test') def test_parse_arguments_disallows_paths_unless_action_consumes_it(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(SystemExit): module.parse_arguments('--config', 'myconfig', '--path', 'test') def test_parse_arguments_allows_archive_with_extract(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) module.parse_arguments('--config', 'myconfig', 'extract', '--archive', 'test') def test_parse_arguments_allows_archive_with_mount(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) module.parse_arguments( '--config', 'myconfig', 'mount', '--archive', 'test', '--mount-point', '/mnt' ) def test_parse_arguments_allows_archive_with_restore(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) module.parse_arguments('--config', 'myconfig', 'restore', '--archive', 'test') def test_parse_arguments_allows_archive_with_list(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) module.parse_arguments('--config', 'myconfig', 'list', '--archive', 'test') def 
test_parse_arguments_requires_archive_with_extract(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(SystemExit): module.parse_arguments('--config', 'myconfig', 'extract') def test_parse_arguments_requires_archive_with_restore(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(SystemExit): module.parse_arguments('--config', 'myconfig', 'restore') def test_parse_arguments_requires_mount_point_with_mount(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(SystemExit): module.parse_arguments('--config', 'myconfig', 'mount', '--archive', 'test') def test_parse_arguments_requires_mount_point_with_umount(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(SystemExit): module.parse_arguments('--config', 'myconfig', 'umount') def test_parse_arguments_allows_progress_before_create(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) module.parse_arguments('--progress', 'create', 'list') def test_parse_arguments_allows_progress_after_create(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) module.parse_arguments('create', '--progress', 'list') def test_parse_arguments_allows_progress_and_extract(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) module.parse_arguments('--progress', 'extract', '--archive', 'test', 'list') def test_parse_arguments_disallows_progress_without_create(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(SystemExit): module.parse_arguments('--progress', 'list') def test_parse_arguments_with_stats_and_create_flags_does_not_raise(): 
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) module.parse_arguments('--stats', 'create', 'list') def test_parse_arguments_with_stats_and_prune_flags_does_not_raise(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) module.parse_arguments('--stats', 'prune', 'list') def test_parse_arguments_with_stats_flag_but_no_create_or_prune_flag_raises_value_error(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(SystemExit): module.parse_arguments('--stats', 'list') def test_parse_arguments_with_list_and_create_flags_does_not_raise(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) module.parse_arguments('--list', 'create') def test_parse_arguments_with_list_and_prune_flags_does_not_raise(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) module.parse_arguments('--list', 'prune') def test_parse_arguments_with_list_flag_but_no_relevant_action_raises_value_error(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(SystemExit): module.parse_arguments('--list', 'rcreate') def test_parse_arguments_disallows_list_with_progress_for_create_action(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(ValueError): module.parse_arguments('create', '--list', '--progress') def test_parse_arguments_allows_json_with_list_or_info(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) module.parse_arguments('list', '--json') module.parse_arguments('info', '--json') def test_parse_arguments_disallows_json_with_both_list_and_info(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(ValueError): module.parse_arguments('list', 'info', 
'--json') def test_parse_arguments_disallows_json_with_both_list_and_rinfo(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(ValueError): module.parse_arguments('list', 'rinfo', '--json') def test_parse_arguments_disallows_json_with_both_rinfo_and_info(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(ValueError): module.parse_arguments('rinfo', 'info', '--json') def test_parse_arguments_disallows_transfer_with_both_archive_and_match_archives(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(ValueError): module.parse_arguments( 'transfer', '--source-repository', 'source.borg', '--archive', 'foo', '--match-archives', 'sh:*bar', ) def test_parse_arguments_disallows_info_with_both_archive_and_match_archives(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(ValueError): module.parse_arguments('info', '--archive', 'foo', '--match-archives', 'sh:*bar') def test_parse_arguments_disallows_info_with_both_archive_and_prefix(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(ValueError): module.parse_arguments('info', '--archive', 'foo', '--prefix', 'bar') def test_parse_arguments_disallows_info_with_both_prefix_and_match_archives(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) with pytest.raises(ValueError): module.parse_arguments('info', '--prefix', 'foo', '--match-archives', 'sh:*bar') def test_parse_arguments_check_only_extract_does_not_raise_extract_subparser_error(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) module.parse_arguments('check', '--only', 'extract') def test_parse_arguments_extract_archive_check_does_not_raise_check_subparser_error(): 
flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) module.parse_arguments('extract', '--archive', 'check') def test_parse_arguments_extract_with_check_only_extract_does_not_raise(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) module.parse_arguments('extract', '--archive', 'name', 'check', '--only', 'extract') borgmatic-1.7.9/tests/integration/commands/test_borgmatic.py000066400000000000000000000006601440467744700243700ustar00rootroot00000000000000import subprocess from flexmock import flexmock from borgmatic.commands import borgmatic as module def test_borgmatic_version_matches_news_version(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) borgmatic_version = subprocess.check_output(('borgmatic', '--version')).decode('ascii') news_version = open('NEWS').readline() assert borgmatic_version == news_version borgmatic-1.7.9/tests/integration/commands/test_completion.py000066400000000000000000000002051440467744700245650ustar00rootroot00000000000000from borgmatic.commands import completion as module def test_bash_completion_does_not_raise(): assert module.bash_completion() borgmatic-1.7.9/tests/integration/commands/test_convert_config.py000066400000000000000000000032071440467744700254260ustar00rootroot00000000000000import os import pytest from flexmock import flexmock from borgmatic.commands import convert_config as module def test_parse_arguments_with_no_arguments_uses_defaults(): flexmock(os.path).should_receive('exists').and_return(True) parser = module.parse_arguments() assert parser.source_config_filename == module.DEFAULT_SOURCE_CONFIG_FILENAME assert parser.source_excludes_filename == module.DEFAULT_SOURCE_EXCLUDES_FILENAME assert parser.destination_config_filename == module.DEFAULT_DESTINATION_CONFIG_FILENAME def test_parse_arguments_with_filename_arguments_overrides_defaults(): 
flexmock(os.path).should_receive('exists').and_return(True) parser = module.parse_arguments( '--source-config', 'config', '--source-excludes', 'excludes', '--destination-config', 'config.yaml', ) assert parser.source_config_filename == 'config' assert parser.source_excludes_filename == 'excludes' assert parser.destination_config_filename == 'config.yaml' def test_parse_arguments_with_missing_default_excludes_file_sets_filename_to_none(): flexmock(os.path).should_receive('exists').and_return(False) parser = module.parse_arguments() assert parser.source_config_filename == module.DEFAULT_SOURCE_CONFIG_FILENAME assert parser.source_excludes_filename is None assert parser.destination_config_filename == module.DEFAULT_DESTINATION_CONFIG_FILENAME def test_parse_arguments_with_invalid_arguments_exits(): flexmock(os.path).should_receive('exists').and_return(True) with pytest.raises(SystemExit): module.parse_arguments('--posix-me-harder') borgmatic-1.7.9/tests/integration/commands/test_generate_config.py000066400000000000000000000014431440467744700255400ustar00rootroot00000000000000from borgmatic.commands import generate_config as module def test_parse_arguments_with_no_arguments_uses_default_destination(): parser = module.parse_arguments() assert parser.destination_filename == module.DEFAULT_DESTINATION_CONFIG_FILENAME def test_parse_arguments_with_destination_argument_overrides_default(): parser = module.parse_arguments('--destination', 'config.yaml') assert parser.destination_filename == 'config.yaml' def test_parse_arguments_parses_source(): parser = module.parse_arguments('--source', 'source.yaml', '--destination', 'config.yaml') assert parser.source_filename == 'source.yaml' def test_parse_arguments_parses_overwrite(): parser = module.parse_arguments('--destination', 'config.yaml', '--overwrite') assert parser.overwrite borgmatic-1.7.9/tests/integration/commands/test_validate_config.py000066400000000000000000000012401440467744700255320ustar00rootroot00000000000000from 
flexmock import flexmock from borgmatic.commands import validate_config as module def test_parse_arguments_with_no_arguments_uses_defaults(): config_paths = ['default'] flexmock(module.collect).should_receive('get_default_config_paths').and_return(config_paths) parser = module.parse_arguments() assert parser.config_paths == config_paths def test_parse_arguments_with_multiple_config_paths_parses_as_list(): flexmock(module.collect).should_receive('get_default_config_paths').and_return(['default']) parser = module.parse_arguments('--config', 'myconfig', 'otherconfig') assert parser.config_paths == ['myconfig', 'otherconfig'] borgmatic-1.7.9/tests/integration/config/000077500000000000000000000000001440467744700204525ustar00rootroot00000000000000borgmatic-1.7.9/tests/integration/config/__init__.py000066400000000000000000000000001440467744700225510ustar00rootroot00000000000000borgmatic-1.7.9/tests/integration/config/test_generate.py000066400000000000000000000173671440467744700236730ustar00rootroot00000000000000import os import sys from io import StringIO import pytest from flexmock import flexmock from borgmatic.config import generate as module def test_insert_newline_before_comment_does_not_raise(): field_name = 'foo' config = module.yaml.comments.CommentedMap([(field_name, 33)]) config.yaml_set_comment_before_after_key(key=field_name, before='Comment') module._insert_newline_before_comment(config, field_name) def test_comment_out_line_skips_blank_line(): line = ' \n' assert module._comment_out_line(line) == line def test_comment_out_line_skips_already_commented_out_line(): line = ' # foo' assert module._comment_out_line(line) == line def test_comment_out_line_comments_section_name(): line = 'figgy-pudding:' assert module._comment_out_line(line) == '# ' + line def test_comment_out_line_comments_indented_option(): line = ' enabled: true' assert module._comment_out_line(line) == ' # enabled: true' def test_comment_out_line_comments_twice_indented_option(): line = ' - 
item' assert module._comment_out_line(line) == ' # - item' def test_comment_out_optional_configuration_comments_optional_config_only(): # The "# COMMENT_OUT" comment is a sentinel used to express that the following key is optional. # It's stripped out of the final output. flexmock(module)._comment_out_line = lambda line: '# ' + line config = ''' # COMMENT_OUT foo: # COMMENT_OUT bar: - baz - quux location: repositories: - one - two # This comment should be kept. # COMMENT_OUT other: thing ''' # flake8: noqa expected_config = ''' # foo: # bar: # - baz # - quux location: repositories: - one - two # This comment should be kept. # other: thing ''' assert module._comment_out_optional_configuration(config.strip()) == expected_config.strip() def test_render_configuration_converts_configuration_to_yaml_string(): yaml_string = module.render_configuration({'foo': 'bar'}) assert yaml_string == 'foo: bar\n' def test_write_configuration_does_not_raise(): flexmock(os.path).should_receive('exists').and_return(False) flexmock(os).should_receive('makedirs') builtins = flexmock(sys.modules['builtins']) builtins.should_receive('open').and_return(StringIO()) flexmock(os).should_receive('chmod') module.write_configuration('config.yaml', 'config: yaml') def test_write_configuration_with_already_existing_file_raises(): flexmock(os.path).should_receive('exists').and_return(True) with pytest.raises(FileExistsError): module.write_configuration('config.yaml', 'config: yaml') def test_write_configuration_with_already_existing_file_and_overwrite_does_not_raise(): flexmock(os.path).should_receive('exists').and_return(True) module.write_configuration('/tmp/config.yaml', 'config: yaml', overwrite=True) def test_write_configuration_with_already_existing_directory_does_not_raise(): flexmock(os.path).should_receive('exists').and_return(False) flexmock(os).should_receive('makedirs').and_raise(FileExistsError) builtins = flexmock(sys.modules['builtins']) 
builtins.should_receive('open').and_return(StringIO()) flexmock(os).should_receive('chmod') module.write_configuration('config.yaml', 'config: yaml') def test_add_comments_to_configuration_sequence_of_strings_does_not_raise(): config = module.yaml.comments.CommentedSeq(['foo', 'bar']) schema = {'type': 'array', 'items': {'type': 'string'}} module.add_comments_to_configuration_sequence(config, schema) def test_add_comments_to_configuration_sequence_of_maps_does_not_raise(): config = module.yaml.comments.CommentedSeq([module.yaml.comments.CommentedMap([('foo', 'yo')])]) schema = { 'type': 'array', 'items': {'type': 'object', 'properties': {'foo': {'description': 'yo'}}}, } module.add_comments_to_configuration_sequence(config, schema) def test_add_comments_to_configuration_sequence_of_maps_without_description_does_not_raise(): config = module.yaml.comments.CommentedSeq([module.yaml.comments.CommentedMap([('foo', 'yo')])]) schema = {'type': 'array', 'items': {'type': 'object', 'properties': {'foo': {}}}} module.add_comments_to_configuration_sequence(config, schema) def test_add_comments_to_configuration_object_does_not_raise(): # Ensure that it can deal with fields both in the schema and missing from the schema. 
config = module.yaml.comments.CommentedMap([('foo', 33), ('bar', 44), ('baz', 55)]) schema = { 'type': 'object', 'properties': {'foo': {'description': 'Foo'}, 'bar': {'description': 'Bar'}}, } module.add_comments_to_configuration_object(config, schema) def test_add_comments_to_configuration_object_with_skip_first_does_not_raise(): config = module.yaml.comments.CommentedMap([('foo', 33)]) schema = {'type': 'object', 'properties': {'foo': {'description': 'Foo'}}} module.add_comments_to_configuration_object(config, schema, skip_first=True) def test_remove_commented_out_sentinel_keeps_other_comments(): field_name = 'foo' config = module.yaml.comments.CommentedMap([(field_name, 33)]) config.yaml_set_comment_before_after_key(key=field_name, before='Actual comment.\nCOMMENT_OUT') module.remove_commented_out_sentinel(config, field_name) comments = config.ca.items[field_name][module.RUAMEL_YAML_COMMENTS_INDEX] assert len(comments) == 1 assert comments[0].value == '# Actual comment.\n' def test_remove_commented_out_sentinel_without_sentinel_keeps_other_comments(): field_name = 'foo' config = module.yaml.comments.CommentedMap([(field_name, 33)]) config.yaml_set_comment_before_after_key(key=field_name, before='Actual comment.') module.remove_commented_out_sentinel(config, field_name) comments = config.ca.items[field_name][module.RUAMEL_YAML_COMMENTS_INDEX] assert len(comments) == 1 assert comments[0].value == '# Actual comment.\n' def test_remove_commented_out_sentinel_on_unknown_field_does_not_raise(): field_name = 'foo' config = module.yaml.comments.CommentedMap([(field_name, 33)]) config.yaml_set_comment_before_after_key(key=field_name, before='Actual comment.') module.remove_commented_out_sentinel(config, 'unknown') def test_generate_sample_configuration_does_not_raise(): builtins = flexmock(sys.modules['builtins']) builtins.should_receive('open').with_args('schema.yaml').and_return('') flexmock(module.yaml).should_receive('round_trip_load') 
flexmock(module).should_receive('_schema_to_sample_configuration') flexmock(module).should_receive('merge_source_configuration_into_destination') flexmock(module).should_receive('render_configuration') flexmock(module).should_receive('_comment_out_optional_configuration') flexmock(module).should_receive('write_configuration') module.generate_sample_configuration(None, 'dest.yaml', 'schema.yaml') def test_generate_sample_configuration_with_source_filename_does_not_raise(): builtins = flexmock(sys.modules['builtins']) builtins.should_receive('open').with_args('schema.yaml').and_return('') flexmock(module.yaml).should_receive('round_trip_load') flexmock(module.load).should_receive('load_configuration') flexmock(module.normalize).should_receive('normalize') flexmock(module).should_receive('_schema_to_sample_configuration') flexmock(module).should_receive('merge_source_configuration_into_destination') flexmock(module).should_receive('render_configuration') flexmock(module).should_receive('_comment_out_optional_configuration') flexmock(module).should_receive('write_configuration') module.generate_sample_configuration('source.yaml', 'dest.yaml', 'schema.yaml') borgmatic-1.7.9/tests/integration/config/test_legacy.py000066400000000000000000000011021440467744700233210ustar00rootroot00000000000000import string from collections import OrderedDict from io import StringIO from borgmatic.config import legacy as module def test_parse_section_options_with_punctuation_should_return_section_options(): parser = module.RawConfigParser() parser.read_file(StringIO('[section]\nfoo: {}\n'.format(string.punctuation))) section_format = module.Section_format( 'section', (module.Config_option('foo', str, required=True),) ) config = module.parse_section_options(parser, section_format) assert config == OrderedDict((('foo', string.punctuation),)) borgmatic-1.7.9/tests/integration/config/test_load.py000066400000000000000000000355231440467744700230120ustar00rootroot00000000000000import io import 
sys import pytest import ruamel.yaml from flexmock import flexmock from borgmatic.config import load as module def test_load_configuration_parses_contents(): builtins = flexmock(sys.modules['builtins']) builtins.should_receive('open').with_args('config.yaml').and_return('key: value') assert module.load_configuration('config.yaml') == {'key': 'value'} def test_load_configuration_inlines_include_relative_to_current_directory(): builtins = flexmock(sys.modules['builtins']) flexmock(module.os).should_receive('getcwd').and_return('/tmp') flexmock(module.os.path).should_receive('isabs').and_return(False) flexmock(module.os.path).should_receive('exists').and_return(True) include_file = io.StringIO('value') include_file.name = 'include.yaml' builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file) config_file = io.StringIO('key: !include include.yaml') config_file.name = 'config.yaml' builtins.should_receive('open').with_args('config.yaml').and_return(config_file) assert module.load_configuration('config.yaml') == {'key': 'value'} def test_load_configuration_inlines_include_relative_to_config_parent_directory(): builtins = flexmock(sys.modules['builtins']) flexmock(module.os).should_receive('getcwd').and_return('/tmp') flexmock(module.os.path).should_receive('isabs').with_args('/etc').and_return(True) flexmock(module.os.path).should_receive('isabs').with_args('/etc/config.yaml').and_return(True) flexmock(module.os.path).should_receive('isabs').with_args('include.yaml').and_return(False) flexmock(module.os.path).should_receive('exists').with_args('/tmp/include.yaml').and_return( False ) flexmock(module.os.path).should_receive('exists').with_args('/etc/include.yaml').and_return( True ) include_file = io.StringIO('value') include_file.name = 'include.yaml' builtins.should_receive('open').with_args('/etc/include.yaml').and_return(include_file) config_file = io.StringIO('key: !include include.yaml') config_file.name = '/etc/config.yaml' 
builtins.should_receive('open').with_args('/etc/config.yaml').and_return(config_file) assert module.load_configuration('/etc/config.yaml') == {'key': 'value'} def test_load_configuration_raises_if_relative_include_does_not_exist(): builtins = flexmock(sys.modules['builtins']) flexmock(module.os).should_receive('getcwd').and_return('/tmp') flexmock(module.os.path).should_receive('isabs').with_args('/etc').and_return(True) flexmock(module.os.path).should_receive('isabs').with_args('/etc/config.yaml').and_return(True) flexmock(module.os.path).should_receive('isabs').with_args('include.yaml').and_return(False) flexmock(module.os.path).should_receive('exists').and_return(False) config_file = io.StringIO('key: !include include.yaml') config_file.name = '/etc/config.yaml' builtins.should_receive('open').with_args('/etc/config.yaml').and_return(config_file) with pytest.raises(FileNotFoundError): module.load_configuration('/etc/config.yaml') def test_load_configuration_inlines_absolute_include(): builtins = flexmock(sys.modules['builtins']) flexmock(module.os).should_receive('getcwd').and_return('/tmp') flexmock(module.os.path).should_receive('isabs').and_return(True) flexmock(module.os.path).should_receive('exists').never() include_file = io.StringIO('value') include_file.name = '/root/include.yaml' builtins.should_receive('open').with_args('/root/include.yaml').and_return(include_file) config_file = io.StringIO('key: !include /root/include.yaml') config_file.name = 'config.yaml' builtins.should_receive('open').with_args('config.yaml').and_return(config_file) assert module.load_configuration('config.yaml') == {'key': 'value'} def test_load_configuration_raises_if_absolute_include_does_not_exist(): builtins = flexmock(sys.modules['builtins']) flexmock(module.os).should_receive('getcwd').and_return('/tmp') flexmock(module.os.path).should_receive('isabs').and_return(True) builtins.should_receive('open').with_args('/root/include.yaml').and_raise(FileNotFoundError) config_file 
= io.StringIO('key: !include /root/include.yaml') config_file.name = 'config.yaml' builtins.should_receive('open').with_args('config.yaml').and_return(config_file) with pytest.raises(FileNotFoundError): assert module.load_configuration('config.yaml') def test_load_configuration_merges_include(): builtins = flexmock(sys.modules['builtins']) flexmock(module.os).should_receive('getcwd').and_return('/tmp') flexmock(module.os.path).should_receive('isabs').and_return(False) flexmock(module.os.path).should_receive('exists').and_return(True) include_file = io.StringIO( ''' foo: bar baz: quux ''' ) include_file.name = 'include.yaml' builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file) config_file = io.StringIO( ''' foo: override <<: !include include.yaml ''' ) config_file.name = 'config.yaml' builtins.should_receive('open').with_args('config.yaml').and_return(config_file) assert module.load_configuration('config.yaml') == {'foo': 'override', 'baz': 'quux'} def test_load_configuration_does_not_merge_include_list(): builtins = flexmock(sys.modules['builtins']) flexmock(module.os).should_receive('getcwd').and_return('/tmp') flexmock(module.os.path).should_receive('isabs').and_return(False) flexmock(module.os.path).should_receive('exists').and_return(True) include_file = io.StringIO( ''' - one - two ''' ) include_file.name = 'include.yaml' builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file) config_file = io.StringIO( ''' foo: bar repositories: <<: !include include.yaml ''' ) config_file.name = 'config.yaml' builtins.should_receive('open').with_args('config.yaml').and_return(config_file) with pytest.raises(ruamel.yaml.error.YAMLError): assert module.load_configuration('config.yaml') def test_deep_merge_nodes_replaces_colliding_scalar_values(): node_values = [ ( ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='retention'), ruamel.yaml.nodes.MappingNode( tag='tag:yaml.org,2002:map', value=[ ( 
ruamel.yaml.nodes.ScalarNode( tag='tag:yaml.org,2002:str', value='keep_hourly' ), ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='24'), ), ( ruamel.yaml.nodes.ScalarNode( tag='tag:yaml.org,2002:str', value='keep_daily' ), ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='7'), ), ], ), ), ( ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='retention'), ruamel.yaml.nodes.MappingNode( tag='tag:yaml.org,2002:map', value=[ ( ruamel.yaml.nodes.ScalarNode( tag='tag:yaml.org,2002:str', value='keep_daily' ), ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='5'), ), ], ), ), ] result = module.deep_merge_nodes(node_values) assert len(result) == 1 (section_key, section_value) = result[0] assert section_key.value == 'retention' options = section_value.value assert len(options) == 2 assert options[0][0].value == 'keep_hourly' assert options[0][1].value == '24' assert options[1][0].value == 'keep_daily' assert options[1][1].value == '5' def test_deep_merge_nodes_keeps_non_colliding_scalar_values(): node_values = [ ( ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='retention'), ruamel.yaml.nodes.MappingNode( tag='tag:yaml.org,2002:map', value=[ ( ruamel.yaml.nodes.ScalarNode( tag='tag:yaml.org,2002:str', value='keep_hourly' ), ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='24'), ), ( ruamel.yaml.nodes.ScalarNode( tag='tag:yaml.org,2002:str', value='keep_daily' ), ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='7'), ), ], ), ), ( ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='retention'), ruamel.yaml.nodes.MappingNode( tag='tag:yaml.org,2002:map', value=[ ( ruamel.yaml.nodes.ScalarNode( tag='tag:yaml.org,2002:str', value='keep_minutely' ), ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='10'), ), ], ), ), ] result = module.deep_merge_nodes(node_values) assert len(result) == 1 (section_key, section_value) = result[0] assert 
section_key.value == 'retention' options = section_value.value assert len(options) == 3 assert options[0][0].value == 'keep_hourly' assert options[0][1].value == '24' assert options[1][0].value == 'keep_daily' assert options[1][1].value == '7' assert options[2][0].value == 'keep_minutely' assert options[2][1].value == '10' def test_deep_merge_nodes_keeps_deeply_nested_values(): node_values = [ ( ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='storage'), ruamel.yaml.nodes.MappingNode( tag='tag:yaml.org,2002:map', value=[ ( ruamel.yaml.nodes.ScalarNode( tag='tag:yaml.org,2002:str', value='lock_wait' ), ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:int', value='5'), ), ( ruamel.yaml.nodes.ScalarNode( tag='tag:yaml.org,2002:str', value='extra_borg_options' ), ruamel.yaml.nodes.MappingNode( tag='tag:yaml.org,2002:map', value=[ ( ruamel.yaml.nodes.ScalarNode( tag='tag:yaml.org,2002:str', value='init' ), ruamel.yaml.nodes.ScalarNode( tag='tag:yaml.org,2002:str', value='--init-option' ), ), ], ), ), ], ), ), ( ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='storage'), ruamel.yaml.nodes.MappingNode( tag='tag:yaml.org,2002:map', value=[ ( ruamel.yaml.nodes.ScalarNode( tag='tag:yaml.org,2002:str', value='extra_borg_options' ), ruamel.yaml.nodes.MappingNode( tag='tag:yaml.org,2002:map', value=[ ( ruamel.yaml.nodes.ScalarNode( tag='tag:yaml.org,2002:str', value='prune' ), ruamel.yaml.nodes.ScalarNode( tag='tag:yaml.org,2002:str', value='--prune-option' ), ), ], ), ), ], ), ), ] result = module.deep_merge_nodes(node_values) assert len(result) == 1 (section_key, section_value) = result[0] assert section_key.value == 'storage' options = section_value.value assert len(options) == 2 assert options[0][0].value == 'lock_wait' assert options[0][1].value == '5' assert options[1][0].value == 'extra_borg_options' nested_options = options[1][1].value assert len(nested_options) == 2 assert nested_options[0][0].value == 'init' assert 
nested_options[0][1].value == '--init-option' assert nested_options[1][0].value == 'prune' assert nested_options[1][1].value == '--prune-option' def test_deep_merge_nodes_appends_colliding_sequence_values(): node_values = [ ( ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='hooks'), ruamel.yaml.nodes.MappingNode( tag='tag:yaml.org,2002:map', value=[ ( ruamel.yaml.nodes.ScalarNode( tag='tag:yaml.org,2002:str', value='before_backup' ), ruamel.yaml.nodes.SequenceNode( tag='tag:yaml.org,2002:int', value=['echo 1', 'echo 2'] ), ), ], ), ), ( ruamel.yaml.nodes.ScalarNode(tag='tag:yaml.org,2002:str', value='hooks'), ruamel.yaml.nodes.MappingNode( tag='tag:yaml.org,2002:map', value=[ ( ruamel.yaml.nodes.ScalarNode( tag='tag:yaml.org,2002:str', value='before_backup' ), ruamel.yaml.nodes.SequenceNode( tag='tag:yaml.org,2002:int', value=['echo 3', 'echo 4'] ), ), ], ), ), ] result = module.deep_merge_nodes(node_values) assert len(result) == 1 (section_key, section_value) = result[0] assert section_key.value == 'hooks' options = section_value.value assert len(options) == 1 assert options[0][0].value == 'before_backup' assert options[0][1].value == ['echo 1', 'echo 2', 'echo 3', 'echo 4'] borgmatic-1.7.9/tests/integration/config/test_override.py000066400000000000000000000020171440467744700237020ustar00rootroot00000000000000import pytest from borgmatic.config import override as module @pytest.mark.parametrize( 'value,expected_result', ( ('thing', 'thing'), ('33', 33), ('33b', '33b'), ('true', True), ('false', False), ('[foo]', ['foo']), ('[foo, bar]', ['foo', 'bar']), ), ) def test_convert_value_type_coerces_values(value, expected_result): assert module.convert_value_type(value) == expected_result def test_apply_overrides_updates_config(): raw_overrides = [ 'section.key=value1', 'other_section.thing=value2', 'section.nested.key=value3', 'new.foo=bar', ] config = { 'section': {'key': 'value', 'other': 'other_value'}, 'other_section': {'thing': 'thing_value'}, } 
module.apply_overrides(config, raw_overrides) assert config == { 'section': {'key': 'value1', 'other': 'other_value', 'nested': {'key': 'value3'}}, 'other_section': {'thing': 'value2'}, 'new': {'foo': 'bar'}, } borgmatic-1.7.9/tests/integration/config/test_schema.py000066400000000000000000000003521440467744700233230ustar00rootroot00000000000000MAXIMUM_LINE_LENGTH = 80 def test_schema_line_length_stays_under_limit(): schema_file = open('borgmatic/config/schema.yaml') for line in schema_file.readlines(): assert len(line.rstrip('\n')) <= MAXIMUM_LINE_LENGTH borgmatic-1.7.9/tests/integration/config/test_validate.py000066400000000000000000000170231440467744700236570ustar00rootroot00000000000000import io import string import sys import pytest from flexmock import flexmock from borgmatic.config import validate as module def test_schema_filename_returns_plausable_path(): schema_path = module.schema_filename() assert schema_path.endswith('/schema.yaml') def mock_config_and_schema(config_yaml, schema_yaml=None): ''' Set up mocks for the given config config YAML string and the schema YAML string, or the default schema if no schema is provided. The idea is that that the code under test consumes these mocks when parsing the configuration. 
''' config_stream = io.StringIO(config_yaml) config_stream.name = 'config.yaml' if schema_yaml is None: schema_stream = open(module.schema_filename()) else: schema_stream = io.StringIO(schema_yaml) schema_stream.name = 'schema.yaml' builtins = flexmock(sys.modules['builtins']) flexmock(module.os).should_receive('getcwd').and_return('/tmp') flexmock(module.os.path).should_receive('isabs').and_return(False) flexmock(module.os.path).should_receive('exists').and_return(True) builtins.should_receive('open').with_args('/tmp/config.yaml').and_return(config_stream) builtins.should_receive('open').with_args('/tmp/schema.yaml').and_return(schema_stream) def test_parse_configuration_transforms_file_into_mapping(): mock_config_and_schema( ''' location: source_directories: - /home - /etc repositories: - hostname.borg retention: keep_minutely: 60 keep_hourly: 24 keep_daily: 7 consistency: checks: - name: repository - name: archives ''' ) config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml') assert config == { 'location': {'source_directories': ['/home', '/etc'], 'repositories': ['hostname.borg']}, 'retention': {'keep_daily': 7, 'keep_hourly': 24, 'keep_minutely': 60}, 'consistency': {'checks': [{'name': 'repository'}, {'name': 'archives'}]}, } assert logs == [] def test_parse_configuration_passes_through_quoted_punctuation(): escaped_punctuation = string.punctuation.replace('\\', r'\\').replace('"', r'\"') mock_config_and_schema( f''' location: source_directories: - "/home/{escaped_punctuation}" repositories: - test.borg ''' ) config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml') assert config == { 'location': { 'source_directories': [f'/home/{string.punctuation}'], 'repositories': ['test.borg'], } } assert logs == [] def test_parse_configuration_with_schema_lacking_examples_does_not_raise(): mock_config_and_schema( ''' location: source_directories: - /home repositories: - hostname.borg ''', ''' map: location: required: 
true map: source_directories: required: true seq: - type: scalar repositories: required: true seq: - type: scalar ''', ) module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml') def test_parse_configuration_inlines_include(): mock_config_and_schema( ''' location: source_directories: - /home repositories: - hostname.borg retention: !include include.yaml ''' ) builtins = flexmock(sys.modules['builtins']) include_file = io.StringIO( ''' keep_daily: 7 keep_hourly: 24 ''' ) include_file.name = 'include.yaml' builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file) config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml') assert config == { 'location': {'source_directories': ['/home'], 'repositories': ['hostname.borg']}, 'retention': {'keep_daily': 7, 'keep_hourly': 24}, } assert logs == [] def test_parse_configuration_merges_include(): mock_config_and_schema( ''' location: source_directories: - /home repositories: - hostname.borg retention: keep_daily: 1 <<: !include include.yaml ''' ) builtins = flexmock(sys.modules['builtins']) include_file = io.StringIO( ''' keep_daily: 7 keep_hourly: 24 ''' ) include_file.name = 'include.yaml' builtins.should_receive('open').with_args('/tmp/include.yaml').and_return(include_file) config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml') assert config == { 'location': {'source_directories': ['/home'], 'repositories': ['hostname.borg']}, 'retention': {'keep_daily': 1, 'keep_hourly': 24}, } assert logs == [] def test_parse_configuration_raises_for_missing_config_file(): with pytest.raises(FileNotFoundError): module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml') def test_parse_configuration_raises_for_missing_schema_file(): mock_config_and_schema('') builtins = flexmock(sys.modules['builtins']) builtins.should_receive('open').with_args('/tmp/schema.yaml').and_raise(FileNotFoundError) with pytest.raises(FileNotFoundError): 
module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml') def test_parse_configuration_raises_for_syntax_error(): mock_config_and_schema('foo:\nbar') with pytest.raises(ValueError): module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml') def test_parse_configuration_raises_for_validation_error(): mock_config_and_schema( ''' location: source_directories: yes repositories: - hostname.borg ''' ) with pytest.raises(module.Validation_error): module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml') def test_parse_configuration_applies_overrides(): mock_config_and_schema( ''' location: source_directories: - /home repositories: - hostname.borg local_path: borg1 ''' ) config, logs = module.parse_configuration( '/tmp/config.yaml', '/tmp/schema.yaml', overrides=['location.local_path=borg2'] ) assert config == { 'location': { 'source_directories': ['/home'], 'repositories': ['hostname.borg'], 'local_path': 'borg2', } } assert logs == [] def test_parse_configuration_applies_normalization(): mock_config_and_schema( ''' location: source_directories: - /home repositories: - hostname.borg exclude_if_present: .nobackup ''' ) config, logs = module.parse_configuration('/tmp/config.yaml', '/tmp/schema.yaml') assert config == { 'location': { 'source_directories': ['/home'], 'repositories': ['hostname.borg'], 'exclude_if_present': ['.nobackup'], } } assert logs == [] borgmatic-1.7.9/tests/integration/hooks/000077500000000000000000000000001440467744700203305ustar00rootroot00000000000000borgmatic-1.7.9/tests/integration/hooks/test_healthchecks.py000066400000000000000000000013311440467744700243650ustar00rootroot00000000000000import logging from flexmock import flexmock from borgmatic.hooks import healthchecks as module def test_destroy_monitor_removes_healthchecks_handler(): logger = logging.getLogger() original_handlers = list(logger.handlers) logger.addHandler(module.Forgetful_buffering_handler(byte_capacity=100, log_level=1)) 
module.destroy_monitor(flexmock(), flexmock(), flexmock(), flexmock()) assert logger.handlers == original_handlers def test_destroy_monitor_without_healthchecks_handler_does_not_raise(): logger = logging.getLogger() original_handlers = list(logger.handlers) module.destroy_monitor(flexmock(), flexmock(), flexmock(), flexmock()) assert logger.handlers == original_handlers borgmatic-1.7.9/tests/integration/test_execute.py000066400000000000000000000257771440467744700223020ustar00rootroot00000000000000import logging import subprocess import sys import pytest from flexmock import flexmock from borgmatic import execute as module def test_log_outputs_logs_each_line_separately(): flexmock(module.logger).should_receive('log').with_args(logging.INFO, 'hi').once() flexmock(module.logger).should_receive('log').with_args(logging.INFO, 'there').once() flexmock(module).should_receive('exit_code_indicates_error').and_return(False) hi_process = subprocess.Popen(['echo', 'hi'], stdout=subprocess.PIPE) flexmock(module).should_receive('output_buffer_for_process').with_args( hi_process, () ).and_return(hi_process.stdout) there_process = subprocess.Popen(['echo', 'there'], stdout=subprocess.PIPE) flexmock(module).should_receive('output_buffer_for_process').with_args( there_process, () ).and_return(there_process.stdout) module.log_outputs( (hi_process, there_process), exclude_stdouts=(), output_log_level=logging.INFO, borg_local_path='borg', ) def test_log_outputs_skips_logs_for_process_with_none_stdout(): flexmock(module.logger).should_receive('log').with_args(logging.INFO, 'hi').never() flexmock(module.logger).should_receive('log').with_args(logging.INFO, 'there').once() flexmock(module).should_receive('exit_code_indicates_error').and_return(False) hi_process = subprocess.Popen(['echo', 'hi'], stdout=None) flexmock(module).should_receive('output_buffer_for_process').with_args( hi_process, () ).and_return(hi_process.stdout) there_process = subprocess.Popen(['echo', 'there'], 
stdout=subprocess.PIPE) flexmock(module).should_receive('output_buffer_for_process').with_args( there_process, () ).and_return(there_process.stdout) module.log_outputs( (hi_process, there_process), exclude_stdouts=(), output_log_level=logging.INFO, borg_local_path='borg', ) def test_log_outputs_returns_output_without_logging_for_output_log_level_none(): flexmock(module.logger).should_receive('log').never() flexmock(module).should_receive('exit_code_indicates_error').and_return(False) hi_process = subprocess.Popen(['echo', 'hi'], stdout=subprocess.PIPE) flexmock(module).should_receive('output_buffer_for_process').with_args( hi_process, () ).and_return(hi_process.stdout) there_process = subprocess.Popen(['echo', 'there'], stdout=subprocess.PIPE) flexmock(module).should_receive('output_buffer_for_process').with_args( there_process, () ).and_return(there_process.stdout) captured_outputs = module.log_outputs( (hi_process, there_process), exclude_stdouts=(), output_log_level=None, borg_local_path='borg', ) assert captured_outputs == {hi_process: 'hi', there_process: 'there'} def test_log_outputs_includes_error_output_in_exception(): flexmock(module.logger).should_receive('log') flexmock(module).should_receive('exit_code_indicates_error').and_return(True) flexmock(module).should_receive('command_for_process').and_return('grep') process = subprocess.Popen(['grep'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) flexmock(module).should_receive('output_buffer_for_process').and_return(process.stdout) with pytest.raises(subprocess.CalledProcessError) as error: module.log_outputs( (process,), exclude_stdouts=(), output_log_level=logging.INFO, borg_local_path='borg' ) assert error.value.output def test_log_outputs_logs_multiline_error_output(): ''' Make sure that all error output lines get logged, not just (for instance) the first few lines of a process' traceback. 
''' flexmock(module.logger).should_receive('log') flexmock(module).should_receive('exit_code_indicates_error').and_return(True) flexmock(module).should_receive('command_for_process').and_return('grep') process = subprocess.Popen( ['python', '-c', 'foopydoo'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) flexmock(module).should_receive('output_buffer_for_process').and_return(process.stdout) flexmock(module.logger).should_call('log').at_least().times(3) with pytest.raises(subprocess.CalledProcessError): module.log_outputs( (process,), exclude_stdouts=(), output_log_level=logging.INFO, borg_local_path='borg' ) def test_log_outputs_skips_error_output_in_exception_for_process_with_none_stdout(): flexmock(module.logger).should_receive('log') flexmock(module).should_receive('exit_code_indicates_error').and_return(True) flexmock(module).should_receive('command_for_process').and_return('grep') process = subprocess.Popen(['grep'], stdout=None) flexmock(module).should_receive('output_buffer_for_process').and_return(process.stdout) with pytest.raises(subprocess.CalledProcessError) as error: module.log_outputs( (process,), exclude_stdouts=(), output_log_level=logging.INFO, borg_local_path='borg' ) assert error.value.returncode == 2 assert not error.value.output def test_log_outputs_kills_other_processes_when_one_errors(): flexmock(module.logger).should_receive('log') flexmock(module).should_receive('command_for_process').and_return('grep') process = subprocess.Popen(['grep'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) flexmock(module).should_receive('exit_code_indicates_error').with_args( process, None, 'borg' ).and_return(False) flexmock(module).should_receive('exit_code_indicates_error').with_args( process, 2, 'borg' ).and_return(True) other_process = subprocess.Popen( ['sleep', '2'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) flexmock(module).should_receive('exit_code_indicates_error').with_args( other_process, None, 'borg' ).and_return(False) 
flexmock(module).should_receive('output_buffer_for_process').with_args(process, ()).and_return( process.stdout ) flexmock(module).should_receive('output_buffer_for_process').with_args( other_process, () ).and_return(other_process.stdout) flexmock(other_process).should_receive('kill').once() with pytest.raises(subprocess.CalledProcessError) as error: module.log_outputs( (process, other_process), exclude_stdouts=(), output_log_level=logging.INFO, borg_local_path='borg', ) assert error.value.returncode == 2 assert error.value.output def test_log_outputs_vents_other_processes_when_one_exits(): ''' Execute a command to generate a longish random string and pipe it into another command that exits quickly. The test is basically to ensure we don't hang forever waiting for the exited process to read the pipe, and that the string-generating process eventually gets vented and exits. ''' flexmock(module.logger).should_receive('log') flexmock(module).should_receive('command_for_process').and_return('grep') process = subprocess.Popen( [ sys.executable, '-c', "import random, string; print(''.join(random.choice(string.ascii_letters) for _ in range(40000)))", ], stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) other_process = subprocess.Popen( ['true'], stdin=process.stdout, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) flexmock(module).should_receive('output_buffer_for_process').with_args( process, (process.stdout,) ).and_return(process.stderr) flexmock(module).should_receive('output_buffer_for_process').with_args( other_process, (process.stdout,) ).and_return(other_process.stdout) flexmock(process.stdout).should_call('readline').at_least().once() module.log_outputs( (process, other_process), exclude_stdouts=(process.stdout,), output_log_level=logging.INFO, borg_local_path='borg', ) def test_log_outputs_does_not_error_when_one_process_exits(): flexmock(module.logger).should_receive('log') flexmock(module).should_receive('command_for_process').and_return('grep') process = 
subprocess.Popen( [ sys.executable, '-c', "import random, string; print(''.join(random.choice(string.ascii_letters) for _ in range(40000)))", ], stdout=None, # Specifically test the case of a process without stdout captured. stderr=None, ) other_process = subprocess.Popen( ['true'], stdin=process.stdout, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) flexmock(module).should_receive('output_buffer_for_process').with_args( process, (process.stdout,) ).and_return(process.stderr) flexmock(module).should_receive('output_buffer_for_process').with_args( other_process, (process.stdout,) ).and_return(other_process.stdout) module.log_outputs( (process, other_process), exclude_stdouts=(process.stdout,), output_log_level=logging.INFO, borg_local_path='borg', ) def test_log_outputs_truncates_long_error_output(): flexmock(module).ERROR_OUTPUT_MAX_LINE_COUNT = 0 flexmock(module.logger).should_receive('log') flexmock(module).should_receive('command_for_process').and_return('grep') process = subprocess.Popen(['grep'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) flexmock(module).should_receive('exit_code_indicates_error').with_args( process, None, 'borg' ).and_return(False) flexmock(module).should_receive('exit_code_indicates_error').with_args( process, 2, 'borg' ).and_return(True) flexmock(module).should_receive('output_buffer_for_process').and_return(process.stdout) with pytest.raises(subprocess.CalledProcessError) as error: module.log_outputs( (process,), exclude_stdouts=(), output_log_level=logging.INFO, borg_local_path='borg' ) assert error.value.returncode == 2 assert error.value.output.startswith('...') def test_log_outputs_with_no_output_logs_nothing(): flexmock(module.logger).should_receive('log').never() flexmock(module).should_receive('exit_code_indicates_error').and_return(False) process = subprocess.Popen(['true'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) flexmock(module).should_receive('output_buffer_for_process').and_return(process.stdout) 
module.log_outputs( (process,), exclude_stdouts=(), output_log_level=logging.INFO, borg_local_path='borg' ) def test_log_outputs_with_unfinished_process_re_polls(): flexmock(module.logger).should_receive('log').never() flexmock(module).should_receive('exit_code_indicates_error').and_return(False) process = subprocess.Popen(['true'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) flexmock(process).should_receive('poll').and_return(None).and_return(0).times(3) flexmock(module).should_receive('output_buffer_for_process').and_return(process.stdout) module.log_outputs( (process,), exclude_stdouts=(), output_log_level=logging.INFO, borg_local_path='borg' ) borgmatic-1.7.9/tests/unit/000077500000000000000000000000001440467744700156415ustar00rootroot00000000000000borgmatic-1.7.9/tests/unit/__init__.py000066400000000000000000000000001440467744700177400ustar00rootroot00000000000000borgmatic-1.7.9/tests/unit/actions/000077500000000000000000000000001440467744700173015ustar00rootroot00000000000000borgmatic-1.7.9/tests/unit/actions/__init__.py000066400000000000000000000000001440467744700214000ustar00rootroot00000000000000borgmatic-1.7.9/tests/unit/actions/test_borg.py000066400000000000000000000013701440467744700216440ustar00rootroot00000000000000from flexmock import flexmock from borgmatic.actions import borg as module def test_run_borg_does_not_raise(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True) flexmock(module.borgmatic.borg.rlist).should_receive('resolve_archive_name').and_return( flexmock() ) flexmock(module.borgmatic.borg.borg).should_receive('run_arbitrary_borg') borg_arguments = flexmock(repository=flexmock(), archive=flexmock(), options=flexmock()) module.run_borg( repository='repo', storage={}, local_borg_version=None, borg_arguments=borg_arguments, local_path=None, remote_path=None, ) 
borgmatic-1.7.9/tests/unit/actions/test_break_lock.py000066400000000000000000000011741440467744700230110ustar00rootroot00000000000000from flexmock import flexmock from borgmatic.actions import break_lock as module def test_run_break_lock_does_not_raise(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True) flexmock(module.borgmatic.borg.break_lock).should_receive('break_lock') break_lock_arguments = flexmock(repository=flexmock()) module.run_break_lock( repository='repo', storage={}, local_borg_version=None, break_lock_arguments=break_lock_arguments, local_path=None, remote_path=None, ) borgmatic-1.7.9/tests/unit/actions/test_check.py000066400000000000000000000060411440467744700217700ustar00rootroot00000000000000from flexmock import flexmock from borgmatic.actions import check as module def test_run_check_calls_hooks_for_configured_repository(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.checks).should_receive( 'repository_enabled_for_checks' ).and_return(True) flexmock(module.borgmatic.config.validate).should_receive('repositories_match').never() flexmock(module.borgmatic.borg.check).should_receive('check_archives').once() flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2) check_arguments = flexmock( repository=None, progress=flexmock(), repair=flexmock(), only=flexmock(), force=flexmock(), ) global_arguments = flexmock(monitoring_verbosity=1, dry_run=False) module.run_check( config_filename='test.yaml', repository='repo', location={'repositories': ['repo']}, storage={}, consistency={}, hooks={}, hook_context={}, local_borg_version=None, check_arguments=check_arguments, global_arguments=global_arguments, local_path=None, remote_path=None, ) def test_run_check_runs_with_selected_repository(): flexmock(module.logger).answer = lambda message: None 
flexmock(module.borgmatic.config.validate).should_receive( 'repositories_match' ).once().and_return(True) flexmock(module.borgmatic.borg.check).should_receive('check_archives').once() check_arguments = flexmock( repository=flexmock(), progress=flexmock(), repair=flexmock(), only=flexmock(), force=flexmock(), ) global_arguments = flexmock(monitoring_verbosity=1, dry_run=False) module.run_check( config_filename='test.yaml', repository=flexmock(), location={'repositories': ['repo']}, storage={}, consistency={}, hooks={}, hook_context={}, local_borg_version=None, check_arguments=check_arguments, global_arguments=global_arguments, local_path=None, remote_path=None, ) def test_run_check_bails_if_repository_does_not_match(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.validate).should_receive( 'repositories_match' ).once().and_return(False) flexmock(module.borgmatic.borg.check).should_receive('check_archives').never() check_arguments = flexmock( repository=flexmock(), progress=flexmock(), repair=flexmock(), only=flexmock(), force=flexmock(), ) global_arguments = flexmock(monitoring_verbosity=1, dry_run=False) module.run_check( config_filename='test.yaml', repository='repo', location={'repositories': ['repo']}, storage={}, consistency={}, hooks={}, hook_context={}, local_borg_version=None, check_arguments=check_arguments, global_arguments=global_arguments, local_path=None, remote_path=None, ) borgmatic-1.7.9/tests/unit/actions/test_compact.py000066400000000000000000000061061440467744700223430ustar00rootroot00000000000000from flexmock import flexmock from borgmatic.actions import compact as module def test_compact_actions_calls_hooks_for_configured_repository(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.borg.feature).should_receive('available').and_return(True) flexmock(module.borgmatic.config.validate).should_receive('repositories_match').never() 
flexmock(module.borgmatic.borg.compact).should_receive('compact_segments').once() flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2) compact_arguments = flexmock( repository=None, progress=flexmock(), cleanup_commits=flexmock(), threshold=flexmock() ) global_arguments = flexmock(monitoring_verbosity=1, dry_run=False) module.run_compact( config_filename='test.yaml', repository='repo', storage={}, retention={}, hooks={}, hook_context={}, local_borg_version=None, compact_arguments=compact_arguments, global_arguments=global_arguments, dry_run_label='', local_path=None, remote_path=None, ) def test_compact_runs_with_selected_repository(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.validate).should_receive( 'repositories_match' ).once().and_return(True) flexmock(module.borgmatic.borg.feature).should_receive('available').and_return(True) flexmock(module.borgmatic.borg.compact).should_receive('compact_segments').once() compact_arguments = flexmock( repository=flexmock(), progress=flexmock(), cleanup_commits=flexmock(), threshold=flexmock() ) global_arguments = flexmock(monitoring_verbosity=1, dry_run=False) module.run_compact( config_filename='test.yaml', repository='repo', storage={}, retention={}, hooks={}, hook_context={}, local_borg_version=None, compact_arguments=compact_arguments, global_arguments=global_arguments, dry_run_label='', local_path=None, remote_path=None, ) def test_compact_bails_if_repository_does_not_match(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.borg.feature).should_receive('available').and_return(True) flexmock(module.borgmatic.config.validate).should_receive( 'repositories_match' ).once().and_return(False) flexmock(module.borgmatic.borg.compact).should_receive('compact_segments').never() compact_arguments = flexmock( repository=flexmock(), progress=flexmock(), cleanup_commits=flexmock(), threshold=flexmock() ) global_arguments = 
flexmock(monitoring_verbosity=1, dry_run=False) module.run_compact( config_filename='test.yaml', repository='repo', storage={}, retention={}, hooks={}, hook_context={}, local_borg_version=None, compact_arguments=compact_arguments, global_arguments=global_arguments, dry_run_label='', local_path=None, remote_path=None, ) borgmatic-1.7.9/tests/unit/actions/test_create.py000066400000000000000000000065321440467744700221630ustar00rootroot00000000000000from flexmock import flexmock from borgmatic.actions import create as module def test_run_create_executes_and_calls_hooks_for_configured_repository(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.validate).should_receive('repositories_match').never() flexmock(module.borgmatic.borg.create).should_receive('create_archive').once() flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2) flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks').and_return({}) flexmock(module.borgmatic.hooks.dispatch).should_receive( 'call_hooks_even_if_unconfigured' ).and_return({}) create_arguments = flexmock( repository=None, progress=flexmock(), stats=flexmock(), json=flexmock(), list_files=flexmock(), ) global_arguments = flexmock(monitoring_verbosity=1, dry_run=False) list( module.run_create( config_filename='test.yaml', repository='repo', location={}, storage={}, hooks={}, hook_context={}, local_borg_version=None, create_arguments=create_arguments, global_arguments=global_arguments, dry_run_label='', local_path=None, remote_path=None, ) ) def test_run_create_runs_with_selected_repository(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.validate).should_receive( 'repositories_match' ).once().and_return(True) flexmock(module.borgmatic.borg.create).should_receive('create_archive').once() create_arguments = flexmock( repository=flexmock(), progress=flexmock(), stats=flexmock(), json=flexmock(), list_files=flexmock(), ) 
global_arguments = flexmock(monitoring_verbosity=1, dry_run=False) list( module.run_create( config_filename='test.yaml', repository='repo', location={}, storage={}, hooks={}, hook_context={}, local_borg_version=None, create_arguments=create_arguments, global_arguments=global_arguments, dry_run_label='', local_path=None, remote_path=None, ) ) def test_run_create_bails_if_repository_does_not_match(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.validate).should_receive( 'repositories_match' ).once().and_return(False) flexmock(module.borgmatic.borg.create).should_receive('create_archive').never() create_arguments = flexmock( repository=flexmock(), progress=flexmock(), stats=flexmock(), json=flexmock(), list_files=flexmock(), ) global_arguments = flexmock(monitoring_verbosity=1, dry_run=False) list( module.run_create( config_filename='test.yaml', repository='repo', location={}, storage={}, hooks={}, hook_context={}, local_borg_version=None, create_arguments=create_arguments, global_arguments=global_arguments, dry_run_label='', local_path=None, remote_path=None, ) ) borgmatic-1.7.9/tests/unit/actions/test_export_tar.py000066400000000000000000000016761440467744700231130ustar00rootroot00000000000000from flexmock import flexmock from borgmatic.actions import export_tar as module def test_run_export_tar_does_not_raise(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True) flexmock(module.borgmatic.borg.export_tar).should_receive('export_tar_archive') export_tar_arguments = flexmock( repository=flexmock(), archive=flexmock(), paths=flexmock(), destination=flexmock(), tar_filter=flexmock(), list_files=flexmock(), strip_components=flexmock(), ) global_arguments = flexmock(monitoring_verbosity=1, dry_run=False) module.run_export_tar( repository='repo', storage={}, local_borg_version=None, export_tar_arguments=export_tar_arguments, 
global_arguments=global_arguments, local_path=None, remote_path=None, ) borgmatic-1.7.9/tests/unit/actions/test_extract.py000066400000000000000000000021201440467744700223570ustar00rootroot00000000000000from flexmock import flexmock from borgmatic.actions import extract as module def test_run_extract_calls_hooks(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True) flexmock(module.borgmatic.borg.extract).should_receive('extract_archive') flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2) extract_arguments = flexmock( paths=flexmock(), progress=flexmock(), destination=flexmock(), strip_components=flexmock(), archive=flexmock(), repository='repo', ) global_arguments = flexmock(monitoring_verbosity=1, dry_run=False) module.run_extract( config_filename='test.yaml', repository='repo', location={'repositories': ['repo']}, storage={}, hooks={}, hook_context={}, local_borg_version=None, extract_arguments=extract_arguments, global_arguments=global_arguments, local_path=None, remote_path=None, ) borgmatic-1.7.9/tests/unit/actions/test_info.py000066400000000000000000000014501440467744700216450ustar00rootroot00000000000000from flexmock import flexmock from borgmatic.actions import info as module def test_run_info_does_not_raise(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True) flexmock(module.borgmatic.borg.rlist).should_receive('resolve_archive_name').and_return( flexmock() ) flexmock(module.borgmatic.borg.info).should_receive('display_archives_info') info_arguments = flexmock(repository=flexmock(), archive=flexmock(), json=flexmock()) list( module.run_info( repository='repo', storage={}, local_borg_version=None, info_arguments=info_arguments, local_path=None, remote_path=None, ) ) 
borgmatic-1.7.9/tests/unit/actions/test_list.py000066400000000000000000000014371440467744700216720ustar00rootroot00000000000000from flexmock import flexmock from borgmatic.actions import list as module def test_run_list_does_not_raise(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True) flexmock(module.borgmatic.borg.rlist).should_receive('resolve_archive_name').and_return( flexmock() ) flexmock(module.borgmatic.borg.list).should_receive('list_archive') list_arguments = flexmock(repository=flexmock(), archive=flexmock(), json=flexmock()) list( module.run_list( repository='repo', storage={}, local_borg_version=None, list_arguments=list_arguments, local_path=None, remote_path=None, ) ) borgmatic-1.7.9/tests/unit/actions/test_mount.py000066400000000000000000000013741440467744700220610ustar00rootroot00000000000000from flexmock import flexmock from borgmatic.actions import mount as module def test_run_mount_does_not_raise(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True) flexmock(module.borgmatic.borg.mount).should_receive('mount_archive') mount_arguments = flexmock( repository=flexmock(), archive=flexmock(), mount_point=flexmock(), paths=flexmock(), foreground=flexmock(), options=flexmock(), ) module.run_mount( repository='repo', storage={}, local_borg_version=None, mount_arguments=mount_arguments, local_path=None, remote_path=None, ) borgmatic-1.7.9/tests/unit/actions/test_prune.py000066400000000000000000000052301440467744700220430ustar00rootroot00000000000000from flexmock import flexmock from borgmatic.actions import prune as module def test_run_prune_calls_hooks_for_configured_repository(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.validate).should_receive('repositories_match').never() 
flexmock(module.borgmatic.borg.prune).should_receive('prune_archives').once() flexmock(module.borgmatic.hooks.command).should_receive('execute_hook').times(2) prune_arguments = flexmock(repository=None, stats=flexmock(), list_archives=flexmock()) global_arguments = flexmock(monitoring_verbosity=1, dry_run=False) module.run_prune( config_filename='test.yaml', repository='repo', storage={}, retention={}, hooks={}, hook_context={}, local_borg_version=None, prune_arguments=prune_arguments, global_arguments=global_arguments, dry_run_label='', local_path=None, remote_path=None, ) def test_run_prune_runs_with_selected_repository(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.validate).should_receive( 'repositories_match' ).once().and_return(True) flexmock(module.borgmatic.borg.prune).should_receive('prune_archives').once() prune_arguments = flexmock(repository=flexmock(), stats=flexmock(), list_archives=flexmock()) global_arguments = flexmock(monitoring_verbosity=1, dry_run=False) module.run_prune( config_filename='test.yaml', repository='repo', storage={}, retention={}, hooks={}, hook_context={}, local_borg_version=None, prune_arguments=prune_arguments, global_arguments=global_arguments, dry_run_label='', local_path=None, remote_path=None, ) def test_run_prune_bails_if_repository_does_not_match(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.validate).should_receive( 'repositories_match' ).once().and_return(False) flexmock(module.borgmatic.borg.prune).should_receive('prune_archives').never() prune_arguments = flexmock(repository=flexmock(), stats=flexmock(), list_archives=flexmock()) global_arguments = flexmock(monitoring_verbosity=1, dry_run=False) module.run_prune( config_filename='test.yaml', repository='repo', storage={}, retention={}, hooks={}, hook_context={}, local_borg_version=None, prune_arguments=prune_arguments, global_arguments=global_arguments, dry_run_label='', 
local_path=None, remote_path=None, ) borgmatic-1.7.9/tests/unit/actions/test_rcreate.py000066400000000000000000000033021440467744700223350ustar00rootroot00000000000000from flexmock import flexmock from borgmatic.actions import rcreate as module def test_run_rcreate_does_not_raise(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True) flexmock(module.borgmatic.borg.rcreate).should_receive('create_repository') arguments = flexmock( encryption_mode=flexmock(), source_repository=flexmock(), repository=flexmock(), copy_crypt_key=flexmock(), append_only=flexmock(), storage_quota=flexmock(), make_parent_dirs=flexmock(), ) module.run_rcreate( repository='repo', storage={}, local_borg_version=None, rcreate_arguments=arguments, global_arguments=flexmock(dry_run=False), local_path=None, remote_path=None, ) def test_run_rcreate_bails_if_repository_does_not_match(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return( False ) flexmock(module.borgmatic.borg.rcreate).should_receive('create_repository').never() arguments = flexmock( encryption_mode=flexmock(), source_repository=flexmock(), repository=flexmock(), copy_crypt_key=flexmock(), append_only=flexmock(), storage_quota=flexmock(), make_parent_dirs=flexmock(), ) module.run_rcreate( repository='repo', storage={}, local_borg_version=None, rcreate_arguments=arguments, global_arguments=flexmock(dry_run=False), local_path=None, remote_path=None, ) borgmatic-1.7.9/tests/unit/actions/test_restore.py000066400000000000000000000437721440467744700224120ustar00rootroot00000000000000import pytest from flexmock import flexmock import borgmatic.actions.restore as module def test_get_configured_database_matches_database_by_name(): assert module.get_configured_database( hooks={ 'other_databases': [{'name': 'other'}], 'postgresql_databases': [{'name': 
'foo'}, {'name': 'bar'}], }, archive_database_names={'postgresql_databases': ['other', 'foo', 'bar']}, hook_name='postgresql_databases', database_name='bar', ) == ('postgresql_databases', {'name': 'bar'}) def test_get_configured_database_matches_nothing_when_database_name_not_configured(): assert module.get_configured_database( hooks={'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]}, archive_database_names={'postgresql_databases': ['foo']}, hook_name='postgresql_databases', database_name='quux', ) == (None, None) def test_get_configured_database_matches_nothing_when_database_name_not_in_archive(): assert module.get_configured_database( hooks={'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}]}, archive_database_names={'postgresql_databases': ['bar']}, hook_name='postgresql_databases', database_name='foo', ) == (None, None) def test_get_configured_database_matches_database_by_configuration_database_name(): assert module.get_configured_database( hooks={'postgresql_databases': [{'name': 'all'}, {'name': 'bar'}]}, archive_database_names={'postgresql_databases': ['foo']}, hook_name='postgresql_databases', database_name='foo', configuration_database_name='all', ) == ('postgresql_databases', {'name': 'all'}) def test_get_configured_database_with_unspecified_hook_matches_database_by_name(): assert module.get_configured_database( hooks={ 'other_databases': [{'name': 'other'}], 'postgresql_databases': [{'name': 'foo'}, {'name': 'bar'}], }, archive_database_names={'postgresql_databases': ['other', 'foo', 'bar']}, hook_name=module.UNSPECIFIED_HOOK, database_name='bar', ) == ('postgresql_databases', {'name': 'bar'}) def test_collect_archive_database_names_parses_archive_paths(): flexmock(module.borgmatic.hooks.dump).should_receive('make_database_dump_path').and_return('') flexmock(module.borgmatic.borg.list).should_receive('capture_archive_listing').and_return( [ '.borgmatic/postgresql_databases/localhost/foo', '.borgmatic/postgresql_databases/localhost/bar', 
'.borgmatic/mysql_databases/localhost/quux', ] ) archive_database_names = module.collect_archive_database_names( repository='repo', archive='archive', location={'borgmatic_source_directory': '.borgmatic'}, storage=flexmock(), local_borg_version=flexmock(), local_path=flexmock(), remote_path=flexmock(), ) assert archive_database_names == { 'postgresql_databases': ['foo', 'bar'], 'mysql_databases': ['quux'], } def test_collect_archive_database_names_parses_directory_format_archive_paths(): flexmock(module.borgmatic.hooks.dump).should_receive('make_database_dump_path').and_return('') flexmock(module.borgmatic.borg.list).should_receive('capture_archive_listing').and_return( [ '.borgmatic/postgresql_databases/localhost/foo/table1', '.borgmatic/postgresql_databases/localhost/foo/table2', ] ) archive_database_names = module.collect_archive_database_names( repository='repo', archive='archive', location={'borgmatic_source_directory': '.borgmatic'}, storage=flexmock(), local_borg_version=flexmock(), local_path=flexmock(), remote_path=flexmock(), ) assert archive_database_names == { 'postgresql_databases': ['foo'], } def test_collect_archive_database_names_skips_bad_archive_paths(): flexmock(module.borgmatic.hooks.dump).should_receive('make_database_dump_path').and_return('') flexmock(module.borgmatic.borg.list).should_receive('capture_archive_listing').and_return( ['.borgmatic/postgresql_databases/localhost/foo', '.borgmatic/invalid', 'invalid/as/well'] ) archive_database_names = module.collect_archive_database_names( repository='repo', archive='archive', location={'borgmatic_source_directory': '.borgmatic'}, storage=flexmock(), local_borg_version=flexmock(), local_path=flexmock(), remote_path=flexmock(), ) assert archive_database_names == { 'postgresql_databases': ['foo'], } def test_find_databases_to_restore_passes_through_requested_names_found_in_archive(): restore_names = module.find_databases_to_restore( requested_database_names=['foo', 'bar'], 
archive_database_names={'postresql_databases': ['foo', 'bar', 'baz']}, ) assert restore_names == {module.UNSPECIFIED_HOOK: ['foo', 'bar']} def test_find_databases_to_restore_raises_for_requested_names_missing_from_archive(): with pytest.raises(ValueError): module.find_databases_to_restore( requested_database_names=['foo', 'bar'], archive_database_names={'postresql_databases': ['foo']}, ) def test_find_databases_to_restore_without_requested_names_finds_all_archive_databases(): archive_database_names = {'postresql_databases': ['foo', 'bar']} restore_names = module.find_databases_to_restore( requested_database_names=[], archive_database_names=archive_database_names, ) assert restore_names == archive_database_names def test_find_databases_to_restore_with_all_in_requested_names_finds_all_archive_databases(): archive_database_names = {'postresql_databases': ['foo', 'bar']} restore_names = module.find_databases_to_restore( requested_database_names=['all'], archive_database_names=archive_database_names, ) assert restore_names == archive_database_names def test_find_databases_to_restore_with_all_in_requested_names_plus_additional_requested_names_omits_duplicates(): archive_database_names = {'postresql_databases': ['foo', 'bar']} restore_names = module.find_databases_to_restore( requested_database_names=['all', 'foo', 'bar'], archive_database_names=archive_database_names, ) assert restore_names == archive_database_names def test_find_databases_to_restore_raises_for_all_in_requested_names_and_requested_named_missing_from_archives(): with pytest.raises(ValueError): module.find_databases_to_restore( requested_database_names=['all', 'foo', 'bar'], archive_database_names={'postresql_databases': ['foo']}, ) def test_ensure_databases_found_with_all_databases_found_does_not_raise(): module.ensure_databases_found( restore_names={'postgresql_databases': ['foo']}, remaining_restore_names={'postgresql_databases': ['bar']}, found_names=['foo', 'bar'], ) def 
test_ensure_databases_found_with_no_databases_raises(): with pytest.raises(ValueError): module.ensure_databases_found( restore_names={'postgresql_databases': []}, remaining_restore_names={}, found_names=[], ) def test_ensure_databases_found_with_missing_databases_raises(): with pytest.raises(ValueError): module.ensure_databases_found( restore_names={'postgresql_databases': ['foo']}, remaining_restore_names={'postgresql_databases': ['bar']}, found_names=['foo'], ) def test_run_restore_restores_each_database(): restore_names = { 'postgresql_databases': ['foo', 'bar'], } flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True) flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks_even_if_unconfigured') flexmock(module.borgmatic.borg.rlist).should_receive('resolve_archive_name').and_return( flexmock() ) flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock()) flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names) flexmock(module).should_receive('get_configured_database').and_return( ('postgresql_databases', {'name': 'foo'}) ).and_return(('postgresql_databases', {'name': 'bar'})) flexmock(module).should_receive('restore_single_database').with_args( repository=object, location=object, storage=object, hooks=object, local_borg_version=object, global_arguments=object, local_path=object, remote_path=object, archive_name=object, hook_name='postgresql_databases', database={'name': 'foo'}, ).once() flexmock(module).should_receive('restore_single_database').with_args( repository=object, location=object, storage=object, hooks=object, local_borg_version=object, global_arguments=object, local_path=object, remote_path=object, archive_name=object, hook_name='postgresql_databases', database={'name': 'bar'}, ).once() flexmock(module).should_receive('ensure_databases_found') module.run_restore( repository='repo', location=flexmock(), storage=flexmock(), 
hooks=flexmock(), local_borg_version=flexmock(), restore_arguments=flexmock(repository='repo', archive='archive', databases=flexmock()), global_arguments=flexmock(dry_run=False), local_path=flexmock(), remote_path=flexmock(), ) def test_run_restore_bails_for_non_matching_repository(): flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return( False ) flexmock(module.borgmatic.hooks.dispatch).should_receive( 'call_hooks_even_if_unconfigured' ).never() flexmock(module).should_receive('restore_single_database').never() module.run_restore( repository='repo', location=flexmock(), storage=flexmock(), hooks=flexmock(), local_borg_version=flexmock(), restore_arguments=flexmock(repository='repo', archive='archive', databases=flexmock()), global_arguments=flexmock(dry_run=False), local_path=flexmock(), remote_path=flexmock(), ) def test_run_restore_restores_database_configured_with_all_name(): restore_names = { 'postgresql_databases': ['foo', 'bar'], } flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True) flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks_even_if_unconfigured') flexmock(module.borgmatic.borg.rlist).should_receive('resolve_archive_name').and_return( flexmock() ) flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock()) flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names) flexmock(module).should_receive('get_configured_database').with_args( hooks=object, archive_database_names=object, hook_name='postgresql_databases', database_name='foo', ).and_return(('postgresql_databases', {'name': 'foo'})) flexmock(module).should_receive('get_configured_database').with_args( hooks=object, archive_database_names=object, hook_name='postgresql_databases', database_name='bar', ).and_return((None, None)) flexmock(module).should_receive('get_configured_database').with_args( hooks=object, archive_database_names=object, 
hook_name='postgresql_databases', database_name='bar', configuration_database_name='all', ).and_return(('postgresql_databases', {'name': 'bar'})) flexmock(module).should_receive('restore_single_database').with_args( repository=object, location=object, storage=object, hooks=object, local_borg_version=object, global_arguments=object, local_path=object, remote_path=object, archive_name=object, hook_name='postgresql_databases', database={'name': 'foo'}, ).once() flexmock(module).should_receive('restore_single_database').with_args( repository=object, location=object, storage=object, hooks=object, local_borg_version=object, global_arguments=object, local_path=object, remote_path=object, archive_name=object, hook_name='postgresql_databases', database={'name': 'bar'}, ).once() flexmock(module).should_receive('ensure_databases_found') module.run_restore( repository='repo', location=flexmock(), storage=flexmock(), hooks=flexmock(), local_borg_version=flexmock(), restore_arguments=flexmock(repository='repo', archive='archive', databases=flexmock()), global_arguments=flexmock(dry_run=False), local_path=flexmock(), remote_path=flexmock(), ) def test_run_restore_skips_missing_database(): restore_names = { 'postgresql_databases': ['foo', 'bar'], } flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True) flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks_even_if_unconfigured') flexmock(module.borgmatic.borg.rlist).should_receive('resolve_archive_name').and_return( flexmock() ) flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock()) flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names) flexmock(module).should_receive('get_configured_database').with_args( hooks=object, archive_database_names=object, hook_name='postgresql_databases', database_name='foo', ).and_return(('postgresql_databases', {'name': 'foo'})) 
flexmock(module).should_receive('get_configured_database').with_args( hooks=object, archive_database_names=object, hook_name='postgresql_databases', database_name='bar', ).and_return((None, None)) flexmock(module).should_receive('get_configured_database').with_args( hooks=object, archive_database_names=object, hook_name='postgresql_databases', database_name='bar', configuration_database_name='all', ).and_return((None, None)) flexmock(module).should_receive('restore_single_database').with_args( repository=object, location=object, storage=object, hooks=object, local_borg_version=object, global_arguments=object, local_path=object, remote_path=object, archive_name=object, hook_name='postgresql_databases', database={'name': 'foo'}, ).once() flexmock(module).should_receive('restore_single_database').with_args( repository=object, location=object, storage=object, hooks=object, local_borg_version=object, global_arguments=object, local_path=object, remote_path=object, archive_name=object, hook_name='postgresql_databases', database={'name': 'bar'}, ).never() flexmock(module).should_receive('ensure_databases_found') module.run_restore( repository='repo', location=flexmock(), storage=flexmock(), hooks=flexmock(), local_borg_version=flexmock(), restore_arguments=flexmock(repository='repo', archive='archive', databases=flexmock()), global_arguments=flexmock(dry_run=False), local_path=flexmock(), remote_path=flexmock(), ) def test_run_restore_restores_databases_from_different_hooks(): restore_names = { 'postgresql_databases': ['foo'], 'mysql_databases': ['bar'], } flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True) flexmock(module.borgmatic.hooks.dispatch).should_receive('call_hooks_even_if_unconfigured') flexmock(module.borgmatic.borg.rlist).should_receive('resolve_archive_name').and_return( flexmock() ) flexmock(module).should_receive('collect_archive_database_names').and_return(flexmock()) 
flexmock(module).should_receive('find_databases_to_restore').and_return(restore_names) flexmock(module).should_receive('get_configured_database').with_args( hooks=object, archive_database_names=object, hook_name='postgresql_databases', database_name='foo', ).and_return(('postgresql_databases', {'name': 'foo'})) flexmock(module).should_receive('get_configured_database').with_args( hooks=object, archive_database_names=object, hook_name='mysql_databases', database_name='bar', ).and_return(('mysql_databases', {'name': 'bar'})) flexmock(module).should_receive('restore_single_database').with_args( repository=object, location=object, storage=object, hooks=object, local_borg_version=object, global_arguments=object, local_path=object, remote_path=object, archive_name=object, hook_name='postgresql_databases', database={'name': 'foo'}, ).once() flexmock(module).should_receive('restore_single_database').with_args( repository=object, location=object, storage=object, hooks=object, local_borg_version=object, global_arguments=object, local_path=object, remote_path=object, archive_name=object, hook_name='mysql_databases', database={'name': 'bar'}, ).once() flexmock(module).should_receive('ensure_databases_found') module.run_restore( repository='repo', location=flexmock(), storage=flexmock(), hooks=flexmock(), local_borg_version=flexmock(), restore_arguments=flexmock(repository='repo', archive='archive', databases=flexmock()), global_arguments=flexmock(dry_run=False), local_path=flexmock(), remote_path=flexmock(), ) borgmatic-1.7.9/tests/unit/actions/test_rinfo.py000066400000000000000000000012471440467744700220330ustar00rootroot00000000000000from flexmock import flexmock from borgmatic.actions import rinfo as module def test_run_rinfo_does_not_raise(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True) flexmock(module.borgmatic.borg.rinfo).should_receive('display_repository_info') 
rinfo_arguments = flexmock(repository=flexmock(), json=flexmock()) list( module.run_rinfo( repository='repo', storage={}, local_borg_version=None, rinfo_arguments=rinfo_arguments, local_path=None, remote_path=None, ) ) borgmatic-1.7.9/tests/unit/actions/test_rlist.py000066400000000000000000000012371440467744700220520ustar00rootroot00000000000000from flexmock import flexmock from borgmatic.actions import rlist as module def test_run_rlist_does_not_raise(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.config.validate).should_receive('repositories_match').and_return(True) flexmock(module.borgmatic.borg.rlist).should_receive('list_repository') rlist_arguments = flexmock(repository=flexmock(), json=flexmock()) list( module.run_rlist( repository='repo', storage={}, local_borg_version=None, rlist_arguments=rlist_arguments, local_path=None, remote_path=None, ) ) borgmatic-1.7.9/tests/unit/actions/test_transfer.py000066400000000000000000000011551440467744700225400ustar00rootroot00000000000000from flexmock import flexmock from borgmatic.actions import transfer as module def test_run_transfer_does_not_raise(): flexmock(module.logger).answer = lambda message: None flexmock(module.borgmatic.borg.transfer).should_receive('transfer_archives') transfer_arguments = flexmock() global_arguments = flexmock(monitoring_verbosity=1, dry_run=False) module.run_transfer( repository='repo', storage={}, local_borg_version=None, transfer_arguments=transfer_arguments, global_arguments=global_arguments, local_path=None, remote_path=None, ) borgmatic-1.7.9/tests/unit/borg/000077500000000000000000000000001440467744700165725ustar00rootroot00000000000000borgmatic-1.7.9/tests/unit/borg/__init__.py000066400000000000000000000000001440467744700206710ustar00rootroot00000000000000borgmatic-1.7.9/tests/unit/borg/test_borg.py000066400000000000000000000300731440467744700211370ustar00rootroot00000000000000import logging from flexmock import flexmock from borgmatic.borg 
import borg as module from ..test_verbosity import insert_logging_mock def test_run_arbitrary_borg_calls_borg_with_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'break-lock', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) module.run_arbitrary_borg( repository='repo', storage_config={}, local_borg_version='1.2.3', options=['break-lock'], ) def test_run_arbitrary_borg_with_log_info_calls_borg_with_info_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'break-lock', 'repo', '--info'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) insert_logging_mock(logging.INFO) module.run_arbitrary_borg( repository='repo', storage_config={}, local_borg_version='1.2.3', options=['break-lock'], ) def test_run_arbitrary_borg_with_log_debug_calls_borg_with_debug_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) flexmock(module.flags).should_receive('make_flags').and_return(()) 
flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'break-lock', 'repo', '--debug', '--show-rc'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) insert_logging_mock(logging.DEBUG) module.run_arbitrary_borg( repository='repo', storage_config={}, local_borg_version='1.2.3', options=['break-lock'], ) def test_run_arbitrary_borg_with_lock_wait_calls_borg_with_lock_wait_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER storage_config = {'lock_wait': 5} flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) flexmock(module.flags).should_receive('make_flags').and_return(()).and_return( ('--lock-wait', '5') ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'break-lock', 'repo', '--lock-wait', '5'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) module.run_arbitrary_borg( repository='repo', storage_config=storage_config, local_borg_version='1.2.3', options=['break-lock'], ) def test_run_arbitrary_borg_with_archive_calls_borg_with_archive_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'break-lock', 'repo::archive'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) module.run_arbitrary_borg( repository='repo', 
storage_config={}, local_borg_version='1.2.3', options=['break-lock'], archive='archive', ) def test_run_arbitrary_borg_with_local_path_calls_borg_via_local_path(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg1', 'break-lock', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg1', extra_environment=None, ) module.run_arbitrary_borg( repository='repo', storage_config={}, local_borg_version='1.2.3', options=['break-lock'], local_path='borg1', ) def test_run_arbitrary_borg_with_remote_path_calls_borg_with_remote_path_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) flexmock(module.flags).should_receive('make_flags').and_return( ('--remote-path', 'borg1') ).and_return(()) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'break-lock', 'repo', '--remote-path', 'borg1'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) module.run_arbitrary_borg( repository='repo', storage_config={}, local_borg_version='1.2.3', options=['break-lock'], remote_path='borg1', ) def test_run_arbitrary_borg_passes_borg_specific_parameters_to_borg(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) 
flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'list', 'repo', '--progress'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) module.run_arbitrary_borg( repository='repo', storage_config={}, local_borg_version='1.2.3', options=['list', '--progress'], ) def test_run_arbitrary_borg_omits_dash_dash_in_parameters_passed_to_borg(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'break-lock', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) module.run_arbitrary_borg( repository='repo', storage_config={}, local_borg_version='1.2.3', options=['--', 'break-lock'], ) def test_run_arbitrary_borg_without_borg_specific_parameters_does_not_raise(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_repository_flags').never() flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg',), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) module.run_arbitrary_borg( repository='repo', storage_config={}, local_borg_version='1.2.3', options=[], ) def test_run_arbitrary_borg_passes_key_sub_command_to_borg_before_repository(): 
flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'key', 'export', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) module.run_arbitrary_borg( repository='repo', storage_config={}, local_borg_version='1.2.3', options=['key', 'export'], ) def test_run_arbitrary_borg_passes_debug_sub_command_to_borg_before_repository(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'debug', 'dump-manifest', 'repo', 'path'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) module.run_arbitrary_borg( repository='repo', storage_config={}, local_borg_version='1.2.3', options=['debug', 'dump-manifest', 'path'], ) def test_run_arbitrary_borg_with_debug_info_command_does_not_pass_borg_repository(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_repository_flags').never() flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'debug', 'info'), 
output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) module.run_arbitrary_borg( repository='repo', storage_config={}, local_borg_version='1.2.3', options=['debug', 'info'], ) def test_run_arbitrary_borg_with_debug_convert_profile_command_does_not_pass_borg_repository(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_repository_flags').never() flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'debug', 'convert-profile', 'in', 'out'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) module.run_arbitrary_borg( repository='repo', storage_config={}, local_borg_version='1.2.3', options=['debug', 'convert-profile', 'in', 'out'], ) borgmatic-1.7.9/tests/unit/borg/test_break_lock.py000066400000000000000000000047461440467744700223120ustar00rootroot00000000000000import logging from flexmock import flexmock from borgmatic.borg import break_lock as module from ..test_verbosity import insert_logging_mock def insert_execute_command_mock(command): flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( command, borg_local_path='borg', extra_environment=None, ).once() def test_break_lock_calls_borg_with_required_flags(): flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg', 'break-lock', 'repo')) module.break_lock( repository='repo', storage_config={}, local_borg_version='1.2.3', ) def test_break_lock_calls_borg_with_remote_path_flags(): flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg', 
'break-lock', '--remote-path', 'borg1', 'repo')) module.break_lock( repository='repo', storage_config={}, local_borg_version='1.2.3', remote_path='borg1', ) def test_break_lock_calls_borg_with_umask_flags(): flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg', 'break-lock', '--umask', '0770', 'repo')) module.break_lock( repository='repo', storage_config={'umask': '0770'}, local_borg_version='1.2.3', ) def test_break_lock_calls_borg_with_lock_wait_flags(): flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg', 'break-lock', '--lock-wait', '5', 'repo')) module.break_lock( repository='repo', storage_config={'lock_wait': '5'}, local_borg_version='1.2.3', ) def test_break_lock_with_log_info_calls_borg_with_info_parameter(): flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg', 'break-lock', '--info', 'repo')) insert_logging_mock(logging.INFO) module.break_lock( repository='repo', storage_config={}, local_borg_version='1.2.3', ) def test_break_lock_with_log_debug_calls_borg_with_debug_flags(): flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg', 'break-lock', '--debug', '--show-rc', 'repo')) insert_logging_mock(logging.DEBUG) module.break_lock( repository='repo', storage_config={}, local_borg_version='1.2.3', ) borgmatic-1.7.9/tests/unit/borg/test_check.py000066400000000000000000000622651440467744700212730ustar00rootroot00000000000000import logging import pytest from flexmock import flexmock from borgmatic.borg import check as module from ..test_verbosity import insert_logging_mock def insert_execute_command_mock(command): flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( command, extra_environment=None ).once() def 
insert_execute_command_never(): flexmock(module).should_receive('execute_command').never() def test_parse_checks_returns_them_as_tuple(): checks = module.parse_checks({'checks': [{'name': 'foo'}, {'name': 'bar'}]}) assert checks == ('foo', 'bar') def test_parse_checks_with_missing_value_returns_defaults(): checks = module.parse_checks({}) assert checks == ('repository', 'archives') def test_parse_checks_with_empty_list_returns_defaults(): checks = module.parse_checks({'checks': []}) assert checks == ('repository', 'archives') def test_parse_checks_with_none_value_returns_defaults(): checks = module.parse_checks({'checks': None}) assert checks == ('repository', 'archives') def test_parse_checks_with_disabled_returns_no_checks(): checks = module.parse_checks({'checks': [{'name': 'foo'}, {'name': 'disabled'}]}) assert checks == () def test_parse_checks_prefers_override_checks_to_configured_checks(): checks = module.parse_checks( {'checks': [{'name': 'archives'}]}, only_checks=['repository', 'extract'] ) assert checks == ('repository', 'extract') @pytest.mark.parametrize( 'frequency,expected_result', ( (None, None), ('always', None), ('1 hour', module.datetime.timedelta(hours=1)), ('2 hours', module.datetime.timedelta(hours=2)), ('1 day', module.datetime.timedelta(days=1)), ('2 days', module.datetime.timedelta(days=2)), ('1 week', module.datetime.timedelta(weeks=1)), ('2 weeks', module.datetime.timedelta(weeks=2)), ('1 month', module.datetime.timedelta(days=30)), ('2 months', module.datetime.timedelta(days=60)), ('1 year', module.datetime.timedelta(days=365)), ('2 years', module.datetime.timedelta(days=365 * 2)), ), ) def test_parse_frequency_parses_into_timedeltas(frequency, expected_result): assert module.parse_frequency(frequency) == expected_result @pytest.mark.parametrize( 'frequency', ('sometime', 'x days', '3 decades',), ) def test_parse_frequency_raises_on_parse_error(frequency): with pytest.raises(ValueError): module.parse_frequency(frequency) def 
test_filter_checks_on_frequency_without_config_uses_default_checks(): flexmock(module).should_receive('parse_frequency').and_return( module.datetime.timedelta(weeks=4) ) flexmock(module).should_receive('make_check_time_path') flexmock(module).should_receive('read_check_time').and_return(None) assert module.filter_checks_on_frequency( location_config={}, consistency_config={}, borg_repository_id='repo', checks=('repository', 'archives'), force=False, ) == ('repository', 'archives') def test_filter_checks_on_frequency_retains_unconfigured_check(): assert module.filter_checks_on_frequency( location_config={}, consistency_config={}, borg_repository_id='repo', checks=('data',), force=False, ) == ('data',) def test_filter_checks_on_frequency_retains_check_without_frequency(): flexmock(module).should_receive('parse_frequency').and_return(None) assert module.filter_checks_on_frequency( location_config={}, consistency_config={'checks': [{'name': 'archives'}]}, borg_repository_id='repo', checks=('archives',), force=False, ) == ('archives',) def test_filter_checks_on_frequency_retains_check_with_elapsed_frequency(): flexmock(module).should_receive('parse_frequency').and_return( module.datetime.timedelta(hours=1) ) flexmock(module).should_receive('make_check_time_path') flexmock(module).should_receive('read_check_time').and_return( module.datetime.datetime(year=module.datetime.MINYEAR, month=1, day=1) ) assert module.filter_checks_on_frequency( location_config={}, consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]}, borg_repository_id='repo', checks=('archives',), force=False, ) == ('archives',) def test_filter_checks_on_frequency_retains_check_with_missing_check_time_file(): flexmock(module).should_receive('parse_frequency').and_return( module.datetime.timedelta(hours=1) ) flexmock(module).should_receive('make_check_time_path') flexmock(module).should_receive('read_check_time').and_return(None) assert module.filter_checks_on_frequency( 
location_config={}, consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]}, borg_repository_id='repo', checks=('archives',), force=False, ) == ('archives',) def test_filter_checks_on_frequency_skips_check_with_unelapsed_frequency(): flexmock(module).should_receive('parse_frequency').and_return( module.datetime.timedelta(hours=1) ) flexmock(module).should_receive('make_check_time_path') flexmock(module).should_receive('read_check_time').and_return(module.datetime.datetime.now()) assert ( module.filter_checks_on_frequency( location_config={}, consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]}, borg_repository_id='repo', checks=('archives',), force=False, ) == () ) def test_filter_checks_on_frequency_restains_check_with_unelapsed_frequency_and_force(): assert module.filter_checks_on_frequency( location_config={}, consistency_config={'checks': [{'name': 'archives', 'frequency': '1 hour'}]}, borg_repository_id='repo', checks=('archives',), force=True, ) == ('archives',) def test_make_check_flags_with_repository_check_returns_flag(): flexmock(module.feature).should_receive('available').and_return(True) flags = module.make_check_flags('1.2.3', ('repository',)) assert flags == ('--repository-only',) def test_make_check_flags_with_archives_check_returns_flag(): flexmock(module.feature).should_receive('available').and_return(True) flags = module.make_check_flags('1.2.3', ('archives',)) assert flags == ('--archives-only',) def test_make_check_flags_with_data_check_returns_flag_and_implies_archives(): flexmock(module.feature).should_receive('available').and_return(True) flags = module.make_check_flags('1.2.3', ('data',)) assert flags == ('--archives-only', '--verify-data',) def test_make_check_flags_with_extract_omits_extract_flag(): flexmock(module.feature).should_receive('available').and_return(True) flags = module.make_check_flags('1.2.3', ('extract',)) assert flags == () def 
test_make_check_flags_with_repository_and_data_checks_does_not_return_repository_only(): flexmock(module.feature).should_receive('available').and_return(True) flags = module.make_check_flags('1.2.3', ('repository', 'data',)) assert flags == ('--verify-data',) def test_make_check_flags_with_default_checks_and_default_prefix_returns_default_flags(): flexmock(module.feature).should_receive('available').and_return(True) flags = module.make_check_flags( '1.2.3', ('repository', 'archives'), prefix=module.DEFAULT_PREFIX ) assert flags == ('--match-archives', f'sh:{module.DEFAULT_PREFIX}*') def test_make_check_flags_with_all_checks_and_default_prefix_returns_default_flags(): flexmock(module.feature).should_receive('available').and_return(True) flags = module.make_check_flags( '1.2.3', ('repository', 'archives', 'extract'), prefix=module.DEFAULT_PREFIX ) assert flags == ('--match-archives', f'sh:{module.DEFAULT_PREFIX}*') def test_make_check_flags_with_all_checks_and_default_prefix_without_borg_features_returns_glob_archives_flags(): flexmock(module.feature).should_receive('available').and_return(False) flags = module.make_check_flags( '1.2.3', ('repository', 'archives', 'extract'), prefix=module.DEFAULT_PREFIX ) assert flags == ('--glob-archives', f'{module.DEFAULT_PREFIX}*') def test_make_check_flags_with_archives_check_and_last_includes_last_flag(): flexmock(module.feature).should_receive('available').and_return(True) flags = module.make_check_flags('1.2.3', ('archives',), check_last=3) assert flags == ('--archives-only', '--last', '3') def test_make_check_flags_with_data_check_and_last_includes_last_flag(): flexmock(module.feature).should_receive('available').and_return(True) flags = module.make_check_flags('1.2.3', ('data',), check_last=3) assert flags == ('--archives-only', '--last', '3', '--verify-data') def test_make_check_flags_with_repository_check_and_last_omits_last_flag(): flexmock(module.feature).should_receive('available').and_return(True) flags = 
module.make_check_flags('1.2.3', ('repository',), check_last=3) assert flags == ('--repository-only',) def test_make_check_flags_with_default_checks_and_last_includes_last_flag(): flexmock(module.feature).should_receive('available').and_return(True) flags = module.make_check_flags('1.2.3', ('repository', 'archives'), check_last=3) assert flags == ('--last', '3') def test_make_check_flags_with_archives_check_and_prefix_includes_match_archives_flag(): flexmock(module.feature).should_receive('available').and_return(True) flags = module.make_check_flags('1.2.3', ('archives',), prefix='foo-') assert flags == ('--archives-only', '--match-archives', 'sh:foo-*') def test_make_check_flags_with_data_check_and_prefix_includes_match_archives_flag(): flexmock(module.feature).should_receive('available').and_return(True) flags = module.make_check_flags('1.2.3', ('data',), prefix='foo-') assert flags == ('--archives-only', '--match-archives', 'sh:foo-*', '--verify-data') def test_make_check_flags_with_archives_check_and_empty_prefix_omits_match_archives_flag(): flexmock(module.feature).should_receive('available').and_return(True) flags = module.make_check_flags('1.2.3', ('archives',), prefix='') assert flags == ('--archives-only',) def test_make_check_flags_with_archives_check_and_none_prefix_omits_match_archives_flag(): flexmock(module.feature).should_receive('available').and_return(True) flags = module.make_check_flags('1.2.3', ('archives',), prefix=None) assert flags == ('--archives-only',) def test_make_check_flags_with_repository_check_and_prefix_omits_match_archives_flag(): flexmock(module.feature).should_receive('available').and_return(True) flags = module.make_check_flags('1.2.3', ('repository',), prefix='foo-') assert flags == ('--repository-only',) def test_make_check_flags_with_default_checks_and_prefix_includes_match_archives_flag(): flexmock(module.feature).should_receive('available').and_return(True) flags = module.make_check_flags('1.2.3', ('repository', 
'archives'), prefix='foo-') assert flags == ('--match-archives', 'sh:foo-*') def test_read_check_time_does_not_raise(): flexmock(module.os).should_receive('stat').and_return(flexmock(st_mtime=123)) assert module.read_check_time('/path') def test_read_check_time_on_missing_file_does_not_raise(): flexmock(module.os).should_receive('stat').and_raise(FileNotFoundError) assert module.read_check_time('/path') is None def test_check_archives_with_progress_calls_borg_with_progress_parameter(): checks = ('repository',) consistency_config = {'check_last': None} flexmock(module).should_receive('parse_checks') flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) flexmock(module).should_receive('make_check_flags').and_return(()) flexmock(module).should_receive('execute_command').never() flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'check', '--progress', 'repo'), output_file=module.DO_NOT_CAPTURE, extra_environment=None, ).once() flexmock(module).should_receive('make_check_time_path') flexmock(module).should_receive('write_check_time') module.check_archives( repository='repo', location_config={}, storage_config={}, consistency_config=consistency_config, local_borg_version='1.2.3', progress=True, ) def test_check_archives_with_repair_calls_borg_with_repair_parameter(): checks = ('repository',) consistency_config = {'check_last': None} flexmock(module).should_receive('parse_checks') flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) flexmock(module).should_receive('make_check_flags').and_return(()) 
flexmock(module).should_receive('execute_command').never() flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'check', '--repair', 'repo'), output_file=module.DO_NOT_CAPTURE, extra_environment=None, ).once() flexmock(module).should_receive('make_check_time_path') flexmock(module).should_receive('write_check_time') module.check_archives( repository='repo', location_config={}, storage_config={}, consistency_config=consistency_config, local_borg_version='1.2.3', repair=True, ) @pytest.mark.parametrize( 'checks', ( ('repository',), ('archives',), ('repository', 'archives'), ('repository', 'archives', 'other'), ), ) def test_check_archives_calls_borg_with_parameters(checks): check_last = flexmock() consistency_config = {'check_last': check_last} flexmock(module).should_receive('parse_checks') flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) flexmock(module).should_receive('make_check_flags').with_args( '1.2.3', checks, check_last, module.DEFAULT_PREFIX ).and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg', 'check', 'repo')) flexmock(module).should_receive('make_check_time_path') flexmock(module).should_receive('write_check_time') module.check_archives( repository='repo', location_config={}, storage_config={}, consistency_config=consistency_config, local_borg_version='1.2.3', ) def test_check_archives_with_json_error_raises(): checks = ('archives',) check_last = flexmock() consistency_config = {'check_last': check_last} flexmock(module).should_receive('parse_checks') flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) 
flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"unexpected": {"id": "repo"}}' ) with pytest.raises(ValueError): module.check_archives( repository='repo', location_config={}, storage_config={}, consistency_config=consistency_config, local_borg_version='1.2.3', ) def test_check_archives_with_missing_json_keys_raises(): checks = ('archives',) check_last = flexmock() consistency_config = {'check_last': check_last} flexmock(module).should_receive('parse_checks') flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) flexmock(module.rinfo).should_receive('display_repository_info').and_return('{invalid JSON') with pytest.raises(ValueError): module.check_archives( repository='repo', location_config={}, storage_config={}, consistency_config=consistency_config, local_borg_version='1.2.3', ) def test_check_archives_with_extract_check_calls_extract_only(): checks = ('extract',) check_last = flexmock() consistency_config = {'check_last': check_last} flexmock(module).should_receive('parse_checks') flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) flexmock(module).should_receive('make_check_flags').never() flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) flexmock(module.extract).should_receive('extract_last_archive_dry_run').once() flexmock(module).should_receive('write_check_time') insert_execute_command_never() module.check_archives( repository='repo', location_config={}, storage_config={}, consistency_config=consistency_config, local_borg_version='1.2.3', ) def test_check_archives_with_log_info_calls_borg_with_info_parameter(): checks = ('repository',) consistency_config = {'check_last': None} flexmock(module).should_receive('parse_checks') flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) 
    # (continuation of test_check_archives_with_log_info_calls_borg_with_info_parameter)
    # Remaining mock expectations: repo info JSON, empty check flags, repository flags,
    # INFO-level logging, and the expected `borg check --info repo` invocation.
    flexmock(module.rinfo).should_receive('display_repository_info').and_return(
        '{"repository": {"id": "repo"}}'
    )
    flexmock(module).should_receive('make_check_flags').and_return(())
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_logging_mock(logging.INFO)
    insert_execute_command_mock(('borg', 'check', '--info', 'repo'))
    flexmock(module).should_receive('make_check_time_path')
    flexmock(module).should_receive('write_check_time')

    module.check_archives(
        repository='repo',
        location_config={},
        storage_config={},
        consistency_config=consistency_config,
        local_borg_version='1.2.3',
    )


def test_check_archives_with_log_debug_calls_borg_with_debug_parameter():
    # At DEBUG log level, check_archives() should pass --debug --show-rc to Borg.
    checks = ('repository',)
    consistency_config = {'check_last': None}
    flexmock(module).should_receive('parse_checks')
    flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks)
    flexmock(module.rinfo).should_receive('display_repository_info').and_return(
        '{"repository": {"id": "repo"}}'
    )
    flexmock(module).should_receive('make_check_flags').and_return(())
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_logging_mock(logging.DEBUG)
    insert_execute_command_mock(('borg', 'check', '--debug', '--show-rc', 'repo'))
    flexmock(module).should_receive('make_check_time_path')
    flexmock(module).should_receive('write_check_time')

    module.check_archives(
        repository='repo',
        location_config={},
        storage_config={},
        consistency_config=consistency_config,
        local_borg_version='1.2.3',
    )


def test_check_archives_without_any_checks_bails():
    # When frequency filtering leaves no checks to run, no Borg command should execute.
    consistency_config = {'check_last': None}
    flexmock(module).should_receive('parse_checks')
    flexmock(module).should_receive('filter_checks_on_frequency').and_return(())
    flexmock(module.rinfo).should_receive('display_repository_info').and_return(
        '{"repository": {"id": "repo"}}'
    )
    insert_execute_command_never()

    module.check_archives(
        repository='repo',
        location_config={},
        storage_config={},
consistency_config=consistency_config, local_borg_version='1.2.3', ) def test_check_archives_with_local_path_calls_borg_via_local_path(): checks = ('repository',) check_last = flexmock() consistency_config = {'check_last': check_last} flexmock(module).should_receive('parse_checks') flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) flexmock(module).should_receive('make_check_flags').with_args( '1.2.3', checks, check_last, module.DEFAULT_PREFIX ).and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg1', 'check', 'repo')) flexmock(module).should_receive('make_check_time_path') flexmock(module).should_receive('write_check_time') module.check_archives( repository='repo', location_config={}, storage_config={}, consistency_config=consistency_config, local_borg_version='1.2.3', local_path='borg1', ) def test_check_archives_with_remote_path_calls_borg_with_remote_path_parameters(): checks = ('repository',) check_last = flexmock() consistency_config = {'check_last': check_last} flexmock(module).should_receive('parse_checks') flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) flexmock(module).should_receive('make_check_flags').with_args( '1.2.3', checks, check_last, module.DEFAULT_PREFIX ).and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg', 'check', '--remote-path', 'borg1', 'repo')) flexmock(module).should_receive('make_check_time_path') flexmock(module).should_receive('write_check_time') module.check_archives( repository='repo', location_config={}, storage_config={}, consistency_config=consistency_config, local_borg_version='1.2.3', 
remote_path='borg1', ) def test_check_archives_with_lock_wait_calls_borg_with_lock_wait_parameters(): checks = ('repository',) check_last = flexmock() consistency_config = {'check_last': check_last} flexmock(module).should_receive('parse_checks') flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) flexmock(module).should_receive('make_check_flags').with_args( '1.2.3', checks, check_last, module.DEFAULT_PREFIX ).and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg', 'check', '--lock-wait', '5', 'repo')) flexmock(module).should_receive('make_check_time_path') flexmock(module).should_receive('write_check_time') module.check_archives( repository='repo', location_config={}, storage_config={'lock_wait': 5}, consistency_config=consistency_config, local_borg_version='1.2.3', ) def test_check_archives_with_retention_prefix(): checks = ('repository',) check_last = flexmock() prefix = 'foo-' consistency_config = {'check_last': check_last, 'prefix': prefix} flexmock(module).should_receive('parse_checks') flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) flexmock(module).should_receive('make_check_flags').with_args( '1.2.3', checks, check_last, prefix ).and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg', 'check', 'repo')) flexmock(module).should_receive('make_check_time_path') flexmock(module).should_receive('write_check_time') module.check_archives( repository='repo', location_config={}, storage_config={}, consistency_config=consistency_config, local_borg_version='1.2.3', ) def 
test_check_archives_with_extra_borg_options_calls_borg_with_extra_options(): checks = ('repository',) consistency_config = {'check_last': None} flexmock(module).should_receive('parse_checks') flexmock(module).should_receive('filter_checks_on_frequency').and_return(checks) flexmock(module.rinfo).should_receive('display_repository_info').and_return( '{"repository": {"id": "repo"}}' ) flexmock(module).should_receive('make_check_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg', 'check', '--extra', '--options', 'repo')) flexmock(module).should_receive('make_check_time_path') flexmock(module).should_receive('write_check_time') module.check_archives( repository='repo', location_config={}, storage_config={'extra_borg_options': {'check': '--extra --options'}}, consistency_config=consistency_config, local_borg_version='1.2.3', ) borgmatic-1.7.9/tests/unit/borg/test_compact.py000066400000000000000000000126271440467744700216410ustar00rootroot00000000000000import logging from flexmock import flexmock from borgmatic.borg import compact as module from ..test_verbosity import insert_logging_mock def insert_execute_command_mock(compact_command, output_log_level): flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( compact_command, output_log_level=output_log_level, borg_local_path=compact_command[0], extra_environment=None, ).once() COMPACT_COMMAND = ('borg', 'compact') def test_compact_segments_calls_borg_with_parameters(): flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(COMPACT_COMMAND + ('repo',), logging.INFO) module.compact_segments( dry_run=False, repository='repo', storage_config={}, local_borg_version='1.2.3' ) def test_compact_segments_with_log_info_calls_borg_with_info_parameter(): 
flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(COMPACT_COMMAND + ('--info', 'repo'), logging.INFO) insert_logging_mock(logging.INFO) module.compact_segments( repository='repo', storage_config={}, local_borg_version='1.2.3', dry_run=False ) def test_compact_segments_with_log_debug_calls_borg_with_debug_parameter(): flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(COMPACT_COMMAND + ('--debug', '--show-rc', 'repo'), logging.INFO) insert_logging_mock(logging.DEBUG) module.compact_segments( repository='repo', storage_config={}, local_borg_version='1.2.3', dry_run=False ) def test_compact_segments_with_dry_run_skips_borg_call(): flexmock(module).should_receive('execute_command').never() module.compact_segments( repository='repo', storage_config={}, local_borg_version='1.2.3', dry_run=True ) def test_compact_segments_with_local_path_calls_borg_via_local_path(): flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg1',) + COMPACT_COMMAND[1:] + ('repo',), logging.INFO) module.compact_segments( dry_run=False, repository='repo', storage_config={}, local_borg_version='1.2.3', local_path='borg1', ) def test_compact_segments_with_remote_path_calls_borg_with_remote_path_parameters(): flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(COMPACT_COMMAND + ('--remote-path', 'borg1', 'repo'), logging.INFO) module.compact_segments( dry_run=False, repository='repo', storage_config={}, local_borg_version='1.2.3', remote_path='borg1', ) def test_compact_segments_with_progress_calls_borg_with_progress_parameter(): flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(COMPACT_COMMAND + ('--progress', 'repo'), logging.INFO) module.compact_segments( dry_run=False, repository='repo', 
        # (continuation of test_compact_segments_with_progress_calls_borg_with_progress_parameter)
        storage_config={},
        local_borg_version='1.2.3',
        progress=True,
    )


def test_compact_segments_with_cleanup_commits_calls_borg_with_cleanup_commits_parameter():
    # cleanup_commits=True should add --cleanup-commits to the compact command line.
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_execute_command_mock(COMPACT_COMMAND + ('--cleanup-commits', 'repo'), logging.INFO)

    module.compact_segments(
        dry_run=False,
        repository='repo',
        storage_config={},
        local_borg_version='1.2.3',
        cleanup_commits=True,
    )


def test_compact_segments_with_threshold_calls_borg_with_threshold_parameter():
    # An integer threshold should be stringified into a --threshold flag.
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_execute_command_mock(COMPACT_COMMAND + ('--threshold', '20', 'repo'), logging.INFO)

    module.compact_segments(
        dry_run=False,
        repository='repo',
        storage_config={},
        local_borg_version='1.2.3',
        threshold=20,
    )


def test_compact_segments_with_umask_calls_borg_with_umask_parameters():
    # A storage-config umask should be forwarded to Borg as --umask.
    storage_config = {'umask': '077'}
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_execute_command_mock(COMPACT_COMMAND + ('--umask', '077', 'repo'), logging.INFO)

    module.compact_segments(
        dry_run=False, repository='repo', storage_config=storage_config, local_borg_version='1.2.3'
    )


def test_compact_segments_with_lock_wait_calls_borg_with_lock_wait_parameters():
    # A storage-config lock_wait should be forwarded to Borg as --lock-wait.
    storage_config = {'lock_wait': 5}
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_execute_command_mock(COMPACT_COMMAND + ('--lock-wait', '5', 'repo'), logging.INFO)

    module.compact_segments(
        dry_run=False, repository='repo', storage_config=storage_config, local_borg_version='1.2.3'
    )


def test_compact_segments_with_extra_borg_options_calls_borg_with_extra_options():
    # extra_borg_options for 'compact' should be split and appended to the command.
    flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',))
    insert_execute_command_mock(COMPACT_COMMAND + ('--extra', '--options', 'repo'), logging.INFO)

    module.compact_segments(
        dry_run=False,
        repository='repo',
storage_config={'extra_borg_options': {'compact': '--extra --options'}}, local_borg_version='1.2.3', ) borgmatic-1.7.9/tests/unit/borg/test_create.py000066400000000000000000003435411440467744700214600ustar00rootroot00000000000000import logging import sys import pytest from flexmock import flexmock from borgmatic.borg import create as module from ..test_verbosity import insert_logging_mock def test_expand_directory_with_basic_path_passes_it_through(): flexmock(module.os.path).should_receive('expanduser').and_return('foo') flexmock(module.glob).should_receive('glob').and_return([]) paths = module.expand_directory('foo') assert paths == ['foo'] def test_expand_directory_with_glob_expands(): flexmock(module.os.path).should_receive('expanduser').and_return('foo*') flexmock(module.glob).should_receive('glob').and_return(['foo', 'food']) paths = module.expand_directory('foo*') assert paths == ['foo', 'food'] def test_expand_directories_flattens_expanded_directories(): flexmock(module).should_receive('expand_directory').with_args('~/foo').and_return(['/root/foo']) flexmock(module).should_receive('expand_directory').with_args('bar*').and_return( ['bar', 'barf'] ) paths = module.expand_directories(('~/foo', 'bar*')) assert paths == ('/root/foo', 'bar', 'barf') def test_expand_directories_considers_none_as_no_directories(): paths = module.expand_directories(None) assert paths == () def test_expand_home_directories_expands_tildes(): flexmock(module.os.path).should_receive('expanduser').with_args('~/bar').and_return('/foo/bar') flexmock(module.os.path).should_receive('expanduser').with_args('baz').and_return('baz') paths = module.expand_home_directories(('~/bar', 'baz')) assert paths == ('/foo/bar', 'baz') def test_expand_home_directories_considers_none_as_no_directories(): paths = module.expand_home_directories(None) assert paths == () def test_map_directories_to_devices_gives_device_id_per_path(): 
flexmock(module.os).should_receive('stat').with_args('/foo').and_return(flexmock(st_dev=55)) flexmock(module.os).should_receive('stat').with_args('/bar').and_return(flexmock(st_dev=66)) device_map = module.map_directories_to_devices(('/foo', '/bar')) assert device_map == { '/foo': 55, '/bar': 66, } def test_map_directories_to_devices_with_missing_path_does_not_error(): flexmock(module.os).should_receive('stat').with_args('/foo').and_return(flexmock(st_dev=55)) flexmock(module.os).should_receive('stat').with_args('/bar').and_raise(FileNotFoundError) device_map = module.map_directories_to_devices(('/foo', '/bar')) assert device_map == { '/foo': 55, '/bar': None, } @pytest.mark.parametrize( 'directories,additional_directories,expected_directories', ( ({'/': 1, '/root': 1}, {}, ('/',)), ({'/': 1, '/root/': 1}, {}, ('/',)), ({'/': 1, '/root': 2}, {}, ('/', '/root')), ({'/root': 1, '/': 1}, {}, ('/',)), ({'/root': 1, '/root/foo': 1}, {}, ('/root',)), ({'/root/': 1, '/root/foo': 1}, {}, ('/root/',)), ({'/root': 1, '/root/foo/': 1}, {}, ('/root',)), ({'/root': 1, '/root/foo': 2}, {}, ('/root', '/root/foo')), ({'/root/foo': 1, '/root': 1}, {}, ('/root',)), ({'/root': None, '/root/foo': None}, {}, ('/root', '/root/foo')), ({'/root': 1, '/etc': 1, '/root/foo/bar': 1}, {}, ('/etc', '/root')), ({'/root': 1, '/root/foo': 1, '/root/foo/bar': 1}, {}, ('/root',)), ({'/dup': 1, '/dup': 1}, {}, ('/dup',)), ({'/foo': 1, '/bar': 1}, {}, ('/bar', '/foo')), ({'/foo': 1, '/bar': 2}, {}, ('/bar', '/foo')), ({'/root/foo': 1}, {'/root': 1}, ()), ({'/root/foo': 1}, {'/root': 2}, ('/root/foo',)), ({'/root/foo': 1}, {}, ('/root/foo',)), ), ) def test_deduplicate_directories_removes_child_paths_on_the_same_filesystem( directories, additional_directories, expected_directories ): assert ( module.deduplicate_directories(directories, additional_directories) == expected_directories ) def test_write_pattern_file_writes_pattern_lines(): temporary_file = flexmock(name='filename', flush=lambda: None) 
temporary_file.should_receive('write').with_args('R /foo\n+ /foo/bar') flexmock(module.tempfile).should_receive('NamedTemporaryFile').and_return(temporary_file) module.write_pattern_file(['R /foo', '+ /foo/bar']) def test_write_pattern_file_with_sources_writes_sources_as_roots(): temporary_file = flexmock(name='filename', flush=lambda: None) temporary_file.should_receive('write').with_args('R /foo\n+ /foo/bar\nR /baz\nR /quux') flexmock(module.tempfile).should_receive('NamedTemporaryFile').and_return(temporary_file) module.write_pattern_file(['R /foo', '+ /foo/bar'], sources=['/baz', '/quux']) def test_write_pattern_file_without_patterns_but_with_sources_writes_sources_as_roots(): temporary_file = flexmock(name='filename', flush=lambda: None) temporary_file.should_receive('write').with_args('R /baz\nR /quux') flexmock(module.tempfile).should_receive('NamedTemporaryFile').and_return(temporary_file) module.write_pattern_file([], sources=['/baz', '/quux']) def test_write_pattern_file_with_empty_exclude_patterns_does_not_raise(): module.write_pattern_file([]) def test_write_pattern_file_overwrites_existing_file(): pattern_file = flexmock(name='filename', flush=lambda: None) pattern_file.should_receive('seek').with_args(0).once() pattern_file.should_receive('write').with_args('R /foo\n+ /foo/bar') flexmock(module.tempfile).should_receive('NamedTemporaryFile').never() module.write_pattern_file(['R /foo', '+ /foo/bar'], pattern_file=pattern_file) @pytest.mark.parametrize( 'filename_lists,opened_filenames', ( ([('foo', 'bar'), ('baz', 'quux')], ('foo', 'bar', 'baz', 'quux')), ([None, ('foo', 'bar')], ('foo', 'bar')), ([None, None], ()), ), ) def test_ensure_files_readable_opens_filenames(filename_lists, opened_filenames): for expected_filename in opened_filenames: flexmock(sys.modules['builtins']).should_receive('open').with_args( expected_filename ).and_return(flexmock(close=lambda: None)) module.ensure_files_readable(*filename_lists) def 
test_make_pattern_flags_includes_pattern_filename_when_given(): pattern_flags = module.make_pattern_flags( location_config={'patterns': ['R /', '- /var']}, pattern_filename='/tmp/patterns' ) assert pattern_flags == ('--patterns-from', '/tmp/patterns') def test_make_pattern_flags_includes_patterns_from_filenames_when_in_config(): pattern_flags = module.make_pattern_flags( location_config={'patterns_from': ['patterns', 'other']} ) assert pattern_flags == ('--patterns-from', 'patterns', '--patterns-from', 'other') def test_make_pattern_flags_includes_both_filenames_when_patterns_given_and_patterns_from_in_config(): pattern_flags = module.make_pattern_flags( location_config={'patterns_from': ['patterns']}, pattern_filename='/tmp/patterns' ) assert pattern_flags == ('--patterns-from', 'patterns', '--patterns-from', '/tmp/patterns') def test_make_pattern_flags_considers_none_patterns_from_filenames_as_empty(): pattern_flags = module.make_pattern_flags(location_config={'patterns_from': None}) assert pattern_flags == () def test_make_exclude_flags_includes_exclude_patterns_filename_when_given(): exclude_flags = module.make_exclude_flags( location_config={'exclude_patterns': ['*.pyc', '/var']}, exclude_filename='/tmp/excludes' ) assert exclude_flags == ('--exclude-from', '/tmp/excludes') def test_make_exclude_flags_includes_exclude_from_filenames_when_in_config(): exclude_flags = module.make_exclude_flags( location_config={'exclude_from': ['excludes', 'other']} ) assert exclude_flags == ('--exclude-from', 'excludes', '--exclude-from', 'other') def test_make_exclude_flags_includes_both_filenames_when_patterns_given_and_exclude_from_in_config(): exclude_flags = module.make_exclude_flags( location_config={'exclude_from': ['excludes']}, exclude_filename='/tmp/excludes' ) assert exclude_flags == ('--exclude-from', 'excludes', '--exclude-from', '/tmp/excludes') def test_make_exclude_flags_considers_none_exclude_from_filenames_as_empty(): exclude_flags = 
module.make_exclude_flags(location_config={'exclude_from': None}) assert exclude_flags == () def test_make_exclude_flags_includes_exclude_caches_when_true_in_config(): exclude_flags = module.make_exclude_flags(location_config={'exclude_caches': True}) assert exclude_flags == ('--exclude-caches',) def test_make_exclude_flags_does_not_include_exclude_caches_when_false_in_config(): exclude_flags = module.make_exclude_flags(location_config={'exclude_caches': False}) assert exclude_flags == () def test_make_exclude_flags_includes_exclude_if_present_when_in_config(): exclude_flags = module.make_exclude_flags( location_config={'exclude_if_present': ['exclude_me', 'also_me']} ) assert exclude_flags == ( '--exclude-if-present', 'exclude_me', '--exclude-if-present', 'also_me', ) def test_make_exclude_flags_includes_keep_exclude_tags_when_true_in_config(): exclude_flags = module.make_exclude_flags(location_config={'keep_exclude_tags': True}) assert exclude_flags == ('--keep-exclude-tags',) def test_make_exclude_flags_does_not_include_keep_exclude_tags_when_false_in_config(): exclude_flags = module.make_exclude_flags(location_config={'keep_exclude_tags': False}) assert exclude_flags == () def test_make_exclude_flags_includes_exclude_nodump_when_true_in_config(): exclude_flags = module.make_exclude_flags(location_config={'exclude_nodump': True}) assert exclude_flags == ('--exclude-nodump',) def test_make_exclude_flags_does_not_include_exclude_nodump_when_false_in_config(): exclude_flags = module.make_exclude_flags(location_config={'exclude_nodump': False}) assert exclude_flags == () def test_make_exclude_flags_is_empty_when_config_has_no_excludes(): exclude_flags = module.make_exclude_flags(location_config={}) assert exclude_flags == () def test_make_list_filter_flags_with_debug_and_feature_available_includes_plus_and_minus(): flexmock(module.logger).should_receive('isEnabledFor').and_return(True) flexmock(module.feature).should_receive('available').and_return(True) assert 
module.make_list_filter_flags(local_borg_version=flexmock(), dry_run=False) == 'AME+-' def test_make_list_filter_flags_with_info_and_feature_available_omits_plus_and_minus(): flexmock(module.logger).should_receive('isEnabledFor').and_return(False) flexmock(module.feature).should_receive('available').and_return(True) assert module.make_list_filter_flags(local_borg_version=flexmock(), dry_run=False) == 'AME' def test_make_list_filter_flags_with_debug_and_feature_available_and_dry_run_includes_plus_and_minus(): flexmock(module.logger).should_receive('isEnabledFor').and_return(True) flexmock(module.feature).should_receive('available').and_return(True) assert module.make_list_filter_flags(local_borg_version=flexmock(), dry_run=True) == 'AME+-' def test_make_list_filter_flags_with_info_and_feature_available_and_dry_run_includes_plus_and_minus(): flexmock(module.logger).should_receive('isEnabledFor').and_return(False) flexmock(module.feature).should_receive('available').and_return(True) assert module.make_list_filter_flags(local_borg_version=flexmock(), dry_run=True) == 'AME+-' def test_make_list_filter_flags_with_debug_and_feature_not_available_includes_x(): flexmock(module.logger).should_receive('isEnabledFor').and_return(True) flexmock(module.feature).should_receive('available').and_return(False) assert module.make_list_filter_flags(local_borg_version=flexmock(), dry_run=False) == 'AMEx-' def test_make_list_filter_flags_with_info_and_feature_not_available_omits_x(): flexmock(module.logger).should_receive('isEnabledFor').and_return(False) flexmock(module.feature).should_receive('available').and_return(False) assert module.make_list_filter_flags(local_borg_version=flexmock(), dry_run=False) == 'AME-' def test_collect_borgmatic_source_directories_set_when_directory_exists(): flexmock(module.os.path).should_receive('exists').and_return(True) flexmock(module.os.path).should_receive('expanduser') assert module.collect_borgmatic_source_directories('/tmp') == ['/tmp'] def 
test_collect_borgmatic_source_directories_empty_when_directory_does_not_exist(): flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module.os.path).should_receive('expanduser') assert module.collect_borgmatic_source_directories('/tmp') == [] def test_collect_borgmatic_source_directories_defaults_when_directory_not_given(): flexmock(module.os.path).should_receive('exists').and_return(True) flexmock(module.os.path).should_receive('expanduser') assert module.collect_borgmatic_source_directories(None) == [ module.state.DEFAULT_BORGMATIC_SOURCE_DIRECTORY ] def test_pattern_root_directories_deals_with_none_patterns(): assert module.pattern_root_directories(patterns=None) == [] def test_pattern_root_directories_parses_roots_and_ignores_others(): assert module.pattern_root_directories( ['R /root', '+ /root/foo', '- /root/foo/bar', 'R /baz'] ) == ['/root', '/baz'] @pytest.mark.parametrize( 'character_device,block_device,fifo,expected_result', ( (False, False, False, False), (True, False, False, True), (False, True, False, True), (True, True, False, True), (False, False, True, True), (False, True, True, True), (True, False, True, True), ), ) def test_special_file_looks_at_file_type(character_device, block_device, fifo, expected_result): flexmock(module.os).should_receive('stat').and_return(flexmock(st_mode=flexmock())) flexmock(module.stat).should_receive('S_ISCHR').and_return(character_device) flexmock(module.stat).should_receive('S_ISBLK').and_return(block_device) flexmock(module.stat).should_receive('S_ISFIFO').and_return(fifo) assert module.special_file('/dev/special') == expected_result def test_special_file_treats_broken_symlink_as_non_special(): flexmock(module.os).should_receive('stat').and_raise(FileNotFoundError) assert module.special_file('/broken/symlink') is False def test_any_parent_directories_treats_parents_as_match(): module.any_parent_directories('/foo/bar.txt', ('/foo', '/etc')) def 
test_any_parent_directories_treats_grandparents_as_match(): module.any_parent_directories('/foo/bar/baz.txt', ('/foo', '/etc')) def test_any_parent_directories_treats_unrelated_paths_as_non_match(): module.any_parent_directories('/foo/bar.txt', ('/usr', '/etc')) def test_collect_special_file_paths_parses_special_files_from_borg_dry_run_file_list(): flexmock(module).should_receive('execute_command_and_capture_output').and_return( 'Processing files ...\n- /foo\n+ /bar\n- /baz' ) flexmock(module).should_receive('special_file').and_return(True) flexmock(module).should_receive('any_parent_directories').and_return(False) assert module.collect_special_file_paths( ('borg', 'create'), local_path=None, working_directory=None, borg_environment=None, skip_directories=flexmock(), ) == ('/foo', '/bar', '/baz') def test_collect_special_file_paths_excludes_requested_directories(): flexmock(module).should_receive('execute_command_and_capture_output').and_return( '+ /foo\n- /bar\n- /baz' ) flexmock(module).should_receive('special_file').and_return(True) flexmock(module).should_receive('any_parent_directories').and_return(False).and_return( True ).and_return(False) assert module.collect_special_file_paths( ('borg', 'create'), local_path=None, working_directory=None, borg_environment=None, skip_directories=flexmock(), ) == ('/foo', '/baz') def test_collect_special_file_paths_excludes_non_special_files(): flexmock(module).should_receive('execute_command_and_capture_output').and_return( '+ /foo\n+ /bar\n+ /baz' ) flexmock(module).should_receive('special_file').and_return(True).and_return(False).and_return( True ) flexmock(module).should_receive('any_parent_directories').and_return(False) assert module.collect_special_file_paths( ('borg', 'create'), local_path=None, working_directory=None, borg_environment=None, skip_directories=flexmock(), ) == ('/foo', '/baz') DEFAULT_ARCHIVE_NAME = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}' REPO_ARCHIVE_WITH_PATHS = (f'repo::{DEFAULT_ARCHIVE_NAME}', 
'foo', 'bar') def test_create_archive_calls_borg_with_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create') + REPO_ARCHIVE_WITH_PATHS, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', ) def test_create_archive_calls_borg_with_environment(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER 
flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) environment = {'BORG_THINGY': 'YUP'} flexmock(module.environment).should_receive('make_environment').and_return(environment) flexmock(module).should_receive('execute_command').with_args( ('borg', 'create') + REPO_ARCHIVE_WITH_PATHS, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=environment, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', ) def test_create_archive_with_patterns_calls_borg_with_patterns_including_converted_source_directories(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER pattern_flags = ('--patterns-from', 'patterns') 
flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return( flexmock(name='/tmp/patterns') ).and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(pattern_flags) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create') + pattern_flags + (f'repo::{DEFAULT_ARCHIVE_NAME}',), output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'patterns': ['pattern'], }, storage_config={}, local_borg_version='1.2.3', ) def test_create_archive_with_exclude_patterns_calls_borg_with_excludes(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER exclude_flags = ('--exclude-from', 'excludes') 
flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(('exclude',)) flexmock(module).should_receive('write_pattern_file').and_return(None).and_return( flexmock(name='/tmp/excludes') ) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(exclude_flags) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create') + exclude_flags + REPO_ARCHIVE_WITH_PATHS, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': ['exclude'], }, storage_config={}, local_borg_version='1.2.3', ) def test_create_archive_with_log_info_calls_borg_with_info_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) 
flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create') + REPO_ARCHIVE_WITH_PATHS + ('--info',), output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) insert_logging_mock(logging.INFO) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', ) def test_create_archive_with_log_info_and_json_suppresses_most_borg_output(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) 
flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'create') + REPO_ARCHIVE_WITH_PATHS + ('--json',), working_directory=None, extra_environment=None, ) insert_logging_mock(logging.INFO) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', json=True, ) def test_create_archive_with_log_debug_calls_borg_with_debug_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) 
flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create') + REPO_ARCHIVE_WITH_PATHS + ('--debug', '--show-rc'), output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) insert_logging_mock(logging.DEBUG) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', ) def test_create_archive_with_log_debug_and_json_suppresses_most_borg_output(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) 
flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'create') + REPO_ARCHIVE_WITH_PATHS + ('--json',), working_directory=None, extra_environment=None, ) insert_logging_mock(logging.DEBUG) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', json=True, ) def test_create_archive_with_dry_run_calls_borg_with_dry_run_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) 
flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create', '--dry-run') + REPO_ARCHIVE_WITH_PATHS, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=True, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', ) def test_create_archive_with_stats_and_dry_run_calls_borg_without_stats_parameter(): # --dry-run and --stats are mutually exclusive, see: # https://borgbackup.readthedocs.io/en/stable/usage/create.html#description flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) 
flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create', '--dry-run') + REPO_ARCHIVE_WITH_PATHS + ('--info',), output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) insert_logging_mock(logging.INFO) module.create_archive( dry_run=True, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', stats=True, ) def test_create_archive_with_checkpoint_interval_calls_borg_with_checkpoint_interval_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) 
flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create', '--checkpoint-interval', '600') + REPO_ARCHIVE_WITH_PATHS, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={'checkpoint_interval': 600}, local_borg_version='1.2.3', ) def test_create_archive_with_checkpoint_volume_calls_borg_with_checkpoint_volume_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') 
flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create', '--checkpoint-volume', '1024') + REPO_ARCHIVE_WITH_PATHS, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={'checkpoint_volume': 1024}, local_borg_version='1.2.3', ) def test_create_archive_with_chunker_params_calls_borg_with_chunker_params_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') 
flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create', '--chunker-params', '1,2,3,4') + REPO_ARCHIVE_WITH_PATHS, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={'chunker_params': '1,2,3,4'}, local_borg_version='1.2.3', ) def test_create_archive_with_compression_calls_borg_with_compression_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) 
flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create', '--compression', 'rle') + REPO_ARCHIVE_WITH_PATHS, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={'compression': 'rle'}, local_borg_version='1.2.3', ) @pytest.mark.parametrize( 'feature_available,option_flag', ((True, '--upload-ratelimit'), (False, '--remote-ratelimit')), ) def test_create_archive_with_upload_rate_limit_calls_borg_with_upload_ratelimit_parameters( feature_available, option_flag ): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(feature_available) flexmock(module).should_receive('ensure_files_readable') 
flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create', option_flag, '100') + REPO_ARCHIVE_WITH_PATHS, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={'upload_rate_limit': 100}, local_borg_version='1.2.3', ) def test_create_archive_with_working_directory_calls_borg_with_working_directory(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').with_args('/working/dir').and_return( '/working/dir' ) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) 
flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create') + REPO_ARCHIVE_WITH_PATHS, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory='/working/dir', extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'working_directory': '/working/dir', 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', ) def test_create_archive_with_one_file_system_calls_borg_with_one_file_system_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) 
flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create', '--one-file-system') + REPO_ARCHIVE_WITH_PATHS, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'one_file_system': True, 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', ) @pytest.mark.parametrize( 'feature_available,option_flag', ((True, '--numeric-ids'), (False, '--numeric-owner')), ) def test_create_archive_with_numeric_ids_calls_borg_with_numeric_ids_parameter( feature_available, option_flag ): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(feature_available) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) 
flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create', option_flag) + REPO_ARCHIVE_WITH_PATHS, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'numeric_ids': True, 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', ) def test_create_archive_with_read_special_calls_borg_with_read_special_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( 
(f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('collect_special_file_paths').and_return(()) create_command = ('borg', 'create', '--read-special') + REPO_ARCHIVE_WITH_PATHS flexmock(module).should_receive('execute_command').with_args( create_command + ('--dry-run', '--list'), output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) flexmock(module).should_receive('execute_command').with_args( create_command, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'read_special': True, 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', ) @pytest.mark.parametrize( 'option_name,option_value', (('ctime', True), ('ctime', False), ('birthtime', True), ('birthtime', False),), ) def test_create_archive_with_basic_option_calls_borg_with_corresponding_parameter( option_name, option_value ): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER option_flag = '--no' + option_name.replace('', '') if option_value is False else None flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) 
flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create') + ((option_flag,) if option_flag else ()) + REPO_ARCHIVE_WITH_PATHS, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], option_name: option_value, 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', ) @pytest.mark.parametrize( 'option_value,feature_available,option_flag', ( (True, True, '--atime'), (True, False, None), (False, True, None), (False, False, '--noatime'), ), ) def test_create_archive_with_atime_option_calls_borg_with_corresponding_parameter( option_value, feature_available, option_flag ): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) 
flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(feature_available) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create') + ((option_flag,) if option_flag else ()) + REPO_ARCHIVE_WITH_PATHS, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'atime': option_value, 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', ) @pytest.mark.parametrize( 'option_value,feature_available,option_flag', ( (True, True, None), (True, False, None), (False, True, '--noflags'), (False, False, '--nobsdflags'), ), ) def test_create_archive_with_flags_option_calls_borg_with_corresponding_parameter( option_value, feature_available, option_flag ): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) 
flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(feature_available) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create') + ((option_flag,) if option_flag else ()) + REPO_ARCHIVE_WITH_PATHS, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'flags': option_value, 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', ) def test_create_archive_with_files_cache_calls_borg_with_files_cache_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) 
flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create', '--files-cache', 'ctime,size') + REPO_ARCHIVE_WITH_PATHS, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'files_cache': 'ctime,size', 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', ) def test_create_archive_with_local_path_calls_borg_via_local_path(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) 
flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg1', 'create') + REPO_ARCHIVE_WITH_PATHS, output_log_level=logging.INFO, output_file=None, borg_local_path='borg1', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', local_path='borg1', ) def test_create_archive_with_remote_path_calls_borg_with_remote_path_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) 
flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create', '--remote-path', 'borg1') + REPO_ARCHIVE_WITH_PATHS, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', remote_path='borg1', ) def test_create_archive_with_umask_calls_borg_with_umask_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') 
flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create', '--umask', '740') + REPO_ARCHIVE_WITH_PATHS, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={'umask': 740}, local_borg_version='1.2.3', ) def test_create_archive_with_lock_wait_calls_borg_with_lock_wait_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') 
flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create', '--lock-wait', '5') + REPO_ARCHIVE_WITH_PATHS, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={'lock_wait': 5}, local_borg_version='1.2.3', ) def test_create_archive_with_stats_calls_borg_with_stats_parameter_and_answer_output_log_level(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) 
flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create') + REPO_ARCHIVE_WITH_PATHS + ('--stats',), output_log_level=module.borgmatic.logger.ANSWER, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', stats=True, ) def test_create_archive_with_files_calls_borg_with_list_parameter_and_answer_output_log_level(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) 
flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create', '--list', '--filter', 'FOO') + REPO_ARCHIVE_WITH_PATHS, output_log_level=module.borgmatic.logger.ANSWER, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', list_files=True, ) def test_create_archive_with_progress_and_log_info_calls_borg_with_progress_parameter_and_no_list(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') 
flexmock(module).should_receive('execute_command').with_args( ('borg', 'create') + REPO_ARCHIVE_WITH_PATHS + ('--info', '--progress',), output_log_level=logging.INFO, output_file=module.DO_NOT_CAPTURE, borg_local_path='borg', working_directory=None, extra_environment=None, ) insert_logging_mock(logging.INFO) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', progress=True, ) def test_create_archive_with_progress_calls_borg_with_progress_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create') + 
REPO_ARCHIVE_WITH_PATHS + ('--progress',), output_log_level=logging.INFO, output_file=module.DO_NOT_CAPTURE, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', progress=True, ) def test_create_archive_with_progress_and_stream_processes_calls_borg_with_progress_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER processes = flexmock() flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('collect_special_file_paths').and_return(()) create_command = ( ('borg', 'create', '--one-file-system', '--read-special') + REPO_ARCHIVE_WITH_PATHS + 
('--progress',) ) flexmock(module).should_receive('execute_command_with_processes').with_args( create_command + ('--dry-run', '--list'), processes=processes, output_log_level=logging.INFO, output_file=module.DO_NOT_CAPTURE, borg_local_path='borg', working_directory=None, extra_environment=None, ) flexmock(module).should_receive('execute_command_with_processes').with_args( create_command, processes=processes, output_log_level=logging.INFO, output_file=module.DO_NOT_CAPTURE, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', progress=True, stream_processes=processes, ) def test_create_archive_with_stream_processes_ignores_read_special_false_and_logs_warnings(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER processes = flexmock() flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(flexmock(name='/tmp/excludes')) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module.logger).should_receive('warning').twice() 
flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('collect_special_file_paths').and_return(('/dev/null',)) create_command = ( 'borg', 'create', '--one-file-system', '--read-special', ) + REPO_ARCHIVE_WITH_PATHS flexmock(module).should_receive('execute_command_with_processes').with_args( create_command + ('--dry-run', '--list'), processes=processes, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) flexmock(module).should_receive('execute_command_with_processes').with_args( create_command, processes=processes, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, 'read_special': False, }, storage_config={}, local_borg_version='1.2.3', stream_processes=processes, ) def test_create_archive_with_stream_processes_adds_special_files_to_excludes(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER processes = flexmock() flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) 
flexmock(module).should_receive('expand_home_directories').and_return(()).and_return( ('special',) ) flexmock(module).should_receive('write_pattern_file').and_return(None).and_return( flexmock(name='/excludes') ) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()).and_return( '--exclude-from', '/excludes' ) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('collect_special_file_paths').and_return(('special',)) create_command = ( 'borg', 'create', '--one-file-system', '--read-special', ) + REPO_ARCHIVE_WITH_PATHS flexmock(module).should_receive('execute_command_with_processes').with_args( create_command + ('--dry-run', '--list'), processes=processes, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) flexmock(module).should_receive('execute_command_with_processes').with_args( create_command + ('--exclude-from', '/excludes'), processes=processes, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', stream_processes=processes, ) def test_create_archive_with_stream_processes_and_read_special_does_not_add_special_files_to_excludes(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER 
processes = flexmock() flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()).and_return( ('special',) ) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('collect_special_file_paths').and_return(('special',)) create_command = ( 'borg', 'create', '--one-file-system', '--read-special', ) + REPO_ARCHIVE_WITH_PATHS flexmock(module).should_receive('execute_command_with_processes').with_args( create_command + ('--dry-run', '--list'), processes=processes, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) flexmock(module).should_receive('execute_command_with_processes').with_args( create_command, processes=processes, output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 
'repositories': ['repo'], 'exclude_patterns': None, 'read_special': True, }, storage_config={}, local_borg_version='1.2.3', stream_processes=processes, ) def test_create_archive_with_json_calls_borg_with_json_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'create') + REPO_ARCHIVE_WITH_PATHS + ('--json',), working_directory=None, extra_environment=None, ).and_return('[]') json_output = module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', json=True, ) assert json_output == '[]' def 
test_create_archive_with_stats_and_json_calls_borg_without_stats_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'create') + REPO_ARCHIVE_WITH_PATHS + ('--json',), working_directory=None, extra_environment=None, ).and_return('[]') json_output = module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', json=True, stats=True, ) assert json_output == '[]' def test_create_archive_with_source_directories_glob_expands(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') 
flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'food')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create', 'repo::{}'.format(DEFAULT_ARCHIVE_NAME), 'foo', 'food'), output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) flexmock(module.glob).should_receive('glob').with_args('foo*').and_return(['foo', 'food']) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo*'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', ) def test_create_archive_with_non_matching_source_directories_glob_passes_through(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER 
flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo*',)) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create', 'repo::{}'.format(DEFAULT_ARCHIVE_NAME), 'foo*'), output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) flexmock(module.glob).should_receive('glob').with_args('foo*').and_return([]) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo*'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', ) def test_create_archive_with_glob_calls_borg_with_expanded_directories(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) 
flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'food')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (f'repo::{DEFAULT_ARCHIVE_NAME}',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create', 'repo::{}'.format(DEFAULT_ARCHIVE_NAME), 'foo', 'food'), output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo*'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={}, local_borg_version='1.2.3', ) def test_create_archive_with_archive_name_format_calls_borg_with_archive_name(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) 
flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::ARCHIVE_NAME',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create', 'repo::ARCHIVE_NAME', 'foo', 'bar'), output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={'archive_name_format': 'ARCHIVE_NAME'}, local_borg_version='1.2.3', ) def test_create_archive_with_archive_name_format_accepts_borg_placeholders(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER repository_archive_pattern = 'repo::Documents_{hostname}-{now}' flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) 
flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (repository_archive_pattern,) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create', repository_archive_pattern, 'foo', 'bar'), output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='repo', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['repo'], 'exclude_patterns': None, }, storage_config={'archive_name_format': 'Documents_{hostname}-{now}'}, local_borg_version='1.2.3', ) def test_create_archive_with_repository_accepts_borg_placeholders(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER repository_archive_pattern = '{fqdn}::Documents_{hostname}-{now}' flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) 
flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) flexmock(module).should_receive('expand_home_directories').and_return(()) flexmock(module).should_receive('write_pattern_file').and_return(None) flexmock(module).should_receive('make_list_filter_flags').and_return('FOO') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module).should_receive('ensure_files_readable') flexmock(module).should_receive('make_pattern_flags').and_return(()) flexmock(module).should_receive('make_exclude_flags').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( (repository_archive_pattern,) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'create', repository_archive_pattern, 'foo', 'bar'), output_log_level=logging.INFO, output_file=None, borg_local_path='borg', working_directory=None, extra_environment=None, ) module.create_archive( dry_run=False, repository='{fqdn}', location_config={ 'source_directories': ['foo', 'bar'], 'repositories': ['{fqdn}'], 'exclude_patterns': None, }, storage_config={'archive_name_format': 'Documents_{hostname}-{now}'}, local_borg_version='1.2.3', ) def test_create_archive_with_extra_borg_options_calls_borg_with_extra_options(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([]) flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar')) flexmock(module).should_receive('map_directories_to_devices').and_return({}) flexmock(module).should_receive('expand_directories').and_return(()) flexmock(module).should_receive('pattern_root_directories').and_return([]) flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError) 
def test_create_archive_with_extra_borg_options_calls_borg_with_extra_options():
    # The 'extra_borg_options' storage setting for "create" should get interpolated into
    # the command line right after "borg create", split on whitespace.
    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
    flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([])
    flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar'))
    flexmock(module).should_receive('map_directories_to_devices').and_return({})
    flexmock(module).should_receive('expand_directories').and_return(())
    flexmock(module).should_receive('pattern_root_directories').and_return([])
    flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError)
    flexmock(module).should_receive('expand_home_directories').and_return(())
    flexmock(module).should_receive('write_pattern_file').and_return(None)
    flexmock(module).should_receive('make_list_filter_flags').and_return('FOO')
    flexmock(module.feature).should_receive('available').and_return(True)
    flexmock(module).should_receive('ensure_files_readable')
    flexmock(module).should_receive('make_pattern_flags').and_return(())
    flexmock(module).should_receive('make_exclude_flags').and_return(())
    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
        (f'repo::{DEFAULT_ARCHIVE_NAME}',)
    )
    flexmock(module.environment).should_receive('make_environment')
    flexmock(module).should_receive('execute_command').with_args(
        ('borg', 'create', '--extra', '--options') + REPO_ARCHIVE_WITH_PATHS,
        output_log_level=logging.INFO,
        output_file=None,
        borg_local_path='borg',
        working_directory=None,
        extra_environment=None,
    )

    module.create_archive(
        dry_run=False,
        repository='repo',
        location_config={
            'source_directories': ['foo', 'bar'],
            'repositories': ['repo'],
            'exclude_patterns': None,
        },
        storage_config={'extra_borg_options': {'create': '--extra --options'}},
        local_borg_version='1.2.3',
    )


def test_create_archive_with_stream_processes_calls_borg_with_processes_and_read_special():
    # When database dump processes are streamed into the backup, create_archive() should
    # add --one-file-system and --read-special, and first do a --dry-run --list pass to
    # detect special files before running the real create.
    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
    processes = flexmock()
    flexmock(module).should_receive('collect_borgmatic_source_directories').and_return([])
    flexmock(module).should_receive('deduplicate_directories').and_return(('foo', 'bar'))
    flexmock(module).should_receive('map_directories_to_devices').and_return({})
    flexmock(module).should_receive('expand_directories').and_return(())
    flexmock(module).should_receive('pattern_root_directories').and_return([])
    flexmock(module.os.path).should_receive('expanduser').and_raise(TypeError)
    flexmock(module).should_receive('expand_home_directories').and_return(())
    flexmock(module).should_receive('write_pattern_file').and_return(None)
    flexmock(module).should_receive('make_list_filter_flags').and_return('FOO')
    flexmock(module.feature).should_receive('available').and_return(True)
    flexmock(module).should_receive('ensure_files_readable')
    flexmock(module).should_receive('make_pattern_flags').and_return(())
    flexmock(module).should_receive('make_exclude_flags').and_return(())
    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
        (f'repo::{DEFAULT_ARCHIVE_NAME}',)
    )
    flexmock(module.environment).should_receive('make_environment')
    flexmock(module).should_receive('collect_special_file_paths').and_return(())
    create_command = (
        'borg',
        'create',
        '--one-file-system',
        '--read-special',
    ) + REPO_ARCHIVE_WITH_PATHS
    flexmock(module).should_receive('execute_command_with_processes').with_args(
        # Fixed: the dry-run pass uses the "--list" flag; the original expectation had a
        # bare 'list' argument, which doesn't match Borg's CLI or the sibling test above.
        create_command + ('--dry-run', '--list'),
        processes=processes,
        output_log_level=logging.INFO,
        output_file=None,
        borg_local_path='borg',
        working_directory=None,
        extra_environment=None,
    )
    flexmock(module).should_receive('execute_command_with_processes').with_args(
        create_command,
        processes=processes,
        output_log_level=logging.INFO,
        output_file=None,
        borg_local_path='borg',
        working_directory=None,
        extra_environment=None,
    )

    module.create_archive(
        dry_run=False,
        repository='repo',
        location_config={
            'source_directories': ['foo', 'bar'],
            'repositories': ['repo'],
            'exclude_patterns': None,
        },
        storage_config={},
        local_borg_version='1.2.3',
        stream_processes=processes,
    )


# --- tests/unit/borg/test_environment.py (tar member boundary) ---
from borgmatic.borg import environment as module


def test_make_environment_with_passcommand_should_set_environment():
    # encryption_passcommand should map to the BORG_PASSCOMMAND environment variable.
    environment = module.make_environment({'encryption_passcommand': 'command'})

    assert environment.get('BORG_PASSCOMMAND') == 'command'
def test_make_environment_with_passphrase_should_set_environment():
    # encryption_passphrase should map to the BORG_PASSPHRASE environment variable.
    environment = module.make_environment({'encryption_passphrase': 'pass'})

    assert environment.get('BORG_PASSPHRASE') == 'pass'


def test_make_environment_with_ssh_command_should_set_environment():
    # ssh_command should map to the BORG_RSH environment variable.
    environment = module.make_environment({'ssh_command': 'ssh -C'})

    assert environment.get('BORG_RSH') == 'ssh -C'


def test_make_environment_without_configuration_should_only_set_default_environment():
    # With an empty storage configuration, only the safety defaults should be present.
    environment = module.make_environment({})

    assert environment == {
        'BORG_RELOCATED_REPO_ACCESS_IS_OK': 'no',
        'BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK': 'no',
    }


def test_make_environment_with_relocated_repo_access_should_override_default():
    # An explicit relocated_repo_access_is_ok option should flip the default to "yes".
    environment = module.make_environment({'relocated_repo_access_is_ok': True})

    assert environment.get('BORG_RELOCATED_REPO_ACCESS_IS_OK') == 'yes'


# --- tests/unit/borg/test_export_tar.py (tar member boundary) ---
import logging

from flexmock import flexmock

from borgmatic.borg import export_tar as module

from ..test_verbosity import insert_logging_mock


def insert_execute_command_mock(
    command, output_log_level=logging.INFO, borg_local_path='borg', capture=True
):
    # Shared helper: expect exactly one execute_command() call with the given command
    # line. When capture is False, output is expected to pass through uncaptured.
    flexmock(module.environment).should_receive('make_environment')
    flexmock(module).should_receive('execute_command').with_args(
        command,
        output_file=None if capture else module.DO_NOT_CAPTURE,
        output_log_level=output_log_level,
        borg_local_path=borg_local_path,
        extra_environment=None,
    ).once()


def test_export_tar_archive_calls_borg_with_path_parameters():
    # Requested paths should be appended after the repo::archive and destination.
    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
        ('repo::archive',)
    )
    flexmock(module.os.path).should_receive('abspath').and_return('repo')
    insert_execute_command_mock(
        ('borg', 'export-tar', 'repo::archive', 'test.tar', 'path1', 'path2')
    )

    module.export_tar_archive(
        dry_run=False,
        repository='repo',
        archive='archive',
        paths=['path1', 'path2'],
        destination_path='test.tar',
        storage_config={},
        local_borg_version='1.2.3',
    )


def test_export_tar_archive_calls_borg_with_local_path_parameters():
    # A custom local_path should replace the "borg" executable name.
    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
        ('repo::archive',)
    )
    flexmock(module.os.path).should_receive('abspath').and_return('repo')
    insert_execute_command_mock(
        ('borg1', 'export-tar', 'repo::archive', 'test.tar'), borg_local_path='borg1'
    )

    module.export_tar_archive(
        dry_run=False,
        repository='repo',
        archive='archive',
        paths=None,
        destination_path='test.tar',
        storage_config={},
        local_borg_version='1.2.3',
        local_path='borg1',
    )


def test_export_tar_archive_calls_borg_with_remote_path_parameters():
    # A remote_path option should add the --remote-path flag.
    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
        ('repo::archive',)
    )
    flexmock(module.os.path).should_receive('abspath').and_return('repo')
    insert_execute_command_mock(
        ('borg', 'export-tar', '--remote-path', 'borg1', 'repo::archive', 'test.tar')
    )

    module.export_tar_archive(
        dry_run=False,
        repository='repo',
        archive='archive',
        paths=None,
        destination_path='test.tar',
        storage_config={},
        local_borg_version='1.2.3',
        remote_path='borg1',
    )


def test_export_tar_archive_calls_borg_with_umask_parameters():
    # A umask storage option should add the --umask flag.
    flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels')
    flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER
    flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(
        ('repo::archive',)
    )
    flexmock(module.os.path).should_receive('abspath').and_return('repo')
    insert_execute_command_mock(
        ('borg', 'export-tar', '--umask', '0770', 'repo::archive', 'test.tar')
    )

    module.export_tar_archive(
        dry_run=False,
        repository='repo',
        archive='archive',
        paths=None,
        destination_path='test.tar',
        storage_config={'umask': '0770'},
        local_borg_version='1.2.3',
    )
repository='repo', archive='archive', paths=None, destination_path='test.tar', storage_config={'umask': '0770'}, local_borg_version='1.2.3', ) def test_export_tar_archive_calls_borg_with_lock_wait_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) flexmock(module.os.path).should_receive('abspath').and_return('repo') insert_execute_command_mock( ('borg', 'export-tar', '--lock-wait', '5', 'repo::archive', 'test.tar') ) module.export_tar_archive( dry_run=False, repository='repo', archive='archive', paths=None, destination_path='test.tar', storage_config={'lock_wait': '5'}, local_borg_version='1.2.3', ) def test_export_tar_archive_with_log_info_calls_borg_with_info_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) flexmock(module.os.path).should_receive('abspath').and_return('repo') insert_execute_command_mock(('borg', 'export-tar', '--info', 'repo::archive', 'test.tar')) insert_logging_mock(logging.INFO) module.export_tar_archive( dry_run=False, repository='repo', archive='archive', paths=None, destination_path='test.tar', storage_config={}, local_borg_version='1.2.3', ) def test_export_tar_archive_with_log_debug_calls_borg_with_debug_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) flexmock(module.os.path).should_receive('abspath').and_return('repo') insert_execute_command_mock( ('borg', 'export-tar', '--debug', '--show-rc', 'repo::archive', 'test.tar') ) 
insert_logging_mock(logging.DEBUG) module.export_tar_archive( dry_run=False, repository='repo', archive='archive', paths=None, destination_path='test.tar', storage_config={}, local_borg_version='1.2.3', ) def test_export_tar_archive_calls_borg_with_dry_run_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) flexmock(module.os.path).should_receive('abspath').and_return('repo') flexmock(module).should_receive('execute_command').never() module.export_tar_archive( dry_run=True, repository='repo', archive='archive', paths=None, destination_path='test.tar', storage_config={}, local_borg_version='1.2.3', ) def test_export_tar_archive_calls_borg_with_tar_filter_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) flexmock(module.os.path).should_receive('abspath').and_return('repo') insert_execute_command_mock( ('borg', 'export-tar', '--tar-filter', 'bzip2', 'repo::archive', 'test.tar') ) module.export_tar_archive( dry_run=False, repository='repo', archive='archive', paths=None, destination_path='test.tar', storage_config={}, local_borg_version='1.2.3', tar_filter='bzip2', ) def test_export_tar_archive_calls_borg_with_list_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) flexmock(module.os.path).should_receive('abspath').and_return('repo') insert_execute_command_mock( ('borg', 'export-tar', '--list', 'repo::archive', 'test.tar'), output_log_level=logging.ANSWER, ) 
module.export_tar_archive( dry_run=False, repository='repo', archive='archive', paths=None, destination_path='test.tar', storage_config={}, local_borg_version='1.2.3', list_files=True, ) def test_export_tar_archive_calls_borg_with_strip_components_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) flexmock(module.os.path).should_receive('abspath').and_return('repo') insert_execute_command_mock( ('borg', 'export-tar', '--strip-components', '5', 'repo::archive', 'test.tar') ) module.export_tar_archive( dry_run=False, repository='repo', archive='archive', paths=None, destination_path='test.tar', storage_config={}, local_borg_version='1.2.3', strip_components=5, ) def test_export_tar_archive_skips_abspath_for_remote_repository_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('server:repo::archive',) ) flexmock(module.os.path).should_receive('abspath').never() insert_execute_command_mock(('borg', 'export-tar', 'server:repo::archive', 'test.tar')) module.export_tar_archive( dry_run=False, repository='server:repo', archive='archive', paths=None, destination_path='test.tar', storage_config={}, local_borg_version='1.2.3', ) def test_export_tar_archive_calls_borg_with_stdout_destination_path(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) flexmock(module.os.path).should_receive('abspath').and_return('repo') insert_execute_command_mock(('borg', 'export-tar', 'repo::archive', '-'), capture=False) 
module.export_tar_archive( dry_run=False, repository='repo', archive='archive', paths=None, destination_path='-', storage_config={}, local_borg_version='1.2.3', ) borgmatic-1.7.9/tests/unit/borg/test_extract.py000066400000000000000000000371751440467744700216720ustar00rootroot00000000000000import logging import pytest from flexmock import flexmock from borgmatic.borg import extract as module from ..test_verbosity import insert_logging_mock def insert_execute_command_mock(command, working_directory=None): flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( command, working_directory=working_directory, extra_environment=None, ).once() def test_extract_last_archive_dry_run_calls_borg_with_last_archive(): flexmock(module.rlist).should_receive('resolve_archive_name').and_return('archive') insert_execute_command_mock(('borg', 'extract', '--dry-run', 'repo::archive')) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) module.extract_last_archive_dry_run( storage_config={}, local_borg_version='1.2.3', repository='repo', lock_wait=None ) def test_extract_last_archive_dry_run_without_any_archives_should_not_raise(): flexmock(module.rlist).should_receive('resolve_archive_name').and_raise(ValueError) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return(('repo',)) module.extract_last_archive_dry_run( storage_config={}, local_borg_version='1.2.3', repository='repo', lock_wait=None ) def test_extract_last_archive_dry_run_with_log_info_calls_borg_with_info_parameter(): flexmock(module.rlist).should_receive('resolve_archive_name').and_return('archive') insert_execute_command_mock(('borg', 'extract', '--dry-run', '--info', 'repo::archive')) insert_logging_mock(logging.INFO) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) module.extract_last_archive_dry_run( storage_config={}, 
local_borg_version='1.2.3', repository='repo', lock_wait=None ) def test_extract_last_archive_dry_run_with_log_debug_calls_borg_with_debug_parameter(): flexmock(module.rlist).should_receive('resolve_archive_name').and_return('archive') insert_execute_command_mock( ('borg', 'extract', '--dry-run', '--debug', '--show-rc', '--list', 'repo::archive') ) insert_logging_mock(logging.DEBUG) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) module.extract_last_archive_dry_run( storage_config={}, local_borg_version='1.2.3', repository='repo', lock_wait=None ) def test_extract_last_archive_dry_run_calls_borg_via_local_path(): flexmock(module.rlist).should_receive('resolve_archive_name').and_return('archive') insert_execute_command_mock(('borg1', 'extract', '--dry-run', 'repo::archive')) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) module.extract_last_archive_dry_run( storage_config={}, local_borg_version='1.2.3', repository='repo', lock_wait=None, local_path='borg1', ) def test_extract_last_archive_dry_run_calls_borg_with_remote_path_parameters(): flexmock(module.rlist).should_receive('resolve_archive_name').and_return('archive') insert_execute_command_mock( ('borg', 'extract', '--dry-run', '--remote-path', 'borg1', 'repo::archive') ) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) module.extract_last_archive_dry_run( storage_config={}, local_borg_version='1.2.3', repository='repo', lock_wait=None, remote_path='borg1', ) def test_extract_last_archive_dry_run_calls_borg_with_lock_wait_parameters(): flexmock(module.rlist).should_receive('resolve_archive_name').and_return('archive') insert_execute_command_mock( ('borg', 'extract', '--dry-run', '--lock-wait', '5', 'repo::archive') ) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) 
module.extract_last_archive_dry_run( storage_config={}, local_borg_version='1.2.3', repository='repo', lock_wait=5 ) def test_extract_archive_calls_borg_with_path_parameters(): flexmock(module.os.path).should_receive('abspath').and_return('repo') insert_execute_command_mock(('borg', 'extract', 'repo::archive', 'path1', 'path2')) flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) module.extract_archive( dry_run=False, repository='repo', archive='archive', paths=['path1', 'path2'], location_config={}, storage_config={}, local_borg_version='1.2.3', ) def test_extract_archive_calls_borg_with_remote_path_parameters(): flexmock(module.os.path).should_receive('abspath').and_return('repo') insert_execute_command_mock(('borg', 'extract', '--remote-path', 'borg1', 'repo::archive')) flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) module.extract_archive( dry_run=False, repository='repo', archive='archive', paths=None, location_config={}, storage_config={}, local_borg_version='1.2.3', remote_path='borg1', ) @pytest.mark.parametrize( 'feature_available,option_flag', ((True, '--numeric-ids'), (False, '--numeric-owner'),), ) def test_extract_archive_calls_borg_with_numeric_ids_parameter(feature_available, option_flag): flexmock(module.os.path).should_receive('abspath').and_return('repo') insert_execute_command_mock(('borg', 'extract', option_flag, 'repo::archive')) flexmock(module.feature).should_receive('available').and_return(feature_available) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) module.extract_archive( dry_run=False, repository='repo', archive='archive', paths=None, location_config={'numeric_ids': True}, storage_config={}, local_borg_version='1.2.3', ) def 
test_extract_archive_calls_borg_with_umask_parameters(): flexmock(module.os.path).should_receive('abspath').and_return('repo') insert_execute_command_mock(('borg', 'extract', '--umask', '0770', 'repo::archive')) flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) module.extract_archive( dry_run=False, repository='repo', archive='archive', paths=None, location_config={}, storage_config={'umask': '0770'}, local_borg_version='1.2.3', ) def test_extract_archive_calls_borg_with_lock_wait_parameters(): flexmock(module.os.path).should_receive('abspath').and_return('repo') insert_execute_command_mock(('borg', 'extract', '--lock-wait', '5', 'repo::archive')) flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) module.extract_archive( dry_run=False, repository='repo', archive='archive', paths=None, location_config={}, storage_config={'lock_wait': '5'}, local_borg_version='1.2.3', ) def test_extract_archive_with_log_info_calls_borg_with_info_parameter(): flexmock(module.os.path).should_receive('abspath').and_return('repo') insert_execute_command_mock(('borg', 'extract', '--info', 'repo::archive')) insert_logging_mock(logging.INFO) flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) module.extract_archive( dry_run=False, repository='repo', archive='archive', paths=None, location_config={}, storage_config={}, local_borg_version='1.2.3', ) def test_extract_archive_with_log_debug_calls_borg_with_debug_parameters(): flexmock(module.os.path).should_receive('abspath').and_return('repo') insert_execute_command_mock( ('borg', 'extract', '--debug', '--list', '--show-rc', 'repo::archive') ) insert_logging_mock(logging.DEBUG) 
flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) module.extract_archive( dry_run=False, repository='repo', archive='archive', paths=None, location_config={}, storage_config={}, local_borg_version='1.2.3', ) def test_extract_archive_calls_borg_with_dry_run_parameter(): flexmock(module.os.path).should_receive('abspath').and_return('repo') insert_execute_command_mock(('borg', 'extract', '--dry-run', 'repo::archive')) flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) module.extract_archive( dry_run=True, repository='repo', archive='archive', paths=None, location_config={}, storage_config={}, local_borg_version='1.2.3', ) def test_extract_archive_calls_borg_with_destination_path(): flexmock(module.os.path).should_receive('abspath').and_return('repo') insert_execute_command_mock(('borg', 'extract', 'repo::archive'), working_directory='/dest') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) module.extract_archive( dry_run=False, repository='repo', archive='archive', paths=None, location_config={}, storage_config={}, local_borg_version='1.2.3', destination_path='/dest', ) def test_extract_archive_calls_borg_with_strip_components(): flexmock(module.os.path).should_receive('abspath').and_return('repo') insert_execute_command_mock(('borg', 'extract', '--strip-components', '5', 'repo::archive')) flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) module.extract_archive( dry_run=False, repository='repo', archive='archive', paths=None, location_config={}, storage_config={}, 
local_borg_version='1.2.3', strip_components=5, ) def test_extract_archive_calls_borg_with_strip_components_calculated_from_all(): flexmock(module.os.path).should_receive('abspath').and_return('repo') insert_execute_command_mock( ( 'borg', 'extract', '--strip-components', '2', 'repo::archive', 'foo/bar/baz.txt', 'foo/bar.txt', ) ) flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) module.extract_archive( dry_run=False, repository='repo', archive='archive', paths=['foo/bar/baz.txt', 'foo/bar.txt'], location_config={}, storage_config={}, local_borg_version='1.2.3', strip_components='all', ) def test_extract_archive_with_strip_components_all_and_no_paths_raises(): flexmock(module.os.path).should_receive('abspath').and_return('repo') flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) flexmock(module).should_receive('execute_command').never() with pytest.raises(ValueError): module.extract_archive( dry_run=False, repository='repo', archive='archive', paths=None, location_config={}, storage_config={}, local_borg_version='1.2.3', strip_components='all', ) def test_extract_archive_calls_borg_with_progress_parameter(): flexmock(module.os.path).should_receive('abspath').and_return('repo') flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'extract', '--progress', 'repo::archive'), output_file=module.DO_NOT_CAPTURE, working_directory=None, extra_environment=None, ).once() flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) module.extract_archive( dry_run=False, repository='repo', archive='archive', paths=None, location_config={}, 
storage_config={}, local_borg_version='1.2.3', progress=True, ) def test_extract_archive_with_progress_and_extract_to_stdout_raises(): flexmock(module).should_receive('execute_command').never() with pytest.raises(ValueError): module.extract_archive( dry_run=False, repository='repo', archive='archive', paths=None, location_config={}, storage_config={}, local_borg_version='1.2.3', progress=True, extract_to_stdout=True, ) def test_extract_archive_calls_borg_with_stdout_parameter_and_returns_process(): flexmock(module.os.path).should_receive('abspath').and_return('repo') process = flexmock() flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'extract', '--stdout', 'repo::archive'), output_file=module.subprocess.PIPE, working_directory=None, run_to_completion=False, extra_environment=None, ).and_return(process).once() flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) assert ( module.extract_archive( dry_run=False, repository='repo', archive='archive', paths=None, location_config={}, storage_config={}, local_borg_version='1.2.3', extract_to_stdout=True, ) == process ) def test_extract_archive_skips_abspath_for_remote_repository(): flexmock(module.os.path).should_receive('abspath').never() flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'extract', 'server:repo::archive'), working_directory=None, extra_environment=None, ).once() flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('server:repo::archive',) ) module.extract_archive( dry_run=False, repository='server:repo', archive='archive', paths=None, location_config={}, storage_config={}, local_borg_version='1.2.3', ) 
borgmatic-1.7.9/tests/unit/borg/test_flags.py000066400000000000000000000054361440467744700213070ustar00rootroot00000000000000from flexmock import flexmock from borgmatic.borg import flags as module def test_make_flags_formats_string_value(): assert module.make_flags('foo', 'bar') == ('--foo', 'bar') def test_make_flags_formats_integer_value(): assert module.make_flags('foo', 3) == ('--foo', '3') def test_make_flags_formats_true_value(): assert module.make_flags('foo', True) == ('--foo',) def test_make_flags_omits_false_value(): assert module.make_flags('foo', False) == () def test_make_flags_formats_name_with_underscore(): assert module.make_flags('posix_me_harder', 'okay') == ('--posix-me-harder', 'okay') def test_make_flags_from_arguments_flattens_and_sorts_multiple_arguments(): flexmock(module).should_receive('make_flags').with_args('foo', 'bar').and_return(('foo', 'bar')) flexmock(module).should_receive('make_flags').with_args('baz', 'quux').and_return( ('baz', 'quux') ) arguments = flexmock(foo='bar', baz='quux') assert module.make_flags_from_arguments(arguments) == ('baz', 'quux', 'foo', 'bar') def test_make_flags_from_arguments_excludes_underscored_argument_names(): flexmock(module).should_receive('make_flags').with_args('foo', 'bar').and_return(('foo', 'bar')) arguments = flexmock(foo='bar', _baz='quux') assert module.make_flags_from_arguments(arguments) == ('foo', 'bar') def test_make_flags_from_arguments_omits_excludes(): flexmock(module).should_receive('make_flags').with_args('foo', 'bar').and_return(('foo', 'bar')) arguments = flexmock(foo='bar', baz='quux') assert module.make_flags_from_arguments(arguments, excludes=('baz', 'other')) == ('foo', 'bar') def test_make_repository_flags_with_borg_features_includes_repo_flag(): flexmock(module.feature).should_receive('available').and_return(True) assert module.make_repository_flags(repository='repo', local_borg_version='1.2.3') == ( '--repo', 'repo', ) def 
test_make_repository_flags_without_borg_features_includes_omits_flag(): flexmock(module.feature).should_receive('available').and_return(False) assert module.make_repository_flags(repository='repo', local_borg_version='1.2.3') == ('repo',) def test_make_repository_archive_flags_with_borg_features_separates_repository_and_archive(): flexmock(module.feature).should_receive('available').and_return(True) assert module.make_repository_archive_flags( repository='repo', archive='archive', local_borg_version='1.2.3' ) == ('--repo', 'repo', 'archive',) def test_make_repository_archive_flags_with_borg_features_joins_repository_and_archive(): flexmock(module.feature).should_receive('available').and_return(False) assert module.make_repository_archive_flags( repository='repo', archive='archive', local_borg_version='1.2.3' ) == ('repo::archive',) borgmatic-1.7.9/tests/unit/borg/test_info.py000066400000000000000000000317561440467744700211520ustar00rootroot00000000000000import logging import pytest from flexmock import flexmock from borgmatic.borg import info as module from ..test_verbosity import insert_logging_mock def test_display_archives_info_calls_borg_with_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'info', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) module.display_archives_info( repository='repo', storage_config={}, local_borg_version='2.3.4', info_arguments=flexmock(archive=None, json=False, prefix=None), ) def 
test_display_archives_info_with_log_info_calls_borg_with_info_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'info', '--info', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) insert_logging_mock(logging.INFO) module.display_archives_info( repository='repo', storage_config={}, local_borg_version='2.3.4', info_arguments=flexmock(archive=None, json=False, prefix=None), ) def test_display_archives_info_with_log_info_and_json_suppresses_most_borg_output(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(('--json',)) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'info', '--json', '--repo', 'repo'), extra_environment=None, ).and_return('[]') insert_logging_mock(logging.INFO) json_output = module.display_archives_info( repository='repo', storage_config={}, local_borg_version='2.3.4', info_arguments=flexmock(archive=None, json=True, prefix=None), ) assert json_output == '[]' def test_display_archives_info_with_log_debug_calls_borg_with_debug_parameter(): 
flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'info', '--debug', '--show-rc', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) insert_logging_mock(logging.DEBUG) module.display_archives_info( repository='repo', storage_config={}, local_borg_version='2.3.4', info_arguments=flexmock(archive=None, json=False, prefix=None), ) def test_display_archives_info_with_log_debug_and_json_suppresses_most_borg_output(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(('--json',)) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'info', '--json', '--repo', 'repo'), extra_environment=None, ).and_return('[]') insert_logging_mock(logging.DEBUG) json_output = module.display_archives_info( repository='repo', storage_config={}, local_borg_version='2.3.4', info_arguments=flexmock(archive=None, json=True, prefix=None), ) assert json_output == '[]' def test_display_archives_info_with_json_calls_borg_with_json_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = 
module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(('--json',)) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'info', '--json', '--repo', 'repo'), extra_environment=None, ).and_return('[]') json_output = module.display_archives_info( repository='repo', storage_config={}, local_borg_version='2.3.4', info_arguments=flexmock(archive=None, json=True, prefix=None), ) assert json_output == '[]' def test_display_archives_info_with_archive_calls_borg_with_match_archives_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags').with_args( 'match-archives', 'archive' ).and_return(('--match-archives', 'archive')) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'info', '--repo', 'repo', '--match-archives', 'archive'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) module.display_archives_info( repository='repo', storage_config={}, local_borg_version='2.3.4', info_arguments=flexmock(archive='archive', json=False, prefix=None), ) def test_display_archives_info_with_local_path_calls_borg_via_local_path(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER 
flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg1', 'info', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg1', extra_environment=None, ) module.display_archives_info( repository='repo', storage_config={}, local_borg_version='2.3.4', info_arguments=flexmock(archive=None, json=False, prefix=None), local_path='borg1', ) def test_display_archives_info_with_remote_path_calls_borg_with_remote_path_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags').with_args( 'remote-path', 'borg1' ).and_return(('--remote-path', 'borg1')) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'info', '--remote-path', 'borg1', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) module.display_archives_info( repository='repo', storage_config={}, local_borg_version='2.3.4', info_arguments=flexmock(archive=None, json=False, prefix=None), remote_path='borg1', ) def test_display_archives_info_with_lock_wait_calls_borg_with_lock_wait_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER 
flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags').with_args('lock-wait', 5).and_return( ('--lock-wait', '5') ) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) storage_config = {'lock_wait': 5} flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'info', '--lock-wait', '5', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) module.display_archives_info( repository='repo', storage_config=storage_config, local_borg_version='2.3.4', info_arguments=flexmock(archive=None, json=False, prefix=None), ) def test_display_archives_info_with_prefix_calls_borg_with_match_archives_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags').with_args( 'match-archives', 'sh:foo*' ).and_return(('--match-archives', 'sh:foo*')) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'info', '--match-archives', 'sh:foo*', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) module.display_archives_info( repository='repo', storage_config={}, local_borg_version='2.3.4', info_arguments=flexmock(archive=None, json=False, prefix='foo'), ) @pytest.mark.parametrize('argument_name', ('match_archives', 'sort_by', 'first', 'last')) 
def test_display_archives_info_passes_through_arguments_to_borg(argument_name): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flag_name = f"--{argument_name.replace('_', ' ')}" flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return( (flag_name, 'value') ) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'info', flag_name, 'value', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) module.display_archives_info( repository='repo', storage_config={}, local_borg_version='2.3.4', info_arguments=flexmock(archive=None, json=False, prefix=None, **{argument_name: 'value'}), ) borgmatic-1.7.9/tests/unit/borg/test_list.py000066400000000000000000000554051440467744700211670ustar00rootroot00000000000000import argparse import logging import pytest from flexmock import flexmock from borgmatic.borg import list as module from ..test_verbosity import insert_logging_mock def test_make_list_command_includes_log_info(): insert_logging_mock(logging.INFO) flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) command = module.make_list_command( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=flexmock(archive=None, paths=None, json=False), ) assert command == ('borg', 'list', '--info', 'repo') def test_make_list_command_includes_json_but_not_info(): insert_logging_mock(logging.INFO) flexmock(module.flags).should_receive('make_flags').and_return(()) 
flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(('--json',)) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) command = module.make_list_command( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=flexmock(archive=None, paths=None, json=True), ) assert command == ('borg', 'list', '--json', 'repo') def test_make_list_command_includes_log_debug(): insert_logging_mock(logging.DEBUG) flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) command = module.make_list_command( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=flexmock(archive=None, paths=None, json=False), ) assert command == ('borg', 'list', '--debug', '--show-rc', 'repo') def test_make_list_command_includes_json_but_not_debug(): insert_logging_mock(logging.DEBUG) flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(('--json',)) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) command = module.make_list_command( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=flexmock(archive=None, paths=None, json=True), ) assert command == ('borg', 'list', '--json', 'repo') def test_make_list_command_includes_json(): flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(('--json',)) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) command = module.make_list_command( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=flexmock(archive=None, paths=None, json=True), ) assert command == ('borg', 'list', '--json', 
'repo') def test_make_list_command_includes_lock_wait(): flexmock(module.flags).should_receive('make_flags').and_return(()).and_return( ('--lock-wait', '5') ) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) command = module.make_list_command( repository='repo', storage_config={'lock_wait': 5}, local_borg_version='1.2.3', list_arguments=flexmock(archive=None, paths=None, json=False), ) assert command == ('borg', 'list', '--lock-wait', '5', 'repo') def test_make_list_command_includes_archive(): flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) command = module.make_list_command( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=flexmock(archive='archive', paths=None, json=False), ) assert command == ('borg', 'list', 'repo::archive') def test_make_list_command_includes_archive_and_path(): flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) command = module.make_list_command( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=flexmock(archive='archive', paths=['var/lib'], json=False), ) assert command == ('borg', 'list', 'repo::archive', 'var/lib') def test_make_list_command_includes_local_path(): flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) command = module.make_list_command( repository='repo', 
storage_config={}, local_borg_version='1.2.3', list_arguments=flexmock(archive=None, paths=None, json=False), local_path='borg2', ) assert command == ('borg2', 'list', 'repo') def test_make_list_command_includes_remote_path(): flexmock(module.flags).should_receive('make_flags').and_return( ('--remote-path', 'borg2') ).and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) command = module.make_list_command( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=flexmock(archive=None, paths=None, json=False), remote_path='borg2', ) assert command == ('borg', 'list', '--remote-path', 'borg2', 'repo') def test_make_list_command_includes_short(): flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(('--short',)) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) command = module.make_list_command( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=flexmock(archive=None, paths=None, json=False, short=True), ) assert command == ('borg', 'list', '--short', 'repo') @pytest.mark.parametrize( 'argument_name', ( 'prefix', 'match_archives', 'sort_by', 'first', 'last', 'exclude', 'exclude_from', 'pattern', 'patterns_from', ), ) def test_make_list_command_includes_additional_flags(argument_name): flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return( (f"--{argument_name.replace('_', '-')}", 'value') ) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) command = module.make_list_command( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=flexmock( archive=None, paths=None, json=False, find_paths=None, format=None, 
**{argument_name: 'value'}, ), ) assert command == ('borg', 'list', '--' + argument_name.replace('_', '-'), 'value', 'repo') def test_make_find_paths_considers_none_as_empty_paths(): assert module.make_find_paths(None) == () def test_make_find_paths_passes_through_patterns(): find_paths = ( 'fm:*', 'sh:**/*.txt', 're:^.*$', 'pp:root/somedir', 'pf:root/foo.txt', 'R /', 'r /', 'p /', 'P /', '+ /', '- /', '! /', ) assert module.make_find_paths(find_paths) == find_paths def test_make_find_paths_adds_globs_to_path_fragments(): assert module.make_find_paths(('foo.txt',)) == ('sh:**/*foo.txt*/**',) def test_capture_archive_listing_does_not_raise(): flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').and_return('') flexmock(module).should_receive('make_list_command') module.capture_archive_listing( repository='repo', archive='archive', storage_config=flexmock(), local_borg_version=flexmock(), ) def test_list_archive_calls_borg_with_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.logger).answer = lambda message: None list_arguments = argparse.Namespace( archive='archive', paths=None, json=False, find_paths=None, prefix=None, match_archives=None, sort_by=None, first=None, last=None, ) flexmock(module.feature).should_receive('available').and_return(False) flexmock(module).should_receive('make_list_command').with_args( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=list_arguments, local_path='borg', remote_path=None, ).and_return(('borg', 'list', 'repo::archive')) flexmock(module).should_receive('make_find_paths').and_return(()) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'list', 'repo::archive'), output_log_level=module.borgmatic.logger.ANSWER, 
borg_local_path='borg', extra_environment=None, ).once() module.list_archive( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=list_arguments, ) def test_list_archive_with_archive_and_json_errors(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.logger).answer = lambda message: None list_arguments = argparse.Namespace(archive='archive', paths=None, json=True, find_paths=None) flexmock(module.feature).should_receive('available').and_return(False) with pytest.raises(ValueError): module.list_archive( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=list_arguments, ) def test_list_archive_calls_borg_with_local_path(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.logger).answer = lambda message: None list_arguments = argparse.Namespace( archive='archive', paths=None, json=False, find_paths=None, prefix=None, match_archives=None, sort_by=None, first=None, last=None, ) flexmock(module.feature).should_receive('available').and_return(False) flexmock(module).should_receive('make_list_command').with_args( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=list_arguments, local_path='borg2', remote_path=None, ).and_return(('borg2', 'list', 'repo::archive')) flexmock(module).should_receive('make_find_paths').and_return(()) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg2', 'list', 'repo::archive'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg2', extra_environment=None, ).once() module.list_archive( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=list_arguments, local_path='borg2', ) def 
test_list_archive_calls_borg_multiple_times_with_find_paths(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.logger).answer = lambda message: None glob_paths = ('**/*foo.txt*/**',) list_arguments = argparse.Namespace( archive=None, json=False, find_paths=['foo.txt'], prefix=None, match_archives=None, sort_by=None, first=None, last=None, ) flexmock(module.feature).should_receive('available').and_return(False) flexmock(module.rlist).should_receive('make_rlist_command').and_return(('borg', 'list', 'repo')) flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'list', 'repo'), extra_environment=None, ).and_return('archive1\narchive2').once() flexmock(module).should_receive('make_list_command').and_return( ('borg', 'list', 'repo::archive1') ).and_return(('borg', 'list', 'repo::archive2')) flexmock(module).should_receive('make_find_paths').and_return(glob_paths) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'list', 'repo::archive1') + glob_paths, output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ).once() flexmock(module).should_receive('execute_command').with_args( ('borg', 'list', 'repo::archive2') + glob_paths, output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ).once() module.list_archive( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=list_arguments, ) def test_list_archive_calls_borg_with_archive(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.logger).answer = lambda message: None list_arguments = argparse.Namespace( archive='archive', paths=None, json=False, find_paths=None, prefix=None, 
match_archives=None, sort_by=None, first=None, last=None, ) flexmock(module.feature).should_receive('available').and_return(False) flexmock(module).should_receive('make_list_command').with_args( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=list_arguments, local_path='borg', remote_path=None, ).and_return(('borg', 'list', 'repo::archive')) flexmock(module).should_receive('make_find_paths').and_return(()) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'list', 'repo::archive'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ).once() module.list_archive( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=list_arguments, ) def test_list_archive_without_archive_delegates_to_list_repository(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.logger).answer = lambda message: None list_arguments = argparse.Namespace( archive=None, short=None, format=None, json=None, prefix=None, match_archives=None, sort_by=None, first=None, last=None, find_paths=None, ) flexmock(module.feature).should_receive('available').and_return(False) flexmock(module.rlist).should_receive('list_repository') flexmock(module.environment).should_receive('make_environment').never() flexmock(module).should_receive('execute_command').never() module.list_archive( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=list_arguments, ) def test_list_archive_with_borg_features_without_archive_delegates_to_list_repository(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.logger).answer = lambda message: None list_arguments = argparse.Namespace( archive=None, short=None, 
format=None, json=None, prefix=None, match_archives=None, sort_by=None, first=None, last=None, find_paths=None, ) flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.rlist).should_receive('list_repository') flexmock(module.environment).should_receive('make_environment').never() flexmock(module).should_receive('execute_command').never() module.list_archive( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=list_arguments, ) @pytest.mark.parametrize( 'archive_filter_flag', ('prefix', 'match_archives', 'sort_by', 'first', 'last',), ) def test_list_archive_with_archive_ignores_archive_filter_flag(archive_filter_flag,): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.logger).answer = lambda message: None default_filter_flags = { 'prefix': None, 'match_archives': None, 'sort_by': None, 'first': None, 'last': None, } altered_filter_flags = {**default_filter_flags, **{archive_filter_flag: 'foo'}} flexmock(module.feature).should_receive('available').with_args( module.feature.Feature.RLIST, '1.2.3' ).and_return(False) flexmock(module).should_receive('make_list_command').with_args( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=argparse.Namespace( archive='archive', paths=None, json=False, find_paths=None, **default_filter_flags ), local_path='borg', remote_path=None, ).and_return(('borg', 'list', 'repo::archive')) flexmock(module).should_receive('make_find_paths').and_return(()) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'list', 'repo::archive'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ).once() module.list_archive( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=argparse.Namespace( 
archive='archive', paths=None, json=False, find_paths=None, **altered_filter_flags ), ) @pytest.mark.parametrize( 'archive_filter_flag', ('prefix', 'match_archives', 'sort_by', 'first', 'last',), ) def test_list_archive_with_find_paths_allows_archive_filter_flag_but_only_passes_it_to_rlist( archive_filter_flag, ): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.logger).answer = lambda message: None default_filter_flags = { 'prefix': None, 'match_archives': None, 'sort_by': None, 'first': None, 'last': None, } altered_filter_flags = {**default_filter_flags, **{archive_filter_flag: 'foo'}} glob_paths = ('**/*foo.txt*/**',) flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.rlist).should_receive('make_rlist_command').with_args( repository='repo', storage_config={}, local_borg_version='1.2.3', rlist_arguments=argparse.Namespace( repository='repo', short=True, format=None, json=None, **altered_filter_flags ), local_path='borg', remote_path=None, ).and_return(('borg', 'rlist', '--repo', 'repo')) flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'rlist', '--repo', 'repo'), extra_environment=None, ).and_return('archive1\narchive2').once() flexmock(module).should_receive('make_list_command').with_args( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=argparse.Namespace( repository='repo', archive='archive1', paths=None, short=True, format=None, json=None, find_paths=['foo.txt'], **default_filter_flags, ), local_path='borg', remote_path=None, ).and_return(('borg', 'list', '--repo', 'repo', 'archive1')) flexmock(module).should_receive('make_list_command').with_args( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=argparse.Namespace( repository='repo', archive='archive2', paths=None, short=True, format=None, json=None, 
find_paths=['foo.txt'], **default_filter_flags, ), local_path='borg', remote_path=None, ).and_return(('borg', 'list', '--repo', 'repo', 'archive2')) flexmock(module).should_receive('make_find_paths').and_return(glob_paths) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'list', '--repo', 'repo', 'archive1') + glob_paths, output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ).once() flexmock(module).should_receive('execute_command').with_args( ('borg', 'list', '--repo', 'repo', 'archive2') + glob_paths, output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ).once() module.list_archive( repository='repo', storage_config={}, local_borg_version='1.2.3', list_arguments=argparse.Namespace( repository='repo', archive=None, paths=None, short=True, format=None, json=None, find_paths=['foo.txt'], **altered_filter_flags, ), ) borgmatic-1.7.9/tests/unit/borg/test_mount.py000066400000000000000000000162771440467744700213620ustar00rootroot00000000000000import logging from flexmock import flexmock from borgmatic.borg import mount as module from ..test_verbosity import insert_logging_mock def insert_execute_command_mock(command): flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( command, borg_local_path='borg', extra_environment=None, ).once() def test_mount_archive_calls_borg_with_required_flags(): flexmock(module.feature).should_receive('available').and_return(False) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg', 'mount', 'repo', '/mnt')) module.mount_archive( repository='repo', archive=None, mount_point='/mnt', paths=None, foreground=False, options=None, storage_config={}, local_borg_version='1.2.3', ) def 
test_mount_archive_with_borg_features_calls_borg_with_repository_and_match_archives_flags(): flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) insert_execute_command_mock( ('borg', 'mount', '--repo', 'repo', '--match-archives', 'archive', '/mnt') ) module.mount_archive( repository='repo', archive='archive', mount_point='/mnt', paths=None, foreground=False, options=None, storage_config={}, local_borg_version='1.2.3', ) def test_mount_archive_without_archive_calls_borg_with_repository_flags_only(): flexmock(module.feature).should_receive('available').and_return(False) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) insert_execute_command_mock(('borg', 'mount', 'repo::archive', '/mnt')) module.mount_archive( repository='repo', archive='archive', mount_point='/mnt', paths=None, foreground=False, options=None, storage_config={}, local_borg_version='1.2.3', ) def test_mount_archive_calls_borg_with_path_flags(): flexmock(module.feature).should_receive('available').and_return(False) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) insert_execute_command_mock(('borg', 'mount', 'repo::archive', '/mnt', 'path1', 'path2')) module.mount_archive( repository='repo', archive='archive', mount_point='/mnt', paths=['path1', 'path2'], foreground=False, options=None, storage_config={}, local_borg_version='1.2.3', ) def test_mount_archive_calls_borg_with_remote_path_flags(): flexmock(module.feature).should_receive('available').and_return(False) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) insert_execute_command_mock( ('borg', 'mount', '--remote-path', 'borg1', 'repo::archive', '/mnt') ) module.mount_archive( repository='repo', archive='archive', mount_point='/mnt', paths=None, foreground=False, 
options=None, storage_config={}, local_borg_version='1.2.3', remote_path='borg1', ) def test_mount_archive_calls_borg_with_umask_flags(): flexmock(module.feature).should_receive('available').and_return(False) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) insert_execute_command_mock(('borg', 'mount', '--umask', '0770', 'repo::archive', '/mnt')) module.mount_archive( repository='repo', archive='archive', mount_point='/mnt', paths=None, foreground=False, options=None, storage_config={'umask': '0770'}, local_borg_version='1.2.3', ) def test_mount_archive_calls_borg_with_lock_wait_flags(): flexmock(module.feature).should_receive('available').and_return(False) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) insert_execute_command_mock(('borg', 'mount', '--lock-wait', '5', 'repo::archive', '/mnt')) module.mount_archive( repository='repo', archive='archive', mount_point='/mnt', paths=None, foreground=False, options=None, storage_config={'lock_wait': '5'}, local_borg_version='1.2.3', ) def test_mount_archive_with_log_info_calls_borg_with_info_parameter(): flexmock(module.feature).should_receive('available').and_return(False) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) insert_execute_command_mock(('borg', 'mount', '--info', 'repo::archive', '/mnt')) insert_logging_mock(logging.INFO) module.mount_archive( repository='repo', archive='archive', mount_point='/mnt', paths=None, foreground=False, options=None, storage_config={}, local_borg_version='1.2.3', ) def test_mount_archive_with_log_debug_calls_borg_with_debug_flags(): flexmock(module.feature).should_receive('available').and_return(False) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) insert_execute_command_mock(('borg', 'mount', '--debug', '--show-rc', 'repo::archive', '/mnt')) 
insert_logging_mock(logging.DEBUG) module.mount_archive( repository='repo', archive='archive', mount_point='/mnt', paths=None, foreground=False, options=None, storage_config={}, local_borg_version='1.2.3', ) def test_mount_archive_calls_borg_with_foreground_parameter(): flexmock(module.feature).should_receive('available').and_return(False) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'mount', '--foreground', 'repo::archive', '/mnt'), output_file=module.DO_NOT_CAPTURE, borg_local_path='borg', extra_environment=None, ).once() module.mount_archive( repository='repo', archive='archive', mount_point='/mnt', paths=None, foreground=True, options=None, storage_config={}, local_borg_version='1.2.3', ) def test_mount_archive_calls_borg_with_options_flags(): flexmock(module.feature).should_receive('available').and_return(False) flexmock(module.flags).should_receive('make_repository_archive_flags').and_return( ('repo::archive',) ) insert_execute_command_mock(('borg', 'mount', '-o', 'super_mount', 'repo::archive', '/mnt')) module.mount_archive( repository='repo', archive='archive', mount_point='/mnt', paths=None, foreground=False, options='super_mount', storage_config={}, local_borg_version='1.2.3', ) borgmatic-1.7.9/tests/unit/borg/test_prune.py000066400000000000000000000245401440467744700213410ustar00rootroot00000000000000import logging from collections import OrderedDict from flexmock import flexmock from borgmatic.borg import prune as module from ..test_verbosity import insert_logging_mock def insert_execute_command_mock(prune_command, output_log_level): flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( prune_command, output_log_level=output_log_level, borg_local_path=prune_command[0], extra_environment=None, 
).once() BASE_PRUNE_FLAGS = (('--keep-daily', '1'), ('--keep-weekly', '2'), ('--keep-monthly', '3')) def test_make_prune_flags_returns_flags_from_config_plus_default_prefix_glob(): retention_config = OrderedDict((('keep_daily', 1), ('keep_weekly', 2), ('keep_monthly', 3))) flexmock(module.feature).should_receive('available').and_return(True) result = module.make_prune_flags(retention_config, local_borg_version='1.2.3') assert tuple(result) == BASE_PRUNE_FLAGS + (('--match-archives', 'sh:{hostname}-*'),) def test_make_prune_flags_accepts_prefix_with_placeholders(): retention_config = OrderedDict((('keep_daily', 1), ('prefix', 'Documents_{hostname}-{now}'))) flexmock(module.feature).should_receive('available').and_return(True) result = module.make_prune_flags(retention_config, local_borg_version='1.2.3') expected = (('--keep-daily', '1'), ('--match-archives', 'sh:Documents_{hostname}-{now}*')) assert tuple(result) == expected def test_make_prune_flags_with_prefix_without_borg_features_uses_glob_archives(): retention_config = OrderedDict((('keep_daily', 1), ('prefix', 'Documents_{hostname}-{now}'))) flexmock(module.feature).should_receive('available').and_return(False) result = module.make_prune_flags(retention_config, local_borg_version='1.2.3') expected = (('--keep-daily', '1'), ('--glob-archives', 'Documents_{hostname}-{now}*')) assert tuple(result) == expected def test_make_prune_flags_treats_empty_prefix_as_no_prefix(): retention_config = OrderedDict((('keep_daily', 1), ('prefix', ''))) flexmock(module.feature).should_receive('available').and_return(True) result = module.make_prune_flags(retention_config, local_borg_version='1.2.3') expected = (('--keep-daily', '1'),) assert tuple(result) == expected def test_make_prune_flags_treats_none_prefix_as_no_prefix(): retention_config = OrderedDict((('keep_daily', 1), ('prefix', None))) flexmock(module.feature).should_receive('available').and_return(True) result = module.make_prune_flags(retention_config, 
local_borg_version='1.2.3') expected = (('--keep-daily', '1'),) assert tuple(result) == expected PRUNE_COMMAND = ('borg', 'prune', '--keep-daily', '1', '--keep-weekly', '2', '--keep-monthly', '3') def test_prune_archives_calls_borg_with_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(PRUNE_COMMAND + ('repo',), logging.INFO) module.prune_archives( dry_run=False, repository='repo', storage_config={}, retention_config=flexmock(), local_borg_version='1.2.3', ) def test_prune_archives_with_log_info_calls_borg_with_info_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(PRUNE_COMMAND + ('--info', 'repo'), logging.INFO) insert_logging_mock(logging.INFO) module.prune_archives( repository='repo', storage_config={}, dry_run=False, retention_config=flexmock(), local_borg_version='1.2.3', ) def test_prune_archives_with_log_debug_calls_borg_with_debug_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(PRUNE_COMMAND + ('--debug', '--show-rc', 'repo'), logging.INFO) insert_logging_mock(logging.DEBUG) module.prune_archives( repository='repo', storage_config={}, dry_run=False, retention_config=flexmock(), 
local_borg_version='1.2.3', ) def test_prune_archives_with_dry_run_calls_borg_with_dry_run_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(PRUNE_COMMAND + ('--dry-run', 'repo'), logging.INFO) module.prune_archives( repository='repo', storage_config={}, dry_run=True, retention_config=flexmock(), local_borg_version='1.2.3', ) def test_prune_archives_with_local_path_calls_borg_via_local_path(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(('borg1',) + PRUNE_COMMAND[1:] + ('repo',), logging.INFO) module.prune_archives( dry_run=False, repository='repo', storage_config={}, retention_config=flexmock(), local_borg_version='1.2.3', local_path='borg1', ) def test_prune_archives_with_remote_path_calls_borg_with_remote_path_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(PRUNE_COMMAND + ('--remote-path', 'borg1', 'repo'), logging.INFO) module.prune_archives( dry_run=False, repository='repo', storage_config={}, retention_config=flexmock(), local_borg_version='1.2.3', remote_path='borg1', ) def test_prune_archives_with_stats_calls_borg_with_stats_parameter_and_answer_output_log_level(): 
flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(PRUNE_COMMAND + ('--stats', 'repo'), module.borgmatic.logger.ANSWER) module.prune_archives( dry_run=False, repository='repo', storage_config={}, retention_config=flexmock(), local_borg_version='1.2.3', stats=True, ) def test_prune_archives_with_files_calls_borg_with_list_parameter_and_answer_output_log_level(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(PRUNE_COMMAND + ('--list', 'repo'), module.borgmatic.logger.ANSWER) module.prune_archives( dry_run=False, repository='repo', storage_config={}, retention_config=flexmock(), local_borg_version='1.2.3', list_archives=True, ) def test_prune_archives_with_umask_calls_borg_with_umask_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER storage_config = {'umask': '077'} flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(PRUNE_COMMAND + ('--umask', '077', 'repo'), logging.INFO) module.prune_archives( dry_run=False, repository='repo', storage_config=storage_config, retention_config=flexmock(), local_borg_version='1.2.3', ) def test_prune_archives_with_lock_wait_calls_borg_with_lock_wait_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') 
flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER storage_config = {'lock_wait': 5} flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(PRUNE_COMMAND + ('--lock-wait', '5', 'repo'), logging.INFO) module.prune_archives( dry_run=False, repository='repo', storage_config=storage_config, retention_config=flexmock(), local_borg_version='1.2.3', ) def test_prune_archives_with_extra_borg_options_calls_borg_with_extra_options(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module).should_receive('make_prune_flags').and_return(BASE_PRUNE_FLAGS) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) insert_execute_command_mock(PRUNE_COMMAND + ('--extra', '--options', 'repo'), logging.INFO) module.prune_archives( dry_run=False, repository='repo', storage_config={'extra_borg_options': {'prune': '--extra --options'}}, retention_config=flexmock(), local_borg_version='1.2.3', ) borgmatic-1.7.9/tests/unit/borg/test_rcreate.py000066400000000000000000000230771440467744700216410ustar00rootroot00000000000000import logging import subprocess import pytest from flexmock import flexmock from borgmatic.borg import rcreate as module from ..test_verbosity import insert_logging_mock RINFO_SOME_UNKNOWN_EXIT_CODE = -999 RCREATE_COMMAND = ('borg', 'rcreate', '--encryption', 'repokey') def insert_rinfo_command_found_mock(): flexmock(module.rinfo).should_receive('display_repository_info') def insert_rinfo_command_not_found_mock(): flexmock(module.rinfo).should_receive('display_repository_info').and_raise( subprocess.CalledProcessError(module.RINFO_REPOSITORY_NOT_FOUND_EXIT_CODE, []) ) def insert_rcreate_command_mock(rcreate_command, **kwargs): flexmock(module.environment).should_receive('make_environment') 
flexmock(module).should_receive('execute_command').with_args( rcreate_command, output_file=module.DO_NOT_CAPTURE, borg_local_path=rcreate_command[0], extra_environment=None, ).once() def test_create_repository_calls_borg_with_flags(): insert_rinfo_command_not_found_mock() insert_rcreate_command_mock(RCREATE_COMMAND + ('--repo', 'repo')) flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) module.create_repository( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', encryption_mode='repokey', ) def test_create_repository_with_dry_run_skips_borg_call(): insert_rinfo_command_not_found_mock() flexmock(module).should_receive('execute_command').never() flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) module.create_repository( dry_run=True, repository='repo', storage_config={}, local_borg_version='2.3.4', encryption_mode='repokey', ) def test_create_repository_raises_for_borg_rcreate_error(): insert_rinfo_command_not_found_mock() flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').and_raise( module.subprocess.CalledProcessError(2, 'borg rcreate') ) with pytest.raises(subprocess.CalledProcessError): module.create_repository( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', encryption_mode='repokey', ) def test_create_repository_skips_creation_when_repository_already_exists(): insert_rinfo_command_found_mock() flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 
'repo',)) module.create_repository( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', encryption_mode='repokey', ) def test_create_repository_raises_for_unknown_rinfo_command_error(): flexmock(module.rinfo).should_receive('display_repository_info').and_raise( subprocess.CalledProcessError(RINFO_SOME_UNKNOWN_EXIT_CODE, []) ) with pytest.raises(subprocess.CalledProcessError): module.create_repository( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', encryption_mode='repokey', ) def test_create_repository_with_source_repository_calls_borg_with_other_repo_flag(): insert_rinfo_command_not_found_mock() insert_rcreate_command_mock(RCREATE_COMMAND + ('--other-repo', 'other.borg', '--repo', 'repo')) flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) module.create_repository( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', encryption_mode='repokey', source_repository='other.borg', ) def test_create_repository_with_copy_crypt_key_calls_borg_with_copy_crypt_key_flag(): insert_rinfo_command_not_found_mock() insert_rcreate_command_mock(RCREATE_COMMAND + ('--copy-crypt-key', '--repo', 'repo')) flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) module.create_repository( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', encryption_mode='repokey', copy_crypt_key=True, ) def test_create_repository_with_append_only_calls_borg_with_append_only_flag(): insert_rinfo_command_not_found_mock() insert_rcreate_command_mock(RCREATE_COMMAND + ('--append-only', '--repo', 'repo')) flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) 
module.create_repository( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', encryption_mode='repokey', append_only=True, ) def test_create_repository_with_storage_quota_calls_borg_with_storage_quota_flag(): insert_rinfo_command_not_found_mock() insert_rcreate_command_mock(RCREATE_COMMAND + ('--storage-quota', '5G', '--repo', 'repo')) flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) module.create_repository( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', encryption_mode='repokey', storage_quota='5G', ) def test_create_repository_with_make_parent_dirs_calls_borg_with_make_parent_dirs_flag(): insert_rinfo_command_not_found_mock() insert_rcreate_command_mock(RCREATE_COMMAND + ('--make-parent-dirs', '--repo', 'repo')) flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) module.create_repository( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', encryption_mode='repokey', make_parent_dirs=True, ) def test_create_repository_with_log_info_calls_borg_with_info_flag(): insert_rinfo_command_not_found_mock() insert_rcreate_command_mock(RCREATE_COMMAND + ('--info', '--repo', 'repo')) insert_logging_mock(logging.INFO) flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) module.create_repository( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', encryption_mode='repokey', ) def test_create_repository_with_log_debug_calls_borg_with_debug_flag(): insert_rinfo_command_not_found_mock() insert_rcreate_command_mock(RCREATE_COMMAND + ('--debug', '--repo', 'repo')) insert_logging_mock(logging.DEBUG) 
flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) module.create_repository( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', encryption_mode='repokey', ) def test_create_repository_with_local_path_calls_borg_via_local_path(): insert_rinfo_command_not_found_mock() insert_rcreate_command_mock(('borg1',) + RCREATE_COMMAND[1:] + ('--repo', 'repo')) flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) module.create_repository( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', encryption_mode='repokey', local_path='borg1', ) def test_create_repository_with_remote_path_calls_borg_with_remote_path_flag(): insert_rinfo_command_not_found_mock() insert_rcreate_command_mock(RCREATE_COMMAND + ('--remote-path', 'borg1', '--repo', 'repo')) flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) module.create_repository( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', encryption_mode='repokey', remote_path='borg1', ) def test_create_repository_with_extra_borg_options_calls_borg_with_extra_options(): insert_rinfo_command_not_found_mock() insert_rcreate_command_mock(RCREATE_COMMAND + ('--extra', '--options', '--repo', 'repo')) flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) module.create_repository( dry_run=False, repository='repo', storage_config={'extra_borg_options': {'rcreate': '--extra --options'}}, local_borg_version='2.3.4', encryption_mode='repokey', ) 
borgmatic-1.7.9/tests/unit/borg/test_rinfo.py000066400000000000000000000221761440467744700213300ustar00rootroot00000000000000import logging from flexmock import flexmock from borgmatic.borg import rinfo as module from ..test_verbosity import insert_logging_mock def test_display_repository_info_calls_borg_with_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'rinfo', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) module.display_repository_info( repository='repo', storage_config={}, local_borg_version='2.3.4', rinfo_arguments=flexmock(json=False), ) def test_display_repository_info_without_borg_features_calls_borg_with_info_sub_command(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.feature).should_receive('available').and_return(False) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'info', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) module.display_repository_info( repository='repo', storage_config={}, local_borg_version='2.3.4', rinfo_arguments=flexmock(json=False), ) def test_display_repository_info_with_log_info_calls_borg_with_info_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = 
module.borgmatic.logger.ANSWER flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'rinfo', '--info', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) insert_logging_mock(logging.INFO) module.display_repository_info( repository='repo', storage_config={}, local_borg_version='2.3.4', rinfo_arguments=flexmock(json=False), ) def test_display_repository_info_with_log_info_and_json_suppresses_most_borg_output(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'rinfo', '--json', '--repo', 'repo'), extra_environment=None, ).and_return('[]') insert_logging_mock(logging.INFO) json_output = module.display_repository_info( repository='repo', storage_config={}, local_borg_version='2.3.4', rinfo_arguments=flexmock(json=True), ) assert json_output == '[]' def test_display_repository_info_with_log_debug_calls_borg_with_debug_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 
'rinfo', '--debug', '--show-rc', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) insert_logging_mock(logging.DEBUG) module.display_repository_info( repository='repo', storage_config={}, local_borg_version='2.3.4', rinfo_arguments=flexmock(json=False), ) def test_display_repository_info_with_log_debug_and_json_suppresses_most_borg_output(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'rinfo', '--json', '--repo', 'repo'), extra_environment=None, ).and_return('[]') insert_logging_mock(logging.DEBUG) json_output = module.display_repository_info( repository='repo', storage_config={}, local_borg_version='2.3.4', rinfo_arguments=flexmock(json=True), ) assert json_output == '[]' def test_display_repository_info_with_json_calls_borg_with_json_parameter(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'rinfo', '--json', '--repo', 'repo'), extra_environment=None, ).and_return('[]') json_output = module.display_repository_info( repository='repo', storage_config={}, local_borg_version='2.3.4', rinfo_arguments=flexmock(json=True), ) assert json_output == '[]' def 
test_display_repository_info_with_local_path_calls_borg_via_local_path(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg1', 'rinfo', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg1', extra_environment=None, ) module.display_repository_info( repository='repo', storage_config={}, local_borg_version='2.3.4', rinfo_arguments=flexmock(json=False), local_path='borg1', ) def test_display_repository_info_with_remote_path_calls_borg_with_remote_path_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.feature).should_receive('available').and_return(True) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'rinfo', '--remote-path', 'borg1', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) module.display_repository_info( repository='repo', storage_config={}, local_borg_version='2.3.4', rinfo_arguments=flexmock(json=False), remote_path='borg1', ) def test_display_repository_info_with_lock_wait_calls_borg_with_lock_wait_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER storage_config = {'lock_wait': 5} flexmock(module.feature).should_receive('available').and_return(True) 
flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo',)) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'rinfo', '--lock-wait', '5', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ) module.display_repository_info( repository='repo', storage_config=storage_config, local_borg_version='2.3.4', rinfo_arguments=flexmock(json=False), ) borgmatic-1.7.9/tests/unit/borg/test_rlist.py000066400000000000000000000333701440467744700213460ustar00rootroot00000000000000import argparse import logging import pytest from flexmock import flexmock from borgmatic.borg import rlist as module from ..test_verbosity import insert_logging_mock BORG_LIST_LATEST_ARGUMENTS = ( '--last', '1', '--short', 'repo', ) def test_resolve_archive_name_passes_through_non_latest_archive_name(): archive = 'myhost-2030-01-01T14:41:17.647620' assert ( module.resolve_archive_name('repo', archive, storage_config={}, local_borg_version='1.2.3') == archive ) def test_resolve_archive_name_calls_borg_with_parameters(): expected_archive = 'archive-name' flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'list') + BORG_LIST_LATEST_ARGUMENTS, extra_environment=None, ).and_return(expected_archive + '\n') assert ( module.resolve_archive_name('repo', 'latest', storage_config={}, local_borg_version='1.2.3') == expected_archive ) def test_resolve_archive_name_with_log_info_calls_borg_without_info_parameter(): expected_archive = 'archive-name' flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'list') + BORG_LIST_LATEST_ARGUMENTS, extra_environment=None, ).and_return(expected_archive + '\n') insert_logging_mock(logging.INFO) assert 
( module.resolve_archive_name('repo', 'latest', storage_config={}, local_borg_version='1.2.3') == expected_archive ) def test_resolve_archive_name_with_log_debug_calls_borg_without_debug_parameter(): expected_archive = 'archive-name' flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'list') + BORG_LIST_LATEST_ARGUMENTS, extra_environment=None, ).and_return(expected_archive + '\n') insert_logging_mock(logging.DEBUG) assert ( module.resolve_archive_name('repo', 'latest', storage_config={}, local_borg_version='1.2.3') == expected_archive ) def test_resolve_archive_name_with_local_path_calls_borg_via_local_path(): expected_archive = 'archive-name' flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg1', 'list') + BORG_LIST_LATEST_ARGUMENTS, extra_environment=None, ).and_return(expected_archive + '\n') assert ( module.resolve_archive_name( 'repo', 'latest', storage_config={}, local_borg_version='1.2.3', local_path='borg1' ) == expected_archive ) def test_resolve_archive_name_with_remote_path_calls_borg_with_remote_path_parameters(): expected_archive = 'archive-name' flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'list', '--remote-path', 'borg1') + BORG_LIST_LATEST_ARGUMENTS, extra_environment=None, ).and_return(expected_archive + '\n') assert ( module.resolve_archive_name( 'repo', 'latest', storage_config={}, local_borg_version='1.2.3', remote_path='borg1' ) == expected_archive ) def test_resolve_archive_name_without_archives_raises(): flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'list') + BORG_LIST_LATEST_ARGUMENTS, extra_environment=None, ).and_return('') 
with pytest.raises(ValueError): module.resolve_archive_name('repo', 'latest', storage_config={}, local_borg_version='1.2.3') def test_resolve_archive_name_with_lock_wait_calls_borg_with_lock_wait_parameters(): expected_archive = 'archive-name' flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('borg', 'list', '--lock-wait', 'okay') + BORG_LIST_LATEST_ARGUMENTS, extra_environment=None, ).and_return(expected_archive + '\n') assert ( module.resolve_archive_name( 'repo', 'latest', storage_config={'lock_wait': 'okay'}, local_borg_version='1.2.3' ) == expected_archive ) def test_make_rlist_command_includes_log_info(): insert_logging_mock(logging.INFO) flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) command = module.make_rlist_command( repository='repo', storage_config={}, local_borg_version='1.2.3', rlist_arguments=flexmock(archive=None, paths=None, json=False, prefix=None), ) assert command == ('borg', 'list', '--info', 'repo') def test_make_rlist_command_includes_json_but_not_info(): insert_logging_mock(logging.INFO) flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(('--json',)) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) command = module.make_rlist_command( repository='repo', storage_config={}, local_borg_version='1.2.3', rlist_arguments=flexmock(archive=None, paths=None, json=True, prefix=None), ) assert command == ('borg', 'list', '--json', 'repo') def test_make_rlist_command_includes_log_debug(): insert_logging_mock(logging.DEBUG) flexmock(module.flags).should_receive('make_flags').and_return(()) 
flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) command = module.make_rlist_command( repository='repo', storage_config={}, local_borg_version='1.2.3', rlist_arguments=flexmock(archive=None, paths=None, json=False, prefix=None), ) assert command == ('borg', 'list', '--debug', '--show-rc', 'repo') def test_make_rlist_command_includes_json_but_not_debug(): insert_logging_mock(logging.DEBUG) flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(('--json',)) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) command = module.make_rlist_command( repository='repo', storage_config={}, local_borg_version='1.2.3', rlist_arguments=flexmock(archive=None, paths=None, json=True, prefix=None), ) assert command == ('borg', 'list', '--json', 'repo') def test_make_rlist_command_includes_json(): flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(('--json',)) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) command = module.make_rlist_command( repository='repo', storage_config={}, local_borg_version='1.2.3', rlist_arguments=flexmock(archive=None, paths=None, json=True, prefix=None), ) assert command == ('borg', 'list', '--json', 'repo') def test_make_rlist_command_includes_lock_wait(): flexmock(module.flags).should_receive('make_flags').and_return(()).and_return( ('--lock-wait', '5') ).and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) command = module.make_rlist_command( repository='repo', storage_config={'lock_wait': 5}, local_borg_version='1.2.3', rlist_arguments=flexmock(archive=None, 
paths=None, json=False, prefix=None), ) assert command == ('borg', 'list', '--lock-wait', '5', 'repo') def test_make_rlist_command_includes_local_path(): flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) command = module.make_rlist_command( repository='repo', storage_config={}, local_borg_version='1.2.3', rlist_arguments=flexmock(archive=None, paths=None, json=False, prefix=None), local_path='borg2', ) assert command == ('borg2', 'list', 'repo') def test_make_rlist_command_includes_remote_path(): flexmock(module.flags).should_receive('make_flags').and_return( ('--remote-path', 'borg2') ).and_return(()).and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) command = module.make_rlist_command( repository='repo', storage_config={}, local_borg_version='1.2.3', rlist_arguments=flexmock(archive=None, paths=None, json=False, prefix=None), remote_path='borg2', ) assert command == ('borg', 'list', '--remote-path', 'borg2', 'repo') def test_make_rlist_command_transforms_prefix_into_match_archives(): flexmock(module.flags).should_receive('make_flags').and_return(()).and_return(()).and_return( ('--match-archives', 'sh:foo*') ) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) command = module.make_rlist_command( repository='repo', storage_config={}, local_borg_version='1.2.3', rlist_arguments=flexmock(archive=None, paths=None, json=False, prefix='foo'), ) assert command == ('borg', 'list', '--match-archives', 'sh:foo*', 'repo') def test_make_rlist_command_includes_short(): flexmock(module.flags).should_receive('make_flags').and_return(()) 
flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(('--short',)) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) command = module.make_rlist_command( repository='repo', storage_config={}, local_borg_version='1.2.3', rlist_arguments=flexmock(archive=None, paths=None, json=False, prefix=None, short=True), ) assert command == ('borg', 'list', '--short', 'repo') @pytest.mark.parametrize( 'argument_name', ( 'match_archives', 'sort_by', 'first', 'last', 'exclude', 'exclude_from', 'pattern', 'patterns_from', ), ) def test_make_rlist_command_includes_additional_flags(argument_name): flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return( (f"--{argument_name.replace('_', '-')}", 'value') ) flexmock(module.flags).should_receive('make_repository_flags').and_return(('repo',)) command = module.make_rlist_command( repository='repo', storage_config={}, local_borg_version='1.2.3', rlist_arguments=flexmock( archive=None, paths=None, json=False, prefix=None, find_paths=None, format=None, **{argument_name: 'value'}, ), ) assert command == ('borg', 'list', '--' + argument_name.replace('_', '-'), 'value', 'repo') def test_list_repository_calls_borg_with_parameters(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER rlist_arguments = argparse.Namespace(json=False) flexmock(module.feature).should_receive('available').and_return(False) flexmock(module).should_receive('make_rlist_command').with_args( repository='repo', storage_config={}, local_borg_version='1.2.3', rlist_arguments=rlist_arguments, local_path='borg', remote_path=None, ).and_return(('borg', 'rlist', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'rlist', 'repo'), 
output_log_level=module.borgmatic.logger.ANSWER, borg_local_path='borg', extra_environment=None, ).once() module.list_repository( repository='repo', storage_config={}, local_borg_version='1.2.3', rlist_arguments=rlist_arguments, ) def test_list_repository_with_json_returns_borg_output(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER rlist_arguments = argparse.Namespace(json=True) json_output = flexmock() flexmock(module.feature).should_receive('available').and_return(False) flexmock(module).should_receive('make_rlist_command').with_args( repository='repo', storage_config={}, local_borg_version='1.2.3', rlist_arguments=rlist_arguments, local_path='borg', remote_path=None, ).and_return(('borg', 'rlist', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').and_return(json_output) assert ( module.list_repository( repository='repo', storage_config={}, local_borg_version='1.2.3', rlist_arguments=rlist_arguments, ) == json_output ) borgmatic-1.7.9/tests/unit/borg/test_transfer.py000066400000000000000000000346601440467744700220400ustar00rootroot00000000000000import logging import pytest from flexmock import flexmock from borgmatic.borg import transfer as module from ..test_verbosity import insert_logging_mock def test_transfer_archives_calls_borg_with_flags(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'transfer', '--repo', 
'repo'), output_log_level=module.borgmatic.logger.ANSWER, output_file=None, borg_local_path='borg', extra_environment=None, ) module.transfer_archives( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, progress=None, match_archives=None, source_repository=None ), ) def test_transfer_archives_with_dry_run_calls_borg_with_dry_run_flag(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags').with_args('dry-run', True).and_return( ('--dry-run',) ) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'transfer', '--repo', 'repo', '--dry-run'), output_log_level=module.borgmatic.logger.ANSWER, output_file=None, borg_local_path='borg', extra_environment=None, ) module.transfer_archives( dry_run=True, repository='repo', storage_config={}, local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, progress=None, match_archives=None, source_repository=None ), ) def test_transfer_archives_with_log_info_calls_borg_with_info_flag(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 
'transfer', '--info', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, output_file=None, borg_local_path='borg', extra_environment=None, ) insert_logging_mock(logging.INFO) module.transfer_archives( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, progress=None, match_archives=None, source_repository=None ), ) def test_transfer_archives_with_log_debug_calls_borg_with_debug_flag(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'transfer', '--debug', '--show-rc', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, output_file=None, borg_local_path='borg', extra_environment=None, ) insert_logging_mock(logging.DEBUG) module.transfer_archives( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, progress=None, match_archives=None, source_repository=None ), ) def test_transfer_archives_with_archive_calls_borg_with_match_archives_flag(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags').with_args( 'match-archives', 'archive' ).and_return(('--match-archives', 'archive')) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) 
flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'transfer', '--match-archives', 'archive', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, output_file=None, borg_local_path='borg', extra_environment=None, ) module.transfer_archives( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', transfer_arguments=flexmock( archive='archive', progress=None, match_archives=None, source_repository=None ), ) def test_transfer_archives_with_match_archives_calls_borg_with_match_archives_flag(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags').with_args( 'match-archives', 'sh:foo*' ).and_return(('--match-archives', 'sh:foo*')) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'transfer', '--match-archives', 'sh:foo*', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, output_file=None, borg_local_path='borg', extra_environment=None, ) module.transfer_archives( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, progress=None, match_archives='sh:foo*', source_repository=None ), ) def test_transfer_archives_with_local_path_calls_borg_via_local_path(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER 
flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg2', 'transfer', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, output_file=None, borg_local_path='borg2', extra_environment=None, ) module.transfer_archives( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, progress=None, match_archives=None, source_repository=None ), local_path='borg2', ) def test_transfer_archives_with_remote_path_calls_borg_with_remote_path_flags(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags').with_args( 'remote-path', 'borg2' ).and_return(('--remote-path', 'borg2')) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'transfer', '--remote-path', 'borg2', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, output_file=None, borg_local_path='borg', extra_environment=None, ) module.transfer_archives( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, progress=None, match_archives=None, source_repository=None ), remote_path='borg2', ) def test_transfer_archives_with_lock_wait_calls_borg_with_lock_wait_flags(): 
flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags').with_args('lock-wait', 5).and_return( ('--lock-wait', '5') ) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) storage_config = {'lock_wait': 5} flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'transfer', '--lock-wait', '5', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, output_file=None, borg_local_path='borg', extra_environment=None, ) module.transfer_archives( dry_run=False, repository='repo', storage_config=storage_config, local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, progress=None, match_archives=None, source_repository=None ), ) def test_transfer_archives_with_progress_calls_borg_with_progress_flag(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'transfer', '--progress', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, output_file=module.DO_NOT_CAPTURE, borg_local_path='borg', extra_environment=None, ) module.transfer_archives( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, progress=True, 
match_archives=None, source_repository=None ), ) @pytest.mark.parametrize('argument_name', ('upgrader', 'sort_by', 'first', 'last')) def test_transfer_archives_passes_through_arguments_to_borg(argument_name): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER flag_name = f"--{argument_name.replace('_', ' ')}" flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return( (flag_name, 'value') ) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'transfer', flag_name, 'value', '--repo', 'repo'), output_log_level=module.borgmatic.logger.ANSWER, output_file=None, borg_local_path='borg', extra_environment=None, ) module.transfer_archives( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, progress=None, match_archives=None, source_repository=None, **{argument_name: 'value'}, ), ) def test_transfer_archives_with_source_repository_calls_borg_with_other_repo_flags(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.flags).should_receive('make_flags').and_return(()) flexmock(module.flags).should_receive('make_flags').with_args('other-repo', 'other').and_return( ('--other-repo', 'other') ) flexmock(module.flags).should_receive('make_flags_from_arguments').and_return(()) flexmock(module.flags).should_receive('make_repository_flags').and_return(('--repo', 'repo')) flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command').with_args( ('borg', 'transfer', '--repo', 'repo', '--other-repo', 'other'), output_log_level=module.borgmatic.logger.ANSWER, output_file=None, 
borg_local_path='borg', extra_environment=None, ) module.transfer_archives( dry_run=False, repository='repo', storage_config={}, local_borg_version='2.3.4', transfer_arguments=flexmock( archive=None, progress=None, match_archives=None, source_repository='other' ), ) borgmatic-1.7.9/tests/unit/borg/test_umount.py000066400000000000000000000016301440467744700215320ustar00rootroot00000000000000import logging from flexmock import flexmock from borgmatic.borg import umount as module from ..test_verbosity import insert_logging_mock def insert_execute_command_mock(command): flexmock(module).should_receive('execute_command').with_args(command).once() def test_unmount_archive_calls_borg_with_required_parameters(): insert_execute_command_mock(('borg', 'umount', '/mnt')) module.unmount_archive(mount_point='/mnt') def test_unmount_archive_with_log_info_calls_borg_with_info_parameter(): insert_execute_command_mock(('borg', 'umount', '--info', '/mnt')) insert_logging_mock(logging.INFO) module.unmount_archive(mount_point='/mnt') def test_unmount_archive_with_log_debug_calls_borg_with_debug_parameters(): insert_execute_command_mock(('borg', 'umount', '--debug', '--show-rc', '/mnt')) insert_logging_mock(logging.DEBUG) module.unmount_archive(mount_point='/mnt') borgmatic-1.7.9/tests/unit/borg/test_version.py000066400000000000000000000040171440467744700216720ustar00rootroot00000000000000import logging import pytest from flexmock import flexmock from borgmatic.borg import version as module from ..test_verbosity import insert_logging_mock VERSION = '1.2.3' def insert_execute_command_and_capture_output_mock( command, borg_local_path='borg', version_output=f'borg {VERSION}' ): flexmock(module.environment).should_receive('make_environment') flexmock(module).should_receive('execute_command_and_capture_output').with_args( command, extra_environment=None, ).once().and_return(version_output) def test_local_borg_version_calls_borg_with_required_parameters(): 
insert_execute_command_and_capture_output_mock(('borg', '--version')) flexmock(module.environment).should_receive('make_environment') assert module.local_borg_version({}) == VERSION def test_local_borg_version_with_log_info_calls_borg_with_info_parameter(): insert_execute_command_and_capture_output_mock(('borg', '--version', '--info')) insert_logging_mock(logging.INFO) flexmock(module.environment).should_receive('make_environment') assert module.local_borg_version({}) == VERSION def test_local_borg_version_with_log_debug_calls_borg_with_debug_parameters(): insert_execute_command_and_capture_output_mock(('borg', '--version', '--debug', '--show-rc')) insert_logging_mock(logging.DEBUG) flexmock(module.environment).should_receive('make_environment') assert module.local_borg_version({}) == VERSION def test_local_borg_version_with_local_borg_path_calls_borg_with_it(): insert_execute_command_and_capture_output_mock(('borg1', '--version'), borg_local_path='borg1') flexmock(module.environment).should_receive('make_environment') assert module.local_borg_version({}, 'borg1') == VERSION def test_local_borg_version_with_invalid_version_raises(): insert_execute_command_and_capture_output_mock(('borg', '--version'), version_output='wtf') flexmock(module.environment).should_receive('make_environment') with pytest.raises(ValueError): module.local_borg_version({}) borgmatic-1.7.9/tests/unit/commands/000077500000000000000000000000001440467744700174425ustar00rootroot00000000000000borgmatic-1.7.9/tests/unit/commands/__init__.py000066400000000000000000000000001440467744700215410ustar00rootroot00000000000000borgmatic-1.7.9/tests/unit/commands/test_arguments.py000066400000000000000000000131621440467744700230630ustar00rootroot00000000000000import collections from flexmock import flexmock from borgmatic.commands import arguments as module def test_parse_subparser_arguments_consumes_subparser_arguments_before_subparser_name(): action_namespace = flexmock(foo=True) subparsers = { 'action': 
flexmock(parse_known_args=lambda arguments: (action_namespace, ['action'])), 'other': flexmock(), } arguments, remaining_arguments = module.parse_subparser_arguments( ('--foo', 'true', 'action'), subparsers ) assert arguments == {'action': action_namespace} assert remaining_arguments == [] def test_parse_subparser_arguments_consumes_subparser_arguments_after_subparser_name(): action_namespace = flexmock(foo=True) subparsers = { 'action': flexmock(parse_known_args=lambda arguments: (action_namespace, ['action'])), 'other': flexmock(), } arguments, remaining_arguments = module.parse_subparser_arguments( ('action', '--foo', 'true'), subparsers ) assert arguments == {'action': action_namespace} assert remaining_arguments == [] def test_parse_subparser_arguments_consumes_subparser_arguments_with_alias(): action_namespace = flexmock(foo=True) action_subparser = flexmock(parse_known_args=lambda arguments: (action_namespace, ['action'])) subparsers = { 'action': action_subparser, '-a': action_subparser, 'other': flexmock(), '-o': flexmock(), } flexmock(module).SUBPARSER_ALIASES = {'action': ['-a'], 'other': ['-o']} arguments, remaining_arguments = module.parse_subparser_arguments( ('-a', '--foo', 'true'), subparsers ) assert arguments == {'action': action_namespace} assert remaining_arguments == [] def test_parse_subparser_arguments_consumes_multiple_subparser_arguments(): action_namespace = flexmock(foo=True) other_namespace = flexmock(bar=3) subparsers = { 'action': flexmock( parse_known_args=lambda arguments: (action_namespace, ['action', '--bar', '3']) ), 'other': flexmock(parse_known_args=lambda arguments: (other_namespace, [])), } arguments, remaining_arguments = module.parse_subparser_arguments( ('action', '--foo', 'true', 'other', '--bar', '3'), subparsers ) assert arguments == {'action': action_namespace, 'other': other_namespace} assert remaining_arguments == [] def test_parse_subparser_arguments_respects_command_line_action_ordering(): other_namespace = 
flexmock() action_namespace = flexmock(foo=True) subparsers = { 'action': flexmock( parse_known_args=lambda arguments: (action_namespace, ['action', '--foo', 'true']) ), 'other': flexmock(parse_known_args=lambda arguments: (other_namespace, ['other'])), } arguments, remaining_arguments = module.parse_subparser_arguments( ('other', '--foo', 'true', 'action'), subparsers ) assert arguments == collections.OrderedDict( [('other', other_namespace), ('action', action_namespace)] ) assert remaining_arguments == [] def test_parse_subparser_arguments_applies_default_subparsers(): prune_namespace = flexmock() compact_namespace = flexmock() create_namespace = flexmock(progress=True) check_namespace = flexmock() subparsers = { 'prune': flexmock( parse_known_args=lambda arguments: (prune_namespace, ['prune', '--progress']) ), 'compact': flexmock(parse_known_args=lambda arguments: (compact_namespace, [])), 'create': flexmock(parse_known_args=lambda arguments: (create_namespace, [])), 'check': flexmock(parse_known_args=lambda arguments: (check_namespace, [])), 'other': flexmock(), } arguments, remaining_arguments = module.parse_subparser_arguments(('--progress'), subparsers) assert arguments == { 'prune': prune_namespace, 'compact': compact_namespace, 'create': create_namespace, 'check': check_namespace, } assert remaining_arguments == [] def test_parse_subparser_arguments_passes_through_unknown_arguments_before_subparser_name(): action_namespace = flexmock() subparsers = { 'action': flexmock( parse_known_args=lambda arguments: (action_namespace, ['action', '--verbosity', 'lots']) ), 'other': flexmock(), } arguments, remaining_arguments = module.parse_subparser_arguments( ('--verbosity', 'lots', 'action'), subparsers ) assert arguments == {'action': action_namespace} assert remaining_arguments == ['--verbosity', 'lots'] def test_parse_subparser_arguments_passes_through_unknown_arguments_after_subparser_name(): action_namespace = flexmock() subparsers = { 'action': flexmock( 
parse_known_args=lambda arguments: (action_namespace, ['action', '--verbosity', 'lots']) ), 'other': flexmock(), } arguments, remaining_arguments = module.parse_subparser_arguments( ('action', '--verbosity', 'lots'), subparsers ) assert arguments == {'action': action_namespace} assert remaining_arguments == ['--verbosity', 'lots'] def test_parse_subparser_arguments_parses_borg_options_and_skips_other_subparsers(): action_namespace = flexmock(options=[]) subparsers = { 'borg': flexmock(parse_known_args=lambda arguments: (action_namespace, ['borg', 'list'])), 'list': flexmock(), } arguments, remaining_arguments = module.parse_subparser_arguments(('borg', 'list'), subparsers) assert arguments == {'borg': action_namespace} assert arguments['borg'].options == ['list'] assert remaining_arguments == [] borgmatic-1.7.9/tests/unit/commands/test_borgmatic.py000066400000000000000000001275551440467744700230410ustar00rootroot00000000000000import logging import subprocess import time from flexmock import flexmock import borgmatic.hooks.command from borgmatic.commands import borgmatic as module def test_run_configuration_runs_actions_for_each_repository(): flexmock(module).should_receive('verbosity_to_log_level').and_return(logging.INFO) flexmock(module.borg_version).should_receive('local_borg_version').and_return(flexmock()) expected_results = [flexmock(), flexmock()] flexmock(module).should_receive('run_actions').and_return(expected_results[:1]).and_return( expected_results[1:] ) config = {'location': {'repositories': ['foo', 'bar']}} arguments = {'global': flexmock(monitoring_verbosity=1)} results = list(module.run_configuration('test.yaml', config, arguments)) assert results == expected_results def test_run_configuration_with_invalid_borg_version_errors(): flexmock(module).should_receive('verbosity_to_log_level').and_return(logging.INFO) flexmock(module.borg_version).should_receive('local_borg_version').and_raise(ValueError) 
flexmock(module.command).should_receive('execute_hook').never() flexmock(module.dispatch).should_receive('call_hooks').never() flexmock(module).should_receive('run_actions').never() config = {'location': {'repositories': ['foo']}} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'prune': flexmock()} list(module.run_configuration('test.yaml', config, arguments)) def test_run_configuration_logs_monitor_start_error(): flexmock(module).should_receive('verbosity_to_log_level').and_return(logging.INFO) flexmock(module.borg_version).should_receive('local_borg_version').and_return(flexmock()) flexmock(module.dispatch).should_receive('call_hooks').and_raise(OSError).and_return( None ).and_return(None).and_return(None) expected_results = [flexmock()] flexmock(module).should_receive('log_error_records').and_return(expected_results) flexmock(module).should_receive('run_actions').never() config = {'location': {'repositories': ['foo']}} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) assert results == expected_results def test_run_configuration_bails_for_monitor_start_soft_failure(): flexmock(module).should_receive('verbosity_to_log_level').and_return(logging.INFO) flexmock(module.borg_version).should_receive('local_borg_version').and_return(flexmock()) error = subprocess.CalledProcessError(borgmatic.hooks.command.SOFT_FAIL_EXIT_CODE, 'try again') flexmock(module.dispatch).should_receive('call_hooks').and_raise(error) flexmock(module).should_receive('log_error_records').never() flexmock(module).should_receive('run_actions').never() config = {'location': {'repositories': ['foo']}} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) assert results == [] def test_run_configuration_logs_actions_error(): 
flexmock(module).should_receive('verbosity_to_log_level').and_return(logging.INFO) flexmock(module.borg_version).should_receive('local_borg_version').and_return(flexmock()) flexmock(module.command).should_receive('execute_hook') flexmock(module.dispatch).should_receive('call_hooks') expected_results = [flexmock()] flexmock(module).should_receive('log_error_records').and_return(expected_results) flexmock(module).should_receive('run_actions').and_raise(OSError) config = {'location': {'repositories': ['foo']}} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False)} results = list(module.run_configuration('test.yaml', config, arguments)) assert results == expected_results def test_run_configuration_bails_for_actions_soft_failure(): flexmock(module).should_receive('verbosity_to_log_level').and_return(logging.INFO) flexmock(module.borg_version).should_receive('local_borg_version').and_return(flexmock()) flexmock(module.dispatch).should_receive('call_hooks') error = subprocess.CalledProcessError(borgmatic.hooks.command.SOFT_FAIL_EXIT_CODE, 'try again') flexmock(module).should_receive('run_actions').and_raise(error) flexmock(module).should_receive('log_error_records').never() flexmock(module.command).should_receive('considered_soft_failure').and_return(True) config = {'location': {'repositories': ['foo']}} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) assert results == [] def test_run_configuration_logs_monitor_log_error(): flexmock(module).should_receive('verbosity_to_log_level').and_return(logging.INFO) flexmock(module.borg_version).should_receive('local_borg_version').and_return(flexmock()) flexmock(module.dispatch).should_receive('call_hooks').and_return(None).and_return( None ).and_raise(OSError) expected_results = [flexmock()] flexmock(module).should_receive('log_error_records').and_return(expected_results) 
flexmock(module).should_receive('run_actions').and_return([]) config = {'location': {'repositories': ['foo']}} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) assert results == expected_results def test_run_configuration_bails_for_monitor_log_soft_failure(): flexmock(module).should_receive('verbosity_to_log_level').and_return(logging.INFO) flexmock(module.borg_version).should_receive('local_borg_version').and_return(flexmock()) error = subprocess.CalledProcessError(borgmatic.hooks.command.SOFT_FAIL_EXIT_CODE, 'try again') flexmock(module.dispatch).should_receive('call_hooks').and_return(None).and_return( None ).and_raise(error) flexmock(module).should_receive('log_error_records').never() flexmock(module).should_receive('run_actions').and_return([]) flexmock(module.command).should_receive('considered_soft_failure').and_return(True) config = {'location': {'repositories': ['foo']}} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) assert results == [] def test_run_configuration_logs_monitor_finish_error(): flexmock(module).should_receive('verbosity_to_log_level').and_return(logging.INFO) flexmock(module.borg_version).should_receive('local_borg_version').and_return(flexmock()) flexmock(module.dispatch).should_receive('call_hooks').and_return(None).and_return( None ).and_return(None).and_raise(OSError) expected_results = [flexmock()] flexmock(module).should_receive('log_error_records').and_return(expected_results) flexmock(module).should_receive('run_actions').and_return([]) config = {'location': {'repositories': ['foo']}} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) assert results == expected_results def 
test_run_configuration_bails_for_monitor_finish_soft_failure(): flexmock(module).should_receive('verbosity_to_log_level').and_return(logging.INFO) flexmock(module.borg_version).should_receive('local_borg_version').and_return(flexmock()) error = subprocess.CalledProcessError(borgmatic.hooks.command.SOFT_FAIL_EXIT_CODE, 'try again') flexmock(module.dispatch).should_receive('call_hooks').and_return(None).and_return( None ).and_raise(None).and_raise(error) flexmock(module).should_receive('log_error_records').never() flexmock(module).should_receive('run_actions').and_return([]) flexmock(module.command).should_receive('considered_soft_failure').and_return(True) config = {'location': {'repositories': ['foo']}} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) assert results == [] def test_run_configuration_logs_on_error_hook_error(): flexmock(module).should_receive('verbosity_to_log_level').and_return(logging.INFO) flexmock(module.borg_version).should_receive('local_borg_version').and_return(flexmock()) flexmock(module.command).should_receive('execute_hook').and_raise(OSError) expected_results = [flexmock(), flexmock()] flexmock(module).should_receive('log_error_records').and_return( expected_results[:1] ).and_return(expected_results[1:]) flexmock(module).should_receive('run_actions').and_raise(OSError) config = {'location': {'repositories': ['foo']}} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) assert results == expected_results def test_run_configuration_bails_for_on_error_hook_soft_failure(): flexmock(module).should_receive('verbosity_to_log_level').and_return(logging.INFO) flexmock(module.borg_version).should_receive('local_borg_version').and_return(flexmock()) error = 
subprocess.CalledProcessError(borgmatic.hooks.command.SOFT_FAIL_EXIT_CODE, 'try again') flexmock(module.command).should_receive('execute_hook').and_raise(error) expected_results = [flexmock()] flexmock(module).should_receive('log_error_records').and_return(expected_results) flexmock(module).should_receive('run_actions').and_raise(OSError) config = {'location': {'repositories': ['foo']}} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) assert results == expected_results def test_run_configuration_retries_soft_error(): # Run action first fails, second passes flexmock(module).should_receive('verbosity_to_log_level').and_return(logging.INFO) flexmock(module.borg_version).should_receive('local_borg_version').and_return(flexmock()) flexmock(module.command).should_receive('execute_hook') flexmock(module).should_receive('run_actions').and_raise(OSError).and_return([]) flexmock(module).should_receive('log_error_records').and_return([flexmock()]).once() config = {'location': {'repositories': ['foo']}, 'storage': {'retries': 1}} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) assert results == [] def test_run_configuration_retries_hard_error(): # Run action fails twice flexmock(module).should_receive('verbosity_to_log_level').and_return(logging.INFO) flexmock(module.borg_version).should_receive('local_borg_version').and_return(flexmock()) flexmock(module.command).should_receive('execute_hook') flexmock(module).should_receive('run_actions').and_raise(OSError).times(2) flexmock(module).should_receive('log_error_records').with_args( 'foo: Error running actions for repository', OSError, levelno=logging.WARNING, log_command_error_output=True, ).and_return([flexmock()]) error_logs = [flexmock()] 
flexmock(module).should_receive('log_error_records').with_args( 'foo: Error running actions for repository', OSError, ).and_return(error_logs) config = {'location': {'repositories': ['foo']}, 'storage': {'retries': 1}} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) assert results == error_logs def test_run_configuration_repos_ordered(): flexmock(module).should_receive('verbosity_to_log_level').and_return(logging.INFO) flexmock(module.borg_version).should_receive('local_borg_version').and_return(flexmock()) flexmock(module.command).should_receive('execute_hook') flexmock(module).should_receive('run_actions').and_raise(OSError).times(2) expected_results = [flexmock(), flexmock()] flexmock(module).should_receive('log_error_records').with_args( 'foo: Error running actions for repository', OSError ).and_return(expected_results[:1]).ordered() flexmock(module).should_receive('log_error_records').with_args( 'bar: Error running actions for repository', OSError ).and_return(expected_results[1:]).ordered() config = {'location': {'repositories': ['foo', 'bar']}} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) assert results == expected_results def test_run_configuration_retries_round_robbin(): flexmock(module).should_receive('verbosity_to_log_level').and_return(logging.INFO) flexmock(module.borg_version).should_receive('local_borg_version').and_return(flexmock()) flexmock(module.command).should_receive('execute_hook') flexmock(module).should_receive('run_actions').and_raise(OSError).times(4) flexmock(module).should_receive('log_error_records').with_args( 'foo: Error running actions for repository', OSError, levelno=logging.WARNING, log_command_error_output=True, ).and_return([flexmock()]).ordered() 
flexmock(module).should_receive('log_error_records').with_args( 'bar: Error running actions for repository', OSError, levelno=logging.WARNING, log_command_error_output=True, ).and_return([flexmock()]).ordered() foo_error_logs = [flexmock()] flexmock(module).should_receive('log_error_records').with_args( 'foo: Error running actions for repository', OSError ).and_return(foo_error_logs).ordered() bar_error_logs = [flexmock()] flexmock(module).should_receive('log_error_records').with_args( 'bar: Error running actions for repository', OSError ).and_return(bar_error_logs).ordered() config = {'location': {'repositories': ['foo', 'bar']}, 'storage': {'retries': 1}} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) assert results == foo_error_logs + bar_error_logs def test_run_configuration_retries_one_passes(): flexmock(module).should_receive('verbosity_to_log_level').and_return(logging.INFO) flexmock(module.borg_version).should_receive('local_borg_version').and_return(flexmock()) flexmock(module.command).should_receive('execute_hook') flexmock(module).should_receive('run_actions').and_raise(OSError).and_raise(OSError).and_return( [] ).and_raise(OSError).times(4) flexmock(module).should_receive('log_error_records').with_args( 'foo: Error running actions for repository', OSError, levelno=logging.WARNING, log_command_error_output=True, ).and_return([flexmock()]).ordered() flexmock(module).should_receive('log_error_records').with_args( 'bar: Error running actions for repository', OSError, levelno=logging.WARNING, log_command_error_output=True, ).and_return(flexmock()).ordered() error_logs = [flexmock()] flexmock(module).should_receive('log_error_records').with_args( 'bar: Error running actions for repository', OSError ).and_return(error_logs).ordered() config = {'location': {'repositories': ['foo', 'bar']}, 'storage': {'retries': 1}} arguments = {'global': 
flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) assert results == error_logs def test_run_configuration_retry_wait(): flexmock(module).should_receive('verbosity_to_log_level').and_return(logging.INFO) flexmock(module.borg_version).should_receive('local_borg_version').and_return(flexmock()) flexmock(module.command).should_receive('execute_hook') flexmock(module).should_receive('run_actions').and_raise(OSError).times(4) flexmock(module).should_receive('log_error_records').with_args( 'foo: Error running actions for repository', OSError, levelno=logging.WARNING, log_command_error_output=True, ).and_return([flexmock()]).ordered() flexmock(time).should_receive('sleep').with_args(10).and_return().ordered() flexmock(module).should_receive('log_error_records').with_args( 'foo: Error running actions for repository', OSError, levelno=logging.WARNING, log_command_error_output=True, ).and_return([flexmock()]).ordered() flexmock(time).should_receive('sleep').with_args(20).and_return().ordered() flexmock(module).should_receive('log_error_records').with_args( 'foo: Error running actions for repository', OSError, levelno=logging.WARNING, log_command_error_output=True, ).and_return([flexmock()]).ordered() flexmock(time).should_receive('sleep').with_args(30).and_return().ordered() error_logs = [flexmock()] flexmock(module).should_receive('log_error_records').with_args( 'foo: Error running actions for repository', OSError ).and_return(error_logs).ordered() config = {'location': {'repositories': ['foo']}, 'storage': {'retries': 3, 'retry_wait': 10}} arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) assert results == error_logs def test_run_configuration_retries_timeout_multiple_repos(): flexmock(module).should_receive('verbosity_to_log_level').and_return(logging.INFO) 
flexmock(module.borg_version).should_receive('local_borg_version').and_return(flexmock()) flexmock(module.command).should_receive('execute_hook') flexmock(module).should_receive('run_actions').and_raise(OSError).and_raise(OSError).and_return( [] ).and_raise(OSError).times(4) flexmock(module).should_receive('log_error_records').with_args( 'foo: Error running actions for repository', OSError, levelno=logging.WARNING, log_command_error_output=True, ).and_return([flexmock()]).ordered() flexmock(module).should_receive('log_error_records').with_args( 'bar: Error running actions for repository', OSError, levelno=logging.WARNING, log_command_error_output=True, ).and_return([flexmock()]).ordered() # Sleep before retrying foo (and passing) flexmock(time).should_receive('sleep').with_args(10).and_return().ordered() # Sleep before retrying bar (and failing) flexmock(time).should_receive('sleep').with_args(10).and_return().ordered() error_logs = [flexmock()] flexmock(module).should_receive('log_error_records').with_args( 'bar: Error running actions for repository', OSError ).and_return(error_logs).ordered() config = { 'location': {'repositories': ['foo', 'bar']}, 'storage': {'retries': 1, 'retry_wait': 10}, } arguments = {'global': flexmock(monitoring_verbosity=1, dry_run=False), 'create': flexmock()} results = list(module.run_configuration('test.yaml', config, arguments)) assert results == error_logs def test_run_actions_runs_rcreate(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.command).should_receive('execute_hook') flexmock(borgmatic.actions.rcreate).should_receive('run_rcreate').once() tuple( module.run_actions( arguments={'global': flexmock(dry_run=False), 'rcreate': flexmock()}, config_filename=flexmock(), location={'repositories': []}, storage=flexmock(), retention=flexmock(), consistency=flexmock(), hooks={}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), repository_path='repo', ) ) def 
test_run_actions_runs_transfer(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.command).should_receive('execute_hook') flexmock(borgmatic.actions.transfer).should_receive('run_transfer').once() tuple( module.run_actions( arguments={'global': flexmock(dry_run=False), 'transfer': flexmock()}, config_filename=flexmock(), location={'repositories': []}, storage=flexmock(), retention=flexmock(), consistency=flexmock(), hooks={}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), repository_path='repo', ) ) def test_run_actions_runs_create(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.command).should_receive('execute_hook') expected = flexmock() flexmock(borgmatic.actions.create).should_receive('run_create').and_yield(expected).once() result = tuple( module.run_actions( arguments={'global': flexmock(dry_run=False), 'create': flexmock()}, config_filename=flexmock(), location={'repositories': []}, storage=flexmock(), retention=flexmock(), consistency=flexmock(), hooks={}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), repository_path='repo', ) ) assert result == (expected,) def test_run_actions_runs_prune(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.command).should_receive('execute_hook') flexmock(borgmatic.actions.prune).should_receive('run_prune').once() tuple( module.run_actions( arguments={'global': flexmock(dry_run=False), 'prune': flexmock()}, config_filename=flexmock(), location={'repositories': []}, storage=flexmock(), retention=flexmock(), consistency=flexmock(), hooks={}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), repository_path='repo', ) ) def test_run_actions_runs_compact(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.command).should_receive('execute_hook') flexmock(borgmatic.actions.compact).should_receive('run_compact').once() tuple( 
module.run_actions( arguments={'global': flexmock(dry_run=False), 'compact': flexmock()}, config_filename=flexmock(), location={'repositories': []}, storage=flexmock(), retention=flexmock(), consistency=flexmock(), hooks={}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), repository_path='repo', ) ) def test_run_actions_runs_check_when_repository_enabled_for_checks(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.command).should_receive('execute_hook') flexmock(module.checks).should_receive('repository_enabled_for_checks').and_return(True) flexmock(borgmatic.actions.check).should_receive('run_check').once() tuple( module.run_actions( arguments={'global': flexmock(dry_run=False), 'check': flexmock()}, config_filename=flexmock(), location={'repositories': []}, storage=flexmock(), retention=flexmock(), consistency=flexmock(), hooks={}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), repository_path='repo', ) ) def test_run_actions_skips_check_when_repository_not_enabled_for_checks(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.command).should_receive('execute_hook') flexmock(module.checks).should_receive('repository_enabled_for_checks').and_return(False) flexmock(borgmatic.actions.check).should_receive('run_check').never() tuple( module.run_actions( arguments={'global': flexmock(dry_run=False), 'check': flexmock()}, config_filename=flexmock(), location={'repositories': []}, storage=flexmock(), retention=flexmock(), consistency=flexmock(), hooks={}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), repository_path='repo', ) ) def test_run_actions_runs_extract(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.command).should_receive('execute_hook') flexmock(borgmatic.actions.extract).should_receive('run_extract').once() tuple( module.run_actions( arguments={'global': flexmock(dry_run=False), 'extract': 
flexmock()}, config_filename=flexmock(), location={'repositories': []}, storage=flexmock(), retention=flexmock(), consistency=flexmock(), hooks={}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), repository_path='repo', ) ) def test_run_actions_runs_export_tar(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.command).should_receive('execute_hook') flexmock(borgmatic.actions.export_tar).should_receive('run_export_tar').once() tuple( module.run_actions( arguments={'global': flexmock(dry_run=False), 'export-tar': flexmock()}, config_filename=flexmock(), location={'repositories': []}, storage=flexmock(), retention=flexmock(), consistency=flexmock(), hooks={}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), repository_path='repo', ) ) def test_run_actions_runs_mount(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.command).should_receive('execute_hook') flexmock(borgmatic.actions.mount).should_receive('run_mount').once() tuple( module.run_actions( arguments={'global': flexmock(dry_run=False), 'mount': flexmock()}, config_filename=flexmock(), location={'repositories': []}, storage=flexmock(), retention=flexmock(), consistency=flexmock(), hooks={}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), repository_path='repo', ) ) def test_run_actions_runs_restore(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.command).should_receive('execute_hook') flexmock(borgmatic.actions.restore).should_receive('run_restore').once() tuple( module.run_actions( arguments={'global': flexmock(dry_run=False), 'restore': flexmock()}, config_filename=flexmock(), location={'repositories': []}, storage=flexmock(), retention=flexmock(), consistency=flexmock(), hooks={}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), repository_path='repo', ) ) def test_run_actions_runs_rlist(): 
flexmock(module).should_receive('add_custom_log_levels') flexmock(module.command).should_receive('execute_hook') expected = flexmock() flexmock(borgmatic.actions.rlist).should_receive('run_rlist').and_yield(expected).once() result = tuple( module.run_actions( arguments={'global': flexmock(dry_run=False), 'rlist': flexmock()}, config_filename=flexmock(), location={'repositories': []}, storage=flexmock(), retention=flexmock(), consistency=flexmock(), hooks={}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), repository_path='repo', ) ) assert result == (expected,) def test_run_actions_runs_list(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.command).should_receive('execute_hook') expected = flexmock() flexmock(borgmatic.actions.list).should_receive('run_list').and_yield(expected).once() result = tuple( module.run_actions( arguments={'global': flexmock(dry_run=False), 'list': flexmock()}, config_filename=flexmock(), location={'repositories': []}, storage=flexmock(), retention=flexmock(), consistency=flexmock(), hooks={}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), repository_path='repo', ) ) assert result == (expected,) def test_run_actions_runs_rinfo(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.command).should_receive('execute_hook') expected = flexmock() flexmock(borgmatic.actions.rinfo).should_receive('run_rinfo').and_yield(expected).once() result = tuple( module.run_actions( arguments={'global': flexmock(dry_run=False), 'rinfo': flexmock()}, config_filename=flexmock(), location={'repositories': []}, storage=flexmock(), retention=flexmock(), consistency=flexmock(), hooks={}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), repository_path='repo', ) ) assert result == (expected,) def test_run_actions_runs_info(): flexmock(module).should_receive('add_custom_log_levels') 
flexmock(module.command).should_receive('execute_hook') expected = flexmock() flexmock(borgmatic.actions.info).should_receive('run_info').and_yield(expected).once() result = tuple( module.run_actions( arguments={'global': flexmock(dry_run=False), 'info': flexmock()}, config_filename=flexmock(), location={'repositories': []}, storage=flexmock(), retention=flexmock(), consistency=flexmock(), hooks={}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), repository_path='repo', ) ) assert result == (expected,) def test_run_actions_runs_break_lock(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.command).should_receive('execute_hook') flexmock(borgmatic.actions.break_lock).should_receive('run_break_lock').once() tuple( module.run_actions( arguments={'global': flexmock(dry_run=False), 'break-lock': flexmock()}, config_filename=flexmock(), location={'repositories': []}, storage=flexmock(), retention=flexmock(), consistency=flexmock(), hooks={}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), repository_path='repo', ) ) def test_run_actions_runs_borg(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.command).should_receive('execute_hook') flexmock(borgmatic.actions.borg).should_receive('run_borg').once() tuple( module.run_actions( arguments={'global': flexmock(dry_run=False), 'borg': flexmock()}, config_filename=flexmock(), location={'repositories': []}, storage=flexmock(), retention=flexmock(), consistency=flexmock(), hooks={}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), repository_path='repo', ) ) def test_run_actions_runs_multiple_actions_in_argument_order(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.command).should_receive('execute_hook') flexmock(borgmatic.actions.borg).should_receive('run_borg').once().ordered() flexmock(borgmatic.actions.restore).should_receive('run_restore').once().ordered() 
tuple( module.run_actions( arguments={ 'global': flexmock(dry_run=False), 'borg': flexmock(), 'restore': flexmock(), }, config_filename=flexmock(), location={'repositories': []}, storage=flexmock(), retention=flexmock(), consistency=flexmock(), hooks={}, local_path=flexmock(), remote_path=flexmock(), local_borg_version=flexmock(), repository_path='repo', ) ) def test_load_configurations_collects_parsed_configurations_and_logs(): configuration = flexmock() other_configuration = flexmock() test_expected_logs = [flexmock(), flexmock()] other_expected_logs = [flexmock(), flexmock()] flexmock(module.validate).should_receive('parse_configuration').and_return( configuration, test_expected_logs ).and_return(other_configuration, other_expected_logs) configs, logs = tuple(module.load_configurations(('test.yaml', 'other.yaml'))) assert configs == {'test.yaml': configuration, 'other.yaml': other_configuration} assert logs == test_expected_logs + other_expected_logs def test_load_configurations_logs_warning_for_permission_error(): flexmock(module.validate).should_receive('parse_configuration').and_raise(PermissionError) configs, logs = tuple(module.load_configurations(('test.yaml',))) assert configs == {} assert {log.levelno for log in logs} == {logging.WARNING} def test_load_configurations_logs_critical_for_parse_error(): flexmock(module.validate).should_receive('parse_configuration').and_raise(ValueError) configs, logs = tuple(module.load_configurations(('test.yaml',))) assert configs == {} assert {log.levelno for log in logs} == {logging.CRITICAL} def test_log_record_does_not_raise(): module.log_record(levelno=1, foo='bar', baz='quux') def test_log_record_with_suppress_does_not_raise(): module.log_record(levelno=1, foo='bar', baz='quux', suppress_log=True) def test_log_error_records_generates_output_logs_for_message_only(): flexmock(module).should_receive('log_record').replace_with(dict) logs = tuple(module.log_error_records('Error')) assert {log['levelno'] for log in logs} 
== {logging.CRITICAL} def test_log_error_records_generates_output_logs_for_called_process_error(): flexmock(module).should_receive('log_record').replace_with(dict) flexmock(module.logger).should_receive('getEffectiveLevel').and_return(logging.WARNING) logs = tuple( module.log_error_records('Error', subprocess.CalledProcessError(1, 'ls', 'error output')) ) assert {log['levelno'] for log in logs} == {logging.CRITICAL} assert any(log for log in logs if 'error output' in str(log)) def test_log_error_records_generates_logs_for_value_error(): flexmock(module).should_receive('log_record').replace_with(dict) logs = tuple(module.log_error_records('Error', ValueError())) assert {log['levelno'] for log in logs} == {logging.CRITICAL} def test_log_error_records_generates_logs_for_os_error(): flexmock(module).should_receive('log_record').replace_with(dict) logs = tuple(module.log_error_records('Error', OSError())) assert {log['levelno'] for log in logs} == {logging.CRITICAL} def test_log_error_records_generates_nothing_for_other_error(): flexmock(module).should_receive('log_record').replace_with(dict) logs = tuple(module.log_error_records('Error', KeyError())) assert logs == () def test_get_local_path_uses_configuration_value(): assert module.get_local_path({'test.yaml': {'location': {'local_path': 'borg1'}}}) == 'borg1' def test_get_local_path_without_location_defaults_to_borg(): assert module.get_local_path({'test.yaml': {}}) == 'borg' def test_get_local_path_without_local_path_defaults_to_borg(): assert module.get_local_path({'test.yaml': {'location': {}}}) == 'borg' def test_collect_configuration_run_summary_logs_info_for_success(): flexmock(module.command).should_receive('execute_hook').never() flexmock(module.validate).should_receive('guard_configuration_contains_repository') flexmock(module).should_receive('run_configuration').and_return([]) arguments = {} logs = tuple( module.collect_configuration_run_summary_logs({'test.yaml': {}}, arguments=arguments) ) assert 
{log.levelno for log in logs} == {logging.INFO} def test_collect_configuration_run_summary_executes_hooks_for_create(): flexmock(module.validate).should_receive('guard_configuration_contains_repository') flexmock(module).should_receive('run_configuration').and_return([]) arguments = {'create': flexmock(), 'global': flexmock(monitoring_verbosity=1, dry_run=False)} logs = tuple( module.collect_configuration_run_summary_logs({'test.yaml': {}}, arguments=arguments) ) assert {log.levelno for log in logs} == {logging.INFO} def test_collect_configuration_run_summary_logs_info_for_success_with_extract(): flexmock(module.validate).should_receive('guard_single_repository_selected') flexmock(module.validate).should_receive('guard_configuration_contains_repository') flexmock(module).should_receive('run_configuration').and_return([]) arguments = {'extract': flexmock(repository='repo')} logs = tuple( module.collect_configuration_run_summary_logs({'test.yaml': {}}, arguments=arguments) ) assert {log.levelno for log in logs} == {logging.INFO} def test_collect_configuration_run_summary_logs_extract_with_repository_error(): flexmock(module.validate).should_receive('guard_configuration_contains_repository').and_raise( ValueError ) expected_logs = (flexmock(),) flexmock(module).should_receive('log_error_records').and_return(expected_logs) arguments = {'extract': flexmock(repository='repo')} logs = tuple( module.collect_configuration_run_summary_logs({'test.yaml': {}}, arguments=arguments) ) assert logs == expected_logs def test_collect_configuration_run_summary_logs_info_for_success_with_mount(): flexmock(module.validate).should_receive('guard_single_repository_selected') flexmock(module.validate).should_receive('guard_configuration_contains_repository') flexmock(module).should_receive('run_configuration').and_return([]) arguments = {'mount': flexmock(repository='repo')} logs = tuple( module.collect_configuration_run_summary_logs({'test.yaml': {}}, arguments=arguments) ) assert 
{log.levelno for log in logs} == {logging.INFO} def test_collect_configuration_run_summary_logs_mount_with_repository_error(): flexmock(module.validate).should_receive('guard_configuration_contains_repository').and_raise( ValueError ) expected_logs = (flexmock(),) flexmock(module).should_receive('log_error_records').and_return(expected_logs) arguments = {'mount': flexmock(repository='repo')} logs = tuple( module.collect_configuration_run_summary_logs({'test.yaml': {}}, arguments=arguments) ) assert logs == expected_logs def test_collect_configuration_run_summary_logs_missing_configs_error(): arguments = {'global': flexmock(config_paths=[])} expected_logs = (flexmock(),) flexmock(module).should_receive('log_error_records').and_return(expected_logs) logs = tuple(module.collect_configuration_run_summary_logs({}, arguments=arguments)) assert logs == expected_logs def test_collect_configuration_run_summary_logs_pre_hook_error(): flexmock(module.command).should_receive('execute_hook').and_raise(ValueError) expected_logs = (flexmock(),) flexmock(module).should_receive('log_error_records').and_return(expected_logs) arguments = {'create': flexmock(), 'global': flexmock(monitoring_verbosity=1, dry_run=False)} logs = tuple( module.collect_configuration_run_summary_logs({'test.yaml': {}}, arguments=arguments) ) assert logs == expected_logs def test_collect_configuration_run_summary_logs_post_hook_error(): flexmock(module.command).should_receive('execute_hook').and_return(None).and_raise(ValueError) flexmock(module.validate).should_receive('guard_configuration_contains_repository') flexmock(module).should_receive('run_configuration').and_return([]) expected_logs = (flexmock(),) flexmock(module).should_receive('log_error_records').and_return(expected_logs) arguments = {'create': flexmock(), 'global': flexmock(monitoring_verbosity=1, dry_run=False)} logs = tuple( module.collect_configuration_run_summary_logs({'test.yaml': {}}, arguments=arguments) ) assert expected_logs[0] in 
logs def test_collect_configuration_run_summary_logs_for_list_with_archive_and_repository_error(): flexmock(module.validate).should_receive('guard_configuration_contains_repository').and_raise( ValueError ) expected_logs = (flexmock(),) flexmock(module).should_receive('log_error_records').and_return(expected_logs) arguments = {'list': flexmock(repository='repo', archive='test')} logs = tuple( module.collect_configuration_run_summary_logs({'test.yaml': {}}, arguments=arguments) ) assert logs == expected_logs def test_collect_configuration_run_summary_logs_info_for_success_with_list(): flexmock(module.validate).should_receive('guard_configuration_contains_repository') flexmock(module).should_receive('run_configuration').and_return([]) arguments = {'list': flexmock(repository='repo', archive=None)} logs = tuple( module.collect_configuration_run_summary_logs({'test.yaml': {}}, arguments=arguments) ) assert {log.levelno for log in logs} == {logging.INFO} def test_collect_configuration_run_summary_logs_run_configuration_error(): flexmock(module.validate).should_receive('guard_configuration_contains_repository') flexmock(module).should_receive('run_configuration').and_return( [logging.makeLogRecord(dict(levelno=logging.CRITICAL, levelname='CRITICAL', msg='Error'))] ) flexmock(module).should_receive('log_error_records').and_return([]) arguments = {} logs = tuple( module.collect_configuration_run_summary_logs({'test.yaml': {}}, arguments=arguments) ) assert {log.levelno for log in logs} == {logging.CRITICAL} def test_collect_configuration_run_summary_logs_run_umount_error(): flexmock(module.validate).should_receive('guard_configuration_contains_repository') flexmock(module).should_receive('run_configuration').and_return([]) flexmock(module.borg_umount).should_receive('unmount_archive').and_raise(OSError) flexmock(module).should_receive('log_error_records').and_return( [logging.makeLogRecord(dict(levelno=logging.CRITICAL, levelname='CRITICAL', msg='Error'))] ) arguments = 
{'umount': flexmock(mount_point='/mnt')} logs = tuple( module.collect_configuration_run_summary_logs({'test.yaml': {}}, arguments=arguments) ) assert {log.levelno for log in logs} == {logging.INFO, logging.CRITICAL} def test_collect_configuration_run_summary_logs_outputs_merged_json_results(): flexmock(module.validate).should_receive('guard_configuration_contains_repository') flexmock(module).should_receive('run_configuration').and_return(['foo', 'bar']).and_return( ['baz'] ) stdout = flexmock() stdout.should_receive('write').with_args('["foo", "bar", "baz"]').once() flexmock(module.sys).stdout = stdout arguments = {} tuple( module.collect_configuration_run_summary_logs( {'test.yaml': {}, 'test2.yaml': {}}, arguments=arguments ) ) borgmatic-1.7.9/tests/unit/config/000077500000000000000000000000001440467744700171065ustar00rootroot00000000000000borgmatic-1.7.9/tests/unit/config/__init__.py000066400000000000000000000000001440467744700212050ustar00rootroot00000000000000borgmatic-1.7.9/tests/unit/config/test_checks.py000066400000000000000000000013051440467744700217560ustar00rootroot00000000000000from borgmatic.config import checks as module def test_repository_enabled_for_checks_defaults_to_enabled_for_all_repositories(): enabled = module.repository_enabled_for_checks('repo.borg', consistency={}) assert enabled def test_repository_enabled_for_checks_is_enabled_for_specified_repositories(): enabled = module.repository_enabled_for_checks( 'repo.borg', consistency={'check_repositories': ['repo.borg', 'other.borg']} ) assert enabled def test_repository_enabled_for_checks_is_disabled_for_other_repositories(): enabled = module.repository_enabled_for_checks( 'repo.borg', consistency={'check_repositories': ['other.borg']} ) assert not enabled borgmatic-1.7.9/tests/unit/config/test_collect.py000066400000000000000000000166301440467744700221520ustar00rootroot00000000000000import sys from flexmock import flexmock from borgmatic.config import collect as module def 
test_get_default_config_paths_includes_absolute_user_config_path(): flexmock(module.os, environ={'XDG_CONFIG_HOME': None, 'HOME': '/home/user'}) config_paths = module.get_default_config_paths() assert '/home/user/.config/borgmatic/config.yaml' in config_paths def test_get_default_config_paths_prefers_xdg_config_home_for_user_config_path(): flexmock(module.os, environ={'XDG_CONFIG_HOME': '/home/user/.etc', 'HOME': '/home/user'}) config_paths = module.get_default_config_paths() assert '/home/user/.etc/borgmatic/config.yaml' in config_paths def test_get_default_config_paths_does_not_expand_home_when_false(): flexmock(module.os, environ={'HOME': '/home/user'}) config_paths = module.get_default_config_paths(expand_home=False) assert '$HOME/.config/borgmatic/config.yaml' in config_paths def test_collect_config_filenames_collects_given_files(): config_paths = ('config.yaml', 'other.yaml') flexmock(module.os.path).should_receive('isdir').and_return(False) config_filenames = tuple(module.collect_config_filenames(config_paths)) assert config_filenames == config_paths def test_collect_config_filenames_collects_yml_file_endings(): config_paths = ('config.yaml', '/etc/borgmatic.d') mock_path = flexmock(module.os.path) mock_path.should_receive('exists').and_return(True) mock_path.should_receive('isdir').with_args('config.yaml').and_return(False) mock_path.should_receive('isdir').with_args('/etc/borgmatic.d').and_return(True) mock_path.should_receive('isdir').with_args('/etc/borgmatic.d/foo.yml').and_return(False) flexmock(module.os).should_receive('access').and_return(True) flexmock(module.os).should_receive('listdir') flexmock(sys.modules['builtins']).should_receive('sorted').and_return(['foo.yml']) config_filenames = tuple(module.collect_config_filenames(config_paths)) assert config_filenames == ('config.yaml', '/etc/borgmatic.d/foo.yml') def test_collect_config_filenames_collects_files_from_given_directories_and_ignores_sub_directories(): config_paths = ('config.yaml', 
'/etc/borgmatic.d') mock_path = flexmock(module.os.path) mock_path.should_receive('exists').and_return(True) mock_path.should_receive('isdir').with_args('config.yaml').and_return(False) mock_path.should_receive('isdir').with_args('/etc/borgmatic.d').and_return(True) mock_path.should_receive('isdir').with_args('/etc/borgmatic.d/foo.yaml').and_return(False) mock_path.should_receive('isdir').with_args('/etc/borgmatic.d/bar').and_return(True) mock_path.should_receive('isdir').with_args('/etc/borgmatic.d/baz.yaml').and_return(False) flexmock(module.os).should_receive('access').and_return(True) flexmock(module.os).should_receive('listdir') flexmock(sys.modules['builtins']).should_receive('sorted').and_return( ['foo.yaml', 'bar', 'baz.yaml'] ) config_filenames = tuple(module.collect_config_filenames(config_paths)) assert config_filenames == ( 'config.yaml', '/etc/borgmatic.d/foo.yaml', '/etc/borgmatic.d/baz.yaml', ) def test_collect_config_filenames_collects_files_from_given_directories_and_ignores_non_yaml_filenames(): config_paths = ('/etc/borgmatic.d',) mock_path = flexmock(module.os.path) mock_path.should_receive('exists').and_return(True) mock_path.should_receive('isdir').with_args('/etc/borgmatic.d').and_return(True) mock_path.should_receive('isdir').with_args('/etc/borgmatic.d/foo.yaml').and_return(False) mock_path.should_receive('isdir').with_args('/etc/borgmatic.d/bar.yaml~').and_return(False) mock_path.should_receive('isdir').with_args('/etc/borgmatic.d/baz.txt').and_return(False) flexmock(module.os).should_receive('access').and_return(True) flexmock(module.os).should_receive('listdir') flexmock(sys.modules['builtins']).should_receive('sorted').and_return( ['foo.yaml', 'bar.yaml~', 'baz.txt'] ) config_filenames = tuple(module.collect_config_filenames(config_paths)) assert config_filenames == ('/etc/borgmatic.d/foo.yaml',) def test_collect_config_filenames_skips_permission_denied_directories(): config_paths = ('config.yaml', '/etc/borgmatic.d') mock_path = 
flexmock(module.os.path) mock_path.should_receive('exists').and_return(True) mock_path.should_receive('isdir').with_args('config.yaml').and_return(False) mock_path.should_receive('isdir').with_args('/etc/borgmatic.d').and_return(True) flexmock(module.os).should_receive('access').and_return(False) flexmock(module.os).should_receive('listdir') flexmock(sys.modules['builtins']).should_receive('sorted').and_return(['config.yaml']) config_filenames = tuple(module.collect_config_filenames(config_paths)) assert config_filenames == ('config.yaml',) def test_collect_config_filenames_skips_etc_borgmatic_config_dot_yaml_if_it_does_not_exist(): config_paths = ('config.yaml', '/etc/borgmatic/config.yaml') mock_path = flexmock(module.os.path) mock_path.should_receive('exists').with_args('config.yaml').and_return(True) mock_path.should_receive('exists').with_args('/etc/borgmatic/config.yaml').and_return(False) mock_path.should_receive('isdir').with_args('config.yaml').and_return(False) mock_path.should_receive('isdir').with_args('/etc/borgmatic/config.yaml').and_return(True) config_filenames = tuple(module.collect_config_filenames(config_paths)) assert config_filenames == ('config.yaml',) def test_collect_config_filenames_skips_etc_borgmatic_dot_d_if_it_does_not_exist(): config_paths = ('config.yaml', '/etc/borgmatic.d') mock_path = flexmock(module.os.path) mock_path.should_receive('exists').with_args('config.yaml').and_return(True) mock_path.should_receive('exists').with_args('/etc/borgmatic.d').and_return(False) mock_path.should_receive('isdir').with_args('config.yaml').and_return(False) mock_path.should_receive('isdir').with_args('/etc/borgmatic.d').and_return(True) config_filenames = tuple(module.collect_config_filenames(config_paths)) assert config_filenames == ('config.yaml',) def test_collect_config_filenames_skips_non_canonical_etc_borgmatic_dot_d_if_it_does_not_exist(): config_paths = ('config.yaml', '/etc/../etc/borgmatic.d') mock_path = flexmock(module.os.path) 
mock_path.should_receive('exists').with_args('config.yaml').and_return(True) mock_path.should_receive('exists').with_args('/etc/../etc/borgmatic.d').and_return(False) mock_path.should_receive('isdir').with_args('config.yaml').and_return(False) mock_path.should_receive('isdir').with_args('/etc/../etc/borgmatic.d').and_return(True) config_filenames = tuple(module.collect_config_filenames(config_paths)) assert config_filenames == ('config.yaml',) def test_collect_config_filenames_includes_other_directory_if_it_does_not_exist(): config_paths = ('config.yaml', '/my/directory') mock_path = flexmock(module.os.path) mock_path.should_receive('exists').with_args('config.yaml').and_return(True) mock_path.should_receive('exists').with_args('/my/directory').and_return(False) mock_path.should_receive('isdir').with_args('config.yaml').and_return(False) mock_path.should_receive('isdir').with_args('/my/directory').and_return(True) config_filenames = tuple(module.collect_config_filenames(config_paths)) assert config_filenames == config_paths borgmatic-1.7.9/tests/unit/config/test_convert.py000066400000000000000000000124041440467744700222000ustar00rootroot00000000000000import os from collections import OrderedDict, defaultdict, namedtuple import pytest from flexmock import flexmock from borgmatic.config import convert as module Parsed_config = namedtuple('Parsed_config', ('location', 'storage', 'retention', 'consistency')) def test_convert_section_generates_integer_value_for_integer_type_in_schema(): flexmock(module.yaml.comments).should_receive('CommentedMap').replace_with(OrderedDict) source_section_config = OrderedDict([('check_last', '3')]) section_schema = {'type': 'object', 'properties': {'check_last': {'type': 'integer'}}} destination_config = module._convert_section(source_section_config, section_schema) assert destination_config == OrderedDict([('check_last', 3)]) def test_convert_legacy_parsed_config_transforms_source_config_to_mapping(): 
flexmock(module.yaml.comments).should_receive('CommentedMap').replace_with(OrderedDict) flexmock(module.generate).should_receive('add_comments_to_configuration_object') source_config = Parsed_config( location=OrderedDict([('source_directories', '/home'), ('repository', 'hostname.borg')]), storage=OrderedDict([('encryption_passphrase', 'supersecret')]), retention=OrderedDict([('keep_daily', 7)]), consistency=OrderedDict([('checks', 'repository')]), ) source_excludes = ['/var'] schema = { 'type': 'object', 'properties': defaultdict(lambda: {'type': 'object', 'properties': {}}), } destination_config = module.convert_legacy_parsed_config(source_config, source_excludes, schema) assert destination_config == OrderedDict( [ ( 'location', OrderedDict( [ ('source_directories', ['/home']), ('repositories', ['hostname.borg']), ('exclude_patterns', ['/var']), ] ), ), ('storage', OrderedDict([('encryption_passphrase', 'supersecret')])), ('retention', OrderedDict([('keep_daily', 7)])), ('consistency', OrderedDict([('checks', ['repository'])])), ] ) def test_convert_legacy_parsed_config_splits_space_separated_values(): flexmock(module.yaml.comments).should_receive('CommentedMap').replace_with(OrderedDict) flexmock(module.generate).should_receive('add_comments_to_configuration_object') source_config = Parsed_config( location=OrderedDict( [('source_directories', '/home /etc'), ('repository', 'hostname.borg')] ), storage=OrderedDict(), retention=OrderedDict(), consistency=OrderedDict([('checks', 'repository archives')]), ) source_excludes = ['/var'] schema = { 'type': 'object', 'properties': defaultdict(lambda: {'type': 'object', 'properties': {}}), } destination_config = module.convert_legacy_parsed_config(source_config, source_excludes, schema) assert destination_config == OrderedDict( [ ( 'location', OrderedDict( [ ('source_directories', ['/home', '/etc']), ('repositories', ['hostname.borg']), ('exclude_patterns', ['/var']), ] ), ), ('storage', OrderedDict()), ('retention', 
OrderedDict()), ('consistency', OrderedDict([('checks', ['repository', 'archives'])])), ] ) def test_guard_configuration_upgraded_raises_when_only_source_config_present(): flexmock(os.path).should_receive('exists').with_args('config').and_return(True) flexmock(os.path).should_receive('exists').with_args('config.yaml').and_return(False) flexmock(os.path).should_receive('exists').with_args('other.yaml').and_return(False) with pytest.raises(module.Legacy_configuration_not_upgraded): module.guard_configuration_upgraded('config', ('config.yaml', 'other.yaml')) def test_guard_configuration_upgraded_does_not_raise_when_only_destination_config_present(): flexmock(os.path).should_receive('exists').with_args('config').and_return(False) flexmock(os.path).should_receive('exists').with_args('config.yaml').and_return(False) flexmock(os.path).should_receive('exists').with_args('other.yaml').and_return(True) module.guard_configuration_upgraded('config', ('config.yaml', 'other.yaml')) def test_guard_configuration_upgraded_does_not_raise_when_both_configs_present(): flexmock(os.path).should_receive('exists').with_args('config').and_return(True) flexmock(os.path).should_receive('exists').with_args('config.yaml').and_return(False) flexmock(os.path).should_receive('exists').with_args('other.yaml').and_return(True) module.guard_configuration_upgraded('config', ('config.yaml', 'other.yaml')) def test_guard_configuration_upgraded_does_not_raise_when_neither_config_present(): flexmock(os.path).should_receive('exists').with_args('config').and_return(False) flexmock(os.path).should_receive('exists').with_args('config.yaml').and_return(False) flexmock(os.path).should_receive('exists').with_args('other.yaml').and_return(False) module.guard_configuration_upgraded('config', ('config.yaml', 'other.yaml')) borgmatic-1.7.9/tests/unit/config/test_environment.py000066400000000000000000000055001440467744700230630ustar00rootroot00000000000000import pytest from borgmatic.config import environment as 
module def test_env(monkeypatch): monkeypatch.setenv('MY_CUSTOM_VALUE', 'foo') config = {'key': 'Hello $MY_CUSTOM_VALUE'} module.resolve_env_variables(config) assert config == {'key': 'Hello $MY_CUSTOM_VALUE'} def test_env_braces(monkeypatch): monkeypatch.setenv('MY_CUSTOM_VALUE', 'foo') config = {'key': 'Hello ${MY_CUSTOM_VALUE}'} module.resolve_env_variables(config) assert config == {'key': 'Hello foo'} def test_env_multi(monkeypatch): monkeypatch.setenv('MY_CUSTOM_VALUE', 'foo') monkeypatch.setenv('MY_CUSTOM_VALUE2', 'bar') config = {'key': 'Hello ${MY_CUSTOM_VALUE}${MY_CUSTOM_VALUE2}'} module.resolve_env_variables(config) assert config == {'key': 'Hello foobar'} def test_env_escape(monkeypatch): monkeypatch.setenv('MY_CUSTOM_VALUE', 'foo') monkeypatch.setenv('MY_CUSTOM_VALUE2', 'bar') config = {'key': r'Hello ${MY_CUSTOM_VALUE} \${MY_CUSTOM_VALUE}'} module.resolve_env_variables(config) assert config == {'key': r'Hello foo ${MY_CUSTOM_VALUE}'} def test_env_default_value(monkeypatch): monkeypatch.delenv('MY_CUSTOM_VALUE', raising=False) config = {'key': 'Hello ${MY_CUSTOM_VALUE:-bar}'} module.resolve_env_variables(config) assert config == {'key': 'Hello bar'} def test_env_unknown(monkeypatch): monkeypatch.delenv('MY_CUSTOM_VALUE', raising=False) config = {'key': 'Hello ${MY_CUSTOM_VALUE}'} with pytest.raises(ValueError): module.resolve_env_variables(config) def test_env_full(monkeypatch): monkeypatch.setenv('MY_CUSTOM_VALUE', 'foo') monkeypatch.delenv('MY_CUSTOM_VALUE2', raising=False) config = { 'key': 'Hello $MY_CUSTOM_VALUE is not resolved', 'dict': { 'key': 'value', 'anotherdict': { 'key': 'My ${MY_CUSTOM_VALUE} here', 'other': '${MY_CUSTOM_VALUE}', 'escaped': r'\${MY_CUSTOM_VALUE}', 'list': [ '/home/${MY_CUSTOM_VALUE}/.local', '/var/log/', '/home/${MY_CUSTOM_VALUE2:-bar}/.config', ], }, }, 'list': [ '/home/${MY_CUSTOM_VALUE}/.local', '/var/log/', '/home/${MY_CUSTOM_VALUE2-bar}/.config', ], } module.resolve_env_variables(config) assert config == { 'key': 
'Hello $MY_CUSTOM_VALUE is not resolved', 'dict': { 'key': 'value', 'anotherdict': { 'key': 'My foo here', 'other': 'foo', 'escaped': '${MY_CUSTOM_VALUE}', 'list': ['/home/foo/.local', '/var/log/', '/home/bar/.config'], }, }, 'list': ['/home/foo/.local', '/var/log/', '/home/bar/.config'], } borgmatic-1.7.9/tests/unit/config/test_generate.py000066400000000000000000000126551440467744700223220ustar00rootroot00000000000000from collections import OrderedDict import pytest from flexmock import flexmock from borgmatic.config import generate as module def test_schema_to_sample_configuration_generates_config_map_with_examples(): flexmock(module.yaml.comments).should_receive('CommentedMap').replace_with(OrderedDict) flexmock(module).should_receive('add_comments_to_configuration_object') schema = { 'type': 'object', 'properties': OrderedDict( [ ( 'section1', { 'type': 'object', 'properties': {'field1': OrderedDict([('example', 'Example 1')])}, }, ), ( 'section2', { 'type': 'object', 'properties': OrderedDict( [ ('field2', {'example': 'Example 2'}), ('field3', {'example': 'Example 3'}), ] ), }, ), ] ), } config = module._schema_to_sample_configuration(schema) assert config == OrderedDict( [ ('section1', OrderedDict([('field1', 'Example 1')])), ('section2', OrderedDict([('field2', 'Example 2'), ('field3', 'Example 3')])), ] ) def test_schema_to_sample_configuration_generates_config_sequence_of_strings_with_example(): flexmock(module.yaml.comments).should_receive('CommentedSeq').replace_with(list) flexmock(module).should_receive('add_comments_to_configuration_sequence') schema = {'type': 'array', 'items': {'type': 'string'}, 'example': ['hi']} config = module._schema_to_sample_configuration(schema) assert config == ['hi'] def test_schema_to_sample_configuration_generates_config_sequence_of_maps_with_examples(): flexmock(module.yaml.comments).should_receive('CommentedSeq').replace_with(list) flexmock(module).should_receive('add_comments_to_configuration_sequence') 
flexmock(module).should_receive('add_comments_to_configuration_object') schema = { 'type': 'array', 'items': { 'type': 'object', 'properties': OrderedDict( [('field1', {'example': 'Example 1'}), ('field2', {'example': 'Example 2'})] ), }, } config = module._schema_to_sample_configuration(schema) assert config == [OrderedDict([('field1', 'Example 1'), ('field2', 'Example 2')])] def test_schema_to_sample_configuration_with_unsupported_schema_raises(): schema = {'gobbledygook': [{'type': 'not-your'}]} with pytest.raises(ValueError): module._schema_to_sample_configuration(schema) def test_merge_source_configuration_into_destination_inserts_map_fields(): destination_config = {'foo': 'dest1', 'bar': 'dest2'} source_config = {'foo': 'source1', 'baz': 'source2'} flexmock(module).should_receive('remove_commented_out_sentinel') flexmock(module).should_receive('yaml.comments.CommentedSeq').replace_with(list) module.merge_source_configuration_into_destination(destination_config, source_config) assert destination_config == {'foo': 'source1', 'bar': 'dest2', 'baz': 'source2'} def test_merge_source_configuration_into_destination_inserts_nested_map_fields(): destination_config = {'foo': {'first': 'dest1', 'second': 'dest2'}, 'bar': 'dest3'} source_config = {'foo': {'first': 'source1'}} flexmock(module).should_receive('remove_commented_out_sentinel') flexmock(module).should_receive('yaml.comments.CommentedSeq').replace_with(list) module.merge_source_configuration_into_destination(destination_config, source_config) assert destination_config == {'foo': {'first': 'source1', 'second': 'dest2'}, 'bar': 'dest3'} def test_merge_source_configuration_into_destination_inserts_sequence_fields(): destination_config = {'foo': ['dest1', 'dest2'], 'bar': ['dest3'], 'baz': ['dest4']} source_config = {'foo': ['source1'], 'bar': ['source2', 'source3']} flexmock(module).should_receive('remove_commented_out_sentinel') flexmock(module).should_receive('yaml.comments.CommentedSeq').replace_with(list) 
module.merge_source_configuration_into_destination(destination_config, source_config) assert destination_config == { 'foo': ['source1'], 'bar': ['source2', 'source3'], 'baz': ['dest4'], } def test_merge_source_configuration_into_destination_inserts_sequence_of_maps(): destination_config = {'foo': [{'first': 'dest1', 'second': 'dest2'}], 'bar': 'dest3'} source_config = {'foo': [{'first': 'source1'}, {'other': 'source2'}]} flexmock(module).should_receive('remove_commented_out_sentinel') flexmock(module).should_receive('yaml.comments.CommentedSeq').replace_with(list) module.merge_source_configuration_into_destination(destination_config, source_config) assert destination_config == { 'foo': [{'first': 'source1', 'second': 'dest2'}, {'other': 'source2'}], 'bar': 'dest3', } def test_merge_source_configuration_into_destination_without_source_does_nothing(): original_destination_config = {'foo': 'dest1', 'bar': 'dest2'} destination_config = dict(original_destination_config) module.merge_source_configuration_into_destination(destination_config, None) assert destination_config == original_destination_config borgmatic-1.7.9/tests/unit/config/test_legacy.py000066400000000000000000000164561440467744700217770ustar00rootroot00000000000000from collections import OrderedDict import pytest from flexmock import flexmock from borgmatic.config import legacy as module def test_option_should_create_config_option(): option = module.option('name', bool, required=False) assert option == module.Config_option('name', bool, False) def test_option_should_create_config_option_with_defaults(): option = module.option('name') assert option == module.Config_option('name', str, True) def test_validate_configuration_format_with_valid_config_should_not_raise(): parser = flexmock() parser.should_receive('sections').and_return(('section', 'other')) parser.should_receive('options').with_args('section').and_return(('stuff',)) parser.should_receive('options').with_args('other').and_return(('such',)) 
config_format = ( module.Section_format( 'section', options=(module.Config_option('stuff', str, required=True),) ), module.Section_format('other', options=(module.Config_option('such', str, required=True),)), ) module.validate_configuration_format(parser, config_format) def test_validate_configuration_format_with_missing_required_section_should_raise(): parser = flexmock() parser.should_receive('sections').and_return(('section',)) config_format = ( module.Section_format( 'section', options=(module.Config_option('stuff', str, required=True),) ), # At least one option in this section is required, so the section is required. module.Section_format( 'missing', options=( module.Config_option('such', str, required=False), module.Config_option('things', str, required=True), ), ), ) with pytest.raises(ValueError): module.validate_configuration_format(parser, config_format) def test_validate_configuration_format_with_missing_optional_section_should_not_raise(): parser = flexmock() parser.should_receive('sections').and_return(('section',)) parser.should_receive('options').with_args('section').and_return(('stuff',)) config_format = ( module.Section_format( 'section', options=(module.Config_option('stuff', str, required=True),) ), # No options in the section are required, so the section is optional. 
module.Section_format( 'missing', options=( module.Config_option('such', str, required=False), module.Config_option('things', str, required=False), ), ), ) module.validate_configuration_format(parser, config_format) def test_validate_configuration_format_with_unknown_section_should_raise(): parser = flexmock() parser.should_receive('sections').and_return(('section', 'extra')) config_format = (module.Section_format('section', options=()),) with pytest.raises(ValueError): module.validate_configuration_format(parser, config_format) def test_validate_configuration_format_with_missing_required_option_should_raise(): parser = flexmock() parser.should_receive('sections').and_return(('section',)) parser.should_receive('options').with_args('section').and_return(('option',)) config_format = ( module.Section_format( 'section', options=( module.Config_option('option', str, required=True), module.Config_option('missing', str, required=True), ), ), ) with pytest.raises(ValueError): module.validate_configuration_format(parser, config_format) def test_validate_configuration_format_with_missing_optional_option_should_not_raise(): parser = flexmock() parser.should_receive('sections').and_return(('section',)) parser.should_receive('options').with_args('section').and_return(('option',)) config_format = ( module.Section_format( 'section', options=( module.Config_option('option', str, required=True), module.Config_option('missing', str, required=False), ), ), ) module.validate_configuration_format(parser, config_format) def test_validate_configuration_format_with_extra_option_should_raise(): parser = flexmock() parser.should_receive('sections').and_return(('section',)) parser.should_receive('options').with_args('section').and_return(('option', 'extra')) config_format = ( module.Section_format( 'section', options=(module.Config_option('option', str, required=True),) ), ) with pytest.raises(ValueError): module.validate_configuration_format(parser, config_format) def 
test_parse_section_options_should_return_section_options(): parser = flexmock() parser.should_receive('get').with_args('section', 'foo').and_return('value') parser.should_receive('getint').with_args('section', 'bar').and_return(1) parser.should_receive('getboolean').never() parser.should_receive('has_option').with_args('section', 'foo').and_return(True) parser.should_receive('has_option').with_args('section', 'bar').and_return(True) section_format = module.Section_format( 'section', ( module.Config_option('foo', str, required=True), module.Config_option('bar', int, required=True), ), ) config = module.parse_section_options(parser, section_format) assert config == OrderedDict((('foo', 'value'), ('bar', 1))) def test_parse_section_options_for_missing_section_should_return_empty_dict(): parser = flexmock() parser.should_receive('get').never() parser.should_receive('getint').never() parser.should_receive('getboolean').never() parser.should_receive('has_option').with_args('section', 'foo').and_return(False) parser.should_receive('has_option').with_args('section', 'bar').and_return(False) section_format = module.Section_format( 'section', ( module.Config_option('foo', str, required=False), module.Config_option('bar', int, required=False), ), ) config = module.parse_section_options(parser, section_format) assert config == OrderedDict() def insert_mock_parser(): parser = flexmock() parser.should_receive('read').and_return([flexmock()]) module.RawConfigParser = lambda: parser return parser def test_parse_configuration_should_return_section_configs(): parser = insert_mock_parser() config_format = (flexmock(name='items'), flexmock(name='things')) mock_module = flexmock(module) mock_module.should_receive('validate_configuration_format').with_args( parser, config_format ).once() mock_section_configs = (flexmock(), flexmock()) for section_format, section_config in zip(config_format, mock_section_configs): mock_module.should_receive('parse_section_options').with_args( parser, 
section_format ).and_return(section_config).once() parsed_config = module.parse_configuration('filename', config_format) assert parsed_config == type(parsed_config)(*mock_section_configs) def test_parse_configuration_with_file_open_error_should_raise(): parser = insert_mock_parser() parser.should_receive('read').and_return([]) with pytest.raises(ValueError): module.parse_configuration('filename', config_format=flexmock()) borgmatic-1.7.9/tests/unit/config/test_normalize.py000066400000000000000000000064651440467744700225320ustar00rootroot00000000000000import pytest from borgmatic.config import normalize as module @pytest.mark.parametrize( 'config,expected_config,produces_logs', ( ( {'location': {'exclude_if_present': '.nobackup'}}, {'location': {'exclude_if_present': ['.nobackup']}}, False, ), ( {'location': {'exclude_if_present': ['.nobackup']}}, {'location': {'exclude_if_present': ['.nobackup']}}, False, ), ( {'location': {'source_directories': ['foo', 'bar']}}, {'location': {'source_directories': ['foo', 'bar']}}, False, ), ({'location': None}, {'location': None}, False,), ( {'storage': {'compression': 'yes_please'}}, {'storage': {'compression': 'yes_please'}}, False, ), ({'storage': None}, {'storage': None}, False,), ( {'hooks': {'healthchecks': 'https://example.com'}}, {'hooks': {'healthchecks': {'ping_url': 'https://example.com'}}}, False, ), ( {'hooks': {'cronitor': 'https://example.com'}}, {'hooks': {'cronitor': {'ping_url': 'https://example.com'}}}, False, ), ( {'hooks': {'pagerduty': 'https://example.com'}}, {'hooks': {'pagerduty': {'integration_key': 'https://example.com'}}}, False, ), ( {'hooks': {'cronhub': 'https://example.com'}}, {'hooks': {'cronhub': {'ping_url': 'https://example.com'}}}, False, ), ({'hooks': None}, {'hooks': None}, False,), ( {'consistency': {'checks': ['archives']}}, {'consistency': {'checks': [{'name': 'archives'}]}}, False, ), ( {'consistency': {'checks': ['archives']}}, {'consistency': {'checks': [{'name': 'archives'}]}}, False, 
), ({'consistency': None}, {'consistency': None}, False,), ({'location': {'numeric_owner': False}}, {'location': {'numeric_ids': False}}, False,), ({'location': {'bsd_flags': False}}, {'location': {'flags': False}}, False,), ( {'storage': {'remote_rate_limit': False}}, {'storage': {'upload_rate_limit': False}}, False, ), ( {'location': {'repositories': ['foo@bar:/repo']}}, {'location': {'repositories': ['ssh://foo@bar/repo']}}, True, ), ( {'location': {'repositories': ['foo@bar:repo']}}, {'location': {'repositories': ['ssh://foo@bar/./repo']}}, True, ), ( {'location': {'repositories': ['foo@bar:~/repo']}}, {'location': {'repositories': ['ssh://foo@bar/~/repo']}}, True, ), ( {'location': {'repositories': ['ssh://foo@bar:1234/repo']}}, {'location': {'repositories': ['ssh://foo@bar:1234/repo']}}, False, ), ), ) def test_normalize_applies_hard_coded_normalization_to_config( config, expected_config, produces_logs ): logs = module.normalize('test.yaml', config) assert config == expected_config if produces_logs: assert logs else: assert logs == [] borgmatic-1.7.9/tests/unit/config/test_override.py000066400000000000000000000054131440467744700223410ustar00rootroot00000000000000import pytest import ruamel.yaml from flexmock import flexmock from borgmatic.config import override as module def test_set_values_with_empty_keys_bails(): config = {} module.set_values(config, keys=(), value='value') assert config == {} def test_set_values_with_one_key_sets_it_into_config(): config = {} module.set_values(config, keys=('key',), value='value') assert config == {'key': 'value'} def test_set_values_with_one_key_overwrites_existing_key(): config = {'key': 'old_value', 'other': 'other_value'} module.set_values(config, keys=('key',), value='value') assert config == {'key': 'value', 'other': 'other_value'} def test_set_values_with_multiple_keys_creates_hierarchy(): config = {} module.set_values(config, ('section', 'key'), 'value') assert config == {'section': {'key': 'value'}} def 
test_set_values_with_multiple_keys_updates_hierarchy(): config = {'section': {'other': 'other_value'}} module.set_values(config, ('section', 'key'), 'value') assert config == {'section': {'key': 'value', 'other': 'other_value'}} def test_parse_overrides_splits_keys_and_values(): flexmock(module).should_receive('convert_value_type').replace_with(lambda value: value) raw_overrides = ['section.my_option=value1', 'section.other_option=value2'] expected_result = ( (('section', 'my_option'), 'value1'), (('section', 'other_option'), 'value2'), ) module.parse_overrides(raw_overrides) == expected_result def test_parse_overrides_allows_value_with_equal_sign(): flexmock(module).should_receive('convert_value_type').replace_with(lambda value: value) raw_overrides = ['section.option=this===value'] expected_result = ((('section', 'option'), 'this===value'),) module.parse_overrides(raw_overrides) == expected_result def test_parse_overrides_raises_on_missing_equal_sign(): flexmock(module).should_receive('convert_value_type').replace_with(lambda value: value) raw_overrides = ['section.option'] with pytest.raises(ValueError): module.parse_overrides(raw_overrides) def test_parse_overrides_raises_on_invalid_override_value(): flexmock(module).should_receive('convert_value_type').and_raise(ruamel.yaml.parser.ParserError) raw_overrides = ['section.option=[in valid]'] with pytest.raises(ValueError): module.parse_overrides(raw_overrides) def test_parse_overrides_allows_value_with_single_key(): flexmock(module).should_receive('convert_value_type').replace_with(lambda value: value) raw_overrides = ['option=value'] expected_result = ((('option',), 'value'),) module.parse_overrides(raw_overrides) == expected_result def test_parse_overrides_handles_empty_overrides(): module.parse_overrides(raw_overrides=None) == () borgmatic-1.7.9/tests/unit/config/test_validate.py000066400000000000000000000141061440467744700223120ustar00rootroot00000000000000import pytest from flexmock import flexmock from 
borgmatic.config import validate as module def test_format_json_error_path_element_formats_array_index(): module.format_json_error_path_element(3) == '[3]' def test_format_json_error_path_element_formats_property(): module.format_json_error_path_element('foo') == '.foo' def test_format_json_error_formats_error_including_path(): flexmock(module).format_json_error_path_element = lambda element: '.{}'.format(element) error = flexmock(message='oops', path=['foo', 'bar']) assert module.format_json_error(error) == "At 'foo.bar': oops" def test_format_json_error_formats_error_without_path(): flexmock(module).should_receive('format_json_error_path_element').never() error = flexmock(message='oops', path=[]) assert module.format_json_error(error) == 'At the top level: oops' def test_validation_error_string_contains_errors(): flexmock(module).format_json_error = lambda error: error.message error = module.Validation_error('config.yaml', ('oops', 'uh oh')) result = str(error) assert 'config.yaml' in result assert 'oops' in result assert 'uh oh' in result def test_apply_locical_validation_raises_if_unknown_repository_in_check_repositories(): flexmock(module).format_json_error = lambda error: error.message with pytest.raises(module.Validation_error): module.apply_logical_validation( 'config.yaml', { 'location': {'repositories': ['repo.borg', 'other.borg']}, 'retention': {'keep_secondly': 1000}, 'consistency': {'check_repositories': ['repo.borg', 'unknown.borg']}, }, ) def test_apply_locical_validation_does_not_raise_if_known_repository_in_check_repositories(): module.apply_logical_validation( 'config.yaml', { 'location': {'repositories': ['repo.borg', 'other.borg']}, 'retention': {'keep_secondly': 1000}, 'consistency': {'check_repositories': ['repo.borg']}, }, ) def test_apply_logical_validation_does_not_raise_if_archive_name_format_and_prefix_present(): module.apply_logical_validation( 'config.yaml', { 'storage': {'archive_name_format': '{hostname}-{now}'}, 'retention': 
{'prefix': '{hostname}-'}, 'consistency': {'prefix': '{hostname}-'}, }, ) def test_apply_logical_validation_does_not_raise_otherwise(): module.apply_logical_validation('config.yaml', {'retention': {'keep_secondly': 1000}}) def test_normalize_repository_path_passes_through_remote_repository(): repository = 'example.org:test.borg' module.normalize_repository_path(repository) == repository def test_normalize_repository_path_passes_through_absolute_repository(): repository = '/foo/bar/test.borg' flexmock(module.os.path).should_receive('abspath').and_return(repository) module.normalize_repository_path(repository) == repository def test_normalize_repository_path_resolves_relative_repository(): repository = 'test.borg' absolute = '/foo/bar/test.borg' flexmock(module.os.path).should_receive('abspath').and_return(absolute) module.normalize_repository_path(repository) == absolute def test_repositories_match_does_not_raise(): flexmock(module).should_receive('normalize_repository_path') module.repositories_match('foo', 'bar') def test_guard_configuration_contains_repository_does_not_raise_when_repository_in_config(): flexmock(module).should_receive('repositories_match').replace_with( lambda first, second: first == second ) module.guard_configuration_contains_repository( repository='repo', configurations={'config.yaml': {'location': {'repositories': ['repo']}}} ) def test_guard_configuration_contains_repository_does_not_raise_when_repository_not_given(): module.guard_configuration_contains_repository( repository=None, configurations={'config.yaml': {'location': {'repositories': ['repo']}}} ) def test_guard_configuration_contains_repository_errors_when_repository_missing_from_config(): flexmock(module).should_receive('repositories_match').replace_with( lambda first, second: first == second ) with pytest.raises(ValueError): module.guard_configuration_contains_repository( repository='nope', configurations={'config.yaml': {'location': {'repositories': ['repo', 'repo2']}}}, ) def 
test_guard_configuration_contains_repository_errors_when_repository_matches_config_twice(): flexmock(module).should_receive('repositories_match').replace_with( lambda first, second: first == second ) with pytest.raises(ValueError): module.guard_configuration_contains_repository( repository='repo', configurations={ 'config.yaml': {'location': {'repositories': ['repo', 'repo2']}}, 'other.yaml': {'location': {'repositories': ['repo']}}, }, ) def test_guard_single_repository_selected_raises_when_multiple_repositories_configured_and_none_selected(): with pytest.raises(ValueError): module.guard_single_repository_selected( repository=None, configurations={'config.yaml': {'location': {'repositories': ['repo', 'repo2']}}}, ) def test_guard_single_repository_selected_does_not_raise_when_single_repository_configured_and_none_selected(): module.guard_single_repository_selected( repository=None, configurations={'config.yaml': {'location': {'repositories': ['repo']}}}, ) def test_guard_single_repository_selected_does_not_raise_when_no_repositories_configured_and_one_selected(): module.guard_single_repository_selected( repository='repo', configurations={'config.yaml': {'location': {'repositories': []}}}, ) def test_guard_single_repository_selected_does_not_raise_when_repositories_configured_and_one_selected(): module.guard_single_repository_selected( repository='repo', configurations={'config.yaml': {'location': {'repositories': ['repo', 'repo2']}}}, ) borgmatic-1.7.9/tests/unit/hooks/000077500000000000000000000000001440467744700167645ustar00rootroot00000000000000borgmatic-1.7.9/tests/unit/hooks/__init__.py000066400000000000000000000000001440467744700210630ustar00rootroot00000000000000borgmatic-1.7.9/tests/unit/hooks/test_command.py000066400000000000000000000077741440467744700220320ustar00rootroot00000000000000import logging import subprocess from flexmock import flexmock from borgmatic.hooks import command as module def 
test_interpolate_context_passes_through_command_without_variable(): assert module.interpolate_context('test.yaml', 'pre-backup', 'ls', {'foo': 'bar'}) == 'ls' def test_interpolate_context_passes_through_command_with_unknown_variable(): assert ( module.interpolate_context('test.yaml', 'pre-backup', 'ls {baz}', {'foo': 'bar'}) == 'ls {baz}' ) def test_interpolate_context_interpolates_variables(): context = {'foo': 'bar', 'baz': 'quux'} assert ( module.interpolate_context('test.yaml', 'pre-backup', 'ls {foo}{baz} {baz}', context) == 'ls barquux quux' ) def test_interpolate_context_does_not_touch_unknown_variables(): context = {'foo': 'bar', 'baz': 'quux'} assert module.interpolate_context('test.yaml', 'pre-backup', 'ls {wtf}', context) == 'ls {wtf}' def test_execute_hook_invokes_each_command(): flexmock(module).should_receive('interpolate_context').replace_with( lambda config_file, hook_description, command, context: command ) flexmock(module.execute).should_receive('execute_command').with_args( [':'], output_log_level=logging.WARNING, shell=True ).once() module.execute_hook([':'], None, 'config.yaml', 'pre-backup', dry_run=False) def test_execute_hook_with_multiple_commands_invokes_each_command(): flexmock(module).should_receive('interpolate_context').replace_with( lambda config_file, hook_description, command, context: command ) flexmock(module.execute).should_receive('execute_command').with_args( [':'], output_log_level=logging.WARNING, shell=True ).once() flexmock(module.execute).should_receive('execute_command').with_args( ['true'], output_log_level=logging.WARNING, shell=True ).once() module.execute_hook([':', 'true'], None, 'config.yaml', 'pre-backup', dry_run=False) def test_execute_hook_with_umask_sets_that_umask(): flexmock(module).should_receive('interpolate_context').replace_with( lambda config_file, hook_description, command, context: command ) flexmock(module.os).should_receive('umask').with_args(0o77).and_return(0o22).once() 
flexmock(module.os).should_receive('umask').with_args(0o22).once() flexmock(module.execute).should_receive('execute_command').with_args( [':'], output_log_level=logging.WARNING, shell=True ) module.execute_hook([':'], 77, 'config.yaml', 'pre-backup', dry_run=False) def test_execute_hook_with_dry_run_skips_commands(): flexmock(module).should_receive('interpolate_context').replace_with( lambda config_file, hook_description, command, context: command ) flexmock(module.execute).should_receive('execute_command').never() module.execute_hook([':', 'true'], None, 'config.yaml', 'pre-backup', dry_run=True) def test_execute_hook_with_empty_commands_does_not_raise(): module.execute_hook([], None, 'config.yaml', 'post-backup', dry_run=False) def test_execute_hook_on_error_logs_as_error(): flexmock(module).should_receive('interpolate_context').replace_with( lambda config_file, hook_description, command, context: command ) flexmock(module.execute).should_receive('execute_command').with_args( [':'], output_log_level=logging.ERROR, shell=True ).once() module.execute_hook([':'], None, 'config.yaml', 'on-error', dry_run=False) def test_considered_soft_failure_treats_soft_fail_exit_code_as_soft_fail(): error = subprocess.CalledProcessError(module.SOFT_FAIL_EXIT_CODE, 'try again') assert module.considered_soft_failure('config.yaml', error) def test_considered_soft_failure_does_not_treat_other_exit_code_as_soft_fail(): error = subprocess.CalledProcessError(1, 'error') assert not module.considered_soft_failure('config.yaml', error) def test_considered_soft_failure_does_not_treat_other_exception_type_as_soft_fail(): assert not module.considered_soft_failure('config.yaml', Exception()) borgmatic-1.7.9/tests/unit/hooks/test_cronhub.py000066400000000000000000000067051440467744700220450ustar00rootroot00000000000000from flexmock import flexmock from borgmatic.hooks import cronhub as module def test_ping_monitor_rewrites_ping_url_for_start_state(): hook_config = {'ping_url': 
'https://example.com/start/abcdef'} flexmock(module.requests).should_receive('get').with_args( 'https://example.com/start/abcdef' ).and_return(flexmock(ok=True)) module.ping_monitor( hook_config, 'config.yaml', module.monitor.State.START, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_rewrites_ping_url_and_state_for_start_state(): hook_config = {'ping_url': 'https://example.com/ping/abcdef'} flexmock(module.requests).should_receive('get').with_args( 'https://example.com/start/abcdef' ).and_return(flexmock(ok=True)) module.ping_monitor( hook_config, 'config.yaml', module.monitor.State.START, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_rewrites_ping_url_for_finish_state(): hook_config = {'ping_url': 'https://example.com/start/abcdef'} flexmock(module.requests).should_receive('get').with_args( 'https://example.com/finish/abcdef' ).and_return(flexmock(ok=True)) module.ping_monitor( hook_config, 'config.yaml', module.monitor.State.FINISH, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_rewrites_ping_url_for_fail_state(): hook_config = {'ping_url': 'https://example.com/start/abcdef'} flexmock(module.requests).should_receive('get').with_args( 'https://example.com/fail/abcdef' ).and_return(flexmock(ok=True)) module.ping_monitor( hook_config, 'config.yaml', module.monitor.State.FAIL, monitoring_log_level=1, dry_run=False ) def test_ping_monitor_dry_run_does_not_hit_ping_url(): hook_config = {'ping_url': 'https://example.com'} flexmock(module.requests).should_receive('get').never() module.ping_monitor( hook_config, 'config.yaml', module.monitor.State.START, monitoring_log_level=1, dry_run=True ) def test_ping_monitor_with_connection_error_logs_warning(): hook_config = {'ping_url': 'https://example.com/start/abcdef'} flexmock(module.requests).should_receive('get').and_raise( module.requests.exceptions.ConnectionError ) flexmock(module.logger).should_receive('warning').once() module.ping_monitor( hook_config, 'config.yaml', 
module.monitor.State.START, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_with_other_error_logs_warning(): hook_config = {'ping_url': 'https://example.com/start/abcdef'} response = flexmock(ok=False) response.should_receive('raise_for_status').and_raise( module.requests.exceptions.RequestException ) flexmock(module.requests).should_receive('get').with_args( 'https://example.com/start/abcdef' ).and_return(response) flexmock(module.logger).should_receive('warning').once() module.ping_monitor( hook_config, 'config.yaml', module.monitor.State.START, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_with_unsupported_monitoring_state(): hook_config = {'ping_url': 'https://example.com'} flexmock(module.requests).should_receive('get').never() module.ping_monitor( hook_config, 'config.yaml', module.monitor.State.LOG, monitoring_log_level=1, dry_run=False, ) borgmatic-1.7.9/tests/unit/hooks/test_cronitor.py000066400000000000000000000056351440467744700222450ustar00rootroot00000000000000from flexmock import flexmock from borgmatic.hooks import cronitor as module def test_ping_monitor_hits_ping_url_for_start_state(): hook_config = {'ping_url': 'https://example.com'} flexmock(module.requests).should_receive('get').with_args('https://example.com/run').and_return( flexmock(ok=True) ) module.ping_monitor( hook_config, 'config.yaml', module.monitor.State.START, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_hits_ping_url_for_finish_state(): hook_config = {'ping_url': 'https://example.com'} flexmock(module.requests).should_receive('get').with_args( 'https://example.com/complete' ).and_return(flexmock(ok=True)) module.ping_monitor( hook_config, 'config.yaml', module.monitor.State.FINISH, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_hits_ping_url_for_fail_state(): hook_config = {'ping_url': 'https://example.com'} flexmock(module.requests).should_receive('get').with_args( 'https://example.com/fail' 
).and_return(flexmock(ok=True)) module.ping_monitor( hook_config, 'config.yaml', module.monitor.State.FAIL, monitoring_log_level=1, dry_run=False ) def test_ping_monitor_dry_run_does_not_hit_ping_url(): hook_config = {'ping_url': 'https://example.com'} flexmock(module.requests).should_receive('get').never() module.ping_monitor( hook_config, 'config.yaml', module.monitor.State.START, monitoring_log_level=1, dry_run=True ) def test_ping_monitor_with_connection_error_logs_warning(): hook_config = {'ping_url': 'https://example.com'} flexmock(module.requests).should_receive('get').and_raise( module.requests.exceptions.ConnectionError ) flexmock(module.logger).should_receive('warning').once() module.ping_monitor( hook_config, 'config.yaml', module.monitor.State.START, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_with_other_error_logs_warning(): hook_config = {'ping_url': 'https://example.com'} response = flexmock(ok=False) response.should_receive('raise_for_status').and_raise( module.requests.exceptions.RequestException ) flexmock(module.requests).should_receive('get').with_args('https://example.com/run').and_return( response ) flexmock(module.logger).should_receive('warning').once() module.ping_monitor( hook_config, 'config.yaml', module.monitor.State.START, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_with_unsupported_monitoring_state(): hook_config = {'ping_url': 'https://example.com'} flexmock(module.requests).should_receive('get').never() module.ping_monitor( hook_config, 'config.yaml', module.monitor.State.LOG, monitoring_log_level=1, dry_run=False, ) borgmatic-1.7.9/tests/unit/hooks/test_dispatch.py000066400000000000000000000104621440467744700221770ustar00rootroot00000000000000import sys import pytest from flexmock import flexmock from borgmatic.hooks import dispatch as module def hook_function(config, log_prefix, thing, value): ''' This test function gets mocked out below. 
''' pass def test_call_hook_invokes_module_function_with_arguments_and_returns_value(): hooks = {'super_hook': flexmock(), 'other_hook': flexmock()} expected_return_value = flexmock() test_module = sys.modules[__name__] flexmock(module).HOOK_NAME_TO_MODULE = {'super_hook': test_module} flexmock(test_module).should_receive('hook_function').with_args( hooks['super_hook'], 'prefix', 55, value=66 ).and_return(expected_return_value).once() return_value = module.call_hook('hook_function', hooks, 'prefix', 'super_hook', 55, value=66) assert return_value == expected_return_value def test_call_hook_without_hook_config_invokes_module_function_with_arguments_and_returns_value(): hooks = {'other_hook': flexmock()} expected_return_value = flexmock() test_module = sys.modules[__name__] flexmock(module).HOOK_NAME_TO_MODULE = {'super_hook': test_module} flexmock(test_module).should_receive('hook_function').with_args( {}, 'prefix', 55, value=66 ).and_return(expected_return_value).once() return_value = module.call_hook('hook_function', hooks, 'prefix', 'super_hook', 55, value=66) assert return_value == expected_return_value def test_call_hook_without_corresponding_module_raises(): hooks = {'super_hook': flexmock(), 'other_hook': flexmock()} test_module = sys.modules[__name__] flexmock(module).HOOK_NAME_TO_MODULE = {'other_hook': test_module} flexmock(test_module).should_receive('hook_function').never() with pytest.raises(ValueError): module.call_hook('hook_function', hooks, 'prefix', 'super_hook', 55, value=66) def test_call_hooks_calls_each_hook_and_collects_return_values(): hooks = {'super_hook': flexmock(), 'other_hook': flexmock()} expected_return_values = {'super_hook': flexmock(), 'other_hook': flexmock()} flexmock(module).should_receive('call_hook').and_return( expected_return_values['super_hook'] ).and_return(expected_return_values['other_hook']) return_values = module.call_hooks('do_stuff', hooks, 'prefix', ('super_hook', 'other_hook'), 55) assert return_values == 
expected_return_values def test_call_hooks_calls_skips_return_values_for_missing_hooks(): hooks = {'super_hook': flexmock()} expected_return_values = {'super_hook': flexmock()} flexmock(module).should_receive('call_hook').and_return(expected_return_values['super_hook']) return_values = module.call_hooks('do_stuff', hooks, 'prefix', ('super_hook', 'other_hook'), 55) assert return_values == expected_return_values def test_call_hooks_calls_skips_return_values_for_null_hooks(): hooks = {'super_hook': flexmock(), 'other_hook': None} expected_return_values = {'super_hook': flexmock()} flexmock(module).should_receive('call_hook').and_return(expected_return_values['super_hook']) return_values = module.call_hooks('do_stuff', hooks, 'prefix', ('super_hook', 'other_hook'), 55) assert return_values == expected_return_values def test_call_hooks_even_if_unconfigured_calls_each_hook_and_collects_return_values(): hooks = {'super_hook': flexmock(), 'other_hook': flexmock()} expected_return_values = {'super_hook': flexmock(), 'other_hook': flexmock()} flexmock(module).should_receive('call_hook').and_return( expected_return_values['super_hook'] ).and_return(expected_return_values['other_hook']) return_values = module.call_hooks_even_if_unconfigured( 'do_stuff', hooks, 'prefix', ('super_hook', 'other_hook'), 55 ) assert return_values == expected_return_values def test_call_hooks_even_if_unconfigured_calls_each_hook_configured_or_not_and_collects_return_values(): hooks = {'other_hook': flexmock()} expected_return_values = {'super_hook': flexmock(), 'other_hook': flexmock()} flexmock(module).should_receive('call_hook').and_return( expected_return_values['super_hook'] ).and_return(expected_return_values['other_hook']) return_values = module.call_hooks_even_if_unconfigured( 'do_stuff', hooks, 'prefix', ('super_hook', 'other_hook'), 55 ) assert return_values == expected_return_values 
borgmatic-1.7.9/tests/unit/hooks/test_dump.py000066400000000000000000000055721440467744700213530ustar00rootroot00000000000000import pytest from flexmock import flexmock from borgmatic.hooks import dump as module def test_make_database_dump_path_joins_arguments(): assert module.make_database_dump_path('/tmp', 'super_databases') == '/tmp/super_databases' def test_make_database_dump_path_defaults_without_source_directory(): assert module.make_database_dump_path(None, 'super_databases') == '~/.borgmatic/super_databases' def test_make_database_dump_filename_uses_name_and_hostname(): flexmock(module.os.path).should_receive('expanduser').and_return('databases') assert ( module.make_database_dump_filename('databases', 'test', 'hostname') == 'databases/hostname/test' ) def test_make_database_dump_filename_without_hostname_defaults_to_localhost(): flexmock(module.os.path).should_receive('expanduser').and_return('databases') assert module.make_database_dump_filename('databases', 'test') == 'databases/localhost/test' def test_make_database_dump_filename_with_invalid_name_raises(): flexmock(module.os.path).should_receive('expanduser').and_return('databases') with pytest.raises(ValueError): module.make_database_dump_filename('databases', 'invalid/name') def test_create_parent_directory_for_dump_does_not_raise(): flexmock(module.os).should_receive('makedirs') module.create_parent_directory_for_dump('/path/to/parent') def test_create_named_pipe_for_dump_does_not_raise(): flexmock(module).should_receive('create_parent_directory_for_dump') flexmock(module.os).should_receive('mkfifo') module.create_named_pipe_for_dump('/path/to/pipe') def test_remove_database_dumps_removes_dump_path(): flexmock(module.os.path).should_receive('expanduser').and_return('databases/localhost') flexmock(module.os.path).should_receive('exists').and_return(True) flexmock(module.shutil).should_receive('rmtree').with_args('databases/localhost').once() module.remove_database_dumps('databases', 'SuperDB', 
'test.yaml', dry_run=False) def test_remove_database_dumps_with_dry_run_skips_removal(): flexmock(module.os.path).should_receive('expanduser').and_return('databases/localhost') flexmock(module.os.path).should_receive('exists').never() flexmock(module.shutil).should_receive('rmtree').never() module.remove_database_dumps('databases', 'SuperDB', 'test.yaml', dry_run=True) def test_remove_database_dumps_without_dump_path_present_skips_removal(): flexmock(module.os.path).should_receive('expanduser').and_return('databases/localhost') flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module.shutil).should_receive('rmtree').never() module.remove_database_dumps('databases', 'SuperDB', 'test.yaml', dry_run=False) def test_convert_glob_patterns_to_borg_patterns_removes_leading_slash(): assert module.convert_glob_patterns_to_borg_patterns(('/etc/foo/bar',)) == ['sh:etc/foo/bar'] borgmatic-1.7.9/tests/unit/hooks/test_healthchecks.py000066400000000000000000000264671440467744700230420ustar00rootroot00000000000000from flexmock import flexmock from borgmatic.hooks import healthchecks as module def test_forgetful_buffering_handler_emit_collects_log_records(): handler = module.Forgetful_buffering_handler(byte_capacity=100, log_level=1) handler.emit(flexmock(getMessage=lambda: 'foo')) handler.emit(flexmock(getMessage=lambda: 'bar')) assert handler.buffer == ['foo\n', 'bar\n'] assert not handler.forgot def test_forgetful_buffering_handler_emit_collects_log_records_with_zero_byte_capacity(): handler = module.Forgetful_buffering_handler(byte_capacity=0, log_level=1) handler.emit(flexmock(getMessage=lambda: 'foo')) handler.emit(flexmock(getMessage=lambda: 'bar')) assert handler.buffer == ['foo\n', 'bar\n'] assert not handler.forgot def test_forgetful_buffering_handler_emit_forgets_log_records_when_capacity_reached(): handler = module.Forgetful_buffering_handler(byte_capacity=len('foo\nbar\n'), log_level=1) handler.emit(flexmock(getMessage=lambda: 'foo')) assert 
handler.buffer == ['foo\n'] handler.emit(flexmock(getMessage=lambda: 'bar')) assert handler.buffer == ['foo\n', 'bar\n'] handler.emit(flexmock(getMessage=lambda: 'baz')) assert handler.buffer == ['bar\n', 'baz\n'] handler.emit(flexmock(getMessage=lambda: 'quux')) assert handler.buffer == ['quux\n'] assert handler.forgot def test_format_buffered_logs_for_payload_flattens_log_buffer(): handler = module.Forgetful_buffering_handler(byte_capacity=100, log_level=1) handler.buffer = ['foo\n', 'bar\n'] logger = flexmock(handlers=[handler]) logger.should_receive('removeHandler') flexmock(module.logging).should_receive('getLogger').and_return(logger) payload = module.format_buffered_logs_for_payload() assert payload == 'foo\nbar\n' def test_format_buffered_logs_for_payload_inserts_truncation_indicator_when_logs_forgotten(): handler = module.Forgetful_buffering_handler(byte_capacity=100, log_level=1) handler.buffer = ['foo\n', 'bar\n'] handler.forgot = True logger = flexmock(handlers=[handler]) logger.should_receive('removeHandler') flexmock(module.logging).should_receive('getLogger').and_return(logger) payload = module.format_buffered_logs_for_payload() assert payload == '...\nfoo\nbar\n' def test_format_buffered_logs_for_payload_without_handler_produces_empty_payload(): logger = flexmock(handlers=[module.logging.Handler()]) logger.should_receive('removeHandler') flexmock(module.logging).should_receive('getLogger').and_return(logger) payload = module.format_buffered_logs_for_payload() assert payload == '' def mock_logger(): logger = flexmock() logger.should_receive('addHandler') logger.should_receive('removeHandler') flexmock(module.logging).should_receive('getLogger').and_return(logger) def test_initialize_monitor_creates_log_handler_with_ping_body_limit(): ping_body_limit = 100 monitoring_log_level = 1 mock_logger() flexmock(module).should_receive('Forgetful_buffering_handler').with_args( ping_body_limit - len(module.PAYLOAD_TRUNCATION_INDICATOR), monitoring_log_level 
).once() module.initialize_monitor( {'ping_body_limit': ping_body_limit}, 'test.yaml', monitoring_log_level, dry_run=False ) def test_initialize_monitor_creates_log_handler_with_default_ping_body_limit(): monitoring_log_level = 1 mock_logger() flexmock(module).should_receive('Forgetful_buffering_handler').with_args( module.DEFAULT_PING_BODY_LIMIT_BYTES - len(module.PAYLOAD_TRUNCATION_INDICATOR), monitoring_log_level, ).once() module.initialize_monitor({}, 'test.yaml', monitoring_log_level, dry_run=False) def test_initialize_monitor_creates_log_handler_with_zero_ping_body_limit(): ping_body_limit = 0 monitoring_log_level = 1 mock_logger() flexmock(module).should_receive('Forgetful_buffering_handler').with_args( ping_body_limit, monitoring_log_level ).once() module.initialize_monitor( {'ping_body_limit': ping_body_limit}, 'test.yaml', monitoring_log_level, dry_run=False ) def test_initialize_monitor_creates_log_handler_when_send_logs_true(): mock_logger() flexmock(module).should_receive('Forgetful_buffering_handler').once() module.initialize_monitor( {'send_logs': True}, 'test.yaml', monitoring_log_level=1, dry_run=False ) def test_initialize_monitor_bails_when_send_logs_false(): mock_logger() flexmock(module).should_receive('Forgetful_buffering_handler').never() module.initialize_monitor( {'send_logs': False}, 'test.yaml', monitoring_log_level=1, dry_run=False ) def test_ping_monitor_hits_ping_url_for_start_state(): flexmock(module).should_receive('Forgetful_buffering_handler') hook_config = {'ping_url': 'https://example.com'} flexmock(module.requests).should_receive('post').with_args( 'https://example.com/start', data=''.encode('utf-8'), verify=True ).and_return(flexmock(ok=True)) module.ping_monitor( hook_config, 'config.yaml', state=module.monitor.State.START, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_hits_ping_url_for_finish_state(): hook_config = {'ping_url': 'https://example.com'} payload = 'data' 
flexmock(module).should_receive('format_buffered_logs_for_payload').and_return(payload) flexmock(module.requests).should_receive('post').with_args( 'https://example.com', data=payload.encode('utf-8'), verify=True ).and_return(flexmock(ok=True)) module.ping_monitor( hook_config, 'config.yaml', state=module.monitor.State.FINISH, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_hits_ping_url_for_fail_state(): hook_config = {'ping_url': 'https://example.com'} payload = 'data' flexmock(module).should_receive('format_buffered_logs_for_payload').and_return(payload) flexmock(module.requests).should_receive('post').with_args( 'https://example.com/fail', data=payload.encode('utf'), verify=True ).and_return(flexmock(ok=True)) module.ping_monitor( hook_config, 'config.yaml', state=module.monitor.State.FAIL, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_hits_ping_url_for_log_state(): hook_config = {'ping_url': 'https://example.com'} payload = 'data' flexmock(module).should_receive('format_buffered_logs_for_payload').and_return(payload) flexmock(module.requests).should_receive('post').with_args( 'https://example.com/log', data=payload.encode('utf'), verify=True ).and_return(flexmock(ok=True)) module.ping_monitor( hook_config, 'config.yaml', state=module.monitor.State.LOG, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_with_ping_uuid_hits_corresponding_url(): hook_config = {'ping_url': 'abcd-efgh-ijkl-mnop'} payload = 'data' flexmock(module).should_receive('format_buffered_logs_for_payload').and_return(payload) flexmock(module.requests).should_receive('post').with_args( 'https://hc-ping.com/{}'.format(hook_config['ping_url']), data=payload.encode('utf-8'), verify=True, ).and_return(flexmock(ok=True)) module.ping_monitor( hook_config, 'config.yaml', state=module.monitor.State.FINISH, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_skips_ssl_verification_when_verify_tls_false(): hook_config = {'ping_url': 
'https://example.com', 'verify_tls': False} payload = 'data' flexmock(module).should_receive('format_buffered_logs_for_payload').and_return(payload) flexmock(module.requests).should_receive('post').with_args( 'https://example.com', data=payload.encode('utf-8'), verify=False ).and_return(flexmock(ok=True)) module.ping_monitor( hook_config, 'config.yaml', state=module.monitor.State.FINISH, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_executes_ssl_verification_when_verify_tls_true(): hook_config = {'ping_url': 'https://example.com', 'verify_tls': True} payload = 'data' flexmock(module).should_receive('format_buffered_logs_for_payload').and_return(payload) flexmock(module.requests).should_receive('post').with_args( 'https://example.com', data=payload.encode('utf-8'), verify=True ).and_return(flexmock(ok=True)) module.ping_monitor( hook_config, 'config.yaml', state=module.monitor.State.FINISH, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_dry_run_does_not_hit_ping_url(): flexmock(module).should_receive('Forgetful_buffering_handler') hook_config = {'ping_url': 'https://example.com'} flexmock(module.requests).should_receive('post').never() module.ping_monitor( hook_config, 'config.yaml', state=module.monitor.State.START, monitoring_log_level=1, dry_run=True, ) def test_ping_monitor_does_not_hit_ping_url_when_states_not_matching(): flexmock(module).should_receive('Forgetful_buffering_handler') hook_config = {'ping_url': 'https://example.com', 'states': ['finish']} flexmock(module.requests).should_receive('post').never() module.ping_monitor( hook_config, 'config.yaml', state=module.monitor.State.START, monitoring_log_level=1, dry_run=True, ) def test_ping_monitor_hits_ping_url_when_states_matching(): flexmock(module).should_receive('Forgetful_buffering_handler') hook_config = {'ping_url': 'https://example.com', 'states': ['start', 'finish']} flexmock(module.requests).should_receive('post').with_args( 'https://example.com/start', 
data=''.encode('utf-8'), verify=True ).and_return(flexmock(ok=True)) module.ping_monitor( hook_config, 'config.yaml', state=module.monitor.State.START, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_with_connection_error_logs_warning(): flexmock(module).should_receive('Forgetful_buffering_handler') hook_config = {'ping_url': 'https://example.com'} flexmock(module.requests).should_receive('post').with_args( 'https://example.com/start', data=''.encode('utf-8'), verify=True ).and_raise(module.requests.exceptions.ConnectionError) flexmock(module.logger).should_receive('warning').once() module.ping_monitor( hook_config, 'config.yaml', state=module.monitor.State.START, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_with_other_error_logs_warning(): flexmock(module).should_receive('Forgetful_buffering_handler') hook_config = {'ping_url': 'https://example.com'} response = flexmock(ok=False) response.should_receive('raise_for_status').and_raise( module.requests.exceptions.RequestException ) flexmock(module.requests).should_receive('post').with_args( 'https://example.com/start', data=''.encode('utf-8'), verify=True ).and_return(response) flexmock(module.logger).should_receive('warning').once() module.ping_monitor( hook_config, 'config.yaml', state=module.monitor.State.START, monitoring_log_level=1, dry_run=False, ) borgmatic-1.7.9/tests/unit/hooks/test_mongodb.py000066400000000000000000000265061440467744700220330ustar00rootroot00000000000000import logging import pytest from flexmock import flexmock from borgmatic.hooks import mongodb as module def test_dump_databases_runs_mongodump_for_each_database(): databases = [{'name': 'foo'}, {'name': 'bar'}] processes = [flexmock(), flexmock()] flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module.dump).should_receive('make_database_dump_filename').and_return( 'databases/localhost/foo' ).and_return('databases/localhost/bar') 
flexmock(module.dump).should_receive('create_named_pipe_for_dump') for name, process in zip(('foo', 'bar'), processes): flexmock(module).should_receive('execute_command').with_args( ['mongodump', '--db', name, '--archive', '>', 'databases/localhost/{}'.format(name)], shell=True, run_to_completion=False, ).and_return(process).once() assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == processes def test_dump_databases_with_dry_run_skips_mongodump(): databases = [{'name': 'foo'}, {'name': 'bar'}] flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module.dump).should_receive('make_database_dump_filename').and_return( 'databases/localhost/foo' ).and_return('databases/localhost/bar') flexmock(module.dump).should_receive('create_named_pipe_for_dump').never() flexmock(module).should_receive('execute_command').never() assert module.dump_databases(databases, 'test.yaml', {}, dry_run=True) == [] def test_dump_databases_runs_mongodump_with_hostname_and_port(): databases = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}] process = flexmock() flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module.dump).should_receive('make_database_dump_filename').and_return( 'databases/database.example.org/foo' ) flexmock(module.dump).should_receive('create_named_pipe_for_dump') flexmock(module).should_receive('execute_command').with_args( [ 'mongodump', '--host', 'database.example.org', '--port', '5433', '--db', 'foo', '--archive', '>', 'databases/database.example.org/foo', ], shell=True, run_to_completion=False, ).and_return(process).once() assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] def test_dump_databases_runs_mongodump_with_username_and_password(): databases = [ { 'name': 'foo', 'username': 'mongo', 'password': 'trustsome1', 'authentication_database': 'admin', } ] process = flexmock() flexmock(module).should_receive('make_dump_path').and_return('') 
flexmock(module.dump).should_receive('make_database_dump_filename').and_return( 'databases/localhost/foo' ) flexmock(module.dump).should_receive('create_named_pipe_for_dump') flexmock(module).should_receive('execute_command').with_args( [ 'mongodump', '--username', 'mongo', '--password', 'trustsome1', '--authenticationDatabase', 'admin', '--db', 'foo', '--archive', '>', 'databases/localhost/foo', ], shell=True, run_to_completion=False, ).and_return(process).once() assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] def test_dump_databases_runs_mongodump_with_directory_format(): databases = [{'name': 'foo', 'format': 'directory'}] flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module.dump).should_receive('make_database_dump_filename').and_return( 'databases/localhost/foo' ) flexmock(module.dump).should_receive('create_parent_directory_for_dump') flexmock(module.dump).should_receive('create_named_pipe_for_dump').never() flexmock(module).should_receive('execute_command').with_args( ['mongodump', '--out', 'databases/localhost/foo', '--db', 'foo'], shell=True, ).and_return(flexmock()).once() assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [] def test_dump_databases_runs_mongodump_with_options(): databases = [{'name': 'foo', 'options': '--stuff=such'}] process = flexmock() flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module.dump).should_receive('make_database_dump_filename').and_return( 'databases/localhost/foo' ) flexmock(module.dump).should_receive('create_named_pipe_for_dump') flexmock(module).should_receive('execute_command').with_args( ['mongodump', '--db', 'foo', '--stuff=such', '--archive', '>', 'databases/localhost/foo'], shell=True, run_to_completion=False, ).and_return(process).once() assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] def test_dump_databases_runs_mongodumpall_for_all_databases(): databases = 
[{'name': 'all'}] process = flexmock() flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module.dump).should_receive('make_database_dump_filename').and_return( 'databases/localhost/all' ) flexmock(module.dump).should_receive('create_named_pipe_for_dump') flexmock(module).should_receive('execute_command').with_args( ['mongodump', '--archive', '>', 'databases/localhost/all'], shell=True, run_to_completion=False, ).and_return(process).once() assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] def test_restore_database_dump_runs_mongorestore(): database_config = [{'name': 'foo'}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('make_dump_path') flexmock(module.dump).should_receive('make_database_dump_filename') flexmock(module).should_receive('execute_command_with_processes').with_args( ['mongorestore', '--archive', '--drop', '--db', 'foo'], processes=[extract_process], output_log_level=logging.DEBUG, input_file=extract_process.stdout, ).once() module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process ) def test_restore_database_dump_errors_on_multiple_database_config(): database_config = [{'name': 'foo'}, {'name': 'bar'}] flexmock(module).should_receive('make_dump_path') flexmock(module.dump).should_receive('make_database_dump_filename') flexmock(module).should_receive('execute_command_with_processes').never() flexmock(module).should_receive('execute_command').never() with pytest.raises(ValueError): module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=False, extract_process=flexmock() ) def test_restore_database_dump_runs_mongorestore_with_hostname_and_port(): database_config = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('make_dump_path') flexmock(module.dump).should_receive('make_database_dump_filename') 
flexmock(module).should_receive('execute_command_with_processes').with_args( [ 'mongorestore', '--archive', '--drop', '--db', 'foo', '--host', 'database.example.org', '--port', '5433', ], processes=[extract_process], output_log_level=logging.DEBUG, input_file=extract_process.stdout, ).once() module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process ) def test_restore_database_dump_runs_mongorestore_with_username_and_password(): database_config = [ { 'name': 'foo', 'username': 'mongo', 'password': 'trustsome1', 'authentication_database': 'admin', } ] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('make_dump_path') flexmock(module.dump).should_receive('make_database_dump_filename') flexmock(module).should_receive('execute_command_with_processes').with_args( [ 'mongorestore', '--archive', '--drop', '--db', 'foo', '--username', 'mongo', '--password', 'trustsome1', '--authenticationDatabase', 'admin', ], processes=[extract_process], output_log_level=logging.DEBUG, input_file=extract_process.stdout, ).once() module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process ) def test_restore_database_dump_runs_mongorestore_with_options(): database_config = [{'name': 'foo', 'restore_options': '--harder'}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('make_dump_path') flexmock(module.dump).should_receive('make_database_dump_filename') flexmock(module).should_receive('execute_command_with_processes').with_args( ['mongorestore', '--archive', '--drop', '--db', 'foo', '--harder'], processes=[extract_process], output_log_level=logging.DEBUG, input_file=extract_process.stdout, ).once() module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process ) def test_restore_database_dump_runs_psql_for_all_database_dump(): database_config = [{'name': 'all'}] extract_process = 
flexmock(stdout=flexmock()) flexmock(module).should_receive('make_dump_path') flexmock(module.dump).should_receive('make_database_dump_filename') flexmock(module).should_receive('execute_command_with_processes').with_args( ['mongorestore', '--archive'], processes=[extract_process], output_log_level=logging.DEBUG, input_file=extract_process.stdout, ).once() module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process ) def test_restore_database_dump_with_dry_run_skips_restore(): database_config = [{'name': 'foo'}] flexmock(module).should_receive('make_dump_path') flexmock(module.dump).should_receive('make_database_dump_filename') flexmock(module).should_receive('execute_command_with_processes').never() module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=True, extract_process=flexmock() ) def test_restore_database_dump_without_extract_process_restores_from_disk(): database_config = [{'name': 'foo', 'format': 'directory'}] flexmock(module).should_receive('make_dump_path') flexmock(module.dump).should_receive('make_database_dump_filename').and_return('/dump/path') flexmock(module).should_receive('execute_command_with_processes').with_args( ['mongorestore', '--dir', '/dump/path', '--drop', '--db', 'foo'], processes=[], output_log_level=logging.DEBUG, input_file=None, ).once() module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=False, extract_process=None ) borgmatic-1.7.9/tests/unit/hooks/test_mysql.py000066400000000000000000000364001440467744700215450ustar00rootroot00000000000000import logging import pytest from flexmock import flexmock from borgmatic.hooks import mysql as module def test_database_names_to_dump_passes_through_name(): extra_environment = flexmock() log_prefix = '' names = module.database_names_to_dump( {'name': 'foo'}, extra_environment, log_prefix, dry_run=False ) assert names == ('foo',) def test_database_names_to_dump_bails_for_dry_run(): extra_environment = 
flexmock() log_prefix = '' flexmock(module).should_receive('execute_command_and_capture_output').never() names = module.database_names_to_dump( {'name': 'all'}, extra_environment, log_prefix, dry_run=True ) assert names == () def test_database_names_to_dump_queries_mysql_for_database_names(): extra_environment = flexmock() log_prefix = '' flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('mysql', '--skip-column-names', '--batch', '--execute', 'show schemas'), extra_environment=extra_environment, ).and_return('foo\nbar\nmysql\n').once() names = module.database_names_to_dump( {'name': 'all'}, extra_environment, log_prefix, dry_run=False ) assert names == ('foo', 'bar') def test_dump_databases_dumps_each_database(): databases = [{'name': 'foo'}, {'name': 'bar'}] processes = [flexmock(), flexmock()] flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module).should_receive('database_names_to_dump').and_return(('foo',)).and_return( ('bar',) ) for name, process in zip(('foo', 'bar'), processes): flexmock(module).should_receive('execute_dump_command').with_args( database={'name': name}, log_prefix=object, dump_path=object, database_names=(name,), extra_environment=object, dry_run=object, dry_run_label=object, ).and_return(process).once() assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == processes def test_dump_databases_dumps_with_password(): database = {'name': 'foo', 'username': 'root', 'password': 'trustsome1'} process = flexmock() flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module).should_receive('database_names_to_dump').and_return(('foo',)).and_return( ('bar',) ) flexmock(module).should_receive('execute_dump_command').with_args( database=database, log_prefix=object, dump_path=object, database_names=('foo',), extra_environment={'MYSQL_PWD': 'trustsome1'}, dry_run=object, dry_run_label=object, ).and_return(process).once() assert 
module.dump_databases([database], 'test.yaml', {}, dry_run=False) == [process] def test_dump_databases_dumps_all_databases_at_once(): databases = [{'name': 'all'}] process = flexmock() flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module).should_receive('database_names_to_dump').and_return(('foo', 'bar')) flexmock(module).should_receive('execute_dump_command').with_args( database={'name': 'all'}, log_prefix=object, dump_path=object, database_names=('foo', 'bar'), extra_environment=object, dry_run=object, dry_run_label=object, ).and_return(process).once() assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] def test_dump_databases_dumps_all_databases_separately_when_format_configured(): databases = [{'name': 'all', 'format': 'sql'}] processes = [flexmock(), flexmock()] flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module).should_receive('database_names_to_dump').and_return(('foo', 'bar')) for name, process in zip(('foo', 'bar'), processes): flexmock(module).should_receive('execute_dump_command').with_args( database={'name': name, 'format': 'sql'}, log_prefix=object, dump_path=object, database_names=(name,), extra_environment=object, dry_run=object, dry_run_label=object, ).and_return(process).once() assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == processes def test_database_names_to_dump_runs_mysql_with_list_options(): database = {'name': 'all', 'list_options': '--defaults-extra-file=my.cnf'} flexmock(module).should_receive('execute_command_and_capture_output').with_args( ( 'mysql', '--defaults-extra-file=my.cnf', '--skip-column-names', '--batch', '--execute', 'show schemas', ), extra_environment=None, ).and_return(('foo\nbar')).once() assert module.database_names_to_dump(database, None, 'test.yaml', '') == ('foo', 'bar') def test_execute_dump_command_runs_mysqldump(): process = flexmock() 
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump') flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module.dump).should_receive('create_named_pipe_for_dump') flexmock(module).should_receive('execute_command').with_args( ('mysqldump', '--add-drop-database', '--databases', 'foo', '>', 'dump',), shell=True, extra_environment=None, run_to_completion=False, ).and_return(process).once() assert ( module.execute_dump_command( database={'name': 'foo'}, log_prefix='log', dump_path=flexmock(), database_names=('foo',), extra_environment=None, dry_run=False, dry_run_label='', ) == process ) def test_execute_dump_command_runs_mysqldump_without_add_drop_database(): process = flexmock() flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump') flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module.dump).should_receive('create_named_pipe_for_dump') flexmock(module).should_receive('execute_command').with_args( ('mysqldump', '--databases', 'foo', '>', 'dump',), shell=True, extra_environment=None, run_to_completion=False, ).and_return(process).once() assert ( module.execute_dump_command( database={'name': 'foo', 'add_drop_database': False}, log_prefix='log', dump_path=flexmock(), database_names=('foo',), extra_environment=None, dry_run=False, dry_run_label='', ) == process ) def test_execute_dump_command_runs_mysqldump_with_hostname_and_port(): process = flexmock() flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump') flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module.dump).should_receive('create_named_pipe_for_dump') flexmock(module).should_receive('execute_command').with_args( ( 'mysqldump', '--add-drop-database', '--host', 'database.example.org', '--port', '5433', '--protocol', 'tcp', '--databases', 'foo', '>', 'dump', ), shell=True, extra_environment=None, run_to_completion=False, 
).and_return(process).once() assert ( module.execute_dump_command( database={'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}, log_prefix='log', dump_path=flexmock(), database_names=('foo',), extra_environment=None, dry_run=False, dry_run_label='', ) == process ) def test_execute_dump_command_runs_mysqldump_with_username_and_password(): process = flexmock() flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump') flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module.dump).should_receive('create_named_pipe_for_dump') flexmock(module).should_receive('execute_command').with_args( ('mysqldump', '--add-drop-database', '--user', 'root', '--databases', 'foo', '>', 'dump',), shell=True, extra_environment={'MYSQL_PWD': 'trustsome1'}, run_to_completion=False, ).and_return(process).once() assert ( module.execute_dump_command( database={'name': 'foo', 'username': 'root', 'password': 'trustsome1'}, log_prefix='log', dump_path=flexmock(), database_names=('foo',), extra_environment={'MYSQL_PWD': 'trustsome1'}, dry_run=False, dry_run_label='', ) == process ) def test_execute_dump_command_runs_mysqldump_with_options(): process = flexmock() flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump') flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module.dump).should_receive('create_named_pipe_for_dump') flexmock(module).should_receive('execute_command').with_args( ('mysqldump', '--stuff=such', '--add-drop-database', '--databases', 'foo', '>', 'dump',), shell=True, extra_environment=None, run_to_completion=False, ).and_return(process).once() assert ( module.execute_dump_command( database={'name': 'foo', 'options': '--stuff=such'}, log_prefix='log', dump_path=flexmock(), database_names=('foo',), extra_environment=None, dry_run=False, dry_run_label='', ) == process ) def test_execute_dump_command_with_duplicate_dump_skips_mysqldump(): 
flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump') flexmock(module.os.path).should_receive('exists').and_return(True) flexmock(module.dump).should_receive('create_named_pipe_for_dump').never() flexmock(module).should_receive('execute_command').never() assert ( module.execute_dump_command( database={'name': 'foo'}, log_prefix='log', dump_path=flexmock(), database_names=('foo',), extra_environment=None, dry_run=True, dry_run_label='SO DRY', ) is None ) def test_execute_dump_command_with_dry_run_skips_mysqldump(): flexmock(module.dump).should_receive('make_database_dump_filename').and_return('dump') flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module.dump).should_receive('create_named_pipe_for_dump') flexmock(module).should_receive('execute_command').never() assert ( module.execute_dump_command( database={'name': 'foo'}, log_prefix='log', dump_path=flexmock(), database_names=('foo',), extra_environment=None, dry_run=True, dry_run_label='SO DRY', ) is None ) def test_dump_databases_errors_for_missing_all_databases(): databases = [{'name': 'all'}] flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module.dump).should_receive('make_database_dump_filename').and_return( 'databases/localhost/all' ) flexmock(module).should_receive('database_names_to_dump').and_return(()) with pytest.raises(ValueError): assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) def test_dump_databases_does_not_error_for_missing_all_databases_with_dry_run(): databases = [{'name': 'all'}] flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module.dump).should_receive('make_database_dump_filename').and_return( 'databases/localhost/all' ) flexmock(module).should_receive('database_names_to_dump').and_return(()) assert module.dump_databases(databases, 'test.yaml', {}, dry_run=True) == [] def test_restore_database_dump_runs_mysql_to_restore(): database_config = [{'name': 
'foo'}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('execute_command_with_processes').with_args( ('mysql', '--batch'), processes=[extract_process], output_log_level=logging.DEBUG, input_file=extract_process.stdout, extra_environment=None, ).once() module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process ) def test_restore_database_dump_errors_on_multiple_database_config(): database_config = [{'name': 'foo'}, {'name': 'bar'}] flexmock(module).should_receive('execute_command_with_processes').never() flexmock(module).should_receive('execute_command').never() with pytest.raises(ValueError): module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=False, extract_process=flexmock() ) def test_restore_database_dump_runs_mysql_with_options(): database_config = [{'name': 'foo', 'restore_options': '--harder'}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('execute_command_with_processes').with_args( ('mysql', '--batch', '--harder'), processes=[extract_process], output_log_level=logging.DEBUG, input_file=extract_process.stdout, extra_environment=None, ).once() module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process ) def test_restore_database_dump_runs_mysql_with_hostname_and_port(): database_config = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('execute_command_with_processes').with_args( ( 'mysql', '--batch', '--host', 'database.example.org', '--port', '5433', '--protocol', 'tcp', ), processes=[extract_process], output_log_level=logging.DEBUG, input_file=extract_process.stdout, extra_environment=None, ).once() module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process ) def 
test_restore_database_dump_runs_mysql_with_username_and_password(): database_config = [{'name': 'foo', 'username': 'root', 'password': 'trustsome1'}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('execute_command_with_processes').with_args( ('mysql', '--batch', '--user', 'root'), processes=[extract_process], output_log_level=logging.DEBUG, input_file=extract_process.stdout, extra_environment={'MYSQL_PWD': 'trustsome1'}, ).once() module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process ) def test_restore_database_dump_with_dry_run_skips_restore(): database_config = [{'name': 'foo'}] flexmock(module).should_receive('execute_command_with_processes').never() module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=True, extract_process=flexmock() ) borgmatic-1.7.9/tests/unit/hooks/test_ntfy.py000066400000000000000000000161431440467744700213620ustar00rootroot00000000000000from enum import Enum from flexmock import flexmock import borgmatic.hooks.monitor from borgmatic.hooks import ntfy as module default_base_url = 'https://ntfy.sh' custom_base_url = 'https://ntfy.example.com' topic = 'borgmatic-unit-testing' custom_message_config = { 'title': 'Borgmatic unit testing', 'message': 'Borgmatic unit testing', 'priority': 'min', 'tags': '+1', } custom_message_headers = { 'X-Title': custom_message_config['title'], 'X-Message': custom_message_config['message'], 'X-Priority': custom_message_config['priority'], 'X-Tags': custom_message_config['tags'], } def return_default_message_headers(state=Enum): headers = { 'X-Title': f'A Borgmatic {state.name} event happened', 'X-Message': f'A Borgmatic {state.name} event happened', 'X-Priority': 'default', 'X-Tags': 'borgmatic', } return headers def test_ping_monitor_minimal_config_hits_hosted_ntfy_on_fail(): hook_config = {'topic': topic} flexmock(module.requests).should_receive('post').with_args( f'{default_base_url}/{topic}', 
headers=return_default_message_headers(borgmatic.hooks.monitor.State.FAIL), auth=None, ).and_return(flexmock(ok=True)).once() module.ping_monitor( hook_config, 'config.yaml', borgmatic.hooks.monitor.State.FAIL, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_with_auth_hits_hosted_ntfy_on_fail(): hook_config = { 'topic': topic, 'username': 'testuser', 'password': 'fakepassword', } flexmock(module.requests).should_receive('post').with_args( f'{default_base_url}/{topic}', headers=return_default_message_headers(borgmatic.hooks.monitor.State.FAIL), auth=module.requests.auth.HTTPBasicAuth('testuser', 'fakepassword'), ).and_return(flexmock(ok=True)).once() module.ping_monitor( hook_config, 'config.yaml', borgmatic.hooks.monitor.State.FAIL, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_auth_with_no_username_warning(): hook_config = {'topic': topic, 'password': 'fakepassword'} flexmock(module.requests).should_receive('post').with_args( f'{default_base_url}/{topic}', headers=return_default_message_headers(borgmatic.hooks.monitor.State.FAIL), auth=None, ).and_return(flexmock(ok=True)).once() flexmock(module.logger).should_receive('warning').once() module.ping_monitor( hook_config, 'config.yaml', borgmatic.hooks.monitor.State.FAIL, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_auth_with_no_password_warning(): hook_config = {'topic': topic, 'username': 'testuser'} flexmock(module.requests).should_receive('post').with_args( f'{default_base_url}/{topic}', headers=return_default_message_headers(borgmatic.hooks.monitor.State.FAIL), auth=None, ).and_return(flexmock(ok=True)).once() flexmock(module.logger).should_receive('warning').once() module.ping_monitor( hook_config, 'config.yaml', borgmatic.hooks.monitor.State.FAIL, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_minimal_config_does_not_hit_hosted_ntfy_on_start(): hook_config = {'topic': topic} flexmock(module.requests).should_receive('post').never() 
module.ping_monitor( hook_config, 'config.yaml', borgmatic.hooks.monitor.State.START, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_minimal_config_does_not_hit_hosted_ntfy_on_finish(): hook_config = {'topic': topic} flexmock(module.requests).should_receive('post').never() module.ping_monitor( hook_config, 'config.yaml', borgmatic.hooks.monitor.State.FINISH, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_minimal_config_hits_selfhosted_ntfy_on_fail(): hook_config = {'topic': topic, 'server': custom_base_url} flexmock(module.requests).should_receive('post').with_args( f'{custom_base_url}/{topic}', headers=return_default_message_headers(borgmatic.hooks.monitor.State.FAIL), auth=None, ).and_return(flexmock(ok=True)).once() module.ping_monitor( hook_config, 'config.yaml', borgmatic.hooks.monitor.State.FAIL, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_minimal_config_does_not_hit_hosted_ntfy_on_fail_dry_run(): hook_config = {'topic': topic} flexmock(module.requests).should_receive('post').never() module.ping_monitor( hook_config, 'config.yaml', borgmatic.hooks.monitor.State.FAIL, monitoring_log_level=1, dry_run=True, ) def test_ping_monitor_custom_message_hits_hosted_ntfy_on_fail(): hook_config = {'topic': topic, 'fail': custom_message_config} flexmock(module.requests).should_receive('post').with_args( f'{default_base_url}/{topic}', headers=custom_message_headers, auth=None ).and_return(flexmock(ok=True)).once() module.ping_monitor( hook_config, 'config.yaml', borgmatic.hooks.monitor.State.FAIL, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_custom_state_hits_hosted_ntfy_on_start(): hook_config = {'topic': topic, 'states': ['start', 'fail']} flexmock(module.requests).should_receive('post').with_args( f'{default_base_url}/{topic}', headers=return_default_message_headers(borgmatic.hooks.monitor.State.START), auth=None, ).and_return(flexmock(ok=True)).once() module.ping_monitor( hook_config, 'config.yaml', 
borgmatic.hooks.monitor.State.START, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_with_connection_error_logs_warning(): hook_config = {'topic': topic} flexmock(module.requests).should_receive('post').with_args( f'{default_base_url}/{topic}', headers=return_default_message_headers(borgmatic.hooks.monitor.State.FAIL), auth=None, ).and_raise(module.requests.exceptions.ConnectionError) flexmock(module.logger).should_receive('warning').once() module.ping_monitor( hook_config, 'config.yaml', borgmatic.hooks.monitor.State.FAIL, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_with_other_error_logs_warning(): hook_config = {'topic': topic} response = flexmock(ok=False) response.should_receive('raise_for_status').and_raise( module.requests.exceptions.RequestException ) flexmock(module.requests).should_receive('post').with_args( f'{default_base_url}/{topic}', headers=return_default_message_headers(borgmatic.hooks.monitor.State.FAIL), auth=None, ).and_return(response) flexmock(module.logger).should_receive('warning').once() module.ping_monitor( hook_config, 'config.yaml', borgmatic.hooks.monitor.State.FAIL, monitoring_log_level=1, dry_run=False, ) borgmatic-1.7.9/tests/unit/hooks/test_pagerduty.py000066400000000000000000000042671440467744700224120ustar00rootroot00000000000000from flexmock import flexmock from borgmatic.hooks import pagerduty as module def test_ping_monitor_ignores_start_state(): flexmock(module.requests).should_receive('post').never() module.ping_monitor( {'integration_key': 'abc123'}, 'config.yaml', module.monitor.State.START, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_ignores_finish_state(): flexmock(module.requests).should_receive('post').never() module.ping_monitor( {'integration_key': 'abc123'}, 'config.yaml', module.monitor.State.FINISH, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_calls_api_for_fail_state(): 
flexmock(module.requests).should_receive('post').and_return(flexmock(ok=True)) module.ping_monitor( {'integration_key': 'abc123'}, 'config.yaml', module.monitor.State.FAIL, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_dry_run_does_not_call_api(): flexmock(module.requests).should_receive('post').never() module.ping_monitor( {'integration_key': 'abc123'}, 'config.yaml', module.monitor.State.FAIL, monitoring_log_level=1, dry_run=True, ) def test_ping_monitor_with_connection_error_logs_warning(): flexmock(module.requests).should_receive('post').and_raise( module.requests.exceptions.ConnectionError ) flexmock(module.logger).should_receive('warning').once() module.ping_monitor( {'integration_key': 'abc123'}, 'config.yaml', module.monitor.State.FAIL, monitoring_log_level=1, dry_run=False, ) def test_ping_monitor_with_other_error_logs_warning(): response = flexmock(ok=False) response.should_receive('raise_for_status').and_raise( module.requests.exceptions.RequestException ) flexmock(module.requests).should_receive('post').and_return(response) flexmock(module.logger).should_receive('warning') module.ping_monitor( {'integration_key': 'abc123'}, 'config.yaml', module.monitor.State.FAIL, monitoring_log_level=1, dry_run=False, ) borgmatic-1.7.9/tests/unit/hooks/test_postgresql.py000066400000000000000000000631461440467744700226120ustar00rootroot00000000000000import logging import pytest from flexmock import flexmock from borgmatic.hooks import postgresql as module def test_database_names_to_dump_passes_through_individual_database_name(): database = {'name': 'foo'} assert module.database_names_to_dump(database, flexmock(), flexmock(), dry_run=False) == ( 'foo', ) def test_database_names_to_dump_passes_through_individual_database_name_with_format(): database = {'name': 'foo', 'format': 'custom'} assert module.database_names_to_dump(database, flexmock(), flexmock(), dry_run=False) == ( 'foo', ) def test_database_names_to_dump_passes_through_all_without_format(): 
database = {'name': 'all'} assert module.database_names_to_dump(database, flexmock(), flexmock(), dry_run=False) == ( 'all', ) def test_database_names_to_dump_with_all_and_format_and_dry_run_bails(): database = {'name': 'all', 'format': 'custom'} flexmock(module).should_receive('execute_command_and_capture_output').never() assert module.database_names_to_dump(database, flexmock(), flexmock(), dry_run=True) == () def test_database_names_to_dump_with_all_and_format_lists_databases(): database = {'name': 'all', 'format': 'custom'} flexmock(module).should_receive('execute_command_and_capture_output').and_return( 'foo,test,\nbar,test,"stuff and such"' ) assert module.database_names_to_dump(database, flexmock(), flexmock(), dry_run=False) == ( 'foo', 'bar', ) def test_database_names_to_dump_with_all_and_format_lists_databases_with_hostname_and_port(): database = {'name': 'all', 'format': 'custom', 'hostname': 'localhost', 'port': 1234} flexmock(module).should_receive('execute_command_and_capture_output').with_args( ( 'psql', '--list', '--no-password', '--csv', '--tuples-only', '--host', 'localhost', '--port', '1234', ), extra_environment=object, ).and_return('foo,test,\nbar,test,"stuff and such"') assert module.database_names_to_dump(database, flexmock(), flexmock(), dry_run=False) == ( 'foo', 'bar', ) def test_database_names_to_dump_with_all_and_format_lists_databases_with_username(): database = {'name': 'all', 'format': 'custom', 'username': 'postgres'} flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('psql', '--list', '--no-password', '--csv', '--tuples-only', '--username', 'postgres'), extra_environment=object, ).and_return('foo,test,\nbar,test,"stuff and such"') assert module.database_names_to_dump(database, flexmock(), flexmock(), dry_run=False) == ( 'foo', 'bar', ) def test_database_names_to_dump_with_all_and_format_lists_databases_with_options(): database = {'name': 'all', 'format': 'custom', 'list_options': '--harder'} 
flexmock(module).should_receive('execute_command_and_capture_output').with_args( ('psql', '--list', '--no-password', '--csv', '--tuples-only', '--harder'), extra_environment=object, ).and_return('foo,test,\nbar,test,"stuff and such"') assert module.database_names_to_dump(database, flexmock(), flexmock(), dry_run=False) == ( 'foo', 'bar', ) def test_database_names_to_dump_with_all_and_format_excludes_particular_databases(): database = {'name': 'all', 'format': 'custom'} flexmock(module).should_receive('execute_command_and_capture_output').and_return( 'foo,test,\ntemplate0,test,blah' ) assert module.database_names_to_dump(database, flexmock(), flexmock(), dry_run=False) == ( 'foo', ) def test_dump_databases_runs_pg_dump_for_each_database(): databases = [{'name': 'foo'}, {'name': 'bar'}] processes = [flexmock(), flexmock()] flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module).should_receive('database_names_to_dump').and_return(('foo',)).and_return( ('bar',) ) flexmock(module.dump).should_receive('make_database_dump_filename').and_return( 'databases/localhost/foo' ).and_return('databases/localhost/bar') flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module.dump).should_receive('create_named_pipe_for_dump') for name, process in zip(('foo', 'bar'), processes): flexmock(module).should_receive('execute_command').with_args( ( 'pg_dump', '--no-password', '--clean', '--if-exists', '--format', 'custom', name, '>', 'databases/localhost/{}'.format(name), ), shell=True, extra_environment={'PGSSLMODE': 'disable'}, run_to_completion=False, ).and_return(process).once() assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == processes def test_dump_databases_raises_when_no_database_names_to_dump(): databases = [{'name': 'foo'}, {'name': 'bar'}] 
flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module).should_receive('database_names_to_dump').and_return(()) with pytest.raises(ValueError): module.dump_databases(databases, 'test.yaml', {}, dry_run=False) def test_dump_databases_does_not_raise_when_no_database_names_to_dump(): databases = [{'name': 'foo'}, {'name': 'bar'}] flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module).should_receive('database_names_to_dump').and_return(()) module.dump_databases(databases, 'test.yaml', {}, dry_run=True) == [] def test_dump_databases_with_duplicate_dump_skips_pg_dump(): databases = [{'name': 'foo'}, {'name': 'bar'}] flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module).should_receive('database_names_to_dump').and_return(('foo',)).and_return( ('bar',) ) flexmock(module.dump).should_receive('make_database_dump_filename').and_return( 'databases/localhost/foo' ).and_return('databases/localhost/bar') flexmock(module.os.path).should_receive('exists').and_return(True) flexmock(module.dump).should_receive('create_named_pipe_for_dump').never() flexmock(module).should_receive('execute_command').never() assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [] def test_dump_databases_with_dry_run_skips_pg_dump(): databases = [{'name': 'foo'}, {'name': 'bar'}] flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module).should_receive('database_names_to_dump').and_return(('foo',)).and_return( ('bar',) ) flexmock(module.dump).should_receive('make_database_dump_filename').and_return( 
'databases/localhost/foo' ).and_return('databases/localhost/bar') flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module.dump).should_receive('create_named_pipe_for_dump').never() flexmock(module).should_receive('execute_command').never() assert module.dump_databases(databases, 'test.yaml', {}, dry_run=True) == [] def test_dump_databases_runs_pg_dump_with_hostname_and_port(): databases = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}] process = flexmock() flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module).should_receive('database_names_to_dump').and_return(('foo',)) flexmock(module.dump).should_receive('make_database_dump_filename').and_return( 'databases/database.example.org/foo' ) flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module.dump).should_receive('create_named_pipe_for_dump') flexmock(module).should_receive('execute_command').with_args( ( 'pg_dump', '--no-password', '--clean', '--if-exists', '--host', 'database.example.org', '--port', '5433', '--format', 'custom', 'foo', '>', 'databases/database.example.org/foo', ), shell=True, extra_environment={'PGSSLMODE': 'disable'}, run_to_completion=False, ).and_return(process).once() assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] def test_dump_databases_runs_pg_dump_with_username_and_password(): databases = [{'name': 'foo', 'username': 'postgres', 'password': 'trustsome1'}] process = flexmock() flexmock(module).should_receive('make_extra_environment').and_return( {'PGPASSWORD': 'trustsome1', 'PGSSLMODE': 'disable'} ) flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module).should_receive('database_names_to_dump').and_return(('foo',)) flexmock(module.dump).should_receive('make_database_dump_filename').and_return( 'databases/localhost/foo' ) 
flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module.dump).should_receive('create_named_pipe_for_dump') flexmock(module).should_receive('execute_command').with_args( ( 'pg_dump', '--no-password', '--clean', '--if-exists', '--username', 'postgres', '--format', 'custom', 'foo', '>', 'databases/localhost/foo', ), shell=True, extra_environment={'PGPASSWORD': 'trustsome1', 'PGSSLMODE': 'disable'}, run_to_completion=False, ).and_return(process).once() assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] def test_make_extra_environment_maps_options_to_environment(): database = { 'name': 'foo', 'password': 'pass', 'ssl_mode': 'require', 'ssl_cert': 'cert.crt', 'ssl_key': 'key.key', 'ssl_root_cert': 'root.crt', 'ssl_crl': 'crl.crl', } expected = { 'PGPASSWORD': 'pass', 'PGSSLMODE': 'require', 'PGSSLCERT': 'cert.crt', 'PGSSLKEY': 'key.key', 'PGSSLROOTCERT': 'root.crt', 'PGSSLCRL': 'crl.crl', } extra_env = module.make_extra_environment(database) assert extra_env == expected def test_dump_databases_runs_pg_dump_with_directory_format(): databases = [{'name': 'foo', 'format': 'directory'}] flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module).should_receive('database_names_to_dump').and_return(('foo',)) flexmock(module.dump).should_receive('make_database_dump_filename').and_return( 'databases/localhost/foo' ) flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module.dump).should_receive('create_parent_directory_for_dump') flexmock(module.dump).should_receive('create_named_pipe_for_dump').never() flexmock(module).should_receive('execute_command').with_args( ( 'pg_dump', '--no-password', '--clean', '--if-exists', '--format', 'directory', '--file', 'databases/localhost/foo', 'foo', ), shell=True, extra_environment={'PGSSLMODE': 'disable'}, ).and_return(flexmock()).once() 
assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [] def test_dump_databases_runs_pg_dump_with_options(): databases = [{'name': 'foo', 'options': '--stuff=such'}] process = flexmock() flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module).should_receive('database_names_to_dump').and_return(('foo',)) flexmock(module.dump).should_receive('make_database_dump_filename').and_return( 'databases/localhost/foo' ) flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module.dump).should_receive('create_named_pipe_for_dump') flexmock(module).should_receive('execute_command').with_args( ( 'pg_dump', '--no-password', '--clean', '--if-exists', '--format', 'custom', '--stuff=such', 'foo', '>', 'databases/localhost/foo', ), shell=True, extra_environment={'PGSSLMODE': 'disable'}, run_to_completion=False, ).and_return(process).once() assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] def test_dump_databases_runs_pg_dumpall_for_all_databases(): databases = [{'name': 'all'}] process = flexmock() flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module).should_receive('database_names_to_dump').and_return(('all',)) flexmock(module.dump).should_receive('make_database_dump_filename').and_return( 'databases/localhost/all' ) flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module.dump).should_receive('create_named_pipe_for_dump') flexmock(module).should_receive('execute_command').with_args( ('pg_dumpall', '--no-password', '--clean', '--if-exists', '>', 'databases/localhost/all'), shell=True, extra_environment={'PGSSLMODE': 'disable'}, run_to_completion=False, ).and_return(process).once() assert module.dump_databases(databases, 'test.yaml', {}, 
dry_run=False) == [process] def test_dump_databases_runs_non_default_pg_dump(): databases = [{'name': 'foo', 'pg_dump_command': 'special_pg_dump'}] process = flexmock() flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) flexmock(module).should_receive('make_dump_path').and_return('') flexmock(module).should_receive('database_names_to_dump').and_return(('foo',)) flexmock(module.dump).should_receive('make_database_dump_filename').and_return( 'databases/localhost/foo' ) flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module.dump).should_receive('create_named_pipe_for_dump') flexmock(module).should_receive('execute_command').with_args( ( 'special_pg_dump', '--no-password', '--clean', '--if-exists', '--format', 'custom', 'foo', '>', 'databases/localhost/foo', ), shell=True, extra_environment={'PGSSLMODE': 'disable'}, run_to_completion=False, ).and_return(process).once() assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [process] def test_restore_database_dump_runs_pg_restore(): database_config = [{'name': 'foo'}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) flexmock(module).should_receive('make_dump_path') flexmock(module.dump).should_receive('make_database_dump_filename') flexmock(module).should_receive('execute_command_with_processes').with_args( ( 'pg_restore', '--no-password', '--if-exists', '--exit-on-error', '--clean', '--dbname', 'foo', ), processes=[extract_process], output_log_level=logging.DEBUG, input_file=extract_process.stdout, extra_environment={'PGSSLMODE': 'disable'}, ).once() flexmock(module).should_receive('execute_command').with_args( ('psql', '--no-password', '--quiet', '--dbname', 'foo', '--command', 'ANALYZE'), extra_environment={'PGSSLMODE': 'disable'}, ).once() module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=False, 
extract_process=extract_process ) def test_restore_database_dump_errors_on_multiple_database_config(): database_config = [{'name': 'foo'}, {'name': 'bar'}] flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) flexmock(module).should_receive('make_dump_path') flexmock(module.dump).should_receive('make_database_dump_filename') flexmock(module).should_receive('execute_command_with_processes').never() flexmock(module).should_receive('execute_command').never() with pytest.raises(ValueError): module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=False, extract_process=flexmock() ) def test_restore_database_dump_runs_pg_restore_with_hostname_and_port(): database_config = [{'name': 'foo', 'hostname': 'database.example.org', 'port': 5433}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) flexmock(module).should_receive('make_dump_path') flexmock(module.dump).should_receive('make_database_dump_filename') flexmock(module).should_receive('execute_command_with_processes').with_args( ( 'pg_restore', '--no-password', '--if-exists', '--exit-on-error', '--clean', '--dbname', 'foo', '--host', 'database.example.org', '--port', '5433', ), processes=[extract_process], output_log_level=logging.DEBUG, input_file=extract_process.stdout, extra_environment={'PGSSLMODE': 'disable'}, ).once() flexmock(module).should_receive('execute_command').with_args( ( 'psql', '--no-password', '--quiet', '--host', 'database.example.org', '--port', '5433', '--dbname', 'foo', '--command', 'ANALYZE', ), extra_environment={'PGSSLMODE': 'disable'}, ).once() module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process ) def test_restore_database_dump_runs_pg_restore_with_username_and_password(): database_config = [{'name': 'foo', 'username': 'postgres', 'password': 'trustsome1'}] extract_process = 
flexmock(stdout=flexmock()) flexmock(module).should_receive('make_extra_environment').and_return( {'PGPASSWORD': 'trustsome1', 'PGSSLMODE': 'disable'} ) flexmock(module).should_receive('make_dump_path') flexmock(module.dump).should_receive('make_database_dump_filename') flexmock(module).should_receive('execute_command_with_processes').with_args( ( 'pg_restore', '--no-password', '--if-exists', '--exit-on-error', '--clean', '--dbname', 'foo', '--username', 'postgres', ), processes=[extract_process], output_log_level=logging.DEBUG, input_file=extract_process.stdout, extra_environment={'PGPASSWORD': 'trustsome1', 'PGSSLMODE': 'disable'}, ).once() flexmock(module).should_receive('execute_command').with_args( ( 'psql', '--no-password', '--quiet', '--username', 'postgres', '--dbname', 'foo', '--command', 'ANALYZE', ), extra_environment={'PGPASSWORD': 'trustsome1', 'PGSSLMODE': 'disable'}, ).once() module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process ) def test_restore_database_dump_runs_pg_restore_with_options(): database_config = [ {'name': 'foo', 'restore_options': '--harder', 'analyze_options': '--smarter'} ] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) flexmock(module).should_receive('make_dump_path') flexmock(module.dump).should_receive('make_database_dump_filename') flexmock(module).should_receive('execute_command_with_processes').with_args( ( 'pg_restore', '--no-password', '--if-exists', '--exit-on-error', '--clean', '--dbname', 'foo', '--harder', ), processes=[extract_process], output_log_level=logging.DEBUG, input_file=extract_process.stdout, extra_environment={'PGSSLMODE': 'disable'}, ).once() flexmock(module).should_receive('execute_command').with_args( ( 'psql', '--no-password', '--quiet', '--dbname', 'foo', '--smarter', '--command', 'ANALYZE', ), extra_environment={'PGSSLMODE': 'disable'}, ).once() 
module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process ) def test_restore_database_dump_runs_psql_for_all_database_dump(): database_config = [{'name': 'all'}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) flexmock(module).should_receive('make_dump_path') flexmock(module.dump).should_receive('make_database_dump_filename') flexmock(module).should_receive('execute_command_with_processes').with_args( ('psql', '--no-password'), processes=[extract_process], output_log_level=logging.DEBUG, input_file=extract_process.stdout, extra_environment={'PGSSLMODE': 'disable'}, ).once() flexmock(module).should_receive('execute_command').with_args( ('psql', '--no-password', '--quiet', '--command', 'ANALYZE'), extra_environment={'PGSSLMODE': 'disable'}, ).once() module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process ) def test_restore_database_dump_runs_non_default_pg_restore_and_psql(): database_config = [ {'name': 'foo', 'pg_restore_command': 'special_pg_restore', 'psql_command': 'special_psql'} ] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) flexmock(module).should_receive('make_dump_path') flexmock(module.dump).should_receive('make_database_dump_filename') flexmock(module).should_receive('execute_command_with_processes').with_args( ( 'special_pg_restore', '--no-password', '--if-exists', '--exit-on-error', '--clean', '--dbname', 'foo', ), processes=[extract_process], output_log_level=logging.DEBUG, input_file=extract_process.stdout, extra_environment={'PGSSLMODE': 'disable'}, ).once() flexmock(module).should_receive('execute_command').with_args( ('special_psql', '--no-password', '--quiet', '--dbname', 'foo', '--command', 'ANALYZE'), extra_environment={'PGSSLMODE': 'disable'}, ).once() 
module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process ) def test_restore_database_dump_with_dry_run_skips_restore(): database_config = [{'name': 'foo'}] flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) flexmock(module).should_receive('make_dump_path') flexmock(module.dump).should_receive('make_database_dump_filename') flexmock(module).should_receive('execute_command_with_processes').never() module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=True, extract_process=flexmock() ) def test_restore_database_dump_without_extract_process_restores_from_disk(): database_config = [{'name': 'foo'}] flexmock(module).should_receive('make_extra_environment').and_return({'PGSSLMODE': 'disable'}) flexmock(module).should_receive('make_dump_path') flexmock(module.dump).should_receive('make_database_dump_filename').and_return('/dump/path') flexmock(module).should_receive('execute_command_with_processes').with_args( ( 'pg_restore', '--no-password', '--if-exists', '--exit-on-error', '--clean', '--dbname', 'foo', '/dump/path', ), processes=[], output_log_level=logging.DEBUG, input_file=None, extra_environment={'PGSSLMODE': 'disable'}, ).once() flexmock(module).should_receive('execute_command').with_args( ('psql', '--no-password', '--quiet', '--dbname', 'foo', '--command', 'ANALYZE'), extra_environment={'PGSSLMODE': 'disable'}, ).once() module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=False, extract_process=None ) borgmatic-1.7.9/tests/unit/hooks/test_sqlite.py000066400000000000000000000116341440467744700217030ustar00rootroot00000000000000import pytest from flexmock import flexmock from borgmatic.hooks import sqlite as module def test_dump_databases_logs_and_skips_if_dump_already_exists(): databases = [{'path': '/path/to/database', 'name': 'database'}] flexmock(module).should_receive('make_dump_path').and_return('/path/to/dump') 
flexmock(module.dump).should_receive('make_database_dump_filename').and_return( '/path/to/dump/database' ) flexmock(module.os.path).should_receive('exists').and_return(True) flexmock(module.dump).should_receive('create_parent_directory_for_dump').never() flexmock(module).should_receive('execute_command').never() assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == [] def test_dump_databases_dumps_each_database(): databases = [ {'path': '/path/to/database1', 'name': 'database1'}, {'path': '/path/to/database2', 'name': 'database2'}, ] processes = [flexmock(), flexmock()] flexmock(module).should_receive('make_dump_path').and_return('/path/to/dump') flexmock(module.dump).should_receive('make_database_dump_filename').and_return( '/path/to/dump/database' ) flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module.dump).should_receive('create_parent_directory_for_dump') flexmock(module).should_receive('execute_command').and_return(processes[0]).and_return( processes[1] ) assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == processes def test_dumping_database_with_non_existent_path_warns_and_dumps_database(): databases = [ {'path': '/path/to/database1', 'name': 'database1'}, ] processes = [flexmock()] flexmock(module).should_receive('make_dump_path').and_return('/path/to/dump') flexmock(module.logger).should_receive('warning').once() flexmock(module.dump).should_receive('make_database_dump_filename').and_return( '/path/to/dump/database' ) flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module.dump).should_receive('create_parent_directory_for_dump') flexmock(module).should_receive('execute_command').and_return(processes[0]) assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == processes def test_dumping_database_with_name_all_warns_and_dumps_all_databases(): databases = [ {'path': '/path/to/database1', 'name': 'all'}, ] processes = [flexmock()] 
flexmock(module).should_receive('make_dump_path').and_return('/path/to/dump') flexmock(module.logger).should_receive( 'warning' ).twice() # once for the name=all, once for the non-existent path flexmock(module.dump).should_receive('make_database_dump_filename').and_return( '/path/to/dump/database' ) flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module.dump).should_receive('create_parent_directory_for_dump') flexmock(module).should_receive('execute_command').and_return(processes[0]) assert module.dump_databases(databases, 'test.yaml', {}, dry_run=False) == processes def test_dump_databases_does_not_dump_if_dry_run(): databases = [{'path': '/path/to/database', 'name': 'database'}] flexmock(module).should_receive('make_dump_path').and_return('/path/to/dump') flexmock(module.dump).should_receive('make_database_dump_filename').and_return( '/path/to/dump/database' ) flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module.dump).should_receive('create_parent_directory_for_dump').never() flexmock(module).should_receive('execute_command').never() assert module.dump_databases(databases, 'test.yaml', {}, dry_run=True) == [] def test_restore_database_dump_restores_database(): database_config = [{'path': '/path/to/database', 'name': 'database'}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('execute_command_with_processes').once() flexmock(module.os).should_receive('remove').once() module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process ) def test_restore_database_dump_does_not_restore_database_if_dry_run(): database_config = [{'path': '/path/to/database', 'name': 'database'}] extract_process = flexmock(stdout=flexmock()) flexmock(module).should_receive('execute_command_with_processes').never() flexmock(module.os).should_receive('remove').never() module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=True, 
extract_process=extract_process ) def test_restore_database_dump_raises_error_if_database_config_is_invalid(): database_config = [] extract_process = flexmock(stdout=flexmock()) with pytest.raises(ValueError): module.restore_database_dump( database_config, 'test.yaml', {}, dry_run=False, extract_process=extract_process ) borgmatic-1.7.9/tests/unit/test_execute.py000066400000000000000000000373321440467744700207240ustar00rootroot00000000000000import subprocess import pytest from flexmock import flexmock from borgmatic import execute as module @pytest.mark.parametrize( 'process,exit_code,borg_local_path,expected_result', ( (flexmock(args=['grep']), 2, None, True), (flexmock(args=['grep']), 2, 'borg', True), (flexmock(args=['borg']), 2, 'borg', True), (flexmock(args=['borg1']), 2, 'borg1', True), (flexmock(args=['grep']), 1, None, True), (flexmock(args=['grep']), 1, 'borg', True), (flexmock(args=['borg']), 1, 'borg', False), (flexmock(args=['borg1']), 1, 'borg1', False), (flexmock(args=['grep']), 0, None, False), (flexmock(args=['grep']), 0, 'borg', False), (flexmock(args=['borg']), 0, 'borg', False), (flexmock(args=['borg1']), 0, 'borg1', False), # -9 exit code occurs when child process get SIGKILLed. 
(flexmock(args=['grep']), -9, None, True), (flexmock(args=['grep']), -9, 'borg', True), (flexmock(args=['borg']), -9, 'borg', True), (flexmock(args=['borg1']), -9, 'borg1', True), (flexmock(args=['borg']), None, None, False), ), ) def test_exit_code_indicates_error_respects_exit_code_and_borg_local_path( process, exit_code, borg_local_path, expected_result ): assert module.exit_code_indicates_error(process, exit_code, borg_local_path) is expected_result def test_command_for_process_converts_sequence_command_to_string(): process = flexmock(args=['foo', 'bar', 'baz']) assert module.command_for_process(process) == 'foo bar baz' def test_command_for_process_passes_through_string_command(): process = flexmock(args='foo bar baz') assert module.command_for_process(process) == 'foo bar baz' def test_output_buffer_for_process_returns_stderr_when_stdout_excluded(): stdout = flexmock() stderr = flexmock() process = flexmock(stdout=stdout, stderr=stderr) assert module.output_buffer_for_process(process, exclude_stdouts=[flexmock(), stdout]) == stderr def test_output_buffer_for_process_returns_stdout_when_not_excluded(): stdout = flexmock() process = flexmock(stdout=stdout) assert ( module.output_buffer_for_process(process, exclude_stdouts=[flexmock(), flexmock()]) == stdout ) def test_execute_command_calls_full_command(): full_command = ['foo', 'bar'] flexmock(module.os, environ={'a': 'b'}) flexmock(module.subprocess).should_receive('Popen').with_args( full_command, stdin=None, stdout=module.subprocess.PIPE, stderr=module.subprocess.STDOUT, shell=False, env=None, cwd=None, ).and_return(flexmock(stdout=None)).once() flexmock(module).should_receive('log_outputs') output = module.execute_command(full_command) assert output is None def test_execute_command_calls_full_command_with_output_file(): full_command = ['foo', 'bar'] output_file = flexmock(name='test') flexmock(module.os, environ={'a': 'b'}) flexmock(module.subprocess).should_receive('Popen').with_args( full_command, 
stdin=None, stdout=output_file, stderr=module.subprocess.PIPE, shell=False, env=None, cwd=None, ).and_return(flexmock(stderr=None)).once() flexmock(module).should_receive('log_outputs') output = module.execute_command(full_command, output_file=output_file) assert output is None def test_execute_command_calls_full_command_without_capturing_output(): full_command = ['foo', 'bar'] flexmock(module.os, environ={'a': 'b'}) flexmock(module.subprocess).should_receive('Popen').with_args( full_command, stdin=None, stdout=None, stderr=None, shell=False, env=None, cwd=None ).and_return(flexmock(wait=lambda: 0)).once() flexmock(module).should_receive('exit_code_indicates_error').and_return(False) flexmock(module).should_receive('log_outputs') output = module.execute_command(full_command, output_file=module.DO_NOT_CAPTURE) assert output is None def test_execute_command_calls_full_command_with_input_file(): full_command = ['foo', 'bar'] input_file = flexmock(name='test') flexmock(module.os, environ={'a': 'b'}) flexmock(module.subprocess).should_receive('Popen').with_args( full_command, stdin=input_file, stdout=module.subprocess.PIPE, stderr=module.subprocess.STDOUT, shell=False, env=None, cwd=None, ).and_return(flexmock(stdout=None)).once() flexmock(module).should_receive('log_outputs') output = module.execute_command(full_command, input_file=input_file) assert output is None def test_execute_command_calls_full_command_with_shell(): full_command = ['foo', 'bar'] flexmock(module.os, environ={'a': 'b'}) flexmock(module.subprocess).should_receive('Popen').with_args( ' '.join(full_command), stdin=None, stdout=module.subprocess.PIPE, stderr=module.subprocess.STDOUT, shell=True, env=None, cwd=None, ).and_return(flexmock(stdout=None)).once() flexmock(module).should_receive('log_outputs') output = module.execute_command(full_command, shell=True) assert output is None def test_execute_command_calls_full_command_with_extra_environment(): full_command = ['foo', 'bar'] flexmock(module.os, 
environ={'a': 'b'}) flexmock(module.subprocess).should_receive('Popen').with_args( full_command, stdin=None, stdout=module.subprocess.PIPE, stderr=module.subprocess.STDOUT, shell=False, env={'a': 'b', 'c': 'd'}, cwd=None, ).and_return(flexmock(stdout=None)).once() flexmock(module).should_receive('log_outputs') output = module.execute_command(full_command, extra_environment={'c': 'd'}) assert output is None def test_execute_command_calls_full_command_with_working_directory(): full_command = ['foo', 'bar'] flexmock(module.os, environ={'a': 'b'}) flexmock(module.subprocess).should_receive('Popen').with_args( full_command, stdin=None, stdout=module.subprocess.PIPE, stderr=module.subprocess.STDOUT, shell=False, env=None, cwd='/working', ).and_return(flexmock(stdout=None)).once() flexmock(module).should_receive('log_outputs') output = module.execute_command(full_command, working_directory='/working') assert output is None def test_execute_command_without_run_to_completion_returns_process(): full_command = ['foo', 'bar'] process = flexmock() flexmock(module.os, environ={'a': 'b'}) flexmock(module.subprocess).should_receive('Popen').with_args( full_command, stdin=None, stdout=module.subprocess.PIPE, stderr=module.subprocess.STDOUT, shell=False, env=None, cwd=None, ).and_return(process).once() flexmock(module).should_receive('log_outputs') assert module.execute_command(full_command, run_to_completion=False) == process def test_execute_command_and_capture_output_returns_stdout(): full_command = ['foo', 'bar'] expected_output = '[]' flexmock(module.os, environ={'a': 'b'}) flexmock(module.subprocess).should_receive('check_output').with_args( full_command, stderr=None, shell=False, env=None, cwd=None ).and_return(flexmock(decode=lambda: expected_output)).once() output = module.execute_command_and_capture_output(full_command) assert output == expected_output def test_execute_command_and_capture_output_with_capture_stderr_returns_stderr(): full_command = ['foo', 'bar'] 
expected_output = '[]' flexmock(module.os, environ={'a': 'b'}) flexmock(module.subprocess).should_receive('check_output').with_args( full_command, stderr=module.subprocess.STDOUT, shell=False, env=None, cwd=None ).and_return(flexmock(decode=lambda: expected_output)).once() output = module.execute_command_and_capture_output(full_command, capture_stderr=True) assert output == expected_output def test_execute_command_and_capture_output_returns_output_with_shell(): full_command = ['foo', 'bar'] expected_output = '[]' flexmock(module.os, environ={'a': 'b'}) flexmock(module.subprocess).should_receive('check_output').with_args( 'foo bar', stderr=None, shell=True, env=None, cwd=None ).and_return(flexmock(decode=lambda: expected_output)).once() output = module.execute_command_and_capture_output(full_command, shell=True) assert output == expected_output def test_execute_command_and_capture_output_returns_output_with_extra_environment(): full_command = ['foo', 'bar'] expected_output = '[]' flexmock(module.os, environ={'a': 'b'}) flexmock(module.subprocess).should_receive('check_output').with_args( full_command, stderr=None, shell=False, env={'a': 'b', 'c': 'd'}, cwd=None, ).and_return(flexmock(decode=lambda: expected_output)).once() output = module.execute_command_and_capture_output( full_command, shell=False, extra_environment={'c': 'd'} ) assert output == expected_output def test_execute_command_and_capture_output_returns_output_with_working_directory(): full_command = ['foo', 'bar'] expected_output = '[]' flexmock(module.os, environ={'a': 'b'}) flexmock(module.subprocess).should_receive('check_output').with_args( full_command, stderr=None, shell=False, env=None, cwd='/working' ).and_return(flexmock(decode=lambda: expected_output)).once() output = module.execute_command_and_capture_output( full_command, shell=False, working_directory='/working' ) assert output == expected_output def test_execute_command_with_processes_calls_full_command(): full_command = ['foo', 'bar'] 
processes = (flexmock(),) flexmock(module.os, environ={'a': 'b'}) flexmock(module.subprocess).should_receive('Popen').with_args( full_command, stdin=None, stdout=module.subprocess.PIPE, stderr=module.subprocess.STDOUT, shell=False, env=None, cwd=None, ).and_return(flexmock(stdout=None)).once() flexmock(module).should_receive('log_outputs') output = module.execute_command_with_processes(full_command, processes) assert output is None def test_execute_command_with_processes_returns_output_with_output_log_level_none(): full_command = ['foo', 'bar'] processes = (flexmock(),) flexmock(module.os, environ={'a': 'b'}) process = flexmock(stdout=None) flexmock(module.subprocess).should_receive('Popen').with_args( full_command, stdin=None, stdout=module.subprocess.PIPE, stderr=module.subprocess.STDOUT, shell=False, env=None, cwd=None, ).and_return(process).once() flexmock(module).should_receive('log_outputs').and_return({process: 'out'}) output = module.execute_command_with_processes(full_command, processes, output_log_level=None) assert output == 'out' def test_execute_command_with_processes_calls_full_command_with_output_file(): full_command = ['foo', 'bar'] processes = (flexmock(),) output_file = flexmock(name='test') flexmock(module.os, environ={'a': 'b'}) flexmock(module.subprocess).should_receive('Popen').with_args( full_command, stdin=None, stdout=output_file, stderr=module.subprocess.PIPE, shell=False, env=None, cwd=None, ).and_return(flexmock(stderr=None)).once() flexmock(module).should_receive('log_outputs') output = module.execute_command_with_processes(full_command, processes, output_file=output_file) assert output is None def test_execute_command_with_processes_calls_full_command_without_capturing_output(): full_command = ['foo', 'bar'] processes = (flexmock(),) flexmock(module.os, environ={'a': 'b'}) flexmock(module.subprocess).should_receive('Popen').with_args( full_command, stdin=None, stdout=None, stderr=None, shell=False, env=None, cwd=None 
).and_return(flexmock(wait=lambda: 0)).once() flexmock(module).should_receive('exit_code_indicates_error').and_return(False) flexmock(module).should_receive('log_outputs') output = module.execute_command_with_processes( full_command, processes, output_file=module.DO_NOT_CAPTURE ) assert output is None def test_execute_command_with_processes_calls_full_command_with_input_file(): full_command = ['foo', 'bar'] processes = (flexmock(),) input_file = flexmock(name='test') flexmock(module.os, environ={'a': 'b'}) flexmock(module.subprocess).should_receive('Popen').with_args( full_command, stdin=input_file, stdout=module.subprocess.PIPE, stderr=module.subprocess.STDOUT, shell=False, env=None, cwd=None, ).and_return(flexmock(stdout=None)).once() flexmock(module).should_receive('log_outputs') output = module.execute_command_with_processes(full_command, processes, input_file=input_file) assert output is None def test_execute_command_with_processes_calls_full_command_with_shell(): full_command = ['foo', 'bar'] processes = (flexmock(),) flexmock(module.os, environ={'a': 'b'}) flexmock(module.subprocess).should_receive('Popen').with_args( ' '.join(full_command), stdin=None, stdout=module.subprocess.PIPE, stderr=module.subprocess.STDOUT, shell=True, env=None, cwd=None, ).and_return(flexmock(stdout=None)).once() flexmock(module).should_receive('log_outputs') output = module.execute_command_with_processes(full_command, processes, shell=True) assert output is None def test_execute_command_with_processes_calls_full_command_with_extra_environment(): full_command = ['foo', 'bar'] processes = (flexmock(),) flexmock(module.os, environ={'a': 'b'}) flexmock(module.subprocess).should_receive('Popen').with_args( full_command, stdin=None, stdout=module.subprocess.PIPE, stderr=module.subprocess.STDOUT, shell=False, env={'a': 'b', 'c': 'd'}, cwd=None, ).and_return(flexmock(stdout=None)).once() flexmock(module).should_receive('log_outputs') output = module.execute_command_with_processes( 
full_command, processes, extra_environment={'c': 'd'} ) assert output is None def test_execute_command_with_processes_calls_full_command_with_working_directory(): full_command = ['foo', 'bar'] processes = (flexmock(),) flexmock(module.os, environ={'a': 'b'}) flexmock(module.subprocess).should_receive('Popen').with_args( full_command, stdin=None, stdout=module.subprocess.PIPE, stderr=module.subprocess.STDOUT, shell=False, env=None, cwd='/working', ).and_return(flexmock(stdout=None)).once() flexmock(module).should_receive('log_outputs') output = module.execute_command_with_processes( full_command, processes, working_directory='/working' ) assert output is None def test_execute_command_with_processes_kills_processes_on_error(): full_command = ['foo', 'bar'] process = flexmock(stdout=flexmock(read=lambda count: None)) process.should_receive('poll') process.should_receive('kill').once() processes = (process,) flexmock(module.os, environ={'a': 'b'}) flexmock(module.subprocess).should_receive('Popen').with_args( full_command, stdin=None, stdout=module.subprocess.PIPE, stderr=module.subprocess.STDOUT, shell=False, env=None, cwd=None, ).and_raise(subprocess.CalledProcessError(1, full_command, 'error')).once() flexmock(module).should_receive('log_outputs').never() with pytest.raises(subprocess.CalledProcessError): module.execute_command_with_processes(full_command, processes) borgmatic-1.7.9/tests/unit/test_logger.py000066400000000000000000000323331440467744700205350ustar00rootroot00000000000000import logging import sys import pytest from flexmock import flexmock from borgmatic import logger as module @pytest.mark.parametrize('bool_val', (True, 'yes', 'on', '1', 'true', 'True', 1)) def test_to_bool_parses_true_values(bool_val): assert module.to_bool(bool_val) @pytest.mark.parametrize('bool_val', (False, 'no', 'off', '0', 'false', 'False', 0)) def test_to_bool_parses_false_values(bool_val): assert not module.to_bool(bool_val) def test_to_bool_passes_none_through(): assert 
module.to_bool(None) is None def test_interactive_console_false_when_not_isatty(capsys): with capsys.disabled(): flexmock(module.sys.stderr).should_receive('isatty').and_return(False) assert module.interactive_console() is False def test_interactive_console_false_when_TERM_is_dumb(capsys): with capsys.disabled(): flexmock(module.sys.stderr).should_receive('isatty').and_return(True) flexmock(module.os.environ).should_receive('get').with_args('TERM').and_return('dumb') assert module.interactive_console() is False def test_interactive_console_true_when_isatty_and_TERM_is_not_dumb(capsys): with capsys.disabled(): flexmock(module.sys.stderr).should_receive('isatty').and_return(True) flexmock(module.os.environ).should_receive('get').with_args('TERM').and_return('smart') assert module.interactive_console() is True def test_should_do_markup_respects_no_color_value(): assert module.should_do_markup(no_color=True, configs={}) is False def test_should_do_markup_respects_config_value(): assert ( module.should_do_markup(no_color=False, configs={'foo.yaml': {'output': {'color': False}}}) is False ) def test_should_do_markup_prefers_any_false_config_value(): assert ( module.should_do_markup( no_color=False, configs={ 'foo.yaml': {'output': {'color': True}}, 'bar.yaml': {'output': {'color': False}}, }, ) is False ) def test_should_do_markup_respects_PY_COLORS_environment_variable(): flexmock(module.os.environ).should_receive('get').and_return('True') flexmock(module).should_receive('to_bool').and_return(True) assert module.should_do_markup(no_color=False, configs={}) is True def test_should_do_markup_prefers_no_color_value_to_config_value(): assert ( module.should_do_markup(no_color=True, configs={'foo.yaml': {'output': {'color': True}}}) is False ) def test_should_do_markup_prefers_config_value_to_PY_COLORS(): flexmock(module.os.environ).should_receive('get').and_return('True') flexmock(module).should_receive('to_bool').and_return(True) assert ( 
module.should_do_markup(no_color=False, configs={'foo.yaml': {'output': {'color': False}}}) is False ) def test_should_do_markup_prefers_no_color_value_to_PY_COLORS(): flexmock(module.os.environ).should_receive('get').and_return('True') flexmock(module).should_receive('to_bool').and_return(True) assert module.should_do_markup(no_color=True, configs={}) is False def test_should_do_markup_respects_interactive_console_value(): flexmock(module.os.environ).should_receive('get').and_return(None) flexmock(module).should_receive('interactive_console').and_return(True) assert module.should_do_markup(no_color=False, configs={}) is True def test_should_do_markup_prefers_PY_COLORS_to_interactive_console_value(): flexmock(module.os.environ).should_receive('get').and_return('True') flexmock(module).should_receive('to_bool').and_return(True) flexmock(module).should_receive('interactive_console').and_return(False) assert module.should_do_markup(no_color=False, configs={}) is True def test_multi_stream_handler_logs_to_handler_for_log_level(): error_handler = flexmock() error_handler.should_receive('emit').once() info_handler = flexmock() multi_handler = module.Multi_stream_handler( {module.logging.ERROR: error_handler, module.logging.INFO: info_handler} ) multi_handler.emit(flexmock(levelno=module.logging.ERROR)) def test_console_color_formatter_format_includes_log_message(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.ANSWER plain_message = 'uh oh' record = flexmock(levelno=logging.CRITICAL, msg=plain_message) colored_message = module.Console_color_formatter().format(record) assert colored_message != plain_message assert plain_message in colored_message def test_color_text_does_not_raise(): module.color_text(module.colorama.Fore.RED, 'hi') def test_color_text_without_color_does_not_raise(): module.color_text(None, 'hi') def test_add_logging_level_adds_level_name_and_sets_global_attributes_and_methods(): logger = flexmock() 
flexmock(module.logging).should_receive('getLoggerClass').and_return(logger) flexmock(module.logging).should_receive('addLevelName').with_args(99, 'PLAID') builtins = flexmock(sys.modules['builtins']) builtins.should_call('setattr') builtins.should_receive('setattr').with_args(module.logging, 'PLAID', 99).once() builtins.should_receive('setattr').with_args(logger, 'plaid', object).once() builtins.should_receive('setattr').with_args(logging, 'plaid', object).once() module.add_logging_level('PLAID', 99) def test_add_logging_level_skips_global_setting_if_already_set(): logger = flexmock() flexmock(module.logging).should_receive('getLoggerClass').and_return(logger) flexmock(module.logging).PLAID = 99 flexmock(logger).plaid = flexmock() flexmock(logging).plaid = flexmock() flexmock(module.logging).should_receive('addLevelName').never() builtins = flexmock(sys.modules['builtins']) builtins.should_call('setattr') builtins.should_receive('setattr').with_args(module.logging, 'PLAID', 99).never() builtins.should_receive('setattr').with_args(logger, 'plaid', object).never() builtins.should_receive('setattr').with_args(logging, 'plaid', object).never() module.add_logging_level('PLAID', 99) def test_configure_logging_probes_for_log_socket_on_linux(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.ANSWER flexmock(module).should_receive('Multi_stream_handler').and_return( flexmock(setFormatter=lambda formatter: None, setLevel=lambda level: None) ) flexmock(module).should_receive('Console_color_formatter') flexmock(module).should_receive('interactive_console').and_return(False) flexmock(module.logging).should_receive('basicConfig').with_args( level=logging.INFO, handlers=tuple ) flexmock(module.os.path).should_receive('exists').with_args('/dev/log').and_return(True) syslog_handler = logging.handlers.SysLogHandler() flexmock(module.logging.handlers).should_receive('SysLogHandler').with_args( address='/dev/log' 
).and_return(syslog_handler).once() module.configure_logging(logging.INFO) def test_configure_logging_probes_for_log_socket_on_macos(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.ANSWER flexmock(module).should_receive('Multi_stream_handler').and_return( flexmock(setFormatter=lambda formatter: None, setLevel=lambda level: None) ) flexmock(module).should_receive('Console_color_formatter') flexmock(module).should_receive('interactive_console').and_return(False) flexmock(module.logging).should_receive('basicConfig').with_args( level=logging.INFO, handlers=tuple ) flexmock(module.os.path).should_receive('exists').with_args('/dev/log').and_return(False) flexmock(module.os.path).should_receive('exists').with_args('/var/run/syslog').and_return(True) syslog_handler = logging.handlers.SysLogHandler() flexmock(module.logging.handlers).should_receive('SysLogHandler').with_args( address='/var/run/syslog' ).and_return(syslog_handler).once() module.configure_logging(logging.INFO) def test_configure_logging_probes_for_log_socket_on_freebsd(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.ANSWER flexmock(module).should_receive('Multi_stream_handler').and_return( flexmock(setFormatter=lambda formatter: None, setLevel=lambda level: None) ) flexmock(module).should_receive('Console_color_formatter') flexmock(module).should_receive('interactive_console').and_return(False) flexmock(module.logging).should_receive('basicConfig').with_args( level=logging.INFO, handlers=tuple ) flexmock(module.os.path).should_receive('exists').with_args('/dev/log').and_return(False) flexmock(module.os.path).should_receive('exists').with_args('/var/run/syslog').and_return(False) flexmock(module.os.path).should_receive('exists').with_args('/var/run/log').and_return(True) syslog_handler = logging.handlers.SysLogHandler() flexmock(module.logging.handlers).should_receive('SysLogHandler').with_args( 
address='/var/run/log' ).and_return(syslog_handler).once() module.configure_logging(logging.INFO) def test_configure_logging_sets_global_logger_to_most_verbose_log_level(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.ANSWER flexmock(module).should_receive('Multi_stream_handler').and_return( flexmock(setFormatter=lambda formatter: None, setLevel=lambda level: None) ) flexmock(module).should_receive('Console_color_formatter') flexmock(module.logging).should_receive('basicConfig').with_args( level=logging.DEBUG, handlers=tuple ).once() flexmock(module.os.path).should_receive('exists').and_return(False) module.configure_logging(console_log_level=logging.INFO, syslog_log_level=logging.DEBUG) def test_configure_logging_skips_syslog_if_not_found(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.ANSWER flexmock(module).should_receive('Multi_stream_handler').and_return( flexmock(setFormatter=lambda formatter: None, setLevel=lambda level: None) ) flexmock(module).should_receive('Console_color_formatter') flexmock(module.logging).should_receive('basicConfig').with_args( level=logging.INFO, handlers=tuple ) flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module.logging.handlers).should_receive('SysLogHandler').never() module.configure_logging(console_log_level=logging.INFO) def test_configure_logging_skips_syslog_if_interactive_console(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.ANSWER flexmock(module).should_receive('Multi_stream_handler').and_return( flexmock(setFormatter=lambda formatter: None, setLevel=lambda level: None) ) flexmock(module).should_receive('Console_color_formatter') flexmock(module).should_receive('interactive_console').and_return(True) flexmock(module.logging).should_receive('basicConfig').with_args( level=logging.INFO, handlers=tuple ) 
flexmock(module.os.path).should_receive('exists').with_args('/dev/log').and_return(True) flexmock(module.logging.handlers).should_receive('SysLogHandler').never() module.configure_logging(console_log_level=logging.INFO) def test_configure_logging_to_logfile_instead_of_syslog(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.ANSWER flexmock(module).should_receive('Multi_stream_handler').and_return( flexmock(setFormatter=lambda formatter: None, setLevel=lambda level: None) ) # syslog skipped in non-interactive console if --log-file argument provided flexmock(module).should_receive('interactive_console').and_return(False) flexmock(module.logging).should_receive('basicConfig').with_args( level=logging.DEBUG, handlers=tuple ) flexmock(module.os.path).should_receive('exists').with_args('/dev/log').and_return(True) flexmock(module.logging.handlers).should_receive('SysLogHandler').never() file_handler = logging.handlers.WatchedFileHandler('/tmp/logfile') flexmock(module.logging.handlers).should_receive('WatchedFileHandler').with_args( '/tmp/logfile' ).and_return(file_handler).once() module.configure_logging( console_log_level=logging.INFO, log_file_log_level=logging.DEBUG, log_file='/tmp/logfile' ) def test_configure_logging_skips_logfile_if_argument_is_none(): flexmock(module).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.ANSWER flexmock(module).should_receive('Multi_stream_handler').and_return( flexmock(setFormatter=lambda formatter: None, setLevel=lambda level: None) ) # No WatchedFileHandler added if argument --log-file is None flexmock(module).should_receive('interactive_console').and_return(False) flexmock(module.logging).should_receive('basicConfig').with_args( level=logging.INFO, handlers=tuple ) flexmock(module.os.path).should_receive('exists').and_return(False) flexmock(module.logging.handlers).should_receive('WatchedFileHandler').never() 
module.configure_logging(console_log_level=logging.INFO, log_file=None) borgmatic-1.7.9/tests/unit/test_signals.py000066400000000000000000000025751440467744700207230ustar00rootroot00000000000000from flexmock import flexmock from borgmatic import signals as module def test_handle_signal_forwards_to_subprocesses(): signal_number = 100 frame = flexmock(f_back=flexmock(f_code=flexmock(co_name='something'))) process_group = flexmock() flexmock(module.os).should_receive('getpgrp').and_return(process_group) flexmock(module.os).should_receive('killpg').with_args(process_group, signal_number).once() module.handle_signal(signal_number, frame) def test_handle_signal_bails_on_recursion(): signal_number = 100 frame = flexmock(f_back=flexmock(f_code=flexmock(co_name='handle_signal'))) flexmock(module.os).should_receive('getpgrp').never() flexmock(module.os).should_receive('killpg').never() module.handle_signal(signal_number, frame) def test_handle_signal_exits_on_sigterm(): signal_number = module.signal.SIGTERM frame = flexmock(f_back=flexmock(f_code=flexmock(co_name='something'))) flexmock(module.os).should_receive('getpgrp').and_return(flexmock) flexmock(module.os).should_receive('killpg') flexmock(module.sys).should_receive('exit').with_args( module.EXIT_CODE_FROM_SIGNAL + signal_number ).once() module.handle_signal(signal_number, frame) def test_configure_signals_installs_signal_handlers(): flexmock(module.signal).should_receive('signal').at_least().once() module.configure_signals() borgmatic-1.7.9/tests/unit/test_verbosity.py000066400000000000000000000023611440467744700213020ustar00rootroot00000000000000import logging from flexmock import flexmock from borgmatic import verbosity as module def insert_logging_mock(log_level): ''' Mock the isEnabledFor from Python logging. 
''' logging = flexmock(module.logging.Logger) logging.should_receive('isEnabledFor').replace_with(lambda level: level >= log_level) logging.should_receive('getEffectiveLevel').replace_with(lambda: log_level) def test_verbosity_to_log_level_maps_known_verbosity_to_log_level(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER assert module.verbosity_to_log_level(module.VERBOSITY_ERROR) == logging.ERROR assert module.verbosity_to_log_level(module.VERBOSITY_ANSWER) == module.borgmatic.logger.ANSWER assert module.verbosity_to_log_level(module.VERBOSITY_SOME) == logging.INFO assert module.verbosity_to_log_level(module.VERBOSITY_LOTS) == logging.DEBUG def test_verbosity_to_log_level_maps_unknown_verbosity_to_warning_level(): flexmock(module.borgmatic.logger).should_receive('add_custom_log_levels') flexmock(module.logging).ANSWER = module.borgmatic.logger.ANSWER assert module.verbosity_to_log_level('my pants') == logging.WARNING borgmatic-1.7.9/tox.ini000066400000000000000000000013071440467744700150340ustar00rootroot00000000000000[tox] envlist = py37,py38,py39,py310,py311 skip_missing_interpreters = True skipsdist = True minversion = 3.14.1 [testenv] usedevelop = True deps = -rtest_requirements.txt whitelist_externals = find sh passenv = COVERAGE_FILE commands = pytest {posargs} py38,py39,py310,py311: black --check . isort --check-only --settings-path setup.cfg . flake8 borgmatic tests [testenv:black] commands = black {posargs} . [testenv:test] commands = pytest {posargs} [testenv:end-to-end] deps = -rtest_requirements.txt passenv = COVERAGE_FILE commands = pytest {posargs} --no-cov tests/end-to-end [testenv:isort] deps = {[testenv]deps} commands = isort --settings-path setup.cfg .