pax_global_header00006660000000000000000000000064117623757720014533gustar00rootroot0000000000000052 comment=010941e12e9caaacf312cdb1a8149a1ec2c3e5a6 spaetz-offlineimap-c9e9690/000077500000000000000000000000001176237577200157025ustar00rootroot00000000000000spaetz-offlineimap-c9e9690/.gitignore000066400000000000000000000001371176237577200176730ustar00rootroot00000000000000# Generated files /docs/dev-doc/ /build/ *.pyc offlineimap.1 # backups .*.swp .*.swo *.html *~ spaetz-offlineimap-c9e9690/COPYING000066400000000000000000000442361176237577200167460ustar00rootroot00000000000000# This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. GNU GENERAL PUBLIC LICENSE Version 2, June 1991 Copyright (C) 1989, 1991 Free Software Foundation, Inc. 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Library General Public License instead.) You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software. Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. Finally, any free program is threatened constantly by software patents. 
We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. The precise terms and conditions for copying, distribution and modification follow. GNU GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you". Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does. 1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License. c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.) These requirements apply to the modified work as a whole. 
If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program. In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following: a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.) The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code. 4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 5. 
You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it. 6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. 7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. 
If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation. 10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA Also add information on how to contact you by electronic and paper mail. 
If the program is interactive, make it output a short notice like this when it starts in an interactive mode: Gnomovision version 69, Copyright (C) year name of author Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program. You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the program `Gnomovision' (which makes passes at compilers) written by James Hacker. , 1 April 1989 Ty Coon, President of Vice This General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Library General Public License instead of this License. spaetz-offlineimap-c9e9690/Changelog.maint.rst000066400000000000000000000011771176237577200214400ustar00rootroot00000000000000========= ChangeLog ========= :website: http://offlineimap.org This is the Changelog of the maintenance branch. **NOTE FROM THE MAINTAINER:** Contributors should use the `WIP` section in Changelog.draft.rst in order to add changes they are working on. I will use it to make the new changelog entry on releases. And because I'm lazy, it will also be used as a draft for the releases announces. OfflineIMAP v6.3.2.1 (2011-03-23) ================================= Bug Fixes --------- * Sanity checks for SSL cacertfile configuration. * Fix regression (UIBase is no more). * Make profiling mode really enforce single-threading. spaetz-offlineimap-c9e9690/Changelog.rst000066400000000000000000000604441176237577200203330ustar00rootroot00000000000000========= ChangeLog ========= :website: http://offlineimap.org WIP (add new stuff for the next release) ======================================== OfflineIMAP v6.5.4 (2012-06-02) ================================= * bump bundled imaplib2 library 2.29 --> 2.33 * Actually perform the SSL fingerprint check (reported by J. Cook) * Curses UI, don't use colors after we shut down curses already (C.Höger) * Document that '%' needs encoding as '%%' in *.conf * Fix crash when IMAP.quickchanged() led to an Error (reported by sharat87) * Implement the createfolders setting to disable folder propagation (see docs) OfflineIMAP v6.5.3.1 (2012-04-03) ================================= * Don't fail if no dry-run setting exists in offlineimap.conf (introduced in 6.5.3) OfflineIMAP v6.5.3 (2012-04-02) =============================== * --dry-run mode protects us from performing any actual action. It will not precisely give the exact information what will happen. If e.g. it would need to create a folder, it merely outputs "Would create folder X", but not how many and which mails it would transfer. * internal code changes to prepare for Python3 * Improve user documentation of nametrans/folderfilter * Fixed some cases where invalid nametrans rules were not caught and we would not propagate local folders to the remote repository. 
(now tested in test03) * Revert "* Slight performance enhancement uploading mails to an IMAP server in the common case." It might have led to instabilities. * Revamped documentation structure. `make` in the `docs` dir or `make doc` in the root dir will now create the 1) man page and 2) the user documentation using sphinx (requiring python-doctools, and sphinx). The resulting user docs are in `docs/html`. You can also only create the man pages with `make man` in the `docs` dir. * -f command line option only works on the untranslated remote repository folder names now. Previously folderfilters had to match both the local AND remote name which caused unwanted behavior in combination with nametrans rules. Clarify in the help text. * Some better output when using nonsensical configuration settings * Improve compatability of the curses UI with python 2.6 OfflineIMAP v6.5.2.1 (2012-04-04) ===================================== * Fix python2.6 compatibility with the TTYUI backend (crash) * Fix TTYUI regression from 6.5.2 in refresh loop (crash) * Fix crashes related to UIDVALIDITY returning "None" * Beginning of a test suite. So far there is only one test. Configure test/credentials.conf and invoke with "python setup.py test" * Make folders containing quotes work rather than crashing (reported by Mark Eichin) * Improve delete msg performance with SQLITE backend * Enforce basic UI when using the --info switch * Remove the Gmail "realdelete" option, as it could lead to potential data loss. OfflineIMAP v6.5.2 (2012-01-17) =============================== * Gmail "realdelete" option is considered harmful and has the potential for data loss. Analysis at http://article.gmane.org/gmane.mail.imap.offlineimap.general/5265 Warnings were added to offlineimap.conf * Rather than write out the nametrans'lated folder names for mbnames, we now write out the local untransformed box names. This is generally what we want. This became relevant since we support nametrans rules on the local side since only a short time. Reported by Paul Collignan. * Some sanity checks and improved error messages. * Revert 6.5.1.1 change to use public imaplib2 function, it was reported to not always work. * Don't fail when ~/netrc is not readable by us. * Don't emit noisy regular sleeping announcements in Basic UI. OfflineIMAP v6.5.1.2 (2012-01-07) - "Baby steps" ================================================ Smallish bug fixes that deserve to be put out. * Fix possible crash during --info run * Fix reading in Maildirs, where we would attempt to create empty directories on REMOTE. * Do not attempt to sync lower case custom Maildir flags. We do not support them (yet) (this prevents many scary bogus sync messages) * Add filter information to the filter list in --info output OfflineIMAP v6.5.1.1 (2012-01-07) - "Das machine control is nicht fur gerfinger-poken und mittengrabben" ================================================================================================================== Blinkenlights UI 6.5.0 regression fixes only. * Sleep led to crash ('abort_signal' not existing) * Make exit via 'q' key work again cleanly OfflineIMAP v6.5.1 (2012-01-07) - "Quest for stability" ======================================================= * Fixed Maildir regression "flagmatchre" not found. (regressed in 6.5.0) * Have console output go by default to STDOUT and not STDERR (regression in 6.5.0) * Fixed MachineUI to urlencode() output lines again, rather than outputting multi-line items. It's ugly as hell, but it had been that way for years. 
* Remove the old global locking system. We lock only the accounts that we currently sync, so you can invoke OfflineImap multiple times now as long as you sync different accounts. This system is compatible with all releases >= 6.4.0, so don't run older releases simultanous to this one. OfflineIMAP v6.5.0 (2012-01-06) =============================== This is a CRITICAL bug fix release for everyone who is on the 6.4.x series. Please upgrade to avoid potential data loss! The version has been bumped to 6.5.0, please let everyone know that the 6.4.x series is problematic. * Uploading multiple emails to an IMAP server would lead to wrong UIDs being returned (ie the same for all), which confused offlineimap and led to recurrent upload/download loops and inconsistencies in the IMAP<->IMAP uid mapping. * Uploading of Messages from Maildir and IMAP<->IMAP has been made more efficient by renaming files/mapping entries, rather than actually loading and saving the message under a new UID. * Fix regression that broke MachineUI OfflineIMAP v6.4.4 (2012-01-06) =============================== This is a bugfix release, fixing regressions occurring in or since 6.4.0. * Fix the missing folder error that occured when a new remote folder was detected (IMAP<->Maildir) * Possibly fixed bug that prevented us from ever re-reading Maildir folders, so flag changes and deletions were not detected when running in a refresh loop. This is a regression that was introduced in about 6.4.0. * Never mangle maildir file names when using nonstandard Maildir flags (such as 'a'), note that they will still be deleted as they are not supported in the sync to an IMAP server. OfflineIMAP v6.4.3 (2012-01-04) =============================== New Features ------------ * add a --info command line switch that outputs useful information about the server and the configuration for all enabled accounts. Changes ------- * Reworked logging which was reported to e.g. not flush output to files often enough. User-visible changes: a) console output goes to stderr (for now). b) file output has timestamps and looks identical in the basic and ttyui UIs. c) File output should be flushed after logging by default (do report if not). * Bumped bundled imaplib2 to release 2.29 * Make ctrl-c exit cleanly rather aborting brutally (which could leave around temporary files, half-written cache files, etc). Exiting on SIGTERM and CTRL-C can take a little longer, but will be clean. OfflineIMAP v6.4.2 (2011-12-01) =============================== * IMAP<->IMAP sync with a readonly local IMAP repository failed with a rather mysterious "TypeError: expected a character buffer object" error. Fix this my retrieving the list of folders early enough even for readonly repositories. * Fix regression from 6.4.0. When using local Maildirs with "/" as a folder separator, all folder names would get a trailing slash appended, which is plain wrong. OfflineIMAP v6.4.1 (2011-11-17) =============================== Changes ------- * Indicate progress when copying many messages (slightly change log format) * Output how long an account sync took (min:sec). Bug Fixes --------- * Syncing multiple accounts in single-threaded mode would fail as we try to "register" a thread as belonging to two accounts which was fatal. Make it non-fatal (it can be legitimate). * New folders on the remote would be skipped on the very sync run they are created and only by synced in subsequent runs. Fixed. 
* a readonly parameter to select() was not always treated correctly, which could result in some folders being opened read-only when we really needed read-write. OfflineIMAP v6.4.0 (2011-09-29) =============================== This is the first stable release to support the forward-compatible per-account locks and remote folder creation that has been introduced in the 6.3.5 series. * Various regression and bug fixes from the last couple of RCs OfflineIMAP v6.3.5-rc3 (2011-09-21) =================================== Changes ------- * Refresh server capabilities after login, so we know that Gmail supports UIDPLUS (it only announces that after login, not before). This prevents us from adding custom headers to Gmail uploads. Bug Fixes --------- * Fix the creation of folders on remote repositories, which was still botched on rc2. OfflineIMAP v6.3.5-rc2 (2011-09-19) =================================== New Features ------------ * Implement per-account locking, so that it will possible to sync different accounts at the same time. The old global lock is still in place for backward compatibility reasons (to be able to run old and new versions of OfflineImap concurrently) and will be removed in the future. Starting with this version, OfflineImap will be forward-compatible with the per-account locking style. * Implement RFC 2595 LOGINDISABLED. Warn the user and abort when we attempt a plaintext login but the server has explicitly disabled plaintext logins rather than crashing. * Folders will now also be automatically created on the REMOTE side of an account if they exist on the local side. Use the folderfilters setting on the local side to prevent some folders from migrating to the remote side. Also, if you have a nametrans setting on the remote repository, you might need a nametrans setting on the local repository that leads to the original name (reverse nametrans). Changes ------- * Documentation improvements concerning 'restoreatime' and some code cleanup * Maildir repositories now also respond to folderfilter= configurations. Bug Fixes --------- * New emails are not created with "-rwxr-xr-x" but as "-rw-r--r--" anymore, fixing a regression in 6.3.4. OfflineIMAP v6.3.5-rc1 (2011-09-12) =================================== Notes ----- Idle feature and SQLite backend leave the experimental stage! ,-) New Features ------------ * When a message upload/download fails, we do not abort the whole folder synchronization, but only skip that message, informing the user at the end of the sync run. * If you connect via ssl and 'cert_fingerprint' is configured, we check that the server certificate is actually known and identical by comparing the stored sha1 fingerprint with the current one. Changes ------- * Refactor our IMAPServer class. Background work without user-visible changes. * Remove the configurability of the Blinkenlights statuschar. It cluttered the main configuration file for little gain. * Updated bundled imaplib2 to version 2.28. Bug Fixes --------- * We protect more robustly against asking for inexistent messages from the IMAP server, when someone else deletes or moves messages while we sync. * Selecting inexistent folders specified in folderincludes now throws nice errors and continues to sync with all other folders rather than exiting offlineimap with a traceback. OfflineIMAP v6.3.4 (2011-08-10) =============================== Notes ----- Here we are. A nice release since v6.3.3, I think. Changes ------- * Handle when UID can't be found on saved messages. 
OfflineIMAP v6.3.4-rc4 (2011-07-27) =================================== Notes ----- There is nothing exciting in this release. This is somewhat expected due to the late merge on -rc3. New Features ------------ * Support maildir for Windows. Changes ------- * Manual improved. OfflineIMAP v6.3.4-rc3 (2011-07-07) =================================== Notes ----- Here is a surprising release. :-) As expected we have a lot bug fixes in this round (see git log for details), including a fix for a bug we had for ages (details below) which is a very good news. What makes this cycle so unusual is that I merged a feature to support StartTLS automatically (thanks Sebastian!). Another very good news. We usually don't do much changes so late in a cycle. Now, things are highly calming down and I hope a lot of people will test this release. Next one could be the stable! New Features ------------ * Added StartTLS support, it will automatically be used if the server supports it. Bug Fixes --------- * We protect more robustly against asking for inexistent messages from the IMAP server, when someone else deletes or moves messages while we sync. OfflineIMAP v6.3.4-rc2 (2011-06-15) =================================== Notes ----- This was a very active rc1 and we could expect a lot of new fixes for the next release. The most important fix is about a bug that could lead to data loss. Find more information about his bug here: http://permalink.gmane.org/gmane.mail.imap.offlineimap.general/3803 The IDLE support is merged as experimental feature. New Features ------------ * Implement experimental IDLE feature. Changes ------- * Maildirs use less memory while syncing. Bug Fixes --------- * Saving to Maildirs now checks for file existence without race conditions. * A bug in the underlying imap library has been fixed that could potentially lead to data loss if the server interrupted responses with unexpected but legal server status responses. This would mainly occur in folders with many thousands of emails. Upgrading from the previous release is strongly recommended. OfflineIMAP v6.3.4-rc1 (2011-05-16) =================================== Notes ----- Welcome to the v6.3.4 pre-release cycle. Your favorite IMAP tool wins 2 new features which were asked for a long time: * an experimental SQL-based backend for the local cache; * one-way synchronization cabability. Logic synchronization is reviewed and simplified (from 4 to 3 passes) giving improved performance. Lot of work was done to give OfflineIMAP a better code base. Raised errors can now rely on a new error system and should become the default in the coming releases. As usual, we ask our users to test this release as much as possible, especially the SQL backend. Have fun! New Features ------------ * Begin sphinx-based documentation for the code. * Enable 1-way synchronization by settting a [Repository ...] to readonly = True. When e.g. using offlineimap for backup purposes you can thus make sure that no changes in your backup trickle back into the main IMAP server. * Optional: experimental SQLite-based backend for the LocalStatus cache. Plain text remains the default. Changes ------- * Start a enhanced error handling background system. This is designed to not stop a whole sync process on all errors (not much used, yet). * Documentation improvements: the FAQ wins new entries and add a new HACKING file for developers. * Lot of code cleanups. * Reduced our sync logic from 4 passes to 3 passes (integrating upload of "new" and "existing" messages into one function). 
This should result in a slight speedup. * No whitespace is stripped from comma-separated arguments passed via the -f option. * Give more detailed error when encountering a corrupt UID mapping file. Bug Fixes --------- * Drop connection if synchronization failed. This is needed if resuming the system from suspend mode gives a wrong connection. * Fix the offlineimap crash when invoking debug option 'thread'. * Make 'thread' command line option work. OfflineIMAP v6.3.3 (2011-04-24) =============================== Notes ----- Make this last candidate cycle short. It looks like we don't need more tests as most issues were raised and solved in the second round. Also, we have huge work to merge big and expected features into OfflineIMAP. Thanks to all contributors, again. With such a contribution rate, we can release stable faster. I hope it will be confirmed in the longer run! Changes ------- * Improved documentation for querying password. OfflineIMAP v6.3.3-rc3 (2011-04-19) =================================== Notes ----- It's more than a week since the previous release. Most of the issues raised were discussed and fixed since last release. I think we can be glad and confident for the future while the project live his merry life. Changes ------- * The -f option did not work with Folder names with spaces. It works now, use with quoting e.g. -f "INBOX, Deleted Mails". * Improved documentation. * Bump from imaplib2 v2.20 to v2.22. * Code refactoring. Bug Fixes --------- * Fix IMAP4 tunnel with imaplib2. OfflineIMAP v6.3.3-rc2 (2011-04-07) =================================== Notes ----- We are now at the third week of the -rc1 cycle. I think it's welcome to begin the -rc2 cycle. Things are highly calming down in the code even if we had much more feedbacks than usual. Keep going your effort! I'd like to thank reporters who involved in this cycle: - Баталов Григорий - Alexander Skwar - Christoph Höger - dtk - Greg Grossmeier - h2oz7v - Iain Dalton - Pan Tsu - Vincent Beffara - Will Styler (my apologies if I forget somebody) ...and all active developers, of course! The imaplib2 migration looks to go the right way to be definetly released but still needs more tests. So, here we go... Changes ------- * Increase compatability with Gmail servers which claim to not support the UIDPLUS extension but in reality do. Bug Fixes --------- * Fix hang when using Ctrl+C in some cases. OfflineIMAP v6.3.3-rc1 (2011-03-16) =================================== Notes ----- Here is time to begin the tests cycle. If feature topics are sent, I may merge or delay them until the next stable release. Main change comes from the migration from imaplib to imaplib2. It's internal code changes and doesn't impact users. UIDPLUS and subjectAltName for SSL are also great improvements. This release includes a hang fix due to infinite loop. Users seeing OfflineIMAP hang and consuming a lot of CPU are asked to update. That beeing said, this is still an early release candidate you should use for non-critical data only! New Features ------------ * Implement UIDPLUS extension support. OfflineIMAP will now not insert an X-OfflineIMAP header if the mail server supports the UIDPLUS extension. * SSL: support subjectAltName. Changes ------- * Use imaplib2 instead of imaplib. * Makefile use magic to find the version number. * Rework the repository module * Change UI names to Blinkenlights,TTYUI,Basic,Quiet,MachineUI. Old names will still work, but are deprecated. Document that we don't accept a list of UIs anymore. * Reworked the syncing strategy. 
The only user-visible change is that blowing away LocalStatus will not require you to redownload ALL of your mails if you still have the local Maildir. It will simply recreate LocalStatus. * TTYUI ouput improved. * Code cleanups. Bug Fixes --------- * Fix ignoring output while determining the rst2xxx command name to build documentation. * Fix hang because of infinite loop reading EOF. * Allow SSL connections to send keep-alive messages. * Fix regression (UIBase is no more). * Make profiling mode really enforce single-threading * Do not send localized date strings to the IMAP server as it will either ignore or refuse them. OfflineIMAP v6.3.2 (2010-02-21) =============================== Notes ----- First of all I'm really happy to announce our new official `website `_. Most of the work started from the impulse of Philippe LeCavalier with the help of Sebastian Spaeth and other contributors. Thanks to everybody. In this release, we are still touched by the "SSL3 write pending" but I think time was long enough to try to fix it. We have our first entry in the "KNOWN BUG" section of the manual about that. I'm afraid it could impact a lot of users if some distribution package any SSL library not having underlying (still obscure) requirements. Distribution maintainers should be care of it. I hope this release will help us to have more reports. This release will also be the root of our long maintenance support. Other bugs were fixed. Bug Fixes --------- * Fix craches for getglobalui(). * Fix documentation build. * Restore compatibiliy with python 2.5. OfflineIMAP v6.3.2-rc3 (2010-02-06) =================================== Notes ----- We are still touched by the "SSL3 write pending" bug it would be really nice to fix before releasing the coming stable. In the worse case, we'll have to add the first entry in the "KNOWN BUG" section of the manual. I'm afraid it could impact a lot of users if some distribution package any SSL library not having underlying (still obscure) requirements. The best news with this release are the Curse UI fixed and the better reports on errors. In this release I won't merge any patch not fixing a bug or a security issue. More feedbacks on the main issue would be appreciated. Changes ------- * Sample offlineimap.conf states it expects a PEM formatted certificat. * Give better trace information if an error occurs. * Have --version ONLY print the version number. * Code cleanups. Bug Fixes --------- * Fix Curses UI (simplified by moving from MultiLock to Rlock implementation). * Makefile: docutils build work whether python extension command is stripped or not. * Makefile: clean now removes HTML documentation files. OfflineIMAP v6.3.2-rc2 (2010-12-21) =================================== Notes ----- We are beginning a new tests cycle. At this stage, I expect most people will try to intensively stuck OfflineIMAP. :-) New Features ------------ * Makefile learn to build the package and make it the default. * Introduce a Changelog to involve community in the releasing process. * Migrate documentation to restructuredtext. Changes ------- * Improve CustomConfig documentation. * Imply single threading mode in debug mode exept for "-d thread". * Code and import cleanups. * Allow UI to have arbitrary names. * Code refactoring around UI and UIBase. * Improve version managment and make it easier. * Introduce a true single threading mode. Bug Fixes --------- * Understand multiple EXISTS replies from servers like Zimbra. * Only verify hostname if we actually use CA cert. 
* Fix ssl ca-cert in the sample configuration file. * Fix 'Ctrl+C' interruptions in threads. * Fix makefile clean for files having whitespaces. * Fix makefile to not remove unrelated files. * Fixes in README. * Remove uneeded files. OfflineIMAP v6.3.2-rc1 (2010-12-19) =================================== Notes ----- We are beginning a tests cycle. If feature topics are sent, I may merge or delay them until the next stable release. New Features ------------ * Primitive implementation of SSL certificates check. Changes ------- * Use OptionParser instead of getopts. * Code cleanups. Bug Fixes --------- * Fix reading password from UI. OfflineIMAP v6.3.1 (2010-12-11) =============================== Notes ----- Yes, I know I've just annouced the v6.3.0 in the same week. As said, it was not really a true release for the software. This last release includes fixes and improvements it might be nice to update to. Thanks to every body who helped to make this release with patches and tips through the mailing list. This is clearly a release they own. Changes ------- * cProfile becomes the default profiler. Sebastian Spaeth did refactoring to prepare to the coming unit test suites. * UI output formating enhanced. * Some code cleanups. Bug Fixes --------- * Fix possible overflow while working with Exchange. * Fix time sleep while exiting threads. OfflineIMAP v6.3.0 (2010-12-09) =============================== Notes ----- This release is more "administrative" than anything else and mainly marks the change of the maintainer. New workflow and policy for developers come in. BTW, I don't think I'll maintain debian/changelog. At least, not in the debian way. Most users and maintainers may rather want to skip this release. Bug Fixes --------- * Fix terminal display on exit. * netrc password authentication. * User name querying from netrc. spaetz-offlineimap-c9e9690/Makefile000066400000000000000000000036661176237577200173550ustar00rootroot00000000000000# Copyright (C) 2002 - 2006 John Goerzen # # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA VERSION=`./offlineimap.py --version` TARGZ=offlineimap_$(VERSION).tar.gz SHELL=/bin/bash RST2HTML=`type rst2html >/dev/null 2>&1 && echo rst2html || echo rst2html.py` all: build build: python setup.py build @echo @echo "Build process finished, run 'python setup.py install' to install" \ "or 'python setup.py --help' for more information". clean: -python setup.py clean --all -rm -f bin/offlineimapc -find . -name '*.pyc' -exec rm -f {} \; -find . -name '*.pygc' -exec rm -f {} \; -find . -name '*.class' -exec rm -f {} \; -find . -name '.cache*' -exec rm -f {} \; -find . -name '*.html' -exec rm -f {} \; -rm -f manpage.links manpage.refs -find . -name auth -exec rm -vf {}/password {}/username \; @$(MAKE) -C docs clean man: @$(MAKE) -C docs man doc: @$(MAKE) -C docs $(RST2HTML) Changelog.rst Changelog.html targz: ../$(TARGZ) ../$(TARGZ): if ! 
pwd | grep -q "/offlineimap-$(VERSION)$$"; then \ echo "Containing directory must be called offlineimap-$(VERSION)"; \ exit 1; \ fi; \ pwd && cd .. && pwd && tar -zhcv --exclude '.git' -f $(TARGZ) offlineimap-$(VERSION) rpm: targz cd .. && sudo rpmbuild -ta $(TARGZ) spaetz-offlineimap-c9e9690/README000066400000000000000000000160451176237577200165700ustar00rootroot00000000000000OfflineImap README ================== Description ----------- OfflineIMAP is a tool to simplify your e-mail reading. With OfflineIMAP, you can read the same mailbox from multiple computers. You get a current copy of your messages on each computer, and changes you make one place will be visible on all other systems. For instance, you can delete a message on your home computer, and it will appear deleted on your work computer as well. OfflineIMAP is also useful if you want to use a mail reader that does not have IMAP support, has poor IMAP support, or does not provide disconnected operation. It's homepage at http://offlineimap.org contains more information, source code, and online documentation. OfflineIMAP does not require additional python dependencies beyond python >=2.6 (although python-sqlite is strongly recommended). OfflineIMAP is a Free Software project licensed under the GNU General Public License version 2 (or later). You can download it for free, and you can modify it. In fact, you are encouraged to contribute to OfflineIMAP. Documentation ------------- The documentation is included (in .rst format) in the `docs` directory. Read it directly or generate nice html docs (python-sphinx needed) and/or the man page (python-docutils needed) while being in the `docs` dir via:: 'make doc' (user docs), 'make man' (man page only) or 'make' (both) (`make html` will simply create html versions of all *.rst files in /docs) The resulting user documentation will be in `docs/html`. The full user docs are also at: http://docs.offlineimap.org. Please see there for detailed information on how to install and configure OfflineImap. Quick Start =========== First, install OfflineIMAP. See docs/INSTALL.rst or read http://docs.offlineimap.org/en/latest/INSTALL.html. (hint: `sudo python setup.py install`) Second, set up your configuration file and run it! The distribution includes offlineimap.conf.minimal (Debian users may find this at ``/usr/share/doc/offlineimap/examples/offlineimap.conf.minimal``) that provides you with the bare minimum of setting up OfflineIMAP. You can simply copy this file into your home directory and name it ``.offlineimaprc``. A command such as ``cp offlineimap.conf.minimal ~/.offlineimaprc`` will do it. Or, if you prefer, you can just copy this text to ``~/.offlineimaprc``:: [general] accounts = Test [Account Test] localrepository = Local remoterepository = Remote [Repository Local] type = Maildir localfolders = ~/Test [Repository Remote] type = IMAP remotehost = examplehost remoteuser = jgoerzen Now, edit the ``~/.offlineimaprc`` file with your favorite editor. All you have to do is specify a directory for your folders to be in (on the localfolders line), the host name of your IMAP server (on the remotehost line), and your login name on the remote (on the remoteuser line). That's it! To run OfflineIMAP, you just have to say `offlineimap` ― it will fire up, ask you for a login password if necessary, synchronize your folders, and exit. See? You can just throw away the rest of the finely-crafted, perfectly-honed user manual! 
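To illustrate (assuming the `offlineimap` executable is on your PATH after installation; the `-u` switch and the UI names are described in the user documentation), a first couple of runs might look like::

    offlineimap              # synchronize all configured accounts
    offlineimap -u Basic     # the same, using the non-interactive Basic UI

See the full user documentation for all available command line options.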
Of course, if you want to see how you can make OfflineIMAP FIVE TIMES FASTER FOR JUST $19.95 (err, well, $0), you have to read on our full user documentation and peruse the sample offlineimap.conf (which includes all available options) for further tweaks! Mailing list & bug reporting ---------------------------- The user discussion, development and all exciting stuff take place in the OfflineImap mailing list at http://lists.alioth.debian.org/mailman/listinfo/offlineimap-project. You do not need to subscribe to send emails. Bugs, issues and contributions should be reported to the mailing list. Bugs can also be reported in the issue tracker at https://github.com/spaetz/offlineimap/issues. Configuration Examples ====================== Here are some example configurations for various situations. Please e-mail any other examples you have that may be useful to me. Multiple Accounts with Mutt --------------------------- This example shows you how to set up OfflineIMAP to synchronize multiple accounts with the mutt mail reader. Start by creating a directory to hold your folders by running ``mkdir ~/Mail``. Then, in your ``~/.offlineimaprc``, specify:: accounts = Personal, Work Make sure that you have both an [Account Personal] and an [Account Work] section. The local repository for each account must have different localfolder path names. Also, make sure to enable [mbnames]. In each local repository section, write something like this:: localfolders = ~/Mail/Personal Finally, add these lines to your ``~/.muttrc``:: source ~/path-to-mbnames-muttrc-mailboxes folder-hook Personal set from="youremail@personal.com" folder-hook Work set from="youremail@work.com" set mbox_type=Maildir set folder=$HOME/Mail spoolfile=+Personal/INBOX That's it! UW-IMAPD and References ----------------------- Some users with a UW-IMAPD server need to use OfflineIMAP's "reference" feature to get at their mailboxes, specifying a reference of ``~/Mail`` or ``#mh/`` depending on the configuration. The below configuration from (originally from docwhat@gerf.org) shows using a reference of Mail, a nametrans that strips the leading Mail/ off incoming folder names, and a folderfilter that limits the folders synced to just three:: [Account Gerf] localrepository = GerfLocal remoterepository = GerfRemote [Repository GerfLocal] type = Maildir localfolders = ~/Mail [Repository GerfRemote] type = IMAP remotehost = gerf.org ssl = yes remoteuser = docwhat reference = Mail # Trims off the preceeding Mail on all the folder names. nametrans = lambda foldername: \ re.sub('^Mail/', '', foldername) # Yeah, you have to mention the Mail dir, even though it # would seem intuitive that reference would trim it. folderfilter = lambda foldername: foldername in [ 'Mail/INBOX', 'Mail/list/zaurus-general', 'Mail/list/zaurus-dev', ] maxconnections = 1 holdconnectionopen = no pythonfile Configuration File Option ------------------------------------- You can have OfflineIMAP load up a Python file before evaluating the configuration file options that are Python expressions. This example is based on one supplied by Tommi Virtanen for this feature. 
In ~/.offlineimaprc, he adds these options:: [general] pythonfile=~/.offlineimap.py [Repository foo] foldersort=mycmp Then, the ~/.offlineimap.py file will contain:: prioritized = ['INBOX', 'personal', 'announce', 'list'] def mycmp(x, y): for prefix in prioritized: xsw = x.startswith(prefix) ysw = y.startswith(prefix) if xsw and ysw: return cmp(x, y) elif xsw: return -1 elif ysw: return +1 return cmp(x, y) def test_mycmp(): import os, os.path folders=os.listdir(os.path.expanduser('~/data/mail/tv@hq.yok.utu.fi')) folders.sort(mycmp) print folders This code snippet illustrates how the foldersort option can be customized with a Python function from the pythonfile to always synchronize certain folders first. spaetz-offlineimap-c9e9690/bin/000077500000000000000000000000001176237577200164525ustar00rootroot00000000000000spaetz-offlineimap-c9e9690/bin/offlineimap000077500000000000000000000016421176237577200206740ustar00rootroot00000000000000#!/usr/bin/env python # Startup from system-wide installation # Copyright (C) 2002 - 2009 John Goerzen # # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from offlineimap import OfflineImap oi = OfflineImap() oi.run() spaetz-offlineimap-c9e9690/docs/000077500000000000000000000000001176237577200166325ustar00rootroot00000000000000spaetz-offlineimap-c9e9690/docs/INSTALL.rst000066400000000000000000000112311176237577200204700ustar00rootroot00000000000000.. -*- coding: utf-8 -*- .. _OfflineIMAP: https://github.com/spaetz/offlineimap .. _OLI_git_repo: git://github.com/spaetz/offlineimap.git ============ Installation ============ .. contents:: .. .. sectnum:: ------------- Prerequisites ------------- In order to use `OfflineIMAP`_, you need to have these conditions satisfied: 1. Your mail server must support IMAP. Mail access via POP is not supported. A special Gmail mailbox type is available to interface with Gmail's IMAP front-end, although Gmail has a very peculiar and non-standard implementation of its IMAP interface. 2. You must have Python version 2.6 or above installed. If you are running on Debian GNU/Linux, this requirement will automatically be taken care of for you. If you intend to use the SSL interface, your Python must have been built with SSL support. 3. If you use OfflineImap as an IMAP<->Maildir synchronizer, you will obviously need to have a mail reader that supports the Maildir mailbox format. Most modern mail readers have this support built-in, so you can choose from a wide variety of mail servers. This format is also known as the "qmail" format, so any mail reader compatible with it will work with `OfflineIMAP`_. ------------ Installation ------------ Installing OfflineImap should usually be quite easy, as you can simply unpack and run OfflineImap in place if you wish to do so. There are a number of options though: #. system-wide :ref:`installation via your distribution package manager ` #. 
system-wide or single user :ref:`installation from the source package <inst_src_tar>`
#. system-wide or single user :ref:`installation from a git checkout <inst_git>`

Having installed OfflineImap, you will need to configure it to be actually useful. Please check the :ref:`Configuration` section in the :doc:`MANUAL` for more information on the configuration step.

.. _inst_pkg_man:

System-Wide Installation via distribution
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

The easiest way to install OfflineIMAP is via your distribution's package manager. OfflineImap is available under the name `offlineimap` in most Linux and BSD distributions.

.. _inst_src_tar:

Installation from source package
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Download the latest source archive from our `download page `_. Simply click the "Download as .zip" or "Download as .tar.gz" buttons to get the latest "stable" code from the master branch. If you prefer the command line, you will want to use::

    wget https://github.com/spaetz/offlineimap/tarball/master

Unpack the archive and continue with either the :ref:`system-wide installation <system_wide_inst>` or the :ref:`single-user installation <single_user_inst>` section.

.. _inst_git:

Installation from git checkout
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Get your own copy of the `official git repository `_ at `OfflineIMAP`_::

    git clone git://github.com/spaetz/offlineimap.git

This will download the source with history. By default, git sets up the `master` branch, which is most likely what you want. If not, you can check out a particular release like this::

    cd offlineimap
    git checkout v6.5.2.1

You now have a source tree available and can proceed with either the :ref:`system-wide installation <system_wide_inst>` or the :ref:`single-user installation <single_user_inst>`.

.. _system_wide_inst:

System-wide installation
++++++++++++++++++++++++

Then run these commands to build the python package::

    make clean
    make

Finally, install the program (as root)::

    python setup.py install

Next, proceed to the configuration step described below. Type `offlineimap` to invoke the program.

.. _single_user_inst:

Single-user installation
++++++++++++++++++++++++

Download the git repository as described above. Instead of installing the program as root, you type `./offlineimap.py`; there is no installation step necessary.

---------
Uninstall
---------

If you performed a system-wide installation via `python setup.py install`, there are a few files to purge to cleanly uninstall `OfflineImap`_ again. Assuming that `/usr/local` is the standard prefix of your system and that you use python 2.7, you need to:

#) Delete the OfflineImap installation itself::

     /usr/local/lib/python2.7/dist-packages/offlineimap-6.4.4.egg-info
     /usr/local/lib/python2.7/dist-packages/offlineimap

   In case you did the single-user installation, simply delete your offlineimap directory.

#) Delete all files that OfflineImap creates during its operation.

   - The cache at (default location) ~/.offlineimap
   - Your manually created (default location) ~/.offlineimaprc

   (It is possible that you created those in different spots.)

That's it. Have fun without OfflineImap.

spaetz-offlineimap-c9e9690/docs/MANUAL.rst

====================
 OfflineIMAP Manual
====================
_OfflineIMAP: http://offlineimap.org -------------------------------------------------------- Powerful IMAP/Maildir synchronization and reader support -------------------------------------------------------- :Author: John Goerzen & contributors :Date: 2012-02-23 DESCRIPTION =========== OfflineImap operates on a REMOTE and a LOCAL repository and synchronizes emails between them, so that you can read the same mailbox from multiple computers. The REMOTE repository is some IMAP server, while LOCAL can be either a local Maildir or another IMAP server. Missing folders will be automatically created on both sides if needed. No folders will be deleted at the moment. Configuring OfflineImap in basic mode is quite easy, however it provides an amazing amount of flexibility for those with special needs. You can specify the number of connections to your IMAP server, use arbitrary python functions (including regular expressions) to limit the number of folders being synchronized. You can transpose folder names between repositories using any python function, to mangle and modify folder names on the LOCAL repository. There are six different ways to hand the IMAP password to OfflineImap from console input, specifying in the configuration file, .netrc support, specifying in a separate file, to using arbitrary python functions that somehow return the password. Finally, you can use IMAPs IDLE infrastructure to always keep a connection to your IMAP server open and immediately be notified (and synchronized) when a new mail arrives (aka Push mail). Most configuration is done via the configuration file. However, any setting can also be overriden by command line options handed to OfflineIMAP. OfflineImap is well suited to be frequently invoked by cron jobs, or can run in daemon mode to periodically check your email (however, it will exit in some error situations). The documentation is included in the git repository and can be created by issueing `make dev-doc` in the `doc` folder (python-sphinx required), or it can be viewed online at http://docs.offlineimap.org. .. _configuration: Configuration ============= `OfflineIMAP`_ is regulated by a configuration file that is normally stored in `~/.offlineimaprc`. `OfflineIMAP`_ ships with a file named `offlineimap.conf` that you should copy to that location and then edit. This file is vital to proper operation of the system; it sets everything you need to run `OfflineIMAP`_. Full documentation for the configuration file is included within the sample file. `OfflineIMAP`_ also ships a file named `offlineimap.conf.minimal` that you can also try. It's useful if you want to get started with the most basic feature set, and you can read about other features later with `offlineimap.conf`. Check out the `Use Cases`_ section for some example configurations. OPTIONS ======= The command line options are described by issueing `offlineimap --help`. Details on their use can be found either in the sample offlineimap.conf file or in the user docs at http://docs.offlineimap.org. User Interfaces =============== OfflineIMAP has various user interfaces that let you choose how the program communicates information to you. The 'ui' option in the configuration file specifies the user interface. The -u command-line option overrides the configuration file setting. The available values for the configuration file or command-line are described in this section. 
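For example, to make the TTY interface your default while keeping the option of overriding it for a single run, a fragment like the following can be used (a minimal sketch; the value names follow the sample offlineimap.conf shipped with your release, so double-check there if a name does not match)::

    [general]
    accounts = Personal
    ui = ttyui

and, for an occasional quiet run, invoke it as `offlineimap -u quiet`.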
Blinkenlights --------------- Blinkenlights is an interface designed to be sleek, fun to watch, and informative of the overall picture of what OfflineIMAP is doing. Blinkenlights contains a row of "LEDs" with command buttons and a log. The log shows more detail about what is happening and is color-coded to match the color of the lights. Each light in the Blinkenlights interface represents a thread of execution -- that is, a particular task that OfflineIMAP is performing right now. The colors indicate what task the particular thread is performing, and are as follows: * Black: indicates that this light's thread has terminated; it will light up again later when new threads start up. So, black indicates no activity. * Red (Meaning 1): is the color of the main program's thread, which basically does nothing but monitor the others. It might remind you of HAL 9000 in 2001. * Gray: indicates that the thread is establishing a new connection to the IMAP server. * Purple: is the color of an account synchronization thread that is monitoring the progress of the folders in that account (not generating any I/O). * Cyan: indicates that the thread is syncing a folder. * Green: means that a folder's message list is being loaded. * Blue: is the color of a message synchronization controller thread. * Orange: indicates that an actual message is being copied. (We use fuchsia for fake messages.) * Red (meaning 2): indicates that a message is being deleted. * Yellow / bright orange: indicates that message flags are being added. * Pink / bright red: indicates that message flags are being removed. * Red / Black Flashing: corresponds to the countdown timer that runs between synchronizations. The name of this interfaces derives from a bit of computer history. Eric Raymond's Jargon File defines blinkenlights, in part, as: Front-panel diagnostic lights on a computer, esp. a dinosaur. Now that dinosaurs are rare, this term usually refers to status lights on a modem, network hub, or the like. This term derives from the last word of the famous blackletter-Gothic sign in mangled pseudo-German that once graced about half the computer rooms in the English-speaking world. One version ran in its entirety as follows: | ACHTUNG! ALLES LOOKENSPEEPERS! | | Das computermachine ist nicht fuer gefingerpoken und mittengrabben. | Ist easy schnappen der springenwerk, blowenfusen und poppencorken | mit spitzensparken. Ist nicht fuer gewerken bei das dumpkopfen. | Das rubbernecken sichtseeren keepen das cotten-pickenen hans in das | pockets muss; relaxen und watchen das blinkenlichten. TTYUI ------ TTYUI interface is for people running in terminals. It prints out basic status messages and is generally friendly to use on a console or xterm. Basic ------ Basic is designed for situations in which OfflineIMAP will be run non-attended and the status of its execution will be logged. This user interface is not capable of reading a password from the keyboard; account passwords must be specified using one of the configuration file options. For example, it will not print periodic sleep announcements and tends to be a tad less verbose, in general. Quiet ----- It will output nothing except errors and serious warnings. Like Basic, this user interface is not capable of reading a password from the keyboard; account passwords must be specified using one of the configuration file options. MachineUI --------- MachineUI generates output in a machine-parsable format. It is designed for other programs that will interface to OfflineIMAP. 
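Since the Basic and Quiet interfaces never prompt for input, they pair well with unattended runs. A hypothetical crontab entry could look like this (the path, schedule and interface choice are placeholders, not recommendations)::

    # sync once every 15 minutes with minimal output
    */15 * * * * /usr/bin/offlineimap -o -u quiet

Here `-o` requests a single synchronization run instead of staying in autorefresh mode.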
Synchronization Performance =========================== By default, we use fairly conservative settings that are safe for syncing but that might not be the best performing one. Once you got everything set up and running, you might want to look into speeding up your synchronization. Here are a couple of hints and tips on how to achieve this. 1) Use maxconnections > 1. By default we only use one connection to an IMAP server. Using 2 or even 3 speeds things up considerably in most cases. This setting goes into the [Repository XXX] section. 2) Use folderfilters. The quickest sync is a sync that can ignore some folders. I sort my inbox into monthly folders, and ignore every folder that is more than 2-3 months old, this lets me only inspect a fraction of my Mails on every sync. If you haven't done this yet, do it :). See the folderfilter section the example offlineimap.conf. 3) The default status cache is a plain text file that will write out the complete file for each single new message (or even changed flag) to a temporary file. If you have plenty of files in a folder, this is a few hundred kilo to megabytes for each mail and is bound to make things slower. I recommend to use the sqlite backend for that. See the status_backend = sqlite setting in the example offlineimap.conf. You will need to have python-sqlite installed in order to use this. This will save you plenty of disk activity. Do note that the sqlite backend is still considered experimental as it has only been included recently (although a loss of your status cache should not be a tragedy as that file can be rebuild automatically) 4) Use quick sync. A regular sync will request all flags and all UIDs of all mails in each folder which takes quite some time. A 'quick' sync only compares the number of messages in a folder on the IMAP side (it will detect flag changes on the Maildir side of things though). A quick sync on my smallish account will take 7 seconds rather than 40 seconds. Eg, I run a cron script that does a regular sync once a day, and does quick syncs (-q) only synchronizing the "-f INBOX" in between. 5) Turn off fsync. In the [general] section you can set fsync to True or False. If you want to play 110% safe and wait for all operations to hit the disk before continueing, you can set this to True. If you set it to False, you lose some of that safety, trading it for speed. Upgrading from plain text cache to SQLITE based cache ===================================================== OfflineImap uses a cache to store the last know status of mails (flags etc). Historically that has meant plain text files, but recently we introduced sqlite-based cache, which helps with performance and CPU usage on large folders. Here is how to upgrade existing plain text cache installations to sqlite based one: 1) Sync to make sure things are reasonably similar 2) Change the account section to status_backend = sqlite 3) A new sync will convert your plain text cache to an sqlite cache (but leave the old plain text cache around for easy reverting) This should be quick and not involve any mail up/downloading. 4) See if it works :-) 5) If it does not work, go back to the old version or set status_backend=plain 6) Or, once you are sure it works, you can delete the .offlineimap/Account-foo/LocalStatus folder (the new cache will be in the LocalStatus-sqlite folder) Security and SSL ================ Some words on OfflineImap and its use of SSL/TLS. By default, we will connect using any method that openssl supports, that is SSLv2, SSLv3, or TLSv1. 
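If you want to see which protocol and cipher a server actually negotiates with a default Python setup, a small diagnostic sketch like the one below can help; it is not part of OfflineImap, and the host and port are placeholders::

    import socket, ssl

    HOST, PORT = 'mail.example.com', 993    # placeholder: your IMAP server

    sock = socket.create_connection((HOST, PORT))
    wrapped = ssl.wrap_socket(sock)         # negotiates whatever both sides offer
    print wrapped.cipher()                  # e.g. ('AES256-SHA', 'TLSv1', 256)
    wrapped.close()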
Do note that SSLv2 is notoriously insecure and deprecated. Unfortunately, python2 does not offer easy ways to disable SSLv2. It is recommended you test your setup and make sure that the mail server does not use an SSLv2 connection. Use e.g. "openssl s_client -host mail.server -port 443" to find out the connection that is used by default. Certificate checking -------------------- Unfortunately, by default we will not verify the certificate of an IMAP TLS/SSL server we connect to, so connecting by SSL is no guarantee against man-in-the-middle attacks. While verifying a server certificate fingerprint is being planned, it is not implemented yet. There is currently only one safe way to ensure that you connect to the correct server in an encrypted manner: You can specify a 'sslcacertfile' setting in your repository section of offlineimap.conf pointing to a file that contains (among others) a CA Certificate in PEM format which validating your server certificate. In this case, we will check that: 1) The server SSL certificate is validated by the CA Certificate 2) The server host name matches the SSL certificate 3) The server certificate is not past its expiration date. The FAQ contains an entry on how to create your own certificate and CA certificate. StartTLS -------- If you have not configured your account to connect via SSL anyway, OfflineImap will still attempt to set up an SSL connection via the STARTTLS function, in case the imap server supports it. Do note, that there is no certificate or fingerprint checking involved at all, when using STARTTLS (the underlying imaplib library does not support this yet). This means that you will be protected against passively listening eavesdroppers and they will not be able to see your password or email contents. However, this will not protect you from active attacks, such as Man-In-The-Middle attacks which cause you to connect to the wrong server and pretend to be your mail server. DO NOT RELY ON STARTTLS AS A SAFE CONNECTION GUARANTEEING THE AUTHENTICITY OF YOUR IMAP SERVER! .. _UNIX signals: UNIX Signals ============ OfflineImap listens to the unix signals SIGUSR1 and SIGUSR2. If sent a SIGUSR1 it will abort any current (or next future) sleep of all accounts that are configured to "autorefresh". In effect, this will trigger a full sync of all accounts to be performed as soon as possible. If sent a SIGUSR2, it will stop "autorefresh mode" for all accounts. That is, accounts will abort any current sleep and will exit after a currently running synchronization has finished. This signal can be used to gracefully exit out of a running offlineimap "daemon". Folder filtering and nametrans ============================== OfflineImap offers flexible (and complex) ways of filtering and transforming folder names. Please see the docs/dev-docs-src/folderfilters.rst document about details how to use folder filters and name transformations. The documentation will be autogenerated by a "make dev-doc" in the docs directory. It is also viewable at :ref:`folder_filtering_and_name_translation`. KNOWN BUGS ========== * SSL3 write pending: users enabling SSL may hit a bug about "SSL3 write pending". If so, the account(s) will stay unsynchronised from the time the bug appeared. Running OfflineIMAP again can help. We are still working on this bug. Patches or detailed bug reports would be appreciated. Please check you're running the last stable version and send us a report to the mailing list including the full log. * IDLE support is incomplete and experimental. 
Bugs may be encountered. * No hook exists for "run after an IDLE response". Email will show up, but may not be processed until the next refresh cycle. * nametrans may not be supported correctly. * IMAP IDLE <-> IMAP IDLE doesn't work yet. * IDLE may only work "once" per refresh. If you encounter this bug, please send a report to the list! * Maildir support in Windows drive Maildir uses colon caracter (:) in message file names. Colon is however forbidden character in windows drives. There are several workarounds for that situation: * Use "maildir-windows-compatible = yes" account OfflineIMAP configuration. - That makes OfflineIMAP to use exclamation mark (!) instead of colon for storing messages. Such files can be written to windows partitions. But you will probably loose compatibility with other programs trying to read the same Maildir. - Exclamation mark was chosen because of the note in http://docs.python.org/library/mailbox.html - If you have some messages already stored without this option, you will have to re-sync them again * Enable file name character translation in windows registry (not tested) - http://support.microsoft.com/kb/289627 * Use cygwin managed mount (not tested) - not available anymore since cygwin 1.7 .. _pitfalls: PITFALLS & ISSUES ================= Sharing a maildir with multiple IMAP servers -------------------------------------------- Generally a word of caution mixing IMAP repositories on the same Maildir root. You have to be careful that you *never* use the same maildir folder for 2 IMAP servers. In the best case, the folder MD5 will be different, and you will get a loop where it will upload your mails to both servers in turn (infinitely!) as it thinks you have placed new mails in the local Maildir. In the worst case, the MD5 is the same (likely) and mail UIDs overlap (likely too!) and it will fail to sync some mails as it thinks they are already existent. I would create a new local Maildir Repository for the Personal Gmail and use a different root to be on the safe side here. You could e.g. use `~/mail/Pro` as Maildir root for the ProGmail and `~/mail/Personal` as root for the personal one. If you then point your local mutt, or whatever MUA you use to `~/mail/` as root, it should still recognize all folders. (see the 2 IMAP setup in the `Use Cases`_ section. USE CASES ========= Sync from GMail to another IMAP server -------------------------------------- This is an example of a setup where "TheOtherImap" requires all folders to be under INBOX:: [Repository Gmailserver-foo] #This is the remote repository type = Gmail remotepass = XXX remoteuser = XXX # The below will put all GMAIL folders as sub-folders of the 'local' INBOX, # assuming that your path separator on 'local' is a dot. nametrans = lambda x: 'INBOX.' + x [Repository TheOtherImap] #This is the 'local' repository type = IMAP remotehost = XXX remotepass = XXX remoteuser = XXX #Do not use nametrans here. Selecting only a few folders to sync ------------------------------------ Add this to the remote gmail repository section to only sync mails which are in a certain folder:: folderfilter = lambda folder: folder.startswith('MyLabel') To only get the All Mail folder from a Gmail account, you would e.g. do:: folderfilter = lambda folder: folder.startswith('[Gmail]/All Mail') Another nametrans transpose example ----------------------------------- Put everything in a GMX. 
subfolder except for the boxes INBOX, Draft, and Sent which should keep the same name:: nametrans: lambda folder: folder if folder in ['INBOX', 'Drafts', 'Sent'] \ else re.sub(r'^', r'GMX.', folder) 2 IMAP using name translations ------------------------------ Synchronizing 2 IMAP accounts to local Maildirs that are "next to each other", so that mutt can work on both. Full email setup described by Thomas Kahle at ``_ offlineimap.conf:: [general] accounts = acc1, acc2 maxsyncaccounts = 2 ui = ttyui pythonfile=~/bin/offlineimap-helpers.py socktimeout = 90 [Account acc1] localrepository = acc1local remoterepository = acc1remote autorefresh = 2 [Account acc2] localrepository = acc2local remoterepository = acc2remote autorefresh = 4 [Repository acc1local] type = Maildir localfolders = ~/Mail/acc1 [Repository acc2local] type = Maildir localfolders = ~/Mail/acc2 [Repository acc1remote] type = IMAP remotehost = imap.acc1.com remoteusereval = get_username("imap.acc1.net") remotepasseval = get_password("imap.acc1.net") nametrans = oimaptransfolder_acc1 ssl = yes maxconnections = 2 # Folders to get: folderfilter = lambda foldername: foldername in [ 'INBOX', 'Drafts', 'Sent', 'archiv'] [Repository acc2remote] type = IMAP remotehost = imap.acc2.net remoteusereval = get_username("imap.acc2.net") remotepasseval = get_password("imap.acc2.net") nametrans = oimaptransfolder_acc2 ssl = yes maxconnections = 2 One of the coolest things about offlineimap is that you can call arbitrary python code from your configuration. To do this, specify a pythonfile with:: pythonfile=~/bin/offlineimap-helpers.py Your pythonfile needs to contain implementations for the functions that you want to use in offflineimaprc. The example uses it for two purposes: Fetching passwords from the gnome-keyring and translating folder names on the server to local foldernames. An example implementation of get_username and get_password showing how to query gnome-keyring is contained in ``_ The folderfilter is a lambda term that, well, filters which folders to get. The function `oimaptransfolder_acc2` translates remote folders into local folders with a very simple logic. The `INBOX` folder will have the same name as the account while any other folder will have the account name and a dot as a prefix. This is useful for hierarchichal display in mutt. Offlineimap handles the renaming correctly in both directions:: import re def oimaptransfolder_acc1(foldername): if(foldername == "INBOX"): retval = "acc1" else: retval = "acc1." + foldername retval = re.sub("/", ".", retval) return retval def oimaptransfolder_acc2(foldername): if(foldername == "INBOX"): retval = "acc2" else: retval = "acc2." + foldername retval = re.sub("/", ".", retval) return retval spaetz-offlineimap-c9e9690/docs/Makefile000066400000000000000000000014121176237577200202700ustar00rootroot00000000000000# This program is free software under the terms of the GNU General Public # License. See the COPYING file which must come with this package. SOURCES = $(wildcard *.rst) HTML_TARGETS = $(patsubst %.rst,%.html,$(SOURCES)) RM = rm RST2HTML=`type rst2html >/dev/null 2>&1 && echo rst2html || echo rst2html.py` RST2MAN=`type rst2man >/dev/null 2>&1 && echo rst2man || echo rst2man.py` SPHINXBUILD = sphinx-build all: man doc html: $(HTML_TARGETS) $(HTML_TARGETS): %.html : %.rst $(RST2HTML) $? $@ man: offlineimap.1 offlineimap.1: MANUAL.rst $(RST2MAN) MANUAL.rst offlineimap.1 cp -f offlineimap.1 .. 
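The gnome-keyring implementation of `get_username` and `get_password` is not reproduced here. Any pair of functions with these names will do; as a simpler illustration, the sketch below reads the credentials from `~/.netrc` using Python's standard `netrc` module (the function names match the configuration above, everything else is an assumption about where you keep your credentials)::

    import netrc

    def get_username(host):
        # login stored for this host in ~/.netrc (fails if the host is missing)
        return netrc.netrc().authenticators(host)[0]

    def get_password(host):
        # password stored for this host in ~/.netrc
        return netrc.netrc().authenticators(host)[2]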
doc: $(SPHINXBUILD) -b html -d html/doctrees doc-src html clean: $(RM) -f $(HTML_TARGETS) $(RM) -f offlineimap.1 ../offlineimap.1 $(RM) -rf html/* .PHONY: clean doc spaetz-offlineimap-c9e9690/docs/doc-src/000077500000000000000000000000001176237577200201645ustar00rootroot00000000000000spaetz-offlineimap-c9e9690/docs/doc-src/API.rst000066400000000000000000000041221176237577200213260ustar00rootroot00000000000000.. OfflineImap API documentation .. currentmodule:: offlineimap .. _API docs: :mod:`offlineimap's` API documentation ====================================== Within :mod:`offlineimap`, the classes :class:`OfflineImap` provides the high-level functionality. The rest of the classes should usually not needed to be touched by the user. Email repositories are represented by a :class:`offlineimap.repository.Base.BaseRepository` or derivatives (see :mod:`offlineimap.repository` for details). A folder within a repository is represented by a :class:`offlineimap.folder.Base.BaseFolder` or any derivative from :mod:`offlineimap.folder`. This page contains the main API overview of OfflineImap |release|. OfflineImap can be imported as:: from offlineimap import OfflineImap The file ``SubmittingPatches.rst`` in the source distribution documents a number of resources and conventions you may find useful. It will eventually be merged into the main documentation. .. TODO: merge SubmittingPatches.rst to the main documentation :mod:`offlineimap` -- The OfflineImap module ============================================= .. module:: offlineimap .. autoclass:: offlineimap.OfflineImap(cmdline_opts = None) .. automethod:: run .. automethod:: parse_cmd_options .. .. autoattribute:: ui :todo: Document :class:`offlineimap.account` ============================ An :class:`accounts.Account` connects two email repositories that are to be synced. It comes in two flavors, normal and syncable. .. autoclass:: offlineimap.accounts.Account .. autoclass:: offlineimap.accounts.SyncableAccount :members: :inherited-members: .. autodata:: ui Contains the current :mod:`offlineimap.ui`, and can be used for logging etc. :exc:`OfflineImapError` -- A Notmuch execution error -------------------------------------------------------- .. autoexception:: offlineimap.error.OfflineImapError :members: This execption inherits directly from :exc:`Exception` and is raised on errors during the offlineimap execution. It has an attribute `severity` that denotes the severity level of the error. spaetz-offlineimap-c9e9690/docs/doc-src/FAQ.rst000066400000000000000000000503471176237577200213360ustar00rootroot00000000000000.. -*- coding: utf-8 -*- .. NOTE TO MAINTAINERS: Please add new questions to the end of their sections, so section/question numbers remain stable. ============================================= OfflineIMAP FAQ (Frequently Asked Questions) ============================================= :Web site: https://github.com/nicolas33/offlineimap :Copyright: This document is licensed under GPLv2. .. contents:: .. sectnum:: This is a work in progress. Please feel free to ask questions and/or provide answers; send email to the `mailing list`_. .. _mailing list: http://lists.alioth.debian.org/mailman/listinfo/offlineimap-project .. _OfflineIMAP: https://github.com/nicolas33/offlineimap .. _ssl.wrap_socket: http://docs.python.org/library/ssl.html#ssl.wrap_socket OfflineIMAP =========== Where do I get OfflineIMAP? --------------------------- See the information on the Home page `OfflineIMAP`_. How fast is it? 
--------------- OfflineIMAP has a multithreaded sync, so it should have very nice performance. OfflineIMAP versions 2.0 and above contain a multithreaded system. A good way to experiment is by setting maxsyncaccounts to 3 and maxconnections to 3 in each account clause. This lets OfflineIMAP open up multiple connections simultaneously. That will let it process multiple folders and messages at once. In most cases, this will increase performance of the sync. Don’t set the number too high. If you do that, things might actually slow down as your link gets saturated. Also, too many connections can cause mail servers to have excessive load. Administrators might take unkindly to this, and the server might bog down. There are many variables in the optimal setting; experimentation may help. See the Performance section in the MANUAL for some tips. What platforms does OfflineIMAP support? ---------------------------------------- It should run on most platforms supported by Python, with one exception: we do not support Windows, but some have made it work there. The following has been reported by OfflineIMAP users. We do not test OfflineIMAP on Windows, so we can’t directly address their accuracy. The basic answer is that it’s possible and doesn’t require hacking OfflineIMAP source code. However, it’s not necessarily trivial. The information below is based in instructions submitted by Chris Walker:: First, you must run OfflineIMAP in the Cygwin environment. The Windows filesystem is not powerful enough to accomodate Maildir by itself. Next, you’ll need to mount your Maildir directory in a special way. There is information for doing that at http://barnson.org/node/295. That site gives this example:: mount -f -s -b -o managed "d:/tmp/mail" "/home/of/mail" That URL also has more details on making OfflineIMAP work with Windows. Does OfflineIMAP support mbox, mh, or anything else other than Maildir? ----------------------------------------------------------------------- Not directly. Maildir was the easiest to implement. We are not planning to write an mbox-backend, though if someone sent me well-written mbox support and pledged to support it, it would be committed it to the tree. However, OfflineIMAP can directly sync accounts on two different IMAP servers together. So you could install an IMAP server on your local machine that supports mbox, sync to it, and then instruct your mail readers to use the mboxes. Or you could install whatever IMAP server you like on the local machine, and point your mail readers to that IMAP server on localhost. What is the UID validity problem for folder? -------------------------------------------- IMAP servers use a folders UIDVALIDITY value in combination with a unique ID (UID) to refer to a specific message. This is guaranteed to be unique to a particular message forever. No other message in the same folder will ever get the same UID as long as UIDVALIDITY remains unchanged. UIDs are an integral part of `OfflineIMAP`_'s synchronization scheme; they are used to match up messages on your computer to messages on the server. Sometimes, the UIDs on the server might get reset. Usually this will happen if you delete and then recreate a folder. When you create a folder, the server will often start the UID back from 1. But `OfflineIMAP`_ might still have the UIDs from the previous folder by the same name stored. `OfflineIMAP`_ will detect this condition because of the changed UIDVALIDITY value and skip the folder. This is GOOD, because it prevents data loss. 
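If you are curious what your server currently reports for a folder, you can query it directly with Python's `imaplib`; this is purely a diagnostic sketch, and the host, credentials and folder name are placeholders::

    import imaplib

    conn = imaplib.IMAP4_SSL('mail.example.com')   # placeholder host
    conn.login('user', 'secret')
    typ, data = conn.status('INBOX', '(UIDVALIDITY UIDNEXT)')
    print data    # e.g. ['INBOX (UIDVALIDITY 1234567890 UIDNEXT 4711)']
    conn.logout()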
In the IMAP<->Maildir case, you can fix it by removing your local folder and cache data. For instance, if your folders are under `~/Folders` and the folder with the problem is INBOX, you'd type this:: rm -r ~/Folders/INBOX rm -r ~/.offlineimap/Account-AccountName/LocalStatus/INBOX rm -r ~/.offlineimap/Repository-RemoteRepositoryName/FolderValidity/INBOX (Of course, replace AccountName and RemoteRepositoryName with the names as specified in `~/.offlineimaprc`). Next time you run `OfflineIMAP`_, it will re-download the folder with the new UIDs. Note that the procedure specified above will lose any local changes made to the folder. Some IMAP servers are broken and do not support UIDs properly. If you continue to get this error for all your folders even after performing the above procedure, it is likely that your IMAP server falls into this category. `OfflineIMAP`_ is incompatible with such servers. Using `OfflineIMAP`_ with them will not destroy any mail, but at the same time, it will not actually synchronize it either. (`OfflineIMAP`_ will detect this condition and abort prior to synchronization.) This question comes up frequently on the `mailing list`_. You can find a detailed discussion of the problem there http://lists.complete.org/offlineimap@complete.org/2003/04/msg00012.html.gz. How do I automatically delete a folder? --------------------------------------- OfflineIMAP does not currently provide this feature. You will have to delete folders manually. See next entry too. May I delete local folders? --------------------------- `OfflineIMAP`_ does a two-way synchronization. That is, if you make a change to the mail on the server, it will be propagated to your local copy, and vise-versa. Some people might think that it would be wise to just delete all their local mail folders periodically. If you do this with `OfflineIMAP`_, remember to also remove your local status cache (`~/.offlineimap` by default). Otherwise, `OfflineIMAP`_ will take this as an intentional deletion of many messages and will interpret your action as requesting them to be deleted from the server as well. (If you don't understand this, don't worry; you probably won't encounter this situation.) Can I run multiple instances? ----------------------------- `OfflineIMAP`_ is not designed to have several instances (for instance, a cron job and an interactive invocation) run over the same mailbox simultaneously. It will perform a check on startup and abort if another `OfflineIMAP`_ is already running. If you need to schedule synchronizations, you'll probably find autorefresh settings more convenient than cron. Alternatively, you can set a separate metadata directory for each instance. In the future, we will lock each account individually rather than having one global lock. Can I copy messages between folders? --------------------------------------- Normally, when you copy a message between folders or add a new message to a folder locally, `OfflineIMAP`_ will just do the right thing. However, sometimes this can be tricky ― if your IMAP server does not provide the SEARCH command, or does not return something useful, `OfflineIMAP`_ cannot determine the new UID of the message. So, in these rare instances, OfflineIMAP will upload the message to the IMAP server and delete it from your local folder. Then, on your next sync, the message will be re-downloaded with the proper UID. `OfflineIMAP`_ makes sure that the message was properly uploaded before deleting it, so there should be no risk of data loss. 
But if you try to sync between two IMAP servers, where both are unable to provide you with UID of the new message, then this will lead to infinite loop. `OfflineIMAP`_ will upload the message to one server and delete on second. On next run it will upload the message to second server and delete on first, etc. Does OfflineIMAP support POP? ----------------------------- No. How is OfflineIMAP conformance? ------------------------------- * Internet Message Access Protocol version 4rev1 (IMAP 4rev1) as specified in `2060`:RFC: and `3501`:RFC: * CRAM-MD5 as specified in `2195`:RFC: * Maildir as specified in the Maildir manpage and the qmail website * Standard Python 2.6 as implemented on POSIX-compliant systems Can I force OfflineIMAP to sync a folder right now? --------------------------------------------------- Yes: 1) if you use the `Blinkenlights` UI. That UI shows the active accounts as follows:: 4: [active] *Control: . 3: [ 4:36] personal: 2: [ 3:37] work: . 1: [ 6:28] uni: Simply press the appropriate digit (`3` for `personal`, etc.) to resync that account immediately. This will be ignored if a resync is already in progress for that account. 2) while in sleep mode, you can also send a SIGUSR1. See the :ref:`UNIX signals` section in the MANUAL for details. I get a "Mailbox already exists" error -------------------------------------- **Q:** When synchronizing, I receive errors such as:: Folder 'sent'[main-remote] could not be created. Server responded: ('NO', ['Mailbox already exists.']) **A:** IMAP folders are usually case sensitive. But some IMAP servers seem to treat "special" folders as case insensitive (e.g. the initial INBOX. part, or folders such as "Sent" or "Trash"). If you happen to have a folder "sent" on one side of things and a folder called "Sent" on the other side, offlineimap will try to create those folders on both sides. If you server happens to treat those folders as case-insensitive you can then see this warning. You can solve this by excluding the "sent" folder by filtering it from the repository settings:: folderfilter= lambda f: f not in ['sent'] Configuration Questions ======================= Can I synchronize multiple accounts with OfflineIMAP? ----------------------------------------------------- Of course! Just name them all in the accounts line in the general section of the configuration file, and add a per-account section for each one. You can also optionally use the -a option when you run OfflineIMAP to request that it only operate upon a subset of the accounts for a particular run. How do I specify the names of folders? -------------------------------------- You do not need to. OfflineIMAP is smart enough to automatically figure out what folders are present on the IMAP server and synchronize them. You can use the folderfilter and nametrans configuration file options to request only certain folders and rename them as they come in if you like. Also you can configure OfflineImap to only synchronize "subscribed" folders. How do I prevent certain folders from being synced? --------------------------------------------------- Use the folderfilter option. See the MANUAL for details and examples. What is the mailbox name recorder (mbnames) for? ------------------------------------------------ Some mail readers, such as mutt, are not capable of automatically determining the names of your mailboxes. OfflineIMAP can help these programs by writing the names of the folders in a format you specify. See the example offlineimap.conf for details. 
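For mutt, a configuration along these lines is typical; the key names follow the sample offlineimap.conf, so verify them against the copy shipped with your version (the output path is only an example)::

    [mbnames]
    enabled = yes
    filename = ~/.mutt/mailboxes
    header = "mailboxes "
    peritem = "+%(accountname)s/%(foldername)s"
    sep = " "
    footer = "\n"

With that in place, a `source ~/.mutt/mailboxes` line in your muttrc makes every synchronized folder known to mutt.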
Does OfflineIMAP verify SSL certificates? ----------------------------------------- You can verify an imapserver's certificate by specifying the CA certificate on a per-repository basis by setting the `sslcacertfile` option in the config file. (See the example offlineimap.conf for details.) If you do not specify any CA certificate, you will be presented with the server's certificate fingerprint and add that to the configuration file, to make sure it remains unchanged. No verification happens if connecting via STARTTLS. How do I generate an `sslcacertfile` file? ------------------------------------------ The `sslcacertfile` file must contain an SSL certificate (or a concatenated certificates chain) in PEM format. (See the documentation of `ssl.wrap_socket`_'s `certfile` parameter for the gory details.) You can use either openssl or gnutls to create a certificate file in the required format. #. via openssl:: openssl s_client -CApath /etc/ssl/certs -connect ${hostname}:imaps -showcerts \ | perl -ne 'print if /BEGIN/../END/; print STDERR if /return/' > $sslcacertfile ^D #. via gnutls:: gnutls-cli --print-cert -p imaps ${host} $sslcacertfile The path `/etc/ssl/certs` is not standardized; your system may store SSL certificates elsewhere. (On some systems it may be in `/usr/local/share/certs/`.) Before using the resulting file, ensure that openssl verified the certificate successfully. In case of problems, you can test the certificate using a command such as (credits to Daniel Shahaf for this) to verify the certificate:: % openssl s_client -CAfile $sslcacertfile -connect ${hostname}:imaps 2>&1 ` documentation is included in the user documentation (next section) and online browsable at ``_. It is mostly auto-generated from the source code and is a work in progress. Contributions in this area would be very appreciated. Following new commits --------------------- You can follow upstream commits on - `CIA.vc `, - `Ohloh `, - `GitHub `, - or on the `commits mailing list`_. Git: OfflineImap's branching Model And Workflow =============================================== Introduction ------------ This optional section provides you with information on how we use git branches and do releases. You will need to know very little about git to get started. For the impatient, see the :ref:`contribution checklist` below. Git Branching model -------------------- OfflineIMAP uses the following branches: * master * next * maint * (pu) * & several topic oriented feature branches. A topic may consist of one or more patches. master ++++++ If you're not sure what branch you should use, this one is for you. This is the mainline. Simple users should use this branch to follow OfflineIMAP's evolution. Usually, patches submitted to the mailing list should start off of this branch. next ++++ Patches recently merged are good candidates for this branch. The content of next is merged into the mainline (master) at release time for both stable and -rc releases. When patches are sent to the mailing list, contributors discuss about them. Once done and when patches looks ready for mainline, patches are first merged into next. Advanced users and testers use this branch to test last merged patches before they hit the mainline. This helps not introducing strong breackages directly in master. pu +++ pu stands for "proposed updates". If a topic is not ready for master nor next, it may be merged into pu. This branch only help developers to work on someone else topic or an earlier pending topic. 
This branch is **not intended to be checkouted**; never. Even developers don't do that. Due to the way pu is built you can't expect content there to work in any way... unless you clearly want to run into troubles. Developers can extract a topic from this branch to work on it. See the following section "Extract a topic from pu" in this documentation. maint +++++ This is the maintenance branch. It gets its own releases starting from an old stable release. It helps both users having troubles with last stable releases and users not wanting latest features or so to still benefit from strong bug fixes and security fixes. Release cycles -------------- A typical release cycle works like this: 1. A stable release is out. 2. Feature topics are sent, discussed and merged. 3. When enough work was merged, we start the freeze cycle: the first release candidate is out. 4. During the freeze cycle, no more features are merged. It's time to test OfflineIMAP. New candidates version are released. The more we are late in -rc releases the less patches are merged but bug fixes. 5. When we think a release is stable enough, we restart from step 1. .. _contribution checklist: Contribution Checklist (and a short version for the impatient) =============================================================== Create commits -------------- * make commits of logical units * check for unnecessary whitespace with ``git diff --check`` before committing * do not check in commented out code or unneeded files * the first line of the commit message should be a short description (50 characters is the soft limit, see DISCUSSION in git-commit(1)), and should skip the full stop * the body should provide a meaningful commit message, which: * uses the imperative, present tense: **change**, not **changed** or **changes**. * includes motivation for the change, and contrasts its implementation with previous behaviour * add a ``Signed-off-by: Your Name `` line to the commit message (or just use the option `-s` when committing) to confirm that you agree to the **Developer's Certificate of Origin** * make sure that you have tests for the bug you are fixing * make sure that the test suite passes after your commit Export commits as patches ------------------------- * use ``git format-patch -M`` to create the patch * do not PGP sign your patch * do not attach your patch, but read in the mail body, unless you cannot teach your mailer to leave the formatting of the patch alone. * be careful doing cut & paste into your mailer, not to corrupt whitespaces. * provide additional information (which is unsuitable for the commit message) between the ``---`` and the diffstat * if you change, add, or remove a command line option or make some other user interface change, the associated documentation should be updated as well. * if your name is not writable in ASCII, make sure that you send off a message in the correct encoding. * send the patch to the `mailing list`_ and the maintainer (nicolas.s-dev@laposte.net) if (and only if) the patch is ready for inclusion. If you use `git-send-email(1)`, please test it first by sending email to yourself. * see below for instructions specific to your mailer Long version ------------ I started reading over the SubmittingPatches document for Git, primarily because I wanted to have a document similar to it for OfflineIMAP to make sure people understand what they are doing when they write `Signed-off-by` line. 
But the patch submission requirements are a lot more relaxed here on the technical/contents front, because the OfflineIMAP is a lot smaller ;-). So here is only the relevant bits. Decide what branch to base your work on +++++++++++++++++++++++++++++++++++++++ In general, always base your work on the oldest branch that your change is relevant to. * A bugfix should be based on 'maint' in general. If the bug is not present in 'maint', base it on 'master'. For a bug that's not yet in 'master', find the topic that introduces the regression, and base your work on the tip of the topic. * A new feature should be based on 'master' in general. If the new feature depends on a topic that is in 'pu', but not in 'master', base your work on the tip of that topic. * Corrections and enhancements to a topic not yet in 'master' should be based on the tip of that topic. If the topic has not been merged to 'next', it's alright to add a note to squash minor corrections into the series. * In the exceptional case that a new feature depends on several topics not in 'master', start working on 'next' or 'pu' privately and send out patches for discussion. Before the final merge, you may have to wait until some of the dependent topics graduate to 'master', and rebase your work. To find the tip of a topic branch, run ``git log --first-parent master..pu`` and look for the merge commit. The second parent of this commit is the tip of the topic branch. Make separate commits for logically separate changes ++++++++++++++++++++++++++++++++++++++++++++++++++++ Unless your patch is really trivial, you should not be sending your changes in a single patch. Instead, always make a commit with complete commit message and generate a series of small patches from your repository. Describe the technical detail of the change(s). If your description starts to get too long, that's a sign that you probably need to split up your commit to finer grained pieces. That being said, patches which plainly describe the things that help reviewers check the patch, and future maintainers understand the code, are the most beautiful patches. Descriptions that summarise the point in the subject well, and describe the motivation for the change, the approach taken by the change, and if relevant how this differs substantially from the prior version, can be found on Usenet archives back into the late 80's. Consider it like good Netiquette, but for code. Generate your patch using git tools out of your commits +++++++++++++++++++++++++++++++++++++++++++++++++++++++ git based diff tools (git, Cogito, and StGIT included) generate unidiff which is the preferred format. You do not have to be afraid to use -M option to ``git diff`` or ``git format-patch``, if your patch involves file renames. The receiving end can handle them just fine. Please make sure your patch does not include any extra files which do not belong in a patch submission. Make sure to review your patch after generating it, to ensure accuracy. Before sending out, please make sure it cleanly applies to the "master" branch head. If you are preparing a work based on "next" branch, that is fine, but please mark it as such. Sending your patches ++++++++++++++++++++ People on the mailing list need to be able to read and comment on the changes you are submitting. It is important for a developer to be able to "quote" your changes, using standard e-mail tools, so that they may comment on specific portions of your code. For this reason, all patches should be submitted "inline". 
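A typical round trip with git's own tools might look like this; the branch name and output directory are arbitrary, and the list address is deliberately left as a placeholder (take it from the `mailing list`_ page)::

    git checkout -b my-topic master
    # ...make your commits of logical units...
    git format-patch -M --cover-letter origin/master -o outgoing/
    git send-email --to <list address> outgoing/*

`git send-email` keeps each patch inline and unmangled, which sidesteps most of the MUA problems described below.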
WARNING: Be wary of your MUAs word-wrap corrupting your patch. Do not cut-n-paste your patch; you can lose tabs that way if you are not careful. It is a common convention to prefix your subject line with [PATCH]. This lets people easily distinguish patches from other e-mail discussions. Use of additional markers after PATCH and the closing bracket to mark the nature of the patch is also encouraged. E.g. [PATCH/RFC] is often used when the patch is not ready to be applied but it is for discussion, [PATCH v2], [PATCH v3] etc. are often seen when you are sending an update to what you have previously sent. ``git format-patch`` command follows the best current practice to format the body of an e-mail message. At the beginning of the patch should come your commit message, ending with the Signed-off-by: lines, and a line that consists of three dashes, followed by the diffstat information and the patch itself. If you are forwarding a patch from somebody else, optionally, at the beginning of the e-mail message just before the commit message starts, you can put a "From: " line to name that person. You often want to add additional explanation about the patch, other than the commit message itself. Place such "cover letter" material between the three dash lines and the diffstat. Do not attach the patch as a MIME attachment, compressed or not. Do not let your e-mail client send quoted-printable. Do not let your e-mail client send format=flowed which would destroy whitespaces in your patches. Many popular e-mail applications will not always transmit a MIME attachment as plain text, making it impossible to comment on your code. A MIME attachment also takes a bit more time to process. This does not decrease the likelihood of your MIME-attached change being accepted, but it makes it more likely that it will be postponed. Exception: If your mailer is mangling patches then someone may ask you to re-send them using MIME, that is OK. Do not PGP sign your patch, at least for now. Most likely, your maintainer or other people on the list would not have your PGP key and would not bother obtaining it anyway. Your patch is not judged by who you are; a good patch from an unknown origin has a far better chance of being accepted than a patch from a known, respected origin that is done poorly or does incorrect things. If you really really really really want to do a PGP signed patch, format it as "multipart/signed", not a text/plain message that starts with '-----BEGIN PGP SIGNED MESSAGE-----'. That is not a text/plain, it's something else. Unless your patch is a very trivial and an obviously correct one, first send it with "To:" set to the mailing list, with "cc:" listing people who are involved in the area you are touching (the output from "git blame $path" and "git shortlog --no-merges $path" would help to identify them), to solicit comments and reviews. After the list reached a consensus that it is a good idea to apply the patch, re-send it with "To:" set to the maintainer and optionally "cc:" the list for inclusion. Do not forget to add trailers such as "Acked-by:", "Reviewed-by:" and "Tested-by:" after your "Signed-off-by:" line as necessary. Sign your work ++++++++++++++ To improve tracking of who did what, we've borrowed the "sign-off" procedure from the Linux kernel project on patches that are being emailed around. Although OfflineIMAP is a lot smaller project it is a good discipline to follow it. 
The sign-off is a simple line at the end of the explanation for the patch, which **certifies that you wrote it or otherwise have the right to pass it on as a open-source patch**. The rules are pretty simple: if you can certify the below: **Developer's Certificate of Origin 1.1** ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ By making a contribution to this project, I certify that: (a) The contribution was created in whole or in part by me and I have the right to submit it under the open source license indicated in the file; or (b) The contribution is based upon previous work that, to the best of my knowledge, is covered under an appropriate open source license and I have the right under that license to submit that work with modifications, whether created in whole or in part by me, under the same open source license (unless I am permitted to submit under a different license), as indicated in the file; or (c) The contribution was provided directly to me by some other person who certified (a), (b) or (c) and I have not modified it. (d) I understand and agree that this project and the contribution are public and that a record of the contribution (including all personal information I submit with it, including my sign-off) is maintained indefinitely and may be redistributed consistent with this project or the open source license(s) involved. then you just add a line saying Signed-off-by: Random J Developer This line can be automatically added by git if you run the git-commit command with the -s option. Notice that you can place your own Signed-off-by: line when forwarding somebody else's patch with the above rules for D-C-O. Indeed you are encouraged to do so. Do not forget to place an in-body "From: " line at the beginning to properly attribute the change to its true author (see above). Also notice that a real name is used in the Signed-off-by: line. Please don't hide your real name. If you like, you can put extra tags at the end: * "Reported-by:" is used to to credit someone who found the bug that the patch attempts to fix. * "Acked-by:" says that the person who is more familiar with the area the patch attempts to modify liked the patch. * "Reviewed-by:", unlike the other tags, can only be offered by the reviewer and means that she is completely satisfied that the patch is ready for application. It is usually offered only after a detailed review. * "Tested-by:" is used to indicate that the person applied the patch and found it to have the desired effect. You can also create your own tag or use one that's in common usage such as "Thanks-to:", "Based-on-patch-by:", or "Mentored-by:". An ideal patch flow =================== Here is an ideal patch flow for this project the current maintainer suggests to the contributors: (0) You come up with an itch. You code it up. (1) Send it to the list and cc people who may need to know about the change. The people who may need to know are the ones whose code you are butchering. These people happen to be the ones who are most likely to be knowledgeable enough to help you, but they have no obligation to help you (i.e. you ask for help, don't demand). ``git log -p -- $area_you_are_modifying`` would help you find out who they are. (2) You get comments and suggestions for improvements. You may even get them in a "on top of your change" patch form. (3) Polish, refine, and re-send to the list and the people who spend their time to improve your patch. Go back to step (2). (4) The list forms consensus that the last round of your patch is good. 
Send it to the list and cc the maintainer. (5) A topic branch is created with the patch and is merged to 'next', and cooked further and eventually graduates to 'master'. In any time between the (2)-(3) cycle, the maintainer may pick it up from the list and queue it to 'pu', in order to make it easier for people play with it without having to pick up and apply the patch to their trees themselves. Know the status of your patch after submission ---------------------------------------------- * You can use Git itself to find out when your patch is merged in master. ``git pull --rebase`` will automatically skip already-applied patches, and will let you know. This works only if you rebase on top of the branch in which your patch has been merged (i.e. it will not tell you if your patch is merged in pu if you rebase on top of master). .. * Read the git mailing list, the maintainer regularly posts messages entitled "What's cooking in git.git" and "What's in git.git" giving the status of various proposed changes. MUA specific hints ================== Some of patches I receive or pick up from the list share common patterns of breakage. Please make sure your MUA is set up properly not to corrupt whitespaces. Here are two common ones I have seen: * Empty context lines that do not have _any_ whitespace. * Non empty context lines that have one extra whitespace at the beginning. One test you could do yourself if your MUA is set up correctly is: * Send the patch to yourself, exactly the way you would, except To: and Cc: lines, which would not contain the list and maintainer address. * Save that patch to a file in UNIX mailbox format. Call it say a.patch. * Try to apply to the tip of the "master" branch from the git.git public repository:: $ git fetch http://kernel.org/pub/scm/git/git.git master:test-apply $ git checkout test-apply $ git reset --hard $ git am a.patch If it does not apply correctly, there can be various reasons. * Your patch itself does not apply cleanly. That is _bad_ but does not have much to do with your MUA. Please rebase the patch appropriately. * Your MUA corrupted your patch; "am" would complain that the patch does not apply. Look at .git/rebase-apply/ subdirectory and see what 'patch' file contains and check for the common corruption patterns mentioned above. * While you are at it, check what are in 'info' and 'final-commit' files as well. If what is in 'final-commit' is not exactly what you would want to see in the commit log message, it is very likely that your maintainer would end up hand editing the log message when he applies your patch. Things like "Hi, this is my first patch.\n", if you really want to put in the patch e-mail, should come after the three-dash line that signals the end of the commit message. Pine ---- (Johannes Schindelin) I don't know how many people still use pine, but for those poor souls it may be good to mention that the quell-flowed-text is needed for recent versions. ... the "no-strip-whitespace-before-send" option, too. AFAIK it was introduced in 4.60. (Linus Torvalds) And 4.58 needs at least this :: --- diff-tree 8326dd8350be64ac7fc805f6563a1d61ad10d32c (from e886a61f76edf5410573e92e38ce22974f9c40f1) Author: Linus Torvalds Date: Mon Aug 15 17:23:51 2005 -0700 Fix pine whitespace-corruption bug There's no excuse for unconditionally removing whitespace from the pico buffers on close. 
diff --git a/pico/pico.c b/pico/pico.c --- a/pico/pico.c +++ b/pico/pico.c @@ -219,7 +219,9 @@ PICO *pm; switch(pico_all_done){ /* prepare for/handle final events */ case COMP_EXIT : /* already confirmed */ packheader(); +#if 0 stripwhitespace(); +#endif c |= COMP_EXIT; break; (Daniel Barkalow) > A patch to SubmittingPatches, MUA specific help section for > users of Pine 4.63 would be very much appreciated. Ah, it looks like a recent version changed the default behavior to do the right thing, and inverted the sense of the configuration option. (Either that or Gentoo did it.) So you need to set the "no-strip-whitespace-before-send" option, unless the option you have is "strip-whitespace-before-send", in which case you should avoid checking it. Thunderbird ----------- (A Large Angry SCM) By default, Thunderbird will both wrap emails as well as flag them as being 'format=flowed', both of which will make the resulting email unusable by git. Here are some hints on how to successfully submit patches inline using Thunderbird. There are two different approaches. One approach is to configure Thunderbird to not mangle patches. The second approach is to use an external editor to keep Thunderbird from mangling the patches. **Approach #1 (configuration):** This recipe is current as of Thunderbird 2.0.0.19. Three steps: 1. Configure your mail server composition as plain text Edit...Account Settings...Composition & Addressing, uncheck 'Compose Messages in HTML'. 2. Configure your general composition window to not wrap Edit..Preferences..Composition, wrap plain text messages at 0 3. Disable the use of format=flowed Edit..Preferences..Advanced..Config Editor. Search for: mailnews.send_plaintext_flowed toggle it to make sure it is set to 'false'. After that is done, you should be able to compose email as you otherwise would (cut + paste, git-format-patch | git-imap-send, etc), and the patches should not be mangled. **Approach #2 (external editor):** This recipe appears to work with the current [*1*] Thunderbird from Suse. The following Thunderbird extensions are needed: AboutConfig 0.5 http://aboutconfig.mozdev.org/ External Editor 0.7.2 http://globs.org/articles.php?lng=en&pg=8 1) Prepare the patch as a text file using your method of choice. 2) Before opening a compose window, use Edit->Account Settings to uncheck the "Compose messages in HTML format" setting in the "Composition & Addressing" panel of the account to be used to send the patch. [*2*] 3) In the main Thunderbird window, _before_ you open the compose window for the patch, use Tools->about:config to set the following to the indicated values:: mailnews.send_plaintext_flowed => false mailnews.wraplength => 0 4) Open a compose window and click the external editor icon. 5) In the external editor window, read in the patch file and exit the editor normally. 6) Back in the compose window: Add whatever other text you wish to the message, complete the addressing and subject fields, and press send. 7) Optionally, undo the about:config/account settings changes made in steps 2 & 3. [Footnotes] *1* Version 1.0 (20041207) from the MozillaThunderbird-1.0-5 rpm of Suse 9.3 professional updates. *2* It may be possible to do this with about:config and the following settings but I haven't tried, yet:: mail.html_compose => false mail.identity.default.compose_html => false mail.identity.id?.compose_html => false (Lukas Sandström) There is a script in contrib/thunderbird-patch-inline which can help you include patches with Thunderbird in an easy way. 
To use it, do the steps above and then use the script as the external editor. Gnus ---- '|' in the *Summary* buffer can be used to pipe the current message to an external program, and this is a handy way to drive "git am". However, if the message is MIME encoded, what is piped into the program is the representation you see in your *Article* buffer after unwrapping MIME. This is often not what you would want for two reasons. It tends to screw up non ASCII characters (most notably in people's names), and also whitespaces (fatal in patches). Running 'C-u g' to display the message in raw form before using '|' to run the pipe can work this problem around. KMail ----- This should help you to submit patches inline using KMail. 1) Prepare the patch as a text file. 2) Click on New Mail. 3) Go under "Options" in the Composer window and be sure that "Word wrap" is not set. 4) Use Message -> Insert file... and insert the patch. 5) Back in the compose window: add whatever other text you wish to the message, complete the addressing and subject fields, and press send. Gmail ----- GMail does not appear to have any way to turn off line wrapping in the web interface, so this will mangle any emails that you send. You can however use "git send-email" and send your patches through the GMail SMTP server, or use any IMAP email client to connect to the google IMAP server and forward the emails through that. To use ``git send-email`` and send your patches through the GMail SMTP server, edit `~/.gitconfig` to specify your account settings:: [sendemail] smtpencryption = tls smtpserver = smtp.gmail.com smtpuser = user@gmail.com smtppass = p4ssw0rd smtpserverport = 587 Once your commits are ready to be sent to the mailing list, run the following commands:: $ git format-patch --cover-letter -M origin/master -o outgoing/ $ edit outgoing/0000-* $ git send-email outgoing/* To submit using the IMAP interface, first, edit your `~/.gitconfig` to specify your account settings:: [imap] folder = "[Gmail]/Drafts" host = imaps://imap.gmail.com user = user@gmail.com pass = p4ssw0rd port = 993 sslverify = false You might need to instead use: folder = "[Google Mail]/Drafts" if you get an error that the "Folder doesn't exist". Once your commits are ready to be sent to the mailing list, run the following commands:: $ git format-patch --cover-letter -M --stdout origin/master | git imap-send Just make sure to disable line wrapping in the email client (GMail web interface will line wrap no matter what, so you need to use a real IMAP client). Working with Git ================ Extract a topic from pu ----------------------- pu is built this way:: git checkout pu git reset --keep next git merge --no-ff -X theirs topic1 git merge --no-ff -X theirs topic2 git merge --no-ff -X theirs blue git merge --no-ff -X theirs orange ... As a consequence: 1. Each topic merged uses a merge commit. A merge commit is a commit having 2 ancestors. Actually, Git allows more than 2 parents but we don't use this feature. It's intended. 2. Paths in pu may mix up multiple versions if all the topics don't use the same base commit. This is very often the case as topics aren't rebased: it guarantees each topic is strictly identical to the last version sent to the mailing list. No surprise. What you need to extract a particular topic is the sha1 of the tip of that branch (the last commit of the topic). Assume you want the branch of the topic called 'blue'. 
First, look at the log given by this command:: git log --reverse --merges --parents origin/next..origin/pu With this command you ask for the log: * from next to pu * in reverse order (older first) * merge commits only * with the sha1 of the ancestors In this list, find the topic you're looking for, basing you search on the lines like:: Merge branch 'topic/name' into pu By convention, it has the form /. When you're at it, pick the topic ancestor sha1. It's always the last sha1 in the line starting by 'commit'. For you to know: * the first is the sha1 of the commit you see: the merge commit * the following sha1 is the ancestor of the branch checkouted at merge time (always the previous merged topic or the ancien next in our case) * last is the branch merged Giving:: commit sha1_of_merge_commit sha1_of_ancient_pu sha1_of_topic_blue Then, you only have to checkout the topic from there:: git checkout -b blue sha1_of_topic_blue and you're done! You've just created a new branch called "blue" with the blue content. Be aware this topic is almostly not updated against current next branch. ,-) spaetz-offlineimap-c9e9690/docs/doc-src/INSTALL.rst000077700000000000000000000000001176237577200240722../INSTALL.rstustar00rootroot00000000000000spaetz-offlineimap-c9e9690/docs/doc-src/MANUAL.rst000077700000000000000000000000001176237577200236102../MANUAL.rstustar00rootroot00000000000000spaetz-offlineimap-c9e9690/docs/doc-src/advanced_config.rst000066400000000000000000000005671176237577200240200ustar00rootroot00000000000000Message filtering ================= There are two ways to selectively filter messages out of a folder, using `maxsize` and `maxage`. Setting each option will basically ignore all messages that are on the server by pretending they don't exist. :todo: explain them and give tipps on how to use and not use them. Use cases! maxage ------ :todo: ! maxsize ------- :todo: ! spaetz-offlineimap-c9e9690/docs/doc-src/conf.py000066400000000000000000000147551176237577200214770ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # pyDNS documentation build configuration file, created by # sphinx-quickstart on Tue Feb 2 10:00:47 2010. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0,os.path.abspath('../..')) from offlineimap import __version__,__author__ # -- General configuration ----------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo'] autoclass_content = "both" # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8' # The master toctree document. master_doc = 'index' # General information about the project. 
project = u'OfflineImap' copyright = u'2002-2010, ' + __author__ # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = __version__ # The full version, including alpha/beta/rc tags. release = __version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. #unused_docs = [] # List of directories, relative to source directory, that shouldn't be searched # for source files. exclude_trees = [] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). add_module_names = False # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. html_theme = 'default' #html_style = '' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". #html_static_path = ['html'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. html_use_modindex = False # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. 
#html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = '' # Output file base name for HTML help builder. htmlhelp_basename = 'dev-doc' # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). #latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'offlineimap.tex', u'OfflineImap Documentation', u'OfflineImap contributors', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # Additional stuff for the LaTeX preamble. #latex_preamble = '' # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_use_modindex = True # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {'http://docs.python.org/': None} spaetz-offlineimap-c9e9690/docs/doc-src/features.rst000066400000000000000000000073701176237577200225430ustar00rootroot00000000000000Description =========== OfflineIMAP is a tool to simplify your e-mail reading. With OfflineIMAP, you can read the same mailbox from multiple computers. You get a current copy of your messages on each computer, and changes you make one place will be visible on all other systems. For instance, you can delete a message on your home computer, and it will appear deleted on your work computer as well. OfflineIMAP is also useful if you want to use a mail reader that does not have IMAP support, has poor IMAP support, or does not provide disconnected operation. OfflineIMAP works on pretty much any POSIX operating system, such as Linux, BSD operating systems, MacOS X, Solaris, etc. OfflineIMAP is a Free Software project licensed under the GNU General Public License. You can download it for free, and you can modify it. In fact, you are encouraged to contribute to OfflineIMAP, and doing so is fast and easy. OfflineIMAP is FAST; it synchronizes my two accounts with over 50 folders in 3 seconds. Other similar tools might take over a minute, and achieve a less-reliable result. Some mail readers can take over 10 minutes to do the same thing, and some don't even support it at all. Unlike other mail tools, OfflineIMAP features a multi-threaded synchronization algorithm that can dramatically speed up performance in many situations by synchronizing several different things simultaneously. 
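The degree of parallelism is controlled from the configuration file. A minimal sketch (the account and repository names below are only placeholders; see the annotated offlineimap.conf for the full description of these options)::

    [general]
    accounts = Personal, Work
    # synchronize up to two accounts at the same time
    maxsyncaccounts = 2

    [Repository RemotePersonal]
    # use up to three simultaneous connections to this IMAP server
    maxconnections = 3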
OfflineIMAP is FLEXIBLE; you can customize which folders are synced via regular expressions, lists, or Python expressions; a versatile and comprehensive configuration file is used to control behavior; two user interfaces are built-in; fine-tuning of synchronization performance is possible; internal or external automation is supported; SSL and PREAUTH tunnels are both supported; offline (or "unplugged") reading is supported; and esoteric IMAP features are supported to ensure compatibility with the widest variety of IMAP servers. OfflineIMAP is SAFE; it uses an algorithm designed to prevent mail loss at all costs. Because of the design of this algorithm, even programming errors should not result in loss of mail. I am so confident in the algorithm that I use my own personal and work accounts for testing of OfflineIMAP pre-release, development, and beta releases. Of course, legally speaking, OfflineIMAP comes with no warranty, so I am not responsible if this turns out to be wrong. .. note: OfflineImap was written by John Goerzen, who retired from maintaining. It is now maintained by Nicolas Sebrecht & Sebastian Spaeth at https://github.com/spaetz/offlineimap. Thanks to John for his great job and to have share this project with us. Method of Operation =================== OfflineIMAP traditionally operates by maintaining a hierarchy of mail folders in Maildir format locally. Your own mail reader will read mail from this tree, and need never know that the mail comes from IMAP. OfflineIMAP will detect changes to the mail folders on your IMAP server and your own computer and bi-directionally synchronize them, copying, marking, and deleting messages as necessary. With OfflineIMAP 4.0, a powerful new ability has been introduced ― the program can now synchronize two IMAP servers with each other, with no need to have a Maildir layer in-between. Many people use this if they use a mail reader on their local machine that does not support Maildirs. People may install an IMAP server on their local machine, and point both OfflineIMAP and their mail reader of choice at it. This is often preferable to the mail reader's own IMAP support since OfflineIMAP supports many features (offline reading, for one) that most IMAP-aware readers don't. However, this feature is not as time-tested as traditional syncing, so my advice is to stick with normal methods of operation for the time being. spaetz-offlineimap-c9e9690/docs/doc-src/index.rst000066400000000000000000000037661176237577200220410ustar00rootroot00000000000000.. OfflineImap documentation master file .. _OfflineImap: http://offlineimap.org Welcome to :mod:`offlineimaps`'s documentation ============================================== `OfflineImap`_ synchronizes email between an IMAP server and a MailDir or between two IMAP servers. It offers very powerful and flexible configuration options, that allow things such as the filtering of folders, transposing of names via static configuration or python scripting. It plays well with mutt and other MailDir consuming email clients. The documentation contains the end user documentation in a first part. It also contains use cases and example configurations. It is followed by the internal :doc:`API documentation ` for those interested in modifying the source code or otherwise peek into the OfflineImap internals in a second part. If you just want to get started with minimal fuzz, have a look at our `online quick start guide `_. Do note though, that our configuration options are many and powerful. 
Perusing our precious documentation does often pay off! More information on specific topics can be found on the following pages: **User documentation** * :doc:`Overview and features ` * :doc:`installation/uninstall ` **Configuration** * :doc:`user manual/Configuration ` * :doc:`Folder filtering & name transformation guide ` * :doc:`maxage ` * :doc:`command line options ` * :doc:`Frequently Asked Questions ` **Developer documentation** * :doc:`HACKING HowTo & git workflows ` * :doc:`API documentation ` for internal details on the :mod:`offlineimap` module .. toctree:: :hidden: features INSTALL MANUAL nametrans advanced_config offlineimap FAQ HACKING API repository ui .. moduleauthor:: John Goerzen, and many others. See AUTHORS and the git history for a full list. :License: This module is covered under the GNU GPL v2 (or later). spaetz-offlineimap-c9e9690/docs/doc-src/nametrans.rst000066400000000000000000000234711176237577200227150ustar00rootroot00000000000000.. _folder_filtering_and_name_translation: Folder filtering and Name translation ===================================== OfflineImap provides advanced and potentially complex possibilities for filtering and translating folder names. If you don't need any of this, you can safely skip this section. .. warning:: Starting with v6.4.0, OfflineImap supports the creation of folders on the remote repostory. This change means that people that only had a nametrans option on the remote repository (everyone) will need to have a nametrans setting on the local repository too that will reverse the name transformation. See section `Reverse nametrans`_ for details. folderfilter ------------ If you do not want to synchronize all your filters, you can specify a `folderfilter`_ function that determines which folders to include in a sync and which to exclude. Typically, you would set a folderfilter option on the remote repository only, and it would be a lambda or any other python function. The only parameter to that function is the folder name. If the filter function returns True, the folder will be synced, if it returns False, it. will be skipped. The folderfilter operates on the *UNTRANSLATED* name (before any `nametrans`_ fudging takes place). Consider the examples below to get an idea of what they do. Example 1: synchronizing only INBOX and Sent:: folderfilter = lambda folder: folder in ['INBOX', 'Sent'] Example 2: synchronizing everything except Trash:: folderfilter = lambda folder: folder not in ['Trash'] Example 3: Using a regular expression to exclude Trash and all folders containing the characters "Del":: folderfilter = lambda folder: not re.search('(^Trash$|Del)', folder) .. note:: If folderfilter is not specified, ALL remote folders will be synchronized. You can span multiple lines by indenting the others. (Use backslashes at the end when required by Python syntax) For instance:: folderfilter = lambda foldername: foldername in ['INBOX', 'Sent Mail', 'Deleted Items', 'Received'] Usually it suffices to put a `folderfilter`_ setting in the remote repository section. You might want to put a folderfilter option on the local repository if you want to prevent some folders on the local repository to be created on the remote one. (Even in this case, folder filters on the remote repository will prevent that) folderincludes -------------- You can specify `folderincludes`_ to manually include additional folders to be synced, even if they had been filtered out by a folderfilter setting. `folderincludes`_ should return a Python list. 
This can be used to 1) add a folder that was excluded by your folderfilter rule, 2) to include a folder that your server does not specify with its LIST option, or 3) to include a folder that is outside your basic `reference`. The `reference` value will not be prefixed to this folder name, even if you have specified one. For example:: folderincludes = ['debian.user', 'debian.personal'] This will add the "debian.user" and "debian.personal" folders even if you have filtered out everything starting with "debian" in your folderfilter settings. createfolders ------------- By default OfflineImap propagates new folders in both directions. Sometimes this is not what you want. E.g. you might want new folders on your IMAP server to propagate to your local MailDir, but not the other way around. The 'readonly' setting on a repository will not help here, as it prevents any change from occuring on that repository. This is what the `createfolders` setting is for. By default it is `True`, meaning that new folders can be created on this repository. To prevent folders from ever being created on a repository, set this to `False`. If you set this to False on the REMOTE repository, you will not have to create the `Reverse nametrans`_ rules on the LOCAL repository. nametrans ---------- Sometimes, folders need to have different names on the remote and the local repositories. To achieve this you can specify a folder name translator. This must be a eval-able Python expression that takes a foldername arg and returns the new value. We suggest a lambda function, but it could be any python function really. If you use nametrans rules, you will need to set them both on the remote and the local repository, see `Reverse nametrans`_ just below for details. The following examples are thought to be put in the remote repository section. The below will remove "INBOX." from the leading edge of folders (great for Courier IMAP users):: nametrans = lambda folder: re.sub('^INBOX\.', '', folder) Using Courier remotely and want to duplicate its mailbox naming locally? Try this:: nametrans = lambda folder: re.sub('^INBOX\.*', '.', folder) .. warning:: You MUST construct nametrans rules such that it NEVER returns the same value for two folders, UNLESS the second values are filtered out by folderfilter below. That is, two filters on one side may never point to the same folder on the other side. Failure to follow this rule will result in undefined behavior. See also *Sharing a maildir with multiple IMAP servers* in the :ref:`pitfalls` section. Reverse nametrans +++++++++++++++++ Since 6.4.0, OfflineImap supports the creation of folders on the remote repository and that complicates things. Previously, only one nametrans setting on the remote repository was needed and that transformed a remote to a local name. However, nametrans transformations are one-way, and OfflineImap has no way using those rules on the remote repository to back local names to remote names. Take a remote nametrans rule `lambda f: re.sub('^INBOX/','',f)` which cuts of any existing INBOX prefix. Now, if we parse a list of local folders, finding e.g. a folder "Sent", is it supposed to map to "INBOX/Sent" or to "Sent"? We have no way of knowing. This is why **every nametrans setting on a remote repository requires an equivalent nametrans rule on the local repository that reverses the transformation**. Take the above examples. 
If your remote nametrans setting was:: nametrans = lambda folder: re.sub('^INBOX\.', '', folder) then you will want to have this in your local repository, prepending "INBOX" to any local folder name:: nametrans = lambda folder: 'INBOX' + folder Failure to set the local nametrans rule will lead to weird-looking error messages of -for instance- this type:: ERROR: Creating folder moo.foo on repository remote Folder 'moo.foo'[remote] could not be created. Server responded: ('NO', ['Unknown namespace.']) (This indicates that you attempted to create a folder "Sent" when all remote folders needed to be under the prefix of "INBOX."). OfflineImap will make some sanity checks if it needs to create a new folder on the remote side and a back-and-forth nametrans-lation does not yield the original foldername (as that could potentially lead to infinite folder creation cycles). You can probably already see now that creating nametrans rules can be a pretty daunting and complex endeavour. Check out the Use cases in the manual. If you have some interesting use cases that we can present as examples here, please let us know. Debugging folderfilter and nametrans ------------------------------------ Given the complexity of the functions and regexes involved, it is easy to misconfigure things. One way to test your configuration without danger to corrupt anything or to create unwanted folders is to invoke offlineimap with the `--info` option. It will output a list of folders and their transformations on the screen (save them to a file with -l info.log), and will help you to tweak your rules as well as to understand your configuration. It also provides good output for bug reporting. FAQ on nametrans ---------------- Where to put nametrans rules, on the remote and/or local repository? ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ If you never intend to create new folders on the LOCAL repository that need to be synced to the REMOTE repository, it is sufficient to create a nametrans rule on the remote Repository section. This will be used to determine the names of new folder names on the LOCAL repository, and to match existing folders that correspond. *IF* you create folders on the local repository, that are supposed to be automatically created on the remote repository, you will need to create a nametrans rule that provides the reverse name translation. (A nametrans rule provides only a one-way translation of names and in order to know which names folders on the LOCAL side would have on the REMOTE side, you need to specify the reverse nametrans rule on the local repository) OfflineImap will complain if it needs to create a new folder on the remote side and a back-and-forth nametrans-lation does not yield the original foldername (as that could potentially lead to infinite folder creation cycles). What folder separators do I need to use in nametrans rules? +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ **Q:** If I sync from an IMAP server with folder separator '/' to a Maildir using the default folder separator '.' which do I need to use in nametrans rules?:: nametrans = lambda f: "INBOX/" + f or:: nametrans = lambda f: "INBOX." + f **A:** Generally use the folder separator as defined in the repository you write the nametrans rule for. That is, use '/' in the above case. We will pass in the untranslated name of the IMAP folder as parameter (here `f`). The translated name will ultimately have all folder separators be replaced with the destination repositories' folder separator. 
So if 'f' was "Sent", the first nametrans yields the translated name "INBOX/Sent" to be used on the other side. As that repository uses the folder separator '.' rather than '/', the ultimate name to be used will be "INBOX.Sent". (As a final note, the smart will see that both variants of the above nametrans rule would have worked identically in this case) spaetz-offlineimap-c9e9690/docs/doc-src/offlineimap.rst000066400000000000000000000117471176237577200232210ustar00rootroot00000000000000The offlineimap 'binary' command line options ============================================= Offlineimap is invoked with the following pattern: `offlineimap [args...]`. Where [args...] are as follows: Options: --dry-run This mode protects us from performing any actual action. It will not precisely give the exact information what will happen. If e.g. it would need to create a folder, it merely outputs "Would create folder X", but not how many and which mails it would transfer. --info Output information on the configured email repositories. Useful for debugging and bug reporting. Use in conjunction with the -a option to limit the output to a single account. --version show program's version number and exit -h, --help show this help message and exit -1 Disable all multithreading operations and use solely a single-thread sync. This effectively sets the maxsyncaccounts and all maxconnections configuration file variables to 1. -P DIR Sets OfflineIMAP into profile mode. The program will create DIR (it must not already exist). As it runs, Python profiling information about each thread is logged into profiledir. Please note: This option is present for debugging and optimization only, and should NOT be used unless you have a specific reason to do so. It will significantly slow program performance, may reduce reliability, and can generate huge amounts of data. This option implies the singlethreading option (-1). -a ACCOUNTS Overrides the accounts section in the config file. Lets you specify a particular account or set of accounts to sync without having to edit the config file. You might use this to exclude certain accounts, or to sync some accounts that you normally prefer not to. -c FILE Specifies a configuration file to use in lieu of ~/.offlineimaprc. -d type1,[type2...] Enables debugging for OfflineIMAP. This is useful if you are trying to track down a malfunction or figure out what is going on under the hood. I suggest that you use this with -1 in order to make the results more sensible. This option requires one or more debugtypes, separated by commas. These define what exactly will be debugged, and so far include the options: imap, thread,maildir or ALL. The imap option will enable IMAP protocol stream and parsing debugging. Note that the output may contain passwords, so take care to remove that from the debugging output before sending it to anyone else. The maildir option will enable debugging for certain Maildir operations. -l FILE Log to FILE -f folder1,[folder2...] Only sync the specified folders. The 'folder's are the *untranslated* foldernames. This command-line option overrides any 'folderfilter' and 'folderincludes' options in the configuration file. -k `[section:]option=value` Override configuration file option. If"section" is omitted, it defaults to "general". Any underscores "_" in the section name are replaced with spaces: for instance, to override option "autorefresh" in the "[Account Personal]" section in the config file one would use "-k Account_Personal:autorefresh=30". 
-o Run only once, ignoring any autorefresh setting in the configuration file. -q Run only quick synchronizations. Ignore any flag updates on IMAP servers. -u INTERFACE Specifies an alternative user interface to use. This overrides the default specified in the configuration file. The UI specified with -u will be forced to be used, even if checks determine that it is not usable. Possible interface choices are: Curses.Blinkenlights, TTY.TTYUI, Noninteractive.Basic, Noninteractive.Quiet, Machine.MachineUI Indices and tables ================== * :ref:`genindex` * :ref:`search` spaetz-offlineimap-c9e9690/docs/doc-src/repository.rst000066400000000000000000000043071176237577200231410ustar00rootroot00000000000000.. currentmodule:: offlineimap.repository :mod:`offlineimap.repository` -- Email repositories ------------------------------------------------------------ A derivative of class :class:`Base.BaseRepository` represents an email repository depending on the type of storage, possible options are: * :class:`IMAPRepository`, * :class:`MappedIMAPRepository` * :class:`GmailRepository`, * :class:`MaildirRepository`, or * :class:`LocalStatusRepository`. Which class you need depends on your account configuration. The helper class :class:`offlineimap.repository.Repository` is an *autoloader*, that returns the correct class depending on your configuration. So when you want to instanciate a new :mod:`offlineimap.repository`, you will mostly do it through this class. .. autoclass:: offlineimap.repository.Repository :members: :inherited-members: :mod:`offlineimap.repository.Base.BaseRepository` -- Representation of a mail repository ------------------------------------------------------------------------------------------ .. autoclass:: offlineimap.repository.Base.BaseRepository :members: :inherited-members: :undoc-members: .. .. note:: :meth:`foo` .. .. attribute:: Database.MODE Defines constants that are used as the mode in which to open a database. MODE.READ_ONLY Open the database in read-only mode MODE.READ_WRITE Open the database in read-write mode .. autoclass:: offlineimap.repository.IMAPRepository .. autoclass:: offlineimap.repository.MappedIMAPRepository .. autoclass:: offlineimap.repository.GmailRepository .. autoclass:: offlineimap.repository.MaildirRepository .. autoclass:: offlineimap.repository.LocalStatusRepository :mod:`offlineimap.folder` -- Basic representation of a local or remote Mail folder --------------------------------------------------------------------------------------------------------- .. autoclass:: offlineimap.folder.Base.BaseFolder :members: :inherited-members: :undoc-members: .. .. attribute:: Database.MODE Defines constants that are used as the mode in which to open a database. MODE.READ_ONLY Open the database in read-only mode MODE.READ_WRITE Open the database in read-write mode spaetz-offlineimap-c9e9690/docs/doc-src/ui.rst000066400000000000000000000015311176237577200213330ustar00rootroot00000000000000:mod:`offlineimap.ui` -- A flexible logging system -------------------------------------------------------- .. currentmodule:: offlineimap.ui OfflineImap has various ui systems, that can be selected. They offer various functionalities. They must implement all functions that the :class:`offlineimap.ui.UIBase` offers. Early on, the ui must be set using :meth:`getglobalui` .. automethod:: offlineimap.ui.setglobalui .. automethod:: offlineimap.ui.getglobalui Base UI plugin ^^^^^^^^^^^^^^^^^^^^^^^^^^ .. autoclass:: offlineimap.ui.UIBase.UIBase :members: :inherited-members: .. .. 
note:: :meth:`foo` .. .. attribute:: Database.MODE Defines constants that are used as the mode in which to open a database. MODE.READ_ONLY Open the database in read-only mode MODE.READ_WRITE Open the database in read-write mode spaetz-offlineimap-c9e9690/offlineimap.conf000066400000000000000000000542231176237577200210500ustar00rootroot00000000000000# Offlineimap sample configuration file # This file documents *all* possible options and can be quite scary. # Looking for a quick start? Take a look at offlineimap.conf.minimal. # More details can be found in the included user documention, which is # also available at: http://docs.offlineimap.org/en/latest/ # NOTE: Settings generally support python interpolation. This means # values can contain python format strings which refer to other values # in the same section, or values in a special DEFAULT section. This # allows you for example to use common settings for multiple accounts: # # [Repository Gmail1] # trashfolder: %(gmailtrashfolder)s # # [Repository Gmail2] # trashfolder: %(gmailtrashfolder)s # # [DEFAULT] # gmailtrashfolder = [Google Mail]/Papierkorb # # would set the trashfolder setting for your German gmail accounts. # NOTE2: This implies that any '%' needs to be encoded as '%%' ################################################## # General definitions ################################################## [general] # This specifies where offlineimap is to store its metadata. # This directory will be created if it does not already exist. #metadata = ~/.offlineimap # This variable specifies which accounts are defined. Separate them # with commas. Account names should be alphanumeric only. # You will need to specify one section per account below. You may # not use "general" for an account name. accounts = Test # Offlineimap can synchronize more than one account at a time. If you # want to enable this feature, set the below value to something # greater than 1. To force it to synchronize only one account at a # time, set it to 1. # # Note: if you are using autorefresh and have more than one account, # you must set this number to be >= to the number of accounts you have; # since any given sync run never "finishes" due to a timer, you will never # sync your additional accounts if this is 1. #maxsyncaccounts = 1 # You can specify one or more user interface modules for OfflineIMAP # to use. OfflineIMAP will try the first in the list, and if it # fails, the second, and so forth. # # The pre-defined options are: # Blinkenlights -- A fancy (terminal) interface # TTYUI -- a text-based (terminal) interface # Basic -- Noninteractive interface suitable for cron'ing # Quiet -- Noninteractive interface, generates no output # except for errors. # MachineUI -- Interactive interface suitable for machine # parsing. # # You can override this with a command-line option -u. #ui = basic # If you try to synchronize messages to a folder which the IMAP server # considers read-only, OfflineIMAP will generate a warning. If you want # to suppress these warnings, set ignore-readonly to yes. Read-only # IMAP folders allow reading but not modification, so if you try to # change messages in the local copy of such a folder, the IMAP server # will prevent OfflineIMAP from propagating those changes to the IMAP # server. Note that ignore-readonly is unrelated to the "readonly" # setting which prevents a repository from being modified at all. 
#ignore-readonly = no ########## Advanced settings # You can give a Python source filename here and all config file # python snippets will be evaluated in the context of that file. # This allows you to e.g. define helper functions in the Python # source file and call them from this config file. You can find # an example of this in the manual. # # pythonfile = ~/.offlineimap.py # # By default, OfflineIMAP will not exit due to a network error until # the operating system returns an error code. Operating systems can sometimes # take forever to notice this. Here you can activate a timeout on the # socket. This timeout applies to individual socket reads and writes, # not to an overall sync operation. You could perfectly well have a 30s # timeout here and your sync still take minutes. # # Values in the 30-120 second range are reasonable. # # The default is to have no timeout beyond the OS. Times are given in seconds. # # socktimeout = 60 # By default, OfflineIMAP will use fsync() to force data out to disk at # opportune times to ensure consistency. This can, however, reduce # performance. Users where /home is on SSD (Flash) may also wish to reduce # write cycles. Therefore, you can disable OfflineIMAP's use of fsync(). # Doing so will come at the expense of greater risk of message duplication # in the event of a system crash or power loss. Default is fsync = true. # Set fsync = false ot disable fsync. # # fsync = true ################################################## # Mailbox name recorder ################################################## [mbnames] # offlineimap can record your mailbox names in a format you specify. # You can define the header, each mailbox item, the separator, # and the footer. Here is an example for Mutt. # If enabled is yes, all six setting must be specified, even if they # are just the empty string "". # # The header, peritem, sep, and footer are all Python expressions passed # through eval, so you can (and must) use Python quoting. enabled = no filename = ~/Mutt/muttrc.mailboxes header = "mailboxes " peritem = "+%(accountname)s/%(foldername)s" sep = " " footer = "\n" # You can also specify a folderfilter. It will apply to the # *translated* folder name here, and it takes TWO arguments: # accountname and foldername. In all other ways, it will # behave identically to the folderfilter for accounts. Please see # that section for more information and examples. # # Note that this filter can be used only to further restrict mbnames # to a subset of folders that pass the account's folderfilter. ################################################## # Accounts ################################################## # This is an account definition clause. You'll have one of these # for each account listed in general/accounts above. [Account Test] ########## Basic settings # These settings specify the two folders that you will be syncing. # You'll need to have a "Repository ..." section for each one. localrepository = LocalExample remoterepository = RemoteExample ########## Advanced settings # You can have offlineimap continue running indefinitely, automatically # syncing your mail periodically. If you want that, specify how # frequently to do that (in minutes) here. You can also specify # fractional minutes (ie, 3.25). # autorefresh = 5 # OfflineImap can replace a number of full updates by quick # synchronizations. 
It only synchronizes a folder if 1) a Maildir # folder has changed, or 2) if an IMAP folder has received new messages # or had messages deleted, ie it does not update if only IMAP flags have # changed. Full updates need to fetch ALL flags for all messages, so # this makes quite a performance difference (especially if syncing # between two IMAP servers). # Specify 0 for never, -1 for always (works even in non-autorefresh # mode), or a positive integer to do quick updates before doing # another full synchronization (requires autorefresh). Updates are # always performed after minutes, be they quick or full. # quick = 10 # You can specify a pre and post sync hook to execute a external command. # In this case a call to imapfilter to filter mail before the sync process # starts and a custom shell script after the sync completes. # The pre sync script has to complete before a sync to the account will # start. # presynchook = imapfilter # postsynchook = notifysync.sh # You can also specify parameters to the commands # presynchook = imapfilter -c someotherconfig.lua # OfflineImap caches the state of the synchronisation to e.g. be able to # determine if a mail has been deleted on one side or added on the # other. # # The default and historical backend is 'plain' which writes out the # state in plain text files. On Repositories with large numbers of # mails, the performance might not be optimal, as we write out the # complete file for each change. Another new backend 'sqlite' is # available which stores the status in sqlite databases. # # If you switch the backend, you may want to delete the old cache # directory in ~/.offlineimap/Account-/LocalStatus manually # once you are sure that things work. # #status_backend = plain # If you have a limited amount of bandwidth available you can exclude larger # messages (e.g. those with large attachments etc). If you do this it # will appear to offlineimap that these messages do not exist at all. They # will not be copied, have flags changed etc. For this to work on an IMAP # server the server must have server side search enabled. This works with gmail # and most imap servers (e.g. cyrus etc) # The maximum size should be specified in bytes - e.g. 2000000 for approx 2MB # maxsize = 2000000 # When you are starting to sync an already existing account you can tell # offlineimap to sync messages from only the last x days. When you do # this messages older than x days will be completely ignored. This can # be useful for importing existing accounts when you do not want to # download large amounts of archive email. # # Messages older than maxage days will not be synced, their flags will # not be changed, they will not be deleted etc. For offlineimap it will # be like these messages do not exist. This will perform an IMAP search # in the case of IMAP or Gmail and therefor requires that the server # support server side searching. This will calculate the earliest day # that would be included in the search and include all messages from # that day until today. e.g. maxage = 3 to sync only the last 3 days # mail # # maxage = # Maildir file format uses colon (:) separator between uniq name and info. # Unfortunatelly colon is not allowed character in windows file name. If you # enable maildir-windows-compatible option, offlineimap will be able to store # messages on windows drive, but you will probably loose compatibility with # other programs working with the maildir # #maildir-windows-compatible = no [Repository LocalExample] # Each repository requires a "type" declaration. 
The types supported for # local repositories are Maildir and IMAP. type = Maildir # Specify local repository. Your IMAP folders will be synchronized # to maildirs created under this path. OfflineIMAP will create the # maildirs for you as needed. localfolders = ~/Test # You can specify the "folder separator character" used for your Maildir # folders. It is inserted in-between the components of the tree. If you # want your folders to be nested directories, set it to "/". 'sep' is # ignored for IMAP repositories, as it is queried automatically. # #sep = . # Some users may not want the atime (last access time) of folders to be # modified by OfflineIMAP. If 'restoreatime' is set to yes, OfflineIMAP # will restore the atime of the "new" and "cur" folders in each maildir # folder to their original value after each sync. # # In nearly all cases, the default should be fine. # #restoreatime = no [Repository RemoteExample] # And this is the remote repository. We only support IMAP or Gmail here. type = IMAP # The following can fetch the account credentials via a python expression that # is parsed from the pythonfile parameter. For example, a function called # "getcredentials" that parses a file "filename" and returns the account # details for "hostname". # remotehosteval = getcredentials("filename", "hostname", "hostname") # remoteusereval = getcredentials("filename", "hostname", "user") # remotepasseval = getcredentials("filename", "hostname", "passwd") # Specify the remote hostname. remotehost = examplehost # Whether or not to use SSL. ssl = yes # SSL Client certificate (optional) # sslclientcert = /path/to/file.crt # SSL Client key (optional) # sslclientkey = /path/to/file.key # SSL CA Cert(s) to verify the server cert against (optional). # No SSL verification is done without this option. If it is # specified, the CA Cert(s) need to verify the Server cert AND # match the hostname (* wildcard allowed on the left hand side) # The certificate should be in PEM format. # sslcacertfile = /path/to/cacertfile.crt # If you connect via SSL/TLS (ssl=true) and you have no CA certificate # specified, offlineimap will refuse to sync as it connects to a server # with an unknown "fingerprint". If you are sure you connect to the # correct server, you can then configure the presented server # fingerprint here. OfflineImap will verify that the server fingerprint # has not changed on each connect and refuse to connect otherwise. # You can also configure this in addition to CA certificate validation # above and it will check both ways. #cert_fingerprint = # Specify the port. If not specified, use a default port. # remoteport = 993 # Specify the remote user name. remoteuser = username # There are six ways to specify the password for the IMAP server: # # 1. No password at all specified in the config file. # If a matching entry is found in ~/.netrc (see netrc (5) for # information) this password will be used. Do note that netrc only # allows one entry per hostname. If there is no ~/.netrc file but # there is an /etc/netrc file, the password will instead be taken # from there. Otherwise you will be prompted for the password when # OfflineIMAP starts when using a UI that supports this. # # 2. The remote password stored in this file with the remotepass # option. Any '%' needs to be encoded as '%%'. Example: # remotepass = mypassword # # 3. The remote password stored as a single line in an external # file, which is referenced by the remotefile option. Example: # remotepassfile = ~/Password.IMAP.Account1 # # 4. 
With a preauth tunnel. With this method, you invoke an external # program that is guaranteed *NOT* to ask for a password, but rather # to read from stdin and write to stdout an IMAP procotol stream that # begins life in the PREAUTH state. When you use a tunnel, you do # NOT specify a user or password (if you do, they'll be ignored.) # Instead, you specify a preauthtunnel, as this example illustrates # for Courier IMAP on Debian: # preauthtunnel = ssh -q imaphost '/usr/bin/imapd ./Maildir' # # 5. If you are using Kerberos and have the Python Kerberos package # installed, you should not specify a remotepass. If the user has a # valid Kerberos TGT, OfflineIMAP will figure out the rest all by # itself, and fall back to password authentication if needed. # # 6. Using arbitrary python code. With this method, you invoke a # function from your pythonfile. To use this method assign the name # of the function to the variable 'remotepasseval'. Example: # remotepasseval = get_password("imap.example.net") # You can also query for the username: # remoteusereval = get_username("imap.example.net") # This method can be used to design more elaborate setups, e.g. by # querying the gnome-keyring via its python bindings. ########## Advanced settings # Some IMAP servers need a "reference" which often refers to the "folder # root". This is most commonly needed with UW IMAP, where you might # need to specify the directory in which your mail is stored. The # 'reference' value will be prefixed to all folder paths refering to # that repository. E.g. accessing folder 'INBOX' with reference = Mail # will try to access Mail/INBOX. Note that the nametrans and # folderfilter functions will still apply the full path including the # reference prefix. Most users will not need this. # # reference = Mail # In between synchronisations, OfflineIMAP can monitor mailboxes for new # messages using the IDLE command. If you want to enable this, specify here # the folders you wish to monitor. Note that the IMAP protocol requires a # separate connection for each folder monitored in this way, so setting # this option will force settings for: # maxconnections - to be at least the number of folders you give # holdconnectionopen - to be true # keepalive - to be 29 minutes unless you specify otherwise # # This feature isn't complete and may well have problems. See the manual # for more details. # # This option should return a Python list. For example # # idlefolders = ['INBOX', 'INBOX.Alerts'] # # OfflineIMAP can use multiple connections to the server in order # to perform multiple synchronization actions simultaneously. # This may place a higher burden on the server. In most cases, # setting this value to 2 or 3 will speed up the sync, but in some # cases, it may slow things down. The safe answer is 1. You should # probably never set it to a value more than 5. #maxconnections = 2 # OfflineIMAP normally closes IMAP server connections between refreshes if # the global option autorefresh is specified. If you wish it to keep the # connection open, set this to true. If not specified, the default is # false. Keeping the connection open means a faster sync start the # next time and may use fewer server resources on connection, but uses # more server memory. This setting has no effect if autorefresh is not set. # #holdconnectionopen = no # If you want to have "keepalives" sent while waiting between syncs, # specify the amount of time IN SECONDS between keepalives here. 
Note that # sometimes more than this amount of time might pass, so don't make it # tight. This setting has no effect if autorefresh and holdconnectionopen # are not both set. # # keepalive = 60 # Normally, OfflineIMAP will expunge deleted messages from the server. # You can disable that if you wish. This means that OfflineIMAP will # mark them deleted on the server, but not actually delete them. # You must use some other IMAP client to delete them if you use this # setting; otherwise, the messgaes will just pile up there forever. # Therefore, this setting is definitely NOT recommended. # #expunge = no # Specify whether to process all mail folders on the server, or only # those listed as "subscribed". # #subscribedonly = no # You can specify a folder translator. This must be a eval-able # Python expression that takes a foldername arg and returns the new # value. I suggest a lambda. This example below will remove "INBOX." from # the leading edge of folders (great for Courier IMAP users) # # See the user documentation for details and use cases. They are also # online at: # http://docs.offlineimap.org/en/latest/nametrans.html # # WARNING: you MUST construct this such that it NEVER returns # the same value for two folders, UNLESS the second values are # filtered out by folderfilter below. Failure to follow this rule # will result in undefined behavior # # nametrans = lambda foldername: re.sub('^INBOX\.', '', foldername) # Using Courier remotely and want to duplicate its mailbox naming # locally? Try this: # # nametrans = lambda foldername: re.sub('^INBOX\.*', '.', foldername) # You can specify which folders to sync using the folderfilter # setting. You can provide any python function (e.g. a lambda function) # which will be invoked for each foldername. If the filter function # returns True, the folder will be synced, if it returns False, it. The # folderfilter operates on the *UNTRANSLATED* name (before any nametrans # translation takes place). # # Example 1: synchronizing only INBOX and Sent. # # folderfilter = lambda foldername: foldername in ['INBOX', 'Sent'] # # Example 2: synchronizing everything except Trash. # # folderfilter = lambda foldername: foldername not in ['Trash'] # # Example 3: Using a regular expression to exclude Trash and all folders # containing the characters "Del". # # folderfilter = lambda foldername: not re.search('(^Trash$|Del)', foldername) # # If folderfilter is not specified, ALL remote folders will be # synchronized. # # You can span multiple lines by indenting the others. (Use backslashes # at the end when required by Python syntax) For instance: # # folderfilter = lambda foldername: foldername in # ['INBOX', 'Sent Mail', 'Deleted Items', # 'Received'] # You can specify folderincludes to include additional folders. It # should return a Python list. This might be used to include a folder # that was excluded by your folderfilter rule, to include a folder that # your server does not specify with its LIST option, or to include a # folder that is outside your basic reference. The 'reference' value # will not be prefixed to this folder name, even if you have specified # one. For example: # folderincludes = ['debian.user', 'debian.personal'] # If you do not want to have any folders created on this repository, # set the createfolders variable to False, the default is True. Using # this feature you can e.g. disable the propagation of new folders to # the new repository. #createfolders = True # You can specify 'foldersort' to determine how folders are sorted. 
# This affects order of synchronization and mbnames. The expression # should return -1, 0, or 1, as the default Python cmp() does. The two # arguments, x and y, are strings representing the names of the folders # to be sorted. The sorting is applied *AFTER* nametrans, if any. The # default is to sort IMAP folders alphabetically # (case-insensitive). Usually, you should never have to modify this. To # eg. reverse the sort: # # foldersort = lambda x, y: -cmp(x, y) # Enable 1-way synchronization. When setting 'readonly' to True, this # repository will not be modified during synchronization. Use to # e.g. backup an IMAP server. The readonly setting can be applied to any # type of Repository (Maildir, Imap, etc). # #readonly = False [Repository GmailExample] # A repository using Gmail's IMAP interface. Any configuration # parameter of `IMAP` type repositories can be used here. Only # `remoteuser` (or `remoteusereval` ) is mandatory. Default values # for other parameters are OK, and you should not need fiddle with # those. # # The Gmail repository will use hard-coded values for `remotehost`, # `remoteport`, `tunnel` and `ssl`. (See # http://mail.google.com/support/bin/answer.py?answer=78799&topic=12814) # Any attempt to set those parameters will be silently ignored. type = Gmail # Specify the Gmail user name. This is the only mandatory parameter. remoteuser = username@gmail.com # The trash folder name may be different from [Gmail]/Trash # for example on german googlemail, this setting should be # # trashfolder = [Google Mail]/Papierkorb # # The same is valid for the spam folder # # spamfolder = [Google Mail]/Spam # Enable 1-way synchronization. See above for explanation. # #readonly = False spaetz-offlineimap-c9e9690/offlineimap.conf.minimal000066400000000000000000000005031176237577200224650ustar00rootroot00000000000000# Sample minimal config file. Copy this to ~/.offlineimaprc and edit to # get started fast. [general] accounts = Test [Account Test] localrepository = Local remoterepository = Remote [Repository Local] type = Maildir localfolders = ~/Test [Repository Remote] type = IMAP remotehost = examplehost remoteuser = jgoerzen spaetz-offlineimap-c9e9690/offlineimap.py000077500000000000000000000016421176237577200205530ustar00rootroot00000000000000#!/usr/bin/env python # Startup from single-user installation # Copyright (C) 2002 - 2008 John Goerzen # # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA from offlineimap import OfflineImap oi = OfflineImap() oi.run() spaetz-offlineimap-c9e9690/offlineimap/000077500000000000000000000000001176237577200201735ustar00rootroot00000000000000spaetz-offlineimap-c9e9690/offlineimap/CustomConfig.py000066400000000000000000000123341176237577200231500ustar00rootroot00000000000000# Copyright (C) 2003-2012 John Goerzen & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA try: from ConfigParser import SafeConfigParser except ImportError: #python3 from configparser import SafeConfigParser from offlineimap.localeval import LocalEval import os class CustomConfigParser(SafeConfigParser): def getdefault(self, section, option, default, *args, **kwargs): """Same as config.get, but returns the "default" option if there is no such option specified.""" if self.has_option(section, option): return self.get(*(section, option) + args, **kwargs) else: return default def getdefaultint(self, section, option, default, *args, **kwargs): if self.has_option(section, option): return self.getint (*(section, option) + args, **kwargs) else: return default def getdefaultfloat(self, section, option, default, *args, **kwargs): if self.has_option(section, option): return self.getfloat(*(section, option) + args, **kwargs) else: return default def getdefaultboolean(self, section, option, default, *args, **kwargs): if self.has_option(section, option): return self.getboolean(*(section, option) + args, **kwargs) else: return default def getmetadatadir(self): metadatadir = os.path.expanduser(self.getdefault("general", "metadata", "~/.offlineimap")) if not os.path.exists(metadatadir): os.mkdir(metadatadir, 0o700) return metadatadir def getlocaleval(self): if self.has_option("general", "pythonfile"): path = os.path.expanduser(self.get("general", "pythonfile")) else: path = None return LocalEval(path) def getsectionlist(self, key): """Returns a list of sections that start with key + " ". That is, if key is "Account", returns all section names that start with "Account ", but strips off the "Account ". For instance, for "Account Test", returns "Test".""" key = key + ' ' return [x[len(key):] for x in self.sections() \ if x.startswith(key)] def set_if_not_exists(self, section, option, value): """Set a value if it does not exist yet This allows to set default if the user has not explicitly configured anything.""" if not self.has_option(section, option): self.set(section, option, value) def CustomConfigDefault(): """Just a constant that won't occur anywhere else. 
This allows us to differentiate if the user has passed in any default value to the getconf* functions in ConfigHelperMixin derived classes.""" pass class ConfigHelperMixin: """Allow comfortable retrieving of config values pertaining to a section. If a class inherits from this cls:`ConfigHelperMixin`, it needs to provide 2 functions: meth:`getconfig` (returning a ConfigParser object) and meth:`getsection` (returning a string which represents the section to look up). All calls to getconf* will then return the configuration values for the ConfigParser object in the specific section.""" def _confighelper_runner(self, option, default, defaultfunc, mainfunc): """Return config value for getsection()""" if default == CustomConfigDefault: return mainfunc(*[self.getsection(), option]) else: return defaultfunc(*[self.getsection(), option, default]) def getconf(self, option, default = CustomConfigDefault): return self._confighelper_runner(option, default, self.getconfig().getdefault, self.getconfig().get) def getconfboolean(self, option, default = CustomConfigDefault): return self._confighelper_runner(option, default, self.getconfig().getdefaultboolean, self.getconfig().getboolean) def getconfint(self, option, default = CustomConfigDefault): return self._confighelper_runner(option, default, self.getconfig().getdefaultint, self.getconfig().getint) def getconffloat(self, option, default = CustomConfigDefault): return self._confighelper_runner(option, default, self.getconfig().getdefaultfloat, self.getconfig().getfloat) spaetz-offlineimap-c9e9690/offlineimap/__init__.py000066400000000000000000000013271176237577200223070ustar00rootroot00000000000000__all__ = ['OfflineImap'] __productname__ = 'OfflineIMAP' __version__ = "6.5.4" __copyright__ = "Copyright 2002-2012 John Goerzen & contributors" __author__ = "John Goerzen" __author_email__= "john@complete.org" __description__ = "Disconnected Universal IMAP Mail Synchronization/Reader Support" __license__ = "Licensed under the GNU GPL v2+ (v2 or any later version)" __bigcopyright__ = """%(__productname__)s %(__version__)s %(__license__)s""" % locals() __homepage__ = "http://offlineimap.org" banner = __bigcopyright__ from offlineimap.error import OfflineImapError # put this last, so we don't run into circular dependencies using # e.g. offlineimap.__version__. from offlineimap.init import OfflineImap spaetz-offlineimap-c9e9690/offlineimap/accounts.py000066400000000000000000000440571176237577200223760ustar00rootroot00000000000000# Copyright (C) 2003-2011 John Goerzen & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from offlineimap import mbnames, CustomConfig, OfflineImapError from offlineimap.repository import Repository from offlineimap.ui import getglobalui from offlineimap.threadutil import InstanceLimitedThread from subprocess import Popen, PIPE from threading import Event import os from sys import exc_info import traceback try: import fcntl except: pass # ok if this fails, we can do without def getaccountlist(customconfig): return customconfig.getsectionlist('Account') def AccountListGenerator(customconfig): return [Account(customconfig, accountname) for accountname in getaccountlist(customconfig)] def AccountHashGenerator(customconfig): retval = {} for item in AccountListGenerator(customconfig): retval[item.getname()] = item return retval class Account(CustomConfig.ConfigHelperMixin): """Represents an account (ie. 2 repositories) to sync Most of the time you will actually want to use the derived :class:`accounts.SyncableAccount` which contains all functions used for syncing an account.""" #signal gets set when we should stop looping abort_soon_signal = Event() #signal gets set on CTRL-C/SIGTERM abort_NOW_signal = Event() def __init__(self, config, name): """ :param config: Representing the offlineimap configuration file. :type config: :class:`offlineimap.CustomConfig.CustomConfigParser` :param name: A string denoting the name of the Account as configured""" self.config = config self.name = name self.metadatadir = config.getmetadatadir() self.localeval = config.getlocaleval() # current :mod:`offlineimap.ui`, can be used for logging: self.ui = getglobalui() self.refreshperiod = self.getconffloat('autorefresh', 0.0) # should we run in "dry-run" mode? self.dryrun = self.config.getboolean('general', 'dry-run') self.quicknum = 0 if self.refreshperiod == 0.0: self.refreshperiod = None def getlocaleval(self): return self.localeval def getconfig(self): return self.config def getname(self): return self.name def __str__(self): return self.name def getaccountmeta(self): return os.path.join(self.metadatadir, 'Account-' + self.name) def getsection(self): return 'Account ' + self.getname() @classmethod def set_abort_event(cls, config, signum): """Set skip sleep/abort event for all accounts If we want to skip a current (or the next) sleep, or if we want to abort an autorefresh loop, the main thread can use set_abort_event() to send the corresponding signal. Signum = 1 implies that we want all accounts to abort or skip the current or next sleep phase. Signum = 2 will end the autorefresh loop, ie all accounts will return after they finished a sync. signum=3 means, abort NOW, e.g. on SIGINT or SIGTERM. This is a class method, it will send the signal to all accounts. """ if signum == 1: # resync signal, set config option for all accounts for acctsection in getaccountlist(config): config.set('Account ' + acctsection, "skipsleep", '1') elif signum == 2: # don't autorefresh anymore cls.abort_soon_signal.set() elif signum == 3: # abort ASAP cls.abort_NOW_signal.set() def get_abort_event(self): """Checks if an abort signal had been sent If the 'skipsleep' config option for this account had been set, with `set_abort_event(config, 1)` it will get cleared in this function. Ie, we will only skip one sleep and not all. :returns: True, if the main thread had called :meth:`set_abort_event` earlier, otherwise 'False'. 
""" skipsleep = self.getconfboolean("skipsleep", 0) if skipsleep: self.config.set(self.getsection(), "skipsleep", '0') return skipsleep or Account.abort_soon_signal.is_set() or \ Account.abort_NOW_signal.is_set() def sleeper(self): """Sleep if the account is set to autorefresh :returns: 0:timeout expired, 1: canceled the timer, 2:request to abort the program, 100: if configured to not sleep at all. """ if not self.refreshperiod: return 100 kaobjs = [] if hasattr(self, 'localrepos'): kaobjs.append(self.localrepos) if hasattr(self, 'remoterepos'): kaobjs.append(self.remoterepos) for item in kaobjs: item.startkeepalive() refreshperiod = int(self.refreshperiod * 60) sleepresult = self.ui.sleep(refreshperiod, self) # Cancel keepalive for item in kaobjs: item.stopkeepalive() if sleepresult: if Account.abort_soon_signal.is_set() or \ Account.abort_NOW_signal.is_set(): return 2 self.quicknum = 0 return 1 return 0 def serverdiagnostics(self): """Output diagnostics for all involved repositories""" remote_repo = Repository(self, 'remote') local_repo = Repository(self, 'local') #status_repo = Repository(self, 'status') self.ui.serverdiagnostics(remote_repo, 'Remote') self.ui.serverdiagnostics(local_repo, 'Local') #self.ui.serverdiagnostics(statusrepos, 'Status') class SyncableAccount(Account): """A syncable email account connecting 2 repositories Derives from :class:`accounts.Account` but contains the additional functions :meth:`syncrunner`, :meth:`sync`, :meth:`syncfolders`, used for syncing.""" def __init__(self, *args, **kwargs): Account.__init__(self, *args, **kwargs) self._lockfd = None self._lockfilepath = os.path.join(self.config.getmetadatadir(), "%s.lock" % self) def lock(self): """Lock the account, throwing an exception if it is locked already""" self._lockfd = open(self._lockfilepath, 'w') try: fcntl.lockf(self._lockfd, fcntl.LOCK_EX|fcntl.LOCK_NB) except NameError: #fcntl not available (Windows), disable file locking... :( pass except IOError: self._lockfd.close() raise OfflineImapError("Could not lock account %s. Is another " "instance using this account?" % self, OfflineImapError.ERROR.REPO) def unlock(self): """Unlock the account, deleting the lock file""" #If we own the lock file, delete it if self._lockfd and not self._lockfd.closed: self._lockfd.close() try: os.unlink(self._lockfilepath) except OSError: pass #Failed to delete for some reason. def syncrunner(self): self.ui.registerthread(self) accountmetadata = self.getaccountmeta() if not os.path.exists(accountmetadata): os.mkdir(accountmetadata, 0o700) self.remoterepos = Repository(self, 'remote') self.localrepos = Repository(self, 'local') self.statusrepos = Repository(self, 'status') # Loop account sync if needed (bail out after 3 failures) looping = 3 while looping: self.ui.acct(self) try: self.lock() self.sync() except (KeyboardInterrupt, SystemExit): raise except OfflineImapError as e: # Stop looping and bubble up Exception if needed. 
if e.severity >= OfflineImapError.ERROR.REPO: if looping: looping -= 1 if e.severity >= OfflineImapError.ERROR.CRITICAL: raise self.ui.error(e, exc_info()[2]) except Exception as e: self.ui.error(e, exc_info()[2], msg = "While attempting to sync" " account '%s'" % self) else: # after success sync, reset the looping counter to 3 if self.refreshperiod: looping = 3 finally: self.ui.acctdone(self) self.unlock() if looping and self.sleeper() >= 2: looping = 0 def sync(self): """Synchronize the account once, then return Assumes that `self.remoterepos`, `self.localrepos`, and `self.statusrepos` has already been populated, so it should only be called from the :meth:`syncrunner` function. """ folderthreads = [] hook = self.getconf('presynchook', '') self.callhook(hook) quickconfig = self.getconfint('quick', 0) if quickconfig < 0: quick = True elif quickconfig > 0: if self.quicknum == 0 or self.quicknum > quickconfig: self.quicknum = 1 quick = False else: self.quicknum = self.quicknum + 1 quick = True else: quick = False try: remoterepos = self.remoterepos localrepos = self.localrepos statusrepos = self.statusrepos # init repos with list of folders, so we have them (and the # folder delimiter etc) remoterepos.getfolders() localrepos.getfolders() statusrepos.getfolders() remoterepos.sync_folder_structure(localrepos, statusrepos) # replicate the folderstructure between REMOTE to LOCAL if not localrepos.getconfboolean('readonly', False): self.ui.syncfolders(remoterepos, localrepos) # iterate through all folders on the remote repo and sync for remotefolder in remoterepos.getfolders(): # check for CTRL-C or SIGTERM if Account.abort_NOW_signal.is_set(): break if not remotefolder.sync_this: self.ui.debug('', "Not syncing filtered remote folder '%s'" "[%s]" % (remotefolder, remoterepos)) continue # Filtered out remote folder thread = InstanceLimitedThread(\ instancename = 'FOLDER_' + self.remoterepos.getname(), target = syncfolder, name = "Folder %s [acc: %s]" % (remotefolder, self), args = (self, remotefolder, quick)) thread.start() folderthreads.append(thread) # wait for all threads to finish for thr in folderthreads: thr.join() # Write out mailbox names if required and not in dry-run mode if not self.dryrun: mbnames.write() localrepos.forgetfolders() remoterepos.forgetfolders() except: #error while syncing. Drop all connections that we have, they #might be bogus by now (e.g. after suspend) localrepos.dropconnections() remoterepos.dropconnections() raise else: # sync went fine. Hold or drop depending on config localrepos.holdordropconnections() remoterepos.holdordropconnections() hook = self.getconf('postsynchook', '') self.callhook(hook) def callhook(self, cmd): # check for CTRL-C or SIGTERM and run postsynchook if Account.abort_NOW_signal.is_set(): return if not cmd: return try: self.ui.callhook("Calling hook: " + cmd) if self.dryrun: # don't if we are in dry-run mode return p = Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True) r = p.communicate() self.ui.callhook("Hook stdout: %s\nHook stderr:%s\n" % r) self.ui.callhook("Hook return code: %d" % p.returncode) except (KeyboardInterrupt, SystemExit): raise except Exception as e: self.ui.error(e, exc_info()[2], msg = "Calling hook") def syncfolder(account, remotefolder, quick): """This function is called as target for the InstanceLimitedThread invokation in SyncableAccount. 
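It looks up the matching local and status folders, verifies UIDVALIDITY, and then invokes syncmessagesto() in both directions, honouring the 'readonly' setting of either repository.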
Filtered folders on the remote side will not invoke this function.""" remoterepos = account.remoterepos localrepos = account.localrepos statusrepos = account.statusrepos ui = getglobalui() ui.registerthread(account) try: # Load local folder. localfolder = localrepos.\ getfolder(remotefolder.getvisiblename().\ replace(remoterepos.getsep(), localrepos.getsep())) #Filtered folders on the remote side will not invoke this #function, but we need to NOOP if the local folder is filtered #out too: if not localfolder.sync_this: ui.debug('', "Not syncing filtered local folder '%s'" \ % localfolder) return # Write the mailboxes mbnames.add(account.name, localfolder.getname()) # Load status folder. statusfolder = statusrepos.getfolder(remotefolder.getvisiblename().\ replace(remoterepos.getsep(), statusrepos.getsep())) if localfolder.get_uidvalidity() == None: # This is a new folder, so delete the status cache to be # sure we don't have a conflict. # TODO: This does not work. We always return a value, need # to rework this... statusfolder.deletemessagelist() statusfolder.cachemessagelist() if quick: if not localfolder.quickchanged(statusfolder) \ and not remotefolder.quickchanged(statusfolder): ui.skippingfolder(remotefolder) localrepos.restore_atime() return # Load local folder ui.syncingfolder(remoterepos, remotefolder, localrepos, localfolder) ui.loadmessagelist(localrepos, localfolder) localfolder.cachemessagelist() ui.messagelistloaded(localrepos, localfolder, localfolder.getmessagecount()) # If either the local or the status folder has messages and # there is a UID validity problem, warn and abort. If there are # no messages, UW IMAPd loses UIDVALIDITY. But we don't really # need it if both local folders are empty. So, in that case, # just save it off. if localfolder.getmessagecount() or statusfolder.getmessagecount(): if not localfolder.check_uidvalidity(): ui.validityproblem(localfolder) localrepos.restore_atime() return if not remotefolder.check_uidvalidity(): ui.validityproblem(remotefolder) localrepos.restore_atime() return else: # Both folders empty, just save new UIDVALIDITY localfolder.save_uidvalidity() remotefolder.save_uidvalidity() # Load remote folder. ui.loadmessagelist(remoterepos, remotefolder) remotefolder.cachemessagelist() ui.messagelistloaded(remoterepos, remotefolder, remotefolder.getmessagecount()) # Synchronize remote changes. 
if not localrepos.getconfboolean('readonly', False): ui.syncingmessages(remoterepos, remotefolder, localrepos, localfolder) remotefolder.syncmessagesto(localfolder, statusfolder) else: ui.debug('imap', "Not syncing to read-only repository '%s'" \ % localrepos.getname()) # Synchronize local changes if not remoterepos.getconfboolean('readonly', False): ui.syncingmessages(localrepos, localfolder, remoterepos, remotefolder) localfolder.syncmessagesto(remotefolder, statusfolder) else: ui.debug('', "Not syncing to read-only repository '%s'" \ % remoterepos.getname()) statusfolder.save() localrepos.restore_atime() except (KeyboardInterrupt, SystemExit): raise except OfflineImapError as e: # bubble up severe Errors, skip folder otherwise if e.severity > OfflineImapError.ERROR.FOLDER: raise else: #if the initial localfolder assignement bailed out, the localfolder var will not be available, so we need ui.error(e, exc_info()[2], msg = "Aborting sync, folder '%s' " "[acc: '%s']" % ( remotefolder.getvisiblename().\ replace(remoterepos.getsep(), localrepos.getsep()), account)) # we reconstruct foldername above rather than using # localfolder, as the localfolder var is not # available if assignment fails. except Exception as e: ui.error(e, msg = "ERROR in syncfolder for %s folder %s: %s" % \ (account, remotefolder.getvisiblename(), traceback.format_exc())) spaetz-offlineimap-c9e9690/offlineimap/error.py000066400000000000000000000026351176237577200217040ustar00rootroot00000000000000class OfflineImapError(Exception): """An Error during offlineimap synchronization""" class ERROR: """Severity level of an Exception * **MESSAGE**: Abort the current message, but continue with folder * **FOLDER_RETRY**: Error syncing folder, but do retry * **FOLDER**: Abort folder sync, but continue with next folder * **REPO**: Abort repository sync, continue with next account * **CRITICAL**: Immediately exit offlineimap """ MESSAGE, FOLDER_RETRY, FOLDER, REPO, CRITICAL = 0, 10, 15, 20, 30 def __init__(self, reason, severity, errcode=None): """ :param reason: Human readable string suitable for logging :param severity: denoting which operations should be aborted. E.g. a ERROR.MESSAGE can occur on a faulty message, but a ERROR.REPO occurs when the server is offline. :param errcode: optional number denoting a predefined error situation (which let's us exit with a predefined exit value). So far, no errcodes have been defined yet. :type severity: OfflineImapError.ERROR value""" self.errcode = errcode self.severity = severity # 'reason' is stored in the Exception().args tuple. super(OfflineImapError, self).__init__(reason) @property def reason(self): return self.args[0] spaetz-offlineimap-c9e9690/offlineimap/folder/000077500000000000000000000000001176237577200214465ustar00rootroot00000000000000spaetz-offlineimap-c9e9690/offlineimap/folder/Base.py000066400000000000000000000544241176237577200227030ustar00rootroot00000000000000# Base folder support # Copyright (C) 2002-2011 John Goerzen & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from offlineimap import threadutil from offlineimap.ui import getglobalui from offlineimap.error import OfflineImapError import offlineimap.accounts import os.path import re from sys import exc_info import traceback class BaseFolder(object): def __init__(self, name, repository): """ :para name: Path & name of folder minus root or reference :para repository: Repository() in which the folder is. """ self.sync_this = True """Should this folder be included in syncing?""" self.ui = getglobalui() # Top level dir name is always '' self.name = name if not name == self.getsep() else '' self.repository = repository self.visiblename = repository.nametrans(name) # In case the visiblename becomes '.' or '/' (top-level) we use # '' as that is the name that e.g. the Maildir scanning will # return for the top-level dir. if self.visiblename == self.getsep(): self.visiblename = '' self.config = repository.getconfig() def getname(self): """Returns name""" return self.name def __str__(self): return self.name @property def accountname(self): """Account name as string""" return self.repository.accountname def suggeststhreads(self): """Returns true if this folder suggests using threads for actions; false otherwise. Probably only IMAP will return true.""" return 0 def getcopyinstancelimit(self): """For threading folders, returns the instancelimitname for InstanceLimitedThreads.""" raise NotImplementedException def storesmessages(self): """Should be true for any backend that actually saves message bodies. (Almost all of them). False for the LocalStatus backend. Saves us from having to slurp up messages just for localstatus purposes.""" return 1 def getvisiblename(self): """The nametrans-transposed name of the folder's name""" return self.visiblename def getrepository(self): """Returns the repository object that this folder is within.""" return self.repository def getroot(self): """Returns the root of the folder, in a folder-specific fashion.""" return self.root def getsep(self): """Returns the separator for this folder type.""" return self.sep def getfullname(self): if self.getroot(): return self.getroot() + self.getsep() + self.getname() else: return self.getname() def getfolderbasename(self): """Return base file name of file to store Status/UID info in""" if not self.name: basename = '.' else: #avoid directory hierarchies and file names such as '/' basename = self.name.replace('/', '.') # replace with literal 'dot' if final path name is '.' as '.' is # an invalid file name. basename = re.sub('(^|\/)\.$','\\1dot', basename) return basename def check_uidvalidity(self): """Tests if the cached UIDVALIDITY match the real current one If required it saves the UIDVALIDITY value. In this case the function is not threadsafe. So don't attempt to call it from concurrent threads. :returns: Boolean indicating the match. 
Returns True in case it implicitely saved the UIDVALIDITY.""" if self.get_saveduidvalidity() != None: return self.get_saveduidvalidity() == self.get_uidvalidity() else: self.save_uidvalidity() return True def _getuidfilename(self): """provides UIDVALIDITY cache filename for class internal purposes""" return os.path.join(self.repository.getuiddir(), self.getfolderbasename()) def get_saveduidvalidity(self): """Return the previously cached UIDVALIDITY value :returns: UIDVALIDITY as (long) number or None, if None had been saved yet.""" if hasattr(self, '_base_saved_uidvalidity'): return self._base_saved_uidvalidity uidfilename = self._getuidfilename() if not os.path.exists(uidfilename): self._base_saved_uidvalidity = None else: file = open(uidfilename, "rt") self._base_saved_uidvalidity = long(file.readline().strip()) file.close() return self._base_saved_uidvalidity def save_uidvalidity(self): """Save the UIDVALIDITY value of the folder to the cache This function is not threadsafe, so don't attempt to call it from concurrent threads.""" newval = self.get_uidvalidity() uidfilename = self._getuidfilename() with open(uidfilename + ".tmp", "wt") as file: file.write("%d\n" % newval) os.rename(uidfilename + ".tmp", uidfilename) self._base_saved_uidvalidity = newval def get_uidvalidity(self): """Retrieve the current connections UIDVALIDITY value This function needs to be implemented by each Backend :returns: UIDVALIDITY as a (long) number""" raise NotImplementedException def cachemessagelist(self): """Reads the message list from disk or network and stores it in memory for later use. This list will not be re-read from disk or memory unless this function is called again.""" raise NotImplementedException def getmessagelist(self): """Gets the current message list. You must call cachemessagelist() before calling this function!""" raise NotImplementedException def uidexists(self, uid): """Returns True if uid exists""" return uid in self.getmessagelist() def getmessageuidlist(self): """Gets a list of UIDs. You may have to call cachemessagelist() before calling this function!""" return self.getmessagelist().keys() def getmessagecount(self): """Gets the number of messages.""" return len(self.getmessagelist()) def getmessage(self, uid): """Returns the content of the specified message.""" raise NotImplementedException def savemessage(self, uid, content, flags, rtime): """Writes a new message, with the specified uid. If the uid is < 0: The backend should assign a new uid and return it. In case it cannot assign a new uid, it returns the negative uid passed in WITHOUT saving the message. If the backend CAN assign a new uid, but cannot find out what this UID is (as is the case with some IMAP servers), it returns 0 but DOES save the message. IMAP backend should be the only one that can assign a new uid. If the uid is > 0, the backend should set the uid to this, if it can. If it cannot set the uid to that, it will save it anyway. It will return the uid assigned in any case. Note that savemessage() does not check against dryrun settings, so you need to ensure that savemessage is never called in a dryrun mode. """ raise NotImplementedException def getmessagetime(self, uid): """Return the received time for the specified message.""" raise NotImplementedException def getmessageflags(self, uid): """Returns the flags for the specified message.""" raise NotImplementedException def savemessageflags(self, uid, flags): """Sets the specified message's flags to the given set. 
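:param flags: A set() of flags, in the same format that getmessageflags() returns.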
Note that this function does not check against dryrun settings, so you need to ensure that it is never called in a dryrun mode.""" raise NotImplementedException def addmessageflags(self, uid, flags): """Adds the specified flags to the message's flag set. If a given flag is already present, it will not be duplicated. Note that this function does not check against dryrun settings, so you need to ensure that it is never called in a dryrun mode. :param flags: A set() of flags""" newflags = self.getmessageflags(uid) | flags self.savemessageflags(uid, newflags) def addmessagesflags(self, uidlist, flags): """ Note that this function does not check against dryrun settings, so you need to ensure that it is never called in a dryrun mode.""" for uid in uidlist: self.addmessageflags(uid, flags) def deletemessageflags(self, uid, flags): """Removes each flag given from the message's flag set. If a given flag is already removed, no action will be taken for that flag. Note that this function does not check against dryrun settings, so you need to ensure that it is never called in a dryrun mode.""" newflags = self.getmessageflags(uid) - flags self.savemessageflags(uid, newflags) def deletemessagesflags(self, uidlist, flags): """ Note that this function does not check against dryrun settings, so you need to ensure that it is never called in a dryrun mode.""" for uid in uidlist: self.deletemessageflags(uid, flags) def change_message_uid(self, uid, new_uid): """Change the message from existing uid to new_uid If the backend supports it (IMAP does not). :param new_uid: (optional) If given, the old UID will be changed to a new UID. This allows backends efficient renaming of messages if the UID has changed.""" raise NotImplementedException def deletemessage(self, uid): """ Note that this function does not check against dryrun settings, so you need to ensure that it is never called in a dryrun mode.""" raise NotImplementedException def deletemessages(self, uidlist): """ Note that this function does not check against dryrun settings, so you need to ensure that it is never called in a dryrun mode.""" for uid in uidlist: self.deletemessage(uid) def copymessageto(self, uid, dstfolder, statusfolder, register = 1): """Copies a message from self to dst if needed, updating the status Note that this function does not check against dryrun settings, so you need to ensure that it is never called in a dryrun mode. :param uid: uid of the message to be copied. :param dstfolder: A BaseFolder-derived instance :param statusfolder: A LocalStatusFolder instance :param register: whether we should register a new thread." :returns: Nothing on success, or raises an Exception.""" # Sometimes, it could be the case that if a sync takes awhile, # a message might be deleted from the maildir before it can be # synced to the status cache. This is only a problem with # self.getmessage(). So, don't call self.getmessage unless # really needed. if register: # output that we start a new thread self.ui.registerthread(self.repository.account) try: message = None flags = self.getmessageflags(uid) rtime = self.getmessagetime(uid) if uid > 0 and dstfolder.uidexists(uid): # dst has message with that UID already, only update status statusfolder.savemessage(uid, None, flags, rtime) return # If any of the destinations actually stores the message body, # load it up. if dstfolder.storesmessages(): message = self.getmessage(uid) #Succeeded? -> IMAP actually assigned a UID. If newid #remained negative, no server was willing to assign us an #UID. 
If newid is 0, saving succeeded, but we could not #retrieve the new UID. Ignore message in this case. new_uid = dstfolder.savemessage(uid, message, flags, rtime) if new_uid > 0: if new_uid != uid: # Got new UID, change the local uid to match the new one. self.change_message_uid(uid, new_uid) statusfolder.deletemessage(uid) # Got new UID, change the local uid. # Save uploaded status in the statusfolder statusfolder.savemessage(new_uid, message, flags, rtime) elif new_uid == 0: # Message was stored to dstfolder, but we can't find it's UID # This means we can't link current message to the one created # in IMAP. So we just delete local message and on next run # we'll sync it back # XXX This could cause infinite loop on syncing between two # IMAP servers ... self.deletemessage(uid) else: raise OfflineImapError("Trying to save msg (uid %d) on folder " "%s returned invalid uid %d" % (uid, dstfolder.getvisiblename(), new_uid), OfflineImapError.ERROR.MESSAGE) except (KeyboardInterrupt): # bubble up CTRL-C raise except OfflineImapError as e: if e.severity > OfflineImapError.ERROR.MESSAGE: raise # buble severe errors up self.ui.error(e, exc_info()[2]) except Exception as e: self.ui.error(e, "Copying message %s [acc: %s]:\n %s" %\ (uid, self.accountname, exc_info()[2])) raise #raise on unknown errors, so we can fix those def syncmessagesto_copy(self, dstfolder, statusfolder): """Pass1: Copy locally existing messages not on the other side This will copy messages to dstfolder that exist locally but are not in the statusfolder yet. The strategy is: 1) Look for messages present in self but not in statusfolder. 2) invoke copymessageto() on those which: - If dstfolder doesn't have it yet, add them to dstfolder. - Update statusfolder This function checks and protects us from action in ryrun mode. """ threads = [] copylist = filter(lambda uid: not \ statusfolder.uidexists(uid), self.getmessageuidlist()) num_to_copy = len(copylist) if num_to_copy and self.repository.account.dryrun: self.ui.info("[DRYRUN] Copy {} messages from {}[{}] to {}".format( num_to_copy, self, self.repository, dstfolder.repository)) return for num, uid in enumerate(copylist): # bail out on CTRL-C or SIGTERM if offlineimap.accounts.Account.abort_NOW_signal.is_set(): break self.ui.copyingmessage(uid, num+1, num_to_copy, self, dstfolder) # exceptions are caught in copymessageto() if self.suggeststhreads(): self.waitforthread() thread = threadutil.InstanceLimitedThread(\ self.getcopyinstancelimit(), target = self.copymessageto, name = "Copy message from %s:%s" % (self.repository, self), args = (uid, dstfolder, statusfolder)) thread.start() threads.append(thread) else: self.copymessageto(uid, dstfolder, statusfolder, register = 0) for thread in threads: thread.join() def syncmessagesto_delete(self, dstfolder, statusfolder): """Pass 2: Remove locally deleted messages on dst Get all UIDS in statusfolder but not self. These are messages that were deleted in 'self'. Delete those from dstfolder and statusfolder. This function checks and protects us from action in ryrun mode. """ deletelist = filter(lambda uid: uid>=0 \ and not self.uidexists(uid), statusfolder.getmessageuidlist()) if len(deletelist): self.ui.deletingmessages(deletelist, [dstfolder]) if self.repository.account.dryrun: return #don't delete messages in dry-run mode # delete in statusfolder first to play safe. In case of abort, we # won't lose message, we will just retransmit some unneccessary. 
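# (If we are interrupted between the two deletes, the message is still
# present on dstfolder but no longer in the status cache, so a later run
# will simply retransmit it instead of losing mail.)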
for folder in [statusfolder, dstfolder]: folder.deletemessages(deletelist) def syncmessagesto_flags(self, dstfolder, statusfolder): """Pass 3: Flag synchronization Compare flag mismatches in self with those in statusfolder. If msg has a valid UID and exists on dstfolder (has not e.g. been deleted there), sync the flag change to both dstfolder and statusfolder. This function checks and protects us from action in ryrun mode. """ # For each flag, we store a list of uids to which it should be # added. Then, we can call addmessagesflags() to apply them in # bulk, rather than one call per message. addflaglist = {} delflaglist = {} for uid in self.getmessageuidlist(): # Ignore messages with negative UIDs missed by pass 1 and # don't do anything if the message has been deleted remotely if uid < 0 or not dstfolder.uidexists(uid): continue selfflags = self.getmessageflags(uid) statusflags = statusfolder.getmessageflags(uid) #if we could not get message flags from LocalStatus, assume empty. if statusflags is None: statusflags = set() addflags = selfflags - statusflags delflags = statusflags - selfflags for flag in addflags: if not flag in addflaglist: addflaglist[flag] = [] addflaglist[flag].append(uid) for flag in delflags: if not flag in delflaglist: delflaglist[flag] = [] delflaglist[flag].append(uid) for flag, uids in addflaglist.items(): self.ui.addingflags(uids, flag, dstfolder) if self.repository.account.dryrun: continue #don't actually add in a dryrun dstfolder.addmessagesflags(uids, set(flag)) statusfolder.addmessagesflags(uids, set(flag)) for flag,uids in delflaglist.items(): self.ui.deletingflags(uids, flag, dstfolder) if self.repository.account.dryrun: continue #don't actually remove in a dryrun dstfolder.deletemessagesflags(uids, set(flag)) statusfolder.deletemessagesflags(uids, set(flag)) def syncmessagesto(self, dstfolder, statusfolder): """Syncs messages in this folder to the destination dstfolder. This is the high level entry for syncing messages in one direction. Syncsteps are: Pass1: Copy locally existing messages Copy messages in self, but not statusfolder to dstfolder if not already in dstfolder. dstfolder might assign a new UID (e.g. if uploading to IMAP). Update statusfolder. Pass2: Remove locally deleted messages Get all UIDS in statusfolder but not self. These are messages that were deleted in 'self'. Delete those from dstfolder and statusfolder. After this pass, the message lists should be identical wrt the uids present (except for potential negative uids that couldn't be placed anywhere). Pass3: Synchronize flag changes Compare flag mismatches in self with those in statusfolder. If msg has a valid UID and exists on dstfolder (has not e.g. been deleted there), sync the flag change to both dstfolder and statusfolder. :param dstfolder: Folderinstance to sync the msgs to. :param statusfolder: LocalStatus instance to sync against. 
""" passes = [('copying messages' , self.syncmessagesto_copy), ('deleting messages' , self.syncmessagesto_delete), ('syncing flags' , self.syncmessagesto_flags)] for (passdesc, action) in passes: # bail out on CTRL-C or SIGTERM if offlineimap.accounts.Account.abort_NOW_signal.is_set(): break try: action(dstfolder, statusfolder) except (KeyboardInterrupt): raise except OfflineImapError as e: if e.severity > OfflineImapError.ERROR.FOLDER: raise self.ui.error(e, exc_info()[2]) except Exception as e: self.ui.error(e, exc_info()[2], "Syncing folder %s [acc: %s]" %\ (self, self.accountname)) raise # raise unknown Exceptions so we can fix them def __eq__(self, other): """Comparisons work either on string comparing folder names or on the same instance MailDirFolder('foo') == 'foo' --> True a = MailDirFolder('foo'); a == b --> True MailDirFolder('foo') == 'moo' --> False MailDirFolder('foo') == IMAPFolder('foo') --> False MailDirFolder('foo') == MaildirFolder('foo') --> False """ if isinstance(other, basestring): return other == self.name return id(self) == id(other) def __ne__(self, other): return not self.__eq__(other) spaetz-offlineimap-c9e9690/offlineimap/folder/Gmail.py000066400000000000000000000040251176237577200230520ustar00rootroot00000000000000# Gmail IMAP folder support # Copyright (C) 2008 Riccardo Murri # Copyright (C) 2002-2007 John Goerzen # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA """Folder implementation to support features of the Gmail IMAP server. """ from .IMAP import IMAPFolder class GmailFolder(IMAPFolder): """Folder implementation to support features of the Gmail IMAP server. Removing a message from a folder will only remove the "label" from the message and keep it in the "All mails" folder. To really delete a message it needs to be copied to the Trash folder. However, this is dangerous as our folder moves are implemented as a 1) delete in one folder and 2) append to the other. If 2 comes before 1, this will effectively delete the message from all folders. So we cannot do that until we have a smarter folder move mechanism. 
For more information on the Gmail IMAP server: http://mail.google.com/support/bin/answer.py?answer=77657&topic=12815 """ def __init__(self, imapserver, name, repository): super(GmailFolder, self).__init__(imapserver, name, repository) self.trash_folder = repository.gettrashfolder(name) # Gmail will really delete messages upon EXPUNGE in these folders self.real_delete_folders = [ self.trash_folder, repository.getspamfolder() ] spaetz-offlineimap-c9e9690/offlineimap/folder/IMAP.py000066400000000000000000001022351176237577200225510ustar00rootroot00000000000000# IMAP folder support # Copyright (C) 2002-2012 John Goerzen & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA import email import random import binascii import re import time from sys import exc_info from .Base import BaseFolder from offlineimap import imaputil, imaplibutil, OfflineImapError from offlineimap.imaplib2 import MonthNames class IMAPFolder(BaseFolder): def __init__(self, imapserver, name, repository): name = imaputil.dequote(name) self.sep = imapserver.delim super(IMAPFolder, self).__init__(name, repository) self.expunge = repository.getexpunge() self.root = None # imapserver.root self.imapserver = imapserver self.messagelist = None self.randomgenerator = random.Random() #self.ui is set in BaseFolder def selectro(self, imapobj, force = False): """Select this folder when we do not need write access. Prefer SELECT to EXAMINE if we can, since some servers (Courier) do not stabilize UID validity until the folder is selected. .. todo: Still valid? Needs verification :param: Enforce new SELECT even if we are on that folder already. :returns: raises :exc:`OfflineImapError` severity FOLDER on error""" try: imapobj.select(self.getfullname(), force = force) except imapobj.readonly: imapobj.select(self.getfullname(), readonly = True, force = force) def suggeststhreads(self): return 1 def waitforthread(self): self.imapserver.connectionwait() def getcopyinstancelimit(self): return 'MSGCOPY_' + self.repository.getname() def get_uidvalidity(self): """Retrieve the current connections UIDVALIDITY value UIDVALIDITY value will be cached on the first call. :returns: The UIDVALIDITY as (long) number.""" if hasattr(self, '_uidvalidity'): # use cached value if existing return self._uidvalidity imapobj = self.imapserver.acquireconnection() try: # SELECT (if not already done) and get current UIDVALIDITY self.selectro(imapobj) typ, uidval = imapobj.response('UIDVALIDITY') assert uidval != [None] and uidval != None, \ "response('UIDVALIDITY') returned [None]!" self._uidvalidity = long(uidval[-1]) return self._uidvalidity finally: self.imapserver.releaseconnection(imapobj) def quickchanged(self, statusfolder): # An IMAP folder has definitely changed if the number of # messages or the UID of the last message have changed. Otherwise # only flag changes could have occurred. 
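# This is what the 'quick' account option relies on: a quick pass only
# compares these counters against the status cache and skips the full
# per-message flag comparison. A full sync is still forced periodically;
# see SyncableAccount.sync().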
retry = True # Should we attempt another round or exit? while retry: retry = False imapobj = self.imapserver.acquireconnection() try: # Select folder and get number of messages restype, imapdata = imapobj.select(self.getfullname(), True, True) self.imapserver.releaseconnection(imapobj) except OfflineImapError as e: # retry on dropped connections, raise otherwise self.imapserver.releaseconnection(imapobj, True) if e.severity == OfflineImapError.ERROR.FOLDER_RETRY: retry = True else: raise except: # cleanup and raise on all other errors self.imapserver.releaseconnection(imapobj, True) raise # 1. Some mail servers do not return an EXISTS response # if the folder is empty. 2. ZIMBRA servers can return # multiple EXISTS replies in the form 500, 1000, 1500, # 1623 so check for potentially multiple replies. if imapdata == [None]: return True maxmsgid = 0 for msgid in imapdata: maxmsgid = max(long(msgid), maxmsgid) # Different number of messages than last time? if maxmsgid != statusfolder.getmessagecount(): return True return False def cachemessagelist(self): maxage = self.config.getdefaultint("Account %s" % self.accountname, "maxage", -1) maxsize = self.config.getdefaultint("Account %s" % self.accountname, "maxsize", -1) self.messagelist = {} imapobj = self.imapserver.acquireconnection() try: res_type, imapdata = imapobj.select(self.getfullname(), True, True) if imapdata == [None] or imapdata[0] == '0': # Empty folder, no need to populate message list return # By default examine all UIDs in this folder msgsToFetch = '1:*' if (maxage != -1) | (maxsize != -1): search_cond = "("; if(maxage != -1): #find out what the oldest message is that we should look at oldest_struct = time.gmtime(time.time() - (60*60*24*maxage)) if oldest_struct[0] < 1900: raise OfflineImapError("maxage setting led to year %d. " "Abort syncing." % oldest_struct[0], OfflineImapError.ERROR.REPO) search_cond += "SINCE %02d-%s-%d" % ( oldest_struct[2], MonthNames[oldest_struct[1]], oldest_struct[0]) if(maxsize != -1): if(maxage != -1): # There are two conditions, add space search_cond += " " search_cond += "SMALLER %d" % maxsize search_cond += ")" res_type, res_data = imapobj.search(None, search_cond) if res_type != 'OK': raise OfflineImapError("SEARCH in folder [%s]%s failed. " "Search string was '%s'. Server responded '[%s] %s'" % ( self.getrepository(), self, search_cond, res_type, res_data), OfflineImapError.ERROR.FOLDER) # Result UIDs are seperated by space, coalesce into ranges msgsToFetch = imaputil.uid_sequence(res_data[0].split()) if not msgsToFetch: return # No messages to sync # Get the flags and UIDs for these. single-quotes prevent # imaplib2 from quoting the sequence. res_type, response = imapobj.fetch("'%s'" % msgsToFetch, '(FLAGS UID)') if res_type != 'OK': raise OfflineImapError("FETCHING UIDs in folder [%s]%s failed. " "Server responded '[%s] %s'" % ( self.getrepository(), self, res_type, response), OfflineImapError.ERROR.FOLDER) finally: self.imapserver.releaseconnection(imapobj) for messagestr in response: # looks like: '1 (FLAGS (\\Seen Old) UID 4807)' or None if no msg # Discard initial message number. 
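# e.g. "1 (FLAGS (\Seen) UID 4807)" -> drop the leading sequence number,
# then imaputil.flags2hash() turns the parenthesised pairs into a dict
# with 'FLAGS' and 'UID' keys.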
if messagestr == None: continue messagestr = messagestr.split(' ', 1)[1] options = imaputil.flags2hash(messagestr) if not 'UID' in options: self.ui.warn('No UID in message with options %s' %\ str(options), minor = 1) else: uid = long(options['UID']) flags = imaputil.flagsimap2maildir(options['FLAGS']) rtime = imaplibutil.Internaldate2epoch(messagestr) self.messagelist[uid] = {'uid': uid, 'flags': flags, 'time': rtime} def getmessagelist(self): return self.messagelist def getmessage(self, uid): """Retrieve message with UID from the IMAP server (incl body) :returns: the message body or throws and OfflineImapError (probably severity MESSAGE) if e.g. no message with this UID could be found. """ imapobj = self.imapserver.acquireconnection() try: fails_left = 2 # retry on dropped connection while fails_left: try: imapobj.select(self.getfullname(), readonly = True) res_type, data = imapobj.uid('fetch', str(uid), '(BODY.PEEK[])') fails_left = 0 except imapobj.abort as e: # Release dropped connection, and get a new one self.imapserver.releaseconnection(imapobj, True) imapobj = self.imapserver.acquireconnection() self.ui.error(e, exc_info()[2]) fails_left -= 1 if not fails_left: raise e if data == [None] or res_type != 'OK': #IMAP server says bad request or UID does not exist severity = OfflineImapError.ERROR.MESSAGE reason = "IMAP server '%s' failed to fetch message UID '%d'."\ "Server responded: %s %s" % (self.getrepository(), uid, res_type, data) if data == [None]: #IMAP server did not find a message with this UID reason = "IMAP server '%s' does not have a message "\ "with UID '%s'" % (self.getrepository(), uid) raise OfflineImapError(reason, severity) # data looks now e.g. [('320 (UID 17061 BODY[] # {2565}','msgbody....')] we only asked for one message, # and that msg is in data[0]. msbody is in [0][1] data = data[0][1].replace("\r\n", "\n") if len(data)>200: dbg_output = "%s...%s" % (str(data)[:150], str(data)[-50:]) else: dbg_output = data self.ui.debug('imap', "Returned object from fetching %d: '%s'" % (uid, dbg_output)) finally: self.imapserver.releaseconnection(imapobj) return data def getmessagetime(self, uid): return self.messagelist[uid]['time'] def getmessageflags(self, uid): return self.messagelist[uid]['flags'] def generate_randomheader(self, content): """Returns a unique X-OfflineIMAP header Generate an 'X-OfflineIMAP' mail header which contains a random unique value (which is based on the mail content, and a random number). This header allows us to fetch a mail after APPENDing it to an IMAP server and thus find out the UID that the server assigned it. :returns: (headername, headervalue) tuple, consisting of strings headername == 'X-OfflineIMAP' and headervalue will be a random string """ headername = 'X-OfflineIMAP' # We need a random component too. If we ever upload the same # mail twice (e.g. in different folders), we would still need to # get the UID for the correct one. As we won't have too many # mails with identical content, the randomness requirements are # not extremly critial though. 
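# The header value built below has the form "<crc32-of-content>-<random>",
# e.g. "2011960205-483290471" (illustrative value only).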
# compute unsigned crc32 of 'content' as unique hash # NB: crc32 returns unsigned only starting with python 3.0 headervalue = str( binascii.crc32(content) & 0xffffffff ) + '-' headervalue += str(self.randomgenerator.randint(0,9999999999)) return (headername, headervalue) def savemessage_addheader(self, content, headername, headervalue): self.ui.debug('imap', 'savemessage_addheader: called to add %s: %s' % (headername, headervalue)) insertionpoint = content.find("\r\n\r\n") self.ui.debug('imap', 'savemessage_addheader: insertionpoint = %d' % insertionpoint) leader = content[0:insertionpoint] self.ui.debug('imap', 'savemessage_addheader: leader = %s' % repr(leader)) if insertionpoint == 0 or insertionpoint == -1: newline = '' insertionpoint = 0 else: newline = "\r\n" newline += "%s: %s" % (headername, headervalue) self.ui.debug('imap', 'savemessage_addheader: newline = ' + repr(newline)) trailer = content[insertionpoint:] self.ui.debug('imap', 'savemessage_addheader: trailer = ' + repr(trailer)) return leader + newline + trailer def savemessage_searchforheader(self, imapobj, headername, headervalue): self.ui.debug('imap', 'savemessage_searchforheader called for %s: %s' % \ (headername, headervalue)) # Now find the UID it got. headervalue = imapobj._quote(headervalue) try: matchinguids = imapobj.uid('search', 'HEADER', headername, headervalue)[1][0] except imapobj.error as err: # IMAP server doesn't implement search or had a problem. self.ui.debug('imap', "savemessage_searchforheader: got IMAP error '%s' while attempting to UID SEARCH for message with header %s" % (err, headername)) return 0 self.ui.debug('imap', 'savemessage_searchforheader got initial matchinguids: ' + repr(matchinguids)) if matchinguids == '': self.ui.debug('imap', "savemessage_searchforheader: UID SEARCH for message with header %s yielded no results" % headername) return 0 matchinguids = matchinguids.split(' ') self.ui.debug('imap', 'savemessage_searchforheader: matchinguids now ' + \ repr(matchinguids)) if len(matchinguids) != 1 or matchinguids[0] == None: raise ValueError("While attempting to find UID for message with " "header %s, got wrong-sized matchinguids of %s" %\ (headername, str(matchinguids))) return long(matchinguids[0]) def savemessage_fetchheaders(self, imapobj, headername, headervalue): """ We fetch all new mail headers and search for the right X-OfflineImap line by hand. The response from the server has form: ( 'OK', [ ( '185 (RFC822.HEADER {1789}', '... mail headers ...' ), ' UID 2444)', ( '186 (RFC822.HEADER {1789}', '... 2nd mail headers ...' ), ' UID 2445)' ] ) We need to locate the UID just after mail headers containing our X-OfflineIMAP line. Returns UID when found, 0 when not found. """ self.ui.debug('imap', 'savemessage_fetchheaders called for %s: %s' % \ (headername, headervalue)) # run "fetch X:* rfc822.header" # since we stored the mail we are looking for just recently, it would # not be optimal to fetch all messages. So we'll find highest message # UID in our local messagelist and search from there (exactly from # UID+1). That works because UIDs are guaranteed to be unique and # ascending. if self.getmessagelist(): start = 1+max(self.getmessagelist().keys()) else: # Folder was empty - start from 1 start = 1 # Imaplib quotes all parameters of a string type. That must not happen # with the range X:*. 
So we use bytearray to stop imaplib from getting # in our way result = imapobj.uid('FETCH', bytearray('%d:*' % start), 'rfc822.header') if result[0] != 'OK': raise OfflineImapError('Error fetching mail headers: ' + '. '.join(result[1]), OfflineImapError.ERROR.MESSAGE) result = result[1] found = 0 for item in result: if found == 0 and type(item) == type( () ): # Walk just tuples if re.search("(?:^|\\r|\\n)%s:\s*%s(?:\\r|\\n)" % (headername, headervalue), item[1], flags=re.IGNORECASE): found = 1 elif found == 1: if type(item) == type (""): uid = re.search("UID\s+(\d+)", item, flags=re.IGNORECASE) if uid: return int(uid.group(1)) else: self.ui.warn("Can't parse FETCH response, can't find UID: %s", result.__repr__()) else: self.ui.warn("Can't parse FETCH response, we awaited string: %s", result.__repr__()) return 0 def getmessageinternaldate(self, content, rtime=None): """Parses mail and returns an INTERNALDATE string It will use information in the following order, falling back as an attempt fails: - rtime parameter - Date header of email We return None, if we couldn't find a valid date. In this case the IMAP server will use the server local time when appening (per RFC). Note, that imaplib's Time2Internaldate is inherently broken as it returns localized date strings which are invalid for IMAP servers. However, that function is called for *every* append() internally. So we need to either pass in `None` or the correct string (in which case Time2Internaldate() will do nothing) to append(). The output of this function is designed to work as input to the imapobj.append() function. TODO: We should probably be returning a bytearray rather than a string here, because the IMAP server will expect plain ASCII. However, imaplib.Time2INternaldate currently returns a string so we go with the same for now. :param rtime: epoch timestamp to be used rather than analyzing the email. :returns: string in the form of "DD-Mmm-YYYY HH:MM:SS +HHMM" (including double quotes) or `None` in case of failure (which is fine as value for append).""" if rtime is None: message = email.message_from_string(content) # parsedate returns a 9-tuple that can be passed directly to # time.mktime(); Will be None if missing or not in a valid # format. Note that indexes 6, 7, and 8 of the result tuple are # not usable. datetuple = email.utils.parsedate(message.get('Date')) if datetuple is None: #could not determine the date, use the local time. return None #make it a real struct_time, so we have named attributes datetuple = time.struct_time(datetuple) else: #rtime is set, use that instead datetuple = time.localtime(rtime) try: # Check for invalid dates if datetuple[0] < 1981: raise ValueError # Check for invalid dates datetuple_check = time.localtime(time.mktime(datetuple)) if datetuple[:2] != datetuple_check[:2]: raise ValueError except (ValueError, OverflowError): # Argh, sometimes it's a valid format but year is 0102 # or something. Argh. It seems that Time2Internaldate # will rause a ValueError if the year is 0102 but not 1902, # but some IMAP servers nonetheless choke on 1902. self.ui.debug('imap', "Message with invalid date %s. Server will use local time." 
\ % datetuple) return None #produce a string representation of datetuple that works as #INTERNALDATE num2mon = {1:'Jan', 2:'Feb', 3:'Mar', 4:'Apr', 5:'May', 6:'Jun', 7:'Jul', 8:'Aug', 9:'Sep', 10:'Oct', 11:'Nov', 12:'Dec'} #tm_isdst coming from email.parsedate is not usable, we still use it here, mhh if datetuple.tm_isdst == '1': zone = -time.altzone else: zone = -time.timezone offset_h, offset_m = divmod(zone//60, 60) internaldate = '"%02d-%s-%04d %02d:%02d:%02d %+03d%02d"' \ % (datetuple.tm_mday, num2mon[datetuple.tm_mon], datetuple.tm_year, \ datetuple.tm_hour, datetuple.tm_min, datetuple.tm_sec, offset_h, offset_m) return internaldate def savemessage(self, uid, content, flags, rtime): """Save the message on the Server This backend always assigns a new uid, so the uid arg is ignored. This function will update the self.messagelist dict to contain the new message after sucessfully saving it. See folder/Base for details. Note that savemessage() does not check against dryrun settings, so you need to ensure that savemessage is never called in a dryrun mode. :param rtime: A timestamp to be used as the mail date :returns: the UID of the new message as assigned by the server. If the message is saved, but it's UID can not be found, it will return 0. If the message can't be written (folder is read-only for example) it will return -1.""" self.ui.savemessage('imap', uid, flags, self) # already have it, just save modified flags if uid > 0 and self.uidexists(uid): self.savemessageflags(uid, flags) return uid retry_left = 2 # succeeded in APPENDING? imapobj = self.imapserver.acquireconnection() try: while retry_left: # UIDPLUS extension provides us with an APPENDUID response. use_uidplus = 'UIDPLUS' in imapobj.capabilities # get the date of the message, so we can pass it to the server. date = self.getmessageinternaldate(content, rtime) content = re.sub("(?200: dbg_output = "%s...%s" % (content[:150], content[-50:]) else: dbg_output = content self.ui.debug('imap', "savemessage: date: %s, content: '%s'" % (date, dbg_output)) try: # Select folder for append and make the box READ-WRITE imapobj.select(self.getfullname()) except imapobj.readonly: # readonly exception. Return original uid to notify that # we did not save the message. (see savemessage in Base.py) self.ui.msgtoreadonly(self, uid, content, flags) return uid #Do the APPEND try: (typ, dat) = imapobj.append(self.getfullname(), imaputil.flagsmaildir2imap(flags), date, content) retry_left = 0 # Mark as success except imapobj.abort as e: # connection has been reset, release connection and retry. retry_left -= 1 self.imapserver.releaseconnection(imapobj, True) imapobj = self.imapserver.acquireconnection() if not retry_left: raise OfflineImapError("Saving msg in folder '%s', " "repository '%s' failed (abort). Server reponded: %s\n" "Message content was: %s" % (self, self.getrepository(), str(e), dbg_output), OfflineImapError.ERROR.MESSAGE) self.ui.error(e, exc_info()[2]) except imapobj.error as e: # APPEND failed # If the server responds with 'BAD', append() # raise()s directly. So we catch that too. # drop conn, it might be bad. self.imapserver.releaseconnection(imapobj, True) imapobj = None raise OfflineImapError("Saving msg folder '%s', repo '%s'" "failed (error). Server reponded: %s\nMessage content was: " "%s" % (self, self.getrepository(), str(e), dbg_output), OfflineImapError.ERROR.MESSAGE) # Checkpoint. Let it write out stuff, etc. Eg searches for # just uploaded messages won't work if we don't do this. 
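# ---------------------------------------------------------------------
# Example sketch for illustration only (not part of offlineimap itself):
# getmessageinternaldate() above produces an INTERNALDATE string of the
# form "DD-Mmm-YYYY HH:MM:SS +HHMM" (double quotes included). A minimal
# standalone version of that formatting, using the same divmod trick for
# the timezone offset, might look like the function below; the name
# example_internaldate is made up.
import time

def example_internaldate(epoch):
    tt = time.localtime(epoch)
    months = ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
              'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')
    zone = -time.altzone if tt.tm_isdst > 0 else -time.timezone
    offset_h, offset_m = divmod(zone // 60, 60)
    return '"%02d-%s-%04d %02d:%02d:%02d %+03d%02d"' % (
        tt.tm_mday, months[tt.tm_mon - 1], tt.tm_year,
        tt.tm_hour, tt.tm_min, tt.tm_sec, offset_h, offset_m)
# example_internaldate(time.time()) -> e.g. '"12-Jun-2012 14:03:07 +0200"'
# ---------------------------------------------------------------------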
(typ,dat) = imapobj.check() assert(typ == 'OK') # get the new UID. Test for APPENDUID response even if the # server claims to not support it, as e.g. Gmail does :-( if use_uidplus or imapobj._get_untagged_response('APPENDUID', True): # get new UID from the APPENDUID response, it could look # like OK [APPENDUID 38505 3955] APPEND completed with # 38505 bein folder UIDvalidity and 3955 the new UID. # note: we would want to use .response() here but that # often seems to return [None], even though we have # data. TODO resp = imapobj._get_untagged_response('APPENDUID') if resp == [None]: self.ui.warn("Server supports UIDPLUS but got no APPENDUID " "appending a message.") return 0 uid = long(resp[-1].split(' ')[1]) if uid == 0: self.ui.warn("savemessage: Server supports UIDPLUS, but" " we got no usable uid back. APPENDUID reponse was " "'%s'" % str(resp)) else: # we don't support UIDPLUS uid = self.savemessage_searchforheader(imapobj, headername, headervalue) # See docs for savemessage in Base.py for explanation # of this and other return values if uid == 0: self.ui.debug('imap', 'savemessage: attempt to get new UID ' 'UID failed. Search headers manually.') uid = self.savemessage_fetchheaders(imapobj, headername, headervalue) self.ui.warn('imap', "savemessage: Searching mails for new " "Message-ID failed. Could not determine new UID.") finally: self.imapserver.releaseconnection(imapobj) if uid: # avoid UID FETCH 0 crash happening later on self.messagelist[uid] = {'uid': uid, 'flags': flags} self.ui.debug('imap', 'savemessage: returning new UID %d' % uid) return uid def savemessageflags(self, uid, flags): """Change a message's flags to `flags`. Note that this function does not check against dryrun settings, so you need to ensure that it is never called in a dryrun mode.""" imapobj = self.imapserver.acquireconnection() try: try: imapobj.select(self.getfullname()) except imapobj.readonly: self.ui.flagstoreadonly(self, [uid], flags) return result = imapobj.uid('store', '%d' % uid, 'FLAGS', imaputil.flagsmaildir2imap(flags)) assert result[0] == 'OK', 'Error with store: ' + '. 
'.join(result[1]) finally: self.imapserver.releaseconnection(imapobj) result = result[1][0] if not result: self.messagelist[uid]['flags'] = flags else: flags = imaputil.flags2hash(imaputil.imapsplit(result)[1])['FLAGS'] self.messagelist[uid]['flags'] = imaputil.flagsimap2maildir(flags) def addmessageflags(self, uid, flags): self.addmessagesflags([uid], flags) def addmessagesflags_noconvert(self, uidlist, flags): self.processmessagesflags('+', uidlist, flags) def addmessagesflags(self, uidlist, flags): """This is here for the sake of UIDMaps.py -- deletemessages must add flags and get a converted UID, and if we don't have noconvert, then UIDMaps will try to convert it twice.""" self.addmessagesflags_noconvert(uidlist, flags) def deletemessageflags(self, uid, flags): self.deletemessagesflags([uid], flags) def deletemessagesflags(self, uidlist, flags): self.processmessagesflags('-', uidlist, flags) def processmessagesflags(self, operation, uidlist, flags): if len(uidlist) > 101: # Hack for those IMAP ervers with a limited line length self.processmessagesflags(operation, uidlist[:100], flags) self.processmessagesflags(operation, uidlist[100:], flags) return imapobj = self.imapserver.acquireconnection() try: try: imapobj.select(self.getfullname()) except imapobj.readonly: self.ui.flagstoreadonly(self, uidlist, flags) return r = imapobj.uid('store', imaputil.uid_sequence(uidlist), operation + 'FLAGS', imaputil.flagsmaildir2imap(flags)) assert r[0] == 'OK', 'Error with store: ' + '. '.join(r[1]) r = r[1] finally: self.imapserver.releaseconnection(imapobj) # Some IMAP servers do not always return a result. Therefore, # only update the ones that it talks about, and manually fix # the others. needupdate = list(uidlist) for result in r: if result == None: # Compensate for servers that don't return anything from # STORE. continue attributehash = imaputil.flags2hash(imaputil.imapsplit(result)[1]) if not ('UID' in attributehash and 'FLAGS' in attributehash): # Compensate for servers that don't return a UID attribute. continue flagstr = attributehash['FLAGS'] uid = long(attributehash['UID']) self.messagelist[uid]['flags'] = imaputil.flagsimap2maildir(flagstr) try: needupdate.remove(uid) except ValueError: # Let it slide if it's not in the list pass for uid in needupdate: if operation == '+': self.messagelist[uid]['flags'] |= flags elif operation == '-': self.messagelist[uid]['flags'] -= flags def change_message_uid(self, uid, new_uid): """Change the message from existing uid to new_uid If the backend supports it. 
IMAP does not and will throw errors.""" raise OfflineImapError('IMAP backend cannot change a messages UID from ' '%d to %d' % (uid, new_uid), OfflineImapError.ERROR.MESSAGE) def deletemessage(self, uid): self.deletemessages_noconvert([uid]) def deletemessages(self, uidlist): self.deletemessages_noconvert(uidlist) def deletemessages_noconvert(self, uidlist): # Weed out ones not in self.messagelist uidlist = [uid for uid in uidlist if self.uidexists(uid)] if not len(uidlist): return self.addmessagesflags_noconvert(uidlist, set('T')) imapobj = self.imapserver.acquireconnection() try: try: imapobj.select(self.getfullname()) except imapobj.readonly: self.ui.deletereadonly(self, uidlist) return if self.expunge: assert(imapobj.expunge()[0] == 'OK') finally: self.imapserver.releaseconnection(imapobj) for uid in uidlist: del self.messagelist[uid] spaetz-offlineimap-c9e9690/offlineimap/folder/LocalStatus.py000066400000000000000000000117541176237577200242660ustar00rootroot00000000000000# Local status cache virtual folder # Copyright (C) 2002 - 2011 John Goerzen & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from .Base import BaseFolder import os import threading magicline = "OFFLINEIMAP LocalStatus CACHE DATA - DO NOT MODIFY - FORMAT 1" class LocalStatusFolder(BaseFolder): def __init__(self, name, repository): self.sep = '.' #needs to be set before super.__init__() super(LocalStatusFolder, self).__init__(name, repository) self.filename = os.path.join(self.getroot(), self.getfolderbasename()) self.messagelist = {} self.savelock = threading.Lock() self.doautosave = self.config.getdefaultboolean("general", "fsync", False) """Should we perform fsyncs as often as possible?""" def storesmessages(self): return 0 def isnewfolder(self): return not os.path.exists(self.filename) def getname(self): return self.name def getroot(self): return self.repository.root def getsep(self): return self.sep def getfullname(self): return self.filename def deletemessagelist(self): if not self.isnewfolder(): os.unlink(self.filename) def cachemessagelist(self): if self.isnewfolder(): self.messagelist = {} return file = open(self.filename, "rt") self.messagelist = {} line = file.readline().strip() if not line: # The status file is empty - should not have happened, # but somehow did. errstr = "Cache file '%s' is empty. Closing..." 
% self.filename self.ui.warn(errstr) file.close() return assert(line == magicline) for line in file.xreadlines(): line = line.strip() try: uid, flags = line.split(':') uid = long(uid) flags = set(flags) except ValueError as e: errstr = "Corrupt line '%s' in cache file '%s'" % \ (line, self.filename) self.ui.warn(errstr) raise ValueError(errstr) self.messagelist[uid] = {'uid': uid, 'flags': flags} file.close() def save(self): self.savelock.acquire() try: file = open(self.filename + ".tmp", "wt") file.write(magicline + "\n") for msg in self.messagelist.values(): flags = msg['flags'] flags = ''.join(sorted(flags)) file.write("%s:%s\n" % (msg['uid'], flags)) file.flush() if self.doautosave: os.fsync(file.fileno()) file.close() os.rename(self.filename + ".tmp", self.filename) if self.doautosave: fd = os.open(os.path.dirname(self.filename), os.O_RDONLY) os.fsync(fd) os.close(fd) finally: self.savelock.release() def getmessagelist(self): return self.messagelist def savemessage(self, uid, content, flags, rtime): """Writes a new message, with the specified uid. See folder/Base for detail. Note that savemessage() does not check against dryrun settings, so you need to ensure that savemessage is never called in a dryrun mode.""" if uid < 0: # We cannot assign a uid. return uid if uid in self.messagelist: # already have it self.savemessageflags(uid, flags) return uid self.messagelist[uid] = {'uid': uid, 'flags': flags, 'time': rtime} self.save() return uid def getmessageflags(self, uid): return self.messagelist[uid]['flags'] def getmessagetime(self, uid): return self.messagelist[uid]['time'] def savemessageflags(self, uid, flags): self.messagelist[uid]['flags'] = flags self.save() def deletemessage(self, uid): self.deletemessages([uid]) def deletemessages(self, uidlist): # Weed out ones not in self.messagelist uidlist = [uid for uid in uidlist if uid in self.messagelist] if not len(uidlist): return for uid in uidlist: del(self.messagelist[uid]) self.save() spaetz-offlineimap-c9e9690/offlineimap/folder/LocalStatusSQLite.py000066400000000000000000000256371176237577200253550ustar00rootroot00000000000000# Local status cache virtual folder: SQLite backend # Copyright (C) 2009-2011 Stewart Smith and contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA import os.path import re from threading import Lock from .LocalStatus import LocalStatusFolder try: import sqlite3 as sqlite except: pass #fail only if needed later on, not on import class LocalStatusSQLiteFolder(LocalStatusFolder): """LocalStatus backend implemented with an SQLite database As python-sqlite currently does not allow to access the same sqlite objects from various threads, we need to open get and close a db connection and cursor for all operations. This is a big disadvantage and we might want to investigate if we cannot hold an object open for a thread somehow.""" #though. 
According to sqlite docs, you need to commit() before #the connection is closed or your changes will be lost!""" #get db connection which autocommits #connection = sqlite.connect(self.filename, isolation_level=None) #cursor = connection.cursor() #return connection, cursor #current version of our db format cur_version = 1 def __init__(self, name, repository): super(LocalStatusSQLiteFolder, self).__init__(name, repository) # dblock protects against concurrent writes in same connection self._dblock = Lock() #Try to establish connection, no need for threadsafety in __init__ try: self.connection = sqlite.connect(self.filename, check_same_thread = False) except NameError: # sqlite import had failed raise UserWarning('SQLite backend chosen, but no sqlite python ' 'bindings available. Please install.') #Make sure sqlite is in multithreading SERIALIZE mode assert sqlite.threadsafety == 1, 'Your sqlite is not multithreading safe.' #Test if db version is current enough and if db is readable. try: cursor = self.connection.execute("SELECT value from metadata WHERE key='db_version'") except sqlite.DatabaseError: #db file missing or corrupt, recreate it. self.upgrade_db(0) else: # fetch db version and upgrade if needed version = int(cursor.fetchone()[0]) if version < LocalStatusSQLiteFolder.cur_version: self.upgrade_db(version) def sql_write(self, sql, vars=None, executemany=False): """Execute some SQL, retrying if the db was locked. :param sql: the SQL string passed to execute() :param vars: the variable values to `sql`. E.g. (1,2) or {uid:1, flags:'T'}. See sqlite docs for possibilities. :param executemany: bool indicating whether we want to perform conn.executemany() or conn.execute(). :returns: the Cursor() or raises an Exception""" success = False while not success: self._dblock.acquire() try: if vars is None: if executemany: cursor = self.connection.executemany(sql) else: cursor = self.connection.execute(sql) else: if executemany: cursor = self.connection.executemany(sql, vars) else: cursor = self.connection.execute(sql, vars) success = True self.connection.commit() except sqlite.OperationalError as e: if e.args[0] == 'cannot commit - no transaction is active': pass elif e.args[0] == 'database is locked': self.ui.debug('', "Locked sqlite database, retrying.") success = False else: raise finally: self._dblock.release() return cursor def upgrade_db(self, from_ver): """Upgrade the sqlite format from version 'from_ver' to current""" if hasattr(self, 'connection'): self.connection.close() #close old connections first self.connection = sqlite.connect(self.filename, check_same_thread = False) if from_ver == 0: # from_ver==0: no db existent: plain text migration? self.create_db() # below was derived from repository.getfolderfilename() logic plaintextfilename = os.path.join( self.repository.account.getaccountmeta(), 'LocalStatus', self.getfolderbasename()) # MIGRATE from plaintext if needed if os.path.exists(plaintextfilename): self.ui._msg('Migrating LocalStatus cache from plain text ' 'to sqlite database for %s:%s' %\ (self.repository, self)) file = open(plaintextfilename, "rt") line = file.readline().strip() data = [] for line in file.xreadlines(): uid, flags = line.strip().split(':') uid = long(uid) flags = ''.join(sorted(flags)) data.append((uid,flags)) self.connection.executemany('INSERT INTO status (id,flags) VALUES (?,?)', data) self.connection.commit() file.close() os.rename(plaintextfilename, plaintextfilename + ".old") # Future version upgrades come here... # if from_ver <= 1: ... 
#upgrade from 1 to 2 # if from_ver <= 2: ... #upgrade from 2 to 3 def create_db(self): """Create a new db file""" self.ui._msg('Creating new Local Status db for %s:%s' \ % (self.repository, self)) if hasattr(self, 'connection'): self.connection.close() #close old connections first self.connection = sqlite.connect(self.filename, check_same_thread = False) self.connection.executescript(""" CREATE TABLE metadata (key VARCHAR(50) PRIMARY KEY, value VARCHAR(128)); INSERT INTO metadata VALUES('db_version', '1'); CREATE TABLE status (id INTEGER PRIMARY KEY, flags VARCHAR(50)); """) self.connection.commit() def isnewfolder(self): # testing the existence of the db file won't work. It is created # as soon as this class instance was intitiated. So say it is a # new folder when there are no messages at all recorded in it. return self.getmessagecount() > 0 def deletemessagelist(self): """delete all messages in the db""" self.sql_write('DELETE FROM status') def cachemessagelist(self): self.messagelist = {} cursor = self.connection.execute('SELECT id,flags from status') for row in cursor: flags = set(row[1]) self.messagelist[row[0]] = {'uid': row[0], 'flags': flags} def save(self): #Noop in this backend pass # Following some pure SQLite functions, where we chose to use # BaseFolder() methods instead. Doing those on the in-memory list is # quicker anyway. If our db becomes so big that we don't want to # maintain the in-memory list anymore, these might come in handy # in the future though. # #def uidexists(self,uid): # conn, cursor = self.get_cursor() # with conn: # cursor.execute('SELECT id FROM status WHERE id=:id',{'id': uid}) # return cursor.fetchone() # This would be the pure SQLite solution, use BaseFolder() method, # to avoid threading with sqlite... #def getmessageuidlist(self): # conn, cursor = self.get_cursor() # with conn: # cursor.execute('SELECT id from status') # r = [] # for row in cursor: # r.append(row[0]) # return r #def getmessagecount(self): # conn, cursor = self.get_cursor() # with conn: # cursor.execute('SELECT count(id) from status'); # return cursor.fetchone()[0] #def getmessageflags(self, uid): # conn, cursor = self.get_cursor() # with conn: # cursor.execute('SELECT flags FROM status WHERE id=:id', # {'id': uid}) # for row in cursor: # flags = [x for x in row[0]] # return flags # assert False,"getmessageflags() called on non-existing message" def savemessage(self, uid, content, flags, rtime): """Writes a new message, with the specified uid. See folder/Base for detail. Note that savemessage() does not check against dryrun settings, so you need to ensure that savemessage is never called in a dryrun mode.""" if uid < 0: # We cannot assign a uid. return uid if self.uidexists(uid): # already have it self.savemessageflags(uid, flags) return uid self.messagelist[uid] = {'uid': uid, 'flags': flags, 'time': rtime} flags = ''.join(sorted(flags)) self.sql_write('INSERT INTO status (id,flags) VALUES (?,?)', (uid,flags)) return uid def savemessageflags(self, uid, flags): self.messagelist[uid] = {'uid': uid, 'flags': flags} flags = ''.join(sorted(flags)) self.sql_write('UPDATE status SET flags=? 
WHERE id=?',(flags,uid)) def deletemessage(self, uid): if not uid in self.messagelist: return self.sql_write('DELETE FROM status WHERE id=?', (uid, )) del(self.messagelist[uid]) def deletemessages(self, uidlist): """Delete list of UIDs from status cache This function uses sqlites executemany() function which is much faster than iterating through deletemessage() when we have many messages to delete.""" # Weed out ones not in self.messagelist uidlist = [uid for uid in uidlist if uid in self.messagelist] if not len(uidlist): return # arg2 needs to be an iterable of 1-tuples [(1,),(2,),...] self.sql_write('DELETE FROM status WHERE id=?', zip(uidlist, ), True) for uid in uidlist: del(self.messagelist[uid]) spaetz-offlineimap-c9e9690/offlineimap/folder/Maildir.py000066400000000000000000000363661176237577200234170ustar00rootroot00000000000000# Maildir folder support # Copyright (C) 2002 - 2011 John Goerzen & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA import socket import time import re import os from .Base import BaseFolder from threading import Lock try: from hashlib import md5 except ImportError: from md5 import md5 try: # python 2.6 has set() built in set except NameError: from sets import Set as set from offlineimap import OfflineImapError # Find the UID in a message filename re_uidmatch = re.compile(',U=(\d+)') # Find a numeric timestamp in a string (filename prefix) re_timestampmatch = re.compile('(\d+)'); timeseq = 0 lasttime = 0 timelock = Lock() def gettimeseq(): global lasttime, timeseq, timelock timelock.acquire() try: thistime = long(time.time()) if thistime == lasttime: timeseq += 1 return (thistime, timeseq) else: lasttime = thistime timeseq = 0 return (thistime, timeseq) finally: timelock.release() class MaildirFolder(BaseFolder): def __init__(self, root, name, sep, repository): self.sep = sep # needs to be set before super().__init__ super(MaildirFolder, self).__init__(name, repository) self.dofsync = self.config.getdefaultboolean("general", "fsync", True) self.root = root self.messagelist = None # check if we should use a different infosep to support Win file systems self.wincompatible = self.config.getdefaultboolean( "Account "+self.accountname, "maildir-windows-compatible", False) self.infosep = '!' if self.wincompatible else ':' """infosep is the separator between maildir name and flag appendix""" self.re_flagmatch = re.compile('%s2,(\w*)' % self.infosep) #self.ui is set in BaseFolder.init() # Everything up to the first comma or colon (or ! 
if Windows): self.re_prefixmatch = re.compile('([^'+ self.infosep + ',]*)') #folder's md, so we can match with recorded file md5 for validity self._foldermd5 = md5(self.getvisiblename()).hexdigest() # Cache the full folder path, as we use getfullname() very often self._fullname = os.path.join(self.getroot(), self.getname()) def getfullname(self): """Return the absolute file path to the Maildir folder (sans cur|new)""" return self._fullname def get_uidvalidity(self): """Retrieve the current connections UIDVALIDITY value Maildirs have no notion of uidvalidity, so we just return a magic token.""" return 42 #Checks to see if the given message is within the maximum age according #to the maildir name which should begin with a timestamp def _iswithinmaxage(self, messagename, maxage): #In order to have the same behaviour as SINCE in an IMAP search #we must convert this to the oldest time and then strip off hrs/mins #from that day oldest_time_utc = time.time() - (60*60*24*maxage) oldest_time_struct = time.gmtime(oldest_time_utc) oldest_time_today_seconds = ((oldest_time_struct[3] * 3600) \ + (oldest_time_struct[4] * 60) \ + oldest_time_struct[5]) oldest_time_utc -= oldest_time_today_seconds timestampmatch = re_timestampmatch.search(messagename) timestampstr = timestampmatch.group() timestamplong = long(timestampstr) if(timestamplong < oldest_time_utc): return False else: return True def _parse_filename(self, filename): """Returns a messages file name components Receives the file name (without path) of a msg. Usual format is '<%d_%d.%d.%s>,U=<%d>,FMD5=<%s>:2,' (pointy brackets denoting the various components). If FMD5 does not correspond with the current folder MD5, we will return None for the UID & FMD5 (as it is not valid in this folder). If UID or FMD5 can not be detected, we return `None` for the respective element. If flags are empty or cannot be detected, we return an empty flags list. :returns: (prefix, UID, FMD5, flags). UID is a numeric "long" type. flags is a set() of Maildir flags""" prefix, uid, fmd5, flags = None, None, None, set() prefixmatch = self.re_prefixmatch.match(filename) if prefixmatch: prefix = prefixmatch.group(1) folderstr = ',FMD5=%s' % self._foldermd5 foldermatch = folderstr in filename # If there was no folder MD5 specified, or if it mismatches, # assume it is a foreign (new) message and ret: uid, fmd5 = None, None if foldermatch: uidmatch = re_uidmatch.search(filename) if uidmatch: uid = long(uidmatch.group(1)) flagmatch = self.re_flagmatch.search(filename) if flagmatch: # Filter out all lowercase (custom maildir) flags. We don't # handle them yet. flags = set((c for c in flagmatch.group(1) if not c.islower())) return prefix, uid, fmd5, flags def _scanfolder(self): """Cache the message list from a Maildir. Maildir flags are: R (replied) S (seen) T (trashed) D (draft) F (flagged). :returns: dict that can be used as self.messagelist""" maxage = self.config.getdefaultint("Account " + self.accountname, "maxage", None) maxsize = self.config.getdefaultint("Account " + self.accountname, "maxsize", None) retval = {} files = [] nouidcounter = -1 # Messages without UIDs get negative UIDs. for dirannex in ['new', 'cur']: fulldirname = os.path.join(self.getfullname(), dirannex) files.extend((dirannex, filename) for filename in os.listdir(fulldirname)) for dirannex, filename in files: # We store just dirannex and filename, ie 'cur/123...' 
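# ---------------------------------------------------------------------
# Example sketch for illustration only (not part of offlineimap itself):
# _parse_filename() above documents Maildir file names of the shape
# '<prefix>,U=<uid>,FMD5=<md5><infosep>2,<flags>'. Extracting the UID and
# flags from a made-up sample name with equivalent regular expressions:
import re

def example_parse_maildir_name(filename, infosep=':'):
    uid_m = re.search(r',U=(\d+)', filename)
    flag_m = re.search(r'%s2,(\w*)' % infosep, filename)
    uid = int(uid_m.group(1)) if uid_m else None
    flags = set(flag_m.group(1)) if flag_m else set()
    return uid, flags
# example_parse_maildir_name('1234_0.42.host,U=7,FMD5=deadbeef:2,FS')
#   -> (7, set(['F', 'S']))
# ---------------------------------------------------------------------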
filepath = os.path.join(dirannex, filename) # check maxage/maxsize if this message should be considered if maxage and not self._iswithinmaxage(filename, maxage): continue if maxsize and (os.path.getsize(os.path.join( self.getfullname(), filepath)) > maxsize): continue (prefix, uid, fmd5, flags) = self._parse_filename(filename) if uid is None: # assign negative uid to upload it. uid = nouidcounter nouidcounter -= 1 else: # It comes from our folder. uidmatch = re_uidmatch.search(filename) uid = None if not uidmatch: uid = nouidcounter nouidcounter -= 1 else: uid = long(uidmatch.group(1)) # 'filename' is 'dirannex/filename', e.g. cur/123,U=1,FMD5=1:2,S retval[uid] = {'flags': flags, 'filename': filepath} return retval def quickchanged(self, statusfolder): """Returns True if the Maildir has changed""" self.cachemessagelist() # Folder has different uids than statusfolder => TRUE if sorted(self.getmessageuidlist()) != \ sorted(statusfolder.getmessageuidlist()): return True # Also check for flag changes, it's quick on a Maildir for (uid, message) in self.getmessagelist().iteritems(): if message['flags'] != statusfolder.getmessageflags(uid): return True return False #Nope, nothing changed def cachemessagelist(self): if self.messagelist is None: self.messagelist = self._scanfolder() def getmessagelist(self): return self.messagelist def getmessage(self, uid): """Return the content of the message""" filename = self.messagelist[uid]['filename'] filepath = os.path.join(self.getfullname(), filename) file = open(filepath, 'rt') retval = file.read() file.close() #TODO: WHY are we replacing \r\n with \n here? And why do we # read it as text? return retval.replace("\r\n", "\n") def getmessagetime(self, uid): filename = self.messagelist[uid]['filename'] filepath = os.path.join(self.getfullname(), filename) return os.path.getmtime(filepath) def new_message_filename(self, uid, flags=set()): """Creates a new unique Maildir filename :param uid: The UID`None`, or a set of maildir flags :param flags: A set of maildir flags :returns: String containing unique message filename""" timeval, timeseq = gettimeseq() return '%d_%d.%d.%s,U=%d,FMD5=%s%s2,%s' % \ (timeval, timeseq, os.getpid(), socket.gethostname(), uid, self._foldermd5, self.infosep, ''.join(sorted(flags))) def savemessage(self, uid, content, flags, rtime): """Writes a new message, with the specified uid. See folder/Base for detail. Note that savemessage() does not check against dryrun settings, so you need to ensure that savemessage is never called in a dryrun mode.""" # This function only ever saves to tmp/, # but it calls savemessageflags() to actually save to cur/ or new/. self.ui.savemessage('maildir', uid, flags, self) if uid < 0: # We cannot assign a new uid. return uid if uid in self.messagelist: # We already have it, just update flags. self.savemessageflags(uid, flags) return uid # Otherwise, save the message in tmp/ and then call savemessageflags() # to give it a permanent home. tmpdir = os.path.join(self.getfullname(), 'tmp') messagename = self.new_message_filename(uid, flags) # open file and write it out try: fd = os.open(os.path.join(tmpdir, messagename), os.O_EXCL|os.O_CREAT|os.O_WRONLY, 0o666) except OSError as e: if e.errno == 17: #FILE EXISTS ALREADY severity = OfflineImapError.ERROR.MESSAGE raise OfflineImapError("Unique filename %s already existing." 
%\ messagename, severity) else: raise file = os.fdopen(fd, 'wt') file.write(content) # Make sure the data hits the disk file.flush() if self.dofsync: os.fsync(fd) file.close() if rtime != None: os.utime(os.path.join(tmpdir, messagename), (rtime, rtime)) self.messagelist[uid] = {'flags': flags, 'filename': os.path.join('tmp', messagename)} # savemessageflags moves msg to 'cur' or 'new' as appropriate self.savemessageflags(uid, flags) self.ui.debug('maildir', 'savemessage: returning uid %d' % uid) return uid def getmessageflags(self, uid): return self.messagelist[uid]['flags'] def savemessageflags(self, uid, flags): """Sets the specified message's flags to the given set. This function moves the message to the cur or new subdir, depending on the 'S'een flag. Note that this function does not check against dryrun settings, so you need to ensure that it is never called in a dryrun mode.""" oldfilename = self.messagelist[uid]['filename'] dir_prefix, filename = os.path.split(oldfilename) # If a message has been seen, it goes into 'cur' dir_prefix = 'cur' if 'S' in flags else 'new' if flags != self.messagelist[uid]['flags']: # Flags have actually changed, construct new filename Strip # off existing infostring (possibly discarding small letter # flags that dovecot uses TODO) infomatch = self.re_flagmatch.search(filename) if infomatch: filename = filename[:-len(infomatch.group())] #strip off infostr = '%s2,%s' % (self.infosep, ''.join(sorted(flags))) filename += infostr newfilename = os.path.join(dir_prefix, filename) if (newfilename != oldfilename): try: os.rename(os.path.join(self.getfullname(), oldfilename), os.path.join(self.getfullname(), newfilename)) except OSError as e: raise OfflineImapError("Can't rename file '%s' to '%s': %s" % ( oldfilename, newfilename, e[1]), OfflineImapError.ERROR.FOLDER) self.messagelist[uid]['flags'] = flags self.messagelist[uid]['filename'] = newfilename def change_message_uid(self, uid, new_uid): """Change the message from existing uid to new_uid This will not update the statusfolder UID, you need to do that yourself. :param new_uid: (optional) If given, the old UID will be changed to a new UID. The Maildir backend can implement this as an efficient rename.""" if not uid in self.messagelist: raise OfflineImapError("Cannot change unknown Maildir UID %s" % uid) if uid == new_uid: return oldfilename = self.messagelist[uid]['filename'] dir_prefix, filename = os.path.split(oldfilename) flags = self.getmessageflags(uid) filename = self.new_message_filename(new_uid, flags) os.rename(os.path.join(self.getfullname(), oldfilename), os.path.join(self.getfullname(), dir_prefix, filename)) self.messagelist[new_uid] = self.messagelist[uid] del self.messagelist[uid] def deletemessage(self, uid): """Unlinks a message file from the Maildir. :param uid: UID of a mail message :type uid: String :return: Nothing, or an Exception if UID but no corresponding file found. """ if not self.uidexists(uid): return filename = self.messagelist[uid]['filename'] filepath = os.path.join(self.getfullname(), filename) try: os.unlink(filepath) except OSError: # Can't find the file -- maybe already deleted? newmsglist = self._scanfolder() if uid in newmsglist: # Nope, try new filename. filename = newmsglist[uid]['filename'] filepath = os.path.join(self.getfullname(), filename) os.unlink(filepath) # Yep -- return. 
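# ---------------------------------------------------------------------
# Example sketch for illustration only (not part of offlineimap itself):
# the delivery sequence used by savemessage()/savemessageflags() above,
# reduced to its essentials: create the file in tmp/ with O_EXCL, flush
# and fsync it, then rename it into new/. Paths and the function name
# are made up.
import os

def example_maildir_deliver(maildir, filename, content):
    tmppath = os.path.join(maildir, 'tmp', filename)
    fd = os.open(tmppath, os.O_EXCL | os.O_CREAT | os.O_WRONLY, 0o666)
    f = os.fdopen(fd, 'wt')
    f.write(content)
    f.flush()
    os.fsync(fd)          # make sure the data hits the disk first
    f.close()
    # rename is atomic within one filesystem, so readers of new/ never
    # see a partially written message
    os.rename(tmppath, os.path.join(maildir, 'new', filename))
# ---------------------------------------------------------------------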
del(self.messagelist[uid]) spaetz-offlineimap-c9e9690/offlineimap/folder/UIDMaps.py000066400000000000000000000262341176237577200232710ustar00rootroot00000000000000# Base folder support # Copyright (C) 2002-2012 John Goerzen & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from threading import Lock from .IMAP import IMAPFolder import os.path class MappedIMAPFolder(IMAPFolder): """IMAP class to map between Folder() instances where both side assign a uid This Folder is used on the local side, while the remote side should be an IMAPFolder. Instance variables (self.): r2l: dict mapping message uids: self.r2l[remoteuid]=localuid l2r: dict mapping message uids: self.r2l[localuid]=remoteuid #TODO: what is the difference, how are they used? diskr2l: dict mapping message uids: self.r2l[remoteuid]=localuid diskl2r: dict mapping message uids: self.r2l[localuid]=remoteuid""" def __init__(self, *args, **kwargs): IMAPFolder.__init__(self, *args, **kwargs) self.maplock = Lock() (self.diskr2l, self.diskl2r) = self._loadmaps() self._mb = IMAPFolder(*args, **kwargs) """Representing the local IMAP Folder using local UIDs""" def _getmapfilename(self): return os.path.join(self.repository.getmapdir(), self.getfolderbasename()) def _loadmaps(self): self.maplock.acquire() try: mapfilename = self._getmapfilename() if not os.path.exists(mapfilename): return ({}, {}) file = open(mapfilename, 'rt') r2l = {} l2r = {} while 1: line = file.readline() if not len(line): break try: line = line.strip() except ValueError: raise Exception("Corrupt line '%s' in UID mapping file '%s'" \ %(line, mapfilename)) (str1, str2) = line.split(':') loc = long(str1) rem = long(str2) r2l[rem] = loc l2r[loc] = rem return (r2l, l2r) finally: self.maplock.release() def _savemaps(self, dolock = 1): mapfilename = self._getmapfilename() if dolock: self.maplock.acquire() try: file = open(mapfilename + ".tmp", 'wt') for (key, value) in self.diskl2r.iteritems(): file.write("%d:%d\n" % (key, value)) file.close() os.rename(mapfilename + '.tmp', mapfilename) finally: if dolock: self.maplock.release() def _uidlist(self, mapping, items): try: return [mapping[x] for x in items] except KeyError as e: raise OfflineImapError("Could not find UID for msg '{0}' (f:'{1}'." " This is usually a bad thing and should be reported on the ma" "iling list.".format(e.args[0], self), OfflineImapError.ERROR.MESSAGE) def cachemessagelist(self): self._mb.cachemessagelist() reallist = self._mb.getmessagelist() self.maplock.acquire() try: # OK. Now we've got a nice list. First, delete things from the # summary that have been deleted from the folder. for luid in self.diskl2r.keys(): if not luid in reallist: ruid = self.diskl2r[luid] del self.diskr2l[ruid] del self.diskl2r[luid] # Now, assign negative UIDs to local items. 
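# ---------------------------------------------------------------------
# Example sketch for illustration only (not part of offlineimap itself):
# the UID mapping file read by _loadmaps() above is plain text with one
# "localuid:remoteuid" pair per line. Parsing it into the two lookup
# dicts boils down to this (file and function names are made up):
def example_load_uidmap(mapfilename):
    r2l, l2r = {}, {}
    with open(mapfilename, 'rt') as f:
        for line in f:
            line = line.strip()
            if not line:
                continue
            loc, rem = [int(x) for x in line.split(':')]
            r2l[rem] = loc
            l2r[loc] = rem
    return r2l, l2r
# ---------------------------------------------------------------------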
self._savemaps(dolock = 0) nextneg = -1 self.r2l = self.diskr2l.copy() self.l2r = self.diskl2r.copy() for luid in reallist.keys(): if not luid in self.l2r: ruid = nextneg nextneg -= 1 self.l2r[luid] = ruid self.r2l[ruid] = luid finally: self.maplock.release() def uidexists(self, ruid): """Checks if the (remote) UID exists in this Folder""" # This implementation overrides the one in BaseFolder, as it is # much more efficient for the mapped case. return ruid in self.r2l def getmessageuidlist(self): """Gets a list of (remote) UIDs. You may have to call cachemessagelist() before calling this function!""" # This implementation overrides the one in BaseFolder, as it is # much more efficient for the mapped case. return self.r2l.keys() def getmessagecount(self): """Gets the number of messages in this folder. You may have to call cachemessagelist() before calling this function!""" # This implementation overrides the one in BaseFolder, as it is # much more efficient for the mapped case. return len(self.r2l) def getmessagelist(self): """Gets the current message list. This function's implementation is quite expensive for the mapped UID case. You must call cachemessagelist() before calling this function!""" retval = {} localhash = self._mb.getmessagelist() self.maplock.acquire() try: for key, value in localhash.items(): try: key = self.l2r[key] except KeyError: # Sometimes, the IMAP backend may put in a new message, # then this function acquires the lock before the system # has the chance to note it in the mapping. In that case, # just ignore it. continue value = value.copy() value['uid'] = self.l2r[value['uid']] retval[key] = value return retval finally: self.maplock.release() def getmessage(self, uid): """Returns the content of the specified message.""" return self._mb.getmessage(self.r2l[uid]) def savemessage(self, uid, content, flags, rtime): """Writes a new message, with the specified uid. The UIDMaps class will not return a newly assigned uid, as it internally maps different uids between IMAP servers. So a successful savemessage() invocation will return the same uid it has been invoked with. As it maps between 2 IMAP servers which means the source message must already have an uid, it requires a positive uid to be passed in. Passing in a message with a negative uid will do nothing and return the negative uid. If the uid is > 0, the backend should set the uid to this, if it can. If it cannot set the uid to that, it will save it anyway. It will return the uid assigned in any case. See folder/Base for details. Note that savemessage() does not check against dryrun settings, so you need to ensure that savemessage is never called in a dryrun mode. """ self.ui.savemessage('imap', uid, flags, self) # Mapped UID instances require the source to already have a # positive UID, so simply return here. 
if uid < 0: return uid #if msg uid already exists, just modify the flags if uid in self.r2l: self.savemessageflags(uid, flags) return uid newluid = self._mb.savemessage(-1, content, flags, rtime) if newluid < 1: raise ValueError("Backend could not find uid for message, returned " "%s" % newluid) self.maplock.acquire() try: self.diskl2r[newluid] = uid self.diskr2l[uid] = newluid self.l2r[newluid] = uid self.r2l[uid] = newluid self._savemaps(dolock = 0) finally: self.maplock.release() return uid def getmessageflags(self, uid): return self._mb.getmessageflags(self.r2l[uid]) def getmessagetime(self, uid): return None def savemessageflags(self, uid, flags): """ Note that this function does not check against dryrun settings, so you need to ensure that it is never called in a dryrun mode.""" self._mb.savemessageflags(self.r2l[uid], flags) def addmessageflags(self, uid, flags): self._mb.addmessageflags(self.r2l[uid], flags) def addmessagesflags(self, uidlist, flags): self._mb.addmessagesflags(self._uidlist(self.r2l, uidlist), flags) def change_message_uid(self, ruid, new_ruid): """Change the message from existing ruid to new_ruid :param new_uid: The old remote UID will be changed to a new UID. The UIDMaps case handles this efficiently by simply changing the mappings file.""" if ruid not in self.r2l: raise OfflineImapError("Cannot change unknown Maildir UID %s" % ruid, OfflineImapError.ERROR.MESSAGE) if ruid == new_ruid: return # sanity check shortcut self.maplock.acquire() try: luid = self.r2l[ruid] self.l2r[luid] = new_ruid del self.r2l[ruid] self.r2l[new_ruid] = luid #TODO: diskl2r|r2l are a pain to sync and should be done away with #diskl2r only contains positive UIDs, so wrap in ifs if luid>0: self.diskl2r[luid] = new_ruid if ruid>0: del self.diskr2l[ruid] if new_ruid > 0: self.diskr2l[new_ruid] = luid self._savemaps(dolock = 0) finally: self.maplock.release() def _mapped_delete(self, uidlist): self.maplock.acquire() try: needssave = 0 for ruid in uidlist: luid = self.r2l[ruid] del self.r2l[ruid] del self.l2r[luid] if ruid > 0: del self.diskr2l[ruid] del self.diskl2r[luid] needssave = 1 if needssave: self._savemaps(dolock = 0) finally: self.maplock.release() def deletemessageflags(self, uid, flags): self._mb.deletemessageflags(self.r2l[uid], flags) def deletemessagesflags(self, uidlist, flags): self._mb.deletemessagesflags(self._uidlist(self.r2l, uidlist), flags) def deletemessage(self, uid): self._mb.deletemessage(self.r2l[uid]) self._mapped_delete([uid]) def deletemessages(self, uidlist): self._mb.deletemessages(self._uidlist(self.r2l, uidlist)) self._mapped_delete(uidlist) spaetz-offlineimap-c9e9690/offlineimap/folder/__init__.py000066400000000000000000000000671176237577200235620ustar00rootroot00000000000000from . import Base, Gmail, IMAP, Maildir, LocalStatus spaetz-offlineimap-c9e9690/offlineimap/imaplib2.py000066400000000000000000002505561176237577200222610ustar00rootroot00000000000000#!/usr/bin/env python """Threaded IMAP4 client. Based on RFC 3501 and original imaplib module. Public classes: IMAP4 IMAP4_SSL IMAP4_stream Public functions: Internaldate2Time ParseFlags Time2Internaldate """ __all__ = ("IMAP4", "IMAP4_SSL", "IMAP4_stream", "Internaldate2Time", "ParseFlags", "Time2Internaldate") __version__ = "2.33" __release__ = "2" __revision__ = "33" __credits__ = """ Authentication code contributed by Donn Cave June 1998. String method conversion by ESR, February 2001. GET/SETACL contributed by Anthony Baxter April 2001. IMAP4_SSL contributed by Tino Lange March 2002. 
GET/SETQUOTA contributed by Andreas Zeidler June 2002. PROXYAUTH contributed by Rick Holbert November 2002. IDLE via threads suggested by Philippe Normand January 2005. GET/SETANNOTATION contributed by Tomas Lindroos June 2005. COMPRESS/DEFLATE contributed by Bron Gondwana May 2009. STARTTLS from Jython's imaplib by Alan Kennedy. ID contributed by Dave Baggett November 2009. Improved untagged responses handling suggested by Dave Baggett November 2009. Improved thread naming, and 0 read detection contributed by Grant Edwards June 2010. Improved timeout handling contributed by Ivan Vovnenko October 2010. Timeout handling further improved by Ethan Glasser-Camp December 2010. Time2Internaldate() patch to match RFC2060 specification of English month names from bugs.python.org/issue11024 March 2011. starttls() bug fixed with the help of Sebastian Spaeth April 2011. Threads now set the "daemon" flag (suggested by offlineimap-project) April 2011. Single quoting introduced with the help of Vladimir Marek August 2011.""" __author__ = "Piers Lauder " __URL__ = "http://imaplib2.sourceforge.net" __license__ = "Python License" import binascii, errno, os, Queue, random, re, select, socket, sys, time, threading, zlib select_module = select # Globals CRLF = '\r\n' Debug = None # Backward compatibility IMAP4_PORT = 143 IMAP4_SSL_PORT = 993 IDLE_TIMEOUT_RESPONSE = '* IDLE TIMEOUT\r\n' IDLE_TIMEOUT = 60*29 # Don't stay in IDLE state longer READ_POLL_TIMEOUT = 30 # Without this timeout interrupted network connections can hang reader READ_SIZE = 32768 # Consume all available in socket DFLT_DEBUG_BUF_LVL = 3 # Level above which the logging output goes directly to stderr AllowedVersions = ('IMAP4REV1', 'IMAP4') # Most recent first # Commands CMD_VAL_STATES = 0 CMD_VAL_ASYNC = 1 NONAUTH, AUTH, SELECTED, LOGOUT = 'NONAUTH', 'AUTH', 'SELECTED', 'LOGOUT' Commands = { # name valid states asynchronous 'APPEND': ((AUTH, SELECTED), False), 'AUTHENTICATE': ((NONAUTH,), False), 'CAPABILITY': ((NONAUTH, AUTH, SELECTED), True), 'CHECK': ((SELECTED,), True), 'CLOSE': ((SELECTED,), False), 'COMPRESS': ((AUTH,), False), 'COPY': ((SELECTED,), True), 'CREATE': ((AUTH, SELECTED), True), 'DELETE': ((AUTH, SELECTED), True), 'DELETEACL': ((AUTH, SELECTED), True), 'EXAMINE': ((AUTH, SELECTED), False), 'EXPUNGE': ((SELECTED,), True), 'FETCH': ((SELECTED,), True), 'GETACL': ((AUTH, SELECTED), True), 'GETANNOTATION':((AUTH, SELECTED), True), 'GETQUOTA': ((AUTH, SELECTED), True), 'GETQUOTAROOT': ((AUTH, SELECTED), True), 'ID': ((NONAUTH, AUTH, LOGOUT, SELECTED), True), 'IDLE': ((SELECTED,), False), 'LIST': ((AUTH, SELECTED), True), 'LOGIN': ((NONAUTH,), False), 'LOGOUT': ((NONAUTH, AUTH, LOGOUT, SELECTED), False), 'LSUB': ((AUTH, SELECTED), True), 'MYRIGHTS': ((AUTH, SELECTED), True), 'NAMESPACE': ((AUTH, SELECTED), True), 'NOOP': ((NONAUTH, AUTH, SELECTED), True), 'PARTIAL': ((SELECTED,), True), 'PROXYAUTH': ((AUTH,), False), 'RENAME': ((AUTH, SELECTED), True), 'SEARCH': ((SELECTED,), True), 'SELECT': ((AUTH, SELECTED), False), 'SETACL': ((AUTH, SELECTED), False), 'SETANNOTATION':((AUTH, SELECTED), True), 'SETQUOTA': ((AUTH, SELECTED), False), 'SORT': ((SELECTED,), True), 'STARTTLS': ((NONAUTH,), False), 'STATUS': ((AUTH, SELECTED), True), 'STORE': ((SELECTED,), True), 'SUBSCRIBE': ((AUTH, SELECTED), False), 'THREAD': ((SELECTED,), True), 'UID': ((SELECTED,), True), 'UNSUBSCRIBE': ((AUTH, SELECTED), False), } UID_direct = ('SEARCH', 'SORT', 'THREAD') def Int2AP(num): """string = Int2AP(num) Return 'num' converted to a string using 
characters from the set 'A'..'P' """ val, a2p = [], 'ABCDEFGHIJKLMNOP' num = int(abs(num)) while num: num, mod = divmod(num, 16) val.insert(0, a2p[mod]) return ''.join(val) class Request(object): """Private class to represent a request awaiting response.""" def __init__(self, parent, name=None, callback=None, cb_arg=None, cb_self=False): self.parent = parent self.name = name self.callback = callback # Function called to process result if not cb_self: self.callback_arg = cb_arg # Optional arg passed to "callback" else: self.callback_arg = (self, cb_arg) # Self reference required in callback arg self.tag = '%s%s' % (parent.tagpre, parent.tagnum) parent.tagnum += 1 self.ready = threading.Event() self.response = None self.aborted = None self.data = None def abort(self, typ, val): self.aborted = (typ, val) self.deliver(None) def get_response(self, exc_fmt=None): self.callback = None if __debug__: self.parent._log(3, '%s:%s.ready.wait' % (self.name, self.tag)) self.ready.wait() if self.aborted is not None: typ, val = self.aborted if exc_fmt is None: exc_fmt = '%s - %%s' % typ raise typ(exc_fmt % str(val)) return self.response def deliver(self, response): if self.callback is not None: self.callback((response, self.callback_arg, self.aborted)) return self.response = response self.ready.set() if __debug__: self.parent._log(3, '%s:%s.ready.set' % (self.name, self.tag)) class IMAP4(object): """Threaded IMAP4 client class. Instantiate with: IMAP4(host=None, port=None, debug=None, debug_file=None, identifier=None, timeout=None, debug_buf_lvl=None) host - host's name (default: localhost); port - port number (default: standard IMAP4 port); debug - debug level (default: 0 - no debug); debug_file - debug stream (default: sys.stderr); identifier - thread identifier prefix (default: host); timeout - timeout in seconds when expecting a command response (default: no timeout), debug_buf_lvl - debug level at which buffering is turned off. All IMAP4rev1 commands are supported by methods of the same name. Each command returns a tuple: (type, [data, ...]) where 'type' is usually 'OK' or 'NO', and 'data' is either the text from the tagged response, or untagged results from command. Each 'data' is either a string, or a tuple. If a tuple, then the first part is the header of the response, and the second part contains the data (ie: 'literal' value). Errors raise the exception class .error(""). IMAP4 server errors raise .abort(""), which is a sub-class of 'error'. Mailbox status changes from READ-WRITE to READ-ONLY raise the exception class .readonly(""), which is a sub-class of 'abort'. "error" exceptions imply a program error. "abort" exceptions imply the connection should be reset, and the command re-tried. "readonly" exceptions imply the command should be re-tried. All commands take two optional named arguments: 'callback' and 'cb_arg' If 'callback' is provided then the command is asynchronous, so after the command is queued for transmission, the call returns immediately with the tuple (None, None). The result will be posted by invoking "callback" with one arg, a tuple: callback((result, cb_arg, None)) or, if there was a problem: callback((None, cb_arg, (exception class, reason))) Otherwise the command is synchronous (waits for result). But note that state-changing commands will both block until previous commands have completed, and block subsequent commands until they have finished. 
All (non-callback) arguments to commands are converted to strings, except for AUTHENTICATE, and the last argument to APPEND which is passed as an IMAP4 literal. If necessary (the string contains any non-printing characters or white-space and isn't enclosed with either parentheses or double or single quotes) each string is quoted. However, the 'password' argument to the LOGIN command is always quoted. If you want to avoid having an argument string quoted (eg: the 'flags' argument to STORE) then enclose the string in parentheses (eg: "(\Deleted)"). If you are using "sequence sets" containing the wildcard character '*', then enclose the argument in single quotes: the quotes will be removed and the resulting string passed unquoted. Note also that you can pass in an argument with a type that doesn't evaluate to 'basestring' (eg: 'bytearray') and it will be converted to a string without quoting. There is one instance variable, 'state', that is useful for tracking whether the client needs to login to the server. If it has the value "AUTH" after instantiating the class, then the connection is pre-authenticated (otherwise it will be "NONAUTH"). Selecting a mailbox changes the state to be "SELECTED", closing a mailbox changes back to "AUTH", and once the client has logged out, the state changes to "LOGOUT" and no further commands may be issued. Note: to use this module, you must read the RFCs pertaining to the IMAP4 protocol, as the semantics of the arguments to each IMAP4 command are left to the invoker, not to mention the results. Also, most IMAP servers implement a sub-set of the commands available here. Note also that you must call logout() to shut down threads before discarding an instance. """ class error(Exception): pass # Logical errors - debug required class abort(error): pass # Service errors - close and retry class readonly(abort): pass # Mailbox status changed to READ-ONLY continuation_cre = re.compile(r'\+( (?P.*))?') literal_cre = re.compile(r'.*{(?P\d+)}$') mapCRLF_cre = re.compile(r'\r\n|\r|\n') # Need to quote "atom-specials" :- # "(" / ")" / "{" / SP / 0x00 - 0x1f / 0x7f / "%" / "*" / DQUOTE / "\" / "]" # so match not the inverse set mustquote_cre = re.compile(r"[^!#$&'+,./0-9:;<=>?@A-Z\[^_`a-z|}~-]") response_code_cre = re.compile(r'\[(?P[A-Z-]+)( (?P[^\]]*))?\]') # sequence_set_cre = re.compile(r"^[0-9]+(:([0-9]+|\*))?(,[0-9]+(:([0-9]+|\*))?)*$") untagged_response_cre = re.compile(r'\* (?P[A-Z-]+)( (?P.*))?') untagged_status_cre = re.compile(r'\* (?P\d+) (?P[A-Z-]+)( (?P.*))?') def __init__(self, host=None, port=None, debug=None, debug_file=None, identifier=None, timeout=None, debug_buf_lvl=None): self.state = NONAUTH # IMAP4 protocol state self.literal = None # A literal argument to a command self.tagged_commands = {} # Tagged commands awaiting response self.untagged_responses = [] # [[typ: [data, ...]], ...] self.mailbox = None # Current mailbox selected self.mailboxes = {} # Untagged responses state per mailbox self.is_readonly = False # READ-ONLY desired state self.idle_rqb = None # Server IDLE Request - see _IdleCont self.idle_timeout = None # Must prod server occasionally self._expecting_data = 0 # Expecting message data self._accumulated_data = [] # Message data accumulated so far self._literal_expected = None # Message data descriptor self.compressor = None # COMPRESS/DEFLATE if not None self.decompressor = None # Create unique tag for this session, # and compile tagged response matcher. 
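# ---------------------------------------------------------------------
# Example sketch for illustration only (not part of imaplib2 itself):
# the synchronous/asynchronous calling convention documented in the
# class docstring above. With callback= the command returns (None, None)
# at once and later hands (result, cb_arg, error) to the callback;
# without a callback it blocks and returns (typ, [data]). Host name,
# credentials and the helper name are made up.
def example_async_noop(host, user, password):
    responses = []

    def done(response):
        # response is the (result, cb_arg, error) tuple described above
        responses.append(response)

    M = IMAP4(host)
    M.login(user, password)
    M.select('INBOX')
    M.noop(callback=done, cb_arg='noop-1')   # returns immediately
    typ, data = M.noop()                     # blocks until the result is in
    M.logout()
    return typ, data, responses
# ---------------------------------------------------------------------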
self.tagnum = 0 self.tagpre = Int2AP(random.randint(4096, 65535)) self.tagre = re.compile(r'(?P' + self.tagpre + r'\d+) (?P[A-Z]+) (?P.*)') if __debug__: self._init_debug(debug, debug_file, debug_buf_lvl) self.resp_timeout = timeout # Timeout waiting for command response if timeout is not None and timeout < READ_POLL_TIMEOUT: self.read_poll_timeout = timeout else: self.read_poll_timeout = READ_POLL_TIMEOUT self.read_size = READ_SIZE # Open socket to server. self.open(host, port) if __debug__: if debug: self._mesg('connected to %s on port %s' % (self.host, self.port)) # Threading if identifier is not None: self.identifier = identifier else: self.identifier = self.host if self.identifier: self.identifier += ' ' self.Terminate = self.TerminateReader = False self.state_change_free = threading.Event() self.state_change_pending = threading.Lock() self.commands_lock = threading.Lock() self.idle_lock = threading.Lock() self.ouq = Queue.Queue(10) self.inq = Queue.Queue() self.wrth = threading.Thread(target=self._writer) self.wrth.setDaemon(True) self.wrth.start() self.rdth = threading.Thread(target=self._reader) self.rdth.setDaemon(True) self.rdth.start() self.inth = threading.Thread(target=self._handler) self.inth.setDaemon(True) self.inth.start() # Get server welcome message, # request and store CAPABILITY response. try: self.welcome = self._request_push(tag='continuation').get_response('IMAP4 protocol error: %s')[1] if self._get_untagged_response('PREAUTH'): self.state = AUTH if __debug__: self._log(1, 'state => AUTH') elif self._get_untagged_response('OK'): if __debug__: self._log(1, 'state => NONAUTH') else: raise self.error('unrecognised server welcome message: %s' % `self.welcome`) typ, dat = self.capability() if dat == [None]: raise self.error('no CAPABILITY response from server') self.capabilities = tuple(dat[-1].upper().split()) if __debug__: self._log(1, 'CAPABILITY: %r' % (self.capabilities,)) for version in AllowedVersions: if not version in self.capabilities: continue self.PROTOCOL_VERSION = version break else: raise self.error('server not IMAP4 compliant') except: self._close_threads() raise def __getattr__(self, attr): # Allow UPPERCASE variants of IMAP4 command methods. if attr in Commands: return getattr(self, attr.lower()) raise AttributeError("Unknown IMAP4 command: '%s'" % attr) # Overridable methods def open(self, host=None, port=None): """open(host=None, port=None) Setup connection to remote server on "host:port" (default: localhost:standard IMAP4 port). This connection will be used by the routines: read, send, shutdown, socket.""" self.host = self._choose_nonull_or_dflt('', host) self.port = self._choose_nonull_or_dflt(IMAP4_PORT, port) self.sock = self.open_socket() self.read_fd = self.sock.fileno() def open_socket(self): """open_socket() Open socket choosing first address family available.""" msg = (-1, 'could not open socket') for res in socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM): af, socktype, proto, canonname, sa = res try: s = socket.socket(af, socktype, proto) except socket.error, msg: continue try: for i in (0, 1): try: s.connect(sa) break except socket.error, msg: if len(msg.args) < 2 or msg.args[0] != errno.EINTR: raise else: raise socket.error(msg) except socket.error, msg: s.close() continue break else: raise socket.error(msg) return s def ssl_wrap_socket(self): # Allow sending of keep-alive messages - seems to prevent some servers # from closing SSL, leading to deadlocks. 
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) try: import ssl if self.ca_certs is not None: cert_reqs = ssl.CERT_REQUIRED else: cert_reqs = ssl.CERT_NONE self.sock = ssl.wrap_socket(self.sock, self.keyfile, self.certfile, ca_certs=self.ca_certs, cert_reqs=cert_reqs) ssl_exc = ssl.SSLError self.read_fd = self.sock.fileno() except ImportError: # No ssl module, and socket.ssl has no fileno(), and does not allow certificate verification raise socket.sslerror("imaplib2 SSL mode does not work without ssl module") if self.cert_verify_cb is not None: cert_err = self.cert_verify_cb(self.sock.getpeercert(), self.host) if cert_err: raise ssl_exc(cert_err) def start_compressing(self): """start_compressing() Enable deflate compression on the socket (RFC 4978).""" # rfc 1951 - pure DEFLATE, so use -15 for both windows self.decompressor = zlib.decompressobj(-15) self.compressor = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -15) def read(self, size): """data = read(size) Read at most 'size' bytes from remote.""" if self.decompressor is None: return self.sock.recv(size) if self.decompressor.unconsumed_tail: data = self.decompressor.unconsumed_tail else: data = self.sock.recv(READ_SIZE) return self.decompressor.decompress(data, size) def send(self, data): """send(data) Send 'data' to remote.""" if self.compressor is not None: data = self.compressor.compress(data) data += self.compressor.flush(zlib.Z_SYNC_FLUSH) self.sock.sendall(data) def shutdown(self): """shutdown() Close I/O established in "open".""" self.sock.close() def socket(self): """socket = socket() Return socket instance used to connect to IMAP4 server.""" return self.sock # Utility methods def enable_compression(self): """enable_compression() Ask the server to start compressing the connection. Should be called from user of this class after instantiation, as in: if 'COMPRESS=DEFLATE' in imapobj.capabilities: imapobj.enable_compression()""" try: typ, dat = self._simple_command('COMPRESS', 'DEFLATE') if typ == 'OK': self.start_compressing() if __debug__: self._log(1, 'Enabled COMPRESS=DEFLATE') finally: self._release_state_change() def pop_untagged_responses(self): """ for typ,data in pop_untagged_responses(): pass Generator for any remaining untagged responses. Returns and removes untagged responses in order of reception. Use at your own risk!""" while self.untagged_responses: self.commands_lock.acquire() try: yield self.untagged_responses.pop(0) finally: self.commands_lock.release() def recent(self, **kw): """(typ, [data]) = recent() Return 'RECENT' responses if any exist, else prompt server for an update using the 'NOOP' command. 'data' is None if no new messages, else list of RECENT responses, most recent last.""" name = 'RECENT' typ, dat = self._untagged_response(None, [None], name) if dat != [None]: return self._deliver_dat(typ, dat, kw) kw['untagged_response'] = name return self.noop(**kw) # Prod server for response def response(self, code, **kw): """(code, [data]) = response(code) Return data for response 'code' if received, or None. Old value for response 'code' is cleared.""" typ, dat = self._untagged_response(code, [None], code.upper()) return self._deliver_dat(typ, dat, kw) # IMAP4 commands def append(self, mailbox, flags, date_time, message, **kw): """(typ, [data]) = append(mailbox, flags, date_time, message) Append message to named mailbox. 
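The cert_verify_cb callable used by ssl_wrap_socket() above receives the dictionary returned by the socket's getpeercert() plus the expected host name, and should return an error string to reject the connection or None to accept it. A minimal sketch; the matching rule is only an illustration and the host and CA path are hypothetical:

    from offlineimap.imaplib2 import IMAP4_SSL

    def verify_cert(peercert, host):
        # peercert is the dict form returned by getpeercert()
        for rdn in peercert.get('subject', ()):
            for key, value in rdn:
                if key == 'commonName' and value == host:
                    return None                 # accept
        return "certificate CN does not match '%s'" % host

    M = IMAP4_SSL('imap.example.org', ca_certs='/etc/ssl/certs/ca-bundle.crt',
                  cert_verify_cb=verify_cert)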
All args except `message' can be None.""" name = 'APPEND' if not mailbox: mailbox = 'INBOX' if flags: if (flags[0],flags[-1]) != ('(',')'): flags = '(%s)' % flags else: flags = None if date_time: date_time = Time2Internaldate(date_time) else: date_time = None self.literal = self.mapCRLF_cre.sub(CRLF, message) try: return self._simple_command(name, mailbox, flags, date_time, **kw) finally: self._release_state_change() def authenticate(self, mechanism, authobject, **kw): """(typ, [data]) = authenticate(mechanism, authobject) Authenticate command - requires response processing. 'mechanism' specifies which authentication mechanism is to be used - it must appear in .capabilities in the form AUTH=. 'authobject' must be a callable object: data = authobject(response) It will be called to process server continuation responses. It should return data that will be encoded and sent to server. It should return None if the client abort response '*' should be sent instead.""" self.literal = _Authenticator(authobject).process try: typ, dat = self._simple_command('AUTHENTICATE', mechanism.upper()) if typ != 'OK': self._deliver_exc(self.error, dat[-1], kw) self.state = AUTH if __debug__: self._log(1, 'state => AUTH') finally: self._release_state_change() return self._deliver_dat(typ, dat, kw) def capability(self, **kw): """(typ, [data]) = capability() Fetch capabilities list from server.""" name = 'CAPABILITY' kw['untagged_response'] = name return self._simple_command(name, **kw) def check(self, **kw): """(typ, [data]) = check() Checkpoint mailbox on server.""" return self._simple_command('CHECK', **kw) def close(self, **kw): """(typ, [data]) = close() Close currently selected mailbox. Deleted messages are removed from writable mailbox. This is the recommended command before 'LOGOUT'.""" if self.state != 'SELECTED': raise self.error('No mailbox selected.') try: typ, dat = self._simple_command('CLOSE') finally: self.state = AUTH if __debug__: self._log(1, 'state => AUTH') self._release_state_change() return self._deliver_dat(typ, dat, kw) def copy(self, message_set, new_mailbox, **kw): """(typ, [data]) = copy(message_set, new_mailbox) Copy 'message_set' messages onto end of 'new_mailbox'.""" return self._simple_command('COPY', message_set, new_mailbox, **kw) def create(self, mailbox, **kw): """(typ, [data]) = create(mailbox) Create new mailbox.""" return self._simple_command('CREATE', mailbox, **kw) def delete(self, mailbox, **kw): """(typ, [data]) = delete(mailbox) Delete old mailbox.""" return self._simple_command('DELETE', mailbox, **kw) def deleteacl(self, mailbox, who, **kw): """(typ, [data]) = deleteacl(mailbox, who) Delete the ACLs (remove any rights) set for who on mailbox.""" return self._simple_command('DELETEACL', mailbox, who, **kw) def examine(self, mailbox='INBOX', **kw): """(typ, [data]) = examine(mailbox='INBOX') Select a mailbox for READ-ONLY access. (Flushes all untagged responses.) 'data' is count of messages in mailbox ('EXISTS' response). Mandated responses are ('FLAGS', 'EXISTS', 'RECENT', 'UIDVALIDITY'), so other responses should be obtained via "response('FLAGS')" etc.""" return self.select(mailbox=mailbox, readonly=True, **kw) def expunge(self, **kw): """(typ, [data]) = expunge() Permanently remove deleted items from selected mailbox. Generates 'EXPUNGE' response for each deleted message. 
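A sketch of an 'authobject' as described for authenticate() above, assuming 'M' is a freshly connected instance: the callable receives the decoded server continuation data and returns the raw client response (base64 encoding is handled internally), or None to abort. The SASL PLAIN layout shown is standard, but treat the snippet as illustrative; the credentials are hypothetical:

    user, password = 'user', 'secret'           # hypothetical credentials

    def plain_auth(server_response):
        # SASL PLAIN: authzid NUL authcid NUL password
        return '\0%s\0%s' % (user, password)

    if 'AUTH=PLAIN' in M.capabilities:
        M.authenticate('PLAIN', plain_auth)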
'data' is list of 'EXPUNGE'd message numbers in order received.""" name = 'EXPUNGE' kw['untagged_response'] = name return self._simple_command(name, **kw) def fetch(self, message_set, message_parts, **kw): """(typ, [data, ...]) = fetch(message_set, message_parts) Fetch (parts of) messages. 'message_parts' should be a string of selected parts enclosed in parentheses, eg: "(UID BODY[TEXT])". 'data' are tuples of message part envelope and data, followed by a string containing the trailer.""" name = 'FETCH' kw['untagged_response'] = name return self._simple_command(name, message_set, message_parts, **kw) def getacl(self, mailbox, **kw): """(typ, [data]) = getacl(mailbox) Get the ACLs for a mailbox.""" kw['untagged_response'] = 'ACL' return self._simple_command('GETACL', mailbox, **kw) def getannotation(self, mailbox, entry, attribute, **kw): """(typ, [data]) = getannotation(mailbox, entry, attribute) Retrieve ANNOTATIONs.""" kw['untagged_response'] = 'ANNOTATION' return self._simple_command('GETANNOTATION', mailbox, entry, attribute, **kw) def getquota(self, root, **kw): """(typ, [data]) = getquota(root) Get the quota root's resource usage and limits. (Part of the IMAP4 QUOTA extension defined in rfc2087.)""" kw['untagged_response'] = 'QUOTA' return self._simple_command('GETQUOTA', root, **kw) def getquotaroot(self, mailbox, **kw): # Hmmm, this is non-std! Left for backwards-compatibility, sigh. # NB: usage should have been defined as: # (typ, [QUOTAROOT responses...]) = getquotaroot(mailbox) # (typ, [QUOTA responses...]) = response('QUOTA') """(typ, [[QUOTAROOT responses...], [QUOTA responses...]]) = getquotaroot(mailbox) Get the list of quota roots for the named mailbox.""" typ, dat = self._simple_command('GETQUOTAROOT', mailbox) typ, quota = self._untagged_response(typ, dat, 'QUOTA') typ, quotaroot = self._untagged_response(typ, dat, 'QUOTAROOT') return self._deliver_dat(typ, [quotaroot, quota], kw) def id(self, *kv_pairs, **kw): """(typ, [data]) = .id(kv_pairs) 'kv_pairs' is a possibly empty list of keys and values. 'data' is a list of ID key value pairs or NIL. NB: a single argument is assumed to be correctly formatted and is passed through unchanged (for backward compatibility with earlier version). Exchange information for problem analysis and determination. The ID extension is defined in RFC 2971. """ name = 'ID' kw['untagged_response'] = name if not kv_pairs: data = 'NIL' elif len(kv_pairs) == 1: data = kv_pairs[0] # Assume invoker passing correctly formatted string (back-compat) else: data = '(%s)' % ' '.join([(arg and self._quote(arg) or 'NIL') for arg in kv_pairs]) return self._simple_command(name, (data,), **kw) def idle(self, timeout=None, **kw): """"(typ, [data]) = idle(timeout=None) Put server into IDLE mode until server notifies some change, or 'timeout' (secs) occurs (default: 29 minutes), or another IMAP4 command is scheduled.""" name = 'IDLE' self.literal = _IdleCont(self, timeout).process try: return self._simple_command(name, **kw) finally: self._release_state_change() def list(self, directory='""', pattern='*', **kw): """(typ, [data]) = list(directory='""', pattern='*') List mailbox names in directory matching pattern. 'data' is list of LIST responses. 
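A sketch of IDLE usage as implemented above, assuming 'M' is an authenticated IMAP4 instance. Called synchronously it blocks until the server reports a change or the timeout expires; passing the 'callback' keyword (the convention used throughout this module) makes it asynchronous, and scheduling any other command ends the IDLE:

    def idle_done((response, cb_arg, error)):   # hypothetical callback
        if error is None:
            typ, dat = response
            print 'IDLE finished:', typ, dat

    M.select('INBOX')
    M.idle(timeout=60, callback=idle_done)      # returns immediately
    # ... do other work ...
    M.noop()                                    # terminates the pending IDLE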
NB: for 'pattern': % matches all except separator ( so LIST "" "%" returns names at root) * matches all (so LIST "" "*" returns whole directory tree from root)""" name = 'LIST' kw['untagged_response'] = name return self._simple_command(name, directory, pattern, **kw) def login(self, user, password, **kw): """(typ, [data]) = login(user, password) Identify client using plaintext password. NB: 'password' will be quoted.""" try: typ, dat = self._simple_command('LOGIN', user, self._quote(password)) if typ != 'OK': self._deliver_exc(self.error, dat[-1], kw) self.state = AUTH if __debug__: self._log(1, 'state => AUTH') finally: self._release_state_change() return self._deliver_dat(typ, dat, kw) def login_cram_md5(self, user, password, **kw): """(typ, [data]) = login_cram_md5(user, password) Force use of CRAM-MD5 authentication.""" self.user, self.password = user, password return self.authenticate('CRAM-MD5', self._CRAM_MD5_AUTH, **kw) def _CRAM_MD5_AUTH(self, challenge): """Authobject to use with CRAM-MD5 authentication.""" import hmac return self.user + " " + hmac.HMAC(self.password, challenge).hexdigest() def logout(self, **kw): """(typ, [data]) = logout() Shutdown connection to server. Returns server 'BYE' response. NB: You must call this to shut down threads before discarding an instance.""" self.state = LOGOUT if __debug__: self._log(1, 'state => LOGOUT') try: try: typ, dat = self._simple_command('LOGOUT') except: typ, dat = 'NO', ['%s: %s' % sys.exc_info()[:2]] if __debug__: self._log(1, dat) self._close_threads() finally: self._release_state_change() if __debug__: self._log(1, 'connection closed') bye = self._get_untagged_response('BYE', leave=True) if bye: typ, dat = 'BYE', bye return self._deliver_dat(typ, dat, kw) def lsub(self, directory='""', pattern='*', **kw): """(typ, [data, ...]) = lsub(directory='""', pattern='*') List 'subscribed' mailbox names in directory matching pattern. 'data' are tuples of message part envelope and data.""" name = 'LSUB' kw['untagged_response'] = name return self._simple_command(name, directory, pattern, **kw) def myrights(self, mailbox, **kw): """(typ, [data]) = myrights(mailbox) Show my ACLs for a mailbox (i.e. the rights that I have on mailbox).""" name = 'MYRIGHTS' kw['untagged_response'] = name return self._simple_command(name, mailbox, **kw) def namespace(self, **kw): """(typ, [data, ...]) = namespace() Returns IMAP namespaces ala rfc2342.""" name = 'NAMESPACE' kw['untagged_response'] = name return self._simple_command(name, **kw) def noop(self, **kw): """(typ, [data]) = noop() Send NOOP command.""" if __debug__: self._dump_ur(3) return self._simple_command('NOOP', **kw) def partial(self, message_num, message_part, start, length, **kw): """(typ, [data, ...]) = partial(message_num, message_part, start, length) Fetch truncated part of a message. 'data' is tuple of message part envelope and data. NB: obsolete.""" name = 'PARTIAL' kw['untagged_response'] = 'FETCH' return self._simple_command(name, message_num, message_part, start, length, **kw) def proxyauth(self, user, **kw): """(typ, [data]) = proxyauth(user) Assume authentication as 'user'. 
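A sketch of the LIST/LSUB pattern behaviour described above, assuming 'M' is an authenticated instance (mailbox layout hypothetical):

    typ, data = M.list('""', '%')     # names at the root only
    typ, data = M.list('""', '*')     # the whole mailbox tree
    typ, data = M.lsub('""', '*')     # subscribed mailboxes only
    if data != [None]:
        for line in data:
            print line                # e.g. '(\HasNoChildren) "/" "INBOX"'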
(Allows an authorised administrator to proxy into any user's mailbox.)""" try: return self._simple_command('PROXYAUTH', user, **kw) finally: self._release_state_change() def rename(self, oldmailbox, newmailbox, **kw): """(typ, [data]) = rename(oldmailbox, newmailbox) Rename old mailbox name to new.""" return self._simple_command('RENAME', oldmailbox, newmailbox, **kw) def search(self, charset, *criteria, **kw): """(typ, [data]) = search(charset, criterion, ...) Search mailbox for matching messages. 'data' is space separated list of matching message numbers.""" name = 'SEARCH' kw['untagged_response'] = name if charset: return self._simple_command(name, 'CHARSET', charset, *criteria, **kw) return self._simple_command(name, *criteria, **kw) def select(self, mailbox='INBOX', readonly=False, **kw): """(typ, [data]) = select(mailbox='INBOX', readonly=False) Select a mailbox. (Restores any previous untagged responses.) 'data' is count of messages in mailbox ('EXISTS' response). Mandated responses are ('FLAGS', 'EXISTS', 'RECENT', 'UIDVALIDITY'), so other responses should be obtained via "response('FLAGS')" etc.""" self.commands_lock.acquire() # Save state of old mailbox, restore state for new... self.mailboxes[self.mailbox] = self.untagged_responses self.untagged_responses = self.mailboxes.setdefault(mailbox, []) self.commands_lock.release() self.mailbox = mailbox self.is_readonly = readonly and True or False if readonly: name = 'EXAMINE' else: name = 'SELECT' try: rqb = self._command(name, mailbox) typ, dat = rqb.get_response('command: %s => %%s' % rqb.name) if typ != 'OK': if self.state == SELECTED: self.state = AUTH if __debug__: self._log(1, 'state => AUTH') if typ == 'BAD': self._deliver_exc(self.error, '%s command error: %s %s. Data: %.100s' % (name, typ, dat, mailbox), kw) return self._deliver_dat(typ, dat, kw) self.state = SELECTED if __debug__: self._log(1, 'state => SELECTED') finally: self._release_state_change() if self._get_untagged_response('READ-ONLY', leave=True) and not readonly: if __debug__: self._dump_ur(1) self._deliver_exc(self.readonly, '%s is not writable' % mailbox, kw) typ, dat = self._untagged_response(typ, [None], 'EXISTS') return self._deliver_dat(typ, dat, kw) def setacl(self, mailbox, who, what, **kw): """(typ, [data]) = setacl(mailbox, who, what) Set a mailbox acl.""" try: return self._simple_command('SETACL', mailbox, who, what, **kw) finally: self._release_state_change() def setannotation(self, *args, **kw): """(typ, [data]) = setannotation(mailbox[, entry, attribute]+) Set ANNOTATIONs.""" kw['untagged_response'] = 'ANNOTATION' return self._simple_command('SETANNOTATION', *args, **kw) def setquota(self, root, limits, **kw): """(typ, [data]) = setquota(root, limits) Set the quota root's resource limits.""" kw['untagged_response'] = 'QUOTA' try: return self._simple_command('SETQUOTA', root, limits, **kw) finally: self._release_state_change() def sort(self, sort_criteria, charset, *search_criteria, **kw): """(typ, [data]) = sort(sort_criteria, charset, search_criteria, ...) 
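A sketch of SELECT followed by SEARCH using the methods above; matching message numbers come back as a single space-separated string:

    typ, data = M.select('INBOX')               # data holds the EXISTS count
    typ, data = M.search(None, 'UNSEEN')        # no CHARSET argument
    if typ == 'OK' and data[0]:
        for num in data[0].split():
            typ, msg = M.fetch(num, '(RFC822.HEADER)')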
IMAP4rev1 extension SORT command.""" name = 'SORT' if (sort_criteria[0],sort_criteria[-1]) != ('(',')'): sort_criteria = '(%s)' % sort_criteria kw['untagged_response'] = name return self._simple_command(name, sort_criteria, charset, *search_criteria, **kw) def starttls(self, keyfile=None, certfile=None, ca_certs=None, cert_verify_cb=None, **kw): """(typ, [data]) = starttls(keyfile=None, certfile=None, ca_certs=None, cert_verify_cb=None) Start TLS negotiation as per RFC 2595.""" name = 'STARTTLS' if name not in self.capabilities: raise self.abort('TLS not supported by server') if hasattr(self, '_tls_established') and self._tls_established: raise self.abort('TLS session already established') # Must now shutdown reader thread after next response, and restart after changing read_fd self.read_size = 1 # Don't consume TLS handshake self.TerminateReader = True try: typ, dat = self._simple_command(name) finally: self._release_state_change() self.rdth.join() self.TerminateReader = False self.read_size = READ_SIZE if typ != 'OK': # Restart reader thread and error self.rdth = threading.Thread(target=self._reader) self.rdth.setDaemon(True) self.rdth.start() raise self.error("Couldn't establish TLS session: %s" % dat) self.keyfile = keyfile self.certfile = certfile self.ca_certs = ca_certs self.cert_verify_cb = cert_verify_cb try: self.ssl_wrap_socket() finally: # Restart reader thread self.rdth = threading.Thread(target=self._reader) self.rdth.setDaemon(True) self.rdth.start() typ, dat = self.capability() if dat == [None]: raise self.error('no CAPABILITY response from server') self.capabilities = tuple(dat[-1].upper().split()) self._tls_established = True typ, dat = self._untagged_response(typ, dat, name) return self._deliver_dat(typ, dat, kw) def status(self, mailbox, names, **kw): """(typ, [data]) = status(mailbox, names) Request named status conditions for mailbox.""" name = 'STATUS' kw['untagged_response'] = name return self._simple_command(name, mailbox, names, **kw) def store(self, message_set, command, flags, **kw): """(typ, [data]) = store(message_set, command, flags) Alters flag dispositions for messages in mailbox.""" if (flags[0],flags[-1]) != ('(',')'): flags = '(%s)' % flags # Avoid quoting the flags kw['untagged_response'] = 'FETCH' return self._simple_command('STORE', message_set, command, flags, **kw) def subscribe(self, mailbox, **kw): """(typ, [data]) = subscribe(mailbox) Subscribe to new mailbox.""" try: return self._simple_command('SUBSCRIBE', mailbox, **kw) finally: self._release_state_change() def thread(self, threading_algorithm, charset, *search_criteria, **kw): """(type, [data]) = thread(threading_alogrithm, charset, search_criteria, ...) IMAPrev1 extension THREAD command.""" name = 'THREAD' kw['untagged_response'] = name return self._simple_command(name, threading_algorithm, charset, *search_criteria, **kw) def uid(self, command, *args, **kw): """(typ, [data]) = uid(command, arg, ...) Execute "command arg ..." with messages identified by UID, rather than message number. Assumes 'command' is legal in current state. 
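A sketch of the STARTTLS upgrade path implemented above: open a plain connection, negotiate TLS (CAPABILITY is re-issued automatically), then authenticate. The host, CA path and credentials are hypothetical:

    from offlineimap.imaplib2 import IMAP4, IMAP4_PORT

    M = IMAP4('imap.example.org', IMAP4_PORT)
    if 'STARTTLS' in M.capabilities:
        M.starttls(ca_certs='/etc/ssl/certs/ca-bundle.crt')
        M.login('user', 'secret')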
Returns response appropriate to 'command'.""" command = command.upper() if command in UID_direct: resp = command else: resp = 'FETCH' kw['untagged_response'] = resp return self._simple_command('UID', command, *args, **kw) def unsubscribe(self, mailbox, **kw): """(typ, [data]) = unsubscribe(mailbox) Unsubscribe from old mailbox.""" try: return self._simple_command('UNSUBSCRIBE', mailbox, **kw) finally: self._release_state_change() def xatom(self, name, *args, **kw): """(typ, [data]) = xatom(name, arg, ...) Allow simple extension commands notified by server in CAPABILITY response. Assumes extension command 'name' is legal in current state. Returns response appropriate to extension command 'name'.""" name = name.upper() if not name in Commands: Commands[name] = ((self.state,), False) try: return self._simple_command(name, *args, **kw) finally: self._release_state_change() # Internal methods def _append_untagged(self, typ, dat): # Append new 'dat' to end of last untagged response if same 'typ', # else append new response. if dat is None: dat = '' self.commands_lock.acquire() if self.untagged_responses: urn, urd = self.untagged_responses[-1] if urn != typ: urd = None else: urd = None if urd is None: urd = [] self.untagged_responses.append([typ, urd]) urd.append(dat) self.commands_lock.release() if __debug__: self._log(5, 'untagged_responses[%s] %s += ["%s"]' % (typ, len(urd)-1, dat)) def _check_bye(self): bye = self._get_untagged_response('BYE', leave=True) if bye: raise self.abort(bye[-1]) def _checkquote(self, arg): # Must quote command args if "atom-specials" present, # and not already quoted. NB: single quotes are removed. if not isinstance(arg, basestring): return arg if len(arg) >= 2 and (arg[0],arg[-1]) in (('(',')'),('"','"')): return arg if len(arg) >= 2 and (arg[0],arg[-1]) in (("'","'"),): return arg[1:-1] if arg and self.mustquote_cre.search(arg) is None: return arg return self._quote(arg) def _choose_nonull_or_dflt(self, dflt, *args): if isinstance(dflt, basestring): dflttyp = basestring # Allow any string type else: dflttyp = type(dflt) for arg in args: if arg is not None: if isinstance(arg, dflttyp): return arg if __debug__: self._log(0, 'bad arg is %s, expecting %s' % (type(arg), dflttyp)) return dflt def _command(self, name, *args, **kw): if Commands[name][CMD_VAL_ASYNC]: cmdtyp = 'async' else: cmdtyp = 'sync' if __debug__: self._log(1, '[%s] %s %s' % (cmdtyp, name, args)) if __debug__: self._log(3, 'state_change_pending.acquire') self.state_change_pending.acquire() self._end_idle() if cmdtyp == 'async': self.state_change_pending.release() if __debug__: self._log(3, 'state_change_pending.release') else: # Need to wait for all async commands to complete self._check_bye() self.commands_lock.acquire() if self.tagged_commands: self.state_change_free.clear() need_event = True else: need_event = False self.commands_lock.release() if need_event: if __debug__: self._log(3, 'sync command %s waiting for empty commands Q' % name) self.state_change_free.wait() if __debug__: self._log(3, 'sync command %s proceeding' % name) if self.state not in Commands[name][CMD_VAL_STATES]: self.literal = None raise self.error('command %s illegal in state %s' % (name, self.state)) self._check_bye() for typ in ('OK', 'NO', 'BAD'): self._get_untagged_response(typ) if self._get_untagged_response('READ-ONLY', leave=True) and not self.is_readonly: self.literal = None raise self.readonly('mailbox status changed to READ-ONLY') if self.Terminate: raise self.abort('connection closed') rqb = 
self._request_push(name=name, **kw) data = '%s %s' % (rqb.tag, name) for arg in args: if arg is None: continue data = '%s %s' % (data, self._checkquote(arg)) literal = self.literal if literal is not None: self.literal = None if isinstance(literal, basestring): literator = None data = '%s {%s}' % (data, len(literal)) else: literator = literal if __debug__: self._log(4, 'data=%s' % data) rqb.data = '%s%s' % (data, CRLF) if literal is None: self.ouq.put(rqb) return rqb # Must setup continuation expectancy *before* ouq.put crqb = self._request_push(tag='continuation') self.ouq.put(rqb) while True: # Wait for continuation response ok, data = crqb.get_response('command: %s => %%s' % name) if __debug__: self._log(4, 'continuation => %s, %s' % (ok, data)) # NO/BAD response? if not ok: break # Send literal if literator is not None: literal = literator(data, rqb) if literal is None: break if literator is not None: # Need new request for next continuation response crqb = self._request_push(tag='continuation') if __debug__: self._log(4, 'write literal size %s' % len(literal)) crqb.data = '%s%s' % (literal, CRLF) self.ouq.put(crqb) if literator is None: break return rqb def _command_complete(self, rqb, kw): # Called for non-callback commands typ, dat = rqb.get_response('command: %s => %%s' % rqb.name) self._check_bye() if typ == 'BAD': if __debug__: self._print_log() raise self.error('%s command error: %s %s. Data: %.100s' % (rqb.name, typ, dat, rqb.data)) if 'untagged_response' in kw: return self._untagged_response(typ, dat, kw['untagged_response']) return typ, dat def _command_completer(self, (response, cb_arg, error)): # Called for callback commands rqb, kw = cb_arg rqb.callback = kw['callback'] rqb.callback_arg = kw.get('cb_arg') if error is not None: if __debug__: self._print_log() typ, val = error rqb.abort(typ, val) return bye = self._get_untagged_response('BYE', leave=True) if bye: rqb.abort(self.abort, bye[-1]) return typ, dat = response if typ == 'BAD': if __debug__: self._print_log() rqb.abort(self.error, '%s command error: %s %s. Data: %.100s' % (rqb.name, typ, dat, rqb.data)) return if 'untagged_response' in kw: response = self._untagged_response(typ, dat, kw['untagged_response']) rqb.deliver(response) def _deliver_dat(self, typ, dat, kw): if 'callback' in kw: kw['callback'](((typ, dat), kw.get('cb_arg'), None)) return typ, dat def _deliver_exc(self, exc, dat, kw): if 'callback' in kw: kw['callback']((None, kw.get('cb_arg'), (exc, dat))) raise exc(dat) def _end_idle(self): self.idle_lock.acquire() irqb = self.idle_rqb if irqb is None: self.idle_lock.release() return self.idle_rqb = None self.idle_timeout = None self.idle_lock.release() irqb.data = 'DONE%s' % CRLF self.ouq.put(irqb) if __debug__: self._log(2, 'server IDLE finished') def _get_untagged_response(self, name, leave=False): self.commands_lock.acquire() for i, (typ, dat) in enumerate(self.untagged_responses): if typ == name: if not leave: del self.untagged_responses[i] self.commands_lock.release() if __debug__: self._log(5, '_get_untagged_response(%s) => %s' % (name, dat)) return dat self.commands_lock.release() return None def _match(self, cre, s): # Run compiled regular expression 'cre' match method on 's'. # Save result, return success. 
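The literal/continuation handling above is what lets APPEND ship a whole message body: append() stores the message in self.literal and _command() sends it after the server's '+' continuation prompt. A usage sketch, assuming 'M' is an authenticated instance; the mailbox name and message are hypothetical:

    import time
    from offlineimap.imaplib2 import Time2Internaldate

    msg = 'From: user@localhost\r\nSubject: test\r\n\r\nbody\r\n'
    M.append('INBOX.Drafts', '(\Seen)', Time2Internaldate(time.time()), msg)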
self.mo = cre.match(s) return self.mo is not None def _put_response(self, resp): if self._expecting_data > 0: rlen = len(resp) dlen = min(self._expecting_data, rlen) self._expecting_data -= dlen if rlen <= dlen: self._accumulated_data.append(resp) return self._accumulated_data.append(resp[:dlen]) resp = resp[dlen:] if self._accumulated_data: typ, dat = self._literal_expected self._append_untagged(typ, (dat, ''.join(self._accumulated_data))) self._accumulated_data = [] # Protocol mandates all lines terminated by CRLF resp = resp[:-2] if 'continuation' in self.tagged_commands: continuation_expected = True else: continuation_expected = False if self._literal_expected is not None: dat = resp if self._match(self.literal_cre, dat): self._literal_expected[1] = dat self._expecting_data = int(self.mo.group('size')) if __debug__: self._log(4, 'expecting literal size %s' % self._expecting_data) return typ = self._literal_expected[0] self._literal_expected = None self._append_untagged(typ, dat) # Tail if __debug__: self._log(4, 'literal completed') else: # Command completion response? if self._match(self.tagre, resp): tag = self.mo.group('tag') typ = self.mo.group('type') dat = self.mo.group('data') if not tag in self.tagged_commands: if __debug__: self._log(1, 'unexpected tagged response: %s' % resp) else: self._request_pop(tag, (typ, [dat])) else: dat2 = None # '*' (untagged) responses? if not self._match(self.untagged_response_cre, resp): if self._match(self.untagged_status_cre, resp): dat2 = self.mo.group('data2') if self.mo is None: # Only other possibility is '+' (continuation) response... if self._match(self.continuation_cre, resp): if not continuation_expected: if __debug__: self._log(1, "unexpected continuation response: '%s'" % resp) return self._request_pop('continuation', (True, self.mo.group('data'))) return if __debug__: self._log(1, "unexpected response: '%s'" % resp) return typ = self.mo.group('type') dat = self.mo.group('data') if dat is None: dat = '' # Null untagged response if dat2: dat = dat + ' ' + dat2 # Is there a literal to come? if self._match(self.literal_cre, dat): self._expecting_data = int(self.mo.group('size')) if __debug__: self._log(4, 'read literal size %s' % self._expecting_data) self._literal_expected = [typ, dat] return self._append_untagged(typ, dat) if typ != 'OK': # NO, BYE, IDLE self._end_idle() # Bracketed response information? if typ in ('OK', 'NO', 'BAD') and self._match(self.response_code_cre, dat): self._append_untagged(self.mo.group('type'), self.mo.group('data')) # Command waiting for aborted continuation response? if continuation_expected: self._request_pop('continuation', (False, resp)) # Bad news? 
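Untagged data filed by the response parsing above is what response() and the 'untagged_response' keyword hand back to callers. A small sketch, assuming 'M' is an authenticated instance (the example value is illustrative):

    M.select('INBOX')
    typ, dat = M.response('UIDVALIDITY')    # e.g. ('OK', ['1336658088'])
    typ, dat = M.recent()                   # prods the server with NOOP if needed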
if typ in ('NO', 'BAD', 'BYE'): if typ == 'BYE': self.Terminate = True if __debug__: self._log(1, '%s response: %s' % (typ, dat)) def _quote(self, arg): return '"%s"' % arg.replace('\\', '\\\\').replace('"', '\\"') def _release_state_change(self): if self.state_change_pending.locked(): self.state_change_pending.release() if __debug__: self._log(3, 'state_change_pending.release') def _request_pop(self, name, data): self.commands_lock.acquire() rqb = self.tagged_commands.pop(name) if not self.tagged_commands: if __debug__: self._log(3, 'state_change_free.set') self.state_change_free.set() self.commands_lock.release() if __debug__: self._log(4, '_request_pop(%s, %s) = %s' % (name, data, rqb.tag)) rqb.deliver(data) def _request_push(self, tag=None, name=None, **kw): self.commands_lock.acquire() rqb = Request(self, name=name, **kw) if tag is None: tag = rqb.tag self.tagged_commands[tag] = rqb self.commands_lock.release() if __debug__: self._log(4, '_request_push(%s, %s, %s) = %s' % (tag, name, `kw`, rqb.tag)) return rqb def _simple_command(self, name, *args, **kw): if 'callback' in kw: # Note: old calling sequence for back-compat with python <2.6 self._command(name, callback=self._command_completer, cb_arg=kw, cb_self=True, *args) return (None, None) return self._command_complete(self._command(name, *args), kw) def _untagged_response(self, typ, dat, name): if typ == 'NO': return typ, dat data = self._get_untagged_response(name) if not data: return typ, [None] while True: dat = self._get_untagged_response(name) if not dat: break data += dat if __debug__: self._log(4, '_untagged_response(%s, ?, %s) => %s' % (typ, name, data)) return typ, data # Threads def _close_threads(self): if __debug__: self._log(1, '_close_threads') self.ouq.put(None) self.wrth.join() if __debug__: self._log(1, 'call shutdown') self.shutdown() self.rdth.join() self.inth.join() def _handler(self): resp_timeout = self.resp_timeout threading.currentThread().setName(self.identifier + 'handler') time.sleep(0.1) # Don't start handling before main thread ready if __debug__: self._log(1, 'starting') typ, val = self.abort, 'connection terminated' while not self.Terminate: try: if self.idle_timeout is not None: timeout = self.idle_timeout - time.time() if timeout <= 0: timeout = 1 if __debug__: if self.idle_rqb is not None: self._log(5, 'server IDLING, timeout=%.2f' % timeout) else: timeout = resp_timeout line = self.inq.get(True, timeout) except Queue.Empty: if self.idle_rqb is None: if resp_timeout is not None and self.tagged_commands: if __debug__: self._log(1, 'response timeout') typ, val = self.abort, 'no response after %s secs' % resp_timeout break continue if self.idle_timeout > time.time(): continue if __debug__: self._log(2, 'server IDLE timedout') line = IDLE_TIMEOUT_RESPONSE if line is None: if __debug__: self._log(1, 'inq None - terminating') break if not isinstance(line, basestring): typ, val = line break try: self._put_response(line) except: typ, val = self.error, 'program error: %s - %s' % sys.exc_info()[:2] break self.Terminate = True if __debug__: self._log(1, 'terminating: %s' % `val`) while not self.ouq.empty(): try: self.ouq.get_nowait().abort(typ, val) except Queue.Empty: break self.ouq.put(None) self.commands_lock.acquire() for name in self.tagged_commands.keys(): rqb = self.tagged_commands.pop(name) rqb.abort(typ, val) self.state_change_free.set() self.commands_lock.release() if __debug__: self._log(3, 'state_change_free.set') if __debug__: self._log(1, 'finished') if hasattr(select_module, "poll"): def 
_reader(self): threading.currentThread().setName(self.identifier + 'reader') if __debug__: self._log(1, 'starting using poll') def poll_error(state): PollErrors = { select.POLLERR: 'Error', select.POLLHUP: 'Hang up', select.POLLNVAL: 'Invalid request: descriptor not open', } return ' '.join([PollErrors[s] for s in PollErrors.keys() if (s & state)]) line_part = '' poll = select.poll() poll.register(self.read_fd, select.POLLIN) rxzero = 0 terminate = False read_poll_timeout = self.read_poll_timeout * 1000 # poll() timeout is in millisecs while not (terminate or self.Terminate): if self.state == LOGOUT: timeout = 1 else: timeout = read_poll_timeout try: r = poll.poll(timeout) if __debug__: self._log(5, 'poll => %s' % `r`) if not r: continue # Timeout fd,state = r[0] if state & select.POLLIN: data = self.read(self.read_size) # Drain ssl buffer if present start = 0 dlen = len(data) if __debug__: self._log(5, 'rcvd %s' % dlen) if dlen == 0: rxzero += 1 if rxzero > 5: raise IOError("Too many read 0") time.sleep(0.1) continue # Try again rxzero = 0 while True: stop = data.find('\n', start) if stop < 0: line_part += data[start:] break stop += 1 line_part, start, line = \ '', stop, line_part + data[start:stop] if __debug__: self._log(4, '< %s' % line) self.inq.put(line) if self.TerminateReader: terminate = True if state & ~(select.POLLIN): raise IOError(poll_error(state)) except: reason = 'socket error: %s - %s' % sys.exc_info()[:2] if __debug__: if not self.Terminate: self._print_log() if self.debug: self.debug += 4 # Output all self._log(1, reason) self.inq.put((self.abort, reason)) break poll.unregister(self.read_fd) if __debug__: self._log(1, 'finished') else: # No "poll" - use select() def _reader(self): threading.currentThread().setName(self.identifier + 'reader') if __debug__: self._log(1, 'starting using select') line_part = '' rxzero = 0 terminate = False while not (terminate or self.Terminate): if self.state == LOGOUT: timeout = 1 else: timeout = self.read_poll_timeout try: r,w,e = select.select([self.read_fd], [], [], timeout) if __debug__: self._log(5, 'select => %s, %s, %s' % (r,w,e)) if not r: # Timeout continue data = self.read(self.read_size) # Drain ssl buffer if present start = 0 dlen = len(data) if __debug__: self._log(5, 'rcvd %s' % dlen) if dlen == 0: rxzero += 1 if rxzero > 5: raise IOError("Too many read 0") time.sleep(0.1) continue # Try again rxzero = 0 while True: stop = data.find('\n', start) if stop < 0: line_part += data[start:] break stop += 1 line_part, start, line = \ '', stop, line_part + data[start:stop] if __debug__: self._log(4, '< %s' % line) self.inq.put(line) if self.TerminateReader: terminate = True except: reason = 'socket error: %s - %s' % sys.exc_info()[:2] if __debug__: if not self.Terminate: self._print_log() if self.debug: self.debug += 4 # Output all self._log(1, reason) self.inq.put((self.abort, reason)) break if __debug__: self._log(1, 'finished') def _writer(self): threading.currentThread().setName(self.identifier + 'writer') if __debug__: self._log(1, 'starting') reason = 'Terminated' while not self.Terminate: rqb = self.ouq.get() if rqb is None: break # Outq flushed try: self.send(rqb.data) if __debug__: self._log(4, '> %s' % rqb.data) except: reason = 'socket error: %s - %s' % sys.exc_info()[:2] if __debug__: if not self.Terminate: self._print_log() if self.debug: self.debug += 4 # Output all self._log(1, reason) rqb.abort(self.abort, reason) break self.inq.put((self.abort, reason)) if __debug__: self._log(1, 'finished') # Debugging if __debug__: 
def _init_debug(self, debug=None, debug_file=None, debug_buf_lvl=None): self.debug = self._choose_nonull_or_dflt(0, debug, Debug) self.debug_file = self._choose_nonull_or_dflt(sys.stderr, debug_file) self.debug_buf_lvl = self._choose_nonull_or_dflt(DFLT_DEBUG_BUF_LVL, debug_buf_lvl) self.debug_lock = threading.Lock() self._cmd_log_len = 20 self._cmd_log_idx = 0 self._cmd_log = {} # Last `_cmd_log_len' interactions if self.debug: self._mesg('imaplib2 version %s' % __version__) self._mesg('imaplib2 debug level %s, buffer level %s' % (self.debug, self.debug_buf_lvl)) def _dump_ur(self, lvl): if lvl > self.debug: return l = self.untagged_responses if not l: return t = '\n\t\t' l = map(lambda x:'%s: "%s"' % (x[0], x[1][0] and '" "'.join(x[1]) or ''), l) self.debug_lock.acquire() self._mesg('untagged responses dump:%s%s' % (t, t.join(l))) self.debug_lock.release() def _log(self, lvl, line): if lvl > self.debug: return if line[-2:] == CRLF: line = line[:-2] + '\\r\\n' tn = threading.currentThread().getName() if lvl <= 1 or self.debug > self.debug_buf_lvl: self.debug_lock.acquire() self._mesg(line, tn) self.debug_lock.release() if lvl != 1: return # Keep log of last `_cmd_log_len' interactions for debugging. self.debug_lock.acquire() self._cmd_log[self._cmd_log_idx] = (line, tn, time.time()) self._cmd_log_idx += 1 if self._cmd_log_idx >= self._cmd_log_len: self._cmd_log_idx = 0 self.debug_lock.release() def _mesg(self, s, tn=None, secs=None): if secs is None: secs = time.time() if tn is None: tn = threading.currentThread().getName() tm = time.strftime('%M:%S', time.localtime(secs)) try: self.debug_file.write(' %s.%02d %s %s\n' % (tm, (secs*100)%100, tn, s)) self.debug_file.flush() finally: pass def _print_log(self): self.debug_lock.acquire() i, n = self._cmd_log_idx, self._cmd_log_len if n: self._mesg('last %d log messages:' % n) while n: try: self._mesg(*self._cmd_log[i]) except: pass i += 1 if i >= self._cmd_log_len: i = 0 n -= 1 self.debug_lock.release() class IMAP4_SSL(IMAP4): """IMAP4 client class over SSL connection Instantiate with: IMAP4_SSL(host=None, port=None, keyfile=None, certfile=None, debug=None, debug_file=None, identifier=None, timeout=None) host - host's name (default: localhost); port - port number (default: standard IMAP4 SSL port); keyfile - PEM formatted file that contains your private key (default: None); certfile - PEM formatted certificate chain file (default: None); ca_certs - PEM formatted certificate chain file used to validate server certificates (default: None); cert_verify_cb - function to verify authenticity of server certificates (default: None); debug - debug level (default: 0 - no debug); debug_file - debug stream (default: sys.stderr); identifier - thread identifier prefix (default: host); timeout - timeout in seconds when expecting a command response. debug_buf_lvl - debug level at which buffering is turned off. For more documentation see the docstring of the parent class IMAP4. """ def __init__(self, host=None, port=None, keyfile=None, certfile=None, ca_certs=None, cert_verify_cb=None, debug=None, debug_file=None, identifier=None, timeout=None, debug_buf_lvl=None): self.keyfile = keyfile self.certfile = certfile self.ca_certs = ca_certs self.cert_verify_cb = cert_verify_cb IMAP4.__init__(self, host, port, debug, debug_file, identifier, timeout, debug_buf_lvl) def open(self, host=None, port=None): """open(host=None, port=None) Setup secure connection to remote server on "host:port" (default: localhost:standard IMAP4 SSL port). 
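A sketch of instantiating the IMAP4_SSL class documented above; the host, CA path and credentials are hypothetical:

    from offlineimap.imaplib2 import IMAP4_SSL

    M = IMAP4_SSL(host='imap.example.org', port=993,
                  ca_certs='/etc/ssl/certs/ca-bundle.crt',
                  timeout=30, debug=0)
    M.login('user', 'secret')
    M.logout()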
This connection will be used by the routines: read, send, shutdown, socket, ssl.""" self.host = self._choose_nonull_or_dflt('', host) self.port = self._choose_nonull_or_dflt(IMAP4_SSL_PORT, port) self.sock = self.open_socket() self.ssl_wrap_socket() def read(self, size): """data = read(size) Read at most 'size' bytes from remote.""" if self.decompressor is None: return self.sock.read(size) if self.decompressor.unconsumed_tail: data = self.decompressor.unconsumed_tail else: data = self.sock.read(READ_SIZE) return self.decompressor.decompress(data, size) def send(self, data): """send(data) Send 'data' to remote.""" if self.compressor is not None: data = self.compressor.compress(data) data += self.compressor.flush(zlib.Z_SYNC_FLUSH) if hasattr(self.sock, "sendall"): self.sock.sendall(data) else: bytes = len(data) while bytes > 0: sent = self.sock.write(data) if sent == bytes: break # avoid copy data = data[sent:] bytes = bytes - sent def ssl(self): """ssl = ssl() Return ssl instance used to communicate with the IMAP4 server.""" return self.sock class IMAP4_stream(IMAP4): """IMAP4 client class over a stream Instantiate with: IMAP4_stream(command, debug=None, debug_file=None, identifier=None, timeout=None, debug_buf_lvl=None) command - string that can be passed to subprocess.Popen(); debug - debug level (default: 0 - no debug); debug_file - debug stream (default: sys.stderr); identifier - thread identifier prefix (default: host); timeout - timeout in seconds when expecting a command response. debug_buf_lvl - debug level at which buffering is turned off. For more documentation see the docstring of the parent class IMAP4. """ def __init__(self, command, debug=None, debug_file=None, identifier=None, timeout=None, debug_buf_lvl=None): self.command = command self.host = command self.port = None self.sock = None self.writefile, self.readfile = None, None self.read_fd = None IMAP4.__init__(self, None, None, debug, debug_file, identifier, timeout, debug_buf_lvl) def open(self, host=None, port=None): """open(host=None, port=None) Setup a stream connection via 'self.command'. This connection will be used by the routines: read, send, shutdown, socket.""" from subprocess import Popen, PIPE self._P = Popen(self.command, shell=True, stdin=PIPE, stdout=PIPE, close_fds=True) self.writefile, self.readfile = self._P.stdin, self._P.stdout self.read_fd = self.readfile.fileno() def read(self, size): """Read 'size' bytes from remote.""" if self.decompressor is None: return os.read(self.read_fd, size) if self.decompressor.unconsumed_tail: data = self.decompressor.unconsumed_tail else: data = os.read(self.read_fd, READ_SIZE) return self.decompressor.decompress(data, size) def send(self, data): """Send data to remote.""" if self.compressor is not None: data = self.compressor.compress(data) data += self.compressor.flush(zlib.Z_SYNC_FLUSH) self.writefile.write(data) self.writefile.flush() def shutdown(self): """Close I/O established in "open".""" self.readfile.close() self.writefile.close() class _Authenticator(object): """Private class to provide en/de-coding for base64 authentication conversation.""" def __init__(self, mechinst): self.mech = mechinst # Callable object to provide/process data def process(self, data, rqb): ret = self.mech(self.decode(data)) if ret is None: return '*' # Abort conversation return self.encode(ret) def encode(self, inp): # # Invoke binascii.b2a_base64 iteratively with # short even length buffers, strip the trailing # line feed from the result and append. 
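A sketch of the IMAP4_stream class above, which talks to the server (or a tunnel to it) through a subprocess instead of a socket; the command shown is only an example of a pre-authenticating tunnel:

    from offlineimap.imaplib2 import IMAP4_stream

    M = IMAP4_stream('ssh -q mail.example.org exec /usr/sbin/imapd',  # hypothetical
                     timeout=30)
    print M.state      # 'AUTH' if the greeting was PREAUTH, else 'NONAUTH'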
"Even" # means a number that factors to both 6 and 8, # so when it gets to the end of the 8-bit input # there's no partial 6-bit output. # oup = '' while inp: if len(inp) > 48: t = inp[:48] inp = inp[48:] else: t = inp inp = '' e = binascii.b2a_base64(t) if e: oup = oup + e[:-1] return oup def decode(self, inp): if not inp: return '' return binascii.a2b_base64(inp) class _IdleCont(object): """When process is called, server is in IDLE state and will send asynchronous changes.""" def __init__(self, parent, timeout): self.parent = parent self.timeout = parent._choose_nonull_or_dflt(IDLE_TIMEOUT, timeout) self.parent.idle_timeout = self.timeout + time.time() def process(self, data, rqb): self.parent.idle_lock.acquire() self.parent.idle_rqb = rqb self.parent.idle_timeout = self.timeout + time.time() self.parent.idle_lock.release() if __debug__: self.parent._log(2, 'server IDLE started, timeout in %.2f secs' % self.timeout) return None MonthNames = [None, 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] Mon2num = dict(zip((x.encode() for x in MonthNames[1:]), range(1, 13))) InternalDate = re.compile(r'.*INTERNALDATE "' r'(?P[ 0123][0-9])-(?P[A-Z][a-z][a-z])-(?P[0-9][0-9][0-9][0-9])' r' (?P[0-9][0-9]):(?P[0-9][0-9]):(?P[0-9][0-9])' r' (?P[-+])(?P[0-9][0-9])(?P[0-9][0-9])' r'"') def Internaldate2Time(resp): """time_tuple = Internaldate2Time(resp) Convert IMAP4 INTERNALDATE to UT.""" mo = InternalDate.match(resp) if not mo: return None mon = Mon2num[mo.group('mon')] zonen = mo.group('zonen') day = int(mo.group('day')) year = int(mo.group('year')) hour = int(mo.group('hour')) min = int(mo.group('min')) sec = int(mo.group('sec')) zoneh = int(mo.group('zoneh')) zonem = int(mo.group('zonem')) # INTERNALDATE timezone must be subtracted to get UT zone = (zoneh*60 + zonem)*60 if zonen == '-': zone = -zone tt = (year, mon, day, hour, min, sec, -1, -1, -1) utc = time.mktime(tt) # Following is necessary because the time module has no 'mkgmtime'. # 'mktime' assumes arg in local timezone, so adds timezone/altzone. lt = time.localtime(utc) if time.daylight and lt[-1]: zone = zone + time.altzone else: zone = zone + time.timezone return time.localtime(utc - zone) Internaldate2tuple = Internaldate2Time # (Backward compatible) def Time2Internaldate(date_time): """'"DD-Mmm-YYYY HH:MM:SS +HHMM"' = Time2Internaldate(date_time) Convert 'date_time' to IMAP4 INTERNALDATE representation.""" if isinstance(date_time, (int, float)): tt = time.localtime(date_time) elif isinstance(date_time, (tuple, time.struct_time)): tt = date_time elif isinstance(date_time, str) and (date_time[0],date_time[-1]) == ('"','"'): return date_time # Assume in correct format else: raise ValueError("date_time not of a known type") if time.daylight and tt[-1]: zone = -time.altzone else: zone = -time.timezone return ('"%2d-%s-%04d %02d:%02d:%02d %+03d%02d"' % ((tt[2], MonthNames[tt[1]], tt[0]) + tt[3:6] + divmod(zone//60, 60))) FLAGS_cre = re.compile(r'.*FLAGS \((?P[^\)]*)\)') def ParseFlags(resp): """('flag', ...) 
= ParseFlags(line) Convert IMAP4 flags response to python tuple.""" mo = FLAGS_cre.match(resp) if not mo: return () return tuple(mo.group('flags').split()) if __name__ == '__main__': # To test: invoke either as 'python imaplib2.py [IMAP4_server_hostname]', # or as 'python imaplib2.py -s "rsh IMAP4_server_hostname exec /etc/rimapd"' # or as 'python imaplib2.py -l "keyfile[:certfile]" [IMAP4_SSL_server_hostname]' import getopt, getpass try: optlist, args = getopt.getopt(sys.argv[1:], 'd:l:s:p:') except getopt.error, val: optlist, args = (), () debug, debug_buf_lvl, port, stream_command, keyfile, certfile = (None,)*6 for opt,val in optlist: if opt == '-d': debug = int(val) debug_buf_lvl = debug - 1 elif opt == '-l': try: keyfile,certfile = val.split(':') except ValueError: keyfile,certfile = val,val elif opt == '-p': port = int(val) elif opt == '-s': stream_command = val if not args: args = (stream_command,) if not args: args = ('',) if not port: port = (keyfile is not None) and IMAP4_SSL_PORT or IMAP4_PORT host = args[0] USER = getpass.getuser() data = open(os.path.exists("test.data") and "test.data" or __file__).read(1000) test_mesg = 'From: %(user)s@localhost%(lf)sSubject: IMAP4 test%(lf)s%(lf)s%(data)s' \ % {'user':USER, 'lf':'\n', 'data':data} test_seq1 = [ ('list', ('""', '%')), ('create', ('/tmp/imaplib2_test.0',)), ('rename', ('/tmp/imaplib2_test.0', '/tmp/imaplib2_test.1')), ('CREATE', ('/tmp/imaplib2_test.2',)), ('append', ('/tmp/imaplib2_test.2', None, None, test_mesg)), ('list', ('/tmp', 'imaplib2_test*')), ('select', ('/tmp/imaplib2_test.2',)), ('search', (None, 'SUBJECT', 'IMAP4 test')), ('fetch', ("'1:*'", '(FLAGS INTERNALDATE RFC822)')), ('store', ('1', 'FLAGS', '(\Deleted)')), ('namespace', ()), ('expunge', ()), ('recent', ()), ('close', ()), ] test_seq2 = ( ('select', ()), ('response', ('UIDVALIDITY',)), ('response', ('EXISTS',)), ('append', (None, None, None, test_mesg)), ('uid', ('SEARCH', 'SUBJECT', 'IMAP4 test')), ('uid', ('SEARCH', 'ALL')), ('uid', ('THREAD', 'references', 'UTF-8', '(SEEN)')), ('recent', ()), ) AsyncError = None def responder((response, cb_arg, error)): global AsyncError cmd, args = cb_arg if error is not None: AsyncError = error M._log(0, '[cb] ERROR %s %.100s => %s' % (cmd, args, error)) return typ, dat = response M._log(0, '[cb] %s %.100s => %s %.100s' % (cmd, args, typ, dat)) if typ == 'NO': AsyncError = (Exception, dat[0]) def run(cmd, args, cb=True): if AsyncError: M._log(1, 'AsyncError') M.logout() typ, val = AsyncError raise typ(val) if not M.debug: M._log(0, '%s %.100s' % (cmd, args)) try: if cb: typ, dat = getattr(M, cmd)(callback=responder, cb_arg=(cmd, args), *args) M._log(1, '%s %.100s => %s %.100s' % (cmd, args, typ, dat)) else: typ, dat = getattr(M, cmd)(*args) M._log(1, '%s %.100s => %s %.100s' % (cmd, args, typ, dat)) except: M._log(1, '%s - %s' % sys.exc_info()[:2]) M.logout() raise if typ == 'NO': M._log(1, 'NO') M.logout() raise Exception(dat[0]) return dat try: threading.currentThread().setName('main') if keyfile is not None: if not keyfile: keyfile = None if not certfile: certfile = None M = IMAP4_SSL(host=host, port=port, keyfile=keyfile, certfile=certfile, debug=debug, identifier='', timeout=10, debug_buf_lvl=debug_buf_lvl) elif stream_command: M = IMAP4_stream(stream_command, debug=debug, identifier='', timeout=10, debug_buf_lvl=debug_buf_lvl) else: M = IMAP4(host=host, port=port, debug=debug, identifier='', timeout=10, debug_buf_lvl=debug_buf_lvl) if M.state != 'AUTH': # Login needed PASSWD = getpass.getpass("IMAP password for 
%s on %s: " % (USER, host or "localhost")) test_seq1.insert(0, ('login', (USER, PASSWD))) M._log(0, 'PROTOCOL_VERSION = %s' % M.PROTOCOL_VERSION) if 'COMPRESS=DEFLATE' in M.capabilities: M.enable_compression() for cmd,args in test_seq1: run(cmd, args) for ml in run('list', ('/tmp/', 'imaplib2_test%'), cb=False): mo = re.match(r'.*"([^"]+)"$', ml) if mo: path = mo.group(1) else: path = ml.split()[-1] run('delete', (path,)) if 'ID' in M.capabilities: run('id', ()) run('id', ('("name", "imaplib2")',)) run('id', ("version", __version__, "os", os.uname()[0])) for cmd,args in test_seq2: if (cmd,args) != ('uid', ('SEARCH', 'SUBJECT', 'IMAP4 test')): run(cmd, args) continue dat = run(cmd, args, cb=False) uid = dat[-1].split() if not uid: continue run('uid', ('FETCH', uid[-1], '(FLAGS INTERNALDATE RFC822.SIZE RFC822.HEADER RFC822.TEXT)')) run('uid', ('STORE', uid[-1], 'FLAGS', '(\Deleted)')) run('expunge', ()) if 'IDLE' in M.capabilities: run('idle', (2,), cb=False) run('idle', (99,)) # Asynchronous, to test interruption of 'idle' by 'noop' time.sleep(1) run('noop', (), cb=False) run('append', (None, None, None, test_mesg), cb=False) num = run('search', (None, 'ALL'), cb=False)[0].split()[0] dat = run('fetch', (num, '(FLAGS INTERNALDATE RFC822)'), cb=False) M._mesg('fetch %s => %s' % (num, `dat`)) run('idle', (2,)) run('store', (num, '-FLAGS', '(\Seen)'), cb=False), dat = run('fetch', (num, '(FLAGS INTERNALDATE RFC822)'), cb=False) M._mesg('fetch %s => %s' % (num, `dat`)) run('uid', ('STORE', num, 'FLAGS', '(\Deleted)')) run('expunge', ()) run('logout', (), cb=False) if debug: M._mesg('') M._print_log() M._mesg('') M._mesg('unused untagged responses in order, most recent last:') for typ,dat in M.pop_untagged_responses(): M._mesg('\t%s %s' % (typ, dat)) print 'All tests OK.' except: print 'Tests failed.' if not debug: print ''' If you would like to see debugging output, try: %s -d5 ''' % sys.argv[0] raise spaetz-offlineimap-c9e9690/offlineimap/imaplibutil.py000066400000000000000000000150521176237577200230630ustar00rootroot00000000000000# imaplib utilities # Copyright (C) 2002-2007 John Goerzen # 2012-2012 Sebastian Spaeth # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA import os import fcntl import re import socket import ssl import time import subprocess import threading from hashlib import sha1 from offlineimap.ui import getglobalui from offlineimap import OfflineImapError from offlineimap.imaplib2 import IMAP4, IMAP4_SSL, zlib, IMAP4_PORT, InternalDate, Mon2num class UsefulIMAPMixIn(object): def getselectedfolder(self): if self.state == 'SELECTED': return self.mailbox return None def select(self, mailbox='INBOX', readonly=False, force = False): """Selects a mailbox on the IMAP server :returns: 'OK' on success, nothing if the folder was already selected or raises an :exc:`OfflineImapError`""" if self.getselectedfolder() == mailbox and self.is_readonly == readonly \ and not force: # No change; return. return # Wipe out all old responses, to maintain semantics with old imaplib2 del self.untagged_responses[:] try: result = super(UsefulIMAPMixIn, self).select(mailbox, readonly) except self.abort as e: # self.abort is raised when we are supposed to retry errstr = "Server '%s' closed connection, error on SELECT '%s'. Ser"\ "ver said: %s" % (self.host, mailbox, e.args[0]) severity = OfflineImapError.ERROR.FOLDER_RETRY raise OfflineImapError(errstr, severity) if result[0] != 'OK': #in case of error, bail out with OfflineImapError errstr = "Error SELECTing mailbox '%s', server reply:\n%s" %\ (mailbox, result) severity = OfflineImapError.ERROR.FOLDER raise OfflineImapError(errstr, severity) return result def _mesg(self, s, tn=None, secs=None): new_mesg(self, s, tn, secs) class IMAP4_Tunnel(UsefulIMAPMixIn, IMAP4): """IMAP4 client class over a tunnel Instantiate with: IMAP4_Tunnel(tunnelcmd) tunnelcmd -- shell command to generate the tunnel. 
The result will be in PREAUTH stage.""" def __init__(self, tunnelcmd, **kwargs): IMAP4.__init__(self, tunnelcmd, **kwargs) def open(self, host, port): """The tunnelcmd comes in on host!""" self.host = host self.process = subprocess.Popen(host, shell=True, close_fds=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE) (self.outfd, self.infd) = (self.process.stdin, self.process.stdout) # imaplib2 polls on this fd self.read_fd = self.infd.fileno() self.set_nonblocking(self.read_fd) def set_nonblocking(self, fd): "Mark fd as nonblocking" # get the file's current flag settings fl = fcntl.fcntl(fd, fcntl.F_GETFL) # clear non-blocking mode from flags fl = fl & ~os.O_NONBLOCK fcntl.fcntl(fd, fcntl.F_SETFL, fl) def read(self, size): """data = read(size) Read at most 'size' bytes from remote.""" if self.decompressor is None: return os.read(self.read_fd, size) if self.decompressor.unconsumed_tail: data = self.decompressor.unconsumed_tail else: data = os.read(self.read_fd, 8192) return self.decompressor.decompress(data, size) def send(self, data): if self.compressor is not None: data = self.compressor.compress(data) data += self.compressor.flush(zlib.Z_SYNC_FLUSH) self.outfd.write(data) def shutdown(self): self.infd.close() self.outfd.close() self.process.wait() def new_mesg(self, s, tn=None, secs=None): if secs is None: secs = time.time() if tn is None: tn = threading.currentThread().getName() tm = time.strftime('%M:%S', time.localtime(secs)) getglobalui().debug('imap', ' %s.%02d %s %s' % (tm, (secs*100)%100, tn, s)) class WrappedIMAP4_SSL(UsefulIMAPMixIn, IMAP4_SSL): """Improved version of imaplib.IMAP4_SSL overriding select()""" def __init__(self, *args, **kwargs): self._fingerprint = kwargs.get('fingerprint', None) if 'fingerprint' in kwargs: del kwargs['fingerprint'] super(WrappedIMAP4_SSL, self).__init__(*args, **kwargs) def open(self, host=None, port=None): super(WrappedIMAP4_SSL, self).open(host, port) if (self._fingerprint or not self.ca_certs): # compare fingerprints fingerprint = sha1(self.sock.getpeercert(True)).hexdigest() if fingerprint != self._fingerprint: raise OfflineImapError("Server SSL fingerprint '%s' for hostnam" "e '%s' does not match configured fingerprint. Please ver" "ify and set 'cert_fingerprint' accordingly if not set ye" "t." % (fingerprint, host), OfflineImapError.ERROR.REPO) class WrappedIMAP4(UsefulIMAPMixIn, IMAP4): """Improved version of imaplib.IMAP4 overriding select()""" pass def Internaldate2epoch(resp): """Convert IMAP4 INTERNALDATE to UT. Returns seconds since the epoch. """ mo = InternalDate.match(resp) if not mo: return None mon = Mon2num[mo.group('mon')] zonen = mo.group('zonen') day = int(mo.group('day')) year = int(mo.group('year')) hour = int(mo.group('hour')) min = int(mo.group('min')) sec = int(mo.group('sec')) zoneh = int(mo.group('zoneh')) zonem = int(mo.group('zonem')) # INTERNALDATE timezone must be subtracted to get UT zone = (zoneh*60 + zonem)*60 if zonen == '-': zone = -zone tt = (year, mon, day, hour, min, sec, -1, -1, -1) return time.mktime(tt) spaetz-offlineimap-c9e9690/offlineimap/imapserver.py000066400000000000000000000615771176237577200227420ustar00rootroot00000000000000# IMAP server support # Copyright (C) 2002 - 2011 John Goerzen & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. 
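To configure the value that the fingerprint check in WrappedIMAP4_SSL above compares against (the sha1 digest of the server's DER-encoded certificate), the expected fingerprint can be computed ahead of time. A sketch using the standard ssl helpers, with a hypothetical host:

    import ssl
    from hashlib import sha1

    pem = ssl.get_server_certificate(('imap.example.org', 993))
    der = ssl.PEM_cert_to_DER_cert(pem)
    print sha1(der).hexdigest()     # value for the 'cert_fingerprint' setting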
# # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from offlineimap import imaplibutil, imaputil, threadutil, OfflineImapError from offlineimap.ui import getglobalui from threading import Lock, BoundedSemaphore, Thread, Event, currentThread import offlineimap.accounts import hmac import socket import base64 import time import errno from sys import exc_info from socket import gaierror from ssl import SSLError, cert_time_to_seconds try: # do we have a recent pykerberos? have_gss = False import kerberos if 'authGSSClientWrap' in dir(kerberos): have_gss = True except ImportError: pass class IMAPServer: """Initializes all variables from an IMAPRepository() instance Various functions, such as acquireconnection() return an IMAP4 object on which we can operate. Public instance variables are: self.: delim The server's folder delimiter. Only valid after acquireconnection() """ GSS_STATE_STEP = 0 GSS_STATE_WRAP = 1 def __init__(self, repos): self.ui = getglobalui() self.repos = repos self.config = repos.getconfig() self.tunnel = repos.getpreauthtunnel() self.usessl = repos.getssl() self.username = None if self.tunnel else repos.getuser() self.password = None self.passworderror = None self.goodpassword = None self.hostname = None if self.tunnel else repos.gethost() self.port = repos.getport() if self.port == None: self.port = 993 if self.usessl else 143 self.sslclientcert = repos.getsslclientcert() self.sslclientkey = repos.getsslclientkey() self.sslcacertfile = repos.getsslcacertfile() if self.sslcacertfile is None: self.verifycert = None # disable cert verification self.delim = None self.root = None self.maxconnections = repos.getmaxconnections() self.availableconnections = [] self.assignedconnections = [] self.lastowner = {} self.semaphore = BoundedSemaphore(self.maxconnections) self.connectionlock = Lock() self.reference = repos.getreference() self.idlefolders = repos.getidlefolders() self.gss_step = self.GSS_STATE_STEP self.gss_vc = None self.gssapi = False def getpassword(self): """Returns the server password or None""" if self.goodpassword != None: # use cached good one first return self.goodpassword if self.password != None and self.passworderror == None: return self.password # non-failed preconfigured one # get 1) configured password first 2) fall back to asking via UI self.password = self.repos.getpassword() or \ self.ui.getpass(self.repos.getname(), self.config, self.passworderror) self.passworderror = None return self.password def getroot(self): """Returns this server's folder root. Can only be called after one or more calls to acquireconnection.""" return self.root def releaseconnection(self, connection, drop_conn=False): """Releases a connection, returning it to the pool. :param drop_conn: If True, the connection will be released and not be reused. 
This can be used to indicate broken connections.""" if connection is None: return #noop on bad connection self.connectionlock.acquire() self.assignedconnections.remove(connection) # Don't reuse broken connections if connection.Terminate or drop_conn: connection.logout() else: self.availableconnections.append(connection) self.connectionlock.release() self.semaphore.release() def md5handler(self, response): challenge = response.strip() self.ui.debug('imap', 'md5handler: got challenge %s' % challenge) passwd = self.getpassword() retval = self.username + ' ' + hmac.new(passwd, challenge).hexdigest() self.ui.debug('imap', 'md5handler: returning %s' % retval) return retval def plainauth(self, imapobj): self.ui.debug('imap', 'Attempting plain authentication') imapobj.login(self.username, self.getpassword()) def gssauth(self, response): data = base64.b64encode(response) try: if self.gss_step == self.GSS_STATE_STEP: if not self.gss_vc: rc, self.gss_vc = kerberos.authGSSClientInit('imap@' + self.hostname) response = kerberos.authGSSClientResponse(self.gss_vc) rc = kerberos.authGSSClientStep(self.gss_vc, data) if rc != kerberos.AUTH_GSS_CONTINUE: self.gss_step = self.GSS_STATE_WRAP elif self.gss_step == self.GSS_STATE_WRAP: rc = kerberos.authGSSClientUnwrap(self.gss_vc, data) response = kerberos.authGSSClientResponse(self.gss_vc) rc = kerberos.authGSSClientWrap(self.gss_vc, response, self.username) response = kerberos.authGSSClientResponse(self.gss_vc) except kerberos.GSSError as err: # Kerberos errored out on us, respond with None to cancel the # authentication self.ui.debug('imap', '%s: %s' % (err[0][0], err[1][0])) return None if not response: response = '' return base64.b64decode(response) def acquireconnection(self): """Fetches a connection from the pool, making sure to create a new one if needed, to obey the maximum connection limits, etc. Opens a connection to the server and returns an appropriate object.""" self.semaphore.acquire() self.connectionlock.acquire() curThread = currentThread() imapobj = None if len(self.availableconnections): # One is available. # Try to find one that previously belonged to this thread # as an optimization. Start from the back since that's where # they're popped on. imapobj = None for i in range(len(self.availableconnections) - 1, -1, -1): tryobj = self.availableconnections[i] if self.lastowner[tryobj] == curThread.ident: imapobj = tryobj del(self.availableconnections[i]) break if not imapobj: imapobj = self.availableconnections[0] del(self.availableconnections[0]) self.assignedconnections.append(imapobj) self.lastowner[imapobj] = curThread.ident self.connectionlock.release() return imapobj self.connectionlock.release() # Release until need to modify data """ Must be careful here that if we fail we should bail out gracefully and release locks / threads so that the next attempt can try... """ success = 0 try: while not success: # Generate a new connection. 
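# Editor's note (descriptive comments added for clarity; the code below is
# unchanged): each iteration of this loop first establishes a transport in
# one of three ways -- a preauthenticated tunnel, an SSL-wrapped socket, or
# a plain TCP connection -- and then, for non-tunnel connections, walks an
# authentication cascade: GSSAPI (when the kerberos module is usable and
# the server advertises AUTH=GSSAPI), otherwise STARTTLS where offered on a
# non-SSL connection, then CRAM-MD5, and finally plaintext LOGIN unless the
# server advertises LOGINDISABLED (RFC 2595), in which case an
# OfflineImapError is raised instead of sending the password in the clear.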
if self.tunnel: self.ui.connecting('tunnel', self.tunnel) imapobj = imaplibutil.IMAP4_Tunnel(self.tunnel, timeout=socket.getdefaulttimeout()) success = 1 elif self.usessl: self.ui.connecting(self.hostname, self.port) fingerprint = self.repos.get_ssl_fingerprint() imapobj = imaplibutil.WrappedIMAP4_SSL(self.hostname, self.port, self.sslclientkey, self.sslclientcert, self.sslcacertfile, self.verifycert, timeout=socket.getdefaulttimeout(), fingerprint=fingerprint ) else: self.ui.connecting(self.hostname, self.port) imapobj = imaplibutil.WrappedIMAP4(self.hostname, self.port, timeout=socket.getdefaulttimeout()) if not self.tunnel: try: # Try GSSAPI and continue if it fails if 'AUTH=GSSAPI' in imapobj.capabilities and have_gss: self.connectionlock.acquire() self.ui.debug('imap', 'Attempting GSSAPI authentication') try: imapobj.authenticate('GSSAPI', self.gssauth) except imapobj.error as val: self.gssapi = False self.ui.debug('imap', 'GSSAPI Authentication failed') else: self.gssapi = True kerberos.authGSSClientClean(self.gss_vc) self.gss_vc = None self.gss_step = self.GSS_STATE_STEP #if we do self.password = None then the next attempt cannot try... #self.password = None self.connectionlock.release() if not self.gssapi: if 'STARTTLS' in imapobj.capabilities and not\ self.usessl: self.ui.debug('imap', 'Using STARTTLS connection') imapobj.starttls() if 'AUTH=CRAM-MD5' in imapobj.capabilities: self.ui.debug('imap', 'Attempting CRAM-MD5 authentication') try: imapobj.authenticate('CRAM-MD5', self.md5handler) except imapobj.error as val: self.plainauth(imapobj) else: # Use plaintext login, unless # LOGINDISABLED (RFC2595) if 'LOGINDISABLED' in imapobj.capabilities: raise OfflineImapError("Plaintext login " "disabled by server. Need to use SSL?", OfflineImapError.ERROR.REPO) self.plainauth(imapobj) # Would bail by here if there was a failure. success = 1 self.goodpassword = self.password except imapobj.error as val: self.passworderror = str(val) raise # update capabilities after login, e.g. gmail serves different ones typ, dat = imapobj.capability() if dat != [None]: imapobj.capabilities = tuple(dat[-1].upper().split()) if self.delim == None: listres = imapobj.list(self.reference, '""')[1] if listres == [None] or listres == None: # Some buggy IMAP servers do not respond well to LIST "" "" # Work around them. listres = imapobj.list(self.reference, '"*"')[1] if listres == [None] or listres == None: # No Folders were returned. This occurs, e.g. if the # 'reference' prefix does not exist on the mail # server. Raise exception. err = "Server '%s' returned no folders in '%s'" % \ (self.repos.getname(), self.reference) self.ui.warn(err) raise Exception(err) self.delim, self.root = \ imaputil.imapsplit(listres[0])[1:] self.delim = imaputil.dequote(self.delim) self.root = imaputil.dequote(self.root) self.connectionlock.acquire() self.assignedconnections.append(imapobj) self.lastowner[imapobj] = curThread.ident self.connectionlock.release() return imapobj except Exception as e: """If we are here then we did not succeed in getting a connection - we should clean up and then re-raise the error...""" self.semaphore.release() if(self.connectionlock.locked()): self.connectionlock.release() severity = OfflineImapError.ERROR.REPO if type(e) == gaierror: #DNS related errors. Abort Repo sync #TODO: special error msg for e.errno == 2 "Name or service not known"? reason = "Could not resolve name '%s' for repository "\ "'%s'. 
Make sure you have configured the ser"\ "ver name correctly and that you are online."%\ (self.hostname, self.repos) raise OfflineImapError(reason, severity) elif isinstance(e, SSLError) and e.errno == 1: # SSL unknown protocol error # happens e.g. when connecting via SSL to a non-SSL service if self.port != 993: reason = "Could not connect via SSL to host '%s' and non-s"\ "tandard ssl port %d configured. Make sure you connect"\ " to the correct port." % (self.hostname, self.port) else: reason = "Unknown SSL protocol connecting to host '%s' for"\ "repository '%s'. OpenSSL responded:\n%s"\ % (self.hostname, self.repos, e) raise OfflineImapError(reason, severity) elif isinstance(e, socket.error) and e.args[0] == errno.ECONNREFUSED: # "Connection refused", can be a non-existing port, or an unauthorized # webproxy (open WLAN?) reason = "Connection to host '%s:%d' for repository '%s' was "\ "refused. Make sure you have the right host and port "\ "configured and that you are actually able to access the "\ "network." % (self.hostname, self.port, self.reposname) raise OfflineImapError(reason, severity) # Could not acquire connection to the remote; # socket.error(last_error) raised if str(e)[:24] == "can't open socket; error": raise OfflineImapError("Could not connect to remote server '%s' "\ "for repository '%s'. Remote does not answer." % (self.hostname, self.repos), OfflineImapError.ERROR.REPO) else: # re-raise all other errors raise def connectionwait(self): """Waits until there is a connection available. Note that between the time that a connection becomes available and the time it is requested, another thread may have grabbed it. This function is mainly present as a way to avoid spawning thousands of threads to copy messages, then have them all wait for 3 available connections. It's OK if we have maxconnections + 1 or 2 threads, which is what this will help us do.""" self.semaphore.acquire() self.semaphore.release() def close(self): # Make sure I own all the semaphores. Let the threads finish # their stuff. This is a blocking method. with self.connectionlock: # first, wait till all connections had been released. # TODO: won't work IMHO, as releaseconnection() also # requires the connectionlock, leading to a potential # deadlock! Audit & check! threadutil.semaphorereset(self.semaphore, self.maxconnections) for imapobj in self.assignedconnections + self.availableconnections: imapobj.logout() self.assignedconnections = [] self.availableconnections = [] self.lastowner = {} # reset kerberos state self.gss_step = self.GSS_STATE_STEP self.gss_vc = None self.gssapi = False def keepalive(self, timeout, event): """Sends a NOOP to each connection recorded. It will wait a maximum of timeout seconds between doing this, and will continue to do so until the Event object as passed is true. 
This method is expected to be invoked in a separate thread, which should be join()'d after the event is set.""" self.ui.debug('imap', 'keepalive thread started') while not event.isSet(): self.connectionlock.acquire() numconnections = len(self.assignedconnections) + \ len(self.availableconnections) self.connectionlock.release() threads = [] for i in range(numconnections): self.ui.debug('imap', 'keepalive: processing connection %d of %d' % (i, numconnections)) if len(self.idlefolders) > i: # IDLE thread idler = IdleThread(self, self.idlefolders[i]) else: # NOOP thread idler = IdleThread(self) idler.start() threads.append(idler) self.ui.debug('imap', 'keepalive: waiting for timeout') event.wait(timeout) self.ui.debug('imap', 'keepalive: after wait') for idler in threads: # Make sure all the commands have completed. idler.stop() idler.join() self.ui.debug('imap', 'keepalive: all threads joined') self.ui.debug('imap', 'keepalive: event is set; exiting') return def verifycert(self, cert, hostname): '''Verify that cert (in socket.getpeercert() format) matches hostname. CRLs are not handled. Returns error message if any problems are found and None on success. ''' errstr = "CA Cert verifying failed: " if not cert: return ('%s no certificate received' % errstr) dnsname = hostname.lower() certnames = [] # cert expired? notafter = cert.get('notAfter') if notafter: if time.time() >= cert_time_to_seconds(notafter): return '%s certificate expired %s' % (errstr, notafter) # First read commonName for s in cert.get('subject', []): key, value = s[0] if key == 'commonName': certnames.append(value.lower()) if len(certnames) == 0: return ('%s no commonName found in certificate' % errstr) # Then read subjectAltName for key, value in cert.get('subjectAltName', []): if key == 'DNS': certnames.append(value.lower()) # And finally try to match hostname with one of these names for certname in certnames: if (certname == dnsname or '.' in dnsname and certname == '*.' + dnsname.split('.', 1)[1]): return None return ('%s no matching domain name found in certificate' % errstr) class IdleThread(object): def __init__(self, parent, folder=None): """If invoked without 'folder', perform a NOOP and wait for self.stop() to be called. If invoked with folder, switch to IDLE mode and synchronize once we have a new message""" self.parent = parent self.folder = folder self.stop_sig = Event() self.ui = getglobalui() if folder is None: self.thread = Thread(target=self.noop) else: self.thread = Thread(target=self.idle) self.thread.setDaemon(1) def start(self): self.thread.start() def stop(self): self.stop_sig.set() def join(self): self.thread.join() def noop(self): #TODO: AFAIK this is not optimal, we will send a NOOP on one #random connection (ie not enough to keep all connections #open). In case we do the noop multiple times, we can well use #the same connection every time, as we get a random one. This #function should IMHO send a noop on ALL available connections #to the server. 
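# Editor's note (descriptive comments added for clarity; the code below is
# unchanged): the body acquires one connection from the parent IMAPServer
# pool, issues a NOOP on it, drops the connection from the pool (via
# releaseconnection(imapobj, True)) if the server had already aborted it,
# and then simply blocks on self.stop_sig until stop() is called from the
# keepalive loop.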
imapobj = self.parent.acquireconnection() try: imapobj.noop() except imapobj.abort: self.ui.warn('Attempting NOOP on dropped connection %s' % \ imapobj.identifier) self.parent.releaseconnection(imapobj, True) imapobj = None finally: if imapobj: self.parent.releaseconnection(imapobj) self.stop_sig.wait() # wait until we are supposed to quit def dosync(self): remoterepos = self.parent.repos account = remoterepos.account localrepos = account.localrepos remoterepos = account.remoterepos statusrepos = account.statusrepos remotefolder = remoterepos.getfolder(self.folder) offlineimap.accounts.syncfolder(account, remotefolder, quick=False) ui = getglobalui() ui.unregisterthread(currentThread()) #syncfolder registered the thread def idle(self): """Invoke IDLE mode until timeout or self.stop() is invoked""" def callback(args): """IDLE callback function invoked by imaplib2 This is invoked when a) The IMAP server tells us something while in IDLE mode, b) we get an Exception (e.g. on dropped connections, or c) the standard imaplib IDLE timeout of 29 minutes kicks in.""" result, cb_arg, exc_data = args if exc_data is None and not self.stop_sig.isSet(): # No Exception, and we are not supposed to stop: self.needsync = True self.stop_sig.set() # continue to sync while not self.stop_sig.isSet(): self.needsync = False success = False # successfully selected FOLDER? while not success: imapobj = self.parent.acquireconnection() try: imapobj.select(self.folder) except OfflineImapError as e: if e.severity == OfflineImapError.ERROR.FOLDER_RETRY: # Connection closed, release connection and retry self.ui.error(e, exc_info()[2]) self.parent.releaseconnection(imapobj, True) else: raise e else: success = True if "IDLE" in imapobj.capabilities: imapobj.idle(callback=callback) else: self.ui.warn("IMAP IDLE not supported on server '%s'." "Sleep until next refresh cycle." % imapobj.identifier) imapobj.noop() self.stop_sig.wait() # self.stop() or IDLE callback are invoked try: # End IDLE mode with noop, imapobj can point to a dropped conn. imapobj.noop() except imapobj.abort: self.ui.warn('Attempting NOOP on dropped connection %s' % \ imapobj.identifier) self.parent.releaseconnection(imapobj, True) else: self.parent.releaseconnection(imapobj) if self.needsync: # here not via self.stop, but because IDLE responded. Do # another round and invoke actual syncing. self.stop_sig.clear() self.dosync() spaetz-offlineimap-c9e9690/offlineimap/imaputil.py000066400000000000000000000204601176237577200223730ustar00rootroot00000000000000# IMAP utility module # Copyright (C) 2002 John Goerzen # # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
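#
# --- Illustrative sketch added by the editor; not part of the original source ---
# IdleThread.idle() above multiplexes two wake-up sources over a single
# threading.Event: an external stop() request and the imaplib2 IDLE
# callback reporting server activity (or its timeout). Stripped of the
# IMAP specifics, the control flow is roughly the following; start_idle
# and do_sync are placeholders for imapobj.idle(callback=...) and
# self.dosync() respectively.

def idle_loop(stop_sig, start_idle, do_sync):
    """Run IDLE rounds until stop_sig (a threading.Event) is set externally."""
    needsync = [False]                    # mutable so callback() can flag it

    def callback():
        if not stop_sig.is_set():         # genuine server activity, not a stop()
            needsync[0] = True
        stop_sig.set()                    # wake the wait() below either way

    while not stop_sig.is_set():
        needsync[0] = False
        start_idle(callback)              # arranges for callback() to run later
        stop_sig.wait()                   # sleep until callback() or external stop
        if needsync[0]:
            stop_sig.clear()              # it was new mail, not a shutdown request
            do_sync()

# A caller would typically create stop_sig = threading.Event(), run
# idle_loop() in a worker thread, and set the event to shut it down.
#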
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA import re import string from offlineimap.ui import getglobalui # find the first quote in a string quotere = re.compile( r"""(?P"(?:\\"|[^"])*") # Quote, possibly containing encoded # quotation mark \s*(?P.*)$ # Whitespace & remainder of string""", re.VERBOSE) def debug(*args): msg = [] for arg in args: msg.append(str(arg)) getglobalui().debug('imap', " ".join(msg)) def dequote(string): """Takes string which may or may not be quoted and unquotes it. It only considers double quotes. This function does NOT consider parenthised lists to be quoted. """ if string and string.startswith('"') and string.endswith('"'): string = string[1:-1] # Strip off the surrounding quotes. string = string.replace('\\"', '"') string = string.replace('\\\\', '\\') return string def flagsplit(string): """Converts a string of IMAP flags to a list :returns: E.g. '(\\Draft \\Deleted)' returns ['\\Draft','\\Deleted']. (FLAGS (\\Seen Old) UID 4807) returns ['FLAGS,'(\\Seen Old)','UID', '4807'] """ if string[0] != '(' or string[-1] != ')': raise ValueError("Passed string '%s' is not a flag list" % string) return imapsplit(string[1:-1]) def options2hash(list): """convert list [1,2,3,4,5,6] to {1:2, 3:4, 5:6}""" # effectively this does dict(zip(l[::2],l[1::2])), however # measurements seemed to have indicated that the manual variant is # faster for mosly small lists. retval = {} counter = 0 while (counter < len(list)): retval[list[counter]] = list[counter + 1] counter += 2 debug("options2hash returning:", retval) return retval def flags2hash(flags): """Converts IMAP response string from eg IMAP4.fetch() to a hash. E.g. '(FLAGS (\\Seen Old) UID 4807)' leads to {'FLAGS': '(\\Seen Old)', 'UID': '4807'}""" return options2hash(flagsplit(flags)) def imapsplit(imapstring): """Takes a string from an IMAP conversation and returns a list containing its components. One example string is: (\\HasNoChildren) "." "INBOX.Sent" The result from parsing this will be: ['(\\HasNoChildren)', '"."', '"INBOX.Sent"']""" if not isinstance(imapstring, basestring): debug("imapsplit() got a non-string input; working around.") # Sometimes, imaplib will throw us a tuple if the input # contains a literal. See Python bug # #619732 at https://sourceforge.net/tracker/index.php?func=detail&aid=619732&group_id=5470&atid=105470 # One example is: # result[0] = '() "\\\\" Admin' # result[1] = ('() "\\\\" {19}', 'Folder\\2') # # This function will effectively get result[0] or result[1], so # if we get the result[1] version, we need to parse apart the tuple # and figure out what to do with it. Each even-numbered # part of it should end with the {} number, and each odd-numbered # part should be directly a part of the result. We'll # artificially quote it to help out. retval = [] for i in range(len(imapstring)): if i % 2: # Odd: quote then append. arg = imapstring[i] # Quote code lifted from imaplib arg = arg.replace('\\', '\\\\') arg = arg.replace('"', '\\"') arg = '"%s"' % arg debug("imapsplit() non-string [%d]: Appending %s" %\ (i, arg)) retval.append(arg) else: # Even -- we have a string that ends with a literal # size specifier. We need to strip off that, then run # what remains through the regular imapsplit parser. # Recursion to the rescue. 
arg = imapstring[i] arg = re.sub('\{\d+\}$', '', arg) debug("imapsplit() non-string [%d]: Feeding %s to recursion" %\ (i, arg)) retval.extend(imapsplit(arg)) debug("imapsplit() non-string: returning %s" % str(retval)) return retval workstr = imapstring.strip() retval = [] while len(workstr): # handle parenthized fragments (...()...) if workstr[0] == '(': rparenc = 1 # count of right parenthesis to match rpareni = 1 # position to examine while rparenc: # Find the end of the group. if workstr[rpareni] == ')': # end of a group rparenc -= 1 elif workstr[rpareni] == '(': # start of a group rparenc += 1 rpareni += 1 # Move to next character. parenlist = workstr[0:rpareni] workstr = workstr[rpareni:].lstrip() retval.append(parenlist) elif workstr[0] == '"': # quoted fragments '"...\"..."' m = quotere.match(workstr) retval.append(m.group('quote')) workstr = m.group('rest') else: splits = string.split(workstr, maxsplit = 1) splitslen = len(splits) # The unquoted word is splits[0]; the remainder is splits[1] if splitslen == 2: # There's an unquoted word, and more string follows. retval.append(splits[0]) workstr = splits[1] # split will have already lstripped it continue elif splitslen == 1: # We got a last unquoted word, but nothing else retval.append(splits[0]) # Nothing remains. workstr would be '' break elif splitslen == 0: # There was not even an unquoted word. break return retval flagmap = [('\\Seen', 'S'), ('\\Answered', 'R'), ('\\Flagged', 'F'), ('\\Deleted', 'T'), ('\\Draft', 'D')] def flagsimap2maildir(flagstring): """Convert string '(\\Draft \\Deleted)' into a flags set(DR)""" retval = set() imapflaglist = flagstring[1:-1].split() for imapflag, maildirflag in flagmap: if imapflag in imapflaglist: retval.add(maildirflag) return retval def flagsmaildir2imap(maildirflaglist): """Convert set of flags ([DR]) into a string '(\\Deleted \\Draft)'""" retval = [] for imapflag, maildirflag in flagmap: if maildirflag in maildirflaglist: retval.append(imapflag) return '(' + ' '.join(sorted(retval)) + ')' def uid_sequence(uidlist): """Collapse UID lists into shorter sequence sets [1,2,3,4,5,10,12,13] will return "1:5,10,12:13". This function sorts the list, and only collapses if subsequent entries form a range. :returns: The collapsed UID list as string""" def getrange(start, end): if start == end: return(str(start)) return "%s:%s" % (start, end) if not len(uidlist): return '' # Empty list, return start, end = None, None retval = [] # Force items to be longs and sort them sorted_uids = sorted(map(int, uidlist)) for item in iter(sorted_uids): item = int(item) if start == None: # First item start, end = item, item elif item == end + 1: # Next item in a range end = item else: # Starting a new range retval.append(getrange(start, end)) start, end = item, item retval.append(getrange(start, end)) # Add final range/item return ",".join(retval) spaetz-offlineimap-c9e9690/offlineimap/init.py000066400000000000000000000426111176237577200215140ustar00rootroot00000000000000# OfflineIMAP initialization code # Copyright (C) 2002-2011 John Goerzen & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA import os import sys import threading import offlineimap.imaplib2 as imaplib import signal import socket import logging from optparse import OptionParser import offlineimap from offlineimap import accounts, threadutil, syncmaster from offlineimap.error import OfflineImapError from offlineimap.ui import UI_LIST, setglobalui, getglobalui from offlineimap.CustomConfig import CustomConfigParser class OfflineImap: """The main class that encapsulates the high level use of OfflineImap. To invoke OfflineImap you would call it with:: oi = OfflineImap() oi.run() """ def run(self): """Parse the commandline and invoke everything""" # next line also sets self.config and self.ui options, args = self.parse_cmd_options() if options.diagnostics: self.serverdiagnostics(options) else: self.sync(options) def parse_cmd_options(self): parser = OptionParser(version=offlineimap.__version__, description="%s.\n\n%s" % (offlineimap.__copyright__, offlineimap.__license__)) parser.add_option("--dry-run", action="store_true", dest="dryrun", default=False, help="Do not actually modify any store but check and print " "what synchronization actions would be taken if a sync would be" " performed. It will not precisely give the exact information w" "hat will happen. If e.g. we need to create a folder, it merely" " outputs 'Would create folder X', but not how many and which m" "ails it would transfer.") parser.add_option("--info", action="store_true", dest="diagnostics", default=False, help="Output information on the configured email repositories" ". Useful for debugging and bug reporting. Use in conjunction wit" "h the -a option to limit the output to a single account. This mo" "de will prevent any actual sync to occur and exits after it outp" "ut the debug information.") parser.add_option("-1", action="store_true", dest="singlethreading", default=False, help="Disable all multithreading operations and use " "solely a single-thread sync. This effectively sets the " "maxsyncaccounts and all maxconnections configuration file " "variables to 1.") parser.add_option("-P", dest="profiledir", metavar="DIR", help="Sets OfflineIMAP into profile mode. The program " "will create DIR (it must not already exist). " "As it runs, Python profiling information about each " "thread is logged into profiledir. Please note: " "This option is present for debugging and optimization " "only, and should NOT be used unless you have a " "specific reason to do so. It will significantly " "decrease program performance, may reduce reliability, " "and can generate huge amounts of data. This option " "implies the -1 option.") parser.add_option("-a", dest="accounts", metavar="ACCOUNTS", help="Overrides the accounts section in the config file. " "Lets you specify a particular account or set of " "accounts to sync without having to edit the config " "file. You might use this to exclude certain accounts, " "or to sync some accounts that you normally prefer not to.") parser.add_option("-c", dest="configfile", metavar="FILE", default="~/.offlineimaprc", help="Specifies a configuration file to use in lieu of " "%default.") parser.add_option("-d", dest="debugtype", metavar="type1,[type2...]", help="Enables debugging for OfflineIMAP. 
This is useful " "if you are to track down a malfunction or figure out what is " "going on under the hood. This option requires one or more " "debugtypes, separated by commas. These define what exactly " "will be debugged, and so far include two options: imap, thread, " "maildir or ALL. The imap option will enable IMAP protocol " "stream and parsing debugging. Note that the output may contain " "passwords, so take care to remove that from the debugging " "output before sending it to anyone else. The maildir option " "will enable debugging for certain Maildir operations. " "The use of any debug option (unless 'thread' is included), " "implies the single-thread option -1.") parser.add_option("-l", dest="logfile", metavar="FILE", help="Log to FILE") parser.add_option("-f", dest="folders", metavar="folder1,[folder2...]", help="Only sync the specified folders. The folder names " "are the *untranslated* foldernames of the remote repository. " "This command-line option overrides any 'folderfilter' " "and 'folderincludes' options in the configuration file.") parser.add_option("-k", dest="configoverride", action="append", metavar="[section:]option=value", help= """Override configuration file option. If"section" is omitted, it defaults to "general". Any underscores in the section name are replaced with spaces: for instance, to override option "autorefresh" in the "[Account Personal]" section in the config file one would use "-k Account_Personal:autorefresh=30".""") parser.add_option("-o", action="store_true", dest="runonce", default=False, help="Run only once, ignoring any autorefresh setting " "in the configuration file.") parser.add_option("-q", action="store_true", dest="quick", default=False, help="Run only quick synchronizations. Ignore any " "flag updates on IMAP servers (if a flag on the remote IMAP " "changes, and we have the message locally, it will be left " "untouched in a quick run.") parser.add_option("-u", dest="interface", help="Specifies an alternative user interface to " "use. This overrides the default specified in the " "configuration file. The UI specified with -u will " "be forced to be used, even if checks determine that it is " "not usable. Possible interface choices are: %s " % ", ".join(UI_LIST.keys())) (options, args) = parser.parse_args() #read in configuration file configfilename = os.path.expanduser(options.configfile) config = CustomConfigParser() if not os.path.exists(configfilename): # TODO, initialize and make use of chosen ui for logging logging.error(" *** Config file '%s' does not exist; aborting!" % configfilename) sys.exit(1) config.read(configfilename) #profile mode chosen? if options.profiledir: if not options.singlethreading: # TODO, make use of chosen ui for logging logging.warn("Profile mode: Forcing to singlethreaded.") options.singlethreading = True if os.path.exists(options.profiledir): # TODO, make use of chosen ui for logging logging.warn("Profile mode: Directory '%s' already exists!" % options.profiledir) else: os.mkdir(options.profiledir) threadutil.ExitNotifyThread.set_profiledir(options.profiledir) # TODO, make use of chosen ui for logging logging.warn("Profile mode: Potentially large data will be " "created in '%s'" % options.profiledir) #override a config value if options.configoverride: for option in options.configoverride: (key, value) = option.split('=', 1) if ':' in key: (secname, key) = key.split(':', 1) section = secname.replace("_", " ") else: section = "general" config.set(section, key, value) #which ui to use? 
cmd line option overrides config file ui_type = config.getdefault('general','ui', 'ttyui') if options.interface != None: ui_type = options.interface if '.' in ui_type: #transform Curses.Blinkenlights -> Blinkenlights ui_type = ui_type.split('.')[-1] # TODO, make use of chosen ui for logging logging.warning('Using old interface name, consider using one ' 'of %s' % ', '.join(UI_LIST.keys())) if options.diagnostics: ui_type = 'basic' # enforce basic UI for --info #dry-run? Set [general]dry-run=True if options.dryrun: dryrun = config.set('general','dry-run', "True") config.set_if_not_exists('general','dry-run','False') try: # create the ui class self.ui = UI_LIST[ui_type.lower()](config) except KeyError: logging.error("UI '%s' does not exist, choose one of: %s" % \ (ui_type,', '.join(UI_LIST.keys()))) sys.exit(1) setglobalui(self.ui) #set up additional log files if options.logfile: self.ui.setlogfile(options.logfile) #welcome blurb self.ui.init_banner() if options.debugtype: self.ui.logger.setLevel(logging.DEBUG) if options.debugtype.lower() == 'all': options.debugtype = 'imap,maildir,thread' #force single threading? if not ('thread' in options.debugtype.split(',') \ and not options.singlethreading): self.ui._msg("Debug mode: Forcing to singlethreaded.") options.singlethreading = True debugtypes = options.debugtype.split(',') + [''] for type in debugtypes: type = type.strip() self.ui.add_debug(type) if type.lower() == 'imap': imaplib.Debug = 5 if options.runonce: # FIXME: maybe need a better for section in accounts.getaccountlist(config): config.remove_option('Account ' + section, "autorefresh") if options.quick: for section in accounts.getaccountlist(config): config.set('Account ' + section, "quick", '-1') #custom folder list specified? if options.folders: foldernames = options.folders.split(",") folderfilter = "lambda f: f in %s" % foldernames folderincludes = "[]" for accountname in accounts.getaccountlist(config): account_section = 'Account ' + accountname remote_repo_section = 'Repository ' + \ config.get(account_section, 'remoterepository') config.set(remote_repo_section, "folderfilter", folderfilter) config.set(remote_repo_section, "folderincludes", folderincludes) if options.logfile: sys.stderr = self.ui.logfile socktimeout = config.getdefaultint("general", "socktimeout", 0) if socktimeout > 0: socket.setdefaulttimeout(socktimeout) threadutil.initInstanceLimit('ACCOUNTLIMIT', config.getdefaultint('general', 'maxsyncaccounts', 1)) for reposname in config.getsectionlist('Repository'): for instancename in ["FOLDER_" + reposname, "MSGCOPY_" + reposname]: if options.singlethreading: threadutil.initInstanceLimit(instancename, 1) else: threadutil.initInstanceLimit(instancename, config.getdefaultint('Repository ' + reposname, 'maxconnections', 2)) self.config = config return (options, args) def sync(self, options): """Invoke the correct single/multithread syncing self.config is supposed to have been correctly initialized already.""" try: pidfd = open(self.config.getmetadatadir() + "/pid", "w") pidfd.write(str(os.getpid()) + "\n") pidfd.close() except: pass try: activeaccounts = self.config.get("general", "accounts") if options.accounts: activeaccounts = options.accounts activeaccounts = activeaccounts.replace(" ", "") activeaccounts = activeaccounts.split(",") allaccounts = accounts.AccountHashGenerator(self.config) syncaccounts = [] for account in activeaccounts: if account not in allaccounts: if len(allaccounts) == 0: errormsg = "The account '%s' does not exist because no"\ " accounts are 
defined!" % account else: errormsg = "The account '%s' does not exist. Valid ac"\ "counts are: " % account errormsg += ", ".join(allaccounts.keys()) self.ui.terminate(1, errormsg = errormsg) if account not in syncaccounts: syncaccounts.append(account) def sig_handler(sig, frame): if sig == signal.SIGUSR1 or sig == signal.SIGHUP: # tell each account to stop sleeping accounts.Account.set_abort_event(self.config, 1) elif sig == signal.SIGUSR2: # tell each account to stop looping getglobalui().warn("Terminating after this sync...") accounts.Account.set_abort_event(self.config, 2) elif sig == signal.SIGTERM or sig == signal.SIGINT: # tell each account to ABORT ASAP (ctrl-c) getglobalui().warn("Terminating NOW (this may "\ "take a few seconds)...") accounts.Account.set_abort_event(self.config, 3) signal.signal(signal.SIGHUP,sig_handler) signal.signal(signal.SIGUSR1,sig_handler) signal.signal(signal.SIGUSR2,sig_handler) signal.signal(signal.SIGTERM, sig_handler) signal.signal(signal.SIGINT, sig_handler) #various initializations that need to be performed: offlineimap.mbnames.init(self.config, syncaccounts) if options.singlethreading: #singlethreaded self.sync_singlethreaded(syncaccounts) else: # multithreaded t = threadutil.ExitNotifyThread(target=syncmaster.syncitall, name='Sync Runner', kwargs = {'accounts': syncaccounts, 'config': self.config}) t.start() threadutil.exitnotifymonitorloop(threadutil.threadexited) self.ui.terminate() except (SystemExit): raise except Exception as e: self.ui.error(e) self.ui.terminate() def sync_singlethreaded(self, accs): """Executed if we do not want a separate syncmaster thread :param accs: A list of accounts that should be synced """ for accountname in accs: account = offlineimap.accounts.SyncableAccount(self.config, accountname) threading.currentThread().name = "Account sync %s" % accountname account.syncrunner() def serverdiagnostics(self, options): activeaccounts = self.config.get("general", "accounts") if options.accounts: activeaccounts = options.accounts activeaccounts = activeaccounts.split(",") allaccounts = accounts.AccountListGenerator(self.config) for account in allaccounts: if account.name not in activeaccounts: continue account.serverdiagnostics() spaetz-offlineimap-c9e9690/offlineimap/localeval.py000066400000000000000000000027411176237577200225130ustar00rootroot00000000000000"""Eval python code with global namespace of a python source file.""" # Copyright (C) 2002 John Goerzen # # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA import imp try: import errno except: pass class LocalEval: def __init__(self, path=None): self.namespace={} if path is not None: file=open(path, 'r') module=imp.load_module( '', file, path, ('', 'r', imp.PY_SOURCE)) for attr in dir(module): self.namespace[attr]=getattr(module, attr) def eval(self, text, namespace=None): names = {} names.update(self.namespace) if namespace is not None: names.update(namespace) return eval(text, names) spaetz-offlineimap-c9e9690/offlineimap/mbnames.py000066400000000000000000000052371176237577200221760ustar00rootroot00000000000000# Mailbox name generator # Copyright (C) 2002 John Goerzen # # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA import os.path import re # for folderfilter from threading import Lock boxes = {} config = None accounts = None mblock = Lock() def init(conf, accts): global config, accounts config = conf accounts = accts def add(accountname, foldername): if not accountname in boxes: boxes[accountname] = [] if not foldername in boxes[accountname]: boxes[accountname].append(foldername) def write(): # See if we're ready to write it out. 
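# Editor's note (descriptive comment added for clarity; the code below is
# unchanged): write() only proceeds once every account registered through
# init() has contributed at least one folder via add(); until then it
# returns without touching the output file, and genmbnames() then performs
# the actual lock-protected rendering of header, peritem, sep and footer.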
for account in accounts: if account not in boxes: return genmbnames() def genmbnames(): """Takes a configparser object and a boxlist, which is a list of hashes containing 'accountname' and 'foldername' keys.""" mblock.acquire() try: localeval = config.getlocaleval() if not config.getdefaultboolean("mbnames", "enabled", 0): return file = open(os.path.expanduser(config.get("mbnames", "filename")), "wt") file.write(localeval.eval(config.get("mbnames", "header"))) folderfilter = lambda accountname, foldername: 1 if config.has_option("mbnames", "folderfilter"): folderfilter = localeval.eval(config.get("mbnames", "folderfilter"), {'re': re}) itemlist = [] for accountname in boxes.keys(): for foldername in boxes[accountname]: if folderfilter(accountname, foldername): itemlist.append(config.get("mbnames", "peritem", raw=1) % \ {'accountname': accountname, 'foldername': foldername}) file.write(localeval.eval(config.get("mbnames", "sep")).join(itemlist)) file.write(localeval.eval(config.get("mbnames", "footer"))) file.close() finally: mblock.release() spaetz-offlineimap-c9e9690/offlineimap/repository/000077500000000000000000000000001176237577200224125ustar00rootroot00000000000000spaetz-offlineimap-c9e9690/offlineimap/repository/Base.py000066400000000000000000000246021176237577200236420ustar00rootroot00000000000000# Base repository support # Copyright (C) 2002-2012 John Goerzen & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
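#
# --- Illustrative note added by the editor; not part of the original source ---
# genmbnames() above renders one 'peritem' template per (account, folder)
# pair that passes 'folderfilter', joins the results with 'sep', and wraps
# them in 'header' and 'footer', all read from the [mbnames] config
# section. With hypothetical settings such as
#   header  = '"mailboxes "'
#   peritem = '"+%(foldername)s"'
#   sep     = '" "'
#   footer  = '"\n"'
# and invented folders INBOX and INBOX.Sent, the written file would
# contain a single muttrc-style line:
#   mailboxes +INBOX +INBOX.Sent
#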
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA import re import os.path import traceback from sys import exc_info from offlineimap import CustomConfig from offlineimap.ui import getglobalui from offlineimap.error import OfflineImapError class BaseRepository(CustomConfig.ConfigHelperMixin, object): def __init__(self, reposname, account): self.ui = getglobalui() self.account = account self.config = account.getconfig() self.name = reposname self.localeval = account.getlocaleval() self._accountname = self.account.getname() self._readonly = self.getconfboolean('readonly', False) self.uiddir = os.path.join(self.config.getmetadatadir(), 'Repository-' + self.name) if not os.path.exists(self.uiddir): os.mkdir(self.uiddir, 0o700) self.mapdir = os.path.join(self.uiddir, 'UIDMapping') if not os.path.exists(self.mapdir): os.mkdir(self.mapdir, 0o700) self.uiddir = os.path.join(self.uiddir, 'FolderValidity') if not os.path.exists(self.uiddir): os.mkdir(self.uiddir, 0o700) self.nametrans = lambda foldername: foldername self.folderfilter = lambda foldername: 1 self.folderincludes = [] self.foldersort = None if self.config.has_option(self.getsection(), 'nametrans'): self.nametrans = self.localeval.eval( self.getconf('nametrans'), {'re': re}) if self.config.has_option(self.getsection(), 'folderfilter'): self.folderfilter = self.localeval.eval( self.getconf('folderfilter'), {'re': re}) if self.config.has_option(self.getsection(), 'folderincludes'): self.folderincludes = self.localeval.eval( self.getconf('folderincludes'), {'re': re}) if self.config.has_option(self.getsection(), 'foldersort'): self.foldersort = self.localeval.eval( self.getconf('foldersort'), {'re': re}) def restore_atime(self): """Sets folders' atime back to their values after a sync Controlled by the 'restoreatime' config parameter (default False), applies only to local Maildir mailboxes and does nothing on all other repository types.""" pass def connect(self): """Establish a connection to the remote, if necessary. This exists so that IMAP connections can all be established up front, gathering passwords as needed. It was added in order to support the error recovery -- we need to connect first outside of the error trap in order to validate the password, and that's the point of this function.""" pass def holdordropconnections(self): pass def dropconnections(self): pass def getaccount(self): return self.account def getname(self): return self.name def __str__(self): return self.name @property def accountname(self): """Account name as string""" return self._accountname def getuiddir(self): return self.uiddir def getmapdir(self): return self.mapdir def getsection(self): return 'Repository ' + self.name def getconfig(self): return self.config @property def readonly(self): """Is the repository readonly?""" return self._readonly def getlocaleval(self): return self.account.getlocaleval() def getfolders(self): """Returns a list of ALL folders on this server.""" return [] def forgetfolders(self): """Forgets the cached list of folders, if any. Useful to run after a sync run.""" pass def getsep(self): raise NotImplementedError def get_create_folders(self): """Is folder creation enabled on this repository? 
It is disabled by either setting the whole repository 'readonly' or by using the 'createfolders' setting.""" return self._readonly or self.getconfboolean('createfolders', True) def makefolder(self, foldername): """Create a new folder""" raise NotImplementedError def deletefolder(self, foldername): raise NotImplementedError def getfolder(self, foldername): raise NotImplementedError def sync_folder_structure(self, dst_repo, status_repo): """Syncs the folders in this repository to those in dest. It does NOT sync the contents of those folders. nametrans rules in both directions will be honored, but there are NO checks yet that forward and backward nametrans actually match up! Configuring nametrans on BOTH repositories therefore could lead to infinite folder creation cycles.""" if not self.get_create_folders() and not dst_repo.get_create_folders(): # quick exit if no folder creation is enabled on either side. return src_repo = self src_folders = src_repo.getfolders() dst_folders = dst_repo.getfolders() # Do we need to refresh the folder list afterwards? src_haschanged, dst_haschanged = False, False # Create hashes with the names, but convert the source folders # to the dest folder's sep. src_hash = {} for folder in src_folders: src_hash[folder.getvisiblename().replace( src_repo.getsep(), dst_repo.getsep())] = folder dst_hash = {} for folder in dst_folders: dst_hash[folder.getvisiblename().replace( dst_repo.getsep(), src_repo.getsep())] = folder # Find new folders on src_repo. for src_name_t, src_folder in src_hash.iteritems(): # Don't create on dst_repo, if it is readonly if not dst_repo.get_create_folders(): break if src_folder.sync_this and not src_name_t in dst_folders: try: dst_repo.makefolder(src_name_t) dst_haschanged = True # Need to refresh list except OfflineImapError as e: self.ui.error(e, exc_info()[2], "Creating folder %s on repository %s" %\ (src_name_t, dst_repo)) raise status_repo.makefolder(src_name_t.replace(dst_repo.getsep(), status_repo.getsep())) # Find new folders on dst_repo. for dst_name_t, dst_folder in dst_hash.iteritems(): if not src_repo.get_create_folders(): # Don't create missing folder on readonly repo. break if dst_folder.sync_this and not dst_name_t in src_folders: # nametrans sanity check! # Does nametrans back&forth lead to identical names? # 1) would src repo filter out the new folder name? In this # case don't create it on it: if not self.folderfilter(dst_name_t): self.ui.debug('', "Not creating folder '%s' (repository '%s" "') as it would be filtered out on that repository." % (dst_name_t, self)) continue # get IMAPFolder and see if the reverse nametrans # works fine TODO: getfolder() works only because we # succeed in getting inexisting folders which I would # like to change. Take care! folder = self.getfolder(dst_name_t) # apply reverse nametrans to see if we end up with the same name newdst_name = folder.getvisiblename().replace( src_repo.getsep(), dst_repo.getsep()) if dst_folder.name != newdst_name: raise OfflineImapError("INFINITE FOLDER CREATION DETECTED! " "Folder '%s' (repository '%s') would be created as fold" "er '%s' (repository '%s'). The latter becomes '%s' in " "return, leading to infinite folder creation cycles.\n " "SOLUTION: 1) Do set your nametrans rules on both repos" "itories so they lead to identical names if applied bac" "k and forth. 2) Use folderfilter settings on a reposit" "ory to prevent some folders from being created on the " "other side." 
% (dst_folder.name, dst_repo, dst_name_t, src_repo, newdst_name), OfflineImapError.ERROR.REPO) # end sanity check, actually create the folder try: src_repo.makefolder(dst_name_t) src_haschanged = True # Need to refresh list except OfflineImapError as e: self.ui.error(e, exc_info()[2], "Creating folder %s on " "repository %s" % (dst_name_t, src_repo)) raise status_repo.makefolder(dst_name_t.replace( src_repo.getsep(), status_repo.getsep())) # Find deleted folders. # TODO: We don't delete folders right now. #Forget old list of cached folders so we get new ones if needed if src_haschanged: self.forgetfolders() if dst_haschanged: dst_repo.forgetfolders() def startkeepalive(self): """The default implementation will do nothing.""" pass def stopkeepalive(self): """Stop keep alive, but don't bother waiting for the threads to terminate.""" pass spaetz-offlineimap-c9e9690/offlineimap/repository/Gmail.py000066400000000000000000000052171176237577200240220ustar00rootroot00000000000000# Gmail IMAP repository support # Copyright (C) 2008 Riccardo Murri # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from offlineimap.repository.IMAP import IMAPRepository from offlineimap import folder, OfflineImapError class GmailRepository(IMAPRepository): """Gmail IMAP repository. Falls back to hard-coded gmail host name and port, if none were specified: http://mail.google.com/support/bin/answer.py?answer=78799&topic=12814 """ # Gmail IMAP server hostname HOSTNAME = "imap.gmail.com" # Gmail IMAP server port PORT = 993 def __init__(self, reposname, account): """Initialize a GmailRepository object.""" # Enforce SSL usage account.getconfig().set('Repository ' + reposname, 'ssl', 'yes') IMAPRepository.__init__(self, reposname, account) def gethost(self): """Return the server name to connect to. 
Gmail implementation first checks for the usual IMAP settings and falls back to imap.gmail.com if not specified.""" try: return super(GmailRepository, self).gethost() except OfflineImapError: # nothing was configured, cache and return hardcoded one self._host = GmailRepository.HOSTNAME return self._host def getport(self): return GmailRepository.PORT def getssl(self): return 1 def getpreauthtunnel(self): return None def getfolder(self, foldername): return self.getfoldertype()(self.imapserver, foldername, self) def getfoldertype(self): return folder.Gmail.GmailFolder def gettrashfolder(self, foldername): #: Where deleted mail should be moved return self.getconf('trashfolder','[Gmail]/Trash') def getspamfolder(self): #: Gmail also deletes messages upon EXPUNGE in the Spam folder return self.getconf('spamfolder','[Gmail]/Spam') spaetz-offlineimap-c9e9690/offlineimap/repository/IMAP.py000066400000000000000000000331271176237577200235200ustar00rootroot00000000000000# IMAP repository support # Copyright (C) 2002-2011 John Goerzen & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from offlineimap.repository.Base import BaseRepository from offlineimap import folder, imaputil, imapserver, OfflineImapError from offlineimap.folder.UIDMaps import MappedIMAPFolder from offlineimap.threadutil import ExitNotifyThread from threading import Event import os from sys import exc_info import netrc import errno class IMAPRepository(BaseRepository): def __init__(self, reposname, account): """Initialize an IMAPRepository object.""" BaseRepository.__init__(self, reposname, account) # self.ui is being set by the BaseRepository self._host = None self.imapserver = imapserver.IMAPServer(self) self.folders = None if self.getconf('sep', None): self.ui.info("The 'sep' setting is being ignored for IMAP " "repository '%s' (it's autodetected)" % self) def startkeepalive(self): keepalivetime = self.getkeepalive() if not keepalivetime: return self.kaevent = Event() self.kathread = ExitNotifyThread(target = self.imapserver.keepalive, name = "Keep alive " + self.getname(), args = (keepalivetime, self.kaevent)) self.kathread.setDaemon(1) self.kathread.start() def stopkeepalive(self): if not hasattr(self, 'kaevent'): # Keepalive is not active. 
return self.kaevent.set() del self.kathread del self.kaevent def holdordropconnections(self): if not self.getholdconnectionopen(): self.dropconnections() def dropconnections(self): self.imapserver.close() def getholdconnectionopen(self): if self.getidlefolders(): return 1 return self.getconfboolean("holdconnectionopen", 0) def getkeepalive(self): num = self.getconfint("keepalive", 0) if num == 0 and self.getidlefolders(): return 29*60 else: return num def getsep(self): """Return the folder separator for the IMAP repository This requires that self.imapserver has been initialized with an acquireconnection() or it will still be `None`""" assert self.imapserver.delim != None, "'%s' " \ "repository called getsep() before the folder separator was " \ "queried from the server" % self return self.imapserver.delim def gethost(self): """Return the configured hostname to connect to :returns: hostname as string or throws Exception""" if self._host: # use cached value if possible return self._host # 1) check for remotehosteval setting if self.config.has_option(self.getsection(), 'remotehosteval'): host = self.getconf('remotehosteval') try: host = self.localeval.eval(host) except Exception as e: raise OfflineImapError("remotehosteval option for repository "\ "'%s' failed:\n%s" % (self, e), OfflineImapError.ERROR.REPO) if host: self._host = host return self._host # 2) check for plain remotehost setting host = self.getconf('remotehost', None) if host != None: self._host = host return self._host # no success raise OfflineImapError("No remote host for repository "\ "'%s' specified." % self, OfflineImapError.ERROR.REPO) def getuser(self): user = None localeval = self.localeval if self.config.has_option(self.getsection(), 'remoteusereval'): user = self.getconf('remoteusereval') if user != None: return localeval.eval(user) user = self.getconf('remoteuser') if user != None: return user try: netrcentry = netrc.netrc().authenticators(self.gethost()) except IOError as inst: if inst.errno != errno.ENOENT: raise else: if netrcentry: return netrcentry[0] try: netrcentry = netrc.netrc('/etc/netrc').authenticators(self.gethost()) except IOError as inst: if inst.errno not in (errno.ENOENT, errno.EACCES): raise else: if netrcentry: return netrcentry[0] def getport(self): return self.getconfint('remoteport', None) def getssl(self): return self.getconfboolean('ssl', 0) def getsslclientcert(self): return self.getconf('sslclientcert', None) def getsslclientkey(self): return self.getconf('sslclientkey', None) def getsslcacertfile(self): """Return the absolute path of the CA certfile to use, if any""" cacertfile = self.getconf('sslcacertfile', None) if cacertfile is None: return None cacertfile = os.path.expanduser(cacertfile) cacertfile = os.path.abspath(cacertfile) if not os.path.isfile(cacertfile): raise SyntaxWarning("CA certfile for repository '%s' could " "not be found. No such file: '%s'" \ % (self.name, cacertfile)) return cacertfile def get_ssl_fingerprint(self): return self.getconf('cert_fingerprint', None) def getpreauthtunnel(self): return self.getconf('preauthtunnel', None) def getreference(self): return self.getconf('reference', '') def getidlefolders(self): localeval = self.localeval return localeval.eval(self.getconf('idlefolders', '[]')) def getmaxconnections(self): num1 = len(self.getidlefolders()) num2 = self.getconfint('maxconnections', 1) return max(num1, num2) def getexpunge(self): return self.getconfboolean('expunge', 1) def getpassword(self): """Return the IMAP password for this repository. 
It tries to get passwords in the following order: 1. evaluate Repository 'remotepasseval' 2. read password from Repository 'remotepass' 3. read password from file specified in Repository 'remotepassfile' 4. read password from ~/.netrc 5. read password from /etc/netrc On success we return the password. If all strategies fail we return None. """ # 1. evaluate Repository 'remotepasseval' passwd = self.getconf('remotepasseval', None) if passwd != None: return self.localeval.eval(passwd) # 2. read password from Repository 'remotepass' password = self.getconf('remotepass', None) if password != None: return password # 3. read password from file specified in Repository 'remotepassfile' passfile = self.getconf('remotepassfile', None) if passfile != None: fd = open(os.path.expanduser(passfile)) password = fd.readline().strip() fd.close() return password # 4. read password from ~/.netrc try: netrcentry = netrc.netrc().authenticators(self.gethost()) except IOError as inst: if inst.errno != errno.ENOENT: raise else: if netrcentry: user = self.getconf('remoteuser') if user == None or user == netrcentry[0]: return netrcentry[2] # 5. read password from /etc/netrc try: netrcentry = netrc.netrc('/etc/netrc').authenticators(self.gethost()) except IOError as inst: if inst.errno not in (errno.ENOENT, errno.EACCES): raise else: if netrcentry: user = self.getconf('remoteuser') if user == None or user == netrcentry[0]: return netrcentry[2] # no strategy yielded a password! return None def getfolder(self, foldername): return self.getfoldertype()(self.imapserver, foldername, self) def getfoldertype(self): return folder.IMAP.IMAPFolder def connect(self): imapobj = self.imapserver.acquireconnection() self.imapserver.releaseconnection(imapobj) def forgetfolders(self): self.folders = None def getfolders(self): if self.folders != None: return self.folders retval = [] imapobj = self.imapserver.acquireconnection() # check whether to list all folders, or subscribed only listfunction = imapobj.list if self.getconfboolean('subscribedonly', False): listfunction = imapobj.lsub try: listresult = listfunction(directory = self.imapserver.reference)[1] finally: self.imapserver.releaseconnection(imapobj) for string in listresult: if string == None or \ (isinstance(string, basestring) and string == ''): # Bug in imaplib: empty strings in results from # literals. TODO: still relevant? continue flags, delim, name = imaputil.imapsplit(string) flaglist = [x.lower() for x in imaputil.flagsplit(flags)] if '\\noselect' in flaglist: continue foldername = imaputil.dequote(name) retval.append(self.getfoldertype()(self.imapserver, foldername, self)) # filter out the folder? if not self.folderfilter(foldername): self.ui.debug('imap', "Filtering out '%s'[%s] due to folderfilt" "er" % (foldername, self)) retval[-1].sync_this = False # Add all folderincludes if len(self.folderincludes): imapobj = self.imapserver.acquireconnection() try: for foldername in self.folderincludes: try: imapobj.select(foldername, readonly = True) except OfflineImapError as e: # couldn't select this folderinclude, so ignore folder. 
if e.severity > OfflineImapError.ERROR.FOLDER: raise self.ui.error(e, exc_info()[2], 'Invalid folderinclude:') continue retval.append(self.getfoldertype()(self.imapserver, foldername, self)) finally: self.imapserver.releaseconnection(imapobj) if self.foldersort is None: # default sorting by case insensitive transposed name retval.sort(key=lambda x: str.lower(x.getvisiblename())) else: # do foldersort in a python3-compatible way # http://bytes.com/topic/python/answers/844614-python-3-sorting-comparison-function def cmp2key(mycmp): """Converts a cmp= function into a key= function We need to keep cmp functions for backward compatibility""" class K: def __init__(self, obj, *args): self.obj = obj def __cmp__(self, other): return mycmp(self.obj, other.obj) return K retval.sort(key=cmp2key(self.foldersort)) self.folders = retval return self.folders def makefolder(self, foldername): """Create a folder on the IMAP server This will not update the list cached in :meth:`getfolders`. You will need to invoke :meth:`forgetfolders` to force new caching when you are done creating folders yourself. :param foldername: Full path of the folder to be created.""" if self.getreference(): foldername = self.getreference() + self.getsep() + foldername if not foldername: # Create top level folder as folder separator foldername = self.getsep() self.ui.makefolder(self, foldername) if self.account.dryrun: return imapobj = self.imapserver.acquireconnection() try: result = imapobj.create(foldername) if result[0] != 'OK': raise OfflineImapError("Folder '%s'[%s] could not be created. " "Server responded: %s" % \ (foldername, self, str(result)), OfflineImapError.ERROR.FOLDER) finally: self.imapserver.releaseconnection(imapobj) class MappedIMAPRepository(IMAPRepository): def getfoldertype(self): return MappedIMAPFolder spaetz-offlineimap-c9e9690/offlineimap/repository/LocalStatus.py000066400000000000000000000075441176237577200252340ustar00rootroot00000000000000# Local status cache repository support # Copyright (C) 2002 John Goerzen # # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
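# ----------------------------------------------------------------------
# Illustrative aside (not part of LocalStatus.py): the cmp2key() helper in
# IMAPRepository.getfolders() above wraps a user-supplied cmp-style
# 'foldersort' function so it can be passed as sort(key=...), which is the
# only form Python 3 accepts.  A minimal standalone sketch of the same idea
# follows -- the names below are examples only; the stdlib equivalent is
# functools.cmp_to_key (Python 2.7+ / 3.2+).
def cmp2key(mycmp):
    class K(object):
        def __init__(self, obj, *args):
            self.obj = obj
        def __lt__(self, other):           # consulted by sort() on Python 3
            return mycmp(self.obj, other.obj) < 0
        def __cmp__(self, other):          # consulted by sort() on Python 2
            return mycmp(self.obj, other.obj)
    return K

def by_length_then_name(a, b):
    # example cmp-style comparator: shorter folder names first
    return (len(a) - len(b)) or ((a > b) - (a < b))

folders = ['INBOX', 'Sent', 'Drafts', 'Archive']
folders.sort(key=cmp2key(by_length_then_name))
# -> ['Sent', 'INBOX', 'Drafts', 'Archive']
# ----------------------------------------------------------------------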
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from offlineimap.folder.LocalStatus import LocalStatusFolder, magicline from offlineimap.folder.LocalStatusSQLite import LocalStatusSQLiteFolder from offlineimap.repository.Base import BaseRepository import os import re class LocalStatusRepository(BaseRepository): def __init__(self, reposname, account): BaseRepository.__init__(self, reposname, account) # Root directory in which the LocalStatus folders reside self.root = os.path.join(account.getaccountmeta(), 'LocalStatus') # statusbackend can be 'plain' or 'sqlite' backend = self.account.getconf('status_backend', 'plain') if backend == 'sqlite': self._backend = 'sqlite' self.LocalStatusFolderClass = LocalStatusSQLiteFolder self.root += '-sqlite' elif backend == 'plain': self._backend = 'plain' self.LocalStatusFolderClass = LocalStatusFolder else: raise SyntaxWarning("Unknown status_backend '%s' for account '%s'" \ % (backend, account.name)) if not os.path.exists(self.root): os.mkdir(self.root, 0o700) # self._folders is a list of LocalStatusFolders() self._folders = None def getsep(self): return '.' def getfolderfilename(self, foldername): """Return the full path of the status file This mimics the path that Folder().getfolderbasename() would return""" if not foldername: basename = '.' else: #avoid directory hierarchies and file names such as '/' basename = foldername.replace('/', '.') # replace with literal 'dot' if final path name is '.' as '.' is # an invalid file name. basename = re.sub('(^|\/)\.$','\\1dot', basename) return os.path.join(self.root, basename) def makefolder(self, foldername): """Create a LocalStatus Folder Empty Folder for plain backend. NoOp for sqlite backend as those are created on demand.""" if self._backend == 'sqlite': return # noop for sqlite which creates on-demand if self.account.dryrun: return # bail out in dry-run mode filename = self.getfolderfilename(foldername) file = open(filename + ".tmp", "wt") file.write(magicline + '\n') file.close() os.rename(filename + ".tmp", filename) # Invalidate the cache. self._folders = None def getfolder(self, foldername): """Return the Folder() object for a foldername""" return self.LocalStatusFolderClass(foldername, self) def getfolders(self): """Returns a list of all cached folders.""" if self._folders != None: return self._folders self._folders = [] for folder in os.listdir(self.root): self._folders.append(self.getfolder(folder)) return self._folders def forgetfolders(self): """Forgets the cached list of folders, if any. Useful to run after a sync run.""" self._folders = None spaetz-offlineimap-c9e9690/offlineimap/repository/Maildir.py000066400000000000000000000213141176237577200243460ustar00rootroot00000000000000# Maildir repository support # Copyright (C) 2002 John Goerzen # # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from offlineimap import folder from offlineimap.ui import getglobalui from offlineimap.error import OfflineImapError from offlineimap.repository.Base import BaseRepository import os from stat import * class MaildirRepository(BaseRepository): def __init__(self, reposname, account): """Initialize a MaildirRepository object. Takes a path name to the directory holding all the Maildir directories.""" BaseRepository.__init__(self, reposname, account) self.root = self.getlocalroot() self.folders = None self.ui = getglobalui() self.debug("MaildirRepository initialized, sep is " + repr(self.getsep())) self.folder_atimes = [] # Create the top-level folder if it doesn't exist if not os.path.isdir(self.root): os.mkdir(self.root, 0o700) def _append_folder_atimes(self, foldername): """Store the atimes of a folder's new|cur in self.folder_atimes""" p = os.path.join(self.root, foldername) new = os.path.join(p, 'new') cur = os.path.join(p, 'cur') atimes = (p, os.path.getatime(new), os.path.getatime(cur)) self.folder_atimes.append(atimes) def restore_atime(self): """Sets folders' atime back to their values after a sync Controlled by the 'restoreatime' config parameter.""" if not self.getconfboolean('restoreatime', False): return # not configured to restore for (dirpath, new_atime, cur_atime) in self.folder_atimes: new_dir = os.path.join(dirpath, 'new') cur_dir = os.path.join(dirpath, 'cur') os.utime(new_dir, (new_atime, os.path.getmtime(new_dir))) os.utime(cur_dir, (cur_atime, os.path.getmtime(cur_dir))) def getlocalroot(self): return os.path.expanduser(self.getconf('localfolders')) def debug(self, msg): self.ui.debug('maildir', msg) def getsep(self): return self.getconf('sep', '.').strip() def makefolder(self, foldername): """Create new Maildir folder if necessary This will not update the list cached in getfolders(). You will need to invoke :meth:`forgetfolders` to force new caching when you are done creating folders yourself. :param foldername: A relative mailbox name. The maildir will be created in self.root+'/'+foldername. All intermediate folder levels will be created if they do not exist yet. 'cur', 'tmp', and 'new' subfolders will be created in the maildir. """ self.ui.makefolder(self, foldername) if self.account.dryrun: return full_path = os.path.abspath(os.path.join(self.root, foldername)) # sanity tests if self.getsep() == '/': for component in foldername.split('/'): assert not component in ['new', 'cur', 'tmp'],\ "When using nested folders (/ as a Maildir separator), "\ "folder names may not contain 'new', 'cur', 'tmp'." assert foldername.find('../') == -1, "Folder names may not contain ../" assert not foldername.startswith('/'), "Folder names may not begin with /" # If we're using hierarchical folders, it's possible that # sub-folders may be created before higher-up ones. 
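# ----------------------------------------------------------------------
# Illustrative aside: the code just below creates the standard Maildir
# layout -- the folder itself plus 'cur', 'new' and 'tmp', each mode 0700,
# tolerating directories that already exist.  A self-contained sketch of
# that layout; make_maildir is a hypothetical helper, not offlineimap API.
import errno
import os

def make_maildir(path):
    dirs = [path] + [os.path.join(path, sub) for sub in ('cur', 'new', 'tmp')]
    for d in dirs:
        try:
            os.makedirs(d, 0o700)
        except OSError as e:
            # errno.EEXIST is the symbolic name for the literal 17 used below
            if e.errno != errno.EEXIST or not os.path.isdir(d):
                raise

# make_maildir('/tmp/Mail/example-folder')   # example invocation
# ----------------------------------------------------------------------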
self.debug("makefolder: calling makedirs '%s'" % full_path) try: os.makedirs(full_path, 0o700) except OSError as e: if e.errno == 17 and os.path.isdir(full_path): self.debug("makefolder: '%s' already a directory" % foldername) else: raise for subdir in ['cur', 'new', 'tmp']: try: os.mkdir(os.path.join(full_path, subdir), 0o700) except OSError as e: if e.errno == 17 and os.path.isdir(full_path): self.debug("makefolder: '%s' already has subdir %s" % (foldername, subdir)) else: raise def deletefolder(self, foldername): self.ui.warn("NOT YET IMPLEMENTED: DELETE FOLDER %s" % foldername) def getfolder(self, foldername): """Return a Folder instance of this Maildir If necessary, scan and cache all foldernames to make sure that we only return existing folders and that 2 calls with the same name will return the same object.""" # getfolders() will scan and cache the values *if* necessary folders = self.getfolders() for folder in folders: if foldername == folder.name: return folder raise OfflineImapError("getfolder() asked for a nonexisting " "folder '%s'." % foldername, OfflineImapError.ERROR.FOLDER) def _getfolders_scandir(self, root, extension = None): """Recursively scan folder 'root'; return a list of MailDirFolder :param root: (absolute) path to Maildir root :param extension: (relative) subfolder to examine within root""" self.debug("_GETFOLDERS_SCANDIR STARTING. root = %s, extension = %s" \ % (root, extension)) retval = [] # Configure the full path to this repository -- "toppath" if extension: toppath = os.path.join(root, extension) else: toppath = root self.debug(" toppath = %s" % toppath) # Iterate over directories in top & top itself. for dirname in os.listdir(toppath) + ['']: self.debug(" dirname = %s" % dirname) if dirname == '' and extension is not None: self.debug(' skip this entry (already scanned)') continue if dirname in ['cur', 'new', 'tmp']: self.debug(" skip this entry (Maildir special)") # Bypass special files. continue fullname = os.path.join(toppath, dirname) if not os.path.isdir(fullname): self.debug(" skip this entry (not a directory)") # Not a directory -- not a folder. continue if extension: # extension can be None which fails. foldername = os.path.join(extension, dirname) else: foldername = dirname if (os.path.isdir(os.path.join(fullname, 'cur')) and os.path.isdir(os.path.join(fullname, 'new')) and os.path.isdir(os.path.join(fullname, 'tmp'))): # This directory has maildir stuff -- process self.debug(" This is maildir folder '%s'." % foldername) if self.getconfboolean('restoreatime', False): self._append_folder_atimes(foldername) retval.append(folder.Maildir.MaildirFolder(self.root, foldername, self.getsep(), self)) # filter out the folder? if not self.folderfilter(foldername): self.debug("Filtering out '%s'[%s] due to folderfilt" "er" % (foldername, self)) retval[-1].sync_this = False if self.getsep() == '/' and dirname != '': # Recursively check sub-directories for folders too. retval.extend(self._getfolders_scandir(root, foldername)) self.debug("_GETFOLDERS_SCANDIR RETURNING %s" % \ repr([x.getname() for x in retval])) return retval def getfolders(self): if self.folders == None: self.folders = self._getfolders_scandir(self.root) return self.folders def forgetfolders(self): """Forgets the cached list of folders, if any. 
Useful to run after a sync run.""" self.folders = None spaetz-offlineimap-c9e9690/offlineimap/repository/__init__.py000066400000000000000000000055151176237577200245310ustar00rootroot00000000000000# Copyright (C) 2002-2007 John Goerzen # 2010 Sebastian Spaeth and contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from offlineimap.repository.IMAP import IMAPRepository, MappedIMAPRepository from offlineimap.repository.Gmail import GmailRepository from offlineimap.repository.Maildir import MaildirRepository from offlineimap.repository.LocalStatus import LocalStatusRepository class Repository(object): """Abstract class that returns the correct Repository type instance based on 'account' and 'reqtype', e.g. a class:`ImapRepository` instance.""" def __new__(cls, account, reqtype): """ :param account: :class:`Account` :param regtype: 'remote', 'local', or 'status'""" if reqtype == 'remote': name = account.getconf('remoterepository') # We don't support Maildirs on the remote side. typemap = {'IMAP': IMAPRepository, 'Gmail': GmailRepository} elif reqtype == 'local': name = account.getconf('localrepository') typemap = {'IMAP': MappedIMAPRepository, 'Maildir': MaildirRepository} elif reqtype == 'status': # create and return a LocalStatusRepository name = account.getconf('localrepository') return LocalStatusRepository(name, account) else: raise ValueError("Request type %s not supported" % reqtype) config = account.getconfig() repostype = config.get('Repository ' + name, 'type').strip() try: repo = typemap[repostype] except KeyError: raise ValueError("'%s' repository not supported for %s repositories" "." % (repostype, reqtype)) return repo(name, account) def __init__(self, account, reqtype): """Load the correct Repository type and return that. The __init__ of the corresponding Repository class will be executed instead of this stub :param account: :class:`Account` :param regtype: 'remote', 'local', or 'status' """ pass spaetz-offlineimap-c9e9690/offlineimap/syncmaster.py000066400000000000000000000032571176237577200227440ustar00rootroot00000000000000# OfflineIMAP synchronization master code # Copyright (C) 2002-2007 John Goerzen # # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
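# ----------------------------------------------------------------------
# Illustrative aside (not part of syncmaster.py): the Repository class in
# repository/__init__.py above dispatches in __new__, returning an instance
# of a concrete class picked from a type map; because the returned object is
# not a Repository instance, Python skips Repository.__init__.  A minimal
# standalone sketch of that pattern -- all names here are examples only.
class _ImapLike(object):
    def __init__(self, name):
        self.name = name

class _MaildirLike(object):
    def __init__(self, name):
        self.name = name

class RepoFactory(object):
    typemap = {'IMAP': _ImapLike, 'Maildir': _MaildirLike}

    def __new__(cls, repostype, name):
        try:
            concrete = cls.typemap[repostype]
        except KeyError:
            raise ValueError("'%s' repository type not supported" % repostype)
        return concrete(name)   # not a RepoFactory, so its __init__ is skipped

repo = RepoFactory('IMAP', 'example-remote')
# type(repo).__name__ == '_ImapLike', repo.name == 'example-remote'
# ----------------------------------------------------------------------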
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from offlineimap.threadutil import threadlist, InstanceLimitedThread from offlineimap.accounts import SyncableAccount from threading import currentThread def syncaccount(threads, config, accountname): account = SyncableAccount(config, accountname) thread = InstanceLimitedThread(instancename = 'ACCOUNTLIMIT', target = account.syncrunner, name = "Account sync %s" % accountname) thread.setDaemon(True) thread.start() threads.add(thread) def syncitall(accounts, config): # Special exit message for SyncRunner thread, so main thread can exit currentThread().exit_message = 'SYNCRUNNER_EXITED_NORMALLY' threads = threadlist() for accountname in accounts: syncaccount(threads, config, accountname) # Wait for the threads to finish. threads.reset() spaetz-offlineimap-c9e9690/offlineimap/threadutil.py000066400000000000000000000201201176237577200227050ustar00rootroot00000000000000# Copyright (C) 2002-2012 John Goerzen & contributors # Thread support module # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from threading import Lock, Thread, BoundedSemaphore, currentThread try: from Queue import Queue, Empty except ImportError: # python3 from queue import Queue, Empty import traceback import os.path import sys from offlineimap.ui import getglobalui ###################################################################### # General utilities ###################################################################### def semaphorereset(semaphore, originalstate): """Block until `semaphore` gets back to its original state, ie all acquired resources have been released.""" for i in range(originalstate): semaphore.acquire() # Now release these. for i in range(originalstate): semaphore.release() class threadlist: def __init__(self): self.lock = Lock() self.list = [] def add(self, thread): self.lock.acquire() try: self.list.append(thread) finally: self.lock.release() def remove(self, thread): self.lock.acquire() try: self.list.remove(thread) finally: self.lock.release() def pop(self): self.lock.acquire() try: if not len(self.list): return None return self.list.pop() finally: self.lock.release() def reset(self): while 1: thread = self.pop() if not thread: return thread.join() ###################################################################### # Exit-notify threads ###################################################################### exitthreads = Queue(100) def exitnotifymonitorloop(callback): """An infinite "monitoring" loop watching for finished ExitNotifyThread's. This one is supposed to run in the main thread. :param callback: the function to call when a thread terminated. That function is called with a single argument -- the ExitNotifyThread that has terminated. 
The monitor will not continue to monitor for other threads until 'callback' returns, so if it intends to perform long calculations, it should start a new thread itself -- but NOT an ExitNotifyThread, or else an infinite loop may result. Furthermore, the monitor will hold the lock all the while the other thread is waiting. :type callback: a callable function """ global exitthreads do_loop = True while do_loop: # Loop forever and call 'callback' for each thread that exited try: # we need a timeout in the get() call, so that ctrl-c can throw # a SIGINT (http://bugs.python.org/issue1360). A timeout with empty # Queue will raise `Empty`. thrd = exitthreads.get(True, 60) # request to abort when callback returns true do_loop = (callback(thrd) != True) except Empty: pass def threadexited(thread): """Called when a thread exits. Main thread is aborted when this returns True.""" ui = getglobalui() if thread.exit_exception: if isinstance(thread.exit_exception, SystemExit): # Bring a SystemExit into the main thread. # Do not send it back to UI layer right now. # Maybe later send it to ui.terminate? raise SystemExit ui.threadException(thread) # Expected to terminate sys.exit(100) # Just in case... elif thread.exit_message == 'SYNCRUNNER_EXITED_NORMALLY': return True else: ui.threadExited(thread) return False class ExitNotifyThread(Thread): """This class is designed to alert a "monitor" to the fact that a thread has exited and to provide for the ability for it to find out why. All instances are made daemon threads (setDaemon(True), so we bail out when the mainloop dies. The thread can set instance variables self.exit_message for a human readable reason of the thread exit.""" profiledir = None """class variable that is set to the profile directory if required""" def __init__(self, *args, **kwargs): super(ExitNotifyThread, self).__init__(*args, **kwargs) # These are all child threads that are supposed to go away when # the main thread is killed. 
self.setDaemon(True) self.exit_message = None self._exit_exc = None self._exit_stacktrace = None def run(self): global exitthreads try: if not ExitNotifyThread.profiledir: # normal case Thread.run(self) else: try: import cProfile as profile except ImportError: import profile prof = profile.Profile() try: prof = prof.runctx("Thread.run(self)", globals(), locals()) except SystemExit: pass prof.dump_stats(os.path.join(ExitNotifyThread.profiledir, "%s_%s.prof" % (self.ident, self.getName()))) except Exception as e: # Thread exited with Exception, store it tb = traceback.format_exc() self.set_exit_exception(e, tb) if exitthreads: exitthreads.put(self, True) def set_exit_exception(self, exc, st=None): """Sets Exception and stacktrace of a thread, so that other threads can query its exit status""" self._exit_exc = exc self._exit_stacktrace = st @property def exit_exception(self): """Returns the cause of the exit, one of: Exception() -- the thread aborted with this exception None -- normal termination.""" return self._exit_exc @property def exit_stacktrace(self): """Returns a string representing the stack trace if set""" return self._exit_stacktrace @classmethod def set_profiledir(cls, directory): """If set, will output profile information to 'directory'""" cls.profiledir = directory ###################################################################### # Instance-limited threads ###################################################################### instancelimitedsems = {} instancelimitedlock = Lock() def initInstanceLimit(instancename, instancemax): """Initialize the instance-limited thread implementation to permit up to intancemax threads with the given instancename.""" instancelimitedlock.acquire() if not instancename in instancelimitedsems: instancelimitedsems[instancename] = BoundedSemaphore(instancemax) instancelimitedlock.release() class InstanceLimitedThread(ExitNotifyThread): def __init__(self, instancename, *args, **kwargs): self.instancename = instancename super(InstanceLimitedThread, self).__init__(*args, **kwargs) def start(self): instancelimitedsems[self.instancename].acquire() ExitNotifyThread.start(self) def run(self): try: ExitNotifyThread.run(self) finally: if instancelimitedsems and instancelimitedsems[self.instancename]: instancelimitedsems[self.instancename].release() spaetz-offlineimap-c9e9690/offlineimap/ui/000077500000000000000000000000001176237577200206105ustar00rootroot00000000000000spaetz-offlineimap-c9e9690/offlineimap/ui/Curses.py000066400000000000000000000550501176237577200224330ustar00rootroot00000000000000# Curses-based interfaces # Copyright (C) 2003-2011 John Goerzen & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
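# ----------------------------------------------------------------------
# Illustrative aside (not part of Curses.py): initInstanceLimit() and
# InstanceLimitedThread above cap how many threads of a given kind run at
# once by guarding start() with a named BoundedSemaphore that run() releases
# on exit.  A self-contained sketch of the same mechanism; the names are
# examples, not offlineimap API.
import threading

_limits = {}

def init_limit(name, maxcount):
    if name not in _limits:
        _limits[name] = threading.BoundedSemaphore(maxcount)

class LimitedThread(threading.Thread):
    def __init__(self, limitname, *args, **kwargs):
        self.limitname = limitname
        threading.Thread.__init__(self, *args, **kwargs)

    def start(self):
        _limits[self.limitname].acquire()   # blocks while the cap is reached
        threading.Thread.start(self)

    def run(self):
        try:
            threading.Thread.run(self)
        finally:
            _limits[self.limitname].release()

init_limit('ACCOUNTLIMIT', 2)               # e.g. at most 2 concurrent syncs
workers = [LimitedThread('ACCOUNTLIMIT', target=lambda: None)
           for _ in range(4)]
for w in workers:
    w.start()
for w in workers:
    w.join()
# ----------------------------------------------------------------------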
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from threading import RLock, currentThread, Lock, Event from collections import deque import time import sys import os import signal import curses import logging from offlineimap.ui.UIBase import UIBase from offlineimap.threadutil import ExitNotifyThread import offlineimap class CursesUtil: def __init__(self, *args, **kwargs): # iolock protects access to the self.iolock = RLock() self.tframe_lock = RLock() """tframe_lock protects the self.threadframes manipulation to only happen from 1 thread""" self.colormap = {} """dict, translating color string to curses color pair number""" def curses_colorpair(self, col_name): """Return the curses color pair, that corresponds to the color""" return curses.color_pair(self.colormap[col_name]) def init_colorpairs(self): """initialize the curses color pairs available""" # set special colors 'gray' and 'banner' self.colormap['white'] = 0 #hardcoded by curses curses.init_pair(1, curses.COLOR_WHITE, curses.COLOR_BLUE) self.colormap['banner'] = 1 # color 'banner' for bannerwin bcol = curses.COLOR_BLACK colors = ( # name, color, bold? ('black', curses.COLOR_BLACK, False), ('blue', curses.COLOR_BLUE,False), ('red', curses.COLOR_RED, False), ('purple', curses.COLOR_MAGENTA, False), ('cyan', curses.COLOR_CYAN, False), ('green', curses.COLOR_GREEN, False), ('orange', curses.COLOR_YELLOW, False)) #set the rest of all colors starting at pair 2 i = 1 for name, fcol, bold in colors: i += 1 self.colormap[name] = i curses.init_pair(i, fcol, bcol) def lock(self, block=True): """Locks the Curses ui thread Can be invoked multiple times from the owning thread. Invoking from a non-owning thread blocks and waits until it has been unlocked by the owning thread.""" return self.iolock.acquire(block) def unlock(self): """Unlocks the Curses ui thread Decrease the lock counter by one and unlock the ui thread if the counter reaches 0. Only call this method when the calling thread owns the lock. 
A RuntimeError is raised if this method is called when the lock is unlocked.""" self.iolock.release() def exec_locked(self, target, *args, **kwargs): """Perform an operation with full locking.""" self.lock() try: target(*args, **kwargs) finally: self.unlock() def refresh(self): def lockedstuff(): curses.panel.update_panels() curses.doupdate() self.exec_locked(lockedstuff) def isactive(self): return hasattr(self, 'stdscr') class CursesAccountFrame: """Notable instance variables: - account: corresponding Account() - children - ui - key - window: curses window associated with an account """ def __init__(self, ui, account): """ :param account: An Account() or None (for eg SyncrunnerThread)""" self.children = [] self.account = account if account else '*Control' self.ui = ui self.window = None """Curses window associated with this acc""" self.acc_num = None """Account number (& hotkey) associated with this acc""" self.location = 0 """length of the account prefix string""" def drawleadstr(self, secs = 0): """Draw the account status string secs tells us how long we are going to sleep.""" sleepstr = '%3d:%02d' % (secs // 60, secs % 60) if secs else 'active' accstr = '%s: [%s] %12.12s: ' % (self.acc_num, sleepstr, self.account) self.ui.exec_locked(self.window.addstr, 0, 0, accstr) self.location = len(accstr) def setwindow(self, curses_win, acc_num): """Register an curses win and a hotkey as Account window :param curses_win: the curses window associated with an account :param acc_num: int denoting the hotkey associated with this account.""" self.window = curses_win self.acc_num = acc_num self.drawleadstr() # Update the child ThreadFrames for child in self.children: child.update(curses_win, self.location, 0) self.location += 1 def get_new_tframe(self): """Create a new ThreadFrame and append it to self.children :returns: The new ThreadFrame""" tf = CursesThreadFrame(self.ui, self.window, self.location, 0) self.location += 1 self.children.append(tf) return tf def sleeping(self, sleepsecs, remainingsecs): """show how long we are going to sleep and sleep :returns: Boolean, whether we want to abort the sleep""" self.drawleadstr(remainingsecs) self.ui.exec_locked(self.window.refresh) time.sleep(sleepsecs) return self.account.get_abort_event() def syncnow(self): """Request that we stop sleeping asap and continue to sync""" # if this belongs to an Account (and not *Control), set the # skipsleep pref if isinstance(self.account, offlineimap.accounts.Account): self.ui.info("Requested synchronization for acc: %s" % self.account) self.account.config.set('Account %s' % self.account.name, 'skipsleep', '1') class CursesThreadFrame: """ curses_color: current color pair for logging""" def __init__(self, ui, acc_win, x, y): """ :param ui: is a Blinkenlights() instance :param acc_win: curses Account window""" self.ui = ui self.window = acc_win self.x = x self.y = y self.curses_color = curses.color_pair(0) #default color def setcolor(self, color, modifier=0): """Draw the thread symbol '@' in the specified color :param modifier: Curses modified, such as curses.A_BOLD""" self.curses_color = modifier | self.ui.curses_colorpair(color) self.colorname = color self.display() def display(self): def locked_display(): self.window.addch(self.y, self.x, '@', self.curses_color) self.window.refresh() # lock the curses IO while fudging stuff self.ui.exec_locked(locked_display) def update(self, acc_win, x, y): """Update the xy position of the '.' 
(and possibly the aframe)""" self.window = acc_win self.y = y self.x = x self.display() def std_color(self): self.setcolor('black') class InputHandler(ExitNotifyThread): """Listens for input via the curses interfaces""" #TODO, we need to use the ugly exitnotifythread (rather than simply #threading.Thread here, so exiting this thread via the callback #handler, kills off all parents too. Otherwise, they would simply #continue. def __init__(self, ui): super(InputHandler, self).__init__() self.char_handler = None self.ui = ui self.enabled = Event() """We will only parse input if we are enabled""" self.inputlock = RLock() """denotes whether we should be handling the next char.""" self.start() #automatically start the thread def get_next_char(self): """return the key pressed or -1 Wait until `enabled` and loop internally every stdscr.timeout() msecs, releasing the inputlock. :returns: char or None if disabled while in here""" self.enabled.wait() while self.enabled.is_set(): with self.inputlock: char = self.ui.stdscr.getch() if char != -1: yield char def run(self): while True: char_gen = self.get_next_char() for char in char_gen: self.char_handler(char) #curses.ungetch(char) def set_char_hdlr(self, callback): """Sets a character callback handler If a key is pressed it will be passed to this handler. Keys include the curses.KEY_RESIZE key. callback is a function taking a single arg -- the char pressed. If callback is None, input will be ignored.""" with self.inputlock: self.char_handler = callback # start or stop the parsing of things if callback is None: self.enabled.clear() else: self.enabled.set() def input_acquire(self): """Call this method when you want exclusive input control. Make sure to call input_release afterwards! While this lockis held, input can go to e.g. the getpass input. """ self.enabled.clear() self.inputlock.acquire() def input_release(self): """Call this method when you are done getting input.""" self.inputlock.release() self.enabled.set() class CursesLogHandler(logging.StreamHandler): """self.ui has been set to the UI class before anything is invoked""" def emit(self, record): log_str = logging.StreamHandler.format(self, record) color = self.ui.gettf().curses_color # We must acquire both locks. Otherwise, deadlock can result. # This can happen if one thread calls _msg (locking curses, then # tf) and another tries to set the color (locking tf, then curses) # # By locking both up-front here, in this order, we prevent deadlock. self.ui.tframe_lock.acquire() self.ui.lock() try: y,x = self.ui.logwin.getyx() if y or x: self.ui.logwin.addch(10) # no \n before 1st item self.ui.logwin.addstr(log_str, color) finally: self.ui.unlock() self.ui.tframe_lock.release() self.ui.logwin.noutrefresh() self.ui.stdscr.refresh() class Blinkenlights(UIBase, CursesUtil): """Curses-cased fancy UI Notable instance variables self. ....: - stdscr: THe curses std screen - bannerwin: The top line banner window - width|height: The total curses screen dimensions - logheight: Available height for the logging part - log_con_handler: The CursesLogHandler() - threadframes: - accframes[account]: 'Accountframe'""" def __init__(self, *args, **kwargs): super(Blinkenlights, self).__init__(*args, **kwargs) CursesUtil.__init__(self) ################################################## UTILS def setup_consolehandler(self): """Backend specific console handler Sets up things and adds them to self.logger. 
:returns: The logging.Handler() for console output""" # create console handler with a higher log level ch = CursesLogHandler() #ch.setLevel(logging.DEBUG) # create formatter and add it to the handlers self.formatter = logging.Formatter("%(message)s") ch.setFormatter(self.formatter) # add the handlers to the logger self.logger.addHandler(ch) # the handler is not usable yet. We still need all the # intialization stuff currently done in init_banner. Move here? return ch def isusable(s): """Returns true if the backend is usable ie Curses works""" # Not a terminal? Can't use curses. if not sys.stdout.isatty() and sys.stdin.isatty(): return False # No TERM specified? Can't use curses. if not os.environ.get('TERM', None): return False # Test if ncurses actually starts up fine. Only do so for # python>=2.6.6 as calling initscr() twice messing things up. # see http://bugs.python.org/issue7567 in python 2.6 to 2.6.5 if sys.version_info[0:3] < (2,6) or sys.version_info[0:3] >= (2,6,6): try: curses.initscr() curses.endwin() except: return False return True def init_banner(self): self.availablethreadframes = {} self.threadframes = {} self.accframes = {} self.aflock = Lock() self.stdscr = curses.initscr() # turn off automatic echoing of keys to the screen curses.noecho() # react to keys instantly, without Enter key curses.cbreak() # return special key values, eg curses.KEY_LEFT self.stdscr.keypad(1) # wait 1s for input, so we don't block the InputHandler infinitely self.stdscr.timeout(1000) curses.start_color() # turn off cursor and save original state self.oldcursor = None try: self.oldcursor = curses.curs_set(0) except: pass self.stdscr.clear() self.stdscr.refresh() self.init_colorpairs() # set log handlers ui to ourself self._log_con_handler.ui = self self.setupwindows() # Settup keyboard handler self.inputhandler = InputHandler(self) self.inputhandler.set_char_hdlr(self.on_keypressed) self.gettf().setcolor('red') self.info(offlineimap.banner) def acct(self, *args): """Output that we start syncing an account (and start counting)""" self.gettf().setcolor('purple') super(Blinkenlights, self).acct(*args) def connecting(self, *args): self.gettf().setcolor('white') super(Blinkenlights, self).connecting(*args) def syncfolders(self, *args): self.gettf().setcolor('blue') super(Blinkenlights, self).syncfolders(*args) def syncingfolder(self, *args): self.gettf().setcolor('cyan') super(Blinkenlights, self).syncingfolder(*args) def skippingfolder(self, *args): self.gettf().setcolor('cyan') super(Blinkenlights, self).skippingfolder(*args) def loadmessagelist(self, *args): self.gettf().setcolor('green') super(Blinkenlights, self).loadmessagelist(*args) def syncingmessages(self, *args): self.gettf().setcolor('blue') super(Blinkenlights, self).syncingmessages(*args) def copyingmessage(self, *args): self.gettf().setcolor('orange') super(Blinkenlights, self).copyingmessage(*args) def deletingmessages(self, *args): self.gettf().setcolor('red') super(Blinkenlights, self).deletingmessages(*args) def addingflags(self, *args): self.gettf().setcolor('blue') super(Blinkenlights, self).addingflags(*args) def deletingflags(self, *args): self.gettf().setcolor('blue') super(Blinkenlights, self).deletingflags(*args) def callhook(self, *args): self.gettf().setcolor('white') super(Blinkenlights, self).callhook(*args) ############ Generic logging functions ############################# def warn(self, msg, minor=0): self.gettf().setcolor('red', curses.A_BOLD) super(Blinkenlights, self).warn(msg) def threadExited(self, thread): acc = 
self.getthreadaccount(thread) with self.tframe_lock: if thread in self.threadframes[acc]: tf = self.threadframes[acc][thread] tf.setcolor('black') self.availablethreadframes[acc].append(tf) del self.threadframes[acc][thread] super(Blinkenlights, self).threadExited(thread) def gettf(self): """Return the ThreadFrame() of the current thread""" cur_thread = currentThread() acc = self.getthreadaccount() #Account() or None with self.tframe_lock: # Ideally we already have self.threadframes[accountname][thread] try: if cur_thread in self.threadframes[acc]: return self.threadframes[acc][cur_thread] except KeyError: # Ensure threadframes already has an account dict self.threadframes[acc] = {} self.availablethreadframes[acc] = deque() # If available, return a ThreadFrame() if len(self.availablethreadframes[acc]): tf = self.availablethreadframes[acc].popleft() tf.std_color() else: tf = self.getaccountframe(acc).get_new_tframe() self.threadframes[acc][cur_thread] = tf return tf def on_keypressed(self, key): # received special KEY_RESIZE, resize terminal if key == curses.KEY_RESIZE: self.resizeterm() if key < 1 or key > 255: return if chr(key) == 'q': # Request to quit completely. self.warn("Requested shutdown via 'q'") offlineimap.accounts.Account.set_abort_event(self.config, 3) try: index = int(chr(key)) except ValueError: return # Key not a valid number: exit. if index >= len(self.hotkeys): # Not in our list of valid hotkeys. return # Trying to end sleep somewhere. self.getaccountframe(self.hotkeys[index]).syncnow() def sleep(self, sleepsecs, account): self.gettf().setcolor('red') self.info("Next sync in %d:%02d" % (sleepsecs / 60, sleepsecs % 60)) return super(Blinkenlights, self).sleep(sleepsecs, account) def sleeping(self, sleepsecs, remainingsecs): if not sleepsecs: # reset color to default if we are done sleeping. self.gettf().setcolor('white') accframe = self.getaccountframe(self.getthreadaccount()) return accframe.sleeping(sleepsecs, remainingsecs) def resizeterm(self): """Resize the current windows""" self.exec_locked(self.setupwindows, True) def mainException(self): UIBase.mainException(self) def getpass(self, accountname, config, errmsg = None): # disable the hotkeys inputhandler self.inputhandler.input_acquire() # See comment on _msg for info on why both locks are obtained. self.lock() try: #s.gettf().setcolor('white') self.warn(" *** Input Required") self.warn(" *** Please enter password for account %s: " % \ accountname) self.logwin.refresh() password = self.logwin.getstr() finally: self.unlock() self.inputhandler.input_release() return password def setupwindows(self, resize=False): """Setup and draw bannerwin and logwin If `resize`, don't create new windows, just adapt size. 
This function should be invoked with CursesUtils.locked().""" self.height, self.width = self.stdscr.getmaxyx() self.logheight = self.height - len(self.accframes) - 1 if resize: curses.resizeterm(self.height, self.width) self.bannerwin.resize(1, self.width) self.logwin.resize(self.logheight, self.width) else: self.bannerwin = curses.newwin(1, self.width, 0, 0) self.logwin = curses.newwin(self.logheight, self.width, 1, 0) self.draw_bannerwin() self.logwin.idlok(True) # needed for scrollok below self.logwin.scrollok(True) # scroll window when too many lines added self.draw_logwin() self.accounts = reversed(sorted(self.accframes.keys())) pos = self.height - 1 index = 0 self.hotkeys = [] for account in self.accounts: acc_win = curses.newwin(1, self.width, pos, 0) self.accframes[account].setwindow(acc_win, index) self.hotkeys.append(account) index += 1 pos -= 1 curses.doupdate() def draw_bannerwin(self): """Draw the top-line banner line""" if curses.has_colors(): color = curses.A_BOLD | self.curses_colorpair('banner') else: color = curses.A_REVERSE self.bannerwin.clear() # Delete old content (eg before resizes) self.bannerwin.bkgd(' ', color) # Fill background with that color string = "%s %s" % (offlineimap.__productname__, offlineimap.__version__) self.bannerwin.addstr(0, 0, string, color) self.bannerwin.addstr(0, self.width -len(offlineimap.__copyright__) -1, offlineimap.__copyright__, color) self.bannerwin.noutrefresh() def draw_logwin(self): """(Re)draw the current logwindow""" if curses.has_colors(): color = curses.color_pair(0) #default colors else: color = curses.A_NORMAL self.logwin.move(0, 0) self.logwin.erase() self.logwin.bkgd(' ', color) def getaccountframe(self, acc_name): """Return an AccountFrame() corresponding to acc_name Note that the *control thread uses acc_name `None`.""" with self.aflock: # 1) Return existing or 2) create a new CursesAccountFrame. if acc_name in self.accframes: return self.accframes[acc_name] self.accframes[acc_name] = CursesAccountFrame(self, acc_name) # update the window layout self.setupwindows(resize= True) return self.accframes[acc_name] def terminate(self, *args, **kwargs): curses.nocbreak(); self.stdscr.keypad(0); curses.echo() curses.endwin() # need to remove the Curses console handler now and replace with # basic one, so exceptions and stuff are properly displayed self.logger.removeHandler(self._log_con_handler) UIBase.setup_consolehandler(self) # reset the warning method, we do not have curses anymore self.warn = super(Blinkenlights, self).warn # finally call parent terminate which prints out exceptions etc super(Blinkenlights, self).terminate(*args, **kwargs) def threadException(self, thread): #self._log_con_handler.stop() UIBase.threadException(self, thread) spaetz-offlineimap-c9e9690/offlineimap/ui/Machine.py000066400000000000000000000150641176237577200225340ustar00rootroot00000000000000# Copyright (C) 2007-2011 John Goerzen & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
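# ----------------------------------------------------------------------
# Illustrative aside (not part of Machine.py): Blinkenlights.terminate()
# above undoes, in reverse, the terminal modes that init_banner() set up
# (noecho/cbreak/keypad).  A standalone program gets the same guarantee with
# a try/finally around its curses body, roughly as sketched here; run_ui is
# an example name, and curses.wrapper() from the stdlib offers a comparable
# service.
import curses

def run_ui(body):
    stdscr = curses.initscr()
    try:
        curses.noecho()       # don't echo keypresses to the screen
        curses.cbreak()       # deliver keys immediately, no Enter needed
        stdscr.keypad(1)      # report special keys such as curses.KEY_RESIZE
        return body(stdscr)
    finally:
        curses.nocbreak()     # same teardown order as terminate() above
        stdscr.keypad(0)
        curses.echo()
        curses.endwin()
# ----------------------------------------------------------------------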
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA try: from urllib import urlencode except ImportError: # python3 from urllib.parse import urlencode import sys import time import logging from threading import currentThread from offlineimap.ui.UIBase import UIBase import offlineimap protocol = '7.0.0' class MachineLogFormatter(logging.Formatter): """urlencodes any outputted line, to avoid multi-line output""" def format(self, record): # urlencode the "mesg" attribute and append to regular line... line = super(MachineLogFormatter, self).format(record) return line + urlencode([('', record.mesg)])[1:] class MachineUI(UIBase): def __init__(self, config, loglevel = logging.INFO): super(MachineUI, self).__init__(config, loglevel) self._log_con_handler.createLock() """lock needed to block on password input""" # Set up the formatter that urlencodes the strings... self._log_con_handler.setFormatter(MachineLogFormatter()) def _printData(self, command, msg): self.logger.info("%s:%s:%s" % ( 'msg', command, currentThread().getName()), extra={'mesg': msg}) def _msg(s, msg): s._printData('_display', msg) def warn(self, msg, minor = 0): # TODO, remove and cleanup the unused minor stuff self.logger.warning("%s:%s:%s:%s" % ( 'warn', '', currentThread().getName(), msg)) def registerthread(self, account): super(MachineUI, self).registerthread(account) self._printData('registerthread', account) def unregisterthread(s, thread): UIBase.unregisterthread(s, thread) s._printData('unregisterthread', thread.getName()) def debugging(s, debugtype): s._printData('debugging', debugtype) def acct(s, accountname): s._printData('acct', accountname) def acctdone(s, accountname): s._printData('acctdone', accountname) def validityproblem(s, folder): s._printData('validityproblem', "%s\n%s\n%s\n%s" % \ (folder.getname(), folder.getrepository().getname(), folder.get_saveduidvalidity(), folder.get_uidvalidity())) def connecting(s, hostname, port): s._printData('connecting', "%s\n%s" % (hostname, str(port))) def syncfolders(s, srcrepos, destrepos): s._printData('syncfolders', "%s\n%s" % (s.getnicename(srcrepos), s.getnicename(destrepos))) def syncingfolder(s, srcrepos, srcfolder, destrepos, destfolder): s._printData('syncingfolder', "%s\n%s\n%s\n%s\n" % \ (s.getnicename(srcrepos), srcfolder.getname(), s.getnicename(destrepos), destfolder.getname())) def loadmessagelist(s, repos, folder): s._printData('loadmessagelist', "%s\n%s" % (s.getnicename(repos), folder.getvisiblename())) def messagelistloaded(s, repos, folder, count): s._printData('messagelistloaded', "%s\n%s\n%d" % \ (s.getnicename(repos), folder.getname(), count)) def syncingmessages(s, sr, sf, dr, df): s._printData('syncingmessages', "%s\n%s\n%s\n%s\n" % \ (s.getnicename(sr), sf.getname(), s.getnicename(dr), df.getname())) def copyingmessage(self, uid, num, num_to_copy, srcfolder, destfolder): self._printData('copyingmessage', "%d\n%s\n%s\n%s[%s]" % \ (uid, self.getnicename(srcfolder), srcfolder.getname(), self.getnicename(destfolder), destfolder)) def folderlist(s, list): return ("\f".join(["%s\t%s" % (s.getnicename(x), x.getname()) for x in list])) def uidlist(s, list): return ("\f".join([str(u) for u in list])) def deletingmessages(s, uidlist, destlist): ds = s.folderlist(destlist) s._printData('deletingmessages', "%s\n%s" % (s.uidlist(uidlist), ds)) def addingflags(s, uidlist, flags, dest): 
s._printData("addingflags", "%s\n%s\n%s" % (s.uidlist(uidlist), "\f".join(flags), dest)) def deletingflags(s, uidlist, flags, dest): s._printData('deletingflags', "%s\n%s\n%s" % (s.uidlist(uidlist), "\f".join(flags), dest)) def threadException(self, thread): self._printData('threadException', "%s\n%s" % \ (thread.getName(), self.getThreadExceptionString(thread))) self.delThreadDebugLog(thread) self.terminate(100) def terminate(s, exitstatus = 0, errortitle = '', errormsg = ''): s._printData('terminate', "%d\n%s\n%s" % (exitstatus, errortitle, errormsg)) sys.exit(exitstatus) def mainException(s): s._printData('mainException', s.getMainExceptionString()) def threadExited(s, thread): s._printData('threadExited', thread.getName()) UIBase.threadExited(s, thread) def sleeping(s, sleepsecs, remainingsecs): s._printData('sleeping', "%d\n%d" % (sleepsecs, remainingsecs)) if sleepsecs > 0: time.sleep(sleepsecs) return 0 def getpass(self, accountname, config, errmsg = None): if errmsg: self._printData('getpasserror', "%s\n%s" % (accountname, errmsg), False) self._log_con_handler.acquire() # lock the console output try: self._printData('getpass', accountname, False) return (sys.stdin.readline()[:-1]) finally: self._log_con_handler.release() def init_banner(self): self._printData('protocol', protocol) self._printData('initbanner', offlineimap.banner) def callhook(self, msg): self._printData('callhook', msg) spaetz-offlineimap-c9e9690/offlineimap/ui/Noninteractive.py000066400000000000000000000023401176237577200241510ustar00rootroot00000000000000# Noninteractive UI # Copyright (C) 2002-2012 John Goerzen & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA import logging from offlineimap.ui.UIBase import UIBase class Basic(UIBase): """'Quiet' simply sets log level to INFO""" def __init__(self, config, loglevel = logging.INFO): return super(Basic, self).__init__(config, loglevel) class Quiet(UIBase): """'Quiet' simply sets log level to WARNING""" def __init__(self, config, loglevel = logging.WARNING): return super(Quiet, self).__init__(config, loglevel) spaetz-offlineimap-c9e9690/offlineimap/ui/TTY.py000066400000000000000000000101741176237577200216450ustar00rootroot00000000000000# TTY UI # Copyright (C) 2002-2011 John Goerzen & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA import logging import sys import time from getpass import getpass from offlineimap import banner from offlineimap.ui.UIBase import UIBase class TTYFormatter(logging.Formatter): """Specific Formatter that adds thread information to the log output""" def __init__(self, *args, **kwargs): #super() doesn't work in py2.6 as 'logging' uses old-style class logging.Formatter.__init__(self, *args, **kwargs) self._last_log_thread = None def format(self, record): """Override format to add thread information""" #super() doesn't work in py2.6 as 'logging' uses old-style class log_str = logging.Formatter.format(self, record) # If msg comes from a different thread than our last, prepend # thread info. Most look like 'Account sync foo' or 'Folder # sync foo'. t_name = record.threadName if t_name == 'MainThread': return log_str # main thread doesn't get things prepended if t_name != self._last_log_thread: self._last_log_thread = t_name log_str = "%s:\n %s" % (t_name, log_str) else: log_str = " %s" % log_str return log_str class TTYUI(UIBase): def setup_consolehandler(self): """Backend specific console handler Sets up things and adds them to self.logger. :returns: The logging.Handler() for console output""" # create console handler with a higher log level ch = logging.StreamHandler() #ch.setLevel(logging.DEBUG) # create formatter and add it to the handlers self.formatter = TTYFormatter("%(message)s") ch.setFormatter(self.formatter) # add the handlers to the logger self.logger.addHandler(ch) self.logger.info(banner) # init lock for console output ch.createLock() return ch def isusable(self): """TTYUI is reported as usable when invoked on a terminal""" return sys.stdout.isatty() and sys.stdin.isatty() def getpass(self, accountname, config, errmsg = None): """TTYUI backend is capable of querying the password""" if errmsg: self.warn("%s: %s" % (accountname, errmsg)) self._log_con_handler.acquire() # lock the console output try: return getpass("Enter password for account '%s': " % accountname) finally: self._log_con_handler.release() def mainException(self): if isinstance(sys.exc_info()[1], KeyboardInterrupt): self.logger.warn("Timer interrupted at user request; program " "terminating.\n") self.terminate() else: UIBase.mainException(self) def sleeping(self, sleepsecs, remainingsecs): """Sleep for sleepsecs, display remainingsecs to go. Does nothing if sleepsecs <= 0. Display a message on the screen if we pass a full minute. This implementation in UIBase does not support this, but some implementations return 0 for successful sleep and 1 for an 'abort', ie a request to sync immediately. """ if sleepsecs > 0: if remainingsecs//60 != (remainingsecs-sleepsecs)//60: self.logger.info("Next refresh in %.1f minutes" % ( remainingsecs/60.0)) time.sleep(sleepsecs) return 0 spaetz-offlineimap-c9e9690/offlineimap/ui/UIBase.py000066400000000000000000000532661176237577200223060ustar00rootroot00000000000000# UI base class # Copyright (C) 2002-2011 John Goerzen & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. 
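# ----------------------------------------------------------------------
# Illustrative aside (not part of UIBase.py): TTYFormatter in TTY.py above
# prefixes the first record coming from a new worker thread with the thread
# name and only indents the following ones, so console output groups by
# thread.  Self-contained sketch of that behaviour; GroupingFormatter is an
# example name, not offlineimap API.
import logging
import threading

class GroupingFormatter(logging.Formatter):
    def __init__(self, *args, **kwargs):
        logging.Formatter.__init__(self, *args, **kwargs)
        self._last_thread = None

    def format(self, record):
        text = logging.Formatter.format(self, record)
        if record.threadName == 'MainThread':
            return text                      # main thread stays unprefixed
        if record.threadName != self._last_thread:
            self._last_thread = record.threadName
            return "%s:\n %s" % (record.threadName, text)
        return " %s" % text

logger = logging.getLogger('tty-demo')
handler = logging.StreamHandler()
handler.setFormatter(GroupingFormatter("%(message)s"))
logger.addHandler(handler)
logger.setLevel(logging.INFO)

def work():
    logger.info("Syncing folder INBOX")      # first line gets the prefix
    logger.info("Copying 3 messages")        # later lines are only indented

worker = threading.Thread(target=work, name="Account sync example")
worker.start()
worker.join()
# ----------------------------------------------------------------------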
# # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA import logging import re import time import sys import os import traceback import threading try: from Queue import Queue except ImportError: #python3 from queue import Queue from collections import deque from offlineimap.error import OfflineImapError import offlineimap debugtypes = {'':'Other offlineimap related sync messages', 'imap': 'IMAP protocol debugging', 'maildir': 'Maildir repository debugging', 'thread': 'Threading debugging'} globalui = None def setglobalui(newui): """Set the global ui object to be used for logging""" global globalui globalui = newui def getglobalui(): """Return the current ui object""" global globalui return globalui class UIBase(object): def __init__(self, config, loglevel = logging.INFO): self.config = config # Is this a 'dryrun'? self.dryrun = config.getdefaultboolean('general', 'dry-run', False) self.debuglist = [] """list of debugtypes we are supposed to log""" self.debugmessages = {} """debugmessages in a deque(v) per thread(k)""" self.debugmsglen = 15 self.threadaccounts = {} """dict linking active threads (k) to account names (v)""" self.acct_startimes = {} """linking active accounts with the time.time() when sync started""" self.logfile = None self.exc_queue = Queue() """saves all occuring exceptions, so we can output them at the end""" # create logger with 'OfflineImap' app self.logger = logging.getLogger('OfflineImap') self.logger.setLevel(loglevel) self._log_con_handler = self.setup_consolehandler() """The console handler (we need access to be able to lock it)""" ################################################## UTILS def setup_consolehandler(self): """Backend specific console handler Sets up things and adds them to self.logger. :returns: The logging.Handler() for console output""" # create console handler with a higher log level ch = logging.StreamHandler(sys.stdout) #ch.setLevel(logging.DEBUG) # create formatter and add it to the handlers self.formatter = logging.Formatter("%(message)s") ch.setFormatter(self.formatter) # add the handlers to the logger self.logger.addHandler(ch) self.logger.info(offlineimap.banner) return ch def setlogfile(self, logfile): """Create file handler which logs to file""" fh = logging.FileHandler(logfile, 'at') #fh.setLevel(logging.DEBUG) file_formatter = logging.Formatter("%(asctime)s %(levelname)s: " "%(message)s", '%Y-%m-%d %H:%M:%S') fh.setFormatter(file_formatter) self.logger.addHandler(fh) # write out more verbose initial info blurb on the log file p_ver = ".".join([str(x) for x in sys.version_info[0:3]]) msg = "OfflineImap %s starting...\n Python: %s Platform: %s\n "\ "Args: %s" % (offlineimap.__version__, p_ver, sys.platform, " ".join(sys.argv)) record = logging.LogRecord('OfflineImap', logging.INFO, __file__, None, msg, None, None) fh.emit(record) def _msg(self, msg): """Display a message.""" # TODO: legacy function, rip out. 
self.info(msg) def info(self, msg): """Display a message.""" self.logger.info(msg) def warn(self, msg, minor = 0): self.logger.warning(msg) def error(self, exc, exc_traceback=None, msg=None): """Log a message at severity level ERROR Log Exception 'exc' to error log, possibly prepended by a preceding error "msg", detailing at what point the error occurred. In debug mode, we also output the full traceback that occurred if one has been passed in via sys.info()[2]. Also save the Exception to a stack that can be output at the end of the sync run when offlineiamp exits. It is recommended to always pass in exceptions if possible, so we can give the user the best debugging info. One example of such a call might be: ui.error(exc, sys.exc_info()[2], msg="While syncing Folder %s in " "repo %s") """ if msg: self._msg("ERROR: %s\n %s" % (msg, exc)) else: self._msg("ERROR: %s" % (exc)) if not self.debuglist: # only output tracebacks in debug mode exc_traceback = None # push exc on the queue for later output self.exc_queue.put((msg, exc, exc_traceback)) if exc_traceback: self._msg(traceback.format_tb(exc_traceback)) def registerthread(self, account): """Register current thread as being associated with an account name""" cur_thread = threading.currentThread() if cur_thread in self.threadaccounts: # was already associated with an old account, update info self.debug('thread', "Register thread '%s' (previously '%s', now " "'%s')" % (cur_thread.getName(), self.getthreadaccount(cur_thread), account)) else: self.debug('thread', "Register new thread '%s' (account '%s')" %\ (cur_thread.getName(), account)) self.threadaccounts[cur_thread] = account def unregisterthread(self, thr): """Unregister a thread as being associated with an account name""" if thr in self.threadaccounts: del self.threadaccounts[thr] self.debug('thread', "Unregister thread '%s'" % thr.getName()) def getthreadaccount(self, thr = None): """Get Account() for a thread (current if None) If no account has been registered with this thread, return 'None'""" if thr == None: thr = threading.currentThread() if thr in self.threadaccounts: return self.threadaccounts[thr] return None def debug(self, debugtype, msg): cur_thread = threading.currentThread() if not cur_thread in self.debugmessages: # deque(..., self.debugmsglen) would be handy but was # introduced in p2.6 only, so we'll need to work around and # shorten our debugmsg list manually :-( self.debugmessages[cur_thread] = deque() self.debugmessages[cur_thread].append("%s: %s" % (debugtype, msg)) # Shorten queue if needed if len(self.debugmessages[cur_thread]) > self.debugmsglen: self.debugmessages[cur_thread].popleft() if debugtype in self.debuglist: # log if we are supposed to do so self.logger.debug("[%s]: %s" % (debugtype, msg)) def add_debug(self, debugtype): global debugtypes if debugtype in debugtypes: if not debugtype in self.debuglist: self.debuglist.append(debugtype) self.debugging(debugtype) else: self.invaliddebug(debugtype) def debugging(self, debugtype): global debugtypes self.logger.debug("Now debugging for %s: %s" % (debugtype, debugtypes[debugtype])) def invaliddebug(self, debugtype): self.warn("Invalid debug type: %s" % debugtype) def getnicename(self, object): """Return the type of a repository or Folder as string (IMAP, Gmail, Maildir, etc...)""" prelimname = object.__class__.__name__.split('.')[-1] # Strip off extra stuff. return re.sub('(Folder|Repository)', '', prelimname) def isusable(self): """Returns true if this UI object is usable in the current environment. 
For instance, an X GUI would return true if it's being run in X with a valid DISPLAY setting, and false otherwise.""" return True ################################################## INPUT def getpass(self, accountname, config, errmsg = None): raise NotImplementedError("Prompting for a password is not supported"\ " in this UI backend.") def folderlist(self, list): return ', '.join(["%s[%s]" % \ (self.getnicename(x), x.getname()) for x in list]) ################################################## WARNINGS def msgtoreadonly(self, destfolder, uid, content, flags): if self.config.has_option('general', 'ignore-readonly') and \ self.config.getboolean('general', 'ignore-readonly'): return self.warn("Attempted to synchronize message %d to folder %s[%s], " "but that folder is read-only. The message will not be " "copied to that folder." % ( uid, self.getnicename(destfolder), destfolder)) def flagstoreadonly(self, destfolder, uidlist, flags): if self.config.has_option('general', 'ignore-readonly') and \ self.config.getboolean('general', 'ignore-readonly'): return self.warn("Attempted to modify flags for messages %s in folder %s[%s], " "but that folder is read-only. No flags have been modified " "for that message." % ( str(uidlist), self.getnicename(destfolder), destfolder)) def deletereadonly(self, destfolder, uidlist): if self.config.has_option('general', 'ignore-readonly') and \ self.config.getboolean('general', 'ignore-readonly'): return self.warn("Attempted to delete messages %s in folder %s[%s], but that " "folder is read-only. No messages have been deleted in that " "folder." % (str(uidlist), self.getnicename(destfolder), destfolder)) ################################################## MESSAGES def init_banner(self): """Called when the UI starts. Must be called before any other UI call except isusable(). Displays the copyright banner. 
This is where the UI should do its setup -- TK, for instance, would create the application window here.""" pass def connecting(self, hostname, port): """Log 'Establishing connection to'""" if not self.logger.isEnabledFor(logging.INFO): return displaystr = '' hostname = hostname if hostname else '' port = "%s" % port if port else '' if hostname: displaystr = ' to %s:%s' % (hostname, port) self.logger.info("Establishing connection%s" % displaystr) def acct(self, account): """Output that we start syncing an account (and start counting)""" self.acct_startimes[account] = time.time() self.logger.info("*** Processing account %s" % account) def acctdone(self, account): """Output that we finished syncing an account (in which time)""" sec = time.time() - self.acct_startimes[account] del self.acct_startimes[account] self.logger.info("*** Finished account '%s' in %d:%02d" % (account, sec // 60, sec % 60)) def syncfolders(self, src_repo, dst_repo): """Log 'Copying folder structure...'""" if self.logger.isEnabledFor(logging.DEBUG): self.debug('', "Copying folder structure from %s to %s" %\ (src_repo, dst_repo)) ############################## Folder syncing def makefolder(self, repo, foldername): """Called when a folder is created""" prefix = "[DRYRUN] " if self.dryrun else "" self.info("{}Creating folder {}[{}]".format( prefix, foldername, repo)) def syncingfolder(self, srcrepos, srcfolder, destrepos, destfolder): """Called when a folder sync operation is started.""" self.logger.info("Syncing %s: %s -> %s" % (srcfolder, self.getnicename(srcrepos), self.getnicename(destrepos))) def skippingfolder(self, folder): """Called when a folder sync operation is started.""" self.logger.info("Skipping %s (not changed)" % folder) def validityproblem(self, folder): self.logger.warning("UID validity problem for folder %s (repo %s) " "(saved %d; got %d); skipping it. Please see FAQ " "and manual on how to handle this." 
% \ (folder, folder.getrepository(), folder.get_saveduidvalidity(), folder.get_uidvalidity())) def loadmessagelist(self, repos, folder): self.logger.debug("Loading message list for %s[%s]" % ( self.getnicename(repos), folder)) def messagelistloaded(self, repos, folder, count): self.logger.debug("Message list for %s[%s] loaded: %d messages" % ( self.getnicename(repos), folder, count)) ############################## Message syncing def syncingmessages(self, sr, srcfolder, dr, dstfolder): self.logger.debug("Syncing messages %s[%s] -> %s[%s]" % ( self.getnicename(sr), srcfolder, self.getnicename(dr), dstfolder)) def copyingmessage(self, uid, num, num_to_copy, src, destfolder): """Output a log line stating which message we copy""" self.logger.info("Copy message %s (%d of %d) %s:%s -> %s" % ( uid, num, num_to_copy, src.repository, src, destfolder.repository)) def deletingmessages(self, uidlist, destlist): ds = self.folderlist(destlist) prefix = "[DRYRUN] " if self.dryrun else "" self.info("{}Deleting {} messages ({}) in {}".format( prefix, len(uidlist), offlineimap.imaputil.uid_sequence(uidlist), ds)) def addingflags(self, uidlist, flags, dest): self.logger.info("Adding flag %s to %d messages on %s" % ( ", ".join(flags), len(uidlist), dest)) def deletingflags(self, uidlist, flags, dest): self.logger.info("Deleting flag %s from %d messages on %s" % ( ", ".join(flags), len(uidlist), dest)) def serverdiagnostics(self, repository, type): """Connect to repository and output useful information for debugging""" conn = None self._msg("%s repository '%s': type '%s'" % (type, repository.name, self.getnicename(repository))) try: if hasattr(repository, 'gethost'): # IMAP self._msg("Host: %s Port: %s SSL: %s" % (repository.gethost(), repository.getport(), repository.getssl())) try: conn = repository.imapserver.acquireconnection() except OfflineImapError as e: self._msg("Failed to connect. 
Reason %s" % e) else: if 'ID' in conn.capabilities: self._msg("Server supports ID extension.") #TODO: Debug and make below working, it hangs Gmail #res_type, response = conn.id(( # 'name', offlineimap.__productname__, # 'version', offlineimap.__version__)) #self._msg("Server ID: %s %s" % (res_type, response[0])) self._msg("Server welcome string: %s" % str(conn.welcome)) self._msg("Server capabilities: %s\n" % str(conn.capabilities)) repository.imapserver.releaseconnection(conn) if type != 'Status': folderfilter = repository.getconf('folderfilter', None) if folderfilter: self._msg("folderfilter= %s\n" % folderfilter) folderincludes = repository.getconf('folderincludes', None) if folderincludes: self._msg("folderincludes= %s\n" % folderincludes) nametrans = repository.getconf('nametrans', None) if nametrans: self._msg("nametrans= %s\n" % nametrans) folders = repository.getfolders() foldernames = [(f.name, f.getvisiblename(), f.sync_this) \ for f in folders] folders = [] for name, visiblename, sync_this in foldernames: syncstr = "" if sync_this else " (disabled)" if name == visiblename: folders.append("%s%s" % (name, syncstr)) else: folders.append("%s -> %s%s" % (name, visiblename, syncstr)) self._msg("Folderlist:\n %s\n" % "\n ".join(folders)) finally: if conn: #release any existing IMAP connection repository.imapserver.close() def savemessage(self, debugtype, uid, flags, folder): """Output a log line stating that we save a msg""" self.debug(debugtype, "Write mail '%s:%d' with flags %s" % (folder, uid, repr(flags))) ################################################## Threads def getThreadDebugLog(self, thread): if thread in self.debugmessages: message = "\nLast %d debug messages logged for %s prior to exception:\n"\ % (len(self.debugmessages[thread]), thread.getName()) message += "\n".join(self.debugmessages[thread]) else: message = "\nNo debug messages were logged for %s." % \ thread.getName() return message def delThreadDebugLog(self, thread): if thread in self.debugmessages: del self.debugmessages[thread] def getThreadExceptionString(self, thread): message = "Thread '%s' terminated with exception:\n%s" % \ (thread.getName(), thread.exit_stacktrace) message += "\n" + self.getThreadDebugLog(thread) return message def threadException(self, thread): """Called when a thread has terminated with an exception. The argument is the ExitNotifyThread that has so terminated.""" self.warn(self.getThreadExceptionString(thread)) self.delThreadDebugLog(thread) self.terminate(100) def terminate(self, exitstatus = 0, errortitle = None, errormsg = None): """Called to terminate the application.""" #print any exceptions that have occurred over the run if not self.exc_queue.empty(): self.warn("ERROR: Exceptions occurred during the run!") while not self.exc_queue.empty(): msg, exc, exc_traceback = self.exc_queue.get() if msg: self.warn("ERROR: %s\n %s" % (msg, exc)) else: self.warn("ERROR: %s" % (exc)) if exc_traceback: self.warn("\nTraceback:\n%s" %"".join( traceback.format_tb(exc_traceback))) if errormsg and errortitle: self.warn('ERROR: %s\n\n%s\n'%(errortitle, errormsg)) elif errormsg: self.warn('%s\n' % errormsg) sys.exit(exitstatus) def threadExited(self, thread): """Called when a thread has exited normally. 
Many UIs will just ignore this.""" self.delThreadDebugLog(thread) self.unregisterthread(thread) ################################################## Hooks def callhook(self, msg): if self.dryrun: self.info("[DRYRUN] {}".format(msg)) else: self.info(msg) ################################################## Other def sleep(self, sleepsecs, account): """This function does not actually output anything, but handles the overall sleep, dealing with updates as necessary. It will, however, call sleeping() which DOES output something. :returns: 0/False if timeout expired, 1/2/True if there is a request to cancel the timer. """ abortsleep = False while sleepsecs > 0 and not abortsleep: if account.get_abort_event(): abortsleep = True else: abortsleep = self.sleeping(10, sleepsecs) sleepsecs -= 10 self.sleeping(0, 0) # Done sleeping. return abortsleep def sleeping(self, sleepsecs, remainingsecs): """Sleep for sleepsecs, display remainingsecs to go. Does nothing if sleepsecs <= 0. Display a message on the screen if we pass a full minute. This implementation in UIBase does not support this, but some implementations return 0 for successful sleep and 1 for an 'abort', ie a request to sync immediately. """ if sleepsecs > 0: if remainingsecs//60 != (remainingsecs-sleepsecs)//60: self.logger.debug("Next refresh in %.1f minutes" % ( remainingsecs/60.0)) time.sleep(sleepsecs) return 0 spaetz-offlineimap-c9e9690/offlineimap/ui/__init__.py000066400000000000000000000023671176237577200227310ustar00rootroot00000000000000# UI module # Copyright (C) 2010-2011 Sebastian Spaeth & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from offlineimap.ui.UIBase import getglobalui, setglobalui from offlineimap.ui import TTY, Noninteractive, Machine UI_LIST = {'ttyui': TTY.TTYUI, 'basic': Noninteractive.Basic, 'quiet': Noninteractive.Quiet, 'machineui': Machine.MachineUI} #add Blinkenlights UI if it imports correctly (curses installed) try: from offlineimap.ui import Curses UI_LIST['blinkenlights'] = Curses.Blinkenlights except ImportError: pass spaetz-offlineimap-c9e9690/offlineimap/ui/debuglock.py000066400000000000000000000033301176237577200231200ustar00rootroot00000000000000# Locking debugging code -- temporary # Copyright (C) 2003 John Goerzen # # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA from threading import Lock, currentThread import traceback logfile = open("/tmp/logfile", "wt") loglock = Lock() class DebuggingLock: def __init__(self, name): self.lock = Lock() self.name = name def acquire(self, blocking = 1): self.print_tb("Acquire lock") self.lock.acquire(blocking) self.logmsg("===== %s: Thread %s acquired lock\n" % (self.name, currentThread().getName())) def release(self): self.print_tb("Release lock") self.lock.release() def logmsg(self, msg): loglock.acquire() logfile.write(msg + "\n") logfile.flush() loglock.release() def print_tb(self, msg): self.logmsg(".... %s: Thread %s attempting to %s\n" % \ (self.name, currentThread().getName(), msg) + \ "\n".join(traceback.format_list(traceback.extract_stack()))) spaetz-offlineimap-c9e9690/setup.py000066400000000000000000000045651176237577200174260ustar00rootroot00000000000000#!/usr/bin/env python # $Id: setup.py,v 1.1 2002/06/21 18:10:49 jgoerzen Exp $ # IMAP synchronization # Module: installer # COPYRIGHT # # Copyright (C) 2002 - 2006 John Goerzen # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA import os from distutils.core import setup, Command import offlineimap import logging from test.OLItest import TextTestRunner, TestLoader, OLITestLib class TestCommand(Command): """runs the OLI testsuite""" description = """Runs the test suite. In order to execute only a single test, you could also issue e.g. 'python -m unittest test.tests.test_01_basic.TestBasicFunctions.test_01_olistartup' on the command line.""" user_options = [] def initialize_options(self): pass def finalize_options(self): pass def run(self): logging.basicConfig(format='%(message)s') # set credentials and OfflineImap command to be executed: OLITestLib(cred_file='./test/credentials.conf', cmd='./offlineimap.py') suite = TestLoader().discover('./test/tests') #TODO: failfast does not seem to exist in python2.6? 
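# A hedged sketch (assumption, not part of the original code): if Python 2.6
# support were needed here, the 'failfast' keyword could be passed only where
# it exists, e.g.:
#   import sys
#   kwargs = {'failfast': True} if sys.version_info >= (2, 7) else {}
#   TextTestRunner(verbosity=2, **kwargs).run(suite)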
TextTestRunner(verbosity=2,failfast=True).run(suite) setup(name = "offlineimap", version = offlineimap.__version__, description = offlineimap.__description__, author = offlineimap.__author__, author_email = offlineimap.__author_email__, url = offlineimap.__homepage__, packages = ['offlineimap', 'offlineimap.folder', 'offlineimap.repository', 'offlineimap.ui'], scripts = ['bin/offlineimap'], license = offlineimap.__copyright__ + \ ", Licensed under the GPL version 2", cmdclass = { 'test': TestCommand} ) spaetz-offlineimap-c9e9690/test/000077500000000000000000000000001176237577200166615ustar00rootroot00000000000000spaetz-offlineimap-c9e9690/test/.gitignore000066400000000000000000000000261176237577200206470ustar00rootroot00000000000000credentials.conf tmp_*spaetz-offlineimap-c9e9690/test/OLItest/000077500000000000000000000000001176237577200202045ustar00rootroot00000000000000spaetz-offlineimap-c9e9690/test/OLItest/TestRunner.py000066400000000000000000000231331176237577200226710ustar00rootroot00000000000000# Copyright (C) 2012- Sebastian Spaeth & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA import imaplib import unittest import logging import os import re import sys import shutil import subprocess import tempfile import random random.seed() from offlineimap.CustomConfig import CustomConfigParser from . import default_conf class OLITestLib(): cred_file = None testdir = None """Absolute path of the current temporary test directory""" cmd = None """command that will be executed to invoke offlineimap""" def __init__(self, cred_file = None, cmd='offlineimap'): """ :param cred_file: file of the configuration snippet for authenticating against the test IMAP server(s). :param cmd: command that will be executed to invoke offlineimap""" OLITestLib.cred_file = cred_file if not os.path.isfile(cred_file): raise UserWarning("Please copy 'credentials.conf.sample' to '%s' " "and set your credentials there." % cred_file) OLITestLib.cmd = cmd @classmethod def create_test_dir(cls, suffix=''): """Creates a test directory and places OLI config there Note that this is a class method. There can only be one test directory at a time. OLITestLib is not suited for running several tests in parallel. The user is responsible for cleaning that up herself.""" assert cls.cred_file != None # creating temporary dir for testing in same dir as credentials.conf cls.testdir = os.path.abspath( tempfile.mkdtemp(prefix='tmp_%s_'%suffix, dir=os.path.dirname(cls.cred_file))) cls.write_config_file() return cls.testdir @classmethod def get_default_config(cls): """Creates a default ConfigParser file and returns it The returned config can be manipulated and then saved with write_config_file()""" #TODO, only do first time and cache then for subsequent calls? 
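# A hedged sketch (assumption) for the TODO above: the parsed configuration
# could be memoised on the class, e.g.
#   if getattr(cls, '_default_config_cache', None) is not None:
#       return cls._default_config_cache
#   ...parse default_conf and cred_file as below...
#   cls._default_config_cache = config
# (Callers mutate the returned object, so handing out a copy would be safer.)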
assert cls.cred_file != None assert cls.testdir != None config = CustomConfigParser() config.readfp(default_conf) default_conf.seek(0) # rewind config_file to start config.read(cls.cred_file) config.set("general", "metadata", cls.testdir) return config @classmethod def write_config_file(cls, config=None): """Creates a OLI configuration file It is created in testdir (so create_test_dir has to be called earlier) using the credentials information given (so they had to be set earlier). Failure to do either of them will raise an AssertionException. If config is None, a default one will be used via get_default_config, otherwise it needs to be a config object derived from that.""" if config is None: config = cls.get_default_config() localfolders = os.path.join(cls.testdir, 'mail') config.set("Repository Maildir", "localfolders", localfolders) with open(os.path.join(cls.testdir, 'offlineimap.conf'), "wt") as f: config.write(f) @classmethod def delete_test_dir(cls): """Deletes the current test directory The users is responsible for cleaning that up herself.""" if os.path.isdir(cls.testdir): shutil.rmtree(cls.testdir) @classmethod def run_OLI(cls): """Runs OfflineImap :returns: (rescode, stdout (as unicode)) """ try: output = subprocess.check_output( [cls.cmd, "-c%s" % os.path.join(cls.testdir, 'offlineimap.conf')], shell=False) except subprocess.CalledProcessError as e: return (e.returncode, e.output.decode('utf-8')) return (0, output.decode('utf-8')) @classmethod def delete_remote_testfolders(cls, reponame=None): """Delete all INBOX.OLITEST* folders on the remote IMAP repository reponame: All on `reponame` or all IMAP-type repositories if None""" config = cls.get_default_config() if reponame: sections = ['Repository {}'.format(reponame)] else: sections = [r for r in config.sections() \ if r.startswith('Repository')] sections = filter(lambda s: \ config.get(s, 'Type').lower() == 'imap', sections) for sec in sections: # Connect to each IMAP repo and delete all folders # matching the folderfilter setting. We only allow basic # settings and no fancy password getting here... # 1) connect and get dir listing host = config.get(sec, 'remotehost') user = config.get(sec, 'remoteuser') passwd = config.get(sec, 'remotepass') imapobj = imaplib.IMAP4(host) imapobj.login(user, passwd) res_t, data = imapobj.list() assert res_t == 'OK' dirs = [] for d in data: m = re.search(br''' # Find last quote "((?: # Non-tripple quoted can contain... [^"] | # a non-quote \\" # a backslashded quote )*)" # closing quote [^"]*$ # followed by no more quotes ''', d, flags=re.VERBOSE) folder = bytearray(m.group(1)) #folder = folder.replace(br'\"', b'"') # remove quoting dirs.append(folder) # 2) filter out those not starting with INBOX.OLItest and del... dirs = [d for d in dirs if d.startswith(b'INBOX.OLItest')] for folder in dirs: res_t, data = imapobj.delete(b'\"'+folder+b'\"') assert res_t == 'OK', "Folder deletion of {} failed with error"\ ":\n{} {}".format(folder.decode('utf-8'), res_t, data) imapobj.logout() @classmethod def create_maildir(cls, folder): """Create empty maildir 'folder' in our test maildir Does not fail if it already exists""" assert cls.testdir != None maildir = os.path.join(cls.testdir, 'mail', folder) for subdir in ('','tmp','cur','new'): try: os.makedirs(os.path.join(maildir, subdir)) except OSError as e: if e.errno != 17: # 'already exists' is ok. 
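# (Note: errno 17 is EEXIST on common platforms; using errno.EEXIST would make the intent explicit.)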
raise @classmethod def delete_maildir(cls, folder): """Delete maildir 'folder' in our test maildir Does not fail if not existing""" assert cls.testdir != None maildir = os.path.join(cls.testdir, 'mail', folder) shutil.rmtree(maildir, ignore_errors=True) @classmethod def create_mail(cls, folder, mailfile=None, content=None): """Create a mail in maildir 'folder'/new Use default mailfilename if not given. Use some default content if not given""" assert cls.testdir != None while True: # Loop till we found a unique filename mailfile = '{}:2,'.format(random.randint(0,999999999)) mailfilepath = os.path.join(cls.testdir, 'mail', folder, 'new', mailfile) if not os.path.isfile(mailfilepath): break with open(mailfilepath,"wb") as mailf: mailf.write(b'''From: test Subject: Boo Date: 1 Jan 1980 To: test@offlineimap.org Content here.''') @classmethod def count_maildir_mails(cls, folder): """Returns the number of mails in maildir 'folder' Counting only those in cur&new (ignoring tmp).""" assert cls.testdir != None maildir = os.path.join(cls.testdir, 'mail', folder) boxes, mails = 0, 0 for dirpath, dirs, files in os.walk(maildir, False): if set(dirs) == set(['cur', 'new', 'tmp']): # New maildir folder boxes += 1 #raise RuntimeError("%s is not Maildir" % maildir) if dirpath.endswith(('/cur', '/new')): mails += len(files) return boxes, mails # find UID in a maildir filename re_uidmatch = re.compile(',U=(\d+)') @classmethod def get_maildir_uids(cls, folder): """Returns a list of maildir mail uids, 'None' if no valid uid""" assert cls.testdir != None mailfilepath = os.path.join(cls.testdir, 'mail', folder) assert os.path.isdir(mailfilepath) ret = [] for dirpath, dirs, files in os.walk(mailfilepath): if not dirpath.endswith((os.path.sep + 'new', os.path.sep + 'cur')): continue # only /new /cur are interesting for file in files: m = cls.re_uidmatch.search(file) uid = m.group(1) if m else None ret.append(uid) return ret spaetz-offlineimap-c9e9690/test/OLItest/__init__.py000066400000000000000000000026351176237577200223230ustar00rootroot00000000000000# OfflineImap test library # Copyright (C) 2012- Sebastian Spaeth & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. 
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA __all__ = ['OLITestLib', 'TextTestRunner','TestLoader'] __productname__ = 'OfflineIMAP Test suite' __version__ = '0' __copyright__ = "Copyright 2012- Sebastian Spaeth & contributors" __author__ = 'Sebastian Spaeth' __author_email__= 'Sebastian@SSpaeth.de' __description__ = 'Moo' __license__ = "Licensed under the GNU GPL v2+ (v2 or any later version)" __homepage__ = "http://offlineimap.org" banner = """%(__productname__)s %(__version__)s %(__license__)s""" % locals() import unittest from unittest import TestLoader, TextTestRunner from .globals import default_conf from .TestRunner import OLITestLib spaetz-offlineimap-c9e9690/test/OLItest/globals.py000066400000000000000000000025351176237577200222060ustar00rootroot00000000000000#Constants, that don't rely on anything else in the module # Copyright (C) 2012- Sebastian Spaeth & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA try: from cStringIO import StringIO except ImportError: #python3 from io import StringIO default_conf=StringIO("""[general] #will be set automatically metadata = accounts = test ui = quiet [Account test] localrepository = Maildir remoterepository = IMAP [Repository Maildir] Type = Maildir # will be set automatically during tests localfolders = [Repository IMAP] type=IMAP # Don't hammer the server with too many connection attempts: maxconnections=1 folderfilter= lambda f: f.startswith('INBOX.OLItest') """) spaetz-offlineimap-c9e9690/test/README000066400000000000000000000013511176237577200175410ustar00rootroot00000000000000Documentation for the OfflineImap Test suite. How to run the tests ==================== - Copy the credentials.conf.sample to credentials.conf and insert credentials for an IMAP account and a Gmail account. Delete the Gmail section if you don't have a Gmail account. Do note, that the tests will change the account and upload/delete/modify it's contents and folder structure. So don't use a real used account here... - go to the top level dir (one above this one) and execute: 'python setup.py test' System requirements =================== This test suite depend on python>=2.7 to run out of the box. 
If you want to run this with python 2.6 you will need to install the backport from http://pypi.python.org/pypi/unittest2 instead.spaetz-offlineimap-c9e9690/test/__init__.py000066400000000000000000000000001176237577200207600ustar00rootroot00000000000000spaetz-offlineimap-c9e9690/test/credentials.conf.sample000066400000000000000000000003401176237577200233020ustar00rootroot00000000000000[Repository IMAP] type = IMAP remotehost = localhost ssl = no #sslcacertfile = #cert_fingerprint = remoteuser = user@domain remotepass = SeKr3t [Repository Gmail] type = Gmail remoteuser = user@domain remotepass = SeKr3t spaetz-offlineimap-c9e9690/test/tests/000077500000000000000000000000001176237577200200235ustar00rootroot00000000000000spaetz-offlineimap-c9e9690/test/tests/__init__.py000066400000000000000000000000011176237577200221230ustar00rootroot00000000000000 spaetz-offlineimap-c9e9690/test/tests/test_00_imaputil.py000066400000000000000000000077151176237577200235710ustar00rootroot00000000000000# Copyright (C) 2012- Sebastian Spaeth & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA import unittest import logging from offlineimap import imaputil from offlineimap.ui import UI_LIST, setglobalui from offlineimap.CustomConfig import CustomConfigParser from test.OLItest import OLITestLib # Things need to be setup first, usually setup.py initializes everything. # but if e.g. called from command line, we take care of default values here: if not OLITestLib.cred_file: OLITestLib(cred_file='./test/credentials.conf', cmd='./offlineimap.py') def setUpModule(): logging.info("Set Up test module %s" % __name__) tdir = OLITestLib.create_test_dir(suffix=__name__) def tearDownModule(): logging.info("Tear Down test module") # comment out next line to keep testdir after test runs. TODO: make nicer OLITestLib.delete_test_dir() #Stuff that can be used #self.assertEqual(self.seq, range(10)) # should raise an exception for an immutable sequence #self.assertRaises(TypeError, random.shuffle, (1,2,3)) #self.assertTrue(element in self.seq) #self.assertFalse(element in self.seq) class TestInternalFunctions(unittest.TestCase): """While the other test files test OfflineImap as a program, these tests directly invoke internal helper functions to guarantee that they deliver results as expected""" @classmethod def setUpClass(cls): #This is run before all tests in this class config= OLITestLib.get_default_config() setglobalui(UI_LIST['quiet'](config)) def test_01_imapsplit(self): """Test imaputil.imapsplit()""" res = imaputil.imapsplit(b'(\\HasNoChildren) "." 
"INBOX.Sent"') self.assertEqual(res, [b'(\\HasNoChildren)', b'"."', b'"INBOX.Sent"']) res = imaputil.imapsplit(b'"mo\\" o" sdfsdf') self.assertEqual(res, [b'"mo\\" o"', b'sdfsdf']) def test_02_flagsplit(self): """Test imaputil.flagsplit()""" res = imaputil.flagsplit(b'(\\Draft \\Deleted)') self.assertEqual(res, [b'\\Draft', b'\\Deleted']) res = imaputil.flagsplit(b'(FLAGS (\\Seen Old) UID 4807)') self.assertEqual(res, [b'FLAGS', b'(\\Seen Old)', b'UID', b'4807']) def test_03_options2hash(self): """Test imaputil.options2hash()""" res = imaputil.options2hash([1,2,3,4,5,6]) self.assertEqual(res, {1:2, 3:4, 5:6}) def test_04_flags2hash(self): """Test imaputil.flags2hash()""" res = imaputil.flags2hash(b'(FLAGS (\\Seen Old) UID 4807)') self.assertEqual(res, {b'FLAGS': b'(\\Seen Old)', b'UID': b'4807'}) def test_05_flagsimap2maildir(self): """Test imaputil.flagsimap2maildir()""" res = imaputil.flagsimap2maildir(b'(\\Draft \\Deleted)') self.assertEqual(res, set(b'DT')) def test_06_flagsmaildir2imap(self): """Test imaputil.flagsmaildir2imap()""" res = imaputil.flagsmaildir2imap(set(b'DR')) self.assertEqual(res, b'(\\Answered \\Draft)') # test all possible flags res = imaputil.flagsmaildir2imap(set(b'SRFTD')) self.assertEqual(res, b'(\\Answered \\Deleted \\Draft \\Flagged \\Seen)') def test_07_uid_sequence(self): """Test imaputil.uid_sequence()""" res = imaputil.uid_sequence([1,2,3,4,5,10,12,13]) self.assertEqual(res, b'1:5,10,12:13') spaetz-offlineimap-c9e9690/test/tests/test_01_basic.py000066400000000000000000000151651176237577200230250ustar00rootroot00000000000000# Copyright (C) 2012- Sebastian Spaeth & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA import random import unittest import logging import os, sys from test.OLItest import OLITestLib # Things need to be setup first, usually setup.py initializes everything. # but if e.g. called from command line, we take care of default values here: if not OLITestLib.cred_file: OLITestLib(cred_file='./test/credentials.conf', cmd='./offlineimap.py') def setUpModule(): logging.info("Set Up test module %s" % __name__) tdir = OLITestLib.create_test_dir(suffix=__name__) def tearDownModule(): logging.info("Tear Down test module") # comment out next line to keep testdir after test runs. 
TODO: make nicer OLITestLib.delete_test_dir() #Stuff that can be used #self.assertEqual(self.seq, range(10)) # should raise an exception for an immutable sequence #self.assertRaises(TypeError, random.shuffle, (1,2,3)) #self.assertTrue(element in self.seq) #self.assertFalse(element in self.seq) class TestBasicFunctions(unittest.TestCase): #@classmethod #def setUpClass(cls): #This is run before all tests in this class # cls._connection = createExpensiveConnectionObject() #@classmethod #This is run after all tests in this class #def tearDownClass(cls): # cls._connection.destroy() # This will be run before each test #def setUp(self): # self.seq = range(10) def test_01_olistartup(self): """Tests if OLI can be invoked without exceptions Cleans existing remote tet folders. Then syncs all "OLItest* (specified in the default config) to our local Maildir. The result should be 0 folders and 0 mails.""" OLITestLib.delete_remote_testfolders() code, res = OLITestLib.run_OLI() self.assertEqual(res, "") boxes, mails = OLITestLib.count_maildir_mails('') self.assertTrue((boxes, mails)==(0,0), msg="Expected 0 folders and 0 " "mails, but sync led to {} folders and {} mails".format( boxes, mails)) def test_02_createdir(self): """Create local OLItest 1 & OLItest "1" maildir, sync Folder names with quotes used to fail and have been fixed, so one is included here as a small challenge.""" OLITestLib.create_maildir('INBOX.OLItest 1') OLITestLib.create_maildir('INBOX.OLItest "1"') code, res = OLITestLib.run_OLI() #logging.warn("%s %s "% (code, res)) self.assertEqual(res, "") boxes, mails = OLITestLib.count_maildir_mails('') self.assertTrue((boxes, mails)==(2,0), msg="Expected 2 folders and 0 " "mails, but sync led to {} folders and {} mails".format( boxes, mails)) def test_03_nametransmismatch(self): """Create mismatching remote and local nametrans rules This should raise an error.""" config = OLITestLib.get_default_config() config.set('Repository IMAP', 'nametrans', 'lambda f: f' ) config.set('Repository Maildir', 'nametrans', 'lambda f: f + "moo"' ) OLITestLib.write_config_file(config) code, res = OLITestLib.run_OLI() #logging.warn("%s %s "% (code, res)) # We expect an INFINITE FOLDER CREATION WARNING HERE.... mismatch = "ERROR: INFINITE FOLDER CREATION DETECTED!" in res self.assertEqual(mismatch, True, msg="Mismatching nametrans rules did " "NOT trigger an 'infinite folder generation' error. Output was:\n" "{}".format(res)) # Write out default config file again OLITestLib.write_config_file() def test_04_createmail(self): """Create mail in OLItest 1, sync, wipe folder sync Currently, this will mean the folder will be recreated locally. At some point when remote folder deletion is implemented, this behavior will change.""" OLITestLib.delete_remote_testfolders() OLITestLib.delete_maildir('') #Delete all local maildir folders OLITestLib.create_maildir('INBOX.OLItest') OLITestLib.create_mail('INBOX.OLItest') code, res = OLITestLib.run_OLI() #logging.warn("%s %s "% (code, res)) self.assertEqual(res, "") boxes, mails = OLITestLib.count_maildir_mails('') self.assertTrue((boxes, mails)==(1,1), msg="Expected 1 folders and 1 " "mails, but sync led to {} folders and {} mails".format( boxes, mails)) # The local Mail should have been assigned a proper UID now, check! 
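# (get_maildir_uids() returns None for any file name lacking a ',U=<uid>' marker, so 'None in uids' flags mails that were never assigned an IMAP UID.)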
uids = OLITestLib.get_maildir_uids('INBOX.OLItest') self.assertFalse (None in uids, msg = "All mails should have been "+ \ "assigned the IMAP's UID number, but {} messages had no valid ID "\ .format(len([None for x in uids if x==None]))) def test_05_createfolders(self): """Test if createfolders works as expected Create a local Maildir, then sync with remote "createfolders" disabled. Delete local Maildir and sync. We should have no new local maildir then. TODO: Rewrite this test to directly test and count the remote folders when the helper functions have been written""" config = OLITestLib.get_default_config() config.set('Repository IMAP', 'createfolders', 'False' ) OLITestLib.write_config_file(config) # delete all remote and local testfolders OLITestLib.delete_remote_testfolders() OLITestLib.delete_maildir('') OLITestLib.create_maildir('INBOX.OLItest') code, res = OLITestLib.run_OLI() #logging.warn("%s %s "% (code, res)) self.assertEqual(res, "") OLITestLib.delete_maildir('INBOX.OLItest') code, res = OLITestLib.run_OLI() boxes, mails = OLITestLib.count_maildir_mails('') self.assertTrue((boxes, mails)==(0,0), msg="Expected 0 folders and 0 " "mails, but sync led to {} folders and {} mails".format( boxes, mails)) spaetz-offlineimap-c9e9690/test/tests/test_02_MappedIMAP.py000066400000000000000000000052471176237577200236220ustar00rootroot00000000000000# Copyright (C) 2012- Sebastian Spaeth & contributors # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA import random import unittest import logging import os, sys from test.OLItest import OLITestLib # Things need to be setup first, usually setup.py initializes everything. # but if e.g. called from command line, we take care of default values here: if not OLITestLib.cred_file: OLITestLib(cred_file='./test/credentials.conf', cmd='./offlineimap.py') def setUpModule(): logging.info("Set Up test module %s" % __name__) tdir = OLITestLib.create_test_dir(suffix=__name__) def tearDownModule(): logging.info("Tear Down test module") OLITestLib.delete_test_dir() #Stuff that can be used #self.assertEqual(self.seq, range(10)) # should raise an exception for an immutable sequence #self.assertRaises(TypeError, random.shuffle, (1,2,3)) #self.assertTrue(element in self.seq) #self.assertFalse(element in self.seq) class TestBasicFunctions(unittest.TestCase): #@classmethod #def setUpClass(cls): #This is run before all tests in this class # cls._connection = createExpensiveConnectionObject() #@classmethod #This is run after all tests in this class #def tearDownClass(cls): # cls._connection.destroy() # This will be run before each test #def setUp(self): # self.seq = range(10) def test_01_MappedImap(self): """Tests if a MappedIMAP sync can be invoked without exceptions Cleans existing remote test folders. Then syncs all "OLItest* (specified in the default config) to our local IMAP (Gmail). 
The result should be 0 folders and 0 mails.""" pass #TODO #OLITestLib.delete_remote_testfolders() #code, res = OLITestLib.run_OLI() #self.assertEqual(res, "") #boxes, mails = OLITestLib.count_maildir_mails('') #self.assertTrue((boxes, mails)==(0,0), msg="Expected 0 folders and 0 " # "mails, but sync led to {} folders and {} mails".format( # boxes, mails))
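# A hedged sketch (assumption, not part of the original suite): one way to
# implement this test would be to retarget the test account at the Gmail
# repository from credentials.conf before running the commented steps above,
# e.g.:
#   config = OLITestLib.get_default_config()
#   config.set('Account test', 'remoterepository', 'Gmail')
#   OLITestLib.write_config_file(config)
#   code, res = OLITestLib.run_OLI()
#   self.assertEqual(res, "")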