autosuspend-3.0/.github/workflows/ci.yml
name: CI build
on:
  push: {}
  pull_request: {}
  schedule:
    - cron: '* * * * 0'
jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - name: Clone repo
        uses: actions/checkout@v1
      - name: Set up Python 3.8
        uses: actions/setup-python@v1
        with:
          python-version: 3.8
      - name: Install native dependencies
        run: sudo apt-get -y install libdbus-1-dev libgirepository1.0-dev
      - name: Install tox
        run: |
          python -m pip install --upgrade pip
          pip install tox
      - name: Lint with tox
        run: tox -e check
  try-build-docs:
    runs-on: ubuntu-latest
    steps:
      - name: Clone repo
        uses: actions/checkout@v1
      - name: Set up Python 3.8
        uses: actions/setup-python@v1
        with:
          python-version: 3.8
      - name: Install native dependencies
        run: sudo apt-get -y install libdbus-1-dev libgirepository1.0-dev plantuml
      - name: Install tox
        run: |
          python -m pip install --upgrade pip
          pip install tox
      - name: Build Sphinx docs
        run: tox -e docs
  try-mindeps:
    runs-on: ubuntu-latest
    steps:
      - name: Clone repo
        uses: actions/checkout@v1
      - name: Set up Python 3.8
        uses: actions/setup-python@v1
        with:
          python-version: 3.8
      - name: Install tox
        run: |
          python -m pip install --upgrade pip
          pip install tox
      - name: Test execution with minimal dependencies
        run: tox -e mindeps
  test:
    runs-on: ubuntu-latest
    strategy:
      max-parallel: 4
      matrix:
        python-version: [3.7, 3.8]
    steps:
      - name: Clone repo
        uses: actions/checkout@v1
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v1
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install native dependencies
        run: sudo apt-get -y install libdbus-1-dev libgirepository1.0-dev
      - name: Install Python dependencies
        run: |
          python -m pip install --upgrade pip
          pip install coverage tox tox-gh-actions
      - name: Test with tox
        run: |
          tox
          coverage xml --rcfile=setup.cfg
      - name: Publish coverage to codecov.io
        uses: codecov/codecov-action@v1
        with:
          token: ${{ secrets.CODECOV_TOKEN }} #required
autosuspend-3.0/.gitignore
/.cache
/.coverage*
*.egg-info
/.eggs
/build
/dist
/htmlcov
/tags
__pycache__
/pytestdebug.log
/doc/build/
/env/
/.ropeproject/
/.mypy_cache/
/.pytest_cache/
/.python-version
/.tox/
/Session.vim
autosuspend-3.0/LICENSE.txt
GNU GENERAL PUBLIC LICENSE
Version 2, June 1991
Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
License is intended to guarantee your freedom to share and change free
software--to make sure the software is free for all its users. This
General Public License applies to most of the Free Software
Foundation's software and to any other program whose authors commit to
using it. (Some other Free Software Foundation software is covered by
the GNU Lesser General Public License instead.) You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
this service if you wish), that you receive source code or can get it
if you want it, that you can change the software or use pieces of it
in new free programs; and that you know you can do these things.
To protect your rights, we need to make restrictions that forbid
anyone to deny you these rights or to ask you to surrender the rights.
These restrictions translate to certain responsibilities for you if you
distribute copies of the software, or if you modify it.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must give the recipients all the rights that
you have. You must make sure that they, too, receive or can get the
source code. And you must show them these terms so they know their
rights.
We protect your rights with two steps: (1) copyright the software, and
(2) offer you this license which gives you legal permission to copy,
distribute and/or modify the software.
Also, for each author's protection and ours, we want to make certain
that everyone understands that there is no warranty for this free
software. If the software is modified by someone else and passed on, we
want its recipients to know that what they have is not the original, so
that any problems introduced by others will not reflect on the original
authors' reputations.
Finally, any free program is threatened constantly by software
patents. We wish to avoid the danger that redistributors of a free
program will individually obtain patent licenses, in effect making the
program proprietary. To prevent this, we have made it clear that any
patent must be licensed for everyone's free use or not licensed at all.
The precise terms and conditions for copying, distribution and
modification follow.
GNU GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License applies to any program or other work which contains
a notice placed by the copyright holder saying it may be distributed
under the terms of this General Public License. The "Program", below,
refers to any such program or work, and a "work based on the Program"
means either the Program or any derivative work under copyright law:
that is to say, a work containing the Program or a portion of it,
either verbatim or with modifications and/or translated into another
language. (Hereinafter, translation is included without limitation in
the term "modification".) Each licensee is addressed as "you".
Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running the Program is not restricted, and the output from the Program
is covered only if its contents constitute a work based on the
Program (independent of having been made by running the Program).
Whether that is true depends on what the Program does.
1. You may copy and distribute verbatim copies of the Program's
source code as you receive it, in any medium, provided that you
conspicuously and appropriately publish on each copy an appropriate
copyright notice and disclaimer of warranty; keep intact all the
notices that refer to this License and to the absence of any warranty;
and give any other recipients of the Program a copy of this License
along with the Program.
You may charge a fee for the physical act of transferring a copy, and
you may at your option offer warranty protection in exchange for a fee.
2. You may modify your copy or copies of the Program or any portion
of it, thus forming a work based on the Program, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:
a) You must cause the modified files to carry prominent notices
stating that you changed the files and the date of any change.
b) You must cause any work that you distribute or publish, that in
whole or in part contains or is derived from the Program or any
part thereof, to be licensed as a whole at no charge to all third
parties under the terms of this License.
c) If the modified program normally reads commands interactively
when run, you must cause it, when started running for such
interactive use in the most ordinary way, to print or display an
announcement including an appropriate copyright notice and a
notice that there is no warranty (or else, saying that you provide
a warranty) and that users may redistribute the program under
these conditions, and telling the user how to view a copy of this
License. (Exception: if the Program itself is interactive but
does not normally print such an announcement, your work based on
the Program is not required to print an announcement.)
These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Program,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Program, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote it.
Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Program.
In addition, mere aggregation of another work not based on the Program
with the Program (or with a work based on the Program) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.
3. You may copy and distribute the Program (or a work based on it,
under Section 2) in object code or executable form under the terms of
Sections 1 and 2 above provided that you also do one of the following:
a) Accompany it with the complete corresponding machine-readable
source code, which must be distributed under the terms of Sections
1 and 2 above on a medium customarily used for software interchange; or,
b) Accompany it with a written offer, valid for at least three
years, to give any third party, for a charge no more than your
cost of physically performing source distribution, a complete
machine-readable copy of the corresponding source code, to be
distributed under the terms of Sections 1 and 2 above on a medium
customarily used for software interchange; or,
c) Accompany it with the information you received as to the offer
to distribute corresponding source code. (This alternative is
allowed only for noncommercial distribution and only if you
received the program in object code or executable form with such
an offer, in accord with Subsection b above.)
The source code for a work means the preferred form of the work for
making modifications to it. For an executable work, complete source
code means all the source code for all modules it contains, plus any
associated interface definition files, plus the scripts used to
control compilation and installation of the executable. However, as a
special exception, the source code distributed need not include
anything that is normally distributed (in either source or binary
form) with the major components (compiler, kernel, and so on) of the
operating system on which the executable runs, unless that component
itself accompanies the executable.
If distribution of executable or object code is made by offering
access to copy from a designated place, then offering equivalent
access to copy the source code from the same place counts as
distribution of the source code, even though third parties are not
compelled to copy the source along with the object code.
4. You may not copy, modify, sublicense, or distribute the Program
except as expressly provided under this License. Any attempt
otherwise to copy, modify, sublicense or distribute the Program is
void, and will automatically terminate your rights under this License.
However, parties who have received copies, or rights, from you under
this License will not have their licenses terminated so long as such
parties remain in full compliance.
5. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Program or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Program (or any work based on the
Program), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Program or works based on it.
6. Each time you redistribute the Program (or any work based on the
Program), the recipient automatically receives a license from the
original licensor to copy, distribute or modify the Program subject to
these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties to
this License.
7. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Program at all. For example, if a patent
license would not permit royalty-free redistribution of the Program by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Program.
If any portion of this section is held invalid or unenforceable under
any particular circumstance, the balance of the section is intended to
apply and the section as a whole is intended to apply in other
circumstances.
It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system, which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.
This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.
8. If the distribution and/or use of the Program is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Program under this License
may add an explicit geographical distribution limitation excluding
those countries, so that distribution is permitted only in or among
countries not thus excluded. In such case, this License incorporates
the limitation as if written in the body of this License.
9. The Free Software Foundation may publish revised and/or new versions
of the General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the Program
specifies a version number of this License which applies to it and "any
later version", you have the option of following the terms and conditions
either of that version or of any later version published by the Free
Software Foundation. If the Program does not specify a version number of
this License, you may choose any version ever published by the Free Software
Foundation.
10. If you wish to incorporate parts of the Program into other free
programs whose distribution conditions are different, write to the author
to ask for permission. For software which is copyrighted by the Free
Software Foundation, write to the Free Software Foundation; we sometimes
make exceptions for this. Our decision will be guided by the two goals
of preserving the free status of all derivatives of our free software and
of promoting the sharing and reuse of software generally.
NO WARRANTY
11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
REPAIR OR CORRECTION.
12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.
END OF TERMS AND CONDITIONS
autosuspend-3.0/MANIFEST.in
include VERSION
autosuspend-3.0/README.md
# autosuspend
[CI status](https://github.com/languitar/autosuspend/actions) [code coverage](https://codecov.io/gh/languitar/autosuspend) [documentation](http://autosuspend.readthedocs.io/en/latest/?badge=latest) [Debian build status](https://buildd.debian.org/autosuspend) [AUR package](https://aur.archlinux.org/packages/autosuspend/)
`autosuspend` is a Python daemon that suspends a system if certain conditions are met, or not met. This enables a server to sleep in case of inactivity without depending on the X infrastructure usually used by normal desktop environments.
Documentation is [available here](https://autosuspend.readthedocs.io).
## License
This software is licensed using the [GPL2 license](https://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html).
autosuspend-3.0/VERSION
3.0
3.0.0
autosuspend-3.0/cosmic-ray.toml
[cosmic-ray]
module-path = "src/autosuspend"
python-version = ""
timeout = 20.0
excluded-modules = []
test-command = "env PYTHONPATH=`pwd`/src pytest -x"
[cosmic-ray.execution-engine]
name = "local"
[cosmic-ray.cloning]
method = "copy"
commands = [
"pip install .[test]"
]
autosuspend-3.0/data/autosuspend-detect-suspend.service
[Unit]
Description=Notifies autosuspend about suspension
Documentation=https://autosuspend.readthedocs.io/en/latest/systemd_integration.html
Before=sleep.target
[Service]
Type=simple
ExecStart=/usr/bin/autosuspend -l /etc/autosuspend-logging.conf presuspend
[Install]
WantedBy=sleep.target
autosuspend-3.0/data/autosuspend-logging.conf
[loggers]
keys=root,autosuspend,checks
[handlers]
keys=consoleHandler
[formatters]
keys=simpleFormatter
[logger_root]
level=INFO
handlers=consoleHandler
[logger_autosuspend]
qualname=autosuspend
propagate=0
level=INFO
handlers=consoleHandler
[logger_checks]
qualname=autosuspend.checks
propagate=0
level=INFO
handlers=consoleHandler
[handler_consoleHandler]
class=StreamHandler
level=DEBUG
formatter=simpleFormatter
args=(sys.stdout,)
[formatter_simpleFormatter]
format=%(asctime)s - %(name)s - %(levelname)s - %(message)s
datefmt=
autosuspend-3.0/data/autosuspend.conf
## This is an example configuration file that mainly serves as a syntax explanation.
## For a list of available options and checks, please refer to `man autosuspend.conf` or the online documentation.
[general]
interval = 30
idle_time = 900
suspend_cmd = /usr/bin/systemctl suspend
wakeup_cmd = sh -c 'echo 0 > /sys/class/rtc/rtc0/wakealarm && echo {timestamp:.0f} > /sys/class/rtc/rtc0/wakealarm'
woke_up_file = /var/run/autosuspend-just-woke-up
lock_file = /var/lock/autosuspend.lock
lock_timeout = 30
# Can be used to call a command before suspending, either with scheduled wake up or not.
# notify_cmd_wakeup = su myuser -c notify-send -a autosuspend 'Suspending the system. Wake up at {iso}'
# notify_cmd_no_wakeup = su myuser -c notify-send -a autosuspend 'Suspending the system.'
# Basic activity check configuration.
# The check class name is derived from the section header (Ping in this case).
# Remember to enable desired checks. They are disabled by default.
[check.Ping]
enabled = true
hosts = 192.168.0.7
# This check is disabled.
[check.Smb]
enabled = false
# Example for a custom check name.
# This will use the Users check with the custom name RemoteUsers.
# Custom names are necessary in case a check class is used multiple times.
# Custom names can also be used for clarification.
[check.RemoteUsers]
class = Users
enabled = true
name = .*
terminal = .*
host = [0-9].*
# Here the Users activity check is used again with different settings and a different name
[check.LocalUsers]
class = Users
enabled = true
name = .*
terminal = .*
host = localhost
# Checks to determine the next scheduled wakeup are prefixed with 'wakeup'.
[wakeup.Calendar]
enabled = true
url = http://example.org/test.ics
# Apart from this, wake up checks reuse the same configuration mechanism.
autosuspend-3.0/data/autosuspend.service
[Unit]
Description=A daemon to suspend your server in case of inactivity
Documentation=https://autosuspend.readthedocs.io/en/latest/systemd_integration.html
After=network.target
[Service]
ExecStart=/usr/bin/autosuspend -l /etc/autosuspend-logging.conf daemon
[Install]
WantedBy=multi-user.target
autosuspend-3.0/doc/source/api.rst
Python API documentation
########################
In case custom checks are required, the following classes have to be subclassed.
.. autoclass:: autosuspend.checks.Activity
:members:
:inherited-members:
.. autoclass:: autosuspend.checks.Wakeup
:members:
:inherited-members:
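As an illustration only, a custom activity check could look roughly like the following sketch. The module name, the constructor, and the method names used here are assumptions for demonstration purposes rather than a definitive statement of the API; refer to the generated class documentation above for the exact interface.

.. code-block:: python

   import os
   from typing import Optional

   from autosuspend.checks import Activity


   class HighLoad(Activity):
       """Hypothetical check that blocks suspending while the load average is high."""

       @classmethod
       def create(cls, name, config):
           # assumed factory hook: build the check from its configuration section
           return cls(name, float(config.get('threshold', fallback='2.5')))

       def __init__(self, name: str, threshold: float) -> None:
           Activity.__init__(self, name)
           self._threshold = threshold

       def check(self) -> Optional[str]:
           # assumed contract: return a descriptive string if activity is detected,
           # or None if the system may be suspended
           load = os.getloadavg()[1]
           if load > self._threshold:
               return 'load average {} exceeds {}'.format(load, self._threshold)
           return None

Such a class can then be referenced from the configuration file via its dotted path (for instance ``mymodule.HighLoad``) in the ``class`` option of a check section.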
autosuspend-3.0/doc/source/available_checks.rst
.. _available-checks:
Available activity checks
#########################
The following checks for activity are currently implemented.
Each of them is described with its available configuration options and required optional dependencies.
.. _check-active-calendar-event:
ActiveCalendarEvent
*******************
.. program:: check-active-calendar-event
Checks an online `iCalendar`_ file for events that are currently running.
If so, this indicates activity and prevents suspending the system.
Thus, a calendar can be provided with times at which the system should not go to sleep.
If this calendar resides on an online service like a groupware it might even be possible to invite the system.
Options
=======
.. option:: url
The URL to query for the iCalendar file
.. option:: timeout
Timeout for executed requests in seconds. Default: 5.
.. option:: username
Optional user name to use for authenticating at a server requiring authentication.
If used, also a password must be provided.
.. option:: password
Optional password to use for authenticating at a server requiring authentication.
If used, also a user name must be provided.
Requirements
============
* `requests`_
* `icalendar <https://icalendar.readthedocs.io>`_
* `dateutil`_
* `tzlocal`_
.. _check-active-connection:
ActiveConnection
****************
.. program:: check-active-connection
Checks whether there is currently a client connected to a TCP server at certain ports.
Can be used to e.g. block suspending the system in case SSH users are connected or a web server is used by clients.
Options
=======
.. option:: ports
list of comma-separated port numbers
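The underlying idea can be sketched as follows. This is a simplified illustration only; the use of `psutil`_ here is an assumption for demonstration purposes and not a statement about the actual implementation.

.. code-block:: python

   import psutil


   def has_established_connection(ports: set) -> bool:
       """Report whether any TCP connection to one of the given local ports is established."""
       for conn in psutil.net_connections(kind='tcp'):
           if (conn.status == psutil.CONN_ESTABLISHED
                   and conn.laddr
                   and conn.laddr.port in ports):
               return True
       return False


   # for example, watch SSH and HTTP
   print(has_established_connection({22, 80}))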
Requirements
============
.. _check-external-command:
ExternalCommand
***************
.. program:: check-external-command
Executes an arbitrary command.
In case this command returns 0, the system is assumed to be active.
The command is executed as is using shell execution.
Beware of malicious commands in obtained configuration files.
Options
=======
.. option:: command
The command to execute including all arguments
Requirements
============
.. _check-kodi:
Kodi
****
.. program:: check-kodi
Checks whether an instance of `Kodi`_ is currently playing.
Options
=======
.. option:: url
Base URL of the JSON RPC API of the Kodi instance, default: ``http://localhost:8080/jsonrpc``
.. option:: timeout
Request timeout in seconds, default: ``5``
.. option:: username
Optional user name to use for authenticating at a server requiring authentication.
If used, also a password must be provided.
.. option:: password
Optional password to use for authenticating at a server requiring authentication.
If used, also a user name must be provided.
.. option:: suspend_while_paused
Also suspend the system when media playback is paused instead of only suspending
when playback is stopped.
Default: ``false``
Requirements
============
- `requests`_
.. _check-kodi-idle-time:
KodiIdleTime
************
.. program:: check-kodi-idle-time
Checks whether there has been interaction with the Kodi user interface recently.
This prevents suspending the system in case someone is currently browsing collections etc.
This check is redundant to :ref:`check-xidletime` on systems using an X server, but might be necessary in case Kodi is used standalone.
It does not replace the :ref:`check-kodi` check, as the idle time is not updated when media is playing.
Options
=======
.. option:: idle_time
Marks the system active in case a user interaction has appeared within this amount of seconds before now.
Default: ``120``
.. option:: url
Base URL of the JSON RPC API of the Kodi instance, default: ``http://localhost:8080/jsonrpc``
.. option:: timeout
Request timeout in seconds, default: ``5``
.. option:: username
Optional user name to use for authenticating at a server requiring authentication.
If used, also a password must be provided.
.. option:: password
Optional password to use for authenticating at a server requiring authentication.
If used, also a user name must be provided.
Requirements
============
- `requests`_
.. _check-load:
Load
****
.. program:: check-load
Checks whether the 5-minute system load average is below a certain value.
Options
=======
.. option:: threshold
a float for the maximum allowed load value, default: 2.5
Requirements
============
.. _check-logind-session-idle:
LogindSessionsIdle
******************
.. program:: check-logind-session-idle
Prevents suspending in case the ``IdleHint`` for one of the running `logind`_ sessions is set to ``no``.
Support for setting this hint currently varies greatly across display managers, screen lockers etc.
Thus, check exactly whether the hint is set on your system via ``loginctl show-session``.
Options
=======
.. option:: types
A comma-separated list of sessions types to inspect for activity.
The check ignores sessions of other types.
Default: ``tty``, ``x11``, ``wayland``
.. option:: states
A comma-separated list of session states to inspect.
For instance, ``lingering`` sessions used for background programs might not be of interest.
Default: ``active``, ``online``
Requirements
============
- `dbus-python`_
.. _check-mpd:
Mpd
***
.. program:: check-mpd
Checks whether an instance of `MPD`_ is currently playing music.
Options
=======
.. option:: host
Host containing the MPD daemon, default: ``localhost``
.. option:: port
Port to connect to the MPD daemon, default: ``6600``
.. option:: timeout
.. _mpd-timeout:
Request timeout in seconds, default: ``5``
Requirements
============
- `python-mpd2`_
.. _check-network-bandwidth:
NetworkBandwidth
****************
.. program:: check-network-bandwidth
Checks whether more network bandwidth is currently being used than specified.
A set of specified interfaces is checked in this regard, each of them individually, based on the average bandwidth on that interface.
This average is based on the global checking interval specified in the configuration file via the :option:`interval ` option.
Options
=======
.. option:: interfaces
Comma-separated list of network interfaces to check
.. option:: threshold_send
If the average sending bandwidth of one of the specified interfaces is above this threshold, then activity is detected. Specified in bytes/s, default: ``100``
.. option:: threshold_receive
If the average receive bandwidth of one of the specified interfaces is above this threshold, then activity is detected. Specified in bytes/s, default: ``100``
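To illustrate the averaging idea, the following sketch computes per-interface send and receive rates between two polls. The use of `psutil`_ and the exact sampling approach are assumptions for illustration only.

.. code-block:: python

   import time

   import psutil


   def average_rates(interface: str, interval: float):
       """Return (sent_bytes_per_s, received_bytes_per_s) averaged over the interval."""
       before = psutil.net_io_counters(pernic=True)[interface]
       time.sleep(interval)
       after = psutil.net_io_counters(pernic=True)[interface]
       return ((after.bytes_sent - before.bytes_sent) / interval,
               (after.bytes_recv - before.bytes_recv) / interval)


   # activity would be assumed if either rate exceeds its configured threshold
   print(average_rates('eth0', 30))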
Requirements
============
.. _check-ping:
Ping
****
.. program:: check-ping
Checks whether one or more hosts answer to ICMP requests.
Options
=======
.. option:: hosts
Comma-separated list of host names or IPs.
Requirements
============
.. _check-processes:
Processes
*********
.. program:: check-processes
If currently running processes match an expression, the suspend will be blocked.
You might use this to prevent the system from suspending while, for example, an rsync job is running.
Options
=======
.. option:: processes
list of comma-separated process names to check for
Requirements
============
.. _check-smb:
Smb
***
.. program:: check-smb
Any active Samba connection will block suspend.
Options
=======
.. option:: smbstatus
executable needs to be present.
Requirements
============
.. _check-users:
Users
*****
.. program:: check-users
Checks whether a user currently logged in at the system matches several criteria.
All provided criteria must match to indicate activity on the host.
Options
=======
All regular expressions are applied against the full string.
Capturing substrings needs to be explicitly enabled using wildcard matching.
.. option:: name
A regular expression specifying which users to capture, default: ``.*``.
.. option:: terminal
A regular expression specifying the terminal on which the user needs to be logged in, default: ``.*``.
.. option:: host
A regular expression specifying the host from which a user needs to be logged in, default: ``.*``.
Requirements
============
.. _check-xidletime:
XIdleTime
*********
.. program:: check-xidletime
Checks whether all active local X displays have been idle for a sufficiently long time.
Determining which X11 sessions currently exist on a running system is a harder problem than one might expect.
Sometimes, the server runs as root, sometimes under the real user, and many other configuration variants exist.
Thus, multiple sources for active X server instances are implemented for this check, each of them having different requirements and limitations.
They can be changed using the provided configuration option.
Options
=======
.. option:: timeout
required idle time in seconds
.. option:: method
The method to use for acquiring running X sessions.
Valid options are ``sockets`` and ``logind``.
The default is ``sockets``.
``sockets``
Uses the X server sockets files found in :file:`/tmp/.X11-unix`.
This method requires that all X server instances run with user permissions and not as root.
``logind``
Uses `logind`_ to obtain the running X server instances.
This does not support manually started servers.
.. option:: ignore_if_process
A regular expression to match against the process names executed by each X session owner.
In case the user has a running process that matches this expression, the X idle time is ignored and the check continues as if there was no activity.
This can be useful in case of processes which inevitably tinker with the idle time.
.. option:: ignore_users
Do not check sessions of users matching this regular expression.
Requirements
============
* `dbus-python`_ for the ``logind`` method
.. _check-xpath:
XPath
*****
.. program:: check-xpath
A generic check which queries a configured URL and expects the reply to contain XML data.
The returned XML document is checked against a configured `XPath`_ expression and in case the expression matches, the system is assumed to be active.
Some common applications and their respective configuration are:
`tvheadend`_
The required URL for `tvheadend`_ is (if running on the same host)::
http://127.0.0.1:9981/status.xml
In case you want to prevent suspending in case there are active subscriptions or recordings, use the following XPath::
/currentload/subscriptions[number(.) > 0] | /currentload/recordings/recording/start
If you have a permanently running subscriber like `Kodi`_, increase the ``0`` to ``1``.
`Plex`_
For `Plex`_, use the following URL (if running on the same host)::
http://127.0.0.1:32400/status/sessions/?X-Plex-Token={TOKEN}
Where acquiring the token is `documented here `_.
If suspending should be prevented in case of any activity, this simple `XPath`_ expression will suffice::
/MediaContainer[@size > 2]
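Conceptually, the check boils down to something like the following sketch. This is a simplified illustration; authentication, timeouts, and error handling are omitted, and it is not the literal implementation.

.. code-block:: python

   import requests
   from lxml import etree


   def xpath_matches(url: str, xpath: str) -> bool:
       """Fetch the URL and report whether the XPath expression yields any result."""
       reply = requests.get(url, timeout=5)
       reply.raise_for_status()
       root = etree.fromstring(reply.content)
       return bool(root.xpath(xpath))


   # e.g. xpath_matches('http://127.0.0.1:9981/status.xml',
   #                    '/currentload/subscriptions[number(.) > 0]')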
Options
=======
.. option:: url
The URL to query for the XML reply.
.. option:: xpath
The XPath query to execute.
In case it returns a result, the system is assumed to be active.
.. option:: timeout
Timeout for executed requests in seconds. Default: 5.
.. option:: username
Optional user name to use for authenticating at a server requiring authentication.
If used, also a password must be provided.
.. option:: password
Optional password to use for authenticating at a server requiring authentication.
If used, also a user name must be provided.
Requirements
============
* `requests`_
* `lxml`_
autosuspend-3.0/doc/source/available_wakeups.rst
.. _available-wakeups:
Available wake up checks
########################
The following checks for wake up times are currently implemented.
Each of the checks is described with its available configuration options and required optional dependencies.
.. _wakeup-calendar:
Calendar
********
.. program:: wakeup-calendar
Determines next wake up time from an `iCalendar`_ file.
The next event that starts after the current time is chosen as the next wake up time.
Remember that updates to the calendar can only be reflected in case the system is currently running.
Changes to the calendar made while the system is sleeping will obviously not trigger an earlier wake up.
Options
=======
.. option:: url
The URL to query for the iCalendar file.
.. option:: username
Optional user name to use for authenticating at a server requiring authentication.
If used, also a password must be provided.
.. option:: password
Optional password to use for authenticating at a server requiring authentication.
If used, also a user name must be provided.
.. option:: timeout
Timeout for executed requests in seconds. Default: 5.
Requirements
============
* `requests`_
* `icalendar <https://icalendar.readthedocs.io>`_
* `dateutil`_
* `tzlocal`_
.. _wakeup-command:
Command
*******
.. program:: wakeup-command
Determines the wake up time by calling an external command
The command always has to succeed.
If something is printed on stdout by the command, this has to be the next wake up time in UTC seconds.
The command is executed as is using shell execution.
Beware of malicious commands in obtained configuration files.
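For example, a hypothetical helper script used as such a command could print the next full hour as a UTC timestamp. The script is purely illustrative and is not shipped with |project|.

.. code-block:: python

   #!/usr/bin/env python3
   # Print the next full hour in UTC seconds on stdout, as expected by this check.
   from datetime import datetime, timedelta, timezone

   now = datetime.now(timezone.utc)
   next_hour = (now + timedelta(hours=1)).replace(minute=0, second=0, microsecond=0)
   print(int(next_hour.timestamp()))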
Options
=======
.. option:: command
The command to execute including all arguments
.. _wakeup-file:
File
****
.. program:: wakeup-file
Determines the wake up time by reading a file from a configured location.
The file has to contain the planned wake up time as an int or float in seconds UTC.
Options
=======
.. option:: path
path of the file to read in case it is present
.. _wakeup-periodic:
Periodic
********
.. program:: wakeup-periodic
Always schedules a wake up at a specified delta from now on.
Can be used to let the system wake up every once in a while, for instance, to refresh the calendar used in the :ref:`wakeup-calendar` check.
Options
=======
.. option:: unit
A string indicating in which unit the delta is specified.
Valid options are: ``microseconds``, ``milliseconds``, ``seconds``, ``minutes``, ``hours``, ``days``, ``weeks``.
.. option:: value
The value of the delta as an int.
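Conceptually, the two options combine like the keyword arguments of Python's :class:`datetime.timedelta`; the sketch below shows the idea and is not the literal implementation.

.. code-block:: python

   from datetime import datetime, timedelta, timezone

   unit = 'hours'  # one of the unit strings listed above
   value = 6

   next_wake_up = datetime.now(timezone.utc) + timedelta(**{unit: value})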
.. _wakeup-xpath:
XPath
*****
.. program:: wakeup-xpath
A generic check which queries a configured URL and expects the reply to contain XML data.
The returned XML document is parsed using a configured `XPath`_ expression that has to return UTC timestamps (as strings, not elements).
These are interpreted as the wake up times.
In case multiple entries exist, the soonest one is used.
Options
=======
.. option:: url
The URL to query for the XML reply.
.. option:: xpath
The XPath query to execute.
Must always return number strings or nothing.
.. option:: timeout
Timeout for executed requests in seconds. Default: 5.
.. option:: username
Optional user name to use for authenticating at a server requiring authentication.
If used, also a password must be provided.
.. option:: password
Optional password to use for authenticating at a server requiring authentication.
If used, also a user name must be provided.
.. _wakeup-xpath-delta:
XPathDelta
**********
.. program:: wakeup-xpath-delta
Comparable to :ref:`wakeup-xpath`, but expects that the returned results represent the wake up time as a delta to the current time in a configurable unit.
This check can for instance be used for `tvheadend`_ with the following expression::
//recording/next/text()
Options
=======
.. option:: url
The URL to query for the XML reply.
.. option:: username
Optional user name to use for authenticating at a server requiring authentication.
If used, also a password must be provided.
.. option:: password
Optional password to use for authenticating at a server requiring authentication.
If used, also a user name must be provided.
.. option:: xpath
The XPath query to execute.
Must always return number strings or nothing.
.. option:: timeout
Timeout for executed requests in seconds. Default: 5.
.. option:: unit
A string indicating in which unit the delta is specified.
Valid options are: ``microseconds``, ``milliseconds``, ``seconds``, ``minutes``, ``hours``, ``days``, ``weeks``.
Default: minutes
autosuspend-3.0/doc/source/changelog.rst
Changelog
#########
3.0
***
This version splits the executable into two distinct subcommands, one for activity checking and one for scheduling wake ups.
This way, the wake up scheduling mechanism can be hooked into system tools such as `systemd`_ to ensure that wake ups are scheduled correctly every time the system suspends.
This increases the reliability of the mechanism but also changes the way |project_program| has to be called.
You now need to enable two `systemd`_ units as described in :ref:`systemd-integration` and the command line interface has changed.
New features
============
* The :ref:`check-kodi-idle-time` activity check can now be parameterized whether to indicate activity on a paused player or not (:issue:`59`, :issue:`60`).
* New structure as described above in the version introduction (:issue:`43`).
Fixed bugs
==========
* The documented default URL for the ``Kodi*`` checks did not actually exist in the code, which has now been fixed (:issue:`58`, :issue:`61`).
* A bug in :ref:`check-logind-session-idle` has been fixed (:issue:`71`, :issue:`72`).
Notable changes
===============
* The executable now uses subcommands.
The previous behavior as a long-running daemon is now available under the ``daemon`` subcommand.
* The command line flags for logging have changed.
The previous ``-l`` flag, which combined boolean behavior and file reading, has been split into two distinct flags: ``-d`` is a boolean switch to enable full debug logging to console, whereas the old ``-l`` is now only used for reading logging configuration files.
This change prevents nasty subtleties and issues when parsing the command line and became necessary to support subcommands that follow the general configuration arguments such as logging.
* Dropped support for Python 3.6 and included Python 3.8 in CI infrastructure.
Everything works on Python 3.8.
* The documentation has been restructured and improved. For instance, there is now a :ref:`faq` section.
* Some build and test dependencies have changed.
* CI-builds have been converted to Github Actions.
2.0.4
*****
This is a minor bug fix release.
Fixed bugs
==========
* :ref:`check-active-connection` did not handle local IPv6 addresses with scope such as ``fe80::5193:518c:5c69:aedb%enp3s0`` (:issue:`50`)
2.0.3
*****
This is a minor bug fix release.
Fixed bugs
==========
* :ref:`check-network-bandwidth` did not update its internal state and therefore did not work as documented (:issue:`49`)
2.0.2
*****
This is a minor bug fix release.
Fixed bugs
==========
* :ref:`check-kodi` and :ref:`check-kodi-idle-time` checks now catch ``JSONDecodeErrors`` (:issue:`45`)
* :ref:`check-kodi` and :ref:`check-kodi-idle-time` checks now support authentication (:issue:`47`)
2.0
***
This version adds scheduled wake ups as its main features.
In addition to checks for activity, a set of checks for future activities can now be configured to determine times at which the systems needs to be online again.
The daemon will start suspending in case the next detected wake up time is far enough in the future and schedule an automatic system wake up at the closest determined wake up time.
This can, for instance, be used to ensure that the system is up again when a TV show has to be recorded to disk.
Below is a detailed list of notable changes.
New features
============
* Scheduled wake ups (:issue:`9`).
* Ability to call configurable user commands before suspending for notification purposes (:issue:`25`).
* Checks using network requests now support authentication (:issue:`32`).
* Checks using network requests now support ``file://`` URIs (:issue:`36`).
New activity checks
-------------------
* :ref:`check-active-calendar-event`: Uses an `iCalendar`_ file (via network request) to prevent suspending in case an event in the calendar is currently active (:issue:`24`).
* :ref:`check-kodi-idle-time`: Checks the idle time of `Kodi`_ to prevent suspending in case the menu is used (:issue:`33`).
New wakeup checks
-----------------
* :ref:`wakeup-calendar`: Wake up the system at the next event in an `iCalendar`_ file (requested via network, :issue:`30`).
* :ref:`wakeup-command`: Call an external command to determine the next wake up time (:issue:`26`).
* :ref:`wakeup-file`: Read the next wake up time from a file (:issue:`9`).
* :ref:`wakeup-periodic`: Wake up at a defined interval, for instance, to refresh calendars for the :ref:`wakeup-calendar` check (:issue:`34`).
* :ref:`wakeup-xpath` and :ref:`wakeup-xpath-delta`: Request an XML document and use `XPath`_ to extract the next wakeup time.
Fixed bugs
==========
* `XPath`_ checks now support responses with explicit encodings (:issue:`29`).
Notable changes
===============
* The namespace of the logging systems has been rearranged (:issue:`38`).
Existing logging configurations might require changes.
* The default configuration file has been reduced to explain the syntax and semantics.
For a list of all available checks, refer to the manual instead (:issue:`39`).
For a complete list of all addressed issues and new features, please refer to the respective `Github milestone `_.
autosuspend-3.0/doc/source/conf.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import os.path
# needs_sphinx = '1.0'
extensions = [
"sphinx.ext.ifconfig",
"sphinx.ext.intersphinx",
"sphinx.ext.napoleon",
"sphinx.ext.autodoc",
"sphinx_autodoc_typehints",
"sphinxcontrib.plantuml",
"sphinx_issues",
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = 'autosuspend'
copyright = '2017, Johannes Wienke'
author = 'Johannes Wienke'
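# Read the version (first line) and release (second line) from the top-level VERSION file.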
with open(os.path.join(
os.path.abspath(os.path.dirname(os.path.realpath(__file__))),
'../..',
'VERSION'), 'r') as version_file:
lines = version_file.readlines()
version = lines[0].strip()
release = lines[1].strip()
language = None
exclude_patterns = []
pygments_style = 'sphinx'
todo_include_todos = False
rst_epilog = '''
.. _autosuspend: https://github.com/languitar/autosuspend
.. _Python 3: https://docs.python.org/3/
.. _setuptools: https://setuptools.readthedocs.io
.. _configparser: https://docs.python.org/3/library/configparser.html
.. _psutil: https://github.com/giampaolo/psutil
.. _lxml: http://lxml.de/
.. _MPD: http://www.musicpd.org/
.. _python-mpd2: https://pypi.python.org/pypi/python-mpd2
.. _dbus-python: https://cgit.freedesktop.org/dbus/dbus-python/
.. _Kodi: https://kodi.tv/
.. _requests: https://pypi.python.org/pypi/requests
.. _systemd: https://www.freedesktop.org/wiki/Software/systemd/
.. _systemd service files: http://www.freedesktop.org/software/systemd/man/systemd.service.html
.. _broadcast-logging: https://github.com/languitar/broadcast-logging
.. _tvheadend: https://tvheadend.org/
.. _XPath: https://www.w3.org/TR/xpath/
.. _logind: https://www.freedesktop.org/wiki/Software/systemd/logind/
.. _iCalendar: https://tools.ietf.org/html/rfc5545
.. _dateutil: https://dateutil.readthedocs.io
.. _python-icalendar: https://icalendar.readthedocs.io
.. _tzlocal: https://pypi.org/project/tzlocal/
.. _requests-file: https://github.com/dashea/requests-file
.. _Plex: https://www.plex.tv/
.. _portalocker: https://portalocker.readthedocs.io
.. |project| replace:: {project}
.. |project_bold| replace:: **{project}**
.. |project_program| replace:: :program:`{project}`'''.format(project=project)
# Intersphinx
intersphinx_mapping = {'python': ('https://docs.python.org/3.7', None)}
# HTML options
html_theme = 'sphinx_rtd_theme'
# html_theme_options = {}
# html_static_path = ['_static']
html_sidebars = {
'**': [
'relations.html', # needs 'show_related': True theme option to display
'searchbox.html',
]
}
# MANPAGE options
man_pages = [
('man_command',
'autosuspend',
'autosuspend Documentation',
[author],
1),
('man_config',
'autosuspend.conf',
'autosuspend config file Documentation',
[author],
5),
]
man_show_urls = True
# issues
issues_github_path = 'languitar/autosuspend'
# napoleon
napoleon_google_docstring = True
napoleon_numpy_docstring = False
napoleon_include_init_with_doc = True
typehints_fully_qualified = True
def setup(app):
app.add_config_value(
'is_preview',
os.environ.get('READTHEDOCS_VERSION', '') == 'latest',
'env',
)
autosuspend-3.0/doc/source/configuration_file.inc
Syntax
~~~~~~
The |project_program| configuration file uses INI syntax and needs to be processable by the Python `configparser`_ module.
A simple configuration file could look like:
.. code-block:: ini
[general]
interval = 30
idle_time = 900
suspend_cmd = /usr/bin/systemctl suspend
wakeup_cmd = echo {timestamp:.0f} > /sys/class/rtc/rtc0/wakealarm
notify_cmd_wakeup = su myuser -c notify-send -a autosuspend 'Suspending the system. Wake up at {iso}'
notify_cmd_no_wakeup = su myuser -c notify-send -a autosuspend 'Suspending the system.'
lock_file = /var/lock/autosuspend.lock
lock_timeout = 30
[check.Ping]
enabled = false
hosts = 192.168.0.7
[check.RemoteUsers]
class = Users
enabled = true
name = .*
terminal = .*
host = [0-9].*
[wakeup.File]
enabled = True
path = /var/run/autosuspend/wakeup
The configuration file consists of a ``[general]`` section, which specifies general processing options, and multiple sections of the format ``[check.*]`` and ``[wakeup.*]``.
These sections describe the activity and wake up checks to execute.
General configuration
~~~~~~~~~~~~~~~~~~~~~
.. program:: config-general
The ``[general]`` section contains options controlling the overall behavior of the |project_program| daemon. These are:
.. option:: interval
The time to wait after executing all checks in seconds.
.. option:: idle_time
The required amount of time in seconds with no detected activity before the host will be suspended.
Default: 300 seconds
.. option:: min_sleep_time
The minimal amount of time in seconds the system has to sleep for actually triggering suspension.
If a scheduled wake up results in an effective time below this value, the system will not sleep.
Default: 1200 seconds
.. option:: wakeup_delta
Wake up the system this amount of seconds earlier than the time that was determined for an event that requires the system to be up.
This value adds a safety margin for the time the wake up effectively takes.
Default: 30 seconds
.. option:: suspend_cmd
The command to execute in case the host shall be suspended.
This line can contain additional command line arguments to the command to execute.
.. option:: wakeup_cmd
The command to execute for scheduling a wake up of the system.
The given string is processed using Python's :meth:`str.format` and a format argument called ``timestamp`` encodes the UTC timestamp of the planned wake up time (float).
Additionally ``iso`` can be used to acquire the timestamp in ISO 8601 format.
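For illustration, the substitution behaves roughly like the following sketch; the command template is taken from the example configuration and the concrete timestamp is made up.

.. code-block:: python

   wakeup_cmd = "sh -c 'echo {timestamp:.0f} > /sys/class/rtc/rtc0/wakealarm'"

   planned = 1579860000.0  # planned wake up as UTC seconds (float)

   command = wakeup_cmd.format(
       timestamp=planned,
       iso='2020-01-24T10:00:00+00:00',  # the same instant in ISO 8601 format
   )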
.. option:: notify_cmd_wakeup
A command to execute before the system is going to suspend for the purpose of notifying interested clients.
This command is only called in case a wake up is scheduled.
The given string is processed using Python's :meth:`str.format` and a format argument called ``timestamp`` encodes the UTC timestamp of the planned wake up time (float).
Additionally ``iso`` can be used to acquire the timestamp in ISO 8601 format.
If empty or not specified, no command will be called.
.. option:: notify_cmd_no_wakeup
A command to execute before the system is going to suspend for the purpose of notifying interested clients.
This command is only called in case NO wake up is scheduled.
Hence, no string formatting options are available.
If empty or not specified, no command will be called.
.. option:: woke_up_file
Location of a file that indicates to |project_program| that the computer has suspended since the last time checks were executed.
This file is usually created by a `systemd`_ service.
Thus, changing the location also requires adapting the respective service.
Refer to :ref:`systemd-integration` for further details.
.. option:: lock_file
Location of a file that is used to synchronize the continuously running daemon and the systemd callback.
.. option:: lock_timeout
Timeout in seconds used when trying to acquire the lock.
This should be longer than the maximum run time of all configured checks.
In the worst case, suspending the system is delayed by this amount of time because the ``presuspend`` hook has to wait until all checks have finished.
Activity check configuration
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. program:: config-check
For each activity check to execute, a section with the name format ``[check.*]`` needs to be created.
Each check has a name and an executing class which implements the behavior.
The part of the section name after ``check.`` determines the name and, in case no class option is given inside the section, also the class which implements the check.
In case the :option:`class` option is specified, the name is completely user-defined and the same check can even be instantiated multiple times with differing names.
For each check, these generic options can be specified:
.. option:: class
Name of the class implementing the check.
If the name does not contain a dot (``.``), this is assumed to be one of the checks provided by |project| internally.
Otherwise, this can be used to pull in third-party checks.
If this option is not specified, the section name must represent a valid internal check class.
.. option:: enabled
Needs to be ``true`` for a check to actually execute.
``false`` is assumed if not specified.
Furthermore, each check might have custom options.
Wake up check configuration
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Wake up checks uses the same configuration logic as the previously described activity checks.
However, the configuration file sections start with ``wakeup.`` instead of ``check.``.
autosuspend-3.0/doc/source/configuration_file.rst
Configuration file
##################
.. include:: configuration_file.inc
For options of individual checks, please refer to :ref:`available-checks` and :ref:`available-wakeups`.
autosuspend-3.0/doc/source/debugging.rst
Debugging
#########
In case you need to track configuration issues to understand why a system suspends or does not, the extensive logging output of |project_program| might be used.
Each iteration of the daemon logs exactly which condition detected activity or not.
So you should be able to find out what is going on.
The command line flag :option:`autosuspend -l` allows specifying a Python logging configuration file which specifies what to log.
The provided `systemd`_ service files (see :ref:`systemd-integration`) already use :file:`/etc/autosuspend-logging.conf` as the standard location and a default file is usually installed.
If you launch |project_program| manually from the console, the command line flag :option:`autosuspend -d` might also be used to get full logging to the console instead.
In case one of the conditions you monitor prevents suspending the system if an external connection is established (logged-in users, open TCP port), then the logging configuration file can be changed to use the `broadcast-logging`_ package.
This way, the server will broadcast new log messages on the network and external clients on the same network can listen to these messages without creating an explicit connection.
Please refer to the documentation of the `broadcast-logging`_ package on how to enable and use it.
Additionally, one might also examine the ``journalctl`` for |project_program| after the fact.
autosuspend-3.0/doc/source/description.inc
|project_program| is a daemon that periodically suspends a system on inactivity and wakes it up again automatically in case it is needed.
For this purpose, |project_program| periodically iterates a number of user-configurable activity checks, which indicate whether an activity on the host is currently present that should prevent the host from suspending.
In case one of the checks indicates such activity, no action is taken and periodic checking continues.
Otherwise, in case no activity can be detected, this state needs to be present for a specified amount of time before the host is suspended by |project_program|.
In addition to the activity checks, wake up checks are used to determine planned future activities of the system (for instance, a TV recording or a periodic backup).
In case such activities are known before suspending, |project_program| triggers a command to wake up the system automatically before the soonest activity.
autosuspend-3.0/doc/source/faq.rst
.. _faq:
Frequently Asked Questions
##########################
Usage
*****
How do I add custom checks?
===========================
Two options:
* Use a script with the :ref:`check-external-command` check.
* Implement a Python module with you check being a subclass of
:class:`autosuspend.checks.Activity` or
:class:`autosuspend.checks.Wakeup` and install it alongside |project|.
The custom check class can then be referenced in the config with its full dotted path, for instance, ``mymodule.MyCheck``, in the `class` field.
How do I wake up my system if needed?
=====================================
|project_bold| itself only handles wake ups for events that were foreseeable at the time the system was put into sleep mode.
In case the system also has to be used on demand, a simple way to wake up the system is to enable `Wake on LAN `_.
Here, a special network packet can be used to wake up the system again.
Multiple front-ends exist to send these magic packets.
The typical usage scenario with this approach is to manually send the magic packet when the system is needed, wait a few seconds, and then perform the intended tasks with the system.
Wake on LAN needs to be specifically enabled on the system.
Typically, the documentation of common Linux distributions explains how to enable Wake on LAN:
* `Archlinux `__
* `Debian `__
* `Ubuntu `__
A set of front-ends for various platforms allows sending the magic packets.
For instance:
* `gWakeOnLan `__: GTK GUI, Linux
* `wol `__: command line, Linux
* `Wake On Lan `__: GUI, Windows
* `Wake On Lan `__: Android
* `Wake On Lan `__: Android, open-source
* `Kore (Kodi remote control) `__: Android, for Kodi users
* `Mocha WOL `__: iOS
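As an illustration, sending the magic packet from another Linux machine with the command line ``wol`` tool listed above could look roughly like this, where the MAC address is a placeholder that has to be replaced with the address of the target system's network interface:

.. code-block:: bash

    wol 00:11:22:33:44:55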
How do I keep a system active during the day?
==============================================
Imagine you want to have a NAS that is always available between 7 a.m. and 8 p.m.
After 8 p.m., the system should go to sleep if no one is using it anymore.
Every morning at 7 a.m. it should wake up automatically.
This workflow can be realized using the :ref:`wakeup-calendar` wakeup check and the :ref:`check-active-calendar-event` activity check based on an `iCalendar`_ file residing on the local file system of the NAS.
The former check ensures that the system wakes up at the desired time of day, while the latter ensures that it stays active during the day.
The first step is to create the `iCalendar`_ file, which can conveniently and graphically be edited with `Thunderbird Lightning `_ or any other calendar frontend.
Essentially, the ``*.ics`` file may look like this::
BEGIN:VCALENDAR
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
VERSION:2.0
BEGIN:VEVENT
CREATED:20180602T151701Z
LAST-MODIFIED:20180602T152732Z
DTSTAMP:20180602T152732Z
UID:0ef23894-702e-40ac-ab09-94fa8c9c51fd
SUMMARY:keep active
RRULE:FREQ=DAILY
DTSTART:20180612T070000
DTEND:20180612T200000
TRANSP:OPAQUE
SEQUENCE:3
END:VEVENT
END:VCALENDAR
Afterwards, edit ``autosuspend.conf`` to contain the two aforementioned checks based on the created ``ics`` file.
This results in at least the following config:
.. code-block:: ini
[general]
interval = 30
suspend_cmd = /usr/bin/systemctl suspend
wakeup_cmd = echo {timestamp:.0f} > /sys/class/rtc/rtc0/wakealarm
woke_up_file = /var/run/autosuspend-just-woke-up
[check.ActiveCalendarEvent]
enabled = true
url = file:///path/to/your.ics
[wakeup.Calendar]
enabled = true
url = file:///path/to/your.ics
Adding other activity checks will ensure that the system stays awake even after 8 p.m. if it is still used.
Error messages
**************
No connection adapters were found for '\file://\*'
==================================================
You need to install the `requests-file`_ package for ``file://`` URIs to work.
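Depending on how |project_program| was installed, the package can usually be installed with the system package manager or, for pip-based installations, roughly like this:

.. code-block:: bash

    pip install requests-file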
autosuspend-3.0/doc/source/index.rst 0000664 0000000 0000000 00000003322 13611126124 0017631 0 ustar 00root root 0000000 0000000 |project| - a daemon to automatically suspend and wake up a system
##################################################################
.. ifconfig:: is_preview
.. warning::
This is the documentation for an unreleased preview version of |project|.
.. include:: description.inc
The following diagram visualizes the periodic processing performed by |project|.
.. uml::
@startuml
skinparam shadowing false
skinparam backgroundcolor transparent
skinparam Padding 8
skinparam ActivityBackgroundColor #FFFFFF
skinparam ActivityDiamondBackgroundColor #FFFFFF
skinparam ActivityBorderColor #333333
skinparam ActivityDiamondBorderColor #333333
skinparam ArrowColor #333333
start
:Execute activity checks;
if (Is the system active?) then (no)
if (Was the system idle before?) then (no)
:Remember current time as start of system inactivity;
else (yes)
endif
if (Is system idle long enough?) then (yes)
:Execute wake up checks;
if (Is a wake up required soon?) then (yes)
stop
else
if (Is any wake up required?) then (yes)
#BBFFBB:Schedule the earliest wake up;
else (no)
endif
endif
#BBFFBB:Suspend the system;
else (no)
stop
endif
else (yes)
:Forget start of system inactivity;
stop
endif
stop
@enduml
.. toctree::
:maxdepth: 2
:caption: Usage
installation
options
configuration_file
available_checks
available_wakeups
systemd_integration
api
.. toctree::
:maxdepth: 2
:caption: Support
faq
debugging
support
changelog
Indices and tables
##################
* :ref:`genindex`
* :ref:`search`
autosuspend-3.0/doc/source/installation.rst 0000664 0000000 0000000 00000003665 13611126124 0021235 0 ustar 00root root 0000000 0000000 Installation instructions
#########################
|project_program| is designed for Python **3** and does not work with Python 2.
.. note::
After installation, do not forget to enable and start |project| via `systemd`_ as described in :ref:`systemd-integration`.
Requirements
************
The minimal requirements are:
* `Python 3`_
* `psutil`_
* `portalocker`_
Additionally, some checks need further dependencies to function properly.
Please refer to :ref:`available-checks` for individual requirements.
If checks using URLs to load data should support ``file://`` URLs, `requests-file`_ is needed.
Binary packages
***************
Debian
======
Installation from official package sources::
apt-get install autosuspend
Archlinux (AUR)
===============
|project| is available as an `Archlinux AUR package `_.
Installation via :program:`aurman`::
aurman -S autosuspend
Other `AUR helpers `_ may be used, too.
Gentoo
======
Patrick Holthaus has provided an ebuild for Gentoo in `his overlay `_.
You can use it as follows::
eselect repository enable pholthaus-overlay
emaint sync -r pholthaus-overlay
emerge sys-apps/autosuspend
Other distributions
===================
In case you want to generate a package for a different Linux distribution, I'd be glad to hear about that.
From-source installation
************************
|project_program| provides the usual :file:`setup.py` file for installation using common `setuptools`_ methods.
Briefly, the following steps are necessary to install |project_program|:
.. code-block:: bash
git clone https://github.com/languitar/autosuspend.git
cd autosuspend
python3 setup.py install # with desired options
To build the documentation, the following command can be used:
.. code-block:: bash
python3 setup.py build_sphinx
autosuspend-3.0/doc/source/man_command.rst 0000664 0000000 0000000 00000001204 13611126124 0020770 0 ustar 00root root 0000000 0000000 :orphan:
.. _man-command:
|project|
#########
Synopsis
********
|project_bold| [*options*] **daemon|presuspend** [*subcommand options*]
Description
***********
.. include:: description.inc
If not specified via a command line argument, |project_program| looks for a default configuration at :file:`/etc/autosuspend.conf`.
:manpage:`autosuspend.conf(5)` describes the configuration file, the available checks, and their configuration options.
Options
*******
.. toctree::
options
Bugs
****
Please report bugs at the project repository at https://github.com/languitar/autosuspend.
See also
********
:manpage:`autosuspend.conf(5)`
autosuspend-3.0/doc/source/man_config.rst 0000664 0000000 0000000 00000000574 13611126124 0020630 0 ustar 00root root 0000000 0000000 :orphan:
|project|.conf
##############
Synopsis
********
:file:`/etc/autosuspend.conf`
General Configuration
*********************
Configures the |project_program| daemon.
.. toctree::
configuration_file
Available Activity Checks
*************************
.. toctree::
available_checks
Available Wakeup Checks
***********************
.. toctree::
available_wakeups
autosuspend-3.0/doc/source/options.rst 0000664 0000000 0000000 00000002657 13611126124 0020227 0 ustar 00root root 0000000 0000000 Command line options
####################
General syntax:
|project_bold| [*options*] **daemon|presuspend** [*subcommand options*]
General options
***************
.. program:: autosuspend
.. option:: -h, --help
Displays the online help.
.. option:: -c FILE, --config FILE
Specifies an alternate config file to use instead of the default one at :file:`/etc/autosuspend.conf`.
.. option:: -l FILE, --logging FILE
Configures the logging system using the provided logging configuration file.
This file needs to follow the conventions for :ref:`Python logging files `.
.. option:: -d
Configures full debug logging on the console.
Mutually exclusive with :option:`autosuspend -l`.
Subcommand ``daemon``
*********************
Starts the continuously running daemon.
.. program:: autosuspend daemon
.. option:: -a, --allchecks
Usually, |project_program| stops checks in each iteration as soon as the first matching check indicates system activity.
If this flag is set, all subsequent checks are still executed.
Useful mostly for debugging purposes.
.. option:: -r SECONDS, --runfor SECONDS
If specified, do not run endlessly.
Instead, operate only for the specified number of seconds, then exit.
Useful mostly for debugging purposes.
Subcommand ``presuspend``
*************************
Should be called by the system before suspending.
.. program:: autosuspend presuspend
This subcommand has no further options.
autosuspend-3.0/doc/source/support.rst 0000664 0000000 0000000 00000000637 13611126124 0020244 0 ustar 00root root 0000000 0000000 Support requests
################
For questions, please first consult the issue tracker at the `Github project `_ for existing issues and questions.
Questions are marked with the `question` tag.
If your question is not answered, open a new issue with the question.
In case you have found a bug or you want to request a new feature, please also open an issue at the `Github project `_.
autosuspend-3.0/doc/source/systemd_integration.rst 0000664 0000000 0000000 00000002534 13611126124 0022621 0 ustar 00root root 0000000 0000000 .. _systemd-integration:
systemd integration
###################
Even though it is possible to run |project_program| manually (cf. :ref:`the manpage `), in production use cases, the daemon will usually be run from `systemd`_.
For this purpose, the package ships with `service definition files `_ for `systemd`_, so that you should be able to manage |project_program| via `systemd`_.
These files need to be installed in the appropriate locations for such service files, which depend on the Linux distribution.
Some common locations are:
* :file:`/usr/lib/systemd/system` (e.g. Archlinux packaged service files)
* :file:`/lib/systemd/system` (e.g. Debian packaged service files)
* :file:`/etc/systemd/system` (e.g. Archlinux manually added service files)
Binary installation packages for Linux distributions should have installed the service files at the appropriate locations already.
To enable |project_program| via `systemd`_ so that it is automatically started at boot, execute:
.. code-block:: bash
systemctl enable autosuspend.service
systemctl enable autosuspend-detect-suspend.service
.. note::
Do not forget the second ``enable`` call to ensure that wake ups are configured even if the system is manually placed into suspend.
To start |project_program| immediately, execute:
.. code-block:: bash
systemctl start autosuspend.service
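To verify that the daemon is running and to inspect its log output, the usual `systemd`_ tools can be used, for instance:

.. code-block:: bash

    systemctl status autosuspend.service
    journalctl -u autosuspend.service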
autosuspend-3.0/readthedocs.yml 0000664 0000000 0000000 00000000234 13611126124 0016732 0 ustar 00root root 0000000 0000000 version: 2
build:
image: latest
python:
version: 3.7
install:
- requirements: requirements-doc.txt
- method: pip
path: .
autosuspend-3.0/requirements-doc.txt 0000664 0000000 0000000 00000000126 13611126124 0017751 0 ustar 00root root 0000000 0000000 sphinx_issues
sphinx
sphinx_rtd_theme
sphinxcontrib-plantuml
sphinx-autodoc-typehints
autosuspend-3.0/setup.cfg 0000664 0000000 0000000 00000002127 13611126124 0015546 0 ustar 00root root 0000000 0000000 [aliases]
test=pytest
[build_sphinx]
source-dir = doc/source
build-dir = doc/build
[flake8]
exclude =
doc,
.tox,
.git,
__pycache__,
build,
dist,
.eggs,
env,
.mypy_cache
mypy_config=setup.cfg
per-file-ignores =
tests/*: D1, S106, S404, S604, TYP
tests/conftest.py: TYP
setup.py: BLK
max-line-length = 88
ignore =
C812,
C816,
E203,
E501,
D202,
D10,
D102,
D413,
P1,
W503,
TYP101,
TYP102,
TYP002,
TYP003,
S101,
S404,
DUO116,
DUO107
application-import-names = autosuspend
import-order-style = google
[mypy]
ignore_missing_imports=True
[tool:pytest]
log_level = DEBUG
markers =
integration: longer-running integration tests
filterwarnings =
ignore::DeprecationWarning
default::DeprecationWarning:autosuspend
addopts =
--cov-config=setup.cfg
[coverage:run]
branch = True
source = autosuspend
[coverage:paths]
source =
src/
*/site-packages/
[coverage:report]
exclude_lines =
pragma: no cover
def __repr__
if __name__ == "__main__":
@abc.abstractmethod
autosuspend-3.0/setup.py 0000664 0000000 0000000 00000003246 13611126124 0015442 0 ustar 00root root 0000000 0000000 import os
import os.path
from setuptools import find_packages, setup
name = 'autosuspend'
with open(os.path.join(
os.path.abspath(os.path.dirname(os.path.realpath(__file__))),
'VERSION'), 'r') as version_file:
lines = version_file.readlines()
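# the second line of the VERSION file contains the full release string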
release = lines[1].strip()
extras_require = {
'Mpd': ['python-mpd2'],
'Kodi': ['requests'],
'XPath': ['lxml', 'requests'],
'Logind': ['dbus-python'],
'ical': ['requests', 'icalendar', 'python-dateutil', 'tzlocal'],
'localfiles': ['requests-file'],
'test': [
'pytest',
'pytest-cov',
'pytest-mock',
'freezegun',
'python-dbusmock',
'PyGObject',
'pytest-datadir',
'pytest-httpserver',
],
}
extras_require['test'].extend(
{dep for k, v in extras_require.items() if k != 'test' for dep in v},
)
setup(
name=name,
version=release,
description='A daemon to suspend your server in case of inactivity',
author='Johannes Wienke',
author_email='languitar@semipol.de',
license='GPL2',
zip_safe=False,
setup_requires=[
'pytest-runner',
],
install_requires=[
'psutil>=5.0',
'portalocker',
],
extras_require=extras_require,
package_dir={
'': 'src',
},
packages=find_packages('src'),
entry_points={
'console_scripts': [
'autosuspend = autosuspend:main',
],
},
data_files=[
('etc', ['data/autosuspend.conf',
'data/autosuspend-logging.conf']),
('lib/systemd/system', ['data/autosuspend.service',
'data/autosuspend-detect-suspend.service']),
],
)
autosuspend-3.0/src/ 0000775 0000000 0000000 00000000000 13611126124 0014512 5 ustar 00root root 0000000 0000000 autosuspend-3.0/src/autosuspend/ 0000775 0000000 0000000 00000000000 13611126124 0017064 5 ustar 00root root 0000000 0000000 autosuspend-3.0/src/autosuspend/__init__.py 0000775 0000000 0000000 00000060105 13611126124 0021202 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python3
"""A daemon to suspend a system on inactivity."""
import argparse
import configparser
import datetime
import functools
import logging
import logging.config
import os
import os.path
import pathlib
import subprocess
import time
from typing import (
Callable,
IO,
Iterable,
List,
Optional,
Sequence,
Type,
TypeVar,
Union,
)
import portalocker
from .checks import Activity, Check, ConfigurationError, TemporaryCheckError, Wakeup
from .util import logger_by_class_instance
# pylint: disable=invalid-name
_logger = logging.getLogger("autosuspend")
# pylint: enable=invalid-name
def execute_suspend(
command: Union[str, Sequence[str]], wakeup_at: Optional[datetime.datetime],
) -> None:
"""Suspend the system by calling the specified command.
Args:
command:
The command to execute, which will be executed using shell
execution
wakeup_at:
potential next wakeup time. Only informative.
"""
_logger.info("Suspending using command: %s", command)
try:
subprocess.check_call(command, shell=True) # noqa: S602
except subprocess.CalledProcessError:
_logger.warning("Unable to execute suspend command: %s", command, exc_info=True)
def notify_suspend(
command_wakeup_template: Optional[str],
command_no_wakeup: Optional[str],
wakeup_at: Optional[datetime.datetime],
) -> None:
"""Call a command to notify on suspending.
Args:
command_wakeup_template:
A template for the command to execute in case a wakeup is
scheduled.
It will be executed using shell execution.
The template is processed with string formatting to include
information on a potentially scheduled wakeup.
Notifications can be disabled by providing ``None`` here.
command_no_wakeup:
Command to execute for notification in case no wake up is
scheduled.
Will be executed using shell execution.
wakeup_at:
if not ``None``, this is the time the system will wake up again
"""
def safe_exec(command: str) -> None:
_logger.info("Notifying using command: %s", command)
try:
subprocess.check_call(command, shell=True) # noqa: S602
except subprocess.CalledProcessError:
_logger.warning(
"Unable to execute notification command: %s", command, exc_info=True
)
if wakeup_at and command_wakeup_template:
command = command_wakeup_template.format(
timestamp=wakeup_at.timestamp(), iso=wakeup_at.isoformat()
)
safe_exec(command)
elif not wakeup_at and command_no_wakeup:
safe_exec(command_no_wakeup)
else:
_logger.info("No suitable notification command configured.")
def notify_and_suspend(
suspend_cmd: Union[str, Sequence[str]],
notify_cmd_wakeup_template: Optional[str],
notify_cmd_no_wakeup: Optional[str],
wakeup_at: Optional[datetime.datetime],
) -> None:
notify_suspend(notify_cmd_wakeup_template, notify_cmd_no_wakeup, wakeup_at)
execute_suspend(suspend_cmd, wakeup_at)
def schedule_wakeup(command_template: str, wakeup_at: datetime.datetime) -> None:
command = command_template.format(
timestamp=wakeup_at.timestamp(), iso=wakeup_at.isoformat()
)
_logger.info("Scheduling wakeup using command: %s", command)
try:
subprocess.check_call(command, shell=True) # noqa: S602
except subprocess.CalledProcessError:
_logger.warning(
"Unable to execute wakeup scheduling command: %s", command, exc_info=True
)
def execute_checks(
checks: Iterable[Activity], all_checks: bool, logger: logging.Logger
) -> bool:
"""Execute the provided checks sequentially.
Args:
checks:
the checks to execute
all_checks:
if ``True``, execute all checks even if a previous one already
matched.
Return:
``True`` if a check matched
"""
matched = False
for check in checks:
logger.debug("Executing check %s", check.name)
try:
result = check.check()
if result is not None:
logger.info("Check %s matched. Reason: %s", check.name, result)
matched = True
if not all_checks:
logger.debug("Skipping further checks")
break
except TemporaryCheckError:
logger.warning("Check %s failed. Ignoring...", check, exc_info=True)
return matched
def execute_wakeups(
wakeups: Iterable[Wakeup], timestamp: datetime.datetime, logger: logging.Logger
) -> Optional[datetime.datetime]:
wakeup_at = None
for wakeup in wakeups:
try:
this_at = wakeup.check(timestamp)
# sanity checks
if this_at is None:
continue
if this_at <= timestamp:
logger.warning(
"Wakeup %s returned a scheduled wakeup at %s, "
"which is earlier than the current time %s. "
"Ignoring.",
wakeup,
this_at,
timestamp,
)
continue
if wakeup_at is None:
wakeup_at = this_at
else:
wakeup_at = min(this_at, wakeup_at)
except TemporaryCheckError:
logger.warning("Wakeup %s failed. Ignoring...", wakeup, exc_info=True)
return wakeup_at
class Processor:
"""Implements the logic for triggering suspension.
Args:
activities:
the activity checks to execute
wakeups:
the wakeup checks to execute
idle_time:
the required amount of time the system has to be idle before
suspension is triggered in seconds
min_sleep_time:
the minimum time the system has to sleep before it is woken up
again in seconds.
wakeup_delta:
wake up this amount of seconds before the scheduled wake up time.
sleep_fn:
a callable that triggers suspension
wakeup_fn:
a callable that schedules the wakeup at the specified time in UTC
seconds
notify_fn:
a callable that is called before suspending.
One argument gives the scheduled wakeup time or ``None``.
all_activities:
if ``True``, execute all activity checks even if a previous one
already matched.
"""
def __init__(
self,
activities: Iterable[Activity],
wakeups: Iterable[Wakeup],
idle_time: float,
min_sleep_time: float,
wakeup_delta: float,
sleep_fn: Callable,
wakeup_fn: Callable[[datetime.datetime], None],
all_activities: bool,
) -> None:
self._logger = logger_by_class_instance(self)
self._activities = activities
self._wakeups = wakeups
self._idle_time = idle_time
self._min_sleep_time = min_sleep_time
self._wakeup_delta = wakeup_delta
self._sleep_fn = sleep_fn
self._wakeup_fn = wakeup_fn
self._all_activities = all_activities
self._idle_since = None # type: Optional[datetime.datetime]
def _reset_state(self, reason: str) -> None:
self._logger.info("%s. Resetting state", reason)
self._idle_since = None
def iteration(self, timestamp: datetime.datetime, just_woke_up: bool) -> None:
self._logger.info("Starting new check iteration")
# exit in case something prevents suspension
if just_woke_up:
self._reset_state("Just woke up from suspension.")
return
# determine system activity
active = execute_checks(self._activities, self._all_activities, self._logger)
self._logger.debug(
"All activity checks have been executed. " "Active: %s", active
)
if active:
self._reset_state("System is active")
return
# set idle timestamp if required
if self._idle_since is None:
self._idle_since = timestamp
self._logger.info("System is idle since %s", self._idle_since)
# determine if the system is idle long enough
self._logger.debug(
"Idle seconds: %s", (timestamp - self._idle_since).total_seconds()
)
if (timestamp - self._idle_since).total_seconds() > self._idle_time:
self._logger.info("System is idle long enough.")
# determine potential wake ups
wakeup_at = execute_wakeups(self._wakeups, timestamp, self._logger)
if wakeup_at is not None:
self._logger.debug("System wakeup required at %s", wakeup_at)
wakeup_at -= datetime.timedelta(seconds=self._wakeup_delta)
self._logger.debug(
"With delta applied, system should wake up at %s", wakeup_at,
)
else:
self._logger.debug("No automatic wakeup required")
# idle time would be reached, handle wake up
if wakeup_at is not None:
wakeup_in = wakeup_at - timestamp
if wakeup_in.total_seconds() < self._min_sleep_time:
self._logger.info(
"Would wake up in %s seconds, which is "
"below the minimum amount of %s s. "
"Not suspending.",
wakeup_in.total_seconds(),
self._min_sleep_time,
)
return
# schedule wakeup
self._logger.info("Scheduling wakeup at %s", wakeup_at)
self._wakeup_fn(wakeup_at)
self._reset_state("Going to suspend")
self._sleep_fn(wakeup_at)
else:
self._logger.info(
"Desired idle time of %s s not reached yet.", self._idle_time
)
def loop(
processor: Processor,
interval: float,
run_for: Optional[int],
woke_up_file: str,
lock_file: str,
lock_timeout: float,
) -> None:
"""Run the main loop of the daemon.
Args:
processor:
the processor to use for handling the suspension computations
interval:
the length of one iteration of the main loop in seconds
run_for:
if specified, run the main loop for the specified amount of seconds
before terminating (approximately)
woke_up_file:
path of a file that marks that the system was sleeping since the
last processing iterations
lock_file:
path of a file used for locking modifications to the `woke_up_file`
to ensure consistency
lock_timeout:
time in seconds to wait for acquiring the lock file
"""
start_time = datetime.datetime.now(datetime.timezone.utc)
while (run_for is None) or (
datetime.datetime.now(datetime.timezone.utc)
< (start_time + datetime.timedelta(seconds=run_for))
):
try:
_logger.debug("New iteration, trying to acquire lock")
with portalocker.Lock(lock_file, timeout=lock_timeout):
_logger.debug("Acquired lock")
just_woke_up = os.path.isfile(woke_up_file)
if just_woke_up:
_logger.debug("Removing woke up file at %s", woke_up_file)
try:
os.remove(woke_up_file)
except FileNotFoundError:
_logger.warning("Just woke up file disappeared", exc_info=True)
processor.iteration(
datetime.datetime.now(datetime.timezone.utc), just_woke_up
)
except portalocker.LockException:
_logger.warning("Failed to acquire lock, skipping iteration", exc_info=True)
time.sleep(interval)
CheckType = TypeVar("CheckType", bound=Check)
def config_section_string(section: configparser.SectionProxy) -> str:
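# replace password values so that they do not end up in the log output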
data = {k: v if k != "password" else "" for k, v in section.items()}
return f"{data}"
def set_up_checks(
config: configparser.ConfigParser,
prefix: str,
internal_module: str,
target_class: Type[CheckType],
error_none: bool = False,
) -> List[CheckType]:
"""Set up :py.class:`Check` instances from a given configuration.
Args:
config:
the configuration to use
prefix:
The prefix of sections in the configuration file to use for
creating instances.
internal_module:
Name of the submodule of ``autosuspend.checks`` to use for
discovering internal check classes.
target_class:
the base class to check new instance against
error_none:
Raise an error if nothing was configured?
"""
configured_checks = [] # type: List[CheckType]
check_section = [s for s in config.sections() if s.startswith("{}.".format(prefix))]
for section in check_section:
name = section[len("{}.".format(prefix)) :]
# legacy method to determine the check name from the section header
class_name = name
# if there is an explicit class, use that one with higher priority
if "class" in config[section]:
class_name = config[section]["class"]
enabled = config.getboolean(section, "enabled", fallback=False)
if not enabled:
_logger.debug("Skipping disabled check {}".format(name))
continue
# try to find the required class
if "." in class_name:
# dot in class name means external class
import_module, import_class = class_name.rsplit(".", maxsplit=1)
else:
# no dot means internal class
import_module = "autosuspend.checks.{}".format(internal_module)
import_class = class_name
_logger.info(
"Configuring check %s with class %s from module %s "
"using config parameters %s",
name,
import_class,
import_module,
config_section_string(config[section]),
)
try:
klass = getattr(
__import__(import_module, fromlist=[import_class]), import_class
)
except AttributeError as error:
raise ConfigurationError(
"Cannot create built-in check named {}: "
"Class does not exist".format(class_name)
) from error
check = klass.create(name, config[section])
if not isinstance(check, target_class):
raise ConfigurationError(
"Check {} is not a correct {} instance".format(
check, target_class.__name__
)
)
_logger.debug(
"Created check instance {} with options {}".format(check, check.options())
)
configured_checks.append(check)
if not configured_checks and error_none:
raise ConfigurationError("No checks enabled")
return configured_checks
def parse_config(config_file: Iterable[str]) -> configparser.ConfigParser:
"""Parse the configuration file.
Args:
config_file:
The file to parse
"""
_logger.debug("Reading config file %s", config_file)
config = configparser.ConfigParser(
interpolation=configparser.ExtendedInterpolation()
)
config.read_file(config_file)
_logger.debug("Parsed config file: %s", config)
return config
def parse_arguments(args: Optional[Sequence[str]]) -> argparse.Namespace:
"""Parse command line arguments.
Args:
args:
if specified, use the provided arguments instead of the default
ones determined via the :module:`sys` module.
"""
parser = argparse.ArgumentParser(
description="Automatically suspends a server " "based on several criteria",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
IO # for making pyflakes happy
default_config = None # type: Optional[IO[str]]
try:
default_config = open("/etc/autosuspend.conf", "r")
except (FileNotFoundError, IsADirectoryError, PermissionError):
pass
parser.add_argument(
"-c",
"--config",
dest="config_file",
type=argparse.FileType("r"),
default=default_config,
required=default_config is None,
metavar="FILE",
help="The config file to use",
)
logging_group = parser.add_mutually_exclusive_group()
logging_group.add_argument(
"-l",
"--logging",
type=argparse.FileType("r"),
default=None,
metavar="FILE",
help="Configures the python logging system from the specified "
"configuration file.",
)
logging_group.add_argument(
"-d",
"--debug",
action="store_true",
default=False,
help="Configures the logging system to provide full debug output " "on stdout.",
)
subparsers = parser.add_subparsers(title="subcommands", dest="subcommand")
subparsers.required = True
parser_daemon = subparsers.add_parser(
"daemon", help="Execute the continuously operating daemon"
)
parser_daemon.set_defaults(func=main_daemon)
parser_daemon.add_argument(
"-a",
"--allchecks",
dest="all_checks",
default=False,
action="store_true",
help="Execute all checks even if one has already prevented "
"the system from going to sleep. Useful to debug individual "
"checks.",
)
parser_daemon.add_argument(
"-r",
"--runfor",
dest="run_for",
type=float,
default=None,
metavar="SEC",
help="If set, run for the specified amount of seconds before exiting "
"instead of endless execution.",
)
parser_hook = subparsers.add_parser(
"presuspend", help="Hook method to be called before suspending"
)
parser_hook.set_defaults(func=main_hook)
result = parser.parse_args(args)
_logger.debug("Parsed command line arguments %s", result)
return result
def configure_logging(config_file: Optional[IO], debug: bool) -> None:
"""Configure the python :mod:`logging` system.
Assumes that either a config file is provided, or debugging is enabled.
Both together are not possible.
Args:
config_file:
a configuration file pointed by a :ref:`file object
`
debug:
if ``True``, enable debug logging
"""
if config_file:
try:
logging.config.fileConfig(config_file)
except Exception:
# at least configure warnings
logging.basicConfig(level=logging.WARNING)
_logger.warning(
"Unable to configure logging from file %s. "
"Falling back to warning level.",
config_file,
exc_info=True,
)
else:
if debug:
logging.basicConfig(level=logging.DEBUG)
else:
# at least configure warnings
logging.basicConfig(level=logging.WARNING)
def get_notify_and_suspend_func(config: configparser.ConfigParser) -> Callable:
return functools.partial(
notify_and_suspend,
config.get("general", "suspend_cmd"),
config.get(
"general", # type: ignore # python/typeshed#2093
"notify_cmd_wakeup",
fallback=None,
),
config.get(
"general", # type: ignore # python/typeshed#2093
"notify_cmd_no_wakeup",
fallback=None,
),
)
def get_schedule_wakeup_func(
config: configparser.ConfigParser,
) -> Callable[[datetime.datetime], None]:
return functools.partial(schedule_wakeup, config.get("general", "wakeup_cmd"))
def get_woke_up_file(config: configparser.ConfigParser) -> str:
return config.get(
"general", "woke_up_file", fallback="/var/run/autosuspend-just-woke-up"
)
def get_lock_file(config: configparser.ConfigParser) -> str:
return config.get("general", "lock_file", fallback="/var/lock/autosuspend.lock")
def get_lock_timeout(config: configparser.ConfigParser) -> float:
return config.getfloat("general", "lock_timeout", fallback=30.0)
def get_wakeup_delta(config: configparser.ConfigParser) -> float:
return config.getfloat("general", "wakeup_delta", fallback=30)
def configure_processor(
args: argparse.Namespace,
config: configparser.ConfigParser,
checks: Iterable[Activity],
wakeups: Iterable[Wakeup],
) -> Processor:
return Processor(
checks,
wakeups,
config.getfloat("general", "idle_time", fallback=300),
config.getfloat("general", "min_sleep_time", fallback=1200),
get_wakeup_delta(config),
get_notify_and_suspend_func(config),
get_schedule_wakeup_func(config),
all_activities=args.all_checks,
)
def hook(
wakeups: List[Wakeup],
wakeup_delta: float,
wakeup_fn: Callable[[datetime.datetime], None],
woke_up_file: str,
lock_file: str,
lock_timeout: float,
) -> None:
"""Installs wake ups and notifies the daemon before suspending.
Args:
wakeups:
set of wakeup checks to use for determining the wake up time
wakeup_delta:
The amount of time in seconds to wake up before an event
wakeup_fn:
function to call with the next wake up time
woke_up_file:
location of the file that instructs the daemon that the system just
woke up
lock_file:
path of a file used for locking modifications to the `woke_up_file`
to ensure consistency
lock_timeout:
time in seconds to wait for acquiring the lock file
"""
_logger.info("Pre-suspend hook starting, trying to acquire lock")
try:
with portalocker.Lock(lock_file, timeout=lock_timeout):
_logger.debug("Hook acquired lock")
_logger.debug("Hook executing with configured wake ups: %s", wakeups)
wakeup_at = execute_wakeups(
wakeups, datetime.datetime.now(datetime.timezone.utc), _logger
)
_logger.debug("Hook next wake up at %s", wakeup_at)
if wakeup_at:
wakeup_at -= datetime.timedelta(seconds=wakeup_delta)
_logger.info("Scheduling next wake up at %s", wakeup_at)
wakeup_fn(wakeup_at)
else:
_logger.info("No wake up required. Terminating")
# create the just woke up file
pathlib.Path(woke_up_file).touch()
except portalocker.LockException:
_logger.warning(
"Hook unable to acquire lock. Not informing daemon.", exc_info=True
)
def main_hook(args: argparse.Namespace, config: configparser.ConfigParser) -> None:
wakeups = set_up_checks(
config, "wakeup", "wakeup", Wakeup, # type: ignore # python/mypy#5374
)
hook(
wakeups,
get_wakeup_delta(config),
get_schedule_wakeup_func(config),
get_woke_up_file(config),
get_lock_file(config),
get_lock_timeout(config),
)
def main_daemon(args: argparse.Namespace, config: configparser.ConfigParser) -> None:
"""Run the daemon."""
checks = set_up_checks(
config,
"check",
"activity",
Activity, # type: ignore
error_none=True,
)
wakeups = set_up_checks(
config, "wakeup", "wakeup", Wakeup, # type: ignore
)
processor = configure_processor(args, config, checks, wakeups)
loop(
processor,
config.getfloat("general", "interval", fallback=60),
run_for=args.run_for,
woke_up_file=get_woke_up_file(config),
lock_file=get_lock_file(config),
lock_timeout=get_lock_timeout(config),
)
def main(argv: Optional[Sequence[str]] = None) -> None:
"""Run the daemon."""
args = parse_arguments(argv)
configure_logging(args.logging, args.debug)
config = parse_config(args.config_file)
args.func(args, config)
if __name__ == "__main__":
main()
autosuspend-3.0/src/autosuspend/checks/ 0000775 0000000 0000000 00000000000 13611126124 0020324 5 ustar 00root root 0000000 0000000 autosuspend-3.0/src/autosuspend/checks/__init__.py 0000664 0000000 0000000 00000006747 13611126124 0022453 0 ustar 00root root 0000000 0000000 """Provides the basic types used for checks."""
import abc
import configparser
import datetime
from typing import Any, Mapping, Optional
from autosuspend.util import logger_by_class_instance
class ConfigurationError(RuntimeError):
"""Indicates an error in the configuration of a :class:`Check`."""
pass
class TemporaryCheckError(RuntimeError):
"""Indicates a temporary error while performing a check.
Such an error can be ignored for some time since it might recover
automatically.
"""
pass
class SevereCheckError(RuntimeError):
"""Indicates a sever check error that will probably not recover.
There no hope this situation recovers.
"""
pass
class Check(abc.ABC):
"""Base class for all kinds of checks.
Subclasses must call this class' ``__init__`` method.
Args:
name (str):
Configured name of the check
"""
@classmethod
@abc.abstractmethod
def create(cls, name: str, config: configparser.SectionProxy) -> "Check":
"""Create a new check instance from the provided configuration.
Args:
name:
user-defined name for the check
config:
config parser section with the configuration for this check
Raises:
ConfigurationError:
Configuration for this check is inappropriate
"""
pass
def __init__(self, name: Optional[str] = None) -> None:
if name:
self.name = name
else:
self.name = self.__class__.__name__
self.logger = logger_by_class_instance(self, name)
def options(self) -> Mapping[str, Any]:
"""Return the configured options as a mapping.
This is used for debugging purposes only.
"""
return {
k: v for k, v in self.__dict__.items() if not callable(v) and k != "logger"
}
def __str__(self) -> str:
return "{name}[class={clazz}]".format(
name=self.name, clazz=self.__class__.__name__
)
class Activity(Check):
"""Base class for activity checks.
Subclasses must call this class' __init__ method.
"""
@abc.abstractmethod
def check(self) -> Optional[str]:
"""Determine if system activity exists that prevents suspending.
Returns:
A string describing which condition currently prevents sleep, else ``None``.
Raises:
TemporaryCheckError:
Check execution currently fails but might recover later
SevereCheckError:
Check executions fails severely
"""
pass
def __str__(self) -> str:
return "{name}[class={clazz}]".format(
name=self.name, clazz=self.__class__.__name__
)
class Wakeup(Check):
"""Represents a check for potential wake up points."""
@abc.abstractmethod
def check(self, timestamp: datetime.datetime) -> Optional[datetime.datetime]:
"""Indicate if a wakeup has to be scheduled for this check.
Args:
timestamp:
the time at which the call to the wakeup check is made
Returns:
a datetime describing when the system needs to be running again or
``None`` if no wakeup is required. Use timezone aware datetimes.
Raises:
TemporaryCheckError:
Check execution currently fails but might recover later
SevereCheckError:
Check executions fails severely
"""
pass
autosuspend-3.0/src/autosuspend/checks/activity.py 0000664 0000000 0000000 00000063166 13611126124 0022546 0 ustar 00root root 0000000 0000000 import configparser
import copy
from datetime import datetime, timedelta, timezone
import glob
from io import BytesIO
import json
import os
import pwd
import re
import socket
import subprocess
import time
from typing import Any, Dict, Iterable, Optional, Pattern, Sequence, Tuple
import warnings
import psutil
from . import Activity, Check, ConfigurationError, SevereCheckError, TemporaryCheckError
from .util import CommandMixin, NetworkMixin, XPathMixin
from ..util.systemd import list_logind_sessions
class ActiveCalendarEvent(NetworkMixin, Activity):
"""Determines activity by checking against events in an icalendar file."""
def __init__(self, name: str, **kwargs) -> None:
NetworkMixin.__init__(self, **kwargs)
Activity.__init__(self, name)
def check(self) -> Optional[str]:
from ..util.ical import list_calendar_events
response = self.request()
start = datetime.now(timezone.utc)
end = start + timedelta(minutes=1)
events = list_calendar_events(BytesIO(response.content), start, end)
self.logger.debug(
"Listing active events between %s and %s returned %s events",
start,
end,
len(events),
)
if events:
return "Calendar event {} is active".format(events[0])
else:
return None
class ActiveConnection(Activity):
"""Checks if a client connection exists on specified ports."""
@classmethod
def create(
cls, name: str, config: configparser.SectionProxy,
) -> "ActiveConnection":
try:
split_ports = config["ports"].split(",")
ports = {int(p.strip()) for p in split_ports}
return cls(name, ports)
except KeyError as error:
raise ConfigurationError("Missing option ports") from error
except ValueError as error:
raise ConfigurationError("Ports must be integers") from error
def __init__(self, name: str, ports: Iterable[int]) -> None:
Activity.__init__(self, name)
self._ports = ports
def check(self) -> Optional[str]:
own_addresses = [
(item.family, item.address.split("%")[0])
for sublist in psutil.net_if_addrs().values()
for item in sublist
]
connected = [
c.laddr[1]
for c in psutil.net_connections()
if (
(c.family, c.laddr[0]) in own_addresses
and c.status == "ESTABLISHED"
and c.laddr[1] in self._ports
)
]
if connected:
return "Ports {} are connected".format(connected)
else:
return None
class ExternalCommand(CommandMixin, Activity):
def __init__(self, name: str, command: str) -> None:
CommandMixin.__init__(self, command)
Check.__init__(self, name)
def check(self) -> Optional[str]:
try:
subprocess.check_call(self._command, shell=True) # noqa: S602
return "Command {} succeeded".format(self._command)
except subprocess.CalledProcessError:
return None
def _add_default_kodi_url(config: configparser.SectionProxy) -> None:
if "url" not in config:
config["url"] = "http://localhost:8080/jsonrpc"
class Kodi(NetworkMixin, Activity):
@classmethod
def collect_init_args(cls, config: configparser.SectionProxy) -> Dict[str, Any]:
try:
_add_default_kodi_url(config)
args = NetworkMixin.collect_init_args(config)
args["suspend_while_paused"] = config.getboolean(
"suspend_while_paused", fallback=False
)
return args
except ValueError as error:
raise ConfigurationError("Configuration error {}".format(error)) from error
@classmethod
def create(cls, name: str, config: configparser.SectionProxy) -> "Kodi":
return cls(name, **cls.collect_init_args(config))
def __init__(
self, name: str, url: str, suspend_while_paused: bool = False, **kwargs
) -> None:
self._suspend_while_paused = suspend_while_paused
if self._suspend_while_paused:
request = url + (
'?request={"jsonrpc": "2.0", "id": 1, '
'"method": "XBMC.GetInfoBooleans",'
'"params": {"booleans": ["Player.Playing"]} }'
)
else:
request = url + (
'?request={"jsonrpc": "2.0", "id": 1, '
'"method": "Player.GetActivePlayers"}'
)
NetworkMixin.__init__(self, url=request, **kwargs)
Activity.__init__(self, name)
def check(self) -> Optional[str]:
try:
reply = self.request().json()
if self._suspend_while_paused:
if reply["result"]["Player.Playing"]:
return "Kodi actively playing media"
else:
if reply["result"]:
return "Kodi currently playing"
return None
except (KeyError, TypeError, json.JSONDecodeError) as error:
raise TemporaryCheckError(error) from error
class KodiIdleTime(NetworkMixin, Activity):
@classmethod
def collect_init_args(cls, config: configparser.SectionProxy) -> Dict[str, Any]:
try:
_add_default_kodi_url(config)
args = NetworkMixin.collect_init_args(config)
args["idle_time"] = config.getint("idle_time", fallback=120)
return args
except ValueError as error:
raise ConfigurationError("Configuration error " + str(error)) from error
@classmethod
def create(cls, name: str, config: configparser.SectionProxy) -> "KodiIdleTime":
return cls(name, **cls.collect_init_args(config))
def __init__(self, name: str, url: str, idle_time: int, **kwargs) -> None:
request = url + (
'?request={{"jsonrpc": "2.0", "id": 1, '
'"method": "XBMC.GetInfoBooleans",'
'"params": {{"booleans": ["System.IdleTime({})"]}}}}'.format(idle_time)
)
NetworkMixin.__init__(self, url=request, **kwargs)
Activity.__init__(self, name)
self._idle_time = idle_time
def check(self) -> Optional[str]:
try:
reply = self.request().json()
if not reply["result"]["System.IdleTime({})".format(self._idle_time)]:
return "Someone interacts with Kodi"
else:
return None
except (KeyError, TypeError, json.JSONDecodeError) as error:
raise TemporaryCheckError(error) from error
class Load(Activity):
@classmethod
def create(cls, name: str, config: configparser.SectionProxy) -> "Load":
try:
return cls(name, config.getfloat("threshold", fallback=2.5))
except ValueError as error:
raise ConfigurationError(
"Unable to parse threshold as float: {}".format(error)
) from error
def __init__(self, name: str, threshold: float) -> None:
Check.__init__(self, name)
self._threshold = threshold
def check(self) -> Optional[str]:
loadcurrent = os.getloadavg()[1]
self.logger.debug("Load: %s", loadcurrent)
if loadcurrent > self._threshold:
return "Load {} > threshold {}".format(loadcurrent, self._threshold)
else:
return None
class Mpd(Activity):
@classmethod
def create(cls, name: str, config: configparser.SectionProxy) -> "Mpd":
try:
host = config.get("host", fallback="localhost")
port = config.getint("port", fallback=6600)
timeout = config.getint("timeout", fallback=5)
return cls(name, host, port, timeout)
except ValueError as error:
raise ConfigurationError(
"Host port or timeout configuration wrong: {}".format(error)
) from error
def __init__(self, name: str, host: str, port: int, timeout: float) -> None:
Check.__init__(self, name)
self._host = host
self._port = port
self._timeout = timeout
def _get_state(self) -> Dict:
from mpd import MPDClient
client = MPDClient()
client.timeout = self._timeout
client.connect(self._host, self._port)
state = client.status()
client.close()
client.disconnect()
return state
def check(self) -> Optional[str]:
try:
state = self._get_state()
if state["state"] == "play":
return "MPD currently playing"
else:
return None
except (ConnectionError, socket.timeout, socket.gaierror) as error:
raise TemporaryCheckError(error) from error
class NetworkBandwidth(Activity):
@classmethod
def create(
cls, name: str, config: configparser.SectionProxy,
) -> "NetworkBandwidth":
try:
interfaces = config["interfaces"].split(",")
interfaces = [i.strip() for i in interfaces if i.strip()]
if not interfaces:
raise ConfigurationError("No interfaces configured")
host_interfaces = psutil.net_if_addrs().keys()
for interface in interfaces:
if interface not in host_interfaces:
raise ConfigurationError(
"Network interface {} does not exist".format(interface)
)
threshold_send = config.getfloat("threshold_send", fallback=100)
threshold_receive = config.getfloat("threshold_receive", fallback=100)
return cls(name, interfaces, threshold_send, threshold_receive)
except KeyError as error:
raise ConfigurationError(
"Missing configuration key: {}".format(error)
) from error
except ValueError as error:
raise ConfigurationError(
"Threshold in wrong format: {}".format(error)
) from error
def __init__(
self,
name: str,
interfaces: Iterable[str],
threshold_send: float,
threshold_receive: float,
) -> None:
Check.__init__(self, name)
self._interfaces = interfaces
self._threshold_send = threshold_send
self._threshold_receive = threshold_receive
self._previous_values = psutil.net_io_counters(pernic=True)
self._previous_time = time.time()
def check(self) -> Optional[str]:
# acquire the previous state and preserve it
old_values = self._previous_values
old_time = self._previous_time
# read new values and store them for the next iteration
new_values = psutil.net_io_counters(pernic=True)
self._previous_values = new_values
new_time = time.time()
if new_time == self._previous_time:
raise TemporaryCheckError("Called too fast, no time between calls")
self._previous_time = new_time
for interface in self._interfaces:
if interface not in new_values or interface not in self._previous_values:
raise TemporaryCheckError("Interface {} is missing".format(interface))
# send direction
delta_send = (
new_values[interface].bytes_sent - old_values[interface].bytes_sent
)
rate_send = delta_send / (new_time - old_time)
if rate_send > self._threshold_send:
return (
"Interface {} sending rate {} byte/s "
"higher than threshold {}".format(
interface, rate_send, self._threshold_send
)
)
# receive direction
delta_receive = (
new_values[interface].bytes_recv - old_values[interface].bytes_recv
)
rate_receive = delta_receive / (new_time - old_time)
if rate_receive > self._threshold_receive:
return (
"Interface {} receive rate {} byte/s "
"higher than threshold {}".format(
interface, rate_receive, self._threshold_receive
)
)
return None
class Ping(Activity):
"""Check if one or several hosts are reachable via ping."""
@classmethod
def create(cls, name: str, config: configparser.SectionProxy) -> "Ping":
try:
hosts = config["hosts"].split(",")
hosts = [h.strip() for h in hosts]
return cls(name, hosts)
except KeyError as error:
raise ConfigurationError(
"Unable to determine hosts to ping: {}".format(error)
) from error
def __init__(self, name: str, hosts: Iterable[str]) -> None:
Check.__init__(self, name)
self._hosts = hosts
def check(self) -> Optional[str]:
for host in self._hosts:
cmd = ["ping", "-q", "-c", "1", host]
if (
subprocess.call( # noqa: S603 we know the input from the config
cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
)
== 0
):
self.logger.debug("host " + host + " appears to be up")
return "Host {} is up".format(host)
return None
class Processes(Activity):
@classmethod
def create(cls, name: str, config: configparser.SectionProxy) -> "Processes":
try:
processes = config["processes"].split(",")
processes = [p.strip() for p in processes]
return cls(name, processes)
except KeyError as error:
raise ConfigurationError("No processes to check specified") from error
def __init__(self, name: str, processes: Iterable[str]) -> None:
Check.__init__(self, name)
self._processes = processes
def check(self) -> Optional[str]:
for proc in psutil.process_iter():
try:
pinfo = proc.name()
for name in self._processes:
if pinfo == name:
return "Process {} is running".format(name)
except psutil.NoSuchProcess:
pass
return None
class Smb(Activity):
@classmethod
def create(cls, name: str, config: Optional[configparser.SectionProxy]) -> "Smb":
return cls(name)
def check(self) -> Optional[str]:
try:
status_output = subprocess.check_output( # noqa: S603, S607
["smbstatus", "-b"]
).decode("utf-8")
except subprocess.CalledProcessError as error:
raise SevereCheckError(error) from error
self.logger.debug("Received status output:\n%s", status_output)
connections = []
start_seen = False
for line in status_output.splitlines():
if start_seen:
connections.append(line)
else:
if line.startswith("----"):
start_seen = True
if connections:
return "SMB clients are connected:\n{}".format("\n".join(connections))
else:
return None
class Users(Activity):
@classmethod
def create(cls, name: str, config: configparser.SectionProxy) -> "Users":
with warnings.catch_warnings():
warnings.simplefilter("ignore", FutureWarning)
try:
user_regex = re.compile(config.get("name", fallback=r".*"))
terminal_regex = re.compile(config.get("terminal", fallback=r".*"))
host_regex = re.compile(config.get("host", fallback=r".*"))
return cls(name, user_regex, terminal_regex, host_regex)
except re.error as error:
raise ConfigurationError(
"Regular expression is invalid: {}".format(error),
) from error
def __init__(
self,
name: str,
user_regex: Pattern,
terminal_regex: Pattern,
host_regex: Pattern,
) -> None:
Activity.__init__(self, name)
self._user_regex = user_regex
self._terminal_regex = terminal_regex
self._host_regex = host_regex
def check(self) -> Optional[str]:
for entry in psutil.users():
if (
self._user_regex.fullmatch(entry.name) is not None
and self._terminal_regex.fullmatch(entry.terminal) is not None
and self._host_regex.fullmatch(entry.host) is not None
):
self.logger.debug(
"User %s on terminal %s from host %s " "matches criteria.",
entry.name,
entry.terminal,
entry.host,
)
return (
"User {user} is logged in on terminal {terminal} "
"from {host} since {started}".format(
user=entry.name,
terminal=entry.terminal,
host=entry.host,
started=entry.started,
)
)
return None
class XIdleTime(Activity):
"""Check that local X display have been idle long enough."""
@classmethod
def create(cls, name: str, config: configparser.SectionProxy) -> "XIdleTime":
with warnings.catch_warnings():
warnings.simplefilter("ignore", FutureWarning)
try:
return cls(
name,
config.getint("timeout", fallback=600),
config.get("method", fallback="sockets"),
re.compile(config.get("ignore_if_process", fallback=r"a^")),
re.compile(config.get("ignore_users", fallback=r"a^")),
)
except re.error as error:
raise ConfigurationError(
"Regular expression is invalid: {}".format(error),
) from error
except ValueError as error:
raise ConfigurationError(
"Unable to parse configuration: {}".format(error),
) from error
def __init__(
self,
name: str,
timeout: float,
method: str,
ignore_process_re: Pattern,
ignore_users_re: Pattern,
) -> None:
Activity.__init__(self, name)
self._timeout = timeout
if method == "sockets":
self._provide_sessions = self._list_sessions_sockets
elif method == "logind":
self._provide_sessions = self._list_sessions_logind
else:
raise ValueError("Unknown session discovery method {}".format(method))
self._ignore_process_re = ignore_process_re
self._ignore_users_re = ignore_users_re
def _list_sessions_sockets(self) -> Sequence[Tuple[int, str]]:
"""List running X sessions by iterating the X sockets.
This method assumes that X servers are run under the users using the
server.
"""
sockets = glob.glob("/tmp/.X11-unix/X*")
self.logger.debug("Found sockets: %s", sockets)
results = []
for sock in sockets:
# determine the number of the X display
try:
display = int(sock[len("/tmp/.X11-unix/X") :])
except ValueError:
self.logger.warning(
"Cannot parse display number from socket %s. Skipping.",
sock,
exc_info=True,
)
continue
# determine the user of the display
try:
user = pwd.getpwuid(os.stat(sock).st_uid).pw_name
except (FileNotFoundError, KeyError):
self.logger.warning(
"Cannot get the owning user from socket %s. Skipping.",
sock,
exc_info=True,
)
continue
results.append((display, user))
return results
def _list_sessions_logind(self) -> Sequence[Tuple[int, str]]:
"""List running X sessions using logind.
This method assumes that a ``Display`` variable is set in the logind
sessions.
"""
results = []
for session_id, properties in list_logind_sessions():
if "Name" in properties and "Display" in properties:
try:
results.append(
(
int(properties["Display"].replace(":", "")),
str(properties["Name"]),
)
)
except ValueError:
self.logger.warning(
"Unable to parse display from session properties %s",
properties,
exc_info=True,
)
else:
self.logger.debug(
"Skipping session %s because it does not contain "
"a user name and a display",
session_id,
)
return results
def _is_skip_process_running(self, user: str) -> bool:
user_processes = []
for process in psutil.process_iter():
try:
if process.username() == user:
user_processes.append(process.name())
except (psutil.NoSuchProcess, psutil.ZombieProcess, psutil.AccessDenied):
# ignore processes which have disappeared etc.
pass
for process in user_processes:
if self._ignore_process_re.match(process) is not None:
self.logger.debug(
"Process %s with pid %s matches the ignore regex '%s'."
" Skipping idle time check for this user.",
process.name(),
process.pid,
self._ignore_process_re,
)
return True
return False
def check(self) -> Optional[str]:
for display, user in self._provide_sessions():
self.logger.info("Checking display %s of user %s", display, user)
# check whether this user should be ignored completely
if self._ignore_users_re.match(user) is not None:
self.logger.debug("Skipping user '%s' due to request", user)
continue
# check whether any of the running processes of this user matches
# the ignore regular expression. In that case we skip idletime
# checking because we assume the user has a process running that
# inevitably tampers with the idle time.
if self._is_skip_process_running(user):
continue
# prepare the environment for the xprintidle call
env = copy.deepcopy(os.environ)
env["DISPLAY"] = ":{}".format(display)
env["XAUTHORITY"] = os.path.join(
os.path.expanduser("~" + user), ".Xauthority"
)
try:
idle_time_output = subprocess.check_output( # noqa: S603, S607
["sudo", "-u", user, "xprintidle"], env=env
)
idle_time = float(idle_time_output.strip()) / 1000.0
except (subprocess.CalledProcessError, ValueError) as error:
self.logger.warning(
"Unable to determine the idle time for display %s.",
display,
exc_info=True,
)
raise TemporaryCheckError(error) from error
self.logger.debug(
"Idle time for display %s of user %s is %s seconds.",
display,
user,
idle_time,
)
if idle_time < self._timeout:
return (
"X session {} of user {} "
"has idle time {} < threshold {}".format(
display, user, idle_time, self._timeout
)
)
return None
class LogindSessionsIdle(Activity):
"""Prevents suspending in case a logind session is marked not idle.
The decision is based on the ``IdleHint`` property of logind sessions.
"""
@classmethod
def create(
cls, name: str, config: configparser.SectionProxy,
) -> "LogindSessionsIdle":
types = config.get("types", fallback="tty,x11,wayland").split(",")
types = [t.strip() for t in types]
states = config.get("states", fallback="active,online").split(",")
states = [t.strip() for t in states]
return cls(name, types, states)
def __init__(self, name: str, types: Iterable[str], states: Iterable[str]) -> None:
Activity.__init__(self, name)
self._types = types
self._states = states
def check(self) -> Optional[str]:
for session_id, properties in list_logind_sessions():
self.logger.debug("Session %s properties: %s", session_id, properties)
if properties["Type"] not in self._types:
self.logger.debug(
"Ignoring session of wrong type %s", properties["Type"]
)
continue
if properties["State"] not in self._states:
self.logger.debug(
"Ignoring session because its state is %s", properties["State"]
)
continue
if not properties["IdleHint"]:
return "Login session {} is not idle".format(session_id)
return None
class XPath(XPathMixin, Activity):
def __init__(self, name: str, **kwargs) -> None:
Activity.__init__(self, name)
XPathMixin.__init__(self, **kwargs)
def check(self) -> Optional[str]:
if self.evaluate():
return "XPath matches for url " + self._url
else:
return None
autosuspend-3.0/src/autosuspend/checks/util.py 0000664 0000000 0000000 00000011757 13611126124 0021666 0 ustar 00root root 0000000 0000000 import configparser
from typing import Any, Dict, Optional, Sequence, TYPE_CHECKING
from . import Check, ConfigurationError, SevereCheckError, TemporaryCheckError
if TYPE_CHECKING:
import requests.models
class CommandMixin:
"""Mixin for configuring checks based on external commands."""
@classmethod
def create(cls, name: str, config: configparser.SectionProxy) -> Check:
try:
return cls(name, config["command"].strip()) # type: ignore
except KeyError as error:
raise ConfigurationError("Missing command specification") from error
def __init__(self, command: str) -> None:
self._command = command
class NetworkMixin:
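"""Mixin for checks that fetch a resource via HTTP(S).
Supports optional basic/digest authentication and, if requests_file is
installed, also file:// URLs.
"""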
@classmethod
def collect_init_args(cls, config: configparser.SectionProxy) -> Dict[str, Any]:
try:
args = {} # type: Dict[str, Any]
args["timeout"] = config.getint("timeout", fallback=5)
args["url"] = config["url"]
args["username"] = config.get("username")
args["password"] = config.get("password")
if (args["username"] is None) != (args["password"] is None):
raise ConfigurationError("Username and password must be set")
return args
except ValueError as error:
raise ConfigurationError("Configuration error " + str(error)) from error
except KeyError as error:
raise ConfigurationError("Lacks " + str(error) + " config entry") from error
@classmethod
def create(cls, name: str, config: configparser.SectionProxy) -> Check:
return cls(name, **cls.collect_init_args(config)) # type: ignore
def __init__(
self,
url: str,
timeout: int,
username: Optional[str] = None,
password: Optional[str] = None,
) -> None:
self._url = url
self._timeout = timeout
self._username = username
self._password = password
def request(self) -> "requests.models.Response":
import requests
from requests.auth import HTTPBasicAuth, HTTPDigestAuth
import requests.exceptions
auth_map = {
"basic": HTTPBasicAuth,
"digest": HTTPDigestAuth,
}
session = requests.Session()
try:
from requests_file import FileAdapter
session.mount("file://", FileAdapter())
except ImportError:
pass
try:
reply = session.get(self._url, timeout=self._timeout)
# replace reply with an authenticated version if credentials are
# available and the server has requested authentication
if self._username and self._password and reply.status_code == 401:
auth_scheme = reply.headers["WWW-Authenticate"].split(" ")[0].lower()
if auth_scheme not in auth_map:
raise SevereCheckError(
"Unsupported authentication scheme {}".format(auth_scheme)
)
auth = auth_map[auth_scheme](self._username, self._password)
reply = session.get(self._url, timeout=self._timeout, auth=auth)
reply.raise_for_status()
return reply
except requests.exceptions.RequestException as error:
raise TemporaryCheckError(error) from error
class XPathMixin(NetworkMixin):
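"""Mixin for network-based checks that evaluate an XPath expression.
The configured expression is validated at startup and applied to the XML
document retrieved via NetworkMixin.request().
"""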
@classmethod
def collect_init_args(cls, config: configparser.SectionProxy) -> Dict[str, Any]:
from lxml.etree import XPath, XPathSyntaxError # noqa: S410 our input
try:
args = NetworkMixin.collect_init_args(config)
args["xpath"] = config["xpath"].strip()
# validate the expression
try:
XPath(args["xpath"])
except XPathSyntaxError as error:
raise ConfigurationError(
"Invalid xpath expression: " + args["xpath"]
) from error
return args
except KeyError as error:
raise ConfigurationError("Lacks " + str(error) + " config entry") from error
@classmethod
def create(cls, name: str, config: configparser.SectionProxy) -> Check:
return cls(name, **cls.collect_init_args(config)) # type: ignore
def __init__(self, xpath: str, **kwargs) -> None:
NetworkMixin.__init__(self, **kwargs)
self._xpath = xpath
from lxml import etree # noqa: S410 required flag set
self._parser = etree.XMLParser(resolve_entities=False)
def evaluate(self) -> Sequence[Any]:
import requests
import requests.exceptions
from lxml import etree # noqa: S410 using safe parser
try:
reply = self.request().content
root = etree.fromstring(reply, parser=self._parser) # noqa: S320
return root.xpath(self._xpath)
except requests.exceptions.RequestException as error:
raise TemporaryCheckError(error) from error
except etree.XMLSyntaxError as error:
raise TemporaryCheckError(error) from error
autosuspend-3.0/src/autosuspend/checks/wakeup.py 0000664 0000000 0000000 00000013474 13611126124 0022203 0 ustar 00root root 0000000 0000000 import configparser
from datetime import datetime, timedelta, timezone
from io import BytesIO
import subprocess
from typing import Optional
from .util import CommandMixin, NetworkMixin, XPathMixin
from .. import ConfigurationError, TemporaryCheckError, Wakeup
class Calendar(NetworkMixin, Wakeup):
"""Uses an ical calendar to wake up on the next scheduled event."""
def __init__(self, name: str, **kwargs) -> None:
NetworkMixin.__init__(self, **kwargs)
Wakeup.__init__(self, name)
def check(self, timestamp: datetime) -> Optional[datetime]:
from ..util.ical import list_calendar_events
response = self.request()
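# look ahead over a fixed horizon of 6 * 4 = 24 weeks for candidate events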
end = timestamp + timedelta(weeks=6 * 4)
events = list_calendar_events(BytesIO(response.content), timestamp, end)
# Filter out currently active events. They are not our business.
events = [e for e in events if e.start >= timestamp]
if events:
candidate = events[0]
if isinstance(candidate.start, datetime):
return candidate.start
else:
return datetime.combine(candidate.start, datetime.min.time())
else:
return None
class File(Wakeup):
"""Determines scheduled wake ups from the contents of a file on disk.
File contents are interpreted as a Unix timestamp in seconds UTC.
"""
@classmethod
def create(cls, name: str, config: configparser.SectionProxy) -> "File":
try:
path = config["path"]
return cls(name, path)
except KeyError as error:
raise ConfigurationError("Missing option path") from error
def __init__(self, name: str, path: str) -> None:
Wakeup.__init__(self, name)
self._path = path
def check(self, timestamp: datetime) -> Optional[datetime]:
try:
with open(self._path, "r") as time_file:
return datetime.fromtimestamp(
float(time_file.readlines()[0].strip()), timezone.utc
)
except FileNotFoundError:
# this is ok
return None
except (ValueError, IOError) as error:
raise TemporaryCheckError(error) from error
class Command(CommandMixin, Wakeup):
"""Determine wake up times based on an external command.
The called command must return a timestamp in UTC or nothing in case no
wake up is planned.
"""
def __init__(self, name: str, command: str) -> None:
CommandMixin.__init__(self, command)
Wakeup.__init__(self, name)
def check(self, timestamp: datetime) -> Optional[datetime]:
try:
output = subprocess.check_output(
self._command, shell=True, # noqa: S602
).splitlines()[0]
self.logger.debug(
"Command %s succeeded with output %s", self._command, output
)
if output.strip():
return datetime.fromtimestamp(float(output.strip()), timezone.utc)
else:
return None
except (subprocess.CalledProcessError, ValueError) as error:
raise TemporaryCheckError(error) from error
class Periodic(Wakeup):
"""Always indicates a wake up after a specified delta of time from now on.
Use this to periodically wake up a system.
"""
@classmethod
def create(cls, name: str, config: configparser.SectionProxy) -> "Periodic":
try:
kwargs = {}
kwargs[config["unit"]] = float(config["value"])
return cls(name, timedelta(**kwargs)) # type: ignore
except (ValueError, KeyError, TypeError) as error:
raise ConfigurationError(str(error)) from error
def __init__(self, name: str, delta: timedelta) -> None:
Wakeup.__init__(self, name)
self._delta = delta
def check(self, timestamp: datetime) -> Optional[datetime]:
return timestamp + self._delta
class XPath(XPathMixin, Wakeup):
"""Determine wake up times from a network resource using XPath expressions.
The matched results are expected to represent timestamps in seconds UTC.
"""
def __init__(self, name: str, **kwargs) -> None:
Wakeup.__init__(self, name)
XPathMixin.__init__(self, **kwargs)
def convert_result(self, result: str, timestamp: datetime) -> datetime:
return datetime.fromtimestamp(float(result), timezone.utc)
def check(self, timestamp: datetime) -> Optional[datetime]:
matches = self.evaluate()
try:
if matches:
return min(self.convert_result(m, timestamp) for m in matches)
else:
return None
except TypeError as error:
raise TemporaryCheckError(
"XPath returned a result that is not a string: " + str(error)
) from error
except ValueError as error:
raise TemporaryCheckError("Result cannot be parsed: " + str(error)) from error
class XPathDelta(XPath):
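"""Like XPath, but matches are interpreted as deltas relative to now.
The configured unit (default: minutes) selects the datetime.timedelta
keyword used to offset the current timestamp.
"""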
UNITS = [
"days",
"seconds",
"microseconds",
"milliseconds",
"minutes",
"hours",
"weeks",
]
@classmethod
def create(cls, name: str, config: configparser.SectionProxy) -> "XPathDelta":
try:
args = XPath.collect_init_args(config)
args["unit"] = config.get("unit", fallback="minutes")
return cls(name, **args)
except ValueError as error:
raise ConfigurationError(str(error)) from error
def __init__(self, name: str, unit: str, **kwargs) -> None:
if unit not in self.UNITS:
raise ValueError("Unsupported unit")
XPath.__init__(self, name, **kwargs)
self._unit = unit
def convert_result(self, result: str, timestamp: datetime) -> datetime:
kwargs = {}
kwargs[self._unit] = float(result)
return timestamp + timedelta(**kwargs) # type: ignore
autosuspend-3.0/src/autosuspend/util/ 0000775 0000000 0000000 00000000000 13611126124 0020041 5 ustar 00root root 0000000 0000000 autosuspend-3.0/src/autosuspend/util/__init__.py 0000664 0000000 0000000 00000000771 13611126124 0022157 0 ustar 00root root 0000000 0000000 import logging
from typing import Any, Optional, Type
def logger_by_class(klass: Type, name: Optional[str] = None) -> logging.Logger:
return logging.getLogger(
"{module}.{klass}{name}".format(
module=klass.__module__,
klass=klass.__name__,
name=".{}".format(name) if name else "",
)
)
def logger_by_class_instance(
instance: Any, name: Optional[str] = None,
) -> logging.Logger:
return logger_by_class(instance.__class__, name=name)
autosuspend-3.0/src/autosuspend/util/ical.py 0000664 0000000 0000000 00000016075 13611126124 0021334 0 ustar 00root root 0000000 0000000 from datetime import date, datetime, timedelta
from typing import Dict, IO, Iterable, List, Mapping, Sequence, Union
from dateutil.rrule import rruleset, rrulestr
import icalendar
import icalendar.cal
import pytz
import tzlocal
class CalendarEvent:
def __init__(
self, summary: str, start: Union[datetime, date], end: Union[datetime, date],
) -> None:
self.summary = summary
self.start = start
self.end = end
def __str__(self) -> str:
return "CalendarEvent[summary={}, start={}, end={}]".format(
self.summary, self.start, self.end
)
def _expand_rrule_all_day(
rrule: str, start: date, exclusions: Iterable, start_at: datetime, end_at: datetime
) -> Iterable[date]:
"""Expand an rrule for all-day events.
All-day events can only carry exclusions, not changed instances, because
changes only affect the time component, which all-day events do not have.
"""
rules = rruleset()
rules.rrule(rrulestr(rrule, dtstart=start, ignoretz=True))
# add exclusions
if exclusions:
for xdate in exclusions:
rules.exdate(datetime.combine(xdate.dts[0].dt, datetime.min.time()))
dates = []
# reduce start and end to datetimes without timezone that just represent a
# date at midnight.
for candidate in rules.between(
datetime.combine(start_at.date(), datetime.min.time()),
datetime.combine(end_at.date(), datetime.min.time()),
inc=True,
):
dates.append(candidate.date())
return dates
def _expand_rrule(
rrule: str,
start: datetime,
instance_duration: timedelta,
exclusions: Iterable,
changes: Iterable[icalendar.cal.Event],
start_at: datetime,
end_at: datetime,
) -> Sequence[datetime]:
# unify everything to a single timezone and then strip it to handle DST
# changes correctly
orig_tz = start.tzinfo
start = start.replace(tzinfo=None)
start_at = start_at.astimezone(orig_tz).replace(tzinfo=None)
end_at = end_at.astimezone(orig_tz).replace(tzinfo=None)
rules = rruleset()
first_rule = rrulestr(rrule, dtstart=start, ignoretz=True)
# apply the same timezone logic for the until part of the rule after
# parsing it.
if first_rule._until:
first_rule._until = (
pytz.utc.localize(first_rule._until)
.astimezone(orig_tz)
.replace(tzinfo=None)
)
rules.rrule(first_rule)
# add exclusions
if exclusions:
for xdate in exclusions:
try:
# also in this case, unify and strip the timezone
rules.exdate(xdate.dts[0].dt.astimezone(orig_tz).replace(tzinfo=None))
except AttributeError:
pass
# add events that were changed
for change in changes:
# same timezone mangling applies here
rules.exdate(
change.get("recurrence-id").dt.astimezone(orig_tz).replace(tzinfo=None)
)
# expand the rrule
dates = []
for candidate in rules.between(start_at - instance_duration, end_at, inc=True):
localized = orig_tz.localize(candidate) # type: ignore
dates.append(localized)
return dates
ChangeMapping = Mapping[str, Iterable[icalendar.cal.Event]]
def _collect_recurrence_changes(calendar: icalendar.Calendar) -> ChangeMapping:
ConcreteChangeMapping = Dict[str, List[icalendar.cal.Event]] # noqa
recurring_changes = {} # type: ConcreteChangeMapping
for component in calendar.walk():
if component.name != "VEVENT":
continue
if component.get("recurrence-id"):
if component.get("uid") not in recurring_changes:
recurring_changes[component.get("uid")] = []
recurring_changes[component.get("uid")].append(component)
return recurring_changes
def list_calendar_events(
data: IO[bytes], start_at: datetime, end_at: datetime
) -> Sequence[CalendarEvent]:
"""List all relevant calendar events in the provided interval.
Args:
data:
A stream with icalendar data
start_at:
include events overlapping with this time (inclusive)
end_at:
do not include events that start after or exactly at this time
"""
def is_aware(dt: datetime) -> bool:
return dt.tzinfo is not None and dt.tzinfo.utcoffset(dt) is not None
# some useful notes:
# * end times and dates are non-inclusive for ical events
# * start and end are dates for all-day events
calendar = icalendar.Calendar.from_ical(data.read())
# Do a first pass through the calendar to collect all exclusions to
# recurring events so that they can be handled when expanding recurrences.
recurring_changes = _collect_recurrence_changes(calendar)
events = []
for component in calendar.walk():
if component.name != "VEVENT":
continue
summary = component.get("summary")
start = component.get("dtstart").dt
end = component.get("dtend").dt
exclusions = component.get("exdate")
if exclusions and not isinstance(exclusions, list):
exclusions = [exclusions]
# Check whether dates are floating and localize with local time if so.
# Only works in case of non-all-day events, which are dates, not
# datetimes.
if isinstance(start, datetime) and not is_aware(start):
assert not is_aware(end)
local_time = tzlocal.get_localzone()
start = local_time.localize(start)
end = local_time.localize(end)
length = end - start
if component.get("rrule"):
rrule = component.get("rrule").to_ical().decode("utf-8")
changes = [] # type: Iterable[icalendar.cal.Event]
if component.get("uid") in recurring_changes:
changes = recurring_changes[component.get("uid")]
if isinstance(start, datetime):
# complex processing in case of normal events
for local_start in _expand_rrule(
rrule, start, length, exclusions, changes, start_at, end_at
):
local_end = local_start + length
events.append(CalendarEvent(summary, local_start, local_end))
else:
# simplified processing for all-day events
for local_start_date in _expand_rrule_all_day(
rrule, start, exclusions, start_at, end_at
):
local_end = local_start_date + timedelta(days=1)
events.append(CalendarEvent(summary, local_start_date, local_end))
else:
# same distinction here as above
if isinstance(start, datetime):
# single events
if end > start_at and start < end_at:
events.append(CalendarEvent(str(summary), start, end))
else:
# all-day events
if end > start_at.date() and start <= end_at.date():
events.append(CalendarEvent(str(summary), start, end))
return sorted(events, key=lambda e: e.start)
autosuspend-3.0/src/autosuspend/util/systemd.py 0000664 0000000 0000000 00000002072 13611126124 0022104 0 ustar 00root root 0000000 0000000 from typing import Iterable, Tuple, TYPE_CHECKING
if TYPE_CHECKING:
import dbus
def _get_bus() -> "dbus.SystemBus":
import dbus
return dbus.SystemBus()
def list_logind_sessions() -> Iterable[Tuple[str, dict]]:
"""List running logind sessions and their properties.
Returns:
list of (session_id, properties dict):
A list of tuples of session ids and their associated properties
represented as dicts.
"""
import dbus
bus = _get_bus()
login1 = bus.get_object("org.freedesktop.login1", "/org/freedesktop/login1")
sessions = login1.ListSessions(dbus_interface="org.freedesktop.login1.Manager")
results = []
for session_id, path in [(s[0], s[4]) for s in sessions]:
session = bus.get_object("org.freedesktop.login1", path)
properties_interface = dbus.Interface(
session, "org.freedesktop.DBus.Properties"
)
properties = properties_interface.GetAll("org.freedesktop.login1.Session")
results.append((session_id, properties))
return results
autosuspend-3.0/tests/ 0000775 0000000 0000000 00000000000 13611126124 0015065 5 ustar 00root root 0000000 0000000 autosuspend-3.0/tests/__init__.py 0000664 0000000 0000000 00000000424 13611126124 0017176 0 ustar 00root root 0000000 0000000 import abc
from typing import Any
class CheckTest(abc.ABC):
@abc.abstractmethod
def create_instance(self, name: str) -> Any:
pass
def test_name_passing(self) -> None:
name = "checktestname"
assert self.create_instance(name).name == name
autosuspend-3.0/tests/conftest.py 0000664 0000000 0000000 00000004210 13611126124 0017261 0 ustar 00root root 0000000 0000000 from pathlib import Path
from typing import Callable, Tuple
import dbusmock
import pytest
from werkzeug.wrappers import Request, Response
from autosuspend.util import systemd as util_systemd
@pytest.fixture
def serve_file(httpserver) -> Callable[[Path], str]:
"""
Serve a file via HTTP.
Returns:
A callable that expects the file path to serve. It returns the URL to
use for accessing the file.
"""
def serve(the_file: Path) -> str:
path = f"/{the_file.name}"
httpserver.expect_request(path).respond_with_data(the_file.read_bytes())
return httpserver.url_for(path)
return serve
@pytest.fixture
def serve_protected(httpserver) -> Callable[[Path], Tuple[str, str, str]]:
"""
Serve a file behind basic authentication.
Returns:
A callable that accepts the file path to serve. It returns a tuple of
the URL to use for the file, a valid username, and the matching password.
"""
realm = "the_realm"
username = "the_user"
password = "the_password"
def serve(the_file: Path) -> Tuple[str, str, str]:
def handler(request: Request) -> Response:
auth = request.authorization
if not auth or not (
auth.username == username and auth.password == password
):
return Response(
"Authentication required",
401,
{"WWW-Authenticate": f"Basic realm={realm}"},
)
else:
return Response(the_file.read_bytes())
path = f"/{the_file.name}"
httpserver.expect_request(path).respond_with_handler(handler)
return (httpserver.url_for(path), username, password)
return serve
@pytest.fixture()
def logind(monkeypatch):
pytest.importorskip("dbus")
pytest.importorskip("gi")
test_case = dbusmock.DBusTestCase()
test_case.start_system_bus()
mock, obj = test_case.spawn_server_template("logind")
def get_bus():
return test_case.get_dbus(system_bus=True)
monkeypatch.setattr(util_systemd, "_get_bus", get_bus)
yield obj
mock.terminate()
mock.wait()
autosuspend-3.0/tests/data/ 0000775 0000000 0000000 00000000000 13611126124 0015776 5 ustar 00root root 0000000 0000000 autosuspend-3.0/tests/data/mindeps-test.conf 0000664 0000000 0000000 00000000441 13611126124 0021260 0 ustar 00root root 0000000 0000000 [general]
interval = 5
idle_time = 900
suspend_cmd = /usr/bin/systemctl suspend
wakeup_cmd = echo {timestamp:.0f} > /sys/class/rtc/rtc0/wakealarm
woke_up_file = /var/run/autosuspend-just-woke-up
lock_file = /tmp/autosuspend-test-mindeps.lock
[check.Ping]
enabled = true
hosts = localhost
autosuspend-3.0/tests/test_autosuspend.py 0000664 0000000 0000000 00000055067 13611126124 0021065 0 ustar 00root root 0000000 0000000 import argparse
import configparser
from datetime import datetime, timedelta, timezone
import logging
import subprocess
import dateutil.parser
import pytest
import autosuspend
class TestExecuteSuspend:
def test_smoke(self, mocker) -> None:
mock = mocker.patch("subprocess.check_call")
command = ["foo", "bar"]
autosuspend.execute_suspend(command, None)
mock.assert_called_once_with(command, shell=True)
def test_call_exception(self, mocker) -> None:
mock = mocker.patch("subprocess.check_call")
command = ["foo", "bar"]
mock.side_effect = subprocess.CalledProcessError(2, command)
spy = mocker.spy(autosuspend._logger, "warning")
autosuspend.execute_suspend(command, None)
mock.assert_called_once_with(command, shell=True)
assert spy.call_count == 1
class TestScheduleWakeup:
def test_smoke(self, mocker) -> None:
mock = mocker.patch("subprocess.check_call")
dt = datetime.fromtimestamp(1525270801, timezone(timedelta(hours=4)))
autosuspend.schedule_wakeup("echo {timestamp:.0f} {iso}", dt)
mock.assert_called_once_with(
"echo 1525270801 2018-05-02T18:20:01+04:00", shell=True
)
def test_call_exception(self, mocker) -> None:
mock = mocker.patch("subprocess.check_call")
mock.side_effect = subprocess.CalledProcessError(2, "foo")
spy = mocker.spy(autosuspend._logger, "warning")
autosuspend.schedule_wakeup("foo", datetime.now(timezone.utc))
mock.assert_called_once_with("foo", shell=True)
assert spy.call_count == 1
class TestConfigureLogging:
def test_debug(self, mocker) -> None:
mock = mocker.patch("logging.basicConfig")
autosuspend.configure_logging(None, True)
mock.assert_called_once_with(level=logging.DEBUG)
def test_standard(self, mocker) -> None:
mock = mocker.patch("logging.basicConfig")
autosuspend.configure_logging(None, False)
mock.assert_called_once_with(level=logging.WARNING)
def test_file(self, mocker) -> None:
mock = mocker.patch("logging.config.fileConfig")
# anything that is not a boolean is treated like a file
autosuspend.configure_logging(42, False) # type: ignore
mock.assert_called_once_with(42)
def test_file_fallback(self, mocker) -> None:
mock = mocker.patch("logging.config.fileConfig", side_effect=RuntimeError())
mock_basic = mocker.patch("logging.basicConfig")
# anything that is not a boolean is treated like a file
autosuspend.configure_logging(42, False) # type: ignore
mock.assert_called_once_with(42)
mock_basic.assert_called_once_with(level=logging.WARNING)
class TestSetUpChecks:
def test_smoke(self, mocker) -> None:
mock_class = mocker.patch("autosuspend.checks.activity.Mpd")
mock_class.create.return_value = mocker.MagicMock(
spec=autosuspend.checks.Activity
)
parser = configparser.ConfigParser()
parser.read_string(
"""
[check.Foo]
class = Mpd
enabled = True
"""
)
autosuspend.set_up_checks(
parser, "check", "activity", autosuspend.Activity # type: ignore
)
mock_class.create.assert_called_once_with("Foo", parser["check.Foo"])
def test_external_class(self, mocker) -> None:
mock_class = mocker.patch("os.path.TestCheck", create=True)
mock_class.create.return_value = mocker.MagicMock(
spec=autosuspend.checks.Activity
)
parser = configparser.ConfigParser()
parser.read_string(
"""
[check.Foo]
class = os.path.TestCheck
enabled = True
"""
)
autosuspend.set_up_checks(
parser, "check", "activity", autosuspend.Activity # type: ignore
)
mock_class.create.assert_called_once_with("Foo", parser["check.Foo"])
def test_not_enabled(self, mocker) -> None:
mock_class = mocker.patch("autosuspend.checks.activity.Mpd")
mock_class.create.return_value = mocker.MagicMock(spec=autosuspend.Activity)
parser = configparser.ConfigParser()
parser.read_string(
"""
[check.Foo]
class = Mpd
enabled = False
"""
)
assert not autosuspend.set_up_checks(
parser, "check", "activity", autosuspend.Activity, # type: ignore
)
with pytest.raises(autosuspend.ConfigurationError):
autosuspend.set_up_checks(
parser,
"check",
"activity",
autosuspend.Activity, # type: ignore
error_none=True,
)
def test_not_enabled_continues_with_next(self, mocker) -> None:
mock_mpd = mocker.patch("autosuspend.checks.activity.Mpd")
mock_mpd.create.return_value = mocker.MagicMock(spec=autosuspend.Activity)
mock_xidletime = mocker.patch("autosuspend.checks.activity.XIdleTime")
mock_xidletime.create.return_value = mocker.MagicMock(spec=autosuspend.Activity)
parser = configparser.ConfigParser()
parser.read_string(
"""
[check.Foo]
class = Mpd
enabled = False
[check.Bar]
class = XIdleTime
enabled = True
"""
)
assert (
len(
autosuspend.set_up_checks(
parser, "check", "activity", autosuspend.Activity, # type: ignore
)
)
== 1
)
def test_no_such_class(self, mocker) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[check.Foo]
class = FooBarr
enabled = True
"""
)
with pytest.raises(autosuspend.ConfigurationError):
autosuspend.set_up_checks(
parser, "check", "activity", autosuspend.Activity # type: ignore
)
def test_not_a_check(self, mocker) -> None:
mock_class = mocker.patch("autosuspend.checks.activity.Mpd")
mock_class.create.return_value = mocker.MagicMock()
parser = configparser.ConfigParser()
parser.read_string(
"""
[check.Foo]
class = Mpd
enabled = True
"""
)
with pytest.raises(autosuspend.ConfigurationError):
autosuspend.set_up_checks(
parser, "check", "activity", autosuspend.Activity # type: ignore
)
mock_class.create.assert_called_once_with("Foo", parser["check.Foo"])
def test_passwords_redacted(self, mocker, caplog) -> None:
mock_class = mocker.patch("autosuspend.checks.activity.Mpd")
mock_class.create.return_value = mocker.MagicMock(
spec=autosuspend.checks.Activity
)
parser = configparser.ConfigParser()
parser.read_string(
"""
[check.Foo]
class = Mpd
enabled = True
password = THEPASS
"""
)
with caplog.at_level(logging.DEBUG):
autosuspend.set_up_checks(
parser, "check", "activity", autosuspend.Activity # type: ignore
)
assert "THEPASS" not in caplog.text
class TestExecuteChecks:
def test_no_checks(self, mocker) -> None:
assert autosuspend.execute_checks([], False, mocker.MagicMock()) is False
def test_matches(self, mocker) -> None:
matching_check = mocker.MagicMock(spec=autosuspend.Activity)
matching_check.name = "foo"
matching_check.check.return_value = "matches"
assert (
autosuspend.execute_checks([matching_check], False, mocker.MagicMock())
is True
)
matching_check.check.assert_called_once_with()
def test_only_first_called(self, mocker) -> None:
matching_check = mocker.MagicMock(spec=autosuspend.Activity)
matching_check.name = "foo"
matching_check.check.return_value = "matches"
second_check = mocker.MagicMock()
second_check.name = "bar"
second_check.check.return_value = "matches"
assert (
autosuspend.execute_checks(
[matching_check, second_check], False, mocker.MagicMock()
)
is True
)
matching_check.check.assert_called_once_with()
second_check.check.assert_not_called()
def test_all_called(self, mocker) -> None:
matching_check = mocker.MagicMock(spec=autosuspend.Activity)
matching_check.name = "foo"
matching_check.check.return_value = "matches"
second_check = mocker.MagicMock()
second_check.name = "bar"
second_check.check.return_value = "matches"
assert (
autosuspend.execute_checks(
[matching_check, second_check], True, mocker.MagicMock()
)
is True
)
matching_check.check.assert_called_once_with()
second_check.check.assert_called_once_with()
def test_ignore_temporary_errors(self, mocker) -> None:
matching_check = mocker.MagicMock(spec=autosuspend.Activity)
matching_check.name = "foo"
matching_check.check.side_effect = autosuspend.TemporaryCheckError()
second_check = mocker.MagicMock()
second_check.name = "bar"
second_check.check.return_value = "matches"
assert (
autosuspend.execute_checks(
[matching_check, second_check], False, mocker.MagicMock()
)
is True
)
matching_check.check.assert_called_once_with()
second_check.check.assert_called_once_with()
class TestExecuteWakeups:
def test_no_wakeups(self, mocker) -> None:
assert (
autosuspend.execute_wakeups(
[], datetime.now(timezone.utc), mocker.MagicMock()
)
is None
)
def test_all_none(self, mocker) -> None:
wakeup = mocker.MagicMock(spec=autosuspend.Wakeup)
wakeup.check.return_value = None
assert (
autosuspend.execute_wakeups(
[wakeup], datetime.now(timezone.utc), mocker.MagicMock()
)
is None
)
@pytest.mark.parametrize(
"illegal", [None, dateutil.parser.parse("20040605T090000Z")],
)
def test_skips_none_outdated_and_continues(self, mocker, illegal) -> None:
wakeup_none = mocker.MagicMock(spec=autosuspend.Wakeup)
wakeup_none.check.return_value = illegal
now = dateutil.parser.parse("20040705T090000Z")
wake_up_at = now + timedelta(minutes=10)
wakeup_real = mocker.MagicMock(spec=autosuspend.Wakeup)
wakeup_real.check.return_value = wake_up_at
assert (
autosuspend.execute_wakeups(
[wakeup_none, wakeup_real], now, mocker.MagicMock(),
)
== wake_up_at
)
assert wakeup_none.check.called
def test_basic_return(self, mocker) -> None:
wakeup = mocker.MagicMock(spec=autosuspend.Wakeup)
now = datetime.now(timezone.utc)
wakeup_time = now + timedelta(seconds=10)
wakeup.check.return_value = wakeup_time
assert (
autosuspend.execute_wakeups([wakeup], now, mocker.MagicMock())
== wakeup_time
)
def test_soonest_taken(self, mocker) -> None:
reference = datetime.now(timezone.utc)
wakeup = mocker.MagicMock(spec=autosuspend.Wakeup)
wakeup.check.return_value = reference + timedelta(seconds=20)
earlier = reference + timedelta(seconds=10)
wakeup_earlier = mocker.MagicMock(spec=autosuspend.Wakeup)
wakeup_earlier.check.return_value = earlier
in_between = reference + timedelta(seconds=15)
wakeup_later = mocker.MagicMock(spec=autosuspend.Wakeup)
wakeup_later.check.return_value = in_between
assert (
autosuspend.execute_wakeups(
[wakeup, wakeup_earlier, wakeup_later], reference, mocker.MagicMock()
)
== earlier
)
def test_ignore_temporary_errors(self, mocker) -> None:
now = datetime.now(timezone.utc)
wakeup = mocker.MagicMock(spec=autosuspend.Wakeup)
wakeup.check.return_value = now + timedelta(seconds=20)
wakeup_error = mocker.MagicMock(spec=autosuspend.Wakeup)
wakeup_error.check.side_effect = autosuspend.TemporaryCheckError()
wakeup_earlier = mocker.MagicMock(spec=autosuspend.Wakeup)
wakeup_earlier.check.return_value = now + timedelta(seconds=10)
assert autosuspend.execute_wakeups(
[wakeup, wakeup_error, wakeup_earlier], now, mocker.MagicMock()
) == now + timedelta(seconds=10)
def test_ignore_too_early(self, mocker) -> None:
now = datetime.now(timezone.utc)
wakeup = mocker.MagicMock(spec=autosuspend.Wakeup)
wakeup.check.return_value = now
assert autosuspend.execute_wakeups([wakeup], now, mocker.MagicMock()) is None
assert (
autosuspend.execute_wakeups(
[wakeup], now + timedelta(seconds=1), mocker.MagicMock()
)
is None
)
class TestNotifySuspend:
def test_date(self, mocker) -> None:
mock = mocker.patch("subprocess.check_call")
dt = datetime.fromtimestamp(1525270801, timezone(timedelta(hours=4)))
autosuspend.notify_suspend("echo {timestamp:.0f} {iso}", "not this", dt)
mock.assert_called_once_with(
"echo 1525270801 2018-05-02T18:20:01+04:00", shell=True
)
def test_date_no_command(self, mocker) -> None:
mock = mocker.patch("subprocess.check_call")
dt = datetime.fromtimestamp(1525270801, timezone(timedelta(hours=4)))
autosuspend.notify_suspend(None, "not this", dt)
mock.assert_not_called()
def test_no_date(self, mocker) -> None:
mock = mocker.patch("subprocess.check_call")
autosuspend.notify_suspend("echo {timestamp:.0f} {iso}", "echo nothing", None)
mock.assert_called_once_with("echo nothing", shell=True)
def test_no_date_no_command(self, mocker) -> None:
mock = mocker.patch("subprocess.check_call")
autosuspend.notify_suspend("echo {timestamp:.0f} {iso}", None, None)
mock.assert_not_called()
def test_ignore_execution_errors(self, mocker, caplog) -> None:
mock = mocker.patch("subprocess.check_call")
mock.side_effect = subprocess.CalledProcessError(2, "cmd")
dt = datetime.fromtimestamp(1525270801, timezone(timedelta(hours=4)))
with caplog.at_level(logging.WARNING):
autosuspend.notify_suspend("wakeup", "nowakeup", dt)
assert "Unable to execute" in caplog.text
assert mock.called
def test_info_no_command(self, caplog) -> None:
with caplog.at_level(logging.INFO):
autosuspend.notify_suspend(None, None, datetime.now())
assert "suitable" in caplog.text
class TestConfigureProcessor:
def test_minimal_config(self, mocker) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[general]
suspend_cmd = suspend
wakeup_cmd = wakeup
"""
)
args = mocker.MagicMock(spec=argparse.Namespace)
type(args).all_checks = mocker.PropertyMock(return_value=True)
processor = autosuspend.configure_processor(args, parser, [], [])
assert processor._idle_time == 300
assert processor._min_sleep_time == 1200
assert processor._wakeup_delta == 30
assert processor._all_activities
def test_notify_and_suspend(mocker) -> None:
mock = mocker.patch("subprocess.check_call")
dt = datetime.fromtimestamp(1525270801, timezone(timedelta(hours=4)))
autosuspend.notify_and_suspend(
"echo suspend", "echo notify {timestamp:.0f} {iso}", "not this", dt
)
mock.assert_has_calls(
[
mocker.call("echo notify 1525270801 2018-05-02T18:20:01+04:00", shell=True),
mocker.call("echo suspend", shell=True),
]
)
class _StubCheck(autosuspend.Activity):
@classmethod
def create(cls, name, config):
pass
def __init__(self, name, match):
autosuspend.Activity.__init__(self, name)
self.match = match
def check(self):
return self.match
@pytest.fixture
def sleep_fn():
class Func:
def __init__(self):
self.called = False
self.call_arg = None
def reset(self):
self.called = False
self.call_arg = None
def __call__(self, arg):
self.called = True
self.call_arg = arg
return Func()
@pytest.fixture
def wakeup_fn():
class Func:
def __init__(self):
self.call_arg = None
def reset(self):
self.call_arg = None
def __call__(self, arg):
self.call_arg = arg
return Func()
class TestProcessor:
def test_smoke(self, sleep_fn, wakeup_fn) -> None:
processor = autosuspend.Processor(
[_StubCheck("stub", None)], [], 2, 0, 0, sleep_fn, wakeup_fn, False
)
# should init the timestamp initially
start = datetime.now(timezone.utc)
processor.iteration(start, False)
assert not sleep_fn.called
# not yet reached
processor.iteration(start + timedelta(seconds=1), False)
assert not sleep_fn.called
# time must be greater, not equal
processor.iteration(start + timedelta(seconds=2), False)
assert not sleep_fn.called
# go to sleep
processor.iteration(start + timedelta(seconds=3), False)
assert sleep_fn.called
assert sleep_fn.call_arg is None
sleep_fn.reset()
# second iteration to check that the idle time got reset
processor.iteration(start + timedelta(seconds=4), False)
assert not sleep_fn.called
# go to sleep again
processor.iteration(start + timedelta(seconds=6, milliseconds=2), False)
assert sleep_fn.called
assert wakeup_fn.call_arg is None
def test_just_woke_up_handling(self, sleep_fn, wakeup_fn) -> None:
processor = autosuspend.Processor(
[_StubCheck("stub", None)], [], 2, 0, 0, sleep_fn, wakeup_fn, False
)
# should init the timestamp initially
start = datetime.now(timezone.utc)
processor.iteration(start, False)
assert not sleep_fn.called
# should go to sleep but we just woke up
processor.iteration(start + timedelta(seconds=3), True)
assert not sleep_fn.called
# start over again
processor.iteration(start + timedelta(seconds=4), False)
assert not sleep_fn.called
# not yet sleeping
processor.iteration(start + timedelta(seconds=6), False)
assert not sleep_fn.called
# now go to sleep
processor.iteration(start + timedelta(seconds=7), False)
assert sleep_fn.called
assert wakeup_fn.call_arg is None
def test_wakeup_blocks_sleep(self, mocker, sleep_fn, wakeup_fn) -> None:
start = datetime.now(timezone.utc)
wakeup = mocker.MagicMock(spec=autosuspend.Wakeup)
wakeup.check.return_value = start + timedelta(seconds=6)
processor = autosuspend.Processor(
[_StubCheck("stub", None)], [wakeup], 2, 3.1, 0, sleep_fn, wakeup_fn, False
)
# init iteration
processor.iteration(start, False)
# no activity and enough time passed to start sleeping
processor.iteration(start + timedelta(seconds=3), False)
assert not sleep_fn.called
assert wakeup_fn.call_arg is None
def test_wakeup_exact_hit_does_not_block(
self, mocker, sleep_fn, wakeup_fn,
) -> None:
start = datetime.now(timezone.utc)
wakeup = mocker.MagicMock(spec=autosuspend.Wakeup)
wakeup.check.return_value = start + timedelta(seconds=6)
processor = autosuspend.Processor(
[_StubCheck("stub", None)], [wakeup], 2, 3, 0, sleep_fn, wakeup_fn, False
)
# init iteration
processor.iteration(start, False)
# no activity and enough time passed to start sleeping
processor.iteration(start + timedelta(seconds=3), False)
assert sleep_fn.called
assert wakeup_fn.call_arg is not None
def test_wakeup_scheduled(self, mocker, sleep_fn, wakeup_fn) -> None:
start = datetime.now(timezone.utc)
wakeup = mocker.MagicMock(spec=autosuspend.Wakeup)
wakeup.check.return_value = start + timedelta(seconds=25)
processor = autosuspend.Processor(
[_StubCheck("stub", None)], [wakeup], 2, 10, 0, sleep_fn, wakeup_fn, False
)
# init iteration
processor.iteration(start, False)
# no activity and enough time passed to start sleeping
processor.iteration(start + timedelta(seconds=3), False)
assert sleep_fn.called
assert sleep_fn.call_arg == start + timedelta(seconds=25)
assert wakeup_fn.call_arg == start + timedelta(seconds=25)
sleep_fn.reset()
wakeup_fn.reset()
# ensure that wake up is not scheduled again
processor.iteration(start + timedelta(seconds=25), False)
assert wakeup_fn.call_arg is None
def test_wakeup_delta_blocks(self, mocker, sleep_fn, wakeup_fn) -> None:
start = datetime.now(timezone.utc)
wakeup = mocker.MagicMock(spec=autosuspend.Wakeup)
wakeup.check.return_value = start + timedelta(seconds=25)
processor = autosuspend.Processor(
[_StubCheck("stub", None)], [wakeup], 2, 10, 22, sleep_fn, wakeup_fn, False
)
# init iteration
processor.iteration(start, False)
# no activity and enough time passed to start sleeping
processor.iteration(start + timedelta(seconds=3), False)
assert not sleep_fn.called
def test_wakeup_delta_applied(self, mocker, sleep_fn, wakeup_fn) -> None:
start = datetime.now(timezone.utc)
wakeup = mocker.MagicMock(spec=autosuspend.Wakeup)
wakeup.check.return_value = start + timedelta(seconds=25)
processor = autosuspend.Processor(
[_StubCheck("stub", None)], [wakeup], 2, 10, 4, sleep_fn, wakeup_fn, False
)
# init iteration
processor.iteration(start, False)
# no activity and enough time passed to start sleeping
processor.iteration(start + timedelta(seconds=3), False)
assert sleep_fn.called
assert wakeup_fn.call_arg == start + timedelta(seconds=21)
autosuspend-3.0/tests/test_checks.py 0000664 0000000 0000000 00000000761 13611126124 0017742 0 ustar 00root root 0000000 0000000 from autosuspend.checks import Check
class TestCheck:
class DummyCheck(Check):
@classmethod
def create(cls, name, config):
pass
def check(self):
pass
def test_name(self) -> None:
name = "test"
assert self.DummyCheck(name).name == name
def test_name_default(self) -> None:
assert self.DummyCheck().name is not None
def test_str(self) -> None:
assert isinstance(str(self.DummyCheck("test")), str)
autosuspend-3.0/tests/test_checks_activity.py 0000664 0000000 0000000 00000125316 13611126124 0021662 0 ustar 00root root 0000000 0000000 from collections import namedtuple
import configparser
import json
import os
import os.path
import pwd
import re
import socket
import subprocess
import sys
from freezegun import freeze_time
import psutil
import pytest
import requests
from autosuspend.checks import ConfigurationError, SevereCheckError, TemporaryCheckError
from autosuspend.checks.activity import (
ActiveCalendarEvent,
ActiveConnection,
ExternalCommand,
Kodi,
KodiIdleTime,
Load,
LogindSessionsIdle,
Mpd,
NetworkBandwidth,
Ping,
Processes,
Smb,
Users,
XIdleTime,
XPath,
)
from . import CheckTest
snic = namedtuple("snic", ["family", "address", "netmask", "broadcast", "ptp"])
class TestSmb(CheckTest):
def create_instance(self, name):
return Smb(name)
def test_no_connections(self, datadir, monkeypatch) -> None:
def return_data(*args, **kwargs):
return (datadir / "smbstatus_no_connections").read_bytes()
monkeypatch.setattr(subprocess, "check_output", return_data)
assert Smb("foo").check() is None
def test_with_connections(self, datadir, monkeypatch) -> None:
def return_data(*args, **kwargs):
return (datadir / "smbstatus_with_connections").read_bytes()
monkeypatch.setattr(subprocess, "check_output", return_data)
res = Smb("foo").check()
assert res is not None
assert len(res.splitlines()) == 3
def test_call_error(self, mocker) -> None:
mocker.patch(
"subprocess.check_output",
side_effect=subprocess.CalledProcessError(2, "cmd"),
)
with pytest.raises(SevereCheckError):
Smb("foo").check()
def test_create(self) -> None:
assert isinstance(Smb.create("name", None), Smb)
class TestUsers(CheckTest):
def create_instance(self, name):
return Users(name, re.compile(".*"), re.compile(".*"), re.compile(".*"))
@staticmethod
def create_suser(name, terminal, host, started, pid):
return psutil._common.suser(name, terminal, host, started, pid)
def test_no_users(self, monkeypatch) -> None:
def data():
return []
monkeypatch.setattr(psutil, "users", data)
assert (
Users("users", re.compile(".*"), re.compile(".*"), re.compile(".*")).check()
is None
)
def test_smoke(self) -> None:
Users("users", re.compile(".*"), re.compile(".*"), re.compile(".*")).check()
def test_matching_users(self, monkeypatch) -> None:
def data():
return [self.create_suser("foo", "pts1", "host", 12345, 12345)]
monkeypatch.setattr(psutil, "users", data)
assert (
Users("users", re.compile(".*"), re.compile(".*"), re.compile(".*")).check()
is not None
)
def test_non_matching_user(self, monkeypatch) -> None:
def data():
return [self.create_suser("foo", "pts1", "host", 12345, 12345)]
monkeypatch.setattr(psutil, "users", data)
assert (
Users(
"users", re.compile("narf"), re.compile(".*"), re.compile(".*")
).check()
is None
)
def test_create(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
name = name.*name
terminal = term.*term
host = host.*host
"""
)
check = Users.create("name", parser["section"])
assert check._user_regex == re.compile("name.*name")
assert check._terminal_regex == re.compile("term.*term")
assert check._host_regex == re.compile("host.*host")
def test_create_regex_error(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
name = name.*name
terminal = term.[[a-9]term
host = host.*host
"""
)
with pytest.raises(ConfigurationError):
Users.create("name", parser["section"])
class TestProcesses(CheckTest):
def create_instance(self, name):
return Processes(name, ["foo"])
class StubProcess:
def __init__(self, name):
self._name = name
def name(self):
return self._name
class RaisingProcess:
def name(self):
raise psutil.NoSuchProcess(42)
def test_matching_process(self, monkeypatch) -> None:
def data():
return [self.StubProcess("blubb"), self.StubProcess("nonmatching")]
monkeypatch.setattr(psutil, "process_iter", data)
assert Processes("foo", ["dummy", "blubb", "other"]).check() is not None
def test_ignore_no_such_process(self, monkeypatch) -> None:
def data():
return [self.RaisingProcess()]
monkeypatch.setattr(psutil, "process_iter", data)
Processes("foo", ["dummy"]).check()
def test_non_matching_process(self, monkeypatch) -> None:
def data():
return [self.StubProcess("asdfasdf"), self.StubProcess("nonmatching")]
monkeypatch.setattr(psutil, "process_iter", data)
assert Processes("foo", ["dummy", "blubb", "other"]).check() is None
def test_create(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
processes = foo, bar, narf
"""
)
assert Processes.create("name", parser["section"])._processes == [
"foo",
"bar",
"narf",
]
def test_create_no_entry(self) -> None:
parser = configparser.ConfigParser()
parser.read_string("""[section]""")
with pytest.raises(ConfigurationError):
Processes.create("name", parser["section"])
class TestActiveCalendarEvent(CheckTest):
def create_instance(self, name):
return ActiveCalendarEvent(name, url="asdfasdf", timeout=5)
def test_smoke(self, datadir, serve_file) -> None:
result = ActiveCalendarEvent(
"test", url=serve_file(datadir / "long-event.ics"), timeout=3,
).check()
assert result is not None
assert "long-event" in result
def test_exact_range(self, datadir, serve_file) -> None:
with freeze_time("2016-06-05 13:00:00", tz_offset=-2):
result = ActiveCalendarEvent(
"test", url=serve_file(datadir / "long-event.ics"), timeout=3,
).check()
assert result is not None
assert "long-event" in result
def test_before_exact_range(self, datadir, serve_file) -> None:
with freeze_time("2016-06-05 12:58:00", tz_offset=-2):
result = ActiveCalendarEvent(
"test", url=serve_file(datadir / "long-event.ics"), timeout=3,
).check()
assert result is None
def test_no_event(self, datadir, serve_file) -> None:
assert (
ActiveCalendarEvent(
"test", url=serve_file(datadir / "old-event.ics"), timeout=3,
).check()
is None
)
def test_create(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
url = foobar
username = user
password = pass
timeout = 3
"""
)
check: ActiveCalendarEvent = ActiveCalendarEvent.create(
"name", parser["section"],
) # type: ignore
assert check._url == "foobar"
assert check._username == "user"
assert check._password == "pass"
assert check._timeout == 3
class TestActiveConnection(CheckTest):
MY_PORT = 22
MY_ADDRESS = "123.456.123.456"
MY_ADDRESS_IPV6 = "fe80::5193:518c:5c69:aedb"
# this might sometimes happen:
# https://superuser.com/a/99753/227177
MY_ADDRESS_IPV6_SCOPED = "fe80::5193:518c:5c69:cccc%eth0"
def create_instance(self, name):
return ActiveConnection(name, [10])
def test_smoke(self) -> None:
ActiveConnection("foo", [22]).check()
@pytest.mark.parametrize(
"connection",
[
# ipv4
psutil._common.sconn(
-1,
socket.AF_INET,
socket.SOCK_STREAM,
(MY_ADDRESS, MY_PORT),
("42.42.42.42", 42),
"ESTABLISHED",
None,
),
# ipv6
psutil._common.sconn(
-1,
socket.AF_INET6,
socket.SOCK_STREAM,
(MY_ADDRESS_IPV6, MY_PORT),
("42.42.42.42", 42),
"ESTABLISHED",
None,
),
# ipv6 where local address has scope
psutil._common.sconn(
-1,
socket.AF_INET6,
socket.SOCK_STREAM,
(MY_ADDRESS_IPV6_SCOPED.split("%")[0], MY_PORT),
("42.42.42.42", 42),
"ESTABLISHED",
None,
),
],
)
def test_connected(self, monkeypatch, connection) -> None:
def addresses():
return {
"dummy": [
snic(socket.AF_INET, self.MY_ADDRESS, "255.255.255.0", None, None),
snic(
socket.AF_INET6,
self.MY_ADDRESS_IPV6,
"ffff:ffff:ffff:ffff::",
None,
None,
),
snic(
socket.AF_INET6,
self.MY_ADDRESS_IPV6_SCOPED,
"ffff:ffff:ffff:ffff::",
None,
None,
),
],
}
def connections():
return [connection]
monkeypatch.setattr(psutil, "net_if_addrs", addresses)
monkeypatch.setattr(psutil, "net_connections", connections)
assert ActiveConnection("foo", [10, self.MY_PORT, 30]).check() is not None
@pytest.mark.parametrize(
"connection",
[
# not my port
psutil._common.sconn(
-1,
socket.AF_INET,
socket.SOCK_STREAM,
(MY_ADDRESS, 32),
("42.42.42.42", 42),
"ESTABLISHED",
None,
),
# not my local address
psutil._common.sconn(
-1,
socket.AF_INET,
socket.SOCK_STREAM,
("33.33.33.33", MY_PORT),
("42.42.42.42", 42),
"ESTABLISHED",
None,
),
# not established
psutil._common.sconn(
-1,
socket.AF_INET,
socket.SOCK_STREAM,
(MY_ADDRESS, MY_PORT),
("42.42.42.42", 42),
"NARF",
None,
),
# I am the client
psutil._common.sconn(
-1,
socket.AF_INET,
socket.SOCK_STREAM,
("42.42.42.42", 42),
(MY_ADDRESS, MY_PORT),
"NARF",
None,
),
],
)
def test_not_connected(self, monkeypatch, connection) -> None:
def addresses():
return {
"dummy": [
snic(socket.AF_INET, self.MY_ADDRESS, "255.255.255.0", None, None)
]
}
def connections():
return [connection]
monkeypatch.setattr(psutil, "net_if_addrs", addresses)
monkeypatch.setattr(psutil, "net_connections", connections)
assert ActiveConnection("foo", [10, self.MY_PORT, 30]).check() is None
def test_create(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
ports = 10,20,30
"""
)
assert ActiveConnection.create("name", parser["section"])._ports == {10, 20, 30}
def test_create_no_entry(self) -> None:
parser = configparser.ConfigParser()
parser.read_string("""[section]""")
with pytest.raises(ConfigurationError):
ActiveConnection.create("name", parser["section"])
def test_create_no_number(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
ports = 10,20xx,30
"""
)
with pytest.raises(ConfigurationError):
ActiveConnection.create("name", parser["section"])
class TestLoad(CheckTest):
def create_instance(self, name):
return Load(name, 0.4)
def test_below(self, monkeypatch) -> None:
threshold = 1.34
def data():
return [0, threshold - 0.2, 0]
monkeypatch.setattr(os, "getloadavg", data)
assert Load("foo", threshold).check() is None
def test_above(self, monkeypatch) -> None:
threshold = 1.34
def data():
return [0, threshold + 0.2, 0]
monkeypatch.setattr(os, "getloadavg", data)
assert Load("foo", threshold).check() is not None
def test_create(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
threshold = 3.2
"""
)
assert Load.create("name", parser["section"])._threshold == 3.2
def test_create_no_number(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
threshold = narf
"""
)
with pytest.raises(ConfigurationError):
Load.create("name", parser["section"])
class TestMpd(CheckTest):
def create_instance(self, name):
return Mpd(name, None, None, None)
def test_playing(self, monkeypatch) -> None:
check = Mpd("test", None, None, None) # type: ignore
def get_state():
return {"state": "play"}
monkeypatch.setattr(check, "_get_state", get_state)
assert check.check() is not None
def test_not_playing(self, monkeypatch) -> None:
check = Mpd("test", None, None, None) # type: ignore
def get_state():
return {"state": "pause"}
monkeypatch.setattr(check, "_get_state", get_state)
assert check.check() is None
def test_correct_mpd_interaction(self, mocker) -> None:
import mpd
mock_instance = mocker.MagicMock(spec=mpd.MPDClient)
mock_instance.status.return_value = {"state": "play"}
timeout_property = mocker.PropertyMock()
type(mock_instance).timeout = timeout_property
mock = mocker.patch("mpd.MPDClient")
mock.return_value = mock_instance
host = "foo"
port = 42
timeout = 17
assert Mpd("name", host, port, timeout).check() is not None
timeout_property.assert_called_once_with(timeout)
mock_instance.connect.assert_called_once_with(host, port)
mock_instance.status.assert_called_once_with()
mock_instance.close.assert_called_once_with()
mock_instance.disconnect.assert_called_once_with()
def test_handle_connection_errors(self) -> None:
check = Mpd("test", None, None, None) # type: ignore
def _get_state():
raise ConnectionError()
check._get_state = _get_state # type: ignore
with pytest.raises(TemporaryCheckError):
check.check()
def test_create(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
host = host
port = 1234
timeout = 12
"""
)
check = Mpd.create("name", parser["section"])
assert check._host == "host"
assert check._port == 1234
assert check._timeout == 12
def test_create_port_no_number(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
host = host
port = string
timeout = 12
"""
)
with pytest.raises(ConfigurationError):
Mpd.create("name", parser["section"])
def test_create_timeout_no_number(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
host = host
port = 10
timeout = string
"""
)
with pytest.raises(ConfigurationError):
Mpd.create("name", parser["section"])
class TestNetworkBandwidth(CheckTest):
def create_instance(self, name):
return NetworkBandwidth(name, psutil.net_if_addrs().keys(), 0, 0)
@staticmethod
@pytest.fixture()
def serve_data_url(httpserver) -> str:
httpserver.expect_request("").respond_with_json({"foo": "bar"})
return httpserver.url_for("")
def test_smoke(self, serve_data_url) -> None:
check = NetworkBandwidth("name", psutil.net_if_addrs().keys(), 0, 0)
# make some traffic
requests.get(serve_data_url)
assert check.check() is not None
@pytest.fixture
def mock_interfaces(self, mocker):
mock = mocker.patch("psutil.net_if_addrs")
mock.return_value = {"foo": None, "bar": None, "baz": None}
def test_create(self, mock_interfaces) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
interfaces = foo, baz
threshold_send = 200
threshold_receive = 300
"""
)
check = NetworkBandwidth.create("name", parser["section"])
assert set(check._interfaces) == {"foo", "baz"}
assert check._threshold_send == 200
assert check._threshold_receive == 300
def test_create_default(self, mock_interfaces) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
interfaces = foo, baz
"""
)
check = NetworkBandwidth.create("name", parser["section"])
assert set(check._interfaces) == {"foo", "baz"}
assert check._threshold_send == 100
assert check._threshold_receive == 100
@pytest.mark.parametrize(
"config,error_match",
[
(
"""
[section]
interfaces = foo, NOTEXIST
threshold_send = 200
threshold_receive = 300
""",
r"does not exist",
),
(
"""
[section]
threshold_send = 200
threshold_receive = 300
""",
r"configuration key: \'interfaces\'",
),
(
"""
[section]
interfaces =
threshold_send = 200
threshold_receive = 300
""",
r"No interfaces configured",
),
(
"""
[section]
interfaces = foo, bar
threshold_send = xxx
""",
r"Threshold in wrong format",
),
(
"""
[section]
interfaces = foo, bar
threshold_receive = xxx
""",
r"Threshold in wrong format",
),
],
)
def test_create_error(self, mock_interfaces, config, error_match) -> None:
parser = configparser.ConfigParser()
parser.read_string(config)
with pytest.raises(ConfigurationError, match=error_match):
NetworkBandwidth.create("name", parser["section"])
@pytest.mark.parametrize(
"send_threshold,receive_threshold,match",
[(sys.float_info.max, 0, "receive"), (0, sys.float_info.max, "sending")],
)
def test_with_activity(
self, send_threshold, receive_threshold, match, serve_data_url
) -> None:
check = NetworkBandwidth(
"name", psutil.net_if_addrs().keys(), send_threshold, receive_threshold
)
# make some traffic
requests.get(serve_data_url)
res = check.check()
assert res is not None
assert match in res
def test_no_activity(self, serve_data_url) -> None:
check = NetworkBandwidth(
"name", psutil.net_if_addrs().keys(), sys.float_info.max, sys.float_info.max
)
# make some traffic
requests.get(serve_data_url)
assert check.check() is None
def test_internal_state_updated(self, serve_data_url) -> None:
check = NetworkBandwidth(
"name", psutil.net_if_addrs().keys(), sys.float_info.max, sys.float_info.max
)
check.check()
old_state = check._previous_values
requests.get(serve_data_url)
check.check()
assert old_state != check._previous_values
def test_delta_calculation_send(self, mocker) -> None:
first = mocker.MagicMock()
type(first).bytes_sent = mocker.PropertyMock(return_value=1000)
type(first).bytes_recv = mocker.PropertyMock(return_value=800)
mocker.patch("psutil.net_io_counters").return_value = {
"eth0": first,
}
with freeze_time("2019-10-01 10:00:00"):
check = NetworkBandwidth("name", ["eth0"], 0, sys.float_info.max)
second = mocker.MagicMock()
type(second).bytes_sent = mocker.PropertyMock(return_value=1222)
type(second).bytes_recv = mocker.PropertyMock(return_value=900)
mocker.patch("psutil.net_io_counters").return_value = {
"eth0": second,
}
with freeze_time("2019-10-01 10:00:01"):
res = check.check()
assert res is not None
assert " 222.0 " in res
def test_delta_calculation_receive(self, mocker) -> None:
first = mocker.MagicMock()
type(first).bytes_sent = mocker.PropertyMock(return_value=1000)
type(first).bytes_recv = mocker.PropertyMock(return_value=800)
mocker.patch("psutil.net_io_counters").return_value = {
"eth0": first,
}
with freeze_time("2019-10-01 10:00:00"):
check = NetworkBandwidth("name", ["eth0"], sys.float_info.max, 0)
second = mocker.MagicMock()
type(second).bytes_sent = mocker.PropertyMock(return_value=1222)
type(second).bytes_recv = mocker.PropertyMock(return_value=900)
mocker.patch("psutil.net_io_counters").return_value = {
"eth0": second,
}
with freeze_time("2019-10-01 10:00:01"):
res = check.check()
assert res is not None
assert " 100.0 " in res
class TestKodi(CheckTest):
def create_instance(self, name):
return Kodi(name, url="url", timeout=10)
def test_playing(self, mocker) -> None:
mock_reply = mocker.MagicMock()
mock_reply.json.return_value = {
"id": 1,
"jsonrpc": "2.0",
"result": [{"playerid": 0, "type": "audio"}],
}
mocker.patch("requests.Session.get", return_value=mock_reply)
assert Kodi("foo", url="url", timeout=10).check() is not None
mock_reply.json.assert_called_once_with()
def test_not_playing(self, mocker) -> None:
mock_reply = mocker.MagicMock()
mock_reply.json.return_value = {"id": 1, "jsonrpc": "2.0", "result": []}
mocker.patch("requests.Session.get", return_value=mock_reply)
assert Kodi("foo", url="url", timeout=10).check() is None
mock_reply.json.assert_called_once_with()
def test_playing_suspend_while_paused(self, mocker) -> None:
mock_reply = mocker.MagicMock()
mock_reply.json.return_value = {
"id": 1,
"jsonrpc": "2.0",
"result": {"Player.Playing": True},
}
mocker.patch("requests.Session.get", return_value=mock_reply)
assert (
Kodi("foo", url="url", timeout=10, suspend_while_paused=True).check()
is not None
)
mock_reply.json.assert_called_once_with()
def test_not_playing_suspend_while_paused(self, mocker) -> None:
mock_reply = mocker.MagicMock()
mock_reply.json.return_value = {
"id": 1,
"jsonrpc": "2.0",
"result": {"Player.Playing": False},
}
mocker.patch("requests.Session.get", return_value=mock_reply)
assert (
Kodi("foo", url="url", timeout=10, suspend_while_paused=True).check()
is None
)
mock_reply.json.assert_called_once_with()
def test_assertion_no_result(self, mocker) -> None:
mock_reply = mocker.MagicMock()
mock_reply.json.return_value = {"id": 1, "jsonrpc": "2.0"}
mocker.patch("requests.Session.get", return_value=mock_reply)
with pytest.raises(TemporaryCheckError):
Kodi("foo", url="url", timeout=10).check()
def test_request_error(self, mocker) -> None:
mocker.patch(
"requests.Session.get", side_effect=requests.exceptions.RequestException()
)
with pytest.raises(TemporaryCheckError):
Kodi("foo", url="url", timeout=10).check()
def test_json_error(self, mocker) -> None:
mock_reply = mocker.MagicMock()
mock_reply.json.side_effect = json.JSONDecodeError("test", "test", 42)
mocker.patch("requests.Session.get", return_value=mock_reply)
with pytest.raises(TemporaryCheckError):
Kodi("foo", url="url", timeout=10).check()
def test_create(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
url = anurl
timeout = 12
"""
)
check = Kodi.create("name", parser["section"])
assert check._url.startswith("anurl")
assert check._timeout == 12
assert not check._suspend_while_paused
def test_create_default_url(self) -> None:
parser = configparser.ConfigParser()
parser.read_string("""[section]""")
check = Kodi.create("name", parser["section"])
assert check._url.split("?")[0] == "http://localhost:8080/jsonrpc"
def test_create_timeout_no_number(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
url = anurl
timeout = string
"""
)
with pytest.raises(ConfigurationError):
Kodi.create("name", parser["section"])
def test_create_suspend_while_paused(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
url = anurl
suspend_while_paused = True
"""
)
check = Kodi.create("name", parser["section"])
assert check._url.startswith("anurl")
assert check._suspend_while_paused
class TestKodiIdleTime(CheckTest):
def create_instance(self, name):
return KodiIdleTime(name, url="url", timeout=10, idle_time=10)
def test_create(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
url = anurl
timeout = 12
idle_time = 42
"""
)
check = KodiIdleTime.create("name", parser["section"])
assert check._url.startswith("anurl")
assert check._timeout == 12
assert check._idle_time == 42
def test_create_default_url(self) -> None:
parser = configparser.ConfigParser()
parser.read_string("""[section]""")
check = KodiIdleTime.create("name", parser["section"])
assert check._url.split("?")[0] == "http://localhost:8080/jsonrpc"
def test_create_timeout_no_number(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
url = anurl
timeout = string
"""
)
with pytest.raises(ConfigurationError):
KodiIdleTime.create("name", parser["section"])
def test_create_idle_time_no_number(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
url = anurl
idle_time = string
"""
)
with pytest.raises(ConfigurationError):
KodiIdleTime.create("name", parser["section"])
def test_no_result(self, mocker) -> None:
mock_reply = mocker.MagicMock()
mock_reply.json.return_value = {"id": 1, "jsonrpc": "2.0"}
mocker.patch("requests.Session.get", return_value=mock_reply)
with pytest.raises(TemporaryCheckError):
KodiIdleTime("foo", url="url", timeout=10, idle_time=42).check()
def test_result_is_list(self, mocker) -> None:
mock_reply = mocker.MagicMock()
mock_reply.json.return_value = {"id": 1, "jsonrpc": "2.0", "result": []}
mocker.patch("requests.Session.get", return_value=mock_reply)
with pytest.raises(TemporaryCheckError):
KodiIdleTime("foo", url="url", timeout=10, idle_time=42).check()
def test_result_no_entry(self, mocker) -> None:
mock_reply = mocker.MagicMock()
mock_reply.json.return_value = {"id": 1, "jsonrpc": "2.0", "result": {}}
mocker.patch("requests.Session.get", return_value=mock_reply)
with pytest.raises(TemporaryCheckError):
KodiIdleTime("foo", url="url", timeout=10, idle_time=42).check()
def test_result_wrong_entry(self, mocker) -> None:
mock_reply = mocker.MagicMock()
mock_reply.json.return_value = {
"id": 1,
"jsonrpc": "2.0",
"result": {"narf": True},
}
mocker.patch("requests.Session.get", return_value=mock_reply)
with pytest.raises(TemporaryCheckError):
KodiIdleTime("foo", url="url", timeout=10, idle_time=42).check()
def test_active(self, mocker) -> None:
mock_reply = mocker.MagicMock()
mock_reply.json.return_value = {
"id": 1,
"jsonrpc": "2.0",
"result": {"System.IdleTime(42)": False},
}
mocker.patch("requests.Session.get", return_value=mock_reply)
assert (
KodiIdleTime("foo", url="url", timeout=10, idle_time=42).check() is not None
)
def test_inactive(self, mocker) -> None:
mock_reply = mocker.MagicMock()
mock_reply.json.return_value = {
"id": 1,
"jsonrpc": "2.0",
"result": {"System.IdleTime(42)": True},
}
mocker.patch("requests.Session.get", return_value=mock_reply)
assert KodiIdleTime("foo", url="url", timeout=10, idle_time=42).check() is None
def test_request_error(self, mocker) -> None:
mocker.patch(
"requests.Session.get", side_effect=requests.exceptions.RequestException()
)
with pytest.raises(TemporaryCheckError):
KodiIdleTime("foo", url="url", timeout=10, idle_time=42).check()
class TestPing(CheckTest):
def create_instance(self, name):
return Ping(name, "8.8.8.8")
def test_smoke(self, mocker) -> None:
mock = mocker.patch("subprocess.call")
mock.return_value = 1
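# a non-zero exit status means ping got no reply, so none of the hosts counts as active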
hosts = ["abc", "129.123.145.42"]
assert Ping("name", hosts).check() is None
assert mock.call_count == len(hosts)
for (args, _), host in zip(mock.call_args_list, hosts):
assert args[0][-1] == host
def test_matching(self, mocker) -> None:
mock = mocker.patch("subprocess.call")
mock.return_value = 0
assert Ping("name", ["foo"]).check() is not None
def test_create_missing_hosts(self) -> None:
parser = configparser.ConfigParser()
parser.read_string("""[section]""")
with pytest.raises(ConfigurationError):
Ping.create("name", parser["section"])
def test_create_host_splitting(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
hosts=a,b,c
"""
)
ping = Ping.create("name", parser["section"])
assert ping._hosts == ["a", "b", "c"]
class TestXIdleTime(CheckTest):
def create_instance(self, name):
return XIdleTime(name, 10, "sockets", None, None)
def test_smoke(self, mocker) -> None:
check = XIdleTime("name", 100, "logind", re.compile(r"a^"), re.compile(r"a^"))
mocker.patch.object(check, "_provide_sessions").return_value = [
("42", "auser"),
]
co_mock = mocker.patch("subprocess.check_output")
co_mock.return_value = "123"
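# the mocked subprocess output is the idle time in milliseconds; 123 ms shows up as 0.123 s in the result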
res = check.check()
assert res is not None
assert " 0.123 " in res
args, kwargs = co_mock.call_args
assert "auser" in args[0]
assert kwargs["env"]["DISPLAY"] == ":42"
assert "auser" in kwargs["env"]["XAUTHORITY"]
def test_no_activity(self, mocker) -> None:
check = XIdleTime("name", 100, "logind", re.compile(r"a^"), re.compile(r"a^"))
mocker.patch.object(check, "_provide_sessions").return_value = [
("42", "auser"),
]
mocker.patch("subprocess.check_output").return_value = "120000"
assert check.check() is None
def test_multiple_sessions(self, mocker) -> None:
check = XIdleTime("name", 100, "logind", re.compile(r"a^"), re.compile(r"a^"))
mocker.patch.object(check, "_provide_sessions").return_value = [
("42", "auser"),
("17", "otheruser"),
]
co_mock = mocker.patch("subprocess.check_output")
co_mock.side_effect = [
"120000",
"123",
]
res = check.check()
assert res is not None
assert " 0.123 " in res
assert co_mock.call_count == 2
# check second call for correct values, not checked before
args, kwargs = co_mock.call_args_list[1]
assert "otheruser" in args[0]
assert kwargs["env"]["DISPLAY"] == ":17"
assert "otheruser" in kwargs["env"]["XAUTHORITY"]
def test_handle_call_error(self, mocker) -> None:
check = XIdleTime("name", 100, "logind", re.compile(r"a^"), re.compile(r"a^"))
mocker.patch.object(check, "_provide_sessions").return_value = [
("42", "auser"),
]
mocker.patch(
"subprocess.check_output",
).side_effect = subprocess.CalledProcessError(2, "foo")
with pytest.raises(TemporaryCheckError):
check.check()
def test_create_default(self) -> None:
parser = configparser.ConfigParser()
parser.read_string("""[section]""")
check = XIdleTime.create("name", parser["section"])
assert check._timeout == 600
assert check._ignore_process_re == re.compile(r"a^")
assert check._ignore_users_re == re.compile(r"a^")
assert check._provide_sessions == check._list_sessions_sockets
def test_create(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
timeout = 42
ignore_if_process = .*test
ignore_users = test.*test
method = logind
"""
)
check = XIdleTime.create("name", parser["section"])
assert check._timeout == 42
assert check._ignore_process_re == re.compile(r".*test")
assert check._ignore_users_re == re.compile(r"test.*test")
assert check._provide_sessions == check._list_sessions_logind
def test_create_no_int(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
timeout = string
"""
)
with pytest.raises(ConfigurationError):
XIdleTime.create("name", parser["section"])
def test_create_broken_process_re(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
ignore_if_process = [[a-9]
"""
)
with pytest.raises(ConfigurationError):
XIdleTime.create("name", parser["section"])
def test_create_broken_users_re(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
ignore_users = [[a-9]
"""
)
with pytest.raises(ConfigurationError):
XIdleTime.create("name", parser["section"])
def test_create_unknown_method(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
method = asdfasdf
"""
)
with pytest.raises(ConfigurationError):
XIdleTime.create("name", parser["section"])
def test_list_sessions_logind(self, mocker) -> None:
mock = mocker.patch("autosuspend.checks.activity.list_logind_sessions")
mock.return_value = [
("c1", {"Name": "foo"}),
("c2", {"Display": "asdfasf"}),
("c3", {"Name": "hello", "Display": "nonumber"}),
("c4", {"Name": "hello", "Display": "3"}),
]
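# only c4 provides both a user name and a numeric display, so it is the only session expected below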
parser = configparser.ConfigParser()
parser.read_string("""[section]""")
check = XIdleTime.create("name", parser["section"])
assert check._list_sessions_logind() == [(3, "hello")]
def test_list_sessions_socket(self, mocker) -> None:
mock_glob = mocker.patch("glob.glob")
mock_glob.return_value = [
"/tmp/.X11-unix/X0",
"/tmp/.X11-unix/X42",
"/tmp/.X11-unix/Xnum",
]
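# the display number is derived from the socket file name; Xnum has no numeric suffix and should be skipped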
stat_return = os.stat(os.path.realpath(__file__))
this_user = pwd.getpwuid(stat_return.st_uid)
mock_stat = mocker.patch("os.stat")
mock_stat.return_value = stat_return
mock_pwd = mocker.patch("pwd.getpwuid")
mock_pwd.return_value = this_user
parser = configparser.ConfigParser()
parser.read_string("""[section]""")
check = XIdleTime.create("name", parser["section"])
assert check._list_sessions_sockets() == [
(0, this_user.pw_name),
(42, this_user.pw_name),
]
class TestExternalCommand(CheckTest):
def create_instance(self, name):
return ExternalCommand(name, "asdfasdf")
def test_check(self, mocker) -> None:
mock = mocker.patch("subprocess.check_call")
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
command = foo bar
"""
)
assert (
ExternalCommand.create("name", parser["section"]).check() is not None # type: ignore
)
mock.assert_called_once_with("foo bar", shell=True)
def test_check_no_match(self, mocker) -> None:
mock = mocker.patch("subprocess.check_call")
mock.side_effect = subprocess.CalledProcessError(2, "foo bar")
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
command = foo bar
"""
)
assert (
ExternalCommand.create("name", parser["section"]).check() is None # type: ignore
)
mock.assert_called_once_with("foo bar", shell=True)
class TestXPath(CheckTest):
def create_instance(self, name):
return XPath(
name=name,
url="url",
timeout=5,
username="userx",
password="pass",
xpath="/b",
)
def test_matching(self, mocker) -> None:
mock_reply = mocker.MagicMock()
content_property = mocker.PropertyMock()
type(mock_reply).content = content_property
content_property.return_value = "<a></a>"  # minimal document matching the configured /a XPath
mock_method = mocker.patch("requests.Session.get", return_value=mock_reply)
url = "nourl"
assert XPath("foo", xpath="/a", url=url, timeout=5).check() is not None
mock_method.assert_called_once_with(url, timeout=5)
content_property.assert_called_once_with()
def test_not_matching(self, mocker) -> None:
mock_reply = mocker.MagicMock()
content_property = mocker.PropertyMock()
type(mock_reply).content = content_property
content_property.return_value = "<a></a>"
mocker.patch("requests.Session.get", return_value=mock_reply)
assert XPath("foo", xpath="/b", url="nourl", timeout=5).check() is None
def test_create(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
url = url
xpath = /xpath
username = user
password = pass
timeout = 42
"""
)
check: XPath = XPath.create("name", parser["section"]) # type: ignore
assert check._xpath == "/xpath"
assert check._url == "url"
assert check._username == "user"
assert check._password == "pass"
assert check._timeout == 42
def test_network_errors_are_passed(self, datadir, serve_protected) -> None:
with pytest.raises(TemporaryCheckError):
XPath(
name="name",
url=serve_protected(datadir / "data.txt")[0],
timeout=5,
username="wrong",
password="wrong",
xpath="/b",
).request()
class TestLogindSessionsIdle(CheckTest):
def create_instance(self, name):
return LogindSessionsIdle(name, ["tty", "x11", "wayland"], ["active", "online"])
def test_active(self, logind) -> None:
logind.AddSession("c1", "seat0", 1042, "auser", True)
check = LogindSessionsIdle("test", ["test"], ["active", "online"])
assert check.check() is not None
def test_inactive(self, logind) -> None:
logind.AddSession("c1", "seat0", 1042, "auser", False)
check = LogindSessionsIdle("test", ["test"], ["active", "online"])
assert check.check() is None
def test_ignore_unknown_type(self, logind) -> None:
logind.AddSession("c1", "seat0", 1042, "auser", True)
check = LogindSessionsIdle("test", ["not_test"], ["active", "online"])
assert check.check() is None
def test_configure_defaults(self) -> None:
parser = configparser.ConfigParser()
parser.read_string("[section]")
check = LogindSessionsIdle.create("name", parser["section"])
assert check._types == ["tty", "x11", "wayland"]
assert check._states == ["active", "online"]
def test_configure_types(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
types=test, bla,foo
"""
)
check = LogindSessionsIdle.create("name", parser["section"])
assert check._types == ["test", "bla", "foo"]
def test_configure_states(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
states=test, bla,foo
"""
)
check = LogindSessionsIdle.create("name", parser["section"])
assert check._states == ["test", "bla", "foo"]
autosuspend-3.0/tests/test_checks_activity/ 0000775 0000000 0000000 00000000000 13611126124 0021300 5 ustar 00root root 0000000 0000000 autosuspend-3.0/tests/test_checks_activity/long-event.ics 0000664 0000000 0000000 00000001273 13611126124 0024061 0 ustar 00root root 0000000 0000000 BEGIN:VCALENDAR
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
VERSION:2.0
BEGIN:VTIMEZONE
TZID:Europe/Berlin
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:19700329T020000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:19701025T030000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
END:STANDARD
END:VTIMEZONE
BEGIN:VEVENT
CREATED:20180603T194125Z
LAST-MODIFIED:20180603T194144Z
DTSTAMP:20180603T194144Z
UID:6ff13ee1-e548-41b1-8e08-d7725423743a
SUMMARY:long-event
DTSTART;TZID=Europe/Berlin:20160605T130000
DTEND;TZID=Europe/Berlin:20260605T150000
TRANSP:OPAQUE
SEQUENCE:1
END:VEVENT
END:VCALENDAR
autosuspend-3.0/tests/test_checks_activity/old-event.ics 0000664 0000000 0000000 00000001273 13611126124 0023700 0 ustar 00root root 0000000 0000000 BEGIN:VCALENDAR
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
VERSION:2.0
BEGIN:VTIMEZONE
TZID:Europe/Berlin
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:19700329T020000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:19701025T030000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
END:STANDARD
END:VTIMEZONE
BEGIN:VEVENT
CREATED:20180603T194125Z
LAST-MODIFIED:20180603T194144Z
DTSTAMP:20180603T194144Z
UID:6ff13ee1-e548-41b1-8e08-d7725423743a
SUMMARY:long-event
DTSTART;TZID=Europe/Berlin:20040605T130000
DTEND;TZID=Europe/Berlin:20040605T150000
TRANSP:OPAQUE
SEQUENCE:1
END:VEVENT
END:VCALENDAR
autosuspend-3.0/tests/test_checks_activity/smbstatus_no_connections 0000664 0000000 0000000 00000000447 13611126124 0026353 0 ustar 00root root 0000000 0000000
Samba version 4.7.0
PID Username Group Machine Protocol Version Encryption Signing
----------------------------------------------------------------------------------------------------------------------------------------
autosuspend-3.0/tests/test_checks_activity/smbstatus_with_connections 0000664 0000000 0000000 00000000421 13611126124 0026702 0 ustar 00root root 0000000 0000000
Samba version 3.5.1
PID Username Group Machine
-------------------------------------------------------------------
14944 it 131.169.214.117 (131.169.214.117)
14944 it 131.169.214.117 (131.169.214.117)
autosuspend-3.0/tests/test_checks_util.py 0000664 0000000 0000000 00000016662 13611126124 0021006 0 ustar 00root root 0000000 0000000 import configparser
import pytest
import requests
from autosuspend.checks import Activity, ConfigurationError, TemporaryCheckError
from autosuspend.checks.util import CommandMixin, NetworkMixin, XPathMixin
class _CommandMixinSub(CommandMixin, Activity):
def __init__(self, name, command):
Activity.__init__(self, name)
CommandMixin.__init__(self, command)
def check(self):
pass
class TestCommandMixin:
def test_create(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
command = narf bla
"""
)
check: _CommandMixinSub = _CommandMixinSub.create(
"name", parser["section"],
) # type: ignore
assert check._command == "narf bla"
def test_create_no_command(self) -> None:
parser = configparser.ConfigParser()
parser.read_string("""[section]""")
with pytest.raises(ConfigurationError):
_CommandMixinSub.create("name", parser["section"])
class TestNetworkMixin:
def test_collect_missing_url(self) -> None:
with pytest.raises(ConfigurationError, match=r"^Lacks 'url'.*"):
parser = configparser.ConfigParser()
parser.read_string("[section]")
NetworkMixin.collect_init_args(parser["section"])
def test_username_missing(self) -> None:
with pytest.raises(ConfigurationError, match=r"^Username and.*"):
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
url=ok
password=xxx
"""
)
NetworkMixin.collect_init_args(parser["section"])
def test_password_missing(self) -> None:
with pytest.raises(ConfigurationError, match=r"^Username and.*"):
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
url=ok
username=xxx
"""
)
NetworkMixin.collect_init_args(parser["section"])
def test_collect_default_timeout(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
url=nourl
"""
)
args = NetworkMixin.collect_init_args(parser["section"])
assert args["timeout"] == 5
def test_collect_timeout(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
url=nourl
timeout=42
"""
)
args = NetworkMixin.collect_init_args(parser["section"])
assert args["timeout"] == 42
def test_collect_invalid_timeout(self) -> None:
with pytest.raises(ConfigurationError, match=r"^Configuration error .*"):
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
url=nourl
timeout=xx
"""
)
NetworkMixin.collect_init_args(parser["section"])
def test_request(self, datadir, serve_file) -> None:
reply = NetworkMixin(
serve_file(datadir / "xml_with_encoding.xml"), 5,
).request()
assert reply is not None
assert reply.status_code == 200
def test_requests_exception(self, mocker) -> None:
with pytest.raises(TemporaryCheckError):
mock_method = mocker.patch("requests.Session.get")
mock_method.side_effect = requests.exceptions.ReadTimeout()
NetworkMixin("url", timeout=5).request()
def test_smoke(self, datadir, serve_file) -> None:
response = NetworkMixin(serve_file(datadir / "data.txt"), timeout=5).request()
assert response is not None
assert response.text == "iamhere\n"
def test_exception_404(self, httpserver) -> None:
with pytest.raises(TemporaryCheckError):
NetworkMixin(httpserver.url_for("/does/not/exist"), timeout=5).request()
def test_authentication(self, datadir, serve_protected) -> None:
url, username, password = serve_protected(datadir / "data.txt")
NetworkMixin(url, 5, username=username, password=password).request()
def test_invalid_authentication(self, datadir, serve_protected) -> None:
with pytest.raises(TemporaryCheckError):
NetworkMixin(
serve_protected(datadir / "data.txt")[0],
5,
username="userx",
password="pass",
).request()
def test_file_url(self) -> None:
NetworkMixin("file://" + __file__, 5).request()
class _XPathMixinSub(XPathMixin, Activity):
def __init__(self, name, **kwargs):
Activity.__init__(self, name)
XPathMixin.__init__(self, **kwargs)
def check(self):
pass
class TestXPathMixin:
def test_smoke(self, datadir, serve_file) -> None:
result = _XPathMixinSub(
"foo",
xpath="/b",
url=serve_file(datadir / "xml_with_encoding.xml"),
timeout=5,
).evaluate()
assert result is not None
assert len(result) == 0
def test_broken_xml(self, mocker) -> None:
with pytest.raises(TemporaryCheckError):
mock_reply = mocker.MagicMock()
content_property = mocker.PropertyMock()
type(mock_reply).content = content_property
content_property.return_value = b"//broken"
mocker.patch("requests.Session.get", return_value=mock_reply)
_XPathMixinSub("foo", xpath="/b", url="nourl", timeout=5).evaluate()
def test_xml_with_encoding(self, mocker) -> None:
mock_reply = mocker.MagicMock()
content_property = mocker.PropertyMock()
type(mock_reply).content = content_property
content_property.return_value = (
b'<?xml version="1.0" encoding="ISO-8859-1"?><root></root>'
)
mocker.patch("requests.Session.get", return_value=mock_reply)
_XPathMixinSub("foo", xpath="/b", url="nourl", timeout=5).evaluate()
def test_xpath_prevalidation(self) -> None:
with pytest.raises(ConfigurationError, match=r"^Invalid xpath.*"):
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
xpath=|34/ad
url=nourl
"""
)
_XPathMixinSub.create("name", parser["section"])
@pytest.mark.parametrize("entry,", ["xpath", "url"])
def test_missing_config_entry(self, entry) -> None:
with pytest.raises(ConfigurationError, match=r"^Lacks '" + entry + "'.*"):
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
xpath=/valid
url=nourl
"""
)
del parser["section"][entry]
_XPathMixinSub.create("name", parser["section"])
def test_invalid_config_entry(self) -> None:
with pytest.raises(ConfigurationError, match=r"^Configuration error .*"):
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
xpath=/valid
timeout=xxx
url=nourl
"""
)
_XPathMixinSub.create("name", parser["section"])
autosuspend-3.0/tests/test_checks_util/ 0000775 0000000 0000000 00000000000 13611126124 0020421 5 ustar 00root root 0000000 0000000 autosuspend-3.0/tests/test_checks_util/data.txt 0000664 0000000 0000000 00000000010 13611126124 0022062 0 ustar 00root root 0000000 0000000 iamhere
autosuspend-3.0/tests/test_checks_util/xml_with_encoding.xml 0000664 0000000 0000000 00000003454 13611126124 0024652 0 ustar 00root root 0000000 0000000
autosuspend-3.0/tests/test_checks_wakeup.py 0000664 0000000 0000000 00000032576 13611126124 0021327 0 ustar 00root root 0000000 0000000 import configparser
from datetime import datetime, timedelta, timezone
import os
import subprocess
import dateutil.parser
import pytest
from autosuspend.checks import ConfigurationError, TemporaryCheckError
from autosuspend.checks.wakeup import (
Calendar,
Command,
File,
Periodic,
XPath,
XPathDelta,
)
from . import CheckTest
class TestCalendar(CheckTest):
def create_instance(self, name: str) -> Calendar:
return Calendar(name, url="file:///asdf", timeout=3)
def test_create(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
url = url
username = user
password = pass
timeout = 42
"""
)
check: Calendar = Calendar.create(
"name", parser["section"],
) # type: ignore
assert check._url == "url"
assert check._username == "user"
assert check._password == "pass"
assert check._timeout == 42
def test_empty(self, datadir, serve_file) -> None:
timestamp = dateutil.parser.parse("20050605T130000Z")
assert (
Calendar(
"test", url=serve_file(datadir / "old-event.ics"), timeout=3,
).check(timestamp)
is None
)
def test_smoke(self, datadir, serve_file) -> None:
timestamp = dateutil.parser.parse("20040605T090000Z")
desired_start = dateutil.parser.parse("20040605T110000Z")
assert (
Calendar(
"test", url=serve_file(datadir / "old-event.ics"), timeout=3,
).check(timestamp)
== desired_start
)
def test_select_earliest(self, datadir, serve_file) -> None:
timestamp = dateutil.parser.parse("20040401T090000Z")
desired_start = dateutil.parser.parse("20040405T110000Z")
assert (
Calendar(
"test", url=serve_file(datadir / "multiple.ics"), timeout=3,
).check(timestamp)
== desired_start
)
def test_ignore_running(self, datadir, serve_file) -> None:
url = serve_file(datadir / "old-event.ics")
timestamp = dateutil.parser.parse("20040605T110000Z")
# events are taken if start hits exactly the current time
assert Calendar("test", url=url, timeout=3).check(timestamp) is not None
timestamp = timestamp + timedelta(seconds=1)
assert Calendar("test", url=url, timeout=3).check(timestamp) is None
def test_limited_horizon(self, datadir, serve_file) -> None:
timestamp = dateutil.parser.parse("20040101T000000Z")
assert (
Calendar(
"test", url=serve_file(datadir / "after-horizon.ics"), timeout=3,
).check(timestamp)
is None
)
assert (
Calendar(
"test", url=serve_file(datadir / "before-horizon.ics"), timeout=3,
).check(timestamp)
is not None
)
class TestFile(CheckTest):
def create_instance(self, name):
return File(name, "asdf")
def test_create(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""[section]
path = /tmp/test"""
)
check = File.create("name", parser["section"])
assert check._path == "/tmp/test"
def test_create_no_path(self) -> None:
parser = configparser.ConfigParser()
parser.read_string("""[section]""")
with pytest.raises(ConfigurationError):
File.create("name", parser["section"])
def test_smoke(self, tmpdir) -> None:
test_file = tmpdir.join("file")
test_file.write("42\n\n")
assert File("name", str(test_file)).check(
datetime.now(timezone.utc)
) == datetime.fromtimestamp(42, timezone.utc)
def test_no_file(self, tmpdir) -> None:
assert (
File("name", str(tmpdir.join("narf"))).check(datetime.now(timezone.utc))
is None
)
def test_handle_permission_error(self, tmpdir) -> None:
file_path = tmpdir / "test"
file_path.write(b"2314898")
os.chmod(file_path, 0)
with pytest.raises(TemporaryCheckError):
File("name", str(file_path)).check(datetime.now(timezone.utc))
def test_handle_io_error(self, tmpdir, mocker) -> None:
file_path = tmpdir / "test"
file_path.write(b"2314898")
mocker.patch("builtins.open").side_effect = IOError
with pytest.raises(TemporaryCheckError):
File("name", str(file_path)).check(datetime.now(timezone.utc))
def test_invalid_number(self, tmpdir) -> None:
test_file = tmpdir.join("filexxx")
test_file.write("nonumber\n\n")
with pytest.raises(TemporaryCheckError):
File("name", str(test_file)).check(datetime.now(timezone.utc))
class TestCommand(CheckTest):
def create_instance(self, name):
return Command(name, "asdf")
def test_smoke(self) -> None:
check = Command("test", "echo 1234")
assert check.check(datetime.now(timezone.utc)) == datetime.fromtimestamp(
1234, timezone.utc
)
def test_no_output(self) -> None:
check = Command("test", "echo")
assert check.check(datetime.now(timezone.utc)) is None
def test_not_parseable(self) -> None:
check = Command("test", "echo asdfasdf")
with pytest.raises(TemporaryCheckError):
check.check(datetime.now(timezone.utc))
def test_multiple_lines(self, mocker) -> None:
mock = mocker.patch("subprocess.check_output")
mock.return_value = "1234\nignore\n"
check = Command("test", "echo bla")
assert check.check(datetime.now(timezone.utc)) == datetime.fromtimestamp(
1234, timezone.utc
)
def test_multiple_lines_but_empty(self, mocker) -> None:
mock = mocker.patch("subprocess.check_output")
mock.return_value = " \nignore\n"
check = Command("test", "echo bla")
assert check.check(datetime.now(timezone.utc)) is None
def test_process_error(self, mocker) -> None:
mock = mocker.patch("subprocess.check_output")
mock.side_effect = subprocess.CalledProcessError(2, "foo bar")
check = Command("test", "echo bla")
with pytest.raises(TemporaryCheckError):
check.check(datetime.now(timezone.utc))
class TestPeriodic(CheckTest):
def create_instance(self, name):
delta = timedelta(seconds=10, minutes=42)
return Periodic(name, delta)
def test_create(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
unit=seconds
value=13
"""
)
check = Periodic.create("name", parser["section"])
assert check._delta == timedelta(seconds=13)
def test_create_wrong_unit(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
unit=asdfasdf
value=13
"""
)
with pytest.raises(ConfigurationError):
Periodic.create("name", parser["section"])
def test_create_not_numeric(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
unit=seconds
value=asdfasd
"""
)
with pytest.raises(ConfigurationError):
Periodic.create("name", parser["section"])
def test_create_no_unit(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
value=asdfasd
"""
)
with pytest.raises(ConfigurationError):
Periodic.create("name", parser["section"])
def test_create_float(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
unit=seconds
value=21312.12
"""
)
Periodic.create("name", parser["section"])
def test_check(self) -> None:
delta = timedelta(seconds=10, minutes=42)
check = Periodic("test", delta)
now = datetime.now(timezone.utc)
assert check.check(now) == now + delta
class TestXPath(CheckTest):
def create_instance(self, name):
return XPath(name, xpath="/a", url="nourl", timeout=5)
def test_matching(self, mocker) -> None:
mock_reply = mocker.MagicMock()
content_property = mocker.PropertyMock()
type(mock_reply).content = content_property
content_property.return_value = '<a value="42.3"></a>'
mock_method = mocker.patch("requests.Session.get", return_value=mock_reply)
url = "nourl"
assert XPath("foo", xpath="/a/@value", url=url, timeout=5).check(
datetime.now(timezone.utc)
) == datetime.fromtimestamp(42.3, timezone.utc)
mock_method.assert_called_once_with(url, timeout=5)
content_property.assert_called_once_with()
def test_not_matching(self, mocker) -> None:
mock_reply = mocker.MagicMock()
content_property = mocker.PropertyMock()
type(mock_reply).content = content_property
content_property.return_value = ""
mocker.patch("requests.Session.get", return_value=mock_reply)
assert (
XPath("foo", xpath="/b", url="nourl", timeout=5).check(
datetime.now(timezone.utc)
)
is None
)
def test_not_a_string(self, mocker) -> None:
mock_reply = mocker.MagicMock()
content_property = mocker.PropertyMock()
type(mock_reply).content = content_property
content_property.return_value = ""
mocker.patch("requests.Session.get", return_value=mock_reply)
with pytest.raises(TemporaryCheckError):
XPath("foo", xpath="/a", url="nourl", timeout=5).check(
datetime.now(timezone.utc)
)
def test_not_a_number(self, mocker) -> None:
mock_reply = mocker.MagicMock()
content_property = mocker.PropertyMock()
type(mock_reply).content = content_property
content_property.return_value = '<a value="not-a-number"></a>'  # non-numeric value cannot be converted to a timestamp
mocker.patch("requests.Session.get", return_value=mock_reply)
with pytest.raises(TemporaryCheckError):
XPath("foo", xpath="/a/@value", url="nourl", timeout=5).check(
datetime.now(timezone.utc)
)
def test_multiple_min(self, mocker) -> None:
mock_reply = mocker.MagicMock()
content_property = mocker.PropertyMock()
type(mock_reply).content = content_property
content_property.return_value = """
"""
mocker.patch("requests.Session.get", return_value=mock_reply)
assert XPath("foo", xpath="//a/@value", url="nourl", timeout=5).check(
datetime.now(timezone.utc)
) == datetime.fromtimestamp(10, timezone.utc)
def test_create(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
xpath=/valid
url=nourl
timeout=20
"""
)
check: XPath = XPath.create("name", parser["section"]) # type: ignore
assert check._xpath == "/valid"
class TestXPathDelta(CheckTest):
def create_instance(self, name):
return XPathDelta(name, xpath="/a", url="nourl", timeout=5, unit="days")
@pytest.mark.parametrize(
"unit,factor",
[
("microseconds", 0.000001),
("milliseconds", 0.001),
("seconds", 1),
("minutes", 60),
("hours", 60 * 60),
("days", 60 * 60 * 24),
("weeks", 60 * 60 * 24 * 7),
],
)
def test_smoke(self, mocker, unit, factor) -> None:
mock_reply = mocker.MagicMock()
content_property = mocker.PropertyMock()
type(mock_reply).content = content_property
content_property.return_value = '<a value="42"></a>'
mocker.patch("requests.Session.get", return_value=mock_reply)
url = "nourl"
now = datetime.now(timezone.utc)
result = XPathDelta(
"foo", xpath="/a/@value", url=url, timeout=5, unit=unit
).check(now)
assert result == now + timedelta(seconds=42) * factor
def test_create(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
xpath=/valid
url=nourl
timeout=20
unit=weeks
"""
)
check = XPathDelta.create("name", parser["section"])
assert check._unit == "weeks"
def test_create_wrong_unit(self) -> None:
parser = configparser.ConfigParser()
parser.read_string(
"""
[section]
xpath=/valid
url=nourl
timeout=20
unit=unknown
"""
)
with pytest.raises(ConfigurationError):
XPathDelta.create("name", parser["section"])
def test_init_wrong_unit(self) -> None:
with pytest.raises(ValueError):
XPathDelta("name", url="url", xpath="/a", timeout=5, unit="unknownunit")
autosuspend-3.0/tests/test_checks_wakeup/ 0000775 0000000 0000000 00000000000 13611126124 0020740 5 ustar 00root root 0000000 0000000 autosuspend-3.0/tests/test_checks_wakeup/after-horizon.ics 0000664 0000000 0000000 00000001273 13611126124 0024232 0 ustar 00root root 0000000 0000000 BEGIN:VCALENDAR
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
VERSION:2.0
BEGIN:VTIMEZONE
TZID:Europe/Berlin
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:19700329T020000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:19701025T030000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
END:STANDARD
END:VTIMEZONE
BEGIN:VEVENT
CREATED:20180603T194125Z
LAST-MODIFIED:20180603T194144Z
DTSTAMP:20180603T194144Z
UID:6ff13ee1-e548-41b1-8e08-d7725423743a
SUMMARY:long-event
DTSTART;TZID=Europe/Berlin:20040618T000000
DTEND;TZID=Europe/Berlin:20040618T150000
TRANSP:OPAQUE
SEQUENCE:1
END:VEVENT
END:VCALENDAR
autosuspend-3.0/tests/test_checks_wakeup/before-horizon.ics 0000664 0000000 0000000 00000001273 13611126124 0024373 0 ustar 00root root 0000000 0000000 BEGIN:VCALENDAR
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
VERSION:2.0
BEGIN:VTIMEZONE
TZID:Europe/Berlin
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:19700329T020000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:19701025T030000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
END:STANDARD
END:VTIMEZONE
BEGIN:VEVENT
CREATED:20180603T194125Z
LAST-MODIFIED:20180603T194144Z
DTSTAMP:20180603T194144Z
UID:6ff13ee1-e548-41b1-8e08-d7725423743a
SUMMARY:long-event
DTSTART;TZID=Europe/Berlin:20040617T000000
DTEND;TZID=Europe/Berlin:20040617T150000
TRANSP:OPAQUE
SEQUENCE:1
END:VEVENT
END:VCALENDAR
autosuspend-3.0/tests/test_checks_wakeup/multiple.ics 0000664 0000000 0000000 00000001716 13611126124 0023300 0 ustar 00root root 0000000 0000000 BEGIN:VCALENDAR
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
VERSION:2.0
BEGIN:VTIMEZONE
TZID:Europe/Berlin
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:19700329T020000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:19701025T030000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
END:STANDARD
END:VTIMEZONE
BEGIN:VEVENT
CREATED:20180603T194125Z
LAST-MODIFIED:20180603T194144Z
DTSTAMP:20180603T194144Z
UID:6ff13ee1-e548-41b1-8e08-d7725423743a
SUMMARY:long-event
DTSTART;TZID=Europe/Berlin:20040605T130000
DTEND;TZID=Europe/Berlin:20040605T150000
TRANSP:OPAQUE
SEQUENCE:1
END:VEVENT
BEGIN:VEVENT
CREATED:20180403T194125Z
LAST-MODIFIED:20180403T194144Z
DTSTAMP:20180403T194144Z
UID:6ff13ee1-e548-41b1-8e08-d7725423743b
SUMMARY:early-event
DTSTART;TZID=Europe/Berlin:20040405T130000
DTEND;TZID=Europe/Berlin:20040405T150000
TRANSP:OPAQUE
SEQUENCE:1
END:VEVENT
END:VCALENDAR
autosuspend-3.0/tests/test_checks_wakeup/old-event.ics 0000664 0000000 0000000 00000001273 13611126124 0023340 0 ustar 00root root 0000000 0000000 BEGIN:VCALENDAR
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
VERSION:2.0
BEGIN:VTIMEZONE
TZID:Europe/Berlin
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:19700329T020000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:19701025T030000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
END:STANDARD
END:VTIMEZONE
BEGIN:VEVENT
CREATED:20180603T194125Z
LAST-MODIFIED:20180603T194144Z
DTSTAMP:20180603T194144Z
UID:6ff13ee1-e548-41b1-8e08-d7725423743a
SUMMARY:long-event
DTSTART;TZID=Europe/Berlin:20040605T130000
DTEND;TZID=Europe/Berlin:20040605T150000
TRANSP:OPAQUE
SEQUENCE:1
END:VEVENT
END:VCALENDAR
autosuspend-3.0/tests/test_integration.py 0000664 0000000 0000000 00000014204 13611126124 0021022 0 ustar 00root root 0000000 0000000 import datetime
import logging
import os
import os.path
from freezegun import freeze_time
import pytest
import autosuspend
pytestmark = pytest.mark.integration
ROOT = os.path.dirname(os.path.realpath(__file__))
SUSPENSION_FILE = "would_suspend"
SCHEDULED_FILE = "wakeup_at"
WOKE_UP_FILE = "test-woke-up"
LOCK_FILE = "test-woke-up.lock"
NOTIFY_FILE = "notify"
def configure_config(config, datadir, tmpdir):
out_path = tmpdir.join(config)
with out_path.open("w") as out_config:
out_config.write(
(datadir / config).read_text().replace("@TMPDIR@", tmpdir.strpath),
)
return out_path
@pytest.fixture
def rapid_sleep(mocker):
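# replace time.sleep so that each call merely advances the frozen clock, letting daemon iterations complete without real delays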
with freeze_time() as frozen_time:
sleep_mock = mocker.patch("time.sleep")
sleep_mock.side_effect = lambda seconds: frozen_time.tick(
datetime.timedelta(seconds=seconds)
)
yield frozen_time
def test_no_suspend_if_matching(datadir, tmpdir, rapid_sleep) -> None:
autosuspend.main(
[
"-c",
configure_config("dont_suspend.conf", datadir, tmpdir).strpath,
"-d",
"daemon",
"-r",
"10",
]
)
assert not tmpdir.join(SUSPENSION_FILE).check()
def test_suspend(tmpdir, datadir, rapid_sleep) -> None:
autosuspend.main(
[
"-c",
configure_config("would_suspend.conf", datadir, tmpdir).strpath,
"-d",
"daemon",
"-r",
"10",
]
)
assert tmpdir.join(SUSPENSION_FILE).check()
def test_wakeup_scheduled(tmpdir, datadir, rapid_sleep) -> None:
# configure when to wake up
now = datetime.datetime.now(datetime.timezone.utc)
wakeup_at = now + datetime.timedelta(hours=4)
with tmpdir.join("wakeup_time").open("w") as out:
out.write(str(wakeup_at.timestamp()))
autosuspend.main(
[
"-c",
configure_config("would_schedule.conf", datadir, tmpdir).strpath,
"-d",
"daemon",
"-r",
"10",
]
)
assert tmpdir.join(SUSPENSION_FILE).check()
assert tmpdir.join(SCHEDULED_FILE).check()
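# would_schedule.conf does not set wakeup_delta, so the default of 30 seconds is subtracted from the configured wake-up time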
assert int(tmpdir.join(SCHEDULED_FILE).read()) == int(
round((wakeup_at - datetime.timedelta(seconds=30)).timestamp())
)
def test_woke_up_file_removed(tmpdir, datadir, rapid_sleep) -> None:
tmpdir.join(WOKE_UP_FILE).ensure()
autosuspend.main(
[
"-c",
configure_config("dont_suspend.conf", datadir, tmpdir).strpath,
"-d",
"daemon",
"-r",
"5",
]
)
assert not tmpdir.join(WOKE_UP_FILE).check()
def test_notify_call(tmpdir, datadir, rapid_sleep) -> None:
autosuspend.main(
[
"-c",
configure_config("notify.conf", datadir, tmpdir).strpath,
"-d",
"daemon",
"-r",
"10",
]
)
assert tmpdir.join(SUSPENSION_FILE).check()
assert tmpdir.join(NOTIFY_FILE).check()
assert len(tmpdir.join(NOTIFY_FILE).read()) == 0
def test_notify_call_wakeup(tmpdir, datadir, rapid_sleep) -> None:
# configure when to wake up
now = datetime.datetime.now(datetime.timezone.utc)
wakeup_at = now + datetime.timedelta(hours=4)
with tmpdir.join("wakeup_time").open("w") as out:
out.write(str(wakeup_at.timestamp()))
autosuspend.main(
[
"-c",
configure_config("notify_wakeup.conf", datadir, tmpdir).strpath,
"-d",
"daemon",
"-r",
"10",
]
)
assert tmpdir.join(SUSPENSION_FILE).check()
assert tmpdir.join(NOTIFY_FILE).check()
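# notify_wakeup.conf sets wakeup_delta = 10, so the notified timestamp is 10 seconds before the configured wake-up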
assert int(tmpdir.join(NOTIFY_FILE).read()) == int(
round((wakeup_at - datetime.timedelta(seconds=10)).timestamp())
)
def test_error_no_checks_configured(tmpdir, datadir) -> None:
with pytest.raises(autosuspend.ConfigurationError):
autosuspend.main(
[
"-c",
configure_config("no_checks.conf", datadir, tmpdir).strpath,
"-d",
"daemon",
"-r",
"10",
]
)
def test_temporary_errors_logged(tmpdir, datadir, rapid_sleep, caplog) -> None:
autosuspend.main(
[
"-c",
configure_config("temporary_error.conf", datadir, tmpdir).strpath,
"-d",
"daemon",
"-r",
"10",
]
)
warnings = [
r
for r in caplog.record_tuples
if r[1] == logging.WARNING and "XPath" in r[2] and "failed" in r[2]
]
assert len(warnings) > 0
def test_loop_defaults(tmpdir, datadir, mocker) -> None:
loop = mocker.patch("autosuspend.loop")
loop.side_effect = StopIteration
with pytest.raises(StopIteration):
autosuspend.main(
[
"-c",
configure_config("minimal.conf", datadir, tmpdir).strpath,
"-d",
"daemon",
"-r",
"10",
]
)
args, kwargs = loop.call_args
assert args[1] == 60
assert kwargs["run_for"] == 10
assert kwargs["woke_up_file"] == ("/var/run/autosuspend-just-woke-up")
def test_hook_success(tmpdir, datadir):
autosuspend.main(
[
"-c",
configure_config("would_suspend.conf", datadir, tmpdir).strpath,
"-d",
"presuspend",
]
)
assert tmpdir.join(WOKE_UP_FILE).check()
def test_hook_call_wakeup(tmpdir, datadir):
# configure when to wake up
now = datetime.datetime.now(datetime.timezone.utc)
wakeup_at = now + datetime.timedelta(hours=4)
with tmpdir.join("wakeup_time").open("w") as out:
out.write(str(wakeup_at.timestamp()))
autosuspend.main(
[
"-c",
configure_config("would_schedule.conf", datadir, tmpdir).strpath,
"-d",
"presuspend",
]
)
assert tmpdir.join(SCHEDULED_FILE).check()
assert int(tmpdir.join(SCHEDULED_FILE).read()) == int(
round((wakeup_at - datetime.timedelta(seconds=30)).timestamp())
)
autosuspend-3.0/tests/test_integration/ 0000775 0000000 0000000 00000000000 13611126124 0020447 5 ustar 00root root 0000000 0000000 autosuspend-3.0/tests/test_integration/dont_suspend.conf 0000664 0000000 0000000 00000000410 13611126124 0024016 0 ustar 00root root 0000000 0000000 [general]
interval = 2
idle_time = 5
suspend_cmd = touch @TMPDIR@/would_suspend
wakeup_cmd = echo {timestamp:d} > @TMPDIR@/wakeup_at
woke_up_file = @TMPDIR@/test-woke-up
lock_file = @TMPDIR@/test-woke-up.lock
[check.ExternalCommand]
enabled = True
command = true
autosuspend-3.0/tests/test_integration/minimal.conf 0000664 0000000 0000000 00000000242 13611126124 0022742 0 ustar 00root root 0000000 0000000 [general]
suspend_cmd = touch @TMPDIR@/would_suspend
wakeup_cmd = echo {timestamp:d} > @TMPDIR@/wakeup_at
[check.ExternalCommand]
enabled = True
command = false
autosuspend-3.0/tests/test_integration/no_checks.conf 0000664 0000000 0000000 00000000350 13611126124 0023250 0 ustar 00root root 0000000 0000000 [general]
interval = 2
idle_time = 5
suspend_cmd = touch @TMPDIR@/would_suspend
wakeup_cmd = echo {timestamp:d} > @TMPDIR@/wakeup_at
woke_up_file = @TMPDIR@/test-woke-up
[check.ExternalCommand]
# lacks enabled=True
command = false
autosuspend-3.0/tests/test_integration/notify.conf 0000664 0000000 0000000 00000000563 13611126124 0022632 0 ustar 00root root 0000000 0000000 [general]
interval = 2
idle_time = 5
suspend_cmd = touch @TMPDIR@/would_suspend
wakeup_cmd = echo {timestamp:.0f} > @TMPDIR@/wakeup_at
notify_cmd_wakeup = echo {timestamp:.0f} > @TMPDIR@/notify
notify_cmd_no_wakeup = touch @TMPDIR@/notify
woke_up_file = @TMPDIR@/test-woke-up
lock_file = @TMPDIR@/test-woke-up.lock
[check.ExternalCommand]
enabled = True
command = false
autosuspend-3.0/tests/test_integration/notify_wakeup.conf 0000664 0000000 0000000 00000000677 13611126124 0024214 0 ustar 00root root 0000000 0000000 [general]
interval = 2
idle_time = 5
suspend_cmd = touch @TMPDIR@/would_suspend
wakeup_cmd = echo {timestamp:.0f} > @TMPDIR@/wakeup_at
notify_cmd_wakeup = echo {timestamp:.0f} > @TMPDIR@/notify
notify_cmd_no_wakeup = touch @TMPDIR@/notify
woke_up_file = @TMPDIR@/test-woke-up
lock_file = @TMPDIR@/test-woke-up.lock
wakeup_delta = 10
[check.ExternalCommand]
enabled = True
command = false
[wakeup.File]
enabled = True
path = @TMPDIR@/wakeup_time
autosuspend-3.0/tests/test_integration/temporary_error.conf 0000664 0000000 0000000 00000000435 13611126124 0024553 0 ustar 00root root 0000000 0000000 [general]
interval = 20
idle_time = 50
suspend_cmd = touch @TMPDIR@/would_suspend
wakeup_cmd = echo {timestamp:d} > @TMPDIR@/wakeup_at
woke_up_file = @TMPDIR@/test-woke-up
lock_file = @TMPDIR@/test-woke-up.lock
[check.XPath]
enabled = True
xpath = /a
url = asdfjlkasdjkfkasdlfjaklsdf
autosuspend-3.0/tests/test_integration/would_schedule.conf 0000664 0000000 0000000 00000000505 13611126124 0024324 0 ustar 00root root 0000000 0000000 [general]
interval = 2
idle_time = 5
suspend_cmd = touch @TMPDIR@/would_suspend
wakeup_cmd = echo {timestamp:.0f} > @TMPDIR@/wakeup_at
woke_up_file = @TMPDIR@/test-woke-up
lock_file = @TMPDIR@/test-woke-up.lock
[check.ExternalCommand]
enabled = True
command = false
[wakeup.File]
enabled = True
path = @TMPDIR@/wakeup_time
autosuspend-3.0/tests/test_integration/would_suspend.conf 0000664 0000000 0000000 00000000411 13611126124 0024205 0 ustar 00root root 0000000 0000000 [general]
interval = 2
idle_time = 5
suspend_cmd = touch @TMPDIR@/would_suspend
wakeup_cmd = echo {timestamp:d} > @TMPDIR@/wakeup_at
woke_up_file = @TMPDIR@/test-woke-up
lock_file = @TMPDIR@/test-woke-up.lock
[check.ExternalCommand]
enabled = True
command = false
autosuspend-3.0/tests/test_util.py 0000664 0000000 0000000 00000001576 13611126124 0017464 0 ustar 00root root 0000000 0000000 from autosuspend.util import logger_by_class, logger_by_class_instance
class DummyClass:
pass
class TestLoggerByClass:
def test_smoke(self) -> None:
logger = logger_by_class(DummyClass)
assert logger is not None
assert logger.name == "tests.test_util.DummyClass"
def test_name(self) -> None:
logger = logger_by_class(DummyClass, "foo")
assert logger is not None
assert logger.name == "tests.test_util.DummyClass.foo"
class TestLoggerByClassInstance:
def test_smoke(self) -> None:
logger = logger_by_class_instance(DummyClass())
assert logger is not None
assert logger.name == "tests.test_util.DummyClass"
def test_name(self) -> None:
logger = logger_by_class_instance(DummyClass(), "foo")
assert logger is not None
assert logger.name == "tests.test_util.DummyClass.foo"
autosuspend-3.0/tests/test_util_ical.py 0000664 0000000 0000000 00000037745 13611126124 0020463 0 ustar 00root root 0000000 0000000 from datetime import timedelta
from dateutil import parser
from dateutil.tz import tzlocal
from autosuspend.util.ical import CalendarEvent, list_calendar_events
class TestCalendarEvent:
def test_str(self) -> None:
start = parser.parse("2018-06-11 02:00:00 UTC")
end = start + timedelta(hours=1)
event = CalendarEvent("summary", start, end)
assert "summary" in str(event)
class TestListCalendarEvents:
def test_simple_recurring(self, datadir) -> None:
"""Tests for basic recurrence.
Events are collected with the same DST setting that was in effect at their
original creation time.
"""
with (datadir / "simple-recurring.ics").open("rb") as f:
start = parser.parse("2018-06-18 04:00:00 UTC")
end = start + timedelta(weeks=2)
events = list_calendar_events(f, start, end)
expected_start_times = [
parser.parse("2018-06-18 07:00:00 UTC"),
parser.parse("2018-06-19 07:00:00 UTC"),
parser.parse("2018-06-20 07:00:00 UTC"),
parser.parse("2018-06-21 07:00:00 UTC"),
parser.parse("2018-06-22 07:00:00 UTC"),
parser.parse("2018-06-25 07:00:00 UTC"),
parser.parse("2018-06-26 07:00:00 UTC"),
parser.parse("2018-06-27 07:00:00 UTC"),
parser.parse("2018-06-28 07:00:00 UTC"),
parser.parse("2018-06-29 07:00:00 UTC"),
]
expected_end_times = [
parser.parse("2018-06-18 16:00:00 UTC"),
parser.parse("2018-06-19 16:00:00 UTC"),
parser.parse("2018-06-20 16:00:00 UTC"),
parser.parse("2018-06-21 16:00:00 UTC"),
parser.parse("2018-06-22 16:00:00 UTC"),
parser.parse("2018-06-25 16:00:00 UTC"),
parser.parse("2018-06-26 16:00:00 UTC"),
parser.parse("2018-06-27 16:00:00 UTC"),
parser.parse("2018-06-28 16:00:00 UTC"),
parser.parse("2018-06-29 16:00:00 UTC"),
]
assert expected_start_times == [e.start for e in events]
assert expected_end_times == [e.end for e in events]
def test_recurrence_different_dst(self, datadir) -> None:
with (datadir / "simple-recurring.ics").open("rb") as f:
start = parser.parse("2018-11-19 04:00:00 UTC")
end = start + timedelta(weeks=2)
events = list_calendar_events(f, start, end)
expected_start_times = [
parser.parse("2018-11-19 08:00:00 UTC"),
parser.parse("2018-11-20 08:00:00 UTC"),
parser.parse("2018-11-21 08:00:00 UTC"),
parser.parse("2018-11-22 08:00:00 UTC"),
parser.parse("2018-11-23 08:00:00 UTC"),
parser.parse("2018-11-26 08:00:00 UTC"),
parser.parse("2018-11-27 08:00:00 UTC"),
parser.parse("2018-11-28 08:00:00 UTC"),
parser.parse("2018-11-29 08:00:00 UTC"),
parser.parse("2018-11-30 08:00:00 UTC"),
]
assert expected_start_times == [e.start for e in events]
def test_all_day_events(self, datadir) -> None:
with (datadir / "all-day-events.ics").open("rb") as f:
start = parser.parse("2018-06-11 02:00:00 UTC")
end = start + timedelta(weeks=1)
events = list_calendar_events(f, start, end)
assert len(events) == 3
expected_summaries = ["start", "between", "end"]
assert [e.summary for e in events] == expected_summaries
def test_normal_events(self, datadir) -> None:
with (datadir / "normal-events-corner-cases.ics").open("rb") as f:
start = parser.parse("2018-06-04 00:00:00 +0200")
end = start + timedelta(weeks=1)
events = list_calendar_events(f, start, end)
expected = [
(
"overlapping",
parser.parse("2018-06-02 20:00:00 +0200"),
parser.parse("2018-06-12 23:00:00 +0200"),
),
(
"before include",
parser.parse("2018-06-03 21:00:00 +0200"),
parser.parse("2018-06-04 02:00:00 +0200"),
),
(
"direct start",
parser.parse("2018-06-04 00:00:00 +0200"),
parser.parse("2018-06-04 03:00:00 +0200"),
),
(
"in between",
parser.parse("2018-06-07 04:00:00 +0200"),
parser.parse("2018-06-07 09:00:00 +0200"),
),
(
"end overlap",
parser.parse("2018-06-10 21:00:00 +0200"),
parser.parse("2018-06-11 02:00:00 +0200"),
),
(
"direct end",
parser.parse("2018-06-10 22:00:00 +0200"),
parser.parse("2018-06-11 00:00:00 +0200"),
),
]
assert [(e.summary, e.start, e.end) for e in events] == expected
def test_floating_time(self, datadir) -> None:
with (datadir / "floating.ics").open("rb") as f:
start = parser.parse("2018-06-09 00:00:00 +0200")
end = start + timedelta(weeks=1)
events = list_calendar_events(f, start, end)
tzinfo = {"LOCAL": tzlocal()}
expected = [
(
"floating",
parser.parse("2018-06-10 15:00:00 LOCAL", tzinfos=tzinfo),
parser.parse("2018-06-10 17:00:00 LOCAL", tzinfos=tzinfo),
),
(
"floating recurring",
parser.parse("2018-06-12 18:00:00 LOCAL", tzinfos=tzinfo),
parser.parse("2018-06-12 20:00:00 LOCAL", tzinfos=tzinfo),
),
(
"floating recurring",
parser.parse("2018-06-13 18:00:00 LOCAL", tzinfos=tzinfo),
parser.parse("2018-06-13 20:00:00 LOCAL", tzinfos=tzinfo),
),
(
"floating recurring",
parser.parse("2018-06-14 18:00:00 LOCAL", tzinfos=tzinfo),
parser.parse("2018-06-14 20:00:00 LOCAL", tzinfos=tzinfo),
),
(
"floating recurring",
parser.parse("2018-06-15 18:00:00 LOCAL", tzinfos=tzinfo),
parser.parse("2018-06-15 20:00:00 LOCAL", tzinfos=tzinfo),
),
]
assert [(e.summary, e.start, e.end) for e in events] == expected
def test_floating_time_other_dst(self, datadir) -> None:
with (datadir / "floating.ics").open("rb") as f:
start = parser.parse("2018-12-09 00:00:00 +0200")
end = start + timedelta(weeks=1)
events = list_calendar_events(f, start, end)
tzinfo = {"LOCAL": tzlocal()}
expected = [
(
"floating recurring",
parser.parse("2018-12-09 18:00:00 LOCAL", tzinfos=tzinfo),
parser.parse("2018-12-09 20:00:00 LOCAL", tzinfos=tzinfo),
),
(
"floating recurring",
parser.parse("2018-12-10 18:00:00 LOCAL", tzinfos=tzinfo),
parser.parse("2018-12-10 20:00:00 LOCAL", tzinfos=tzinfo),
),
(
"floating recurring",
parser.parse("2018-12-11 18:00:00 LOCAL", tzinfos=tzinfo),
parser.parse("2018-12-11 20:00:00 LOCAL", tzinfos=tzinfo),
),
(
"floating recurring",
parser.parse("2018-12-12 18:00:00 LOCAL", tzinfos=tzinfo),
parser.parse("2018-12-12 20:00:00 LOCAL", tzinfos=tzinfo),
),
(
"floating recurring",
parser.parse("2018-12-13 18:00:00 LOCAL", tzinfos=tzinfo),
parser.parse("2018-12-13 20:00:00 LOCAL", tzinfos=tzinfo),
),
(
"floating recurring",
parser.parse("2018-12-14 18:00:00 LOCAL", tzinfos=tzinfo),
parser.parse("2018-12-14 20:00:00 LOCAL", tzinfos=tzinfo),
),
(
"floating recurring",
parser.parse("2018-12-15 18:00:00 LOCAL", tzinfos=tzinfo),
parser.parse("2018-12-15 20:00:00 LOCAL", tzinfos=tzinfo),
),
]
assert [(e.summary, e.start, e.end) for e in events] == expected
def test_exclusions(self, datadir) -> None:
with (datadir / "exclusions.ics").open("rb") as f:
start = parser.parse("2018-06-09 04:00:00 UTC")
end = start + timedelta(weeks=2)
events = list_calendar_events(f, start, end)
expected_start_times = [
parser.parse("2018-06-11 12:00:00 UTC"),
parser.parse("2018-06-12 12:00:00 UTC"),
parser.parse("2018-06-13 12:00:00 UTC"),
parser.parse("2018-06-15 12:00:00 UTC"),
parser.parse("2018-06-16 12:00:00 UTC"),
parser.parse("2018-06-17 12:00:00 UTC"),
]
assert expected_start_times == [e.start for e in events]
def test_recurring_single_changes(self, datadir) -> None:
with (datadir / "single-change.ics").open("rb") as f:
start = parser.parse("2018-06-11 00:00:00 UTC")
end = start + timedelta(weeks=1)
events = list_calendar_events(f, start, end)
expected_start_times = [
parser.parse("2018-06-11 11:00:00 UTC"),
parser.parse("2018-06-12 11:00:00 UTC"),
parser.parse("2018-06-13 14:00:00 UTC"),
parser.parse("2018-06-14 11:00:00 UTC"),
parser.parse("2018-06-15 09:00:00 UTC"),
parser.parse("2018-06-16 11:00:00 UTC"),
parser.parse("2018-06-17 11:00:00 UTC"),
]
assert expected_start_times == [e.start for e in events]
def test_recurring_change_dst(self, datadir) -> None:
with (datadir / "recurring-change-dst.ics").open("rb") as f:
start = parser.parse("2018-12-10 00:00:00 UTC")
end = start + timedelta(weeks=1)
events = list_calendar_events(f, start, end)
expected_start_times = [
parser.parse("2018-12-10 13:00:00 UTC"),
parser.parse("2018-12-11 13:00:00 UTC"),
parser.parse("2018-12-12 10:00:00 UTC"),
parser.parse("2018-12-13 13:00:00 UTC"),
parser.parse("2018-12-15 13:00:00 UTC"),
parser.parse("2018-12-16 13:00:00 UTC"),
]
assert expected_start_times == [e.start for e in events]
def test_recurring_start_and_end_inclusive(self, datadir) -> None:
with (datadir / "issue-41.ics").open("rb") as f:
start = parser.parse("2018-06-26 15:13:51 UTC")
end = start + timedelta(weeks=1)
events = list_calendar_events(f, start, end)
expected_start_times = [
parser.parse("2018-06-26 15:00:00 UTC"),
parser.parse("2018-06-27 15:00:00 UTC"),
parser.parse("2018-06-28 15:00:00 UTC"),
parser.parse("2018-06-29 15:00:00 UTC"),
parser.parse("2018-06-30 15:00:00 UTC"),
parser.parse("2018-07-01 15:00:00 UTC"),
parser.parse("2018-07-02 15:00:00 UTC"),
parser.parse("2018-07-03 15:00:00 UTC"),
]
assert expected_start_times == [e.start for e in events]
def test_single_start_end_inclusive(self, datadir) -> None:
with (datadir / "old-event.ics").open("rb") as f:
start = parser.parse("2004-06-05 11:15:00 UTC")
end = start + timedelta(hours=1)
events = list_calendar_events(f, start, end)
expected_start_times = [
parser.parse("2004-06-05 11:00:00 UTC"),
]
assert expected_start_times == [e.start for e in events]
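# The following tests cover all-day events whose DTSTART/DTEND are plain dates,
# so start and end are compared as dates instead of datetimes.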
def test_single_all_day_start_end_inclusive(self, datadir) -> None:
with (datadir / "all-day-starts.ics").open("rb") as f:
start = parser.parse("2018-06-25 10:00:00 UTC")
end = start + timedelta(hours=2)
events = list_calendar_events(f, start, end)
expected_start_times = [
parser.parse("2018-06-25 02:00:00 UTC").date(),
]
assert expected_start_times == [e.start for e in events]
expected_end_times = [
parser.parse("2018-06-26 02:00:00 UTC").date(),
]
assert expected_end_times == [e.end for e in events]
def test_longer_single_all_day_start_end_inclusive(self, datadir) -> None:
with (datadir / "all-day-starts.ics").open("rb") as f:
start = parser.parse("2018-06-29 10:00:00 UTC")
end = start + timedelta(hours=2)
events = list_calendar_events(f, start, end)
expected_start_times = [
parser.parse("2018-06-28 02:00:00 UTC").date(),
]
assert expected_start_times == [e.start for e in events]
def test_recurring_all_day_start_end_inclusive(self, datadir) -> None:
with (datadir / "all-day-recurring.ics").open("rb") as f:
start = parser.parse("2018-06-29 10:00:00 UTC")
end = start + timedelta(hours=2)
events = list_calendar_events(f, start, end)
expected_start_times = [
parser.parse("2018-06-29 02:00:00 UTC").date(),
]
assert expected_start_times == [e.start for e in events]
expected_end_times = [
parser.parse("2018-06-30 02:00:00 UTC").date(),
]
assert expected_end_times == [e.end for e in events]
def test_recurring_all_day_start_in_between(self, datadir) -> None:
with (datadir / "all-day-recurring.ics").open("rb") as f:
start = parser.parse("2018-06-29 00:00:00 UTC")
end = start + timedelta(days=1)
events = list_calendar_events(f, start, end)
expected_start_times = [
parser.parse("2018-06-29 00:00:00 UTC").date(),
parser.parse("2018-06-30 00:00:00 UTC").date(),
]
assert expected_start_times == [e.start for e in events]
def test_recurring_all_day_exclusions(self, datadir) -> None:
with (datadir / "all-day-recurring-exclusions.ics").open("rb") as f:
start = parser.parse("2018-06-27 00:00:00 UTC")
end = start + timedelta(days=4)
events = list_calendar_events(f, start, end)
expected_start_times = [
parser.parse("2018-06-27 00:00:00 UTC").date(),
parser.parse("2018-06-28 00:00:00 UTC").date(),
parser.parse("2018-06-29 00:00:00 UTC").date(),
parser.parse("2018-07-01 00:00:00 UTC").date(),
]
assert expected_start_times == [e.start for e in events]
def test_recurring_all_day_exclusions_end(self, datadir) -> None:
with (datadir / "all-day-recurring-exclusions.ics").open("rb") as f:
start = parser.parse("2018-06-26 00:00:00 UTC")
end = start + timedelta(days=4)
events = list_calendar_events(f, start, end)
expected_start_times = [
parser.parse("2018-06-26 00:00:00 UTC").date(),
parser.parse("2018-06-27 00:00:00 UTC").date(),
parser.parse("2018-06-28 00:00:00 UTC").date(),
parser.parse("2018-06-29 00:00:00 UTC").date(),
]
assert expected_start_times == [e.start for e in events]
autosuspend-3.0/tests/test_util_ical/ 0000775 0000000 0000000 00000000000 13611126124 0020071 5 ustar 00root root 0000000 0000000 autosuspend-3.0/tests/test_util_ical/all-day-events.ics 0000664 0000000 0000000 00000002426 13611126124 0023422 0 ustar 00root root 0000000 0000000 BEGIN:VCALENDAR
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
VERSION:2.0
BEGIN:VEVENT
CREATED:20180601T194043Z
LAST-MODIFIED:20180601T194050Z
DTSTAMP:20180601T194050Z
UID:0f82aa78-1478-4093-85c5-16d754f362f6
SUMMARY:between
DTSTART;VALUE=DATE:20180613
DTEND;VALUE=DATE:20180615
TRANSP:TRANSPARENT
END:VEVENT
BEGIN:VEVENT
CREATED:20180601T194002Z
LAST-MODIFIED:20180601T194303Z
DTSTAMP:20180601T194303Z
UID:630f3b71-865e-4125-977d-a2fd0009ce7d
SUMMARY:start
DTSTART;VALUE=DATE:20180609
DTEND;VALUE=DATE:20180612
TRANSP:TRANSPARENT
X-MOZ-GENERATION:1
END:VEVENT
BEGIN:VEVENT
CREATED:20180601T194054Z
LAST-MODIFIED:20180601T194307Z
DTSTAMP:20180601T194307Z
UID:dc1c0bfc-633c-4d34-8de4-f6e9bcdb5fc6
SUMMARY:end
DTSTART;VALUE=DATE:20180617
DTEND;VALUE=DATE:20180620
TRANSP:TRANSPARENT
X-MOZ-GENERATION:1
END:VEVENT
BEGIN:VEVENT
CREATED:20180601T194313Z
LAST-MODIFIED:20180601T194317Z
DTSTAMP:20180601T194317Z
UID:5095407e-5e63-4609-93a0-5dcd45ed5bf5
SUMMARY:after
DTSTART;VALUE=DATE:20180619
DTEND;VALUE=DATE:20180620
TRANSP:TRANSPARENT
END:VEVENT
BEGIN:VEVENT
CREATED:20180601T195811Z
LAST-MODIFIED:20180601T195814Z
DTSTAMP:20180601T195814Z
UID:550119de-eef7-4820-9843-d260515807d2
SUMMARY:before
DTSTART;VALUE=DATE:20180605
DTEND;VALUE=DATE:20180606
TRANSP:TRANSPARENT
END:VEVENT
END:VCALENDAR
autosuspend-3.0/tests/test_util_ical/all-day-recurring-exclusions.ics 0000664 0000000 0000000 00000000605 13611126124 0026305 0 ustar 00root root 0000000 0000000 BEGIN:VCALENDAR
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
VERSION:2.0
BEGIN:VEVENT
CREATED:20180627T111330Z
LAST-MODIFIED:20180627T111340Z
DTSTAMP:20180627T111340Z
UID:ccf1c6b9-44c4-4fdb-8a98-0165e6f2e369
SUMMARY:single all day
DTSTART;VALUE=DATE:20180625
DTEND;VALUE=DATE:20180626
EXDATE:20180630
RRULE:FREQ=DAILY
TRANSP:TRANSPARENT
END:VEVENT
END:VCALENDAR
autosuspend-3.0/tests/test_util_ical/all-day-recurring.ics 0000664 0000000 0000000 00000000564 13611126124 0024117 0 ustar 00root root 0000000 0000000 BEGIN:VCALENDAR
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
VERSION:2.0
BEGIN:VEVENT
CREATED:20180627T111330Z
LAST-MODIFIED:20180627T111340Z
DTSTAMP:20180627T111340Z
UID:ccf1c6b9-44c4-4fdb-8a98-0165e6f2e369
SUMMARY:single all day
DTSTART;VALUE=DATE:20180625
DTEND;VALUE=DATE:20180626
RRULE:FREQ=DAILY
TRANSP:TRANSPARENT
END:VEVENT
END:VCALENDAR
autosuspend-3.0/tests/test_util_ical/all-day-starts.ics 0000664 0000000 0000000 00000001316 13611126124 0023433 0 ustar 00root root 0000000 0000000 BEGIN:VCALENDAR
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
VERSION:2.0
BEGIN:VEVENT
CREATED:20180627T111330Z
LAST-MODIFIED:20180627T111340Z
DTSTAMP:20180627T111340Z
UID:ccf1c6b9-44c4-4fdb-8a98-0165e6f2e369
SUMMARY:single all day
DTSTART;VALUE=DATE:20180625
DTEND;VALUE=DATE:20180626
TRANSP:TRANSPARENT
END:VEVENT
BEGIN:VEVENT
CREATED:20180627T111347Z
LAST-MODIFIED:20180627T111357Z
DTSTAMP:20180627T111357Z
UID:a2dab4dd-1ede-4733-af8e-90cff0e26f79
SUMMARY:two all days
DTSTART;VALUE=DATE:20180628
DTEND;VALUE=DATE:20180630
TRANSP:TRANSPARENT
BEGIN:VALARM
ACTION:DISPLAY
TRIGGER;VALUE=DURATION:-PT15M
DESCRIPTION:Default Mozilla Description
END:VALARM
END:VEVENT
END:VCALENDAR
autosuspend-3.0/tests/test_util_ical/exclusions.ics 0000664 0000000 0000000 00000001415 13611126124 0022766 0 ustar 00root root 0000000 0000000 BEGIN:VCALENDAR
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
VERSION:2.0
BEGIN:VTIMEZONE
TZID:Europe/Berlin
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:19700329T020000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:19701025T030000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
END:STANDARD
END:VTIMEZONE
BEGIN:VEVENT
CREATED:20180602T160606Z
LAST-MODIFIED:20180602T160632Z
DTSTAMP:20180602T160632Z
UID:a40c5b76-e3f5-4259-92f5-26692f99f131
SUMMARY:recurring
RRULE:FREQ=DAILY;UNTIL=20180617T120000Z
EXDATE:20180614T120000Z
DTSTART;TZID=Europe/Berlin:20180611T140000
DTEND;TZID=Europe/Berlin:20180611T160000
TRANSP:OPAQUE
X-MOZ-GENERATION:4
SEQUENCE:2
END:VEVENT
END:VCALENDAR
autosuspend-3.0/tests/test_util_ical/floating.ics 0000664 0000000 0000000 00000001167 13611126124 0022401 0 ustar 00root root 0000000 0000000 BEGIN:VCALENDAR
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
VERSION:2.0
BEGIN:VEVENT
CREATED:20180602T151629Z
LAST-MODIFIED:20180602T152512Z
DTSTAMP:20180602T152512Z
UID:f0028400-24e2-4f10-81a0-032372781443
SUMMARY:floating
DTSTART:20180610T150000
DTEND:20180610T170000
TRANSP:OPAQUE
SEQUENCE:5
X-MOZ-GENERATION:3
END:VEVENT
BEGIN:VEVENT
CREATED:20180602T151701Z
LAST-MODIFIED:20180602T152732Z
DTSTAMP:20180602T152732Z
UID:0ef23894-702e-40ac-ab09-94fa8c9c51fd
SUMMARY:floating recurring
RRULE:FREQ=DAILY
DTSTART:20180612T180000
DTEND:20180612T200000
TRANSP:OPAQUE
X-MOZ-GENERATION:5
SEQUENCE:3
END:VEVENT
END:VCALENDAR
autosuspend-3.0/tests/test_util_ical/issue-41.ics 0000664 0000000 0000000 00000001401 13611126124 0022137 0 ustar 00root root 0000000 0000000 BEGIN:VCALENDAR
PRODID:-//Inverse inc./SOGo 4.0.0//EN
VERSION:2.0
BEGIN:VTIMEZONE
TZID:Europe/Berlin
X-LIC-LOCATION:Europe/Berlin
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:19700329T020000
RRULE:FREQ=YEARLY;BYMONTH=3;BYDAY=-1SU
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:19701025T030000
RRULE:FREQ=YEARLY;BYMONTH=10;BYDAY=-1SU
END:STANDARD
END:VTIMEZONE
BEGIN:VEVENT
UID:2C-5B315480-3-4D014C80
SUMMARY:StayAlive
LOCATION:Home
CLASS:PUBLIC
X-SOGO-SEND-APPOINTMENT-NOTIFICATIONS:NO
RRULE:FREQ=DAILY
TRANSP:OPAQUE
DTSTART;TZID=Europe/Berlin:20180626T170000
DTEND;TZID=Europe/Berlin:20180626T210000
CREATED:20180625T204700Z
DTSTAMP:20180625T204700Z
LAST-MODIFIED:20180625T204700Z
END:VEVENT
END:VCALENDAR
autosuspend-3.0/tests/test_util_ical/normal-events-corner-cases.ics 0000664 0000000 0000000 00000005315 13611126124 0025751 0 ustar 00root root 0000000 0000000 BEGIN:VCALENDAR
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
VERSION:2.0
BEGIN:VTIMEZONE
TZID:Europe/Berlin
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:19700329T020000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:19701025T030000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
END:STANDARD
END:VTIMEZONE
BEGIN:VEVENT
CREATED:20180601T200433Z
LAST-MODIFIED:20180601T200455Z
DTSTAMP:20180601T200455Z
UID:1c056498-9c83-4e0f-bb77-777c967c9a54
SUMMARY:before include
DTSTART;TZID=Europe/Berlin:20180603T210000
DTEND;TZID=Europe/Berlin:20180604T020000
TRANSP:OPAQUE
X-MOZ-GENERATION:2
SEQUENCE:1
END:VEVENT
BEGIN:VEVENT
CREATED:20180601T200328Z
LAST-MODIFIED:20180601T200511Z
DTSTAMP:20180601T200511Z
UID:db4b1c02-6ac2-4def-bfb0-9a96b510387e
SUMMARY:direct start
DTSTART;TZID=Europe/Berlin:20180604T000000
DTEND;TZID=Europe/Berlin:20180604T030000
TRANSP:OPAQUE
X-MOZ-GENERATION:2
SEQUENCE:1
END:VEVENT
BEGIN:VEVENT
CREATED:20180601T200518Z
LAST-MODIFIED:20180601T200531Z
DTSTAMP:20180601T200531Z
UID:06622f56-d945-490b-9fd7-0fe5015f3188
SUMMARY:in between
DTSTART;TZID=Europe/Berlin:20180607T040000
DTEND;TZID=Europe/Berlin:20180607T090000
TRANSP:OPAQUE
X-MOZ-GENERATION:1
END:VEVENT
BEGIN:VEVENT
CREATED:20180601T200351Z
LAST-MODIFIED:20180601T200555Z
DTSTAMP:20180601T200555Z
UID:48d1debe-e457-4bde-9bea-ab18be136d4a
SUMMARY:before do not include
DTSTART;TZID=Europe/Berlin:20180603T220000
DTEND;TZID=Europe/Berlin:20180604T000000
TRANSP:OPAQUE
X-MOZ-GENERATION:4
SEQUENCE:2
END:VEVENT
BEGIN:VEVENT
CREATED:20180601T200531Z
LAST-MODIFIED:20180601T200615Z
DTSTAMP:20180601T200615Z
UID:0a36a2e8-fac3-4337-8464-f52e5cf17bd5
SUMMARY:direct end
DTSTART;TZID=Europe/Berlin:20180610T220000
DTEND;TZID=Europe/Berlin:20180611T000000
TRANSP:OPAQUE
X-MOZ-GENERATION:4
SEQUENCE:1
END:VEVENT
BEGIN:VEVENT
CREATED:20180601T200619Z
LAST-MODIFIED:20180601T200633Z
DTSTAMP:20180601T200633Z
UID:19bf0d84-3286-44d8-8376-67549a419001
SUMMARY:end overlap
DTSTART;TZID=Europe/Berlin:20180610T210000
DTEND;TZID=Europe/Berlin:20180611T020000
TRANSP:OPAQUE
X-MOZ-GENERATION:2
SEQUENCE:1
END:VEVENT
BEGIN:VEVENT
CREATED:20180601T200643Z
LAST-MODIFIED:20180601T200651Z
DTSTAMP:20180601T200651Z
UID:ae376911-eab5-45fe-bb5b-14e9fd904b44
SUMMARY:end after
DTSTART;TZID=Europe/Berlin:20180611T000000
DTEND;TZID=Europe/Berlin:20180611T030000
TRANSP:OPAQUE
X-MOZ-GENERATION:1
END:VEVENT
BEGIN:VEVENT
CREATED:20180602T144323Z
LAST-MODIFIED:20180602T144338Z
DTSTAMP:20180602T144338Z
UID:f52ee7b1-810f-4b08-bf28-80e8ae226ac3
SUMMARY:overlapping
DTSTART;TZID=Europe/Berlin:20180602T200000
DTEND;TZID=Europe/Berlin:20180612T230000
TRANSP:OPAQUE
X-MOZ-GENERATION:2
SEQUENCE:1
END:VEVENT
END:VCALENDAR
autosuspend-3.0/tests/test_util_ical/old-event.ics 0000664 0000000 0000000 00000001273 13611126124 0022471 0 ustar 00root root 0000000 0000000 BEGIN:VCALENDAR
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
VERSION:2.0
BEGIN:VTIMEZONE
TZID:Europe/Berlin
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:19700329T020000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:19701025T030000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
END:STANDARD
END:VTIMEZONE
BEGIN:VEVENT
CREATED:20180603T194125Z
LAST-MODIFIED:20180603T194144Z
DTSTAMP:20180603T194144Z
UID:6ff13ee1-e548-41b1-8e08-d7725423743a
SUMMARY:long-event
DTSTART;TZID=Europe/Berlin:20040605T130000
DTEND;TZID=Europe/Berlin:20040605T150000
TRANSP:OPAQUE
SEQUENCE:1
END:VEVENT
END:VCALENDAR
autosuspend-3.0/tests/test_util_ical/recurring-change-dst.ics 0000664 0000000 0000000 00000002640 13611126124 0024606 0 ustar 00root root 0000000 0000000 BEGIN:VCALENDAR
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
VERSION:2.0
BEGIN:VTIMEZONE
TZID:Europe/Berlin
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:19700329T020000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:19701025T030000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
END:STANDARD
END:VTIMEZONE
BEGIN:VEVENT
CREATED:20180603T200159Z
LAST-MODIFIED:20180603T200414Z
DTSTAMP:20180603T200414Z
UID:d083699e-6f37-4a85-b20d-f03750aa6691
SUMMARY:recurring
RRULE:FREQ=DAILY
EXDATE:20181214T130000Z
DTSTART;TZID=Europe/Berlin:20180606T140000
DTEND;TZID=Europe/Berlin:20180606T160000
TRANSP:OPAQUE
X-MOZ-GENERATION:4
SEQUENCE:2
END:VEVENT
BEGIN:VEVENT
CREATED:20180603T200213Z
LAST-MODIFIED:20180603T200243Z
DTSTAMP:20180603T200243Z
UID:d083699e-6f37-4a85-b20d-f03750aa6691
SUMMARY:recurring
RECURRENCE-ID;TZID=Europe/Berlin:20180612T140000
DTSTART;TZID=Europe/Berlin:20180612T140000
DTEND;TZID=Europe/Berlin:20180612T160000
SEQUENCE:5
TRANSP:OPAQUE
X-MOZ-GENERATION:4
END:VEVENT
BEGIN:VEVENT
CREATED:20180603T200401Z
LAST-MODIFIED:20180603T200407Z
DTSTAMP:20180603T200407Z
UID:d083699e-6f37-4a85-b20d-f03750aa6691
SUMMARY:recurring
RECURRENCE-ID;TZID=Europe/Berlin:20181212T140000
DTSTART;TZID=Europe/Berlin:20181212T110000
DTEND;TZID=Europe/Berlin:20181212T130000
SEQUENCE:2
TRANSP:OPAQUE
X-MOZ-GENERATION:4
END:VEVENT
END:VCALENDAR
autosuspend-3.0/tests/test_util_ical/simple-recurring.ics 0000664 0000000 0000000 00000001365 13611126124 0024065 0 ustar 00root root 0000000 0000000 BEGIN:VCALENDAR
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
VERSION:2.0
BEGIN:VTIMEZONE
TZID:Europe/Berlin
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:19700329T020000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:19701025T030000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
END:STANDARD
END:VTIMEZONE
BEGIN:VEVENT
CREATED:20180601T182719Z
LAST-MODIFIED:20180601T182803Z
DTSTAMP:20180601T182803Z
UID:74c93379-f763-439b-9d11-eca4d431bfc7
SUMMARY:Stay awake
RRULE:FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR
DTSTART;TZID=Europe/Berlin:20180327T090000
DTEND;TZID=Europe/Berlin:20180327T180000
TRANSP:OPAQUE
X-MOZ-GENERATION:2
SEQUENCE:1
END:VEVENT
END:VCALENDAR
autosuspend-3.0/tests/test_util_ical/single-change.ics 0000664 0000000 0000000 00000002610 13611126124 0023274 0 ustar 00root root 0000000 0000000 BEGIN:VCALENDAR
PRODID:-//Mozilla.org/NONSGML Mozilla Calendar V1.1//EN
VERSION:2.0
BEGIN:VTIMEZONE
TZID:Europe/Berlin
BEGIN:DAYLIGHT
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
TZNAME:CEST
DTSTART:19700329T020000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=3
END:DAYLIGHT
BEGIN:STANDARD
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
TZNAME:CET
DTSTART:19701025T030000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
END:STANDARD
END:VTIMEZONE
BEGIN:VEVENT
CREATED:20180603T194125Z
LAST-MODIFIED:20180603T194144Z
DTSTAMP:20180603T194144Z
UID:6ff13ee1-e548-41b1-8e08-d7725423743a
SUMMARY:recurring
RRULE:FREQ=DAILY
DTSTART;TZID=Europe/Berlin:20180605T130000
DTEND;TZID=Europe/Berlin:20180605T150000
TRANSP:OPAQUE
X-MOZ-GENERATION:4
SEQUENCE:1
END:VEVENT
BEGIN:VEVENT
CREATED:20180603T194138Z
LAST-MODIFIED:20180603T194140Z
DTSTAMP:20180603T194140Z
UID:6ff13ee1-e548-41b1-8e08-d7725423743a
SUMMARY:recurring
RECURRENCE-ID;TZID=Europe/Berlin:20180613T130000
DTSTART;TZID=Europe/Berlin:20180613T160000
DTEND;TZID=Europe/Berlin:20180613T180000
SEQUENCE:2
TRANSP:OPAQUE
X-MOZ-GENERATION:4
END:VEVENT
BEGIN:VEVENT
CREATED:20180603T194141Z
LAST-MODIFIED:20180603T194144Z
DTSTAMP:20180603T194144Z
UID:6ff13ee1-e548-41b1-8e08-d7725423743a
SUMMARY:recurring
RECURRENCE-ID;TZID=Europe/Berlin:20180615T130000
DTSTART;TZID=Europe/Berlin:20180615T110000
DTEND;TZID=Europe/Berlin:20180615T130000
SEQUENCE:2
TRANSP:OPAQUE
X-MOZ-GENERATION:4
END:VEVENT
END:VCALENDAR
autosuspend-3.0/tests/test_util_systemd.py 0000664 0000000 0000000 00000000512 13611126124 0021221 0 ustar 00root root 0000000 0000000 from autosuspend.util.systemd import list_logind_sessions
def test_list_logind_sessions_empty(logind) -> None:
assert len(list(list_logind_sessions())) == 0
logind.AddSession("c1", "seat0", 1042, "auser", True)
sessions = list(list_logind_sessions())
assert len(sessions) == 1
assert sessions[0][0] == "c1"
autosuspend-3.0/tox.ini 0000664 0000000 0000000 00000005146 13611126124 0015244 0 ustar 00root root 0000000 0000000 [tox]
envlist = coverage-clean,test-py37-psutil{55,latest}-dateutil{27,latest}, test-py38-psutillatest-dateutillatest, integration-py{37,38}, mindeps, check, docs, coverage
[testenv]
extras = test
setenv =
COVERAGE_FILE = ./.coverage.{envname}
deps =
psutil55: psutil>=5.5,<5.6
psutillatest: psutil
dateutil27: python-dateutil>=2.7,<2.8
dateutillatest: python-dateutil
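# The factor-prefixed entries above (e.g. "psutil55:", "dateutillatest:") are
# only installed in environments whose name contains the respective factor.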
commands =
{envbindir}/python -V
{envbindir}/python -c 'import psutil; print(psutil.__version__)'
{envbindir}/python -c 'import dateutil; print(dateutil.__version__)'
test: {envbindir}/pytest --cov -m "not integration" {posargs}
integration: {envbindir}/pytest --cov -m "integration" {posargs}
depends = coverage-clean
[testenv:coverage-clean]
deps = coverage
skip_install = true
commands = coverage erase
depends =
[testenv:coverage]
depends = test-py37-psutil{55,latest}-dateutil{27,latest}, test-py38-psutillatest-dateutillatest, integration-py{37,38}
deps =
coverage
skip_install = true
setenv =
commands =
- coverage combine
{envbindir}/coverage html
{envbindir}/coverage report
[testenv:mindeps]
description = tests whether the project can be used without any extras
extras =
deps =
depends =
commands =
{envbindir}/python -V
{envbindir}/python -c "import autosuspend; import autosuspend.checks.activity; import autosuspend.checks.wakeup"
{envbindir}/autosuspend -c tests/data/mindeps-test.conf daemon -r 1
[testenv:check]
depends =
deps =
pydocstyle
flake8
flake8-junit-report
flake8-docstrings
flake8-import-order
flake8-print
flake8-pep3101
flake8-string-format
pep8-naming
flake8-comprehensions
flake8-bugbear
flake8-builtins
flake8-eradicate
flake8-mutable
flake8-commas
flake8-broken-line
flake8-debugger
flake8-pie
flake8-bandit
dlint
flake8-mock
flake8-annotations
flake8-variables-names
flake8-black
mypy
commands =
{envbindir}/python -V
{envbindir}/flake8 {posargs}
{envbindir}/mypy src tests
[testenv:docs]
basepython = python3.8
depends =
deps = -rrequirements-doc.txt
commands = {envbindir}/sphinx-build -W -b html -d {envtmpdir}/doctrees doc/source {envtmpdir}/html
[testenv:devenv]
envdir = env
basepython = python3.6
usedevelop = True
depends =
deps =
psutil
python-mpd2
requests
requests-file
lxml
dbus-python
icalendar
python-dateutil
pytest
pytest-cov
pytest-mock
freezegun
flake8
flake8-mypy
flake8-docstrings
flake8-per-file-ignores
commands =
[gh-actions]
python =
3.7: py37, coverage
3.8: py38, coverage
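# With tox-gh-actions installed, this mapping selects which environments from
# envlist run for the Python version set up in the CI job, e.g. a 3.8 runner
# executes the py38 factors plus the coverage env.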