pytray-0.3.5/.bandit.yaml
exclude_dirs: ['test',]
pytray-0.3.5/.flake8
[flake8]
ignore = E203, E266, E501, W503, F403, F401
max-line-length = 120
max-complexity = 18
select = B,C,E,F,W,T4,B9
exclude = .git,__pycache__
pytray-0.3.5/.github/workflows/ci.yml
name: continuous-integration
on: [push, pull_request]
jobs:
pre-commit:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.8
uses: actions/setup-python@v2
with:
python-version: 3.8
- uses: pre-commit/action@v2.0.0
tests:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: [3.7, 3.8, 3.9, '3.10']
include:
- python-version: 3.8
rabbitmq: 3.6
- python-version: 3.8
rabbitmq: 3.8
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install python dependencies
        run: pip install -e .[dev,docs]
- name: Run pytest
run: pytest --cov=pytray -sv -p no:nb_regression test
- name: Create xml coverage
run: coverage xml
- name: Upload coverage to Codecov
if: github.repository == 'muhrin/pytray'
uses: codecov/codecov-action@v1
with:
file: ./coverage.xml
name: pytray
pytray-0.3.5/.gitignore
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover
.hypothesis/
# Translations
*.mo
*.pot
# Django stuff:
*.log
# Sphinx documentation
docs/_build/
# PyBuilder
target/
#Ipython Notebook
.ipynb_checkpoints
# JetBrains IDE stuff
.idea/
# Virtual environment directories
/venv*/
# Vim files
*.swp
pytray-0.3.5/.pre-commit-config.yaml
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.3.0
hooks:
- id: check-added-large-files
args: ['--maxkb=5000']
- id: end-of-file-fixer
- id: check-case-conflict
- id: detect-private-key
- id: check-docstring-first
- repo: https://github.com/psf/black
rev: 22.6.0
hooks:
- id: black
exclude: (.*)/migrations
- repo: https://gitlab.com/PyCQA/flake8
rev: 3.9.2
hooks:
- id: flake8
- repo: https://github.com/PyCQA/bandit
rev: 1.7.4
hooks:
- id: bandit
args: [ "-c", ".bandit.yaml" ]
- repo: https://github.com/commitizen-tools/commitizen
rev: v2.29.5
hooks:
- id: commitizen
stages: [commit-msg]
pytray-0.3.5/.pylintrc
[MASTER]
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code
extension-pkg-whitelist=
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS,test,examples,setup.py
# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Use multiple processes to speed up Pylint.
jobs=1
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=
# Pickle collected data for later comparisons.
persistent=yes
# Specify a configuration file.
#rcfile=
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
confidence=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
disable=missing-docstring, useless-object-inheritance, locally-disabled
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=
[REPORTS]
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details
#msg-template=
# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio).You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
output-format=text
# Tells whether to display a full report or only the messages
reports=no
# Activate the evaluation score.
score=yes
[REFACTORING]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
[BASIC]
# Naming hint for argument names
argument-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Regular expression matching correct argument names
argument-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Naming hint for attribute names
attr-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Regular expression matching correct attribute names
attr-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata
# Naming hint for class attribute names
class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Regular expression matching correct class attribute names
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Naming hint for class names
class-name-hint=[A-Z_][a-zA-Z0-9]+$
# Regular expression matching correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Naming hint for constant names
const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Regular expression matching correct constant names
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=5
# Naming hint for function names
function-name-hint=(([a-z][a-z0-9_]{2,40})|(_[a-z0-9_]*))$
# Regular expression matching correct function names
function-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Good variable names which should always be accepted, separated by a comma
good-names=i,j,k,ex,Run,_, _INPUT_FILE_NAME, _OUTPUT_FILE_NAME, pk
# Include a hint for the correct naming format with invalid-name
include-naming-hint=no
# Naming hint for inline iteration names
inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
# Regular expression matching correct inline iteration names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Naming hint for method names
method-name-hint=(([a-z][a-z0-9_]{2,40})|(_[a-z0-9_]*))$
# Regular expression matching correct method names
method-rgx=(([a-z][a-z0-9_]{2,40})|(_[a-z0-9_]*)|(setUp)|(tearDown))$
# Naming hint for module names
module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression matching correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_,setUp,tearDown
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
property-classes=abc.abstractproperty
# Naming hint for variable names
variable-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Regular expression matching correct variable names
variable-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
[FORMAT]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
# tab).
indent-string='    '
# Maximum number of characters on a single line.
max-line-length=100
# Maximum number of lines in a module
max-module-lines=1000
# List of optional constructs for which whitespace checking is disabled. `dict-
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
# `empty-line` allows space-only lines.
no-space-check=trailing-comma,dict-separator
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
[SPELLING]
# Spelling dictionary name. Available dictionaries: none. To make it working
# install python-enchant package.
spelling-dict=
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to indicated private dictionary in
# --spelling-private-dict-file option instead of raising a message.
spelling-store-unknown-words=no
[LOGGING]
# Logging modules to check that the string format arguments are in logging
# function parameter format
logging-modules=logging
[SIMILARITIES]
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
# Minimum lines number of a similarity.
min-similarity-lines=4
[VARIABLES]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,_cb
# A regular expression matching the name of dummy variables (i.e. expectedly
# not used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*|^ignored_|^unused_
# Tells whether we should check for unused import in __init__ files.
init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,future.builtins
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX,TODO
[TYPECHECK]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis. It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1
# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1
[IMPORTS]
# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=regsub,TERMIOS,Bastion,rexec
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
[DESIGN]
# Maximum number of arguments for function / method
max-args=6
# Maximum number of attributes for a class (see R0902).
max-attributes=12
# Maximum number of boolean expressions in a if statement
max-bool-expr=5
# Maximum number of branch for function / method body
max-branches=12
# Maximum number of locals for function / method body
max-locals=20
# Maximum number of parents for a class (see R0901).
max-parents=20
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of return / yield for function / method body
max-returns=6
# Maximum number of statements in function / method body
max-statements=50
# Minimum number of public methods for a class (see R0903).
min-public-methods=1
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,_fields,_replace,_source,_make,_get_linkname_retrieved
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception
pytray-0.3.5/LICENSE
GNU LESSER GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
This version of the GNU Lesser General Public License incorporates
the terms and conditions of version 3 of the GNU General Public
License, supplemented by the additional permissions listed below.
0. Additional Definitions.
As used herein, "this License" refers to version 3 of the GNU Lesser
General Public License, and the "GNU GPL" refers to version 3 of the GNU
General Public License.
"The Library" refers to a covered work governed by this License,
other than an Application or a Combined Work as defined below.
An "Application" is any work that makes use of an interface provided
by the Library, but which is not otherwise based on the Library.
Defining a subclass of a class defined by the Library is deemed a mode
of using an interface provided by the Library.
A "Combined Work" is a work produced by combining or linking an
Application with the Library. The particular version of the Library
with which the Combined Work was made is also called the "Linked
Version".
The "Minimal Corresponding Source" for a Combined Work means the
Corresponding Source for the Combined Work, excluding any source code
for portions of the Combined Work that, considered in isolation, are
based on the Application, and not on the Linked Version.
The "Corresponding Application Code" for a Combined Work means the
object code and/or source code for the Application, including any data
and utility programs needed for reproducing the Combined Work from the
Application, but excluding the System Libraries of the Combined Work.
1. Exception to Section 3 of the GNU GPL.
You may convey a covered work under sections 3 and 4 of this License
without being bound by section 3 of the GNU GPL.
2. Conveying Modified Versions.
If you modify a copy of the Library, and, in your modifications, a
facility refers to a function or data to be supplied by an Application
that uses the facility (other than as an argument passed when the
facility is invoked), then you may convey a copy of the modified
version:
a) under this License, provided that you make a good faith effort to
ensure that, in the event an Application does not supply the
function or data, the facility still operates, and performs
whatever part of its purpose remains meaningful, or
b) under the GNU GPL, with none of the additional permissions of
this License applicable to that copy.
3. Object Code Incorporating Material from Library Header Files.
The object code form of an Application may incorporate material from
a header file that is part of the Library. You may convey such object
code under terms of your choice, provided that, if the incorporated
material is not limited to numerical parameters, data structure
layouts and accessors, or small macros, inline functions and templates
(ten or fewer lines in length), you do both of the following:
a) Give prominent notice with each copy of the object code that the
Library is used in it and that the Library and its use are
covered by this License.
b) Accompany the object code with a copy of the GNU GPL and this license
document.
4. Combined Works.
You may convey a Combined Work under terms of your choice that,
taken together, effectively do not restrict modification of the
portions of the Library contained in the Combined Work and reverse
engineering for debugging such modifications, if you also do each of
the following:
a) Give prominent notice with each copy of the Combined Work that
the Library is used in it and that the Library and its use are
covered by this License.
b) Accompany the Combined Work with a copy of the GNU GPL and this license
document.
c) For a Combined Work that displays copyright notices during
execution, include the copyright notice for the Library among
these notices, as well as a reference directing the user to the
copies of the GNU GPL and this license document.
d) Do one of the following:
0) Convey the Minimal Corresponding Source under the terms of this
License, and the Corresponding Application Code in a form
suitable for, and under terms that permit, the user to
recombine or relink the Application with a modified version of
the Linked Version to produce a modified Combined Work, in the
manner specified by section 6 of the GNU GPL for conveying
Corresponding Source.
1) Use a suitable shared library mechanism for linking with the
Library. A suitable mechanism is one that (a) uses at run time
a copy of the Library already present on the user's computer
system, and (b) will operate properly with a modified version
of the Library that is interface-compatible with the Linked
Version.
e) Provide Installation Information, but only if you would otherwise
be required to provide such information under section 6 of the
GNU GPL, and only to the extent that such information is
necessary to install and execute a modified version of the
Combined Work produced by recombining or relinking the
Application with a modified version of the Linked Version. (If
you use option 4d0, the Installation Information must accompany
the Minimal Corresponding Source and Corresponding Application
Code. If you use option 4d1, you must provide the Installation
Information in the manner specified by section 6 of the GNU GPL
for conveying Corresponding Source.)
5. Combined Libraries.
You may place library facilities that are a work based on the
Library side by side in a single library together with other library
facilities that are not Applications and are not covered by this
License, and convey such a combined library under terms of your
choice, if you do both of the following:
a) Accompany the combined library with a copy of the same work based
on the Library, uncombined with any other library facilities,
conveyed under the terms of this License.
b) Give prominent notice with the combined library that part of it
is a work based on the Library, and explaining where to find the
accompanying uncombined form of the same work.
6. Revised Versions of the GNU Lesser General Public License.
The Free Software Foundation may publish revised and/or new versions
of the GNU Lesser General Public License from time to time. Such new
versions will be similar in spirit to the present version, but may
differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the
Library as you received it specifies that a certain numbered version
of the GNU Lesser General Public License "or any later version"
applies to it, you have the option of following the terms and
conditions either of that published version or of any later version
published by the Free Software Foundation. If the Library as you
received it does not specify a version number of the GNU Lesser
General Public License, you may choose any version of the GNU Lesser
General Public License ever published by the Free Software Foundation.
If the Library as you received it specifies that a proxy can decide
whether future versions of the GNU Lesser General Public License shall
apply, that proxy's public statement of acceptance of any version is
permanent authorization for you to choose that version for the
Library.
pytray-0.3.5/MANIFEST.in
# Licenses
include LICENSE
# Include the readme manually
include README.rst
# Force test subdirectories
recursive-include test *.py
pytray-0.3.5/README.rst
Recently I've been baking a number of pies [1-2] and I've found it useful to collect commonly used code in one place. This library contains a collection of (often exotic) bits of code that make Python that much easier to write, especially when mixing asyncio and concurrent threads.
[1] https://github.com/aiidateam/kiwipy/
[2] https://github.com/aiidateam/plumpy
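For example, ``pytray.aiothreads.LoopScheduler`` runs an asyncio event loop on a background thread so that plain, synchronous code can drive coroutines on it; a minimal sketch::

    import asyncio
    from pytray.aiothreads import LoopScheduler

    async def fetch(value):
        await asyncio.sleep(0.1)
        return value

    with LoopScheduler() as scheduler:
        # Blocks the calling thread until the coroutine finishes on the loop thread
        assert scheduler.await_(fetch("hello")) == "hello"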
pytray-0.3.5/pytray/__init__.py
# -*- coding: utf-8 -*-
from .version import *
from . import aiothreads
from . import futures
from . import tree
from . import version
__all__ = version.__all__ + (
"aiothreads",
"futures",
"tree",
) # pylint: disable=undefined-variable
pytray-0.3.5/pytray/aiothreads.py
# -*- coding: utf-8 -*-
"""
A module to create interoperability between concurrent threads and asyncio.
An asyncio event loop can be run on a thread onto which coroutines can be scheduled
from other threads. The result is returned as a concurrent future which can be
waited on.
"""
import asyncio
import concurrent.futures
from concurrent.futures import Future as ThreadFuture
from contextlib import contextmanager
from functools import partial
import logging
import sys
import threading
import typing
from typing import Callable
from . import futures
__all__ = ("LoopScheduler",)
_LOGGER = logging.getLogger(__name__)
def aio_future_chain_thread(aio_future: asyncio.Future, future: ThreadFuture):
"""Chain an asyncio future to a thread future.
If the result of the asyncio future is another aio future this will also
be chained so the client only sees thread futures
"""
def done(done_future: asyncio.Future):
# Here we're on the aio thread
# Copy over the future
try:
result = done_future.result()
if asyncio.isfuture(result):
# Change the aio future to a thread future
fut = ThreadFuture()
aio_future_chain_thread(result, fut)
result = fut
future.set_result(result)
except asyncio.CancelledError:
future.cancel()
except Exception as exception: # pylint: disable=broad-except
future.set_exception(exception)
aio_future.add_done_callback(done)
return future
def thread_future_chain_aio(future: ThreadFuture, aio_future: asyncio.Future):
"""Chain a thread future to an asyncio future
If the result of the thread future is another thread future this will also be
chained so the client only sees aio futures"""
loop = aio_future._loop # pylint: disable=protected-access
def done(done_future: ThreadFuture):
try:
result = done_future.result()
if isinstance(result, ThreadFuture):
# Change the thread future to an aio future
fut = loop.create_future()
thread_future_chain_aio(result, fut)
result = fut
loop.call_soon_threadsafe(aio_future.set_result, result)
except concurrent.futures.CancelledError:
loop.call_soon_threadsafe(aio_future.cancel)
except Exception as exception: # pylint: disable=broad-except
loop.call_soon_threadsafe(aio_future.set_exception, exception)
future.add_done_callback(done)
return aio_future
def aio_future_to_thread(aio_future: asyncio.Future):
"""Convert an asyncio future to a thread future. Mutations of the thread future will be
propagated to the asyncio future but not the other way around."""
future = ThreadFuture()
thread_future_chain_aio(future, aio_future)
return future
class LoopScheduler:
DEFAULT_TASK_TIMEOUT = 5.0
def __init__(
self,
loop: asyncio.AbstractEventLoop = None,
name="AsyncioScheduler",
timeout=DEFAULT_TASK_TIMEOUT,
):
self._loop = loop or asyncio.new_event_loop()
self._name = name
self.task_timeout = timeout
self._asyncio_thread = None
self._stop_signal = None
self._closed = False
def __enter__(self):
self._ensure_running()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.stop()
def loop(self):
return self._loop
def is_closed(self) -> bool:
return self._closed
def is_running(self):
return self._asyncio_thread is not None
def close(self):
if self.is_closed():
return
self.stop()
self._closed = True
def start(self):
if self._asyncio_thread is not None:
raise RuntimeError("Already running")
start_future = ThreadFuture()
self._asyncio_thread = threading.Thread(
target=self._run_loop, name=self._name, args=(start_future,), daemon=True
)
self._asyncio_thread.start()
start_future.result()
def stop(self):
# Save the thread because it will be set to None when it does stop
aio_thread = self._asyncio_thread
if aio_thread is None:
return
stop_future = ThreadFuture()
# Send the stop signal
self._loop.call_soon_threadsafe(
partial(self._stop_signal.set_result, stop_future)
)
# Wait for the result in case there was an exception
stop_future.result()
aio_thread.join()
def await_(self, awaitable: typing.Awaitable, *, name: str = None):
"""
Await an awaitable on the event loop and return the result. It may take a little time for
        the loop to get around to scheduling it, so we use a timeout given by the `task_timeout`
        attribute (which defaults to `DEFAULT_TASK_TIMEOUT`).
        :param awaitable: the awaitable to run
:param name: an optional name for the awaitable to aid with debugging. If no name is
supplied will attempt to use `awaitable.__name__`.
:return: the result of running the coroutine
"""
try:
return self.await_submit(awaitable).result(timeout=self.task_timeout)
except concurrent.futures.TimeoutError as exc:
# Try to get a reasonable name for the awaitable
name = name or getattr(awaitable, "__name__", "Awaitable")
raise concurrent.futures.TimeoutError(
"{} after {} seconds".format(name, self.task_timeout)
) from exc
def await_submit(self, awaitable: typing.Awaitable) -> ThreadFuture:
"""
Schedule an awaitable on the loop and return the corresponding future
"""
async def coro():
res = await awaitable
if asyncio.isfuture(res):
future = ThreadFuture()
aio_future_chain_thread(res, future)
return future
return res
self._ensure_running()
return asyncio.run_coroutine_threadsafe(coro(), loop=self._loop)
def run(self, func, *args, **kwargs):
"""
Run a function on the event loop and return the result. It may take a little time for the
        loop to get around to scheduling it, so we use a timeout given by the `task_timeout`
        attribute (which defaults to `DEFAULT_TASK_TIMEOUT`).
        :param func: the function to run
        :return: the result of calling the function
"""
return self.submit(func, *args, **kwargs).result(timeout=self.task_timeout)
def submit(self, func: Callable, *args, **kwargs) -> ThreadFuture:
"""
Schedule a function on the loop and return the corresponding future
"""
self._ensure_running()
future = ThreadFuture()
def callback():
if not future.cancelled():
with futures.capture_exceptions(future):
result = func(*args, **kwargs)
if asyncio.isfuture(result):
result = aio_future_to_thread(result)
future.set_result(result)
handle = self._loop.call_soon_threadsafe(callback)
def handle_cancel(done_future: ThreadFuture):
"""Function to propagate a cancellation of the concurrent future up to the loop
callback"""
if done_future.cancelled():
self._loop.call_soon_threadsafe(handle.cancel)
future.add_done_callback(handle_cancel)
return future
@contextmanager
def async_ctx(self, ctx_manager: typing.AsyncContextManager):
"""Can be used to turn an async context manager into a synchronous one"""
aexit = ctx_manager.__aexit__
aenter = ctx_manager.__aenter__
# result = self.await_(aenter())
result = asyncio.run_coroutine_threadsafe(aenter(), loop=self._loop).result()
# Make sure that if we got a future, we convert it appropriately
if asyncio.isfuture(result):
result = aio_future_to_thread(result)
try:
yield result
except Exception: # pylint: disable=broad-except
if not self.await_(aexit(*sys.exc_info())):
raise
else:
self.await_(aexit(None, None, None))
@contextmanager
def ctx(self, ctx_manager: typing.ContextManager):
"""Can be used to enter a context on the event loop"""
ctx_exit = ctx_manager.__exit__
ctx_enter = ctx_manager.__enter__
result = self.run(ctx_enter)
try:
yield result
except Exception: # pylint: disable=broad-except
if not self.run(ctx_exit, *sys.exc_info()):
raise
else:
self.run(ctx_exit, None, None, None)
def async_iter(self, aiterable: typing.AsyncIterable):
"""Iterate an async iterable from this thread"""
iterator = aiterable.__aiter__()
running = True
while running:
try:
target = self.await_(iterator.__anext__())
except StopAsyncIteration:
running = False
else:
yield target
def _ensure_running(self):
if self._asyncio_thread is not None:
return
self.start()
def _run_loop(self, start_future):
"""Here we are on the aio thread"""
_LOGGER.debug(
"Starting event loop (id %s) on %s",
id(self._loop),
threading.current_thread(),
)
asyncio.set_event_loop(self._loop)
try:
self._stop_signal = self._loop.create_future()
async def run_loop():
start_future.set_result(True)
# Wait to stop
stop_future = await self._stop_signal
stop_future.set_result(True)
self._loop.run_until_complete(run_loop())
# The loop is finished
self._asyncio_thread = None
_LOGGER.debug("Event loop stopped on %s", threading.current_thread())
finally:
asyncio.set_event_loop(None)
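# A minimal usage sketch (kept under a __main__ guard so importing this module is
# unaffected): it shows how a LoopScheduler lets plain synchronous code drive an
# async context manager and an async generator on the background event loop.
# The `session` and `countdown` names below are purely illustrative.
if __name__ == "__main__":
    import contextlib

    @contextlib.asynccontextmanager
    async def session():
        # Pretend to acquire and release some asynchronous resource
        yield "resource"

    async def countdown(start):
        for value in range(start, 0, -1):
            await asyncio.sleep(0.01)
            yield value

    with LoopScheduler() as scheduler:
        with scheduler.async_ctx(session()) as resource:
            print("acquired:", resource)
        print(list(scheduler.async_iter(countdown(3))))  # prints [3, 2, 1]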
pytray-0.3.5/pytray/futures.py
# -*- coding: utf-8 -*-
import contextlib
__all__ = ("capture_exceptions",)
@contextlib.contextmanager
def capture_exceptions(future, ignore=()):
"""
    Capture any uncaught exceptions raised in the context and set them as the exception of the given future
    :param future: the future to set the exception on; it must have a `set_exception()` method
    :param ignore: an optional tuple of exception types to ignore; these will be re-raised rather than set on the future
"""
try:
yield
except Exception as exception: # pylint: disable=broad-except
if isinstance(exception, ignore):
raise
future.set_exception(exception)
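# A minimal usage sketch (kept under a __main__ guard so importing is unaffected):
# any exception raised inside the `capture_exceptions` block ends up stored on the
# future instead of propagating to the caller.
if __name__ == "__main__":
    import concurrent.futures

    fut = concurrent.futures.Future()
    with capture_exceptions(fut):
        raise ValueError("boom")
    print(type(fut.exception()))  # <class 'ValueError'>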
pytray-0.3.5/pytray/obj_load.py
# -*- coding: utf-8 -*-
"""Module for methods used to load objects/symbols dynamically in python"""
from typing import Union
import types
__version__ = "0.0.1"
__all__ = "__version__", "load_obj", "full_name"
def load_obj(name: str) -> Union[type, types.FunctionType, types.BuiltinFunctionType]:
"""Load a type from a fully qualified name"""
components = name.split(".")
mod = __import__(components[0])
# Get the components one by one
for comp in components[1:]:
mod = getattr(mod, comp)
return mod
def full_name(
symbol: Union[type, types.FunctionType, types.BuiltinFunctionType]
) -> str:
"""Get the fully qualified name of a type."""
return symbol.__module__ + "." + symbol.__name__
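# A minimal usage sketch (kept under a __main__ guard): round-trip a symbol through
# its fully qualified name, e.g. to store it in a message or database and load it later.
if __name__ == "__main__":
    import collections

    name = full_name(collections.OrderedDict)
    print(name)  # collections.OrderedDict
    assert load_obj(name) is collections.OrderedDict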
pytray-0.3.5/pytray/pretty.py
# -*- coding: utf-8 -*-
"""Pretty printing functions"""
import typing
import deprecation
from .version import __version__
def type_string(obj_type: typing.Type) -> str:
"""Given an type will return a simple type string"""
type_str = str(obj_type)
if type_str.startswith(" str:
return type_string(obj_type)
pytray-0.3.5/pytray/tree.py
# -*- coding: utf-8 -*-
"""A tree in this context is considered a nested container containing either lists or
dictionaries and with anything else being considered a leaf. Paths to particular points
in the data structure are represented as sequences containing the index at each level"""
from functools import reduce
import operator
def get_by_path(root, items):
"""Access a nested object in root by item sequence. Taken from:
https://stackoverflow.com/questions/14692690/access-nested-dictionary-items-via-a-list-of-keys"""
if not items:
# Support either empty items or None items meaning give back root
return root
return reduce(operator.getitem, items, root)
def set_by_path(root, items, value):
"""Set a value in a nested object in root by item sequence. Taken from:
https://stackoverflow.com/questions/14692690/access-nested-dictionary-items-via-a-list-of-keys"""
get_by_path(root, items[:-1])[items[-1]] = value
def path_to_string(path: tuple) -> str:
return ".".join((str(key) for key in path))
def transform(visitor, root, path: tuple = (), **kwargs):
"""Given a list or a dict call create a new container of that type calling
`visitor` for each entry to get the transformed value. kwargs will be passed
to the visitor.
"""
if isinstance(root, dict):
return {
key: visitor(value, path=path + (key,), **kwargs)
for key, value in root.items()
}
if isinstance(root, list):
return [
visitor(value, path=path + (idx,), **kwargs)
for idx, value in enumerate(root)
]
return root
def flatten(root, filter=None) -> dict: # pylint: disable=redefined-builtin
"""Given a tree flatten it into a dictionary contaning the path as key and the corresponding
leaves as values"""
def should_flatten(value):
return (isinstance(value, (dict, list)) and bool(value)) and (
filter is None or filter(value)
)
def do_flattening(entry, path=()):
if should_flatten(entry):
# Descend further
if isinstance(entry, dict):
for key, value in entry.items():
yield from do_flattening(value, path + (key,))
elif isinstance(entry, list):
for idx, value in enumerate(entry):
yield from do_flattening(value, path + (idx,))
else:
raise TypeError("Cannot flatten type '{}'".format(type(entry)))
else:
yield path, entry
return dict(do_flattening(root))
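# A minimal usage sketch (kept under a __main__ guard) tying the helpers together on
# a small nested structure; the `config` data below is purely illustrative.
if __name__ == "__main__":
    config = {"db": {"hosts": ["alpha", "beta"]}, "debug": False}
    assert get_by_path(config, ("db", "hosts", 1)) == "beta"
    set_by_path(config, ("db", "hosts", 1), "gamma")
    flat = flatten(config)
    assert flat[("db", "hosts", 1)] == "gamma"
    assert flat[("debug",)] is False
    print(path_to_string(("db", "hosts", 1)))  # db.hosts.1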
pytray-0.3.5/pytray/version.py
# -*- coding: utf-8 -*-
__version__ = "${version}"
__all__ = ("__version__",)
pytray-0.3.5/pytray/weakdict.py
# -*- coding: utf-8 -*-
import collections
import collections.abc
import typing
import weakref
class WeakObjectIdDict(collections.abc.MutableMapping):
"""
    Like weakref.WeakKeyDictionary but internally uses object ids instead of the object reference
itself thereby avoiding the need for the object to be hashable (and therefore immutable).
"""
def __init__(self, seq=None, **kwargs):
self._refs = (
{}
) # type: collections.abc.MutableMapping[int, weakref.ReferenceType]
self._values = {} # type: collections.abc.MutableMapping[int, typing.Any]
if seq:
if isinstance(seq, collections.abc.Mapping):
for key, value in seq.items():
self[key] = value
            elif isinstance(seq, collections.abc.Iterable):
for key, value in seq:
self[key] = value
if kwargs:
for key, value in kwargs.items():
self[key] = value
def __copy__(self):
return WeakObjectIdDict(self)
def __getitem__(self, item):
try:
return self._values[id(item)]
except KeyError:
raise KeyError(str(item))
def __setitem__(self, key, value):
obj_id = id(key)
wref = weakref.ref(key, self._finalised)
self._refs[obj_id] = wref
self._values[obj_id] = value
def __delitem__(self, key):
obj_id = id(key)
del self._values[obj_id]
del self._refs[obj_id]
def __len__(self):
return len(self._values)
def __iter__(self):
for ref in self._refs.values():
yield ref()
def _finalised(self, wref):
found_id = None
for obj_id, ref in self._refs.items():
if ref == wref:
found_id = obj_id
break
# Delete both the object values and the reference itself
del self._values[found_id]
del self._refs[found_id]
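# A minimal usage sketch (kept under a __main__ guard): keys do not need to be
# hashable, and an entry vanishes once its key object is garbage collected
# (immediately on CPython once the last strong reference is dropped).
if __name__ == "__main__":
    class Unhashable:
        __hash__ = None

    registry = WeakObjectIdDict()
    key = Unhashable()
    registry[key] = "some metadata"
    assert registry[key] == "some metadata"
    del key
    assert len(registry) == 0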
pytray-0.3.5/release.sh
PACKAGE="pytray"
VERSION_FILE=${PACKAGE}/version.py
version=$1
while true; do
read -p "Release version ${version}? " yn
case $yn in
[Yy]* ) break;;
[Nn]* ) exit;;
* ) echo "Please answer yes or no.";;
esac
done
sed -i "0,/__version__* =.*/s//__version__ = \"${version}\"/" $VERSION_FILE
current_branch=`git rev-parse --abbrev-ref HEAD`
tag="v${version}"
relbranch="release-${version}"
echo Releasing version $version
git checkout -b $relbranch
git add ${VERSION_FILE}
git commit --no-verify -m "Release ${version}"
git tag -a $tag -m "Version $version"
# Merge into master
git checkout master
git merge $relbranch
# And back into the working branch (usually develop)
git checkout $current_branch
git merge $relbranch
git branch -d $relbranch
# Push everything
git push --tags muhrin master $current_branch
# Release on pypi
rm -r dist build *.egg-info
python setup.py sdist
python setup.py bdist_wheel --universal
twine upload dist/*
pytray-0.3.5/setup.py
# -*- coding: utf-8 -*-
from setuptools import setup
__author__ = "Martin Uhrin"
__license__ = "LGPLv3"
about = {}
with open("pytray/version.py") as f:
exec(f.read(), about) # nosec
setup(
name="pytray",
version=about["__version__"],
description="A python tools library for baking pies",
long_description=open("README.rst").read(),
url="https://github.com/muhrin/pytray.git",
author="Martin Uhrin",
author_email="martin.uhrin.10@ucl.ac.uk",
license=__license__,
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
],
keywords="tools utilities",
install_requires=["deprecation"],
python_requires=">=3.7",
extras_require={
"dev": [
"grayskull",
"pip",
"pre-commit",
"pytest",
"pytest-cov",
"ipython<6",
"twine",
],
"docs": [
"Sphinx==1.8.4",
"Pygments==2.3.1",
"docutils==0.14",
],
},
packages=["pytray"],
test_suite="test",
)
pytray-0.3.5/test/test_aiothreads.py
# -*- coding: utf-8 -*-
import asyncio
import concurrent.futures
import contextlib
import pytest
from pytray.aiothreads import LoopScheduler
@pytest.fixture
def loop_scheduler():
loop = asyncio.new_event_loop()
loop.set_debug(True)
with LoopScheduler(loop=loop) as scheduler:
yield scheduler
async def simple(arg):
await asyncio.sleep(0.1)
return arg
def test_simple_await_submit(loop_scheduler): # pylint: disable=redefined-outer-name
future = loop_scheduler.await_submit(simple("Done!"))
assert future.result() == "Done!"
def test_simple_await(loop_scheduler): # pylint: disable=redefined-outer-name
result = loop_scheduler.await_(simple("Done!"))
assert result == "Done!"
def test_async_context(loop_scheduler): # pylint: disable=redefined-outer-name
sequence = []
@contextlib.asynccontextmanager
async def do_():
sequence.append("Entered")
yield 10
sequence.append("Exiting")
with loop_scheduler.async_ctx(do_()) as value:
assert value == 10
assert sequence == ["Entered", "Exiting"]
def test_async_context_exception(
loop_scheduler,
): # pylint: disable=redefined-outer-name
@contextlib.asynccontextmanager
async def raises_before_yield():
raise RuntimeError
yield
with pytest.raises(RuntimeError):
with loop_scheduler.async_ctx(raises_before_yield()):
pass
@contextlib.asynccontextmanager
async def raises_after_yield():
yield
raise RuntimeError
with pytest.raises(RuntimeError):
with loop_scheduler.async_ctx(raises_after_yield()):
pass
def test_task_timeout():
loop = asyncio.new_event_loop()
loop.set_debug(True)
# First check a normal (sub timeout) situation
with LoopScheduler(loop=loop, timeout=0.1) as scheduler:
        # Make sure the sleep is comfortably shorter than our timeout
scheduler.await_(asyncio.sleep(0.001))
# Now one where we time out
with pytest.raises(concurrent.futures.TimeoutError) as excinfo:
with LoopScheduler(loop=loop, timeout=0.1) as scheduler:
scheduler.await_(asyncio.sleep(1.0))
assert asyncio.sleep.__name__ in str(excinfo.value)
# Test supplying a custom name
with pytest.raises(concurrent.futures.TimeoutError) as excinfo:
with LoopScheduler(loop=loop, timeout=0.1) as scheduler:
scheduler.await_(asyncio.sleep(1.0), name="sleepin'...zZzZ")
assert "sleepin'...zZzZ" in str(excinfo.value)
def test_task_cancel(loop_scheduler): # pylint: disable=redefined-outer-name
evt = asyncio.Event()
async def set_env():
evt.set()
loop_scheduler.await_submit(set_env()).result()
assert evt.is_set()
evt.clear()
loop_scheduler.await_submit(set_env()).cancel()
assert not evt.is_set()
def test_await_futures(loop_scheduler): # pylint: disable=redefined-outer-name
"""Test that a series of Futures works correctly"""
async def inception():
fut = asyncio.Future()
fut2 = asyncio.Future()
fut3 = asyncio.Future()
fut.set_result(True)
fut2.set_result(fut)
fut3.set_result(fut2)
return fut3
assert loop_scheduler.await_(inception()).result().result().result() is True
def test_await_ctx_futures(loop_scheduler): # pylint: disable=redefined-outer-name
"""Test that an async context yielding a future is correctly handled i.e. the asyncio future
should be converted to a concurrent one and the result be propagated back to the asyncio future
in the context"""
@contextlib.asynccontextmanager
async def ctx():
fut = asyncio.Future()
try:
yield fut
finally:
assert fut.result() is True
with loop_scheduler.async_ctx(ctx()) as future:
assert isinstance(future, concurrent.futures.Future)
future.set_result(True)
pytray-0.3.5/test/test_obj_load.py
# -*- coding: utf-8 -*-
import enum
import re
from pytray import obj_load
def test_load_class():
# Test with a class
name = obj_load.full_name(enum.Enum)
assert name == "enum.Enum"
obj_type = obj_load.load_obj(name)
assert obj_type is enum.Enum
# Now test loading something that we haven't imported the module for
obj_type = obj_load.load_obj("argparse.ArgumentParser")
import argparse # pylint: disable=import-outside-toplevel
assert obj_type is argparse.ArgumentParser
# Test with builtin
name = obj_load.full_name(dict)
obj_load.load_obj(name)
def test_load_function():
name = obj_load.full_name(re.match)
assert name == "re.match"
assert obj_load.load_obj(name) is re.match
pytray-0.3.5/test/test_tree.py
# -*- coding: utf-8 -*-
from pytray import tree
def test_flattening():
my_tree = {
"a": {"b": 6, "c": "potato"},
"d": [
{
"e": [],
},
"hello",
],
}
flattened = tree.flatten(my_tree)
assert flattened.pop(("a", "b")) == 6
assert flattened.pop(("a", "c")) == "potato"
assert flattened.pop(("d", 0, "e")) == []
assert flattened.pop(("d", 1)) == "hello"
assert not flattened
def test_flattening_filter():
my_tree = {"a": {"b": 5, "c": {"d": 6}}}
# Don't flatten {'d': 6}
flattened = tree.flatten(my_tree, filter=lambda value: value != {"d": 6})
assert flattened.pop(("a", "b")) == 5
assert flattened.pop(("a", "c")) == {"d": 6}
assert not flattened
def test_path_to_string():
pathstring = tree.path_to_string(("a", "b", "c", 0, "d"))
assert pathstring == "a.b.c.0.d"