python-oldmemo-1.0.3/.flake8:
[flake8]
max-line-length = 110
doctests = True
ignore = E201,E202,W503
exclude = *_pb2.py
per-file-ignores =
    oldmemo/project.py:E203
python-oldmemo-1.0.3/.github/workflows/test-and-publish.yml:
name: Test & Publish
on: [push, pull_request]

permissions:
  contents: read

jobs:
  test:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.7", "3.8", "3.9", "3.10", "3.11", "pypy-3.9"]
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install/update package management dependencies
        run: python -m pip install --upgrade pip setuptools wheel
      - name: Build and install python-oldmemo
        run: pip install .[xml]
      - name: Install test dependencies
        run: pip install --upgrade mypy pylint flake8 mypy-protobuf types-protobuf
      - name: Type-check using mypy
        run: mypy --strict oldmemo/ setup.py
      - name: Lint using pylint
        run: pylint oldmemo/ setup.py
      - name: Format-check using Flake8
        run: flake8 oldmemo/ setup.py

  build:
    name: Build source distribution and wheel
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Build source distribution and wheel
        run: python3 setup.py sdist bdist_wheel
      - uses: actions/upload-artifact@v3
        with:
          path: |
            dist/*.tar.gz
            dist/*.whl

  publish:
    needs: [test, build]
    runs-on: ubuntu-latest
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v')
    steps:
      - uses: actions/download-artifact@v3
        with:
          name: artifact
          path: dist
      - uses: pypa/gh-action-pypi-publish@v1.5.1
        with:
          user: __token__
          password: ${{ secrets.pypi_token }}
python-oldmemo-1.0.3/.gitignore:
dist/
Oldmemo.egg-info/
__pycache__/
.pytest_cache/
.mypy_cache/
.coverage
docs/_build/
python-oldmemo-1.0.3/CHANGELOG.md:
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased]
## [1.0.3] - 8th of November 2022
### Changed
- Exclude tests from the packages
## [1.0.2] - 4th of November 2022
### Changed
- Increased the minimum version of protobuf to 3.20.3 after reports that earlier versions cause issues
- Disabled protobuf's deterministic serialization in an attempt to achieve PyPy3 compatibility
## [1.0.1] - 3rd of November 2022
### Added
- Python 3.11 to the list of supported versions
### Changed
- Replaced usages of the walrus operator to correctly support Python 3.7 and 3.8 as advertised
- Fixed a bug in the calculation of the authentication tag during decryption in the AEAD implementation
## [1.0.0] - 1st of November 2022
### Added
- Initial release.
[Unreleased]: https://github.com/Syndace/python-oldmemo/compare/v1.0.3...HEAD
[1.0.3]: https://github.com/Syndace/python-oldmemo/compare/v1.0.2...v1.0.3
[1.0.2]: https://github.com/Syndace/python-oldmemo/compare/v1.0.1...v1.0.2
[1.0.1]: https://github.com/Syndace/python-oldmemo/compare/v1.0.0...v1.0.1
[1.0.0]: https://github.com/Syndace/python-oldmemo/releases/tag/v1.0.0
python-oldmemo-1.0.3/LICENSE:
GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.
A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.
The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.
An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing under
this license.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU Affero General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Remote Network Interaction; Use with the GNU General Public License.
Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your version
supports such interaction) an opportunity to receive the Corresponding
Source of your version by providing access to the Corresponding Source
from a network server at no charge, through some standard or customary
means of facilitating copying of software. This Corresponding Source
shall include the Corresponding Source for any work covered by version 3
of the GNU General Public License that is incorporated pursuant to the
following paragraph.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU Affero General Public License from time to time. Such new versions
will be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
Copyright (C) <year>  <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
<https://www.gnu.org/licenses/>.
python-oldmemo-1.0.3/MANIFEST.in:
include oldmemo/py.typed
python-oldmemo-1.0.3/README.md:
[![PyPI](https://img.shields.io/pypi/v/Oldmemo.svg)](https://pypi.org/project/Oldmemo/)
[![PyPI - Python Version](https://img.shields.io/pypi/pyversions/Oldmemo.svg)](https://pypi.org/project/Oldmemo/)
[![Build Status](https://github.com/Syndace/python-oldmemo/actions/workflows/test-and-publish.yml/badge.svg)](https://github.com/Syndace/python-oldmemo/actions/workflows/test-and-publish.yml)
[![Documentation Status](https://readthedocs.org/projects/python-oldmemo/badge/?version=latest)](https://python-oldmemo.readthedocs.io/)
# python-oldmemo #
Backend implementation for [python-omemo](https://github.com/Syndace/python-omemo), equipping python-omemo with support for OMEMO under the namespace `eu.siacs.conversations.axolotl` (casually/jokingly referred to as "oldmemo").
This repository is based on [python-twomemo](https://github.com/Syndace/python-twomemo) and will be rebased on top of new commits to that repository regularly, so expect commit hashes to be unstable. For the same reason, release tags might not be available or point to non-existing commit hashes.
## Installation ##
Install the latest release using pip (`pip install oldmemo`) or manually from source by running `pip install .` in the cloned repository.
## Protobuf ##
Install `protoc`. Then, in the root directory of this repository, run:
```sh
$ pip install protobuf mypy mypy-protobuf types-protobuf
$ protoc --python_out=oldmemo/ --mypy_out=oldmemo/ oldmemo.proto
```
This will generate `oldmemo/oldmemo_pb2.py` and `oldmemo/oldmemo_pb2.pyi`.
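The generated module can then be used like any other protobuf module. The following is an illustrative round trip through the generated classes with made-up field values, not part of the package:

```python
from oldmemo import oldmemo_pb2

# Build an OMEMOMessage with placeholder values (illustrative only).
message = oldmemo_pb2.OMEMOMessage(
    n=1,                  # message number (made-up value)
    pn=0,                 # length of the previous sending chain (made-up value)
    dh_pub=b"\x00" * 32,  # placeholder ratchet public key
    ciphertext=b""        # optional payload, left empty here
)

# Standard protobuf (de)serialization round trip.
serialized = message.SerializeToString()
assert oldmemo_pb2.OMEMOMessage.FromString(serialized) == message
```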
## Type Checks and Linting ##
python-oldmemo uses [mypy](http://mypy-lang.org/) for static type checks and both [pylint](https://pylint.pycqa.org/en/latest/) and [Flake8](https://flake8.pycqa.org/en/latest/) for linting. All checks can be run locally with the following commands:
```sh
$ pip install --upgrade mypy pylint flake8 mypy-protobuf types-protobuf
$ mypy --strict oldmemo/ setup.py
$ pylint oldmemo/ setup.py
$ flake8 oldmemo/ setup.py
```
## Getting Started ##
Refer to the documentation on [readthedocs.io](https://python-oldmemo.readthedocs.io/), or build/view it locally in the `docs/` directory. To build the docs locally, install the requirements listed in `docs/requirements.txt`, e.g. using `pip install -r docs/requirements.txt`, and then run `make html` from within the `docs/` directory. The documentation can then be found in `docs/_build/html/`.
python-oldmemo-1.0.3/docs/Makefile:
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
SPHINXPROJ = Oldmemo
SOURCEDIR = .
BUILDDIR = _build
# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
python-oldmemo-1.0.3/docs/_static/.gitkeep:

python-oldmemo-1.0.3/docs/conf.py:
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a full list see
# the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory, add these
# directories to sys.path here. If the directory is relative to the documentation root,
# use os.path.abspath to make it absolute, like shown here.
import os
import sys
this_file_path = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(this_file_path, "..", "oldmemo"))
from version import __version__ as __version
from project import project as __project
# -- Project information -----------------------------------------------------------------
project = __project["name"]
author = __project["author"]
copyright = f"{__project['year']}, {__project['author']}"
# The short X.Y version
version = __version["short"]
# The full version, including alpha/beta/rc tags
release = __version["full"]
# -- General configuration ---------------------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions coming
# with Sphinx (named "sphinx.ext.*") or your custom ones.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.viewcode",
    "sphinx.ext.napoleon",
    "sphinx_autodoc_typehints"
]
# Add any paths that contain templates here, relative to this directory.
templates_path = [ "_templates" ]
# List of patterns, relative to source directory, that match files and directories to
# ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = [ "_build", "Thumbs.db", ".DS_Store" ]
# -- Options for HTML output -------------------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for a list of
# builtin themes.
html_theme = "sphinx_rtd_theme"
# Add any paths that contain custom static files (such as style sheets) here, relative to
# this directory. They are copied after the builtin static files, so a file named
# "default.css" will overwrite the builtin "default.css".
html_static_path = [ "_static" ]
# -- Autodoc Configuration ---------------------------------------------------------------
# The following two options seem to be ignored...
autodoc_typehints = "description"
autodoc_type_aliases = { type_alias: f"{type_alias}" for type_alias in {
    "JSONType",
    "JSONObject"
} }

def autodoc_skip_member_handler(app, what, name, obj, skip, options):
    # Skip private members, i.e. those that start with double underscores but do not end in underscores
    if name.startswith("__") and not name.endswith("_"):
        return True

    # Could be achieved using exclude-members, but this is more comfy
    if name in {
        "__abstractmethods__",
        "__module__",
        "_abc_impl"
    }:
        return True

    # Skip __init__s without documentation. Those are just used for type hints.
    if name == "__init__" and obj.__doc__ is None:
        return True

    return None

def setup(app):
    app.connect("autodoc-skip-member", autodoc_skip_member_handler)
python-oldmemo-1.0.3/docs/getting_started.rst:
Getting Started
===============
No further preparation is required to get started with this backend. Create an instance of :class:`~oldmemo.oldmemo.Oldmemo` and pass it to `python-omemo <https://github.com/Syndace/python-omemo>`__ to equip it with ``eu.siacs.conversations.axolotl`` capabilities.

Users of ElementTree can use the helpers in :ref:`etree` for their XML serialization/parsing, which are available after installing `xmlschema <https://pypi.org/project/xmlschema/>`_, e.g. by using ``pip install oldmemo[xml]``. Users of a different XML framework can use the module as a reference to write their own serialization/parsing.
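A minimal sketch of the setup described above. ``StorageImpl`` and ``SessionManagerImpl`` are assumptions here, standing in for your implementations of python-omemo's ``Storage`` and ``SessionManager`` interfaces; refer to the python-omemo documentation for the exact APIs:

.. code-block:: python

    import oldmemo

    # Assumption: StorageImpl is your implementation of python-omemo's
    # Storage interface.
    storage = StorageImpl()

    # The backend instance equips python-omemo with support for the
    # eu.siacs.conversations.axolotl namespace.
    backend = oldmemo.Oldmemo(storage)

    # Assumption: SessionManagerImpl is your subclass of omemo.SessionManager
    # hooking up your XMPP library; the exact parameters of its create()
    # method are documented by python-omemo and omitted here.
    # session_manager = await SessionManagerImpl.create([backend], ...)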
python-oldmemo-1.0.3/docs/index.rst:
Oldmemo - Backend implementation of the ``eu.siacs.conversations.axolotl`` namespace for python-omemo.
======================================================================================================
Backend implementation for `python-omemo <https://github.com/Syndace/python-omemo>`__, equipping python-omemo with support for OMEMO under the namespace ``eu.siacs.conversations.axolotl`` (casually/jokingly referred to as "oldmemo").
.. toctree::

    installation
    getting_started
    migration_from_legacy
    API Documentation <oldmemo/package>
python-oldmemo-1.0.3/docs/installation.rst:
Installation
============
Install the latest release using pip (``pip install oldmemo``) or manually from source by running ``pip install .`` in the cloned repository.
python-oldmemo-1.0.3/docs/make.bat:
@ECHO OFF
pushd %~dp0
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build
set SPHINXPROJ=Oldmemo
if "%1" == "" goto help
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
goto end
:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
:end
popd
python-oldmemo-1.0.3/docs/migration_from_legacy.rst:
Migration from Legacy
=====================
This backend supports migration of legacy data from ``python-omemo<1.0.0`` + ``python-omemo-backend-signal`` setups. Other legacy setups cannot be migrated by this package.
To migrate legacy data, first implement the :class:`oldmemo.migrations.LegacyStorage` interface. This interface is very similar to the ``Storage`` class of legacy ``python-omemo`` and differs from it mostly in the lack of methods to store data and the addition of methods to delete data. You should be able to reuse most of your code.
With the :class:`~oldmemo.migrations.LegacyStorage` set up, call the :func:`oldmemo.migrations.migrate` function with both the legacy storage and the new storage to perform the migration of the legacy data. You can call this function as part of your general ``python-omemo`` setup routine; it checks itself whether a migration is required and returns instantly if not.
See the module documentation of :mod:`oldmemo.migrations` for details.
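A rough sketch of the call flow, assuming a ``LegacyStorageImpl`` implementing :class:`~oldmemo.migrations.LegacyStorage` and a ``StorageImpl`` implementing python-omemo's ``Storage`` on your side; the additional parameters accepted by :func:`~oldmemo.migrations.migrate` are omitted here, see the module documentation for the exact signature:

.. code-block:: python

    from oldmemo.migrations import migrate

    # Both storage implementations are assumed to exist on your side.
    legacy_storage = LegacyStorageImpl()
    storage = StorageImpl()

    # Safe to run on every startup: the function checks itself whether a
    # migration is required and returns instantly if not. Further parameters
    # of migrate() are omitted here.
    await migrate(legacy_storage, storage, ...)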
python-oldmemo-1.0.3/docs/oldmemo/etree.rst:
.. _etree:
Module: etree
=============
.. automodule:: oldmemo.etree
    :members:
    :special-members:
    :private-members:
    :undoc-members:
    :member-order: bysource
    :show-inheritance:
python-oldmemo-1.0.3/docs/oldmemo/migrations.rst:
Module: migrations
==================
.. automodule:: oldmemo.migrations
    :members:
    :private-members:
    :undoc-members:
    :member-order: bysource
    :show-inheritance:
python-oldmemo-1.0.3/docs/oldmemo/oldmemo.rst:
Module: oldmemo
===============
.. automodule:: oldmemo.oldmemo
    :members:
    :special-members:
    :private-members:
    :undoc-members:
    :member-order: bysource
    :show-inheritance:
python-oldmemo-1.0.3/docs/oldmemo/package.rst:
Package: oldmemo
================
.. toctree::

    Module: etree <etree>
    Module: migrations <migrations>
    Module: oldmemo <oldmemo>
python-oldmemo-1.0.3/docs/requirements.txt:
sphinx
sphinx-rtd-theme
sphinx-autodoc-typehints
python-oldmemo-1.0.3/oldmemo.proto:
syntax = "proto2";
package oldmemo;
// A modified version of SignalMessage defined in
// https://github.com/signalapp/libsignal-protocol-java/blob/master/protobuf/WhisperTextProtocol.proto
// Structure, field names, required/optional and order changed to match python-twomemo. The serialization
// should not be affected by those modifications.
message OMEMOMessage {
    required uint32 n = 2;
    required uint32 pn = 3;
    required bytes dh_pub = 1;
    optional bytes ciphertext = 4;
}
// A modified version of PreKeySignalMessage defined in
// https://github.com/signalapp/libsignal-protocol-java/blob/master/protobuf/WhisperTextProtocol.proto
// Structure, field names, required/optional and order changed to match python-twomemo. The serialization
// should not be affected by those modifications.
message OMEMOKeyExchange {
    required uint32 pk_id = 1;
    required uint32 spk_id = 6;
    required bytes ik = 3;
    required bytes ek = 2;
    required bytes message = 4; // Byte-encoding of an OMEMOMessage & authentication tag (see OMEMOAuthenticatedMessage in oldmemo/oldmemo.py)
    optional uint32 unused = 5;
}
python-oldmemo-1.0.3/oldmemo/__init__.py:
from .version import __version__
from .project import project
from .oldmemo import Oldmemo
# Fun:
# https://github.com/PyCQA/pylint/issues/6006
# https://github.com/python/mypy/issues/10198
__all__ = [  # pylint: disable=unused-variable
    # .version
    "__version__",

    # .project
    "project",

    # .oldmemo
    "Oldmemo"
]
python-oldmemo-1.0.3/oldmemo/etree.py:
import base64
from typing import Dict, Optional, Set, Tuple, cast
import xml.etree.ElementTree as ET
from omemo import (
    DeviceListDownloadFailed,
    EncryptedKeyMaterial,
    KeyExchange,
    Message,
    SenderNotFound,
    SessionManager
)
import x3dh
import xeddsa
try:
    import xmlschema
except ImportError as import_error:
    raise ImportError(
        "Optional dependency xmlschema not found. Please install xmlschema, or install this package using"
        " `pip install python-oldmemo[xml]`, to use the ElementTree-based XML serialization/parser helpers."
    ) from import_error
from .oldmemo import NAMESPACE, BundleImpl, ContentImpl, EncryptedKeyMaterialImpl, KeyExchangeImpl, StateImpl
__all__ = [  # pylint: disable=unused-variable
    "serialize_device_list",
    "parse_device_list",
    "serialize_bundle",
    "parse_bundle",
    "serialize_message",
    "parse_message"
]
NS = f"{{{NAMESPACE}}}"
DEVICE_LIST_SCHEMA = xmlschema.XMLSchema("""
""")
BUNDLE_SCHEMA = xmlschema.XMLSchema("""
""")
MESSAGE_SCHEMA = xmlschema.XMLSchema11("""
""")
def serialize_device_list(device_list: Dict[int, Optional[str]]) -> ET.Element:
    """
    Args:
        device_list: The device list to serialize. The first entry of each tuple is the device id, and
            the second entry is the optional label. Note that labels are not supported by this version
            of the specification and will not be included in the XML.

    Returns:
        The serialized device list as an XML element.
    """

    list_elt = ET.Element(f"{NS}list")

    for device_id, _ in device_list.items():
        device_elt = ET.SubElement(list_elt, f"{NS}device")
        device_elt.set("id", str(device_id))

    return list_elt
def parse_device_list(element: ET.Element) -> Dict[int, Optional[str]]:
    """
    Args:
        element: The XML element to parse the device list from.

    Returns:
        The extracted device list. The first entry of each tuple is the device id, and the second entry
        is the optional label. Note that labels are not supported by this version of the specification,
        so all labels will be set to ``None``.

    Raises:
        XMLSchemaValidationError: in case the element does not conform to the XML schema given in the
            specification.
    """

    DEVICE_LIST_SCHEMA.validate(element)

    return { int(cast(str, device_elt.get("id"))): None for device_elt in element.iter(f"{NS}device") }
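# Illustrative usage of the two helpers above (not part of the module); the
# device ids are made up, and labels are unsupported, hence None:
#
#     element = serialize_device_list({ 42: None, 1337: None })
#     assert parse_device_list(element) == { 42: None, 1337: None }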
def serialize_bundle(bundle: BundleImpl) -> ET.Element:
    """
    Args:
        bundle: The bundle to serialize.

    Returns:
        The serialized bundle as an XML element.
    """

    bundle_elt = ET.Element(f"{NS}bundle")

    # The wire format transfers the identity key in its Curve25519 form, which loses the sign bit of the
    # Ed25519 form. Following libsignal's XEdDSA convention, the sign bit is stored in the (otherwise
    # unused) highest bit of the signature instead.
    signed_pre_key_signature_mut = bytearray(bundle.bundle.signed_pre_key_sig)
    signed_pre_key_signature_mut[63] |= bundle.bundle.identity_key[31] & 0x80
    signed_pre_key_signature = bytes(signed_pre_key_signature_mut)

    ET.SubElement(
        bundle_elt,
        f"{NS}signedPreKeyPublic",
        attrib={ "signedPreKeyId": str(bundle.signed_pre_key_id) }
    ).text = base64.b64encode(StateImpl.serialize_public_key(bundle.bundle.signed_pre_key)).decode("ASCII")

    ET.SubElement(
        bundle_elt,
        f"{NS}signedPreKeySignature"
    ).text = base64.b64encode(signed_pre_key_signature).decode("ASCII")

    ET.SubElement(
        bundle_elt,
        f"{NS}identityKey"
    ).text = base64.b64encode(StateImpl.serialize_public_key(xeddsa.ed25519_pub_to_curve25519_pub(
        bundle.bundle.identity_key
    ))).decode("ASCII")

    prekeys_elt = ET.SubElement(bundle_elt, f"{NS}prekeys")

    for pre_key in bundle.bundle.pre_keys:
        ET.SubElement(
            prekeys_elt,
            f"{NS}preKeyPublic",
            attrib={ "preKeyId": str(bundle.pre_key_ids[pre_key]) }
        ).text = base64.b64encode(StateImpl.serialize_public_key(pre_key)).decode("ASCII")

    return bundle_elt
def parse_bundle(element: ET.Element, bare_jid: str, device_id: int) -> BundleImpl:
    """
    Args:
        element: The XML element to parse the bundle from.
        bare_jid: The bare JID this bundle belongs to.
        device_id: The device id of the specific device this bundle belongs to.

    Returns:
        The extracted bundle.

    Raises:
        ValueError: in case of malformed data that still passed the schema validation.
        XMLSchemaValidationError: in case the element does not conform to the XML schema given in the
            specification.
    """

    BUNDLE_SCHEMA.validate(element)

    spkp_elt = cast(ET.Element, element.find(f"{NS}signedPreKeyPublic"))
    pkp_elts = list(element.iter(f"{NS}preKeyPublic"))

    signed_pre_key_signature = base64.b64decode(cast(str, cast(ET.Element, element.find(
        f"{NS}signedPreKeySignature"
    )).text))

    # The highest bit of the signature carries the sign bit of the Ed25519 identity key; extract it to
    # restore the Ed25519 form of the key, then clear it to restore the original signature.
    identity_key = xeddsa.curve25519_pub_to_ed25519_pub(StateImpl.parse_public_key(base64.b64decode(
        cast(str, cast(ET.Element, element.find(f"{NS}identityKey")).text)
    )), bool((signed_pre_key_signature[63] >> 7) & 1))

    signed_pre_key_signature_mut = bytearray(signed_pre_key_signature)
    signed_pre_key_signature_mut[63] &= 0x7f
    signed_pre_key_signature = bytes(signed_pre_key_signature_mut)

    pre_key_ids = {
        StateImpl.parse_public_key(base64.b64decode(cast(str, pkp_elt.text))):
            int(cast(str, pkp_elt.get("preKeyId")))
        for pkp_elt
        in pkp_elts
    }

    return BundleImpl(
        bare_jid,
        device_id,
        x3dh.Bundle(
            identity_key,
            StateImpl.parse_public_key(base64.b64decode(cast(str, spkp_elt.text))),
            signed_pre_key_signature,
            frozenset(pre_key_ids.keys())
        ),
        int(cast(str, spkp_elt.get("signedPreKeyId"))),
        pre_key_ids
    )
def serialize_message(message: Message) -> ET.Element:
"""
Args:
message: The message to serialize.
Returns:
The serialized message as an XML element.
"""
assert isinstance(message.content, ContentImpl)
encrypted_elt = ET.Element(f"{NS}encrypted")
header_elt = ET.SubElement(encrypted_elt, f"{NS}header", attrib={ "sid": str(message.device_id) })
for encrypted_key_material, key_exchange in message.keys:
assert isinstance(encrypted_key_material, EncryptedKeyMaterialImpl)
key_elt = ET.SubElement(
header_elt,
f"{NS}key",
attrib={ "rid": str(encrypted_key_material.device_id) }
)
authenticated_message = encrypted_key_material.serialize()
if key_exchange is None:
key_elt.text = base64.b64encode(authenticated_message).decode("ASCII")
else:
assert isinstance(key_exchange, KeyExchangeImpl)
key_exchange_serialized, _sign_bit_set = key_exchange.serialize(authenticated_message)
key_elt.set("prekey", "true")
key_elt.text = base64.b64encode(key_exchange_serialized).decode("ASCII")
ET.SubElement(
header_elt,
f"{NS}iv"
).text = base64.b64encode(message.content.initialization_vector).decode("ASCII")
if not message.content.empty:
ET.SubElement(
encrypted_elt,
f"{NS}payload"
).text = base64.b64encode(message.content.ciphertext).decode("ASCII")
return encrypted_elt
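# For orientation, a hypothetical sketch (not part of the library) of the element shape produced by
# serialize_message above, for a single recipient key with key exchange and a non-empty payload. The
# attribute values and text contents are placeholders; real elements carry base64-encoded data.
def _example_encrypted_element_shape() -> ET.Element:
    encrypted_elt = ET.Element(f"{NS}encrypted")
    header_elt = ET.SubElement(encrypted_elt, f"{NS}header", attrib={ "sid": "123" })
    ET.SubElement(header_elt, f"{NS}key", attrib={ "rid": "456", "prekey": "true" }).text = "BASE64"
    ET.SubElement(header_elt, f"{NS}iv").text = "BASE64"
    ET.SubElement(encrypted_elt, f"{NS}payload").text = "BASE64"
    return encrypted_elt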
async def parse_message(
element: ET.Element,
sender_bare_jid: str,
own_bare_jid: str,
session_manager: SessionManager
) -> Message:
"""
Args:
element: The XML element to parse the message from.
sender_bare_jid: The bare JID of the sender.
own_bare_jid: The bare JID of the XMPP account decrypting this message, i.e. us.
session_manager: The session manager instance is required to find one piece of information that the
            oldmemo message serialization format lacks with regard to the identity key.
Returns:
The extracted message.
Raises:
        ValueError: in case there is malformed data not caught by the XML schema validation.
XMLSchemaValidationError: in case the element does not conform to the XML schema given in the
specification.
SenderNotFound: in case the public information about the sending device could not be found or is
incomplete.
Warning:
        This version of the OMEMO specification matches key material to recipients purely by device id. The
library, however, matches by bare JID and device id. Since the XML doesn't include the bare JID, the
structures expected by the library can't be filled correctly. Instead, to match the behaviour of the
specification, the bare JID of all key material included in the message is assigned to
``own_bare_jid``, i.e. our own bare JID, which achieves the desired effect of matching only on
the device id.
"""
MESSAGE_SCHEMA.validate(element)
payload_elt = element.find(f"{NS}payload")
header_elt = cast(ET.Element, element.find(f"{NS}header"))
iv_elt = header_elt.find(f"{NS}iv")
sender_device_id = int(cast(str, header_elt.get("sid")))
# The following code might seem overkill just to find the sign of the identity key. However, to make usage
# of the library as simple as possible, I believe it is worth it. Since most results are cached, this
# shouldn't significantly slow down the overall decryption flow either.
sender_device = next((
device
for device
in await session_manager.get_device_information(sender_bare_jid)
if device.device_id == sender_device_id
), None)
if sender_device is None:
try:
# If the device wasn't found, refresh the device list
await session_manager.refresh_device_list(NAMESPACE, sender_bare_jid)
except DeviceListDownloadFailed as e:
raise SenderNotFound(
"Couldn't find public information about the device which sent this message and an attempt to"
" refresh the sender's device list failed."
) from e
sender_device = next((
device
for device
in await session_manager.get_device_information(sender_bare_jid)
if device.device_id == sender_device_id
), None)
if sender_device is None:
raise SenderNotFound(
"Couldn't find public information about the device which sent this message. I.e. the device"
" either does not appear in the device list of the sending XMPP account, or the bundle of the"
" sending device could not be downloaded."
)
set_sign_bit = bool((sender_device.identity_key[31] >> 7) & 1)
keys: Set[Tuple[EncryptedKeyMaterial, Optional[KeyExchange]]] = set()
for key_elt in header_elt.iter(f"{NS}key"):
recipient_device_id = int(cast(str, key_elt.get("rid")))
content = base64.b64decode(cast(str, key_elt.text))
key_exchange: Optional[KeyExchangeImpl] = None
authenticated_message: bytes
        # The prekey attribute is an XML boolean; only the literals "true" and "1" indicate a key exchange
        if key_elt.get("prekey") in [ "true", "1" ]:
key_exchange, authenticated_message = KeyExchangeImpl.parse(content, set_sign_bit)
else:
authenticated_message = content
encrypted_key_material = EncryptedKeyMaterialImpl.parse(
authenticated_message,
own_bare_jid,
recipient_device_id
)
keys.add((encrypted_key_material, key_exchange))
return Message(
NAMESPACE,
sender_bare_jid,
sender_device_id,
(
ContentImpl.make_empty()
if payload_elt is None or iv_elt is None
else ContentImpl(
base64.b64decode(cast(str, payload_elt.text)),
base64.b64decode(cast(str, iv_elt.text))
)
),
frozenset(keys)
)
python-oldmemo-1.0.3/oldmemo/migrations.py 0000664 0000000 0000000 00000066032 14332461332 0020660 0 ustar 00root root 0000000 0000000 # This import from future (theoretically) enables sphinx_autodoc_typehints to handle type aliases better
from __future__ import annotations # pylint: disable=unused-variable
from abc import ABC, abstractmethod
import base64
from typing import Awaitable, Callable, Dict, List, Optional, Set
from typing_extensions import TypedDict
import doubleratchet
import omemo
from omemo.session import Initiation
import x3dh
import x3dh.migrations
import x3dh.types
import xeddsa
from .oldmemo import NAMESPACE, BundleImpl, StateImpl
__all__ = [ # pylint: disable=unused-variable
"OwnData",
"Trust",
"Session",
"BoundOTPK",
"StateSuper",
"State",
"LegacyStorage",
"migrate"
]
class OwnData(TypedDict):
# pylint: disable=invalid-name
"""
This TypedDict describes how the own data was expected to be returned by the corresponding legacy storage
method.
"""
own_bare_jid: str
own_device_id: int
class Trust(TypedDict):
# pylint: disable=invalid-name
"""
This TypedDict describes how trust information was expected to be returned by the corresponding legacy
storage method.
"""
key: str
trusted: bool
class Session(TypedDict):
# pylint: disable=invalid-name
"""
This TypedDict describes how session instances (more precisely ``ExtendedDoubleRatchet`` instances) were
serialized in the pre-stable serialization format.
"""
super: doubleratchet.JSONObject
other_ik: str
class BoundOTPK(TypedDict):
# pylint: disable=invalid-name
"""
Used as part of the legacy state format to represent a bound pre key.
"""
otpk: str
id: int
class StateSuper(TypedDict):
# pylint: disable=invalid-name
"""
Used as part of the legacy state format to represent the super class of the ``X3DHDoubleRatchet``.
"""
super: x3dh.JSONObject
spk_id: int
spk_pub: Optional[str]
otpk_id_counter: int
otpk_ids: Dict[str, int]
class State(TypedDict):
# pylint: disable=invalid-name
"""
This TypedDict describes how the state (more precisely ``X3DHDoubleRatchet``) was serialized in the
pre-stable serialization format. Note that the ``pk_messages`` entry has been omitted from this type since
it is not needed for migration. The same applies to the ``version`` field, which apparently never had any
relevance.
"""
super: StateSuper
bound_otpks: Dict[str, Dict[int, BoundOTPK]]
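# An illustrative sketch, not part of the library: the rough shape of a legacy state as described by the
# TypedDicts above. All values are placeholders; a real state carries the serialized X3DH state in the
# inner ``super`` entry and base64-encoded pre key data in ``bound_otpks``.
def _example_legacy_state() -> State:
    return {
        "super": {
            "super": {},  # placeholder for the serialized x3dh.JSONObject
            "spk_id": 1,
            "spk_pub": None,
            "otpk_id_counter": 0,
            "otpk_ids": {}
        },
        "bound_otpks": { "alice@example.org": { 42: { "otpk": "BASE64", "id": 42 } } }
    }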
class LegacyStorage(ABC):
# pylint: disable=invalid-name
"""
This is a slightly modified copy of the storage interface used by legacy (i.e. pre-1.0.0) python-omemo.
All methods related to storing values have been removed. A few methods for deleting values have been added
instead. Methods related to efficient bulk loading have been removed as well.
"""
@abstractmethod
async def loadOwnData(self) -> Optional[OwnData]:
"""
Returns:
The own data stored in this instance, if any.
"""
@abstractmethod
async def deleteOwnData(self) -> None:
"""
Delete the own data stored in this instance, if any. Do not raise if there is none.
"""
@abstractmethod
async def loadState(self) -> Optional[State]:
"""
Returns:
The state stored in this instance, if any.
"""
@abstractmethod
async def deleteState(self) -> None:
"""
Delete the state stored in this instance, if any. Do not raise if there is none.
"""
@abstractmethod
async def loadSession(self, bare_jid: str, device_id: int) -> Optional[Session]:
"""
Args:
bare_jid: The bare JID.
device_id: The device id.
Returns:
The session stored in this instance for the given bare JID + device id, if any.
"""
@abstractmethod
async def deleteSession(self, bare_jid: str, device_id: int) -> None:
"""
Delete the session stored in this instance for the given bare JID + device id, if any. Do not raise if
there is none.
Args:
bare_jid: The bare JID.
device_id: The device id.
"""
@abstractmethod
async def loadActiveDevices(self, bare_jid: str) -> Optional[List[int]]:
"""
Args:
bare_jid: The bare JID.
Returns:
The list of active devices stored in this instance for the given bare JID, if any.
Note:
            It doesn't matter whether you return ``None`` or an empty list if no list is stored for this bare
JID.
"""
@abstractmethod
async def loadInactiveDevices(self, bare_jid: str) -> Optional[Dict[int, int]]:
"""
Args:
bare_jid: The bare JID.
Returns:
A mapping of inactive devices stored in this instance for the given bare JID, if any. The mapping
maps from device id to the timestamp of last activity (seconds since epoch).
Note:
            It doesn't matter whether you return ``None`` or an empty dictionary if no dictionary is stored
for this bare JID.
"""
@abstractmethod
async def deleteActiveDevices(self, bare_jid: str) -> None:
"""
Delete the list of active devices stored in this instance for the given bare JID, if any. Do not raise
if there is none.
Args:
bare_jid: The bare JID.
"""
@abstractmethod
async def deleteInactiveDevices(self, bare_jid: str) -> None:
"""
Delete the dictionary of inactive devices stored in this instance for the given bare JID, if any. Do
not raise if there is none.
Args:
bare_jid: The bare JID.
"""
@abstractmethod
async def loadTrust(self, bare_jid: str, device_id: int) -> Optional[Trust]:
"""
Args:
bare_jid: The bare JID.
device_id: The device id.
Returns:
The trust information stored in this instance for the given bare JID + device id, if any.
"""
@abstractmethod
async def deleteTrust(self, bare_jid: str, device_id: int) -> None:
"""
Delete the trust information stored in this instance for the given bare JID + device id, if any. Do
not raise if there is none.
Args:
bare_jid: The bare JID.
device_id: The device id.
"""
@abstractmethod
async def listJIDs(self) -> Optional[List[str]]:
"""
Returns:
A list of all bare JIDs that have associated device lists stored in the storage, if any. For a
bare JID to be included in the list, it doesn't matter if the associated device lists are empty or
not. Return ``None`` if the list of bare JIDs has been deleted, do not return an empty list in
that case.
"""
@abstractmethod
async def deleteJIDList(self) -> None:
"""
Delete the list of bare JIDs as returned by :meth:`listJIDs`, if it exists. Do not raise if it
doesn't.
"""
async def migrate(
legacy_storage: LegacyStorage,
storage: omemo.Storage,
trusted_trust_level_name: str,
undecided_trust_level_name: str,
untrusted_trust_level_name: str,
download_bundle: Callable[[str, int], Awaitable[BundleImpl]]
) -> None:
"""
Migrate the data from a legacy storage instance to the current storage format. This function is
idempotent, which means that you can run it without checking whether migrations are required or not. If
migrations are not required, the function will do nothing. This function also makes sure that only safely
migrated data is deleted from the legacy storage, such that migration failure at any point leaves both
storages in a consistent state.
Args:
legacy_storage: The legacy storage instance to migrate the data from. This assumes that the storage
was used with legacy (pre-1.0.0) python-omemo in conjunction with python-omemo-backend-signal. If
the storage was used with a backend other than python-omemo-backend-signal, automated migration is
not possible with this function.
storage: The storage implementation to migrate the data to.
trusted_trust_level_name: The legacy storage format stored trust as a boolean, i.e. there were only
trusted or untrusted devices. This is the name of the custom trust level to store when a trusted
device is migrated.
undecided_trust_level_name: The name of the custom trust level to store when a device without any
associated trust information is migrated.
untrusted_trust_level_name: The name of the custom trust level to store when an untrusted device is
migrated.
download_bundle: A function which downloads the bundle of the given bare JID + device id. May raise
:class:`~omemo.session_manager.BundleDownloadFailed` (or a subclass thereof) to indicate failure.
"""
# python-omemo SessionManager
# - f"/devices/{bare_jid}/{device_id}/namespaces" List[str]
# - f"/devices/{bare_jid}/{device_id}/active" Dict[str, bool]
# - f"/devices/{bare_jid}/{device_id}/label" Optional[str]
# - f"/devices/{bare_jid}/{device_id}/identity_key" bytes
# - f"/devices/{bare_jid}/list" List[int]
# - f"/trust/{bare_jid}/{base64.urlsafe_b64encode(identity_key).decode('ASCII')}" str
# - "/own_device_id" int
# python-omemo IdentityKeyPair
# - "/ikp/is_seed" bool
# - "/ikp/key" 32 bytes
# python-oldmemo
# - f"/{self.namespace}/x3dh" x3dh.JSONObject
# - f"/{self.namespace}/{session.bare_jid}/{session.device_id}/initiation" str
# - f"/{self.namespace}/{session.bare_jid}/{session.device_id}/key_exchange/identity_key" bytes
# - f"/{self.namespace}/{session.bare_jid}/{session.device_id}/key_exchange/ephemeral_key" bytes
# - f"/{self.namespace}/{session.bare_jid}/{session.device_id}/key_exchange/signed_pre_key" bytes
# - f"/{self.namespace}/{session.bare_jid}/{session.device_id}/key_exchange/signed_pre_key_id" int
# - f"/{self.namespace}/{session.bare_jid}/{session.device_id}/key_exchange/pre_key" bytes
# - f"/{self.namespace}/{session.bare_jid}/{session.device_id}/key_exchange/pre_key_id" int
# - f"/{self.namespace}/{session.bare_jid}/{session.device_id}/associated_data" bytes
# - f"/{self.namespace}/{session.bare_jid}/{session.device_id}/double_ratchet" doubleratchet.JSONObject
# - f"/{self.namespace}/{session.bare_jid}/{session.device_id}/confirmed" bool
# - f"/{self.namespace}/bare_jids" List[str]
# - f"/{self.namespace}/{session.bare_jid}/device_ids" List[int]
# - f"/{self.namespace}/signed_pre_key_ids" Dict[str, int]
# - f"/{self.namespace}/pre_key_ids" Dict[str, int]
# - f"/{self.namespace}/pre_key_id_counter" int
# The own data is the very first thing to be loaded and the very last thing to be deleted.
own_data = await legacy_storage.loadOwnData()
if own_data is None:
return
# The own bare JID isn't part of the new storage format.
await storage.store("/own_device_id", own_data["own_device_id"])
# The legacy state format contains both the X3DH state and information required for the migration of
# sessions
state = await legacy_storage.loadState()
if state is None:
return
# Migrate the X3DH state
await storage.store(f"/{NAMESPACE}/x3dh", state["super"]["super"])
# Now, load and migrate the own identity key since it's needed for session migrations later on. The own
# identity key is part of the X3DH state. Parse the X3DH state and extract the identity key.
base_state_model, _ = x3dh.migrations.parse_base_state_model(state["super"]["super"])
# The base state model contains the secret and the secret type of the identity key
own_identity_key_secret = base_state_model.identity_key.secret
own_identity_key_secret_type = base_state_model.identity_key.secret_type
# Migrate the secret
await storage.store_bytes("/ikp/key", own_identity_key_secret)
# The type of secret should be x3dh.types.SecretType.PRIV, since that's the only format supported by
# legacy python-omemo + python-omemo-backend-signal, but it doesn't hurt to check the type anyway.
await storage.store("/ikp/is_seed", own_identity_key_secret_type is x3dh.types.SecretType.SEED)
# From the secret, we can calculate the Ed25519 public key
own_identity_key = (
xeddsa.seed_to_ed25519_pub(own_identity_key_secret)
if own_identity_key_secret_type is x3dh.types.SecretType.SEED
else xeddsa.priv_to_ed25519_pub(own_identity_key_secret)
)
# Prepare the identity key in the serialized format required to build the associated data later as part of
# session migration
own_identity_key_serialized = StateImpl.serialize_public_key(xeddsa.ed25519_pub_to_curve25519_pub(
own_identity_key
))
# With the state loaded and identity key prepared, begin with the migration of information related to
# other devices, i.e. device information including trust and sessions.
bare_jids = await legacy_storage.listJIDs()
if bare_jids is not None:
for bare_jid in bare_jids:
# Load active and inactive devices of this bare JID
active_devices = await legacy_storage.loadActiveDevices(bare_jid)
inactive_devices = await legacy_storage.loadInactiveDevices(bare_jid)
# The timestamp on the inactive devices is not needed
active_device_ids = set() if active_devices is None else set(active_devices)
inactive_device_ids = set() if inactive_devices is None else set(inactive_devices.keys())
# Migrate general device information
migrated_devices: Set[int] = set()
for device_id in active_device_ids | inactive_device_ids:
active = device_id in active_device_ids
# At this point, there are two options: either, trust information for the device exists,
# including the identity key of the device in Curve25519 format, or it doesn't. Either way,
# there is a problem. The new format expects the identity key in Ed25519 format. To convert a
# Curve25519 key to Ed25519, the missing sign bit has to be provided. The sign bit can be
# fetched from the signed pre key signature included in the public bundle of the device. That
# means, if the trust information including the Curve25519 identity is present, the bundle has
# to be fetched for the sign bit. If the trust information is not present, the bundle has to
# be fetched for the whole identity key. That means, either way, we need the bundle here.
                # The single exception is our own device, since our own identity key is available.
identity_key: bytes
if bare_jid == own_data["own_bare_jid"] and device_id == own_data["own_device_id"]:
identity_key = own_identity_key
else:
try:
bundle = await download_bundle(bare_jid, device_id)
except omemo.BundleDownloadFailed:
# A device whose bundle can't be fetched cannot be migrated. Delete all data related
# to the device and skip to the next one.
await legacy_storage.deleteSession(bare_jid, device_id)
await legacy_storage.deleteTrust(bare_jid, device_id)
continue
# The BundleImpl structure contains the identity in Ed25519 form, thus no further
# conversion is required. The bundle parsing code has already taken care of extracting the
# sign bit from the signed pre key signature and converting the key from Curve25519
# format.
identity_key = bundle.identity_key
# Load the trust information stored for this device.
trust = await legacy_storage.loadTrust(bare_jid, device_id)
# If trust information is available, verify that the identity key stored for the device
# matches the one just fetched from the bundle
if trust is not None:
legacy_storage_identity_key = base64.b64decode(trust["key"])
if xeddsa.ed25519_pub_to_curve25519_pub(identity_key) != legacy_storage_identity_key:
# If the stored identity key doesn't match the bundle identity key, the device is not
# migrated. Delete all data related to the device and skip to the next one.
await legacy_storage.deleteSession(bare_jid, device_id)
await legacy_storage.deleteTrust(bare_jid, device_id)
continue
# Select the custom trust level name to assign to the device upon migration
trust_level_name = undecided_trust_level_name
if trust is not None:
if trust["trusted"]:
trust_level_name = trusted_trust_level_name
else:
trust_level_name = untrusted_trust_level_name
# All available data about this device has been gathered, migrate it
await storage.store(f"/devices/{bare_jid}/{device_id}/namespaces", [ NAMESPACE ])
await storage.store(f"/devices/{bare_jid}/{device_id}/active", { NAMESPACE: active })
await storage.store(f"/devices/{bare_jid}/{device_id}/label", None)
await storage.store_bytes(f"/devices/{bare_jid}/{device_id}/identity_key", identity_key)
await storage.store(
f"/trust/{bare_jid}/{base64.urlsafe_b64encode(identity_key).decode('ASCII')}",
trust_level_name
)
# The device has been migrated successfully, add it to the corresponding set but don't delete
# its data yet.
migrated_devices.add(device_id)
# Write the list of successfully migrated devices
await storage.store(f"/devices/{bare_jid}/list", list(migrated_devices))
# Look for sessions with the successfully migrated devices and migrate those too
migrated_sessions: Set[int] = set()
for device_id in migrated_devices:
session = await legacy_storage.loadSession(bare_jid, device_id)
if session is None:
continue
legacy_storage_identity_key = base64.b64decode(session["other_ik"])
double_ratchet = session["super"]
# Same situation as above: the identity key is only included in Curve25519 format, but is
# needed in Ed25519 format to become part of the key exchange information. Luckily, we have
# already requested the bundle above and stored the identity key in Ed25519 format. Load it
# here (from cache) and make sure it matches the identity key stored in legacy storage.
identity_key = (await storage.load_bytes(
f"/devices/{bare_jid}/{device_id}/identity_key"
)).from_just()
if xeddsa.ed25519_pub_to_curve25519_pub(identity_key) != legacy_storage_identity_key:
# If the stored identity key doesn't match the bundle identity key, the session is not
# migrated. Delete all data related to it and skip to the next one.
await legacy_storage.deleteSession(bare_jid, device_id)
continue
# If the identity keys match, store the Ed25519 one.
await storage.store_bytes(
f"/{NAMESPACE}/{bare_jid}/{device_id}/key_exchange/identity_key",
identity_key
)
# Prepare the serialized identity as needed to build the associated data byte string
identity_key_serialized = StateImpl.serialize_public_key(
xeddsa.ed25519_pub_to_curve25519_pub(identity_key)
)
# A bunch of information stored in the new storage format wasn't directly available in the
# legacy format, namely which party initiated the session, whether the session was confirmed
# via the passive party responding, and the key exchange information that was used to build
# the session. All of this information is used for protocol stability reasons regarding the
# initial X3DH key agreement. Some of this information can be extracted/guessed from
# "bound pre keys" that were used by the legacy format and stored with the state. Bound pre
# keys are pre keys associated with passively initiated sessions upon creation, and deleted at
# a user-defined point in the future, hopefully as soon as the session has been fully
# confirmed and no more key exchange messages are to be expected. All implementations I know
# of didn't delete bound pre keys at all, which is good for the migration. The presence of a
# bound pre key indicates that a session was passively initiated and allows us to restore at
# least part of the key exchange information. Sessions without an associated bound pre key are
# migrated as actively initiated and confirmed, which is the constellation of parameters that
# makes sure the missing key exchange information is never accessed.
bound_otpk = state["bound_otpks"].get(bare_jid, {}).get(device_id)
if bound_otpk is None:
# Set the initiation to active as explained above.
await storage.store(
f"/{NAMESPACE}/{bare_jid}/{device_id}/initiation",
Initiation.ACTIVE.name
)
                    # Store the associated data in the format expected for actively initiated sessions.
await storage.store_bytes(
f"/{NAMESPACE}/{bare_jid}/{device_id}/associated_data",
own_identity_key_serialized + identity_key_serialized
)
# Without a bound pre key, the pre key and pre key id fields of the key exchange can't be
# set correctly. The KeyExchangeImpl class detects and handles the following filler values
# though:
await storage.store(f"/{NAMESPACE}/{bare_jid}/{device_id}/key_exchange/pre_key_id", -1)
await storage.store_bytes(
f"/{NAMESPACE}/{bare_jid}/{device_id}/key_exchange/pre_key",
b"\x00" * 32
)
else:
# Set the initiation to passive as explained above.
await storage.store(
f"/{NAMESPACE}/{bare_jid}/{device_id}/initiation",
Initiation.PASSIVE.name
)
                    # Store the associated data in the format expected for passively initiated sessions.
await storage.store_bytes(
f"/{NAMESPACE}/{bare_jid}/{device_id}/associated_data",
identity_key_serialized + own_identity_key_serialized
)
# With a bound pre key, we can at least fill the pre key and pre key id fields of the key
# exchange information correctly.
await storage.store(
f"/{NAMESPACE}/{bare_jid}/{device_id}/key_exchange/pre_key_id",
bound_otpk["id"]
)
await storage.store_bytes(
f"/{NAMESPACE}/{bare_jid}/{device_id}/key_exchange/pre_key",
base64.b64decode(bound_otpk["otpk"])
)
# All sessions are marked as confirmed. This makes sure that the code never attempts to send
# the incomplete key exchange information.
await storage.store(f"/{NAMESPACE}/{bare_jid}/{device_id}/confirmed", True)
# The serialized double ratchet just has to be assigned to the correct key.
await storage.store(f"/{NAMESPACE}/{bare_jid}/{device_id}/double_ratchet", double_ratchet)
# The ephemeral key, signed pre key and signed pre key ids used during key exchange are
# unknown. The values in storage still have to be filled though. The KeyExchangeImpl class
# detects and handles the following filler values correctly:
await storage.store_bytes(
f"/{NAMESPACE}/{bare_jid}/{device_id}/key_exchange/ephemeral_key",
b"\x00" * 32
)
await storage.store(f"/{NAMESPACE}/{bare_jid}/{device_id}/key_exchange/signed_pre_key_id", -1)
await storage.store_bytes(
f"/{NAMESPACE}/{bare_jid}/{device_id}/key_exchange/signed_pre_key",
b"\x00" * 32
)
# The session was migrated successfully, add it to the corresponding set but don't delete its
# data yet.
migrated_sessions.add(device_id)
# Write the list of successfully migrated sessions
await storage.store(f"/{NAMESPACE}/{bare_jid}/device_ids", list(migrated_sessions))
# Migration completed for this bare JID, delete all legacy data to avoid double migration
await legacy_storage.deleteActiveDevices(bare_jid)
await legacy_storage.deleteInactiveDevices(bare_jid)
for device_id in migrated_devices:
await legacy_storage.deleteTrust(bare_jid, device_id)
for device_id in migrated_sessions:
await legacy_storage.deleteSession(bare_jid, device_id)
# All bare JIDs have been migrated, write the list of bare JIDs...
await storage.store(f"/{NAMESPACE}/bare_jids", list(bare_jids))
# ...and delete the list of JIDs
await legacy_storage.deleteJIDList()
# What remains to be migrated are (signed) pre key id mappings and counters.
# The legacy format didn't keep the old signed pre key around, so there is at most the current signed pre
# key id to migrate, which is optional in the legacy storage format too.
if state["super"]["spk_pub"] is not None:
await storage.store(
f"/{NAMESPACE}/signed_pre_key_ids",
{ state["super"]["spk_pub"]: state["super"]["spk_id"] }
)
# The pre key id mapping and id counter are already in the format/types required by the new storage
# format, thus it's simply a matter of migrating them to their new keys.
await storage.store(f"/{NAMESPACE}/pre_key_ids", state["super"]["otpk_ids"])
await storage.store(f"/{NAMESPACE}/pre_key_id_counter", state["super"]["otpk_id_counter"])
# Finally delete the legacy state and the own data.
await legacy_storage.deleteState()
await legacy_storage.deleteOwnData()
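# A minimal usage sketch, not part of the library: invoking migrate with an application-provided legacy
# storage implementation, new-format storage and bundle download coroutine. The trust level names are
# placeholders chosen by the application; all arguments here are assumptions for illustration.
async def _example_migrate(
    legacy_storage: LegacyStorage,
    storage: omemo.Storage,
    download_bundle: Callable[[str, int], Awaitable[BundleImpl]]
) -> None:
    await migrate(
        legacy_storage,
        storage,
        trusted_trust_level_name="trusted",
        undecided_trust_level_name="undecided",
        untrusted_trust_level_name="untrusted",
        download_bundle=download_bundle
    )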
python-oldmemo-1.0.3/oldmemo/oldmemo.py 0000664 0000000 0000000 00000165201 14332461332 0020136 0 ustar 00root root 0000000 0000000 # This import from future (theoretically) enables sphinx_autodoc_typehints to handle type aliases better
from __future__ import annotations # pylint: disable=unused-variable
import base64
import secrets
from typing import Dict, NamedTuple, Optional, Tuple, cast
from typing_extensions import Final
from cryptography.exceptions import InvalidTag
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
import doubleratchet
from doubleratchet.recommended import (
aead_aes_hmac,
diffie_hellman_ratchet_curve25519,
HashFunction,
kdf_hkdf,
kdf_separate_hmacs
)
from doubleratchet.recommended.crypto_provider_impl import CryptoProviderImpl
import google.protobuf.message
import xeddsa
import x3dh
import x3dh.identity_key_pair
from omemo.backend import Backend, DecryptionFailed, KeyExchangeFailed
from omemo.bundle import Bundle
from omemo.identity_key_pair import IdentityKeyPair, IdentityKeyPairSeed
from omemo.message import Content, EncryptedKeyMaterial, PlainKeyMaterial, KeyExchange
from omemo.session import Initiation, Session
from omemo.storage import Storage
from omemo.types import JSONType
# https://github.com/PyCQA/pylint/issues/4987
from .oldmemo_pb2 import ( # pylint: disable=no-name-in-module
OMEMOKeyExchange,
OMEMOMessage
)
__all__ = [ # pylint: disable=unused-variable
"Oldmemo",
"NAMESPACE",
"AEADImpl",
"BundleImpl",
"ContentImpl",
"DoubleRatchetImpl",
"EncryptedKeyMaterialImpl",
"KeyExchangeImpl",
"MessageChainKDFImpl",
"PlainKeyMaterialImpl",
"RootChainKDFImpl",
"SessionImpl",
"StateImpl"
]
NAMESPACE: Final = "eu.siacs.conversations.axolotl"
class RootChainKDFImpl(kdf_hkdf.KDF):
"""
The root chain KDF implementation used by this version of the specification.
"""
@staticmethod
def _get_hash_function() -> HashFunction:
return HashFunction.SHA_256
@staticmethod
def _get_info() -> bytes:
# https://github.com/signalapp/libsignal-protocol-java/blob/fde96d22004f32a391554e4991e4e1f0a14c2d50/
# java/src/main/java/org/whispersystems/libsignal/ratchet/RootKey.java#L35
return "WhisperRatchet".encode("ASCII")
class MessageChainKDFImpl(kdf_separate_hmacs.KDF):
"""
The message chain KDF implementation used by this version of the specification.
"""
@staticmethod
def _get_hash_function() -> HashFunction:
return HashFunction.SHA_256
class OMEMOAuthenticatedMessage(NamedTuple):
# pylint: disable=invalid-name
"""
The `urn:xmpp:omemo:2` version of the specification uses a protobuf structure called
``OMEMOAuthenticatedMessage`` to hold and transfer a serialized :class:`OMEMOMessage` in network format
and an authentication tag. This version of the specification instead uses simple concatenation of the two
byte strings. This class mocks the protobuf structure API to make maintaining this backend as a fork of
python-twomemo easier and to make the code cleaner.
"""
mac: bytes
message: bytes
def SerializeToString(self, deterministic: bool = True) -> bytes:
"""
Args:
deterministic: This parameter only exists to mimic protobuf's structure API and must be ``True``.
Returns:
The contents of this instance serialized as a byte string.
"""
assert deterministic
return self.message + self.mac
@staticmethod
def FromString(serialized: bytes) -> OMEMOAuthenticatedMessage:
"""
Args:
serialized: A serialized instance as returned by :meth:`SerializeToString`.
Returns:
An instance with the data restored from the serialized input.
"""
return OMEMOAuthenticatedMessage(
mac=serialized[-AEADImpl.AUTHENTICATION_TAG_TRUNCATED_LENGTH:],
message=serialized[:-AEADImpl.AUTHENTICATION_TAG_TRUNCATED_LENGTH]
)
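# A minimal sketch, not part of the library: the concatenation format appends the truncated 8-byte
# authentication tag to the serialized message, so parsing simply splits that boundary off the end again.
def _example_authenticated_message_roundtrip() -> None:
    original = OMEMOAuthenticatedMessage(mac=b"\x00" * 8, message=b"\x33dummy")
    serialized = original.SerializeToString()
    assert serialized == original.message + original.mac
    assert OMEMOAuthenticatedMessage.FromString(serialized) == original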
class AEADImpl(aead_aes_hmac.AEAD):
"""
The AEAD used by this backend as part of the Double Ratchet. While this implementation derives from
:class:`doubleratchet.recommended.aead_aes_hmac.AEAD`, it actually doesn't use any of its code. This is
due to a minor difference in the way the associated data is built. The derivation only has symbolic value.
Can only be used with :class:`DoubleRatchetImpl`, due to the reliance on a certain structure of the
associated data.
"""
# https://github.com/signalapp/libsignal-protocol-java/blob/fde96d22004f32a391554e4991e4e1f0a14c2d50/java/
# src/main/java/org/whispersystems/libsignal/protocol/SignalMessage.java#L28
AUTHENTICATION_TAG_TRUNCATED_LENGTH: Final = 8
@staticmethod
def _get_hash_function() -> HashFunction:
return HashFunction.SHA_256
@staticmethod
def _get_info() -> bytes:
# https://github.com/signalapp/libsignal-protocol-java/blob/fde96d22004f32a391554e4991e4e1f0a14c2d50/
# java/src/main/java/org/whispersystems/libsignal/ratchet/ChainKey.java#L48
return "WhisperMessageKeys".encode("ASCII")
@classmethod
async def encrypt(cls, plaintext: bytes, key: bytes, associated_data: bytes) -> bytes:
hash_function = cls._get_hash_function()
encryption_key, authentication_key, iv = await cls.__derive(key, hash_function, cls._get_info())
# Encrypt the plaintext using AES-256 (the 256 bit are implied by the key size) in CBC mode and the
# previously created key and IV, after padding it with PKCS#7
ciphertext = await CryptoProviderImpl.aes_cbc_encrypt(encryption_key, iv, plaintext)
# Parse the associated data
associated_data, header = cls.__parse_associated_data(associated_data)
# Build an OMEMOMessage including the header and the ciphertext
omemo_message = b"\x33" + OMEMOMessage(
n=header.sending_chain_length,
pn=header.previous_sending_chain_length,
dh_pub=StateImpl.serialize_public_key(header.ratchet_pub),
ciphertext=ciphertext
).SerializeToString()
# Calculate the authentication tag over the associated data and the OMEMOMessage, truncate the
# authentication tag to AUTHENTICATION_TAG_TRUNCATED_LENGTH bytes
auth = (await CryptoProviderImpl.hmac_calculate(
authentication_key,
hash_function,
associated_data + omemo_message
))[:AEADImpl.AUTHENTICATION_TAG_TRUNCATED_LENGTH]
# Serialize the authentication tag with the OMEMOMessage in an OMEMOAuthenticatedMessage.
return OMEMOAuthenticatedMessage(mac=auth, message=omemo_message).SerializeToString()
@classmethod
async def decrypt(cls, ciphertext: bytes, key: bytes, associated_data: bytes) -> bytes:
hash_function = cls._get_hash_function()
decryption_key, authentication_key, iv = await cls.__derive(key, hash_function, cls._get_info())
# Parse the associated data
associated_data, header = cls.__parse_associated_data(associated_data)
# Parse the ciphertext as an OMEMOAuthenticatedMessage
try:
omemo_authenticated_message = OMEMOAuthenticatedMessage.FromString(ciphertext)
except google.protobuf.message.DecodeError as e:
raise doubleratchet.DecryptionFailedException() from e
# Calculate and verify the authentication tag
new_auth = (await CryptoProviderImpl.hmac_calculate(
authentication_key,
hash_function,
associated_data + omemo_authenticated_message.message
))[:AEADImpl.AUTHENTICATION_TAG_TRUNCATED_LENGTH]
if new_auth != omemo_authenticated_message.mac:
raise doubleratchet.aead.AuthenticationFailedException("Authentication tags do not match.")
# Parse the OMEMOMessage contained in the OMEMOAuthenticatedMessage
if len(omemo_authenticated_message.message) < 1 or omemo_authenticated_message.message[0] != 0x33:
raise doubleratchet.DecryptionFailedException("Version byte missing.")
try:
omemo_message = OMEMOMessage.FromString(omemo_authenticated_message.message[1:])
except google.protobuf.message.DecodeError as e:
raise doubleratchet.DecryptionFailedException() from e
# Make sure that the headers match as a little additional consistency check
if header != doubleratchet.Header(
StateImpl.parse_public_key(omemo_message.dh_pub),
omemo_message.pn,
omemo_message.n
):
raise doubleratchet.aead.AuthenticationFailedException("Header mismatch.")
# Decrypt the plaintext using AES-256 (the 256 bit are implied by the key size) in CBC mode and the
# previously created key and IV, and unpad the resulting plaintext with PKCS#7
return await CryptoProviderImpl.aes_cbc_decrypt(decryption_key, iv, omemo_message.ciphertext)
@staticmethod
async def __derive(key: bytes, hash_function: HashFunction, info: bytes) -> Tuple[bytes, bytes, bytes]:
# Prepare the salt, a zero-filled byte sequence with the size of the hash digest
salt = b"\x00" * hash_function.hash_size
# Derive 80 bytes
hkdf_out = await CryptoProviderImpl.hkdf_derive(
hash_function=hash_function,
length=80,
salt=salt,
info=info,
key_material=key
)
        # Split these 80 bytes into a 32-byte encryption key, a 32-byte authentication key and a 16-byte IV
return hkdf_out[:32], hkdf_out[32:64], hkdf_out[64:]
@staticmethod
def __parse_associated_data(associated_data: bytes) -> Tuple[bytes, doubleratchet.Header]:
"""
Parse the associated data as built by :meth:`DoubleRatchetImpl._build_associated_data`.
Args:
associated_data: The associated data.
Returns:
The original associated data and the header used to build it.
Raises:
DecryptionFailedException: if the data is malformed.
"""
associated_data_length = StateImpl.IDENTITY_KEY_ENCODING_LENGTH * 2
try:
omemo_message = OMEMOMessage.FromString(associated_data[associated_data_length:])
except google.protobuf.message.DecodeError as e:
raise doubleratchet.DecryptionFailedException() from e
associated_data = associated_data[:associated_data_length]
return associated_data, doubleratchet.Header(
StateImpl.parse_public_key(omemo_message.dh_pub),
omemo_message.pn,
omemo_message.n
)
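# A minimal sketch, not part of the library: the 32/32/16 split applied to the 80 bytes of HKDF output in
# AEADImpl.__derive above, shown on a dummy buffer.
def _example_hkdf_output_split() -> None:
    hkdf_out = bytes(range(80))  # stand-in for the 80 bytes of HKDF output
    encryption_key, authentication_key, iv = hkdf_out[:32], hkdf_out[32:64], hkdf_out[64:]
    assert len(encryption_key) == 32 and len(authentication_key) == 32 and len(iv) == 16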
class DoubleRatchetImpl(doubleratchet.DoubleRatchet):
"""
The Double Ratchet implementation used by this version of the specification.
"""
# https://github.com/signalapp/libsignal-protocol-java/blob/fde96d22004f32a391554e4991e4e1f0a14c2d50/java/
# src/main/java/org/whispersystems/libsignal/ratchet/ChainKey.java#L20
MESSAGE_CHAIN_CONSTANT: Final = b"\x02\x01"
@staticmethod
def _build_associated_data(associated_data: bytes, header: doubleratchet.Header) -> bytes:
return associated_data + OMEMOMessage(
n=header.sending_chain_length,
pn=header.previous_sending_chain_length,
dh_pub=StateImpl.serialize_public_key(header.ratchet_pub)
).SerializeToString()
class StateImpl(x3dh.BaseState):
"""
The X3DH state implementation used by this version of the specification.
"""
# https://github.com/signalapp/libsignal-protocol-java/blob/fde96d22004f32a391554e4991e4e1f0a14c2d50/java/
# src/main/java/org/whispersystems/libsignal/ratchet/RatchetingSession.java#L132
INFO: Final = "WhisperText".encode("ASCII")
IDENTITY_KEY_ENCODING_LENGTH: Final = 33 # One byte constant + 32 bytes key
@staticmethod
def _encode_public_key(key_format: x3dh.IdentityKeyFormat, pub: bytes) -> bytes:
# python-omemo uses Ed25519 for the identity key format, while the 0.3.0 specification uses Curve25519
# encoding. This is one of the places where this difference matters, since the identity key will be
# passed in Ed25519 format, but required to be encoded as Curve25519. All keys but the identity key
# are fixed to Curve25519 format anyway, thus the following check should handle the compatibility:
if key_format is x3dh.IdentityKeyFormat.ED_25519:
pub = xeddsa.ed25519_pub_to_curve25519_pub(pub)
# https://github.com/signalapp/libsignal-protocol-java/blob/fde96d22004f32a391554e4991e4e1f0a14c2d50/
# java/src/main/java/org/whispersystems/libsignal/ecc/Curve.java#L17
return b"\x05" + pub
@staticmethod
def serialize_public_key(pub: bytes) -> bytes:
"""
Args:
pub: A public key in Curve25519 format.
Returns:
The public key serialized in the network format.
Note:
This is a reexport of :meth:`_encode_public_key` with the format fixed to Curve25519.
"""
return StateImpl._encode_public_key(x3dh.IdentityKeyFormat.CURVE_25519, pub)
@staticmethod
def parse_public_key(serialized: bytes) -> bytes:
"""
Args:
serialized: A Curve25519 public key serialized in the network format, as returned by e.g.
:meth:`serialize_public_key`.
Returns:
The parsed public key in Curve25519 format.
Raises:
ValueError: if the input format does not comply to the expected network format.
"""
if len(serialized) == StateImpl.IDENTITY_KEY_ENCODING_LENGTH and serialized[0] == 0x05:
return serialized[1:]
raise ValueError("Public key not serialized in network format.")
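# A minimal sketch, not part of the library: the network format is the 32-byte Curve25519 key prefixed
# with the constant byte 0x05, so serialization and parsing round-trip cleanly.
def _example_public_key_roundtrip() -> None:
    pub = secrets.token_bytes(32)  # a random stand-in for a Curve25519 public key
    serialized = StateImpl.serialize_public_key(pub)
    assert len(serialized) == 33 and serialized[0] == 0x05
    assert StateImpl.parse_public_key(serialized) == pub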
class BundleImpl(Bundle):
"""
:class:`~omemo.bundle.Bundle` implementation as a simple storage type.
"""
def __init__(
self,
bare_jid: str,
device_id: int,
bundle: x3dh.Bundle,
signed_pre_key_id: int,
pre_key_ids: Dict[bytes, int]
) -> None:
"""
Args:
bare_jid: The bare JID this bundle belongs to.
device_id: The device id of the specific device this bundle belongs to.
bundle: The bundle to store in this instance.
signed_pre_key_id: The id of the signed pre key referenced in the bundle.
pre_key_ids: A dictionary that maps each pre key referenced in the bundle to its id.
"""
self.__bare_jid = bare_jid
self.__device_id = device_id
self.__bundle = bundle
self.__signed_pre_key_id = signed_pre_key_id
self.__pre_key_ids = dict(pre_key_ids)
@property
def namespace(self) -> str:
return NAMESPACE
@property
def bare_jid(self) -> str:
return self.__bare_jid
@property
def device_id(self) -> int:
return self.__device_id
@property
def identity_key(self) -> bytes:
return self.__bundle.identity_key
def __eq__(self, other: object) -> bool:
if isinstance(other, BundleImpl):
return (
other.bare_jid == self.bare_jid
and other.device_id == self.device_id
and other.bundle == self.bundle
and other.signed_pre_key_id == self.signed_pre_key_id
and other.pre_key_ids == self.pre_key_ids
)
return False
def __hash__(self) -> int:
return hash((
self.bare_jid,
self.device_id,
self.bundle,
self.signed_pre_key_id,
frozenset(self.pre_key_ids.items())
))
@property
def bundle(self) -> x3dh.Bundle:
"""
Returns:
The bundle held by this instance.
"""
return self.__bundle
@property
def signed_pre_key_id(self) -> int:
"""
Returns:
The id of the signed pre key referenced in the bundle.
"""
return self.__signed_pre_key_id
@property
def pre_key_ids(self) -> Dict[bytes, int]:
"""
Returns:
A dictionary that maps each pre key referenced in the bundle to its id.
"""
return dict(self.__pre_key_ids)
class ContentImpl(Content):
"""
:class:`~omemo.message.Content` implementation as a simple storage type.
"""
def __init__(self, ciphertext: bytes, initialization_vector: bytes) -> None:
"""
Args:
ciphertext: The ciphertext to store in this instance.
initialization_vector: The initialization vector to store in this instance.
Note:
For empty OMEMO messages as per the specification, the ciphertext is set to an empty byte string
and the initialization vector is initialized with a valid initialization vector for further use by
external protocols (aka ``KeyTransportMessage``).
"""
self.__ciphertext = ciphertext
self.__initialization_vector = initialization_vector
@property
def empty(self) -> bool:
return self.__ciphertext == b""
@staticmethod
def make_empty() -> ContentImpl:
"""
Returns:
An "empty" instance, i.e. one that corresponds to an empty OMEMO message as per the specification.
The ciphertext is set to an empty byte string and the initialization vector is initialized with a
valid initialization vector for further use by external protocols (aka ``KeyTransportMessage``).
"""
return ContentImpl(b"", secrets.token_bytes(12))
@property
def ciphertext(self) -> bytes:
"""
Returns:
The ciphertext held by this instance.
"""
return self.__ciphertext
@property
def initialization_vector(self) -> bytes:
"""
Returns:
The initialization vector held by this instance.
"""
return self.__initialization_vector
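# A minimal sketch, not part of the library: an "empty" content instance carries no ciphertext but still
# holds a fresh 12-byte initialization vector for KeyTransportMessage-style use by external protocols.
def _example_empty_content() -> None:
    content = ContentImpl.make_empty()
    assert content.empty and content.ciphertext == b""
    assert len(content.initialization_vector) == 12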
class EncryptedKeyMaterialImpl(EncryptedKeyMaterial):
"""
:class:`~omemo.message.EncryptedKeyMaterial` implementation as a simple storage type.
"""
def __init__(
self,
bare_jid: str,
device_id: int,
encrypted_message: doubleratchet.EncryptedMessage
) -> None:
"""
Args:
bare_jid: The bare JID of the other party.
device_id: The device id of the specific device of the other party.
encrypted_message: The encrypted Double Ratchet message to store in this instance.
"""
self.__bare_jid = bare_jid
self.__device_id = device_id
self.__encrypted_message = encrypted_message
@property
def bare_jid(self) -> str:
return self.__bare_jid
@property
def device_id(self) -> int:
return self.__device_id
@property
def encrypted_message(self) -> doubleratchet.EncryptedMessage:
"""
Returns:
The encrypted Double Ratchet message held by this instance.
"""
return self.__encrypted_message
def serialize(self) -> bytes:
"""
Returns:
A serialized OMEMOAuthenticatedMessage message structure representing the content of this
instance.
"""
# The ciphertext field contains the result of :meth:`AEADImpl.encrypt`, which is a serialized
# OMEMOAuthenticatedMessage with all fields already correctly set, thus it can be used here as is.
return self.__encrypted_message.ciphertext
@staticmethod
def parse(authenticated_message: bytes, bare_jid: str, device_id: int) -> EncryptedKeyMaterialImpl:
"""
Args:
authenticated_message: A serialized OMEMOAuthenticatedMessage message structure.
bare_jid: The bare JID of the other party.
device_id: The device id of the specific device of the other party.
Returns:
An instance of this class, parsed from the OMEMOAuthenticatedMessage.
Raises:
ValueError: if the data is malformed.
"""
message_serialized = OMEMOAuthenticatedMessage.FromString(authenticated_message).message
if len(message_serialized) < 1 or message_serialized[0] != 0x33:
raise ValueError("Version byte missing.")
# Parse the OMEMOAuthenticatedMessage and OMEMOMessage structures to extract the header.
try:
message = OMEMOMessage.FromString(message_serialized[1:])
except google.protobuf.message.DecodeError as e:
raise ValueError() from e
return EncryptedKeyMaterialImpl(
bare_jid,
device_id,
doubleratchet.EncryptedMessage(
doubleratchet.Header(
StateImpl.parse_public_key(message.dh_pub),
message.pn,
message.n
),
authenticated_message
)
)
class PlainKeyMaterialImpl(PlainKeyMaterial):
"""
:class:`~omemo.message.PlainKeyMaterial` implementation as a simple storage type.
"""
KEY_LENGTH: Final = 16
def __init__(self, key: bytes, auth_tag: bytes) -> None:
"""
Args:
key: The key to store in this instance.
auth_tag: The authentication tag to store in this instance.
Note:
For empty OMEMO messages as per the specification, the key is set to a freshly generated key for
further use by external protocols (aka ``KeyTransportMessage``), while the auth tag is set to an
empty byte string.
"""
self.__key = key
self.__auth_tag = auth_tag
@property
def key(self) -> bytes:
"""
Returns:
The key held by this instance.
"""
return self.__key
@property
def auth_tag(self) -> bytes:
"""
Returns:
The authentication tag held by this instance.
"""
return self.__auth_tag
@staticmethod
def make_empty() -> PlainKeyMaterialImpl:
"""
Returns:
An "empty" instance, i.e. one that corresponds to an empty OMEMO message as per the specification.
The key stored in empty instances is a freshly generated key for further use by external protocols
(aka ``KeyTransportMessage``), while the auth tag is set to an empty byte string.
"""
return PlainKeyMaterialImpl(secrets.token_bytes(PlainKeyMaterialImpl.KEY_LENGTH), b"")
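# A minimal sketch, not part of the library: an "empty" plain key material instance carries a freshly
# generated 16-byte key and an empty authentication tag, mirroring ContentImpl.make_empty above.
def _example_empty_plain_key_material() -> None:
    plain_key_material = PlainKeyMaterialImpl.make_empty()
    assert len(plain_key_material.key) == PlainKeyMaterialImpl.KEY_LENGTH
    assert plain_key_material.auth_tag == b""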
class KeyExchangeImpl(KeyExchange):
"""
:class:`~omemo.message.KeyExchange` implementation as a simple storage type.
There are four kinds of instances:
- Completely filled instances
- Partially filled instances received via network
- Very sparsely filled instances migrated from the legacy storage format
- Almost completely empty instances migrated from the legacy storage format
Empty fields are filled with filler values such that the data types and lengths still match expectations.
The fourth kind, almost completely empty instances, will never have any of their methods called except for
getters.
"""
def __init__(self, header: x3dh.Header, signed_pre_key_id: int, pre_key_id: int) -> None:
"""
Args:
header: The header to store in this instance.
signed_pre_key_id: The id of the signed pre key referenced in the header.
pre_key_id: The id of the pre key referenced in the header.
"""
self.__header = header
self.__signed_pre_key_id = signed_pre_key_id
self.__pre_key_id = pre_key_id
@property
def identity_key(self) -> bytes:
return self.__header.identity_key
def builds_same_session(self, other: KeyExchange) -> bool:
if isinstance(other, KeyExchangeImpl):
if self.is_migrated_instance() or other.is_migrated_instance():
# If any of the instances is a migrated instance, we can only compare the identity key and the
# pre key id. Sadly that's the only data included in the legacy storage format, next to the
# pre key byte data, which doesn't add any value.
return (
other.header.identity_key == self.header.identity_key
and other.pre_key_id == self.pre_key_id
)
# Otherwise, we are dealing with completely filled instances or network instances. The signed pre
# key id and pre key id are enough for uniqueness; ignoring the actual signed pre key and pre key
# bytes here makes it possible to compare network instances with completely filled instances.
return (
other.header.identity_key == self.header.identity_key
and other.header.ephemeral_key == self.header.ephemeral_key
and other.signed_pre_key_id == self.signed_pre_key_id
and other.pre_key_id == self.pre_key_id
)
return False
@property
def header(self) -> x3dh.Header:
"""
Returns:
The header held by this instance.
"""
return self.__header
@property
def signed_pre_key_id(self) -> int:
"""
Returns:
The id of the signed pre key referenced in the header.
"""
return self.__signed_pre_key_id
@property
def pre_key_id(self) -> int:
"""
Returns:
The id of the pre key referenced in the header.
"""
return self.__pre_key_id
def is_network_instance(self) -> bool:
"""
Returns:
            Whether this is a network instance. A network instance has all fields filled except for
the signed pre key and pre key byte data. The missing byte data can be restored by looking it up
from storage using the respective ids.
"""
return self.__header.signed_pre_key == b"" and self.__header.pre_key == b""
def is_migrated_instance(self) -> bool:
"""
Returns:
Whether this is a migrated instance, according to the third kind as described in the class
docstring. A migrated instance of that kind only sets the identity key, the pre key id and the pre
key byte data. Other values are fillers.
"""
# Could confirm the other values here too, but why the trouble.
return self.__signed_pre_key_id == -1 and self.__pre_key_id != -1
def serialize(self, authenticated_message: bytes) -> Tuple[bytes, bool]:
"""
Args:
authenticated_message: The serialized OMEMOAuthenticatedMessage message structure to include with
the key exchange information.
Returns:
A serialized OMEMOKeyExchange message structure in network format representing the content of this
            instance, and a flag indicating whether the sign bit was set on the identity key in its Ed25519
form.
"""
return b"\x33" + OMEMOKeyExchange(
pk_id=self.__pre_key_id,
spk_id=self.__signed_pre_key_id,
ik=StateImpl.serialize_public_key(xeddsa.ed25519_pub_to_curve25519_pub(
self.__header.identity_key
)),
ek=StateImpl.serialize_public_key(self.__header.ephemeral_key),
message=authenticated_message
).SerializeToString(), bool((self.__header.identity_key[31] >> 7) & 1)
@staticmethod
def parse(key_exchange: bytes, set_sign_bit: bool) -> Tuple[KeyExchangeImpl, bytes]:
"""
Args:
key_exchange: A serialized OMEMOKeyExchange message structure in network format.
set_sign_bit: Whether to set the sign bit on the identity key when converting it to its Ed25519
form.
Returns:
An instance of this class, parsed from the OMEMOKeyExchange, and the serialized
OMEMOAuthenticatedMessage extracted from the OMEMOKeyExchange.
Raises:
ValueError: if the data is malformed.
Warning:
The OMEMOKeyExchange message structure only contains the ids of the signed pre key and the pre key
used for the key exchange, not the full public keys. Since the job of this method is just parsing,
the X3DH header is initialized without the public keys here, and the code using instances of this
            class has to handle the public key lookup from the ids. Use :meth:`is_network_instance` to
            check whether the header is missing the public key byte data.
"""
if len(key_exchange) < 1 or key_exchange[0] != 0x33:
raise ValueError("Version byte missing.")
key_exchange = key_exchange[1:]
try:
parsed = OMEMOKeyExchange.FromString(key_exchange)
except google.protobuf.message.DecodeError as e:
raise ValueError() from e
return KeyExchangeImpl(
x3dh.Header(
xeddsa.curve25519_pub_to_ed25519_pub(StateImpl.parse_public_key(parsed.ik), set_sign_bit),
StateImpl.parse_public_key(parsed.ek),
b"",
b""
),
parsed.spk_id,
parsed.pk_id
), parsed.message
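# A minimal sketch, not part of the library: a key exchange restored from the legacy storage format uses
# the filler values described above, which is exactly what is_migrated_instance detects. The key bytes
# here are placeholders, not valid key material.
def _example_migrated_key_exchange() -> None:
    migrated = KeyExchangeImpl(
        x3dh.Header(b"\x01" * 32, b"\x00" * 32, b"\x00" * 32, b"\x00" * 32),
        signed_pre_key_id=-1,
        pre_key_id=42
    )
    assert migrated.is_migrated_instance()
    assert not migrated.is_network_instance()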
class SessionImpl(Session):
"""
:class:`~omemo.session.Session` implementation as a simple storage type.
"""
def __init__(
self,
bare_jid: str,
device_id: int,
initiation: Initiation,
key_exchange: KeyExchangeImpl,
associated_data: bytes,
double_ratchet: DoubleRatchetImpl,
confirmed: bool = False
):
"""
Args:
bare_jid: The bare JID of the other party.
device_id: The device id of the specific device of the other party.
initiation: Whether this session was built through active or passive session initiation.
key_exchange: The key exchange information to store in this instance.
associated_data: The associated data to store in this instance.
double_ratchet: The Double Ratchet to store in this instance.
confirmed: Whether the session was confirmed, i.e. whether a message was decrypted after actively
initiating the session. Leave this at the default value for passively initiated sessions.
"""
self.__bare_jid = bare_jid
self.__device_id = device_id
self.__initiation = initiation
self.__key_exchange = key_exchange
self.__associated_data = associated_data
self.__double_ratchet = double_ratchet
self.__confirmed = confirmed
@property
def namespace(self) -> str:
return NAMESPACE
@property
def bare_jid(self) -> str:
return self.__bare_jid
@property
def device_id(self) -> int:
return self.__device_id
@property
def initiation(self) -> Initiation:
return self.__initiation
@property
def confirmed(self) -> bool:
return self.__confirmed
@property
def key_exchange(self) -> KeyExchangeImpl:
return self.__key_exchange
@property
def receiving_chain_length(self) -> Optional[int]:
return self.__double_ratchet.receiving_chain_length
@property
def sending_chain_length(self) -> int:
return self.__double_ratchet.sending_chain_length
@property
def associated_data(self) -> bytes:
"""
Returns:
The associated data held by this instance.
"""
return self.__associated_data
@property
def double_ratchet(self) -> DoubleRatchetImpl:
"""
Returns:
The Double Ratchet held by this instance.
"""
return self.__double_ratchet
def confirm(self) -> None:
"""
Mark this session as confirmed.
"""
self.__confirmed = True
class Oldmemo(Backend):
"""
:class:`~omemo.backend.Backend` implementation providing OMEMO in the `eu.siacs.conversations.axolotl`
namespace.
One notable implementation detail is the handling of the identity key format. The specification requires
the identity key to be transferred in Curve25519 format (in bundles, key exchanges etc.), while the
python-omemo library uses Ed25519 serialization whenever the identity key is referred to. Thus, conversion
has to happen during the serialization/parsing of transferred data, as done for example in
:mod:`oldmemo.etree`.
"""
def __init__(
self,
storage: Storage,
max_num_per_session_skipped_keys: int = 1000,
max_num_per_message_skipped_keys: Optional[int] = None
) -> None:
"""
Args:
storage: The storage to store backend-specific data in. Note that all data keys are prefixed with
the backend namespace to avoid name clashes between backends.
max_num_per_session_skipped_keys: The maximum number of skipped message keys to keep around per
session. Once the maximum is reached, old message keys are deleted to make space for newer
ones. Accessible via :attr:`max_num_per_session_skipped_keys`.
max_num_per_message_skipped_keys: The maximum number of skipped message keys to accept in a single
message. When set to ``None`` (the default), this parameter defaults to the per-session
maximum (i.e. the value of the ``max_num_per_session_skipped_keys`` parameter). This parameter
may only be 0 if the per-session maximum is 0, otherwise it must be a number between 1 and the
per-session maximum. Accessible via :attr:`max_num_per_message_skipped_keys`.
"""
super().__init__(max_num_per_session_skipped_keys, max_num_per_message_skipped_keys)
self.__storage = storage
async def __get_state(self) -> StateImpl:
"""
Returns:
The loaded or newly created X3DH state.
"""
def check_type(value: JSONType) -> x3dh.types.JSONObject:
if isinstance(value, dict):
return cast(x3dh.types.JSONObject, value)
raise TypeError(
f"Stored StateImpl under key /{self.namespace}/x3dh corrupt: not a JSON object: {value}"
)
state, _ = (await self.__storage.load(
f"/{self.namespace}/x3dh"
)).fmap(check_type).fmap(lambda serialized: StateImpl.from_json(
serialized,
x3dh.IdentityKeyFormat.ED_25519,
x3dh.HashFunction.SHA_256,
StateImpl.INFO
)).maybe((None, False))
if state is None:
identity_key_pair = await IdentityKeyPair.get(self.__storage)
state = StateImpl.create(
x3dh.IdentityKeyFormat.ED_25519,
x3dh.HashFunction.SHA_256,
StateImpl.INFO,
(
x3dh.identity_key_pair.IdentityKeyPairSeed(identity_key_pair.seed)
if isinstance(identity_key_pair, IdentityKeyPairSeed)
else x3dh.identity_key_pair.IdentityKeyPairPriv(identity_key_pair.as_priv().priv)
)
)
await self.__storage.store(f"/{self.namespace}/x3dh", state.json)
return state
@property
def namespace(self) -> str:
return NAMESPACE
async def load_session(self, bare_jid: str, device_id: int) -> Optional[SessionImpl]:
def check_type(value: JSONType) -> doubleratchet.types.JSONObject:
if isinstance(value, dict):
return cast(doubleratchet.types.JSONObject, value)
raise TypeError(
f"Stored DoubleRatchetImpl under key"
f" /{self.namespace}/{bare_jid}/{device_id}/double_ratchet corrupt: not a JSON object:"
f" {value}"
)
try:
double_ratchet = (await self.__storage.load(
f"/{self.namespace}/{bare_jid}/{device_id}/double_ratchet"
)).fmap(check_type).fmap(lambda serialized: DoubleRatchetImpl.from_json(
serialized,
diffie_hellman_ratchet_curve25519.DiffieHellmanRatchet,
RootChainKDFImpl,
MessageChainKDFImpl,
DoubleRatchetImpl.MESSAGE_CHAIN_CONSTANT,
self.max_num_per_message_skipped_keys,
self.max_num_per_session_skipped_keys,
AEADImpl
)).maybe(None)
except doubleratchet.InconsistentSerializationException:
return None
if double_ratchet is None:
return None
initiation = Initiation((await self.__storage.load_primitive(
f"/{self.namespace}/{bare_jid}/{device_id}/initiation",
str
)).from_just())
identity_key = (await self.__storage.load_bytes(
f"/{self.namespace}/{bare_jid}/{device_id}/key_exchange/identity_key"
)).from_just()
ephemeral_key = (await self.__storage.load_bytes(
f"/{self.namespace}/{bare_jid}/{device_id}/key_exchange/ephemeral_key"
)).from_just()
signed_pre_key = (await self.__storage.load_bytes(
f"/{self.namespace}/{bare_jid}/{device_id}/key_exchange/signed_pre_key"
)).from_just()
signed_pre_key_id = (await self.__storage.load_primitive(
f"/{self.namespace}/{bare_jid}/{device_id}/key_exchange/signed_pre_key_id",
int
)).from_just()
pre_key = (await self.__storage.load_bytes(
f"/{self.namespace}/{bare_jid}/{device_id}/key_exchange/pre_key"
)).from_just()
pre_key_id = (await self.__storage.load_primitive(
f"/{self.namespace}/{bare_jid}/{device_id}/key_exchange/pre_key_id",
int
)).from_just()
associated_data = (await self.__storage.load_bytes(
f"/{self.namespace}/{bare_jid}/{device_id}/associated_data"
)).from_just()
confirmed = (await self.__storage.load_primitive(
f"/{self.namespace}/{bare_jid}/{device_id}/confirmed",
bool
)).from_just()
return SessionImpl(bare_jid, device_id, initiation, KeyExchangeImpl(
x3dh.Header(identity_key, ephemeral_key, signed_pre_key, pre_key),
signed_pre_key_id,
pre_key_id
), associated_data, double_ratchet, confirmed)
async def store_session(self, session: Session) -> None:
assert isinstance(session, SessionImpl)
assert session.key_exchange.header.pre_key is not None
await self.__storage.store(
f"/{self.namespace}/{session.bare_jid}/{session.device_id}/initiation",
session.initiation.name
)
await self.__storage.store_bytes(
f"/{self.namespace}/{session.bare_jid}/{session.device_id}/key_exchange/identity_key",
session.key_exchange.header.identity_key
)
await self.__storage.store_bytes(
f"/{self.namespace}/{session.bare_jid}/{session.device_id}/key_exchange/ephemeral_key",
session.key_exchange.header.ephemeral_key
)
await self.__storage.store_bytes(
f"/{self.namespace}/{session.bare_jid}/{session.device_id}/key_exchange/signed_pre_key",
session.key_exchange.header.signed_pre_key
)
await self.__storage.store(
f"/{self.namespace}/{session.bare_jid}/{session.device_id}/key_exchange/signed_pre_key_id",
session.key_exchange.signed_pre_key_id
)
await self.__storage.store_bytes(
f"/{self.namespace}/{session.bare_jid}/{session.device_id}/key_exchange/pre_key",
session.key_exchange.header.pre_key
)
await self.__storage.store(
f"/{self.namespace}/{session.bare_jid}/{session.device_id}/key_exchange/pre_key_id",
session.key_exchange.pre_key_id
)
await self.__storage.store_bytes(
f"/{self.namespace}/{session.bare_jid}/{session.device_id}/associated_data",
session.associated_data
)
await self.__storage.store(
f"/{self.namespace}/{session.bare_jid}/{session.device_id}/double_ratchet",
session.double_ratchet.json
)
await self.__storage.store(
f"/{self.namespace}/{session.bare_jid}/{session.device_id}/confirmed",
session.confirmed
)
# Keep track of bare JIDs with stored sessions
bare_jids = set((await self.__storage.load_list(f"/{self.namespace}/bare_jids", str)).maybe([]))
bare_jids.add(session.bare_jid)
await self.__storage.store(f"/{self.namespace}/bare_jids", list(bare_jids))
# Keep track of device ids with stored sessions
device_ids = set((await self.__storage.load_list(
f"/{self.namespace}/{session.bare_jid}/device_ids",
int
)).maybe([]))
device_ids.add(session.device_id)
await self.__storage.store(f"/{self.namespace}/{session.bare_jid}/device_ids", list(device_ids))
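# Taken together, load_session and store_session define the per-session storage layout under the
# backend namespace, roughly:
#
#     /<namespace>/bare_jids                               list of bare JIDs with stored sessions
#     /<namespace>/<bare_jid>/device_ids                   list of device ids with stored sessions
#     /<namespace>/<bare_jid>/<device_id>/initiation       "ACTIVE" or "PASSIVE"
#     /<namespace>/<bare_jid>/<device_id>/key_exchange/*   X3DH header fields and key ids
#     /<namespace>/<bare_jid>/<device_id>/associated_data  associated data bytes
#     /<namespace>/<bare_jid>/<device_id>/double_ratchet   serialized Double Ratchet
#     /<namespace>/<bare_jid>/<device_id>/confirmed        session confirmation flag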
async def build_session_active(
self,
bare_jid: str,
device_id: int,
bundle: Bundle,
plain_key_material: PlainKeyMaterial
) -> Tuple[SessionImpl, EncryptedKeyMaterialImpl]:
assert isinstance(bundle, BundleImpl)
assert isinstance(plain_key_material, PlainKeyMaterialImpl)
try:
state = await self.__get_state()
shared_secret, associated_data, header = await state.get_shared_secret_active(bundle.bundle)
except x3dh.KeyAgreementException as e:
raise KeyExchangeFailed() from e
assert header.pre_key is not None
double_ratchet, encrypted_message = await DoubleRatchetImpl.encrypt_initial_message(
diffie_hellman_ratchet_curve25519.DiffieHellmanRatchet,
RootChainKDFImpl,
MessageChainKDFImpl,
DoubleRatchetImpl.MESSAGE_CHAIN_CONSTANT,
self.max_num_per_message_skipped_keys,
self.max_num_per_session_skipped_keys,
AEADImpl,
shared_secret,
bundle.bundle.signed_pre_key,
plain_key_material.key + plain_key_material.auth_tag,
associated_data
)
session = SessionImpl(
bare_jid,
device_id,
Initiation.ACTIVE,
KeyExchangeImpl(
header,
bundle.signed_pre_key_id,
bundle.pre_key_ids[header.pre_key]
),
associated_data,
double_ratchet
)
encrypted_key_material = EncryptedKeyMaterialImpl(bare_jid, device_id, encrypted_message)
return session, encrypted_key_material
async def build_session_passive(
self,
bare_jid: str,
device_id: int,
key_exchange: KeyExchange,
encrypted_key_material: EncryptedKeyMaterial
) -> Tuple[SessionImpl, PlainKeyMaterialImpl]:
assert isinstance(key_exchange, KeyExchangeImpl)
assert isinstance(encrypted_key_material, EncryptedKeyMaterialImpl)
state = await self.__get_state()
# The key exchange may be a network instance here, i.e. parsed from the wire with only key ids and
# filler values in the header, but it can never be a migrated instance, so that case does not need
# to be handled.
if key_exchange.is_network_instance():
# Look up the signed pre key and pre key public keys by their ids, since the header of a network
# instance does not carry the actual key bytes
signed_pre_keys_by_id = { v: k for k, v in (await self.__get_signed_pre_key_ids()).items() }
if key_exchange.signed_pre_key_id not in signed_pre_keys_by_id:
raise KeyExchangeFailed(f"No signed pre key with id {key_exchange.signed_pre_key_id} known.")
pre_keys_by_id = { v: k for k, v in (await self.__get_pre_key_ids()).items() }
if key_exchange.pre_key_id not in pre_keys_by_id:
raise KeyExchangeFailed(f"No pre key with id {key_exchange.pre_key_id} known.")
# Update the key exchange information with the filled header
key_exchange = KeyExchangeImpl(
x3dh.Header(
key_exchange.header.identity_key,
key_exchange.header.ephemeral_key,
signed_pre_keys_by_id[key_exchange.signed_pre_key_id],
pre_keys_by_id[key_exchange.pre_key_id]
),
key_exchange.signed_pre_key_id,
key_exchange.pre_key_id
)
try:
shared_secret, associated_data, signed_pre_key = await state.get_shared_secret_passive(
key_exchange.header
)
except x3dh.KeyAgreementException as e:
raise KeyExchangeFailed() from e
try:
double_ratchet, decrypted_message = await DoubleRatchetImpl.decrypt_initial_message(
diffie_hellman_ratchet_curve25519.DiffieHellmanRatchet,
RootChainKDFImpl,
MessageChainKDFImpl,
DoubleRatchetImpl.MESSAGE_CHAIN_CONSTANT,
self.max_num_per_message_skipped_keys,
self.max_num_per_session_skipped_keys,
AEADImpl,
shared_secret,
signed_pre_key.priv,
encrypted_key_material.encrypted_message,
associated_data
)
except Exception as e:
raise DecryptionFailed(
"Decryption of the initial message as part of passive session building failed."
) from e
session = SessionImpl(
bare_jid,
device_id,
Initiation.PASSIVE,
key_exchange,
associated_data,
double_ratchet
)
plain_key_material = PlainKeyMaterialImpl(
decrypted_message[:PlainKeyMaterialImpl.KEY_LENGTH],
decrypted_message[PlainKeyMaterialImpl.KEY_LENGTH:]
)
return session, plain_key_material
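# Taken together, build_session_active and build_session_passive implement the two sides of the
# X3DH handshake: the active side consumes a published bundle and produces the key exchange header
# plus the initial Double Ratchet message, while the passive side resolves the header (by key id
# if necessary) and decrypts that initial message. Condensed sketch (hypothetical variables, for
# illustration only):
#
#     session_a, encrypted_km = await backend.build_session_active(
#         jid_b, device_b, bundle_b, plain_km
#     )
#     session_b, plain_km_b = await backend.build_session_passive(
#         jid_a, device_a, session_a.key_exchange, encrypted_km
#     )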
async def encrypt_plaintext(self, plaintext: bytes) -> Tuple[ContentImpl, PlainKeyMaterialImpl]:
# Generate KEY_LENGTH bytes of cryptographically secure random data for the key
key = secrets.token_bytes(PlainKeyMaterialImpl.KEY_LENGTH)
# Generate 12 bytes of cryptographically secure random data for the IV (96-bit nonces are the
# recommended size for GCM)
initialization_vector = secrets.token_bytes(12)
# Encrypt the plaintext with AES-128 in GCM mode (the 128-bit variant is implied by the key size),
# using the previously generated key and IV
aes = Cipher(
algorithms.AES(key),
modes.GCM(initialization_vector),
backend=default_backend()
).encryptor()
ciphertext = aes.update(plaintext) + aes.finalize() # pylint: disable=no-member
# This authentication tag is not truncated
auth_tag = aes.tag # pylint: disable=no-member
return ContentImpl(ciphertext, initialization_vector), PlainKeyMaterialImpl(key, auth_tag)
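# For reference, the payload scheme above (16-byte key, 12-byte IV, AES-128-GCM, untruncated tag)
# can be reproduced standalone with the cryptography package (runnable sketch, not part of this
# module):
#
#     import secrets
#     from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
#
#     key, iv = secrets.token_bytes(16), secrets.token_bytes(12)
#     encryptor = Cipher(algorithms.AES(key), modes.GCM(iv)).encryptor()
#     ciphertext = encryptor.update(b"hello") + encryptor.finalize()
#     decryptor = Cipher(algorithms.AES(key), modes.GCM(iv, encryptor.tag)).decryptor()
#     assert decryptor.update(ciphertext) + decryptor.finalize() == b"hello"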
async def encrypt_empty(self) -> Tuple[ContentImpl, PlainKeyMaterialImpl]:
return ContentImpl.make_empty(), PlainKeyMaterialImpl.make_empty()
async def encrypt_key_material(
self,
session: Session,
plain_key_material: PlainKeyMaterial
) -> EncryptedKeyMaterialImpl:
assert isinstance(session, SessionImpl)
assert isinstance(plain_key_material, PlainKeyMaterialImpl)
# Rebuild the associated data such that it follows the order
# sender identity key || recipient identity key
# regardless of who initiated the session. This is to conform to the undocumented behaviour of
# libsignal.
associated_data = session.associated_data
if session.initiation is Initiation.PASSIVE:
associated_data = (
associated_data[StateImpl.IDENTITY_KEY_ENCODING_LENGTH:]
+ associated_data[:StateImpl.IDENTITY_KEY_ENCODING_LENGTH]
)
return EncryptedKeyMaterialImpl(
session.bare_jid,
session.device_id,
await session.double_ratchet.encrypt_message(
plain_key_material.key + plain_key_material.auth_tag,
associated_data
)
)
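# The reordering above is a plain swap of the two fixed-length halves of the associated data. Toy
# illustration (two-byte stand-ins instead of the real identity key encoding length):
#
#     associated_data = b"AA" + b"BB"                      # own identity key || other identity key
#     swapped = associated_data[2:] + associated_data[:2]  # b"BBAA"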
async def decrypt_plaintext(self, content: Content, plain_key_material: PlainKeyMaterial) -> bytes:
assert isinstance(content, ContentImpl)
assert isinstance(plain_key_material, PlainKeyMaterialImpl)
assert not content.empty
# Decrypt the ciphertext with AES-128 in GCM mode (the 128-bit variant is implied by the key
# size), using the key and IV from plain_key_material, while also verifying the authentication tag
aes = Cipher(
algorithms.AES(plain_key_material.key),
modes.GCM(content.initialization_vector, plain_key_material.auth_tag),
backend=default_backend()
).decryptor()
try:
return aes.update(content.ciphertext) + aes.finalize() # pylint: disable=no-member
except InvalidTag as e:
raise DecryptionFailed("Ciphertext decryption failed.") from e
async def decrypt_key_material(
self,
session: Session,
encrypted_key_material: EncryptedKeyMaterial
) -> PlainKeyMaterialImpl:
assert isinstance(session, SessionImpl)
assert isinstance(encrypted_key_material, EncryptedKeyMaterialImpl)
# Rebuild the associated data such that it follows the order
# sender identity key || recipient identity key
# regardless of who initiated the session. This is to conform to the undocumented behaviour of
# libsignal.
associated_data = session.associated_data
if session.initiation is Initiation.ACTIVE:
associated_data = (
associated_data[StateImpl.IDENTITY_KEY_ENCODING_LENGTH:]
+ associated_data[:StateImpl.IDENTITY_KEY_ENCODING_LENGTH]
)
try:
decrypted_message = await session.double_ratchet.decrypt_message(
encrypted_key_material.encrypted_message,
associated_data
)
except Exception as e:
raise DecryptionFailed("Key material decryption failed.") from e
session.confirm()
return PlainKeyMaterialImpl(
decrypted_message[:PlainKeyMaterialImpl.KEY_LENGTH],
decrypted_message[PlainKeyMaterialImpl.KEY_LENGTH:]
)
async def signed_pre_key_age(self) -> int:
return (await self.__get_state()).signed_pre_key_age()
async def rotate_signed_pre_key(self) -> None:
state = await self.__get_state()
state.rotate_signed_pre_key()
await self.__storage.store(f"/{self.namespace}/x3dh", state.json)
async def hide_pre_key(self, session: Session) -> bool:
assert isinstance(session, SessionImpl)
# This method is only called with KeyExchangeImpl instances that have the pre key byte data set. We do
# not have to worry about the field containing a filler value and the assertion is merely there to
# satisfy the type system.
assert session.key_exchange.header.pre_key is not None
state = await self.__get_state()
hidden = state.hide_pre_key(session.key_exchange.header.pre_key)
await self.__storage.store(f"/{self.namespace}/x3dh", state.json)
return hidden
async def delete_pre_key(self, session: Session) -> bool:
assert isinstance(session, SessionImpl)
# This method is only called with KeyExchangeImpl instances that have the pre key byte data set. We do
# not have to worry about the field containing a filler value and the assertion is merely there to
# satisfy the type system.
assert session.key_exchange.header.pre_key is not None
state = await self.__get_state()
deleted = state.delete_pre_key(session.key_exchange.header.pre_key)
await self.__storage.store(f"/{self.namespace}/x3dh", state.json)
return deleted
async def delete_hidden_pre_keys(self) -> None:
state = await self.__get_state()
state.delete_hidden_pre_keys()
await self.__storage.store(f"/{self.namespace}/x3dh", state.json)
async def get_num_visible_pre_keys(self) -> int:
return (await self.__get_state()).get_num_visible_pre_keys()
async def generate_pre_keys(self, num_pre_keys: int) -> None:
state = await self.__get_state()
state.generate_pre_keys(num_pre_keys)
await self.__storage.store(f"/{self.namespace}/x3dh", state.json)
async def get_bundle(self, bare_jid: str, device_id: int) -> BundleImpl:
bundle = (await self.__get_state()).bundle
return BundleImpl(
bare_jid,
device_id,
bundle,
(await self.__get_signed_pre_key_ids())[bundle.signed_pre_key],
{
pre_key: pre_key_id
for pre_key, pre_key_id
in (await self.__get_pre_key_ids()).items()
if pre_key in bundle.pre_keys
}
)
async def purge(self) -> None:
for bare_jid in (await self.__storage.load_list(f"/{self.namespace}/bare_jids", str)).maybe([]):
await self.purge_bare_jid(bare_jid)
await self.__storage.delete(f"/{self.namespace}/bare_jids")
await self.__storage.delete(f"/{self.namespace}/x3dh")
await self.__storage.delete(f"/{self.namespace}/signed_pre_key_ids")
await self.__storage.delete(f"/{self.namespace}/pre_key_ids")
await self.__storage.delete(f"/{self.namespace}/pre_key_id_counter")
async def purge_bare_jid(self, bare_jid: str) -> None:
storage = self.__storage
for device_id in (await storage.load_list(f"/{self.namespace}/{bare_jid}/device_ids", int)).maybe([]):
await storage.delete(f"/{self.namespace}/{bare_jid}/{device_id}/initiation")
await storage.delete(f"/{self.namespace}/{bare_jid}/{device_id}/key_exchange/identity_key")
await storage.delete(f"/{self.namespace}/{bare_jid}/{device_id}/key_exchange/ephemeral_key")
await storage.delete(f"/{self.namespace}/{bare_jid}/{device_id}/key_exchange/signed_pre_key")
await storage.delete(f"/{self.namespace}/{bare_jid}/{device_id}/key_exchange/signed_pre_key_id")
await storage.delete(f"/{self.namespace}/{bare_jid}/{device_id}/key_exchange/pre_key")
await storage.delete(f"/{self.namespace}/{bare_jid}/{device_id}/key_exchange/pre_key_id")
await storage.delete(f"/{self.namespace}/{bare_jid}/{device_id}/associated_data")
await storage.delete(f"/{self.namespace}/{bare_jid}/{device_id}/double_ratchet")
await storage.delete(f"/{self.namespace}/{bare_jid}/{device_id}/confirmed")
await storage.delete(f"/{self.namespace}/{bare_jid}/device_ids")
bare_jids = set((await storage.load_list(f"/{self.namespace}/bare_jids", str)).maybe([]))
bare_jids.discard(bare_jid)  # discard() rather than remove(), to tolerate unknown bare JIDs
await storage.store(f"/{self.namespace}/bare_jids", list(bare_jids))
async def __get_signed_pre_key_ids(self) -> Dict[bytes, int]:
"""
Assigns an id to each signed pre key currently available in the X3DH state, both the current signed
pre key and the old signed pre key that is kept around for one more rotation period. Once assigned to
a signed pre key, its id will never change.
Returns:
The mapping from signed pre key to id.
"""
state = await self.__get_state()
signed_pre_key = state.bundle.signed_pre_key
old_signed_pre_key = state.old_signed_pre_key
# Load the existing signed pre key ids from the storage
signed_pre_key_ids = {
base64.b64decode(signed_pre_key_b64): signed_pre_key_id
for signed_pre_key_b64, signed_pre_key_id
in (await self.__storage.load_dict(
f"/{self.namespace}/signed_pre_key_ids",
int
)).maybe({}).items()
}
# Take note of the highest id that was assigned, default to 0 if no ids were assigned yet
signed_pre_key_id_counter = max(signed_pre_key_ids.values(), default=0)
# Prepare the dictionary to hold updated signed pre key ids
new_signed_pre_key_ids: Dict[bytes, int] = {}
# Assign the next highest id to the signed pre key, if there is no id assigned to it yet.
signed_pre_key_id_counter += 1
new_signed_pre_key_ids[signed_pre_key] = signed_pre_key_ids.get(
signed_pre_key,
signed_pre_key_id_counter
)
# Assign the next highest id to the old signed pre key if it does not have one yet. In practice,
# it should already have been assigned an id while it was the current signed pre key; however,
# this covers edge cases such as the signed pre key rotating twice before the assigned ids are
# updated.
if old_signed_pre_key is not None:
signed_pre_key_id_counter += 1
new_signed_pre_key_ids[old_signed_pre_key] = signed_pre_key_ids.get(
old_signed_pre_key,
signed_pre_key_id_counter
)
# If the ids have changed, store them
if new_signed_pre_key_ids != signed_pre_key_ids:
await self.__storage.store(f"/{self.namespace}/signed_pre_key_ids", {
base64.b64encode(signed_pre_key).decode("ASCII"): signed_pre_key_id
for signed_pre_key, signed_pre_key_id
in new_signed_pre_key_ids.items()
})
return new_signed_pre_key_ids
async def __get_pre_key_ids(self) -> Dict[bytes, int]:
"""
Assigns an id to each pre key currently available in the X3DH state, both hidden and visible pre keys.
Once assigned to a pre key, its id will never change.
Returns:
The mapping from pre key to id.
"""
state = await self.__get_state()
pre_keys = state.bundle.pre_keys | state.hidden_pre_keys
# Load the existing pre key ids from the storage
pre_key_ids = {
base64.b64decode(pre_key_b64): pre_key_id
for pre_key_b64, pre_key_id
in (await self.__storage.load_dict(f"/{self.namespace}/pre_key_ids", int)).maybe({}).items()
}
# Load the pre key id counter from the storage
pre_key_id_counter = (await self.__storage.load_primitive(
f"/{self.namespace}/pre_key_id_counter",
int
)).maybe(0)
# Prepare the dictionary to hold updated pre key ids
new_pre_key_ids: Dict[bytes, int] = {}
# Assign the next highest id to each pre key if there is no existing id assigned to it
for pre_key in pre_keys:
pre_key_id_counter += 1
new_pre_key_ids[pre_key] = pre_key_ids.get(pre_key, pre_key_id_counter)
# If the ids have changed, store them
if new_pre_key_ids != pre_key_ids:
await self.__storage.store(f"/{self.namespace}/pre_key_ids", {
base64.b64encode(pre_key).decode("ASCII"): pre_key_id
for pre_key, pre_key_id
in new_pre_key_ids.items()
})
await self.__storage.store(f"/{self.namespace}/pre_key_id_counter", pre_key_id_counter)
return new_pre_key_ids
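# The id assignment performed by the two methods above boils down to the following standalone
# logic (runnable sketch with toy byte strings, not part of this module):
#
#     existing_ids = { b"pre-key-a": 1 }          # previously assigned ids, loaded from storage
#     id_counter = 1                              # persisted counter, >= every assigned id
#     new_ids = {}
#     for pre_key in [ b"pre-key-a", b"pre-key-b" ]:
#         id_counter += 1
#         new_ids[pre_key] = existing_ids.get(pre_key, id_counter)
#     # b"pre-key-a" keeps id 1, b"pre-key-b" receives a fresh id; the counter may skip values,
#     # which is fine since ids only need to be unique and stable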
python-oldmemo-1.0.3/oldmemo/oldmemo_pb2.py 0000664 0000000 0000000 00000002525 14332461332 0020700 0 ustar 00root root 0000000 0000000 # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: oldmemo.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import builder as _builder
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\roldmemo.proto\x12\x07oldmemo\"I\n\x0cOMEMOMessage\x12\t\n\x01n\x18\x02 \x02(\r\x12\n\n\x02pn\x18\x03 \x02(\r\x12\x0e\n\x06\x64h_pub\x18\x01 \x02(\x0c\x12\x12\n\nciphertext\x18\x04 \x01(\x0c\"j\n\x10OMEMOKeyExchange\x12\r\n\x05pk_id\x18\x01 \x02(\r\x12\x0e\n\x06spk_id\x18\x06 \x02(\r\x12\n\n\x02ik\x18\x03 \x02(\x0c\x12\n\n\x02\x65k\x18\x02 \x02(\x0c\x12\x0f\n\x07message\x18\x04 \x02(\x0c\x12\x0e\n\x06unused\x18\x05 \x01(\r')
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'oldmemo_pb2', globals())
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_OMEMOMESSAGE._serialized_start=26
_OMEMOMESSAGE._serialized_end=99
_OMEMOKEYEXCHANGE._serialized_start=101
_OMEMOKEYEXCHANGE._serialized_end=207
# @@protoc_insertion_point(module_scope)
python-oldmemo-1.0.3/oldmemo/oldmemo_pb2.pyi 0000664 0000000 0000000 00000006243 14332461332 0021052 0 ustar 00root root 0000000 0000000 """
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.message
import typing
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
class OMEMOMessage(google.protobuf.message.Message):
"""A modified version of SignalMessage defined in
https://github.com/signalapp/libsignal-protocol-java/blob/master/protobuf/WhisperTextProtocol.proto
Structure, field names, required/optional and order changed to match python-twomemo. The serialization
should not be affected by those modifications.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
N_FIELD_NUMBER: builtins.int
PN_FIELD_NUMBER: builtins.int
DH_PUB_FIELD_NUMBER: builtins.int
CIPHERTEXT_FIELD_NUMBER: builtins.int
n: builtins.int
pn: builtins.int
dh_pub: builtins.bytes
ciphertext: builtins.bytes
def __init__(self,
*,
n: typing.Optional[builtins.int] = ...,
pn: typing.Optional[builtins.int] = ...,
dh_pub: typing.Optional[builtins.bytes] = ...,
ciphertext: typing.Optional[builtins.bytes] = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["ciphertext",b"ciphertext","dh_pub",b"dh_pub","n",b"n","pn",b"pn"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["ciphertext",b"ciphertext","dh_pub",b"dh_pub","n",b"n","pn",b"pn"]) -> None: ...
global___OMEMOMessage = OMEMOMessage
class OMEMOKeyExchange(google.protobuf.message.Message):
"""A modified version of PreKeySignalMessage defined in
https://github.com/signalapp/libsignal-protocol-java/blob/master/protobuf/WhisperTextProtocol.proto
Structure, field names, required/optional and order changed to match python-twomemo. The serialization
should not be affected by those modifications.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
PK_ID_FIELD_NUMBER: builtins.int
SPK_ID_FIELD_NUMBER: builtins.int
IK_FIELD_NUMBER: builtins.int
EK_FIELD_NUMBER: builtins.int
MESSAGE_FIELD_NUMBER: builtins.int
UNUSED_FIELD_NUMBER: builtins.int
pk_id: builtins.int
spk_id: builtins.int
ik: builtins.bytes
ek: builtins.bytes
message: builtins.bytes
"""Byte-encoding of an OMEMOMessage & authentication tag (see OMEMOAuthenticatedMessage in oldmemo/oldmemo.py)"""
unused: builtins.int
def __init__(self,
*,
pk_id: typing.Optional[builtins.int] = ...,
spk_id: typing.Optional[builtins.int] = ...,
ik: typing.Optional[builtins.bytes] = ...,
ek: typing.Optional[builtins.bytes] = ...,
message: typing.Optional[builtins.bytes] = ...,
unused: typing.Optional[builtins.int] = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["ek",b"ek","ik",b"ik","message",b"message","pk_id",b"pk_id","spk_id",b"spk_id","unused",b"unused"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["ek",b"ek","ik",b"ik","message",b"message","pk_id",b"pk_id","spk_id",b"spk_id","unused",b"unused"]) -> None: ...
global___OMEMOKeyExchange = OMEMOKeyExchange
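# A minimal construction/serialization sketch for these generated types (illustrative only; the
# field values are placeholders):
#
#     from oldmemo import oldmemo_pb2
#
#     message = oldmemo_pb2.OMEMOMessage(n=0, pn=0, dh_pub=b"\x00" * 32, ciphertext=b"")
#     serialized = message.SerializeToString()
#     parsed = oldmemo_pb2.OMEMOMessage.FromString(serialized)
#     assert parsed.n == 0 and parsed.dh_pub == b"\x00" * 32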
python-oldmemo-1.0.3/oldmemo/project.py 0000664 0000000 0000000 00000001034 14332461332 0020141 0 ustar 00root root 0000000 0000000 __all__ = [ "project" ] # pylint: disable=unused-variable
project = {
"name" : "Oldmemo",
"description" : "Backend implementation of the namespace `eu.siacs.conversations.axolotl` for"
" python-omemo.",
"url" : "https://github.com/Syndace/python-oldmemo",
"year" : "2022",
"author" : "Tim Henkes (Syndace)",
"author_email" : "me@syndace.dev",
"categories" : [
"Topic :: Communications :: Chat",
"Topic :: Security :: Cryptography"
]
}
python-oldmemo-1.0.3/oldmemo/py.typed 0000664 0000000 0000000 00000000000 14332461332 0017610 0 ustar 00root root 0000000 0000000 python-oldmemo-1.0.3/oldmemo/version.py 0000664 0000000 0000000 00000000323 14332461332 0020160 0 ustar 00root root 0000000 0000000 __all__ = [ "__version__" ] # pylint: disable=unused-variable
__version__ = {}
__version__["short"] = "1.0.3"
__version__["tag"] = "stable"
__version__["full"] = f"{__version__['short']}-{__version__['tag']}"
python-oldmemo-1.0.3/pylintrc 0000664 0000000 0000000 00000036366 14332461332 0016274 0 ustar 00root root 0000000 0000000 [MASTER]
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
extension-pkg-whitelist=schema_pb2
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS
# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=.+_pb2\.py
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
# number of processors available to use.
jobs=1
# Control the amount of potential inferred values when inferring a single
# object. This can help the performance when dealing with large functions or
# complex, nested conditions.
limit-inference-results=100
# List of plugins (as comma separated values of python module names) to load,
# usually to register additional checkers.
load-plugins=
# Pickle collected data for later comparisons.
persistent=yes
# Specify a configuration file.
#rcfile=
# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages.
suggestion-mode=yes
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
confidence=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
disable=missing-module-docstring,
duplicate-code,
fixme,
logging-fstring-interpolation
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=useless-suppression
[REPORTS]
# Python expression which should return a score less than or equal to 10. You
# have access to the variables 'error', 'warning', 'refactor', and 'convention'
# which contain the number of messages in each category, as well as 'statement'
# which is the total number of statements analyzed. This score is used by the
# global evaluation report (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details.
#msg-template=
# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio). You can also give a reporter class, e.g.
# mypackage.mymodule.MyReporterClass.
output-format=text
# Tells whether to display a full report or only the messages.
reports=no
# Activate the evaluation score.
score=yes
[REFACTORING]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
# Complete name of functions that never returns. When checking for
# inconsistent-return-statements if a never returning function is called then
# it will be considered as an explicit return statement and no message will be
# printed.
never-returning-functions=sys.exit
[SPELLING]
# Limits count of emitted suggestions for spelling mistakes.
max-spelling-suggestions=4
# Spelling dictionary name. Available dictionaries: none. To make it work,
# install the python-enchant package.
spelling-dict=
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains the private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to the private dictionary (see the
# --spelling-private-dict-file option) instead of raising a message.
spelling-store-unknown-words=no
[VARIABLES]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=no
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,
_cb
# A regular expression matching the name of dummy variables (i.e. expected to
# not be used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
# Argument names that match this expression will be ignored. Default to name
# with leading underscore.
ignored-argument-names=_.*|^ignored_|^unused_
# Tells whether we should check for unused import in __init__ files.
init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
[LOGGING]
# Format style used to check logging format string. `old` means using %
# formatting, `new` is for `{}` formatting,and `fstr` is for f-strings.
logging-format-style=old
# Logging modules to check that the string format arguments are in logging
# function parameter format.
logging-modules=logging
[FORMAT]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Maximum number of characters on a single line.
max-line-length=110
# Maximum number of lines in a module.
max-module-lines=10000
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=yes
[TYPECHECK]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# Tells whether to warn about missing members when the owner of the attribute
# is inferred to be None.
ignore-none=no
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=no
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis). It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1
# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1
# List of decorators that change the signature of a decorated function.
signature-mutators=
[STRING]
# This flag controls whether the implicit-str-concat-in-sequence should
# generate a warning on implicit string concatenation in sequences defined over
# several lines.
check-str-concat-over-line-jumps=no
[SIMILARITIES]
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
# Minimum lines number of a similarity.
min-similarity-lines=4
[BASIC]
# Naming style matching correct argument names.
argument-naming-style=snake_case
# Regular expression matching correct argument names. Overrides argument-
# naming-style.
#argument-rgx=
# Naming style matching correct attribute names.
attr-naming-style=snake_case
# Regular expression matching correct attribute names. Overrides attr-naming-
# style.
#attr-rgx=
# Bad variable names which should always be refused, separated by a comma.
bad-names=foo,
bar,
baz,
toto,
tutu,
tata
# Naming style matching correct class attribute names.
class-attribute-naming-style=UPPER_CASE
# Regular expression matching correct class attribute names. Overrides class-
# attribute-naming-style.
#class-attribute-rgx=
# Naming style matching correct class names.
class-naming-style=PascalCase
# Regular expression matching correct class names. Overrides class-naming-
# style.
#class-rgx=
# Naming style matching correct constant names.
const-naming-style=any
# Regular expression matching correct constant names. Overrides const-naming-
# style.
#const-rgx=
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
# Naming style matching correct function names.
function-naming-style=snake_case
# Regular expression matching correct function names. Overrides function-
# naming-style.
#function-rgx=
# Good variable names which should always be accepted, separated by a comma.
good-names=i,
j,
e, # exceptions in except blocks
_,
iv # Domain-specific two-letter variable names
# Include a hint for the correct naming format with invalid-name.
include-naming-hint=no
# Naming style matching correct inline iteration names.
inlinevar-naming-style=any
# Regular expression matching correct inline iteration names. Overrides
# inlinevar-naming-style.
#inlinevar-rgx=
# Naming style matching correct method names.
method-naming-style=snake_case
# Regular expression matching correct method names. Overrides method-naming-
# style.
#method-rgx=
# Naming style matching correct module names.
module-naming-style=snake_case
# Regular expression matching correct module names. Overrides module-naming-
# style.
#module-rgx=
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
# These decorators are taken in consideration only for invalid-name.
property-classes=abc.abstractproperty
# Naming style matching correct variable names.
variable-naming-style=snake_case
# Regular expression matching correct variable names. Overrides variable-
# naming-style.
#variable-rgx=
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
XXX,
TODO
[IMPORTS]
# List of modules that can be imported at any level, not just the top level
# one.
allow-any-import-level=
# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=yes
# Deprecated modules which should not be used, separated by a comma.
deprecated-modules=optparse,tkinter.tix
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled).
ext-import-graph=
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled).
import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled).
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
# Couples of modules and preferred modules, separated by a comma.
preferred-modules=
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,
__new__,
setUp,
__post_init__,
create
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,
_fields,
_replace,
_source,
_make
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=cls
[DESIGN]
# Maximum number of arguments for function / method.
max-args=100
# Maximum number of attributes for a class (see R0902).
max-attributes=100
# Maximum number of boolean expressions in an if statement (see R0916).
max-bool-expr=10
# Maximum number of branch for function / method body.
max-branches=100
# Maximum number of locals for function / method body.
max-locals=100
# Maximum number of parents for a class (see R0901).
max-parents=10
# Maximum number of public methods for a class (see R0904).
max-public-methods=100
# Maximum number of return / yield for function / method body.
max-returns=100
# Maximum number of statements in function / method body.
max-statements=1000
# Minimum number of public methods for a class (see R0903).
min-public-methods=0
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "BaseException, Exception".
overgeneral-exceptions=BaseException,
Exception
python-oldmemo-1.0.3/pyproject.toml 0000664 0000000 0000000 00000000121 14332461332 0017375 0 ustar 00root root 0000000 0000000 [build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"
python-oldmemo-1.0.3/requirements.txt 0000664 0000000 0000000 00000000227 14332461332 0017754 0 ustar 00root root 0000000 0000000 OMEMO>=1.0.0,<2
DoubleRatchet>=1.0.0,<2
X3DH>=1.0.0,<2
XEdDSA>=1.0.0,<2
cryptography>=3.3.2
protobuf>=3.20.3
typing-extensions>=4.3.0
xmlschema>=2.0.2
python-oldmemo-1.0.3/setup.py 0000664 0000000 0000000 00000004407 14332461332 0016206 0 ustar 00root root 0000000 0000000 # pylint: disable=exec-used
import os
from typing import Dict, Union, List
from setuptools import setup, find_packages # type: ignore[import]
source_root = os.path.join(os.path.dirname(os.path.abspath(__file__)), "oldmemo")
version_scope: Dict[str, Dict[str, str]] = {}
with open(os.path.join(source_root, "version.py"), encoding="utf-8") as f:
exec(f.read(), version_scope)
version = version_scope["__version__"]
project_scope: Dict[str, Dict[str, Union[str, List[str]]]] = {}
with open(os.path.join(source_root, "project.py"), encoding="utf-8") as f:
exec(f.read(), project_scope)
project = project_scope["project"]
with open("README.md", encoding="utf-8") as f:
long_description = f.read()
classifiers = [
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU Affero General Public License v3",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy"
]
classifiers.extend(project["categories"])
if version["tag"] == "alpha":
classifiers.append("Development Status :: 3 - Alpha")
if version["tag"] == "beta":
classifiers.append("Development Status :: 4 - Beta")
if version["tag"] == "stable":
classifiers.append("Development Status :: 5 - Production/Stable")
del project["categories"]
del project["year"]
setup(
version=version["short"],
long_description=long_description,
long_description_content_type="text/markdown",
license="AGPLv3",
packages=find_packages(exclude=["tests"]),
install_requires=[
"OMEMO>=1.0.0,<2",
"DoubleRatchet>=1.0.0,<2",
"X3DH>=1.0.0,<2",
"XEdDSA>=1.0.0,<2",
"cryptography>=3.3.2",
"protobuf>=3.20.3",
"typing-extensions>=4.3.0"
],
extras_require={
"xml": [
"xmlschema>=2.0.2"
]
},
python_requires=">=3.7",
include_package_data=True,
zip_safe=False,
classifiers=classifiers,
**project
)
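# Installation sketch matching the metadata above; the optional [xml] extra pulls in xmlschema
# for the XML serialization helpers:
#
#     pip install .          # core package
#     pip install .[xml]     # with optional XML support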