xandikos_0.2.10.orig/.coveragerc
[run]
branch = True
[report]
exclude_lines =
    raise NotImplementedError
xandikos_0.2.10.orig/.dockerignore
.git/
compat/
xandikos_0.2.10.orig/.flake8
[flake8]
extend-ignore = E203, E266, E501, W293, W291, W503
max-line-length = 88
max-complexity = 18
select = B,C,E,F,W,T4,B9
ignore = W504,E203,W503
exclude = compat/vdirsyncer/,.tox,.git,compat/pycaldav,examples/gunicorn.conf.py
application-package-names = xandikos
xandikos_0.2.10.orig/.mailmap
Jelmer Vernooij Jelmer Vernooij
Jelmer Vernooij Jelmer Vernooij
xandikos_0.2.10.orig/.readthedocs.yaml
# .readthedocs.yaml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
# Required
version: 2
# Build documentation in the docs/ directory with Sphinx
sphinx:
  configuration: docs/conf.py
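# Note (comment added for illustration, not part of the upstream file): the
# same documentation can also be built locally with `make docs`, which runs
# `make -C docs html` via the top-level Makefile; this assumes Sphinx is
# installed in your environment.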
xandikos_0.2.10.orig/.stestr.conf
[DEFAULT]
test_path=xandikos/tests
xandikos_0.2.10.orig/.testr.conf
[DEFAULT]
test_command=PYTHONPATH=. python3 -m subunit.run $IDOPTION $LISTOPT xandikos.tests.test_suite
test_id_option=--load-list $IDFILE
test_list_option=--list
xandikos_0.2.10.orig/AUTHORS
Jelmer Vernooij
Geert Stappers
Hugo Osvaldo Barrera
Markus Unterwaditzer
Daniel M. Capella
Ole-Christian S. Hagenes
Denis Laxalde
Félix Sipma
xandikos_0.2.10.orig/CODE_OF_CONDUCT.md
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, sex characteristics, gender identity and expression,
level of experience, education, socio-economic status, nationality, personal
appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project lead at jelmer@jelmer.uk. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see
https://www.contributor-covenant.org/faq
xandikos_0.2.10.orig/CONTRIBUTING.md
Xandikos uses the PEP8 style guide.
You can verify whether you've introduced any style violations by running
"make style".
There is some very minimal developer documentation and some vague design notes in notes/.
Where possible, please implement each new RFC in its own file.
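As a quick sketch of the usual pre-submission checks (these targets come from the
Makefile shipped in this repository):

    make style    # flake8 + isort --check
    make typing   # mypy xandikos
    make check    # run the unit test suite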
xandikos_0.2.10.orig/COPYING
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc.
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
Copyright (C)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see .
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
Copyright (C)
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
.
xandikos_0.2.10.orig/Cargo.lock
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "addr2line"
version = "0.21.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb"
dependencies = [
"gimli",
]
[[package]]
name = "adler"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
[[package]]
name = "anstream"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163"
dependencies = [
"anstyle",
"anstyle-parse",
"anstyle-query",
"anstyle-wincon",
"colorchoice",
"is-terminal",
"utf8parse",
]
[[package]]
name = "anstyle"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "15c4c2c83f81532e5845a733998b6971faca23490340a418e9b72a3ec9de12ea"
[[package]]
name = "anstyle-parse"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333"
dependencies = [
"utf8parse",
]
[[package]]
name = "anstyle-query"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b"
dependencies = [
"windows-sys",
]
[[package]]
name = "anstyle-wincon"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c677ab05e09154296dd37acecd46420c17b9713e8366facafa8fc0885167cf4c"
dependencies = [
"anstyle",
"windows-sys",
]
[[package]]
name = "autocfg"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "backtrace"
version = "0.3.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837"
dependencies = [
"addr2line",
"cc",
"cfg-if",
"libc",
"miniz_oxide",
"object",
"rustc-demangle",
]
[[package]]
name = "bitflags"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635"
[[package]]
name = "bytes"
version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be"
[[package]]
name = "cc"
version = "1.0.83"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0"
dependencies = [
"libc",
]
[[package]]
name = "cfg-if"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "clap"
version = "4.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34d21f9bf1b425d2968943631ec91202fe5e837264063503708b83013f8fc938"
dependencies = [
"clap_builder",
]
[[package]]
name = "clap_builder"
version = "4.2.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "914c8c79fb560f238ef6429439a30023c862f7a28e688c58f7203f12b29970bd"
dependencies = [
"anstream",
"anstyle",
"bitflags 1.3.2",
"clap_lex",
"strsim",
]
[[package]]
name = "clap_lex"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a2dd5a6fe8c6e3502f568a6353e5273bbb15193ad9a89e457b9970798efbea1"
[[package]]
name = "colorchoice"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7"
[[package]]
name = "errno"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "136526188508e25c6fef639d7927dfb3e0e3084488bf202267829cf7fc23dbdd"
dependencies = [
"errno-dragonfly",
"libc",
"windows-sys",
]
[[package]]
name = "errno-dragonfly"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf"
dependencies = [
"cc",
"libc",
]
[[package]]
name = "gimli"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0"
[[package]]
name = "hermit-abi"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b"
[[package]]
name = "indoc"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa799dd5ed20a7e349f3b4639aa80d74549c81716d9ec4f994c9b5815598306"
[[package]]
name = "is-terminal"
version = "0.4.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b"
dependencies = [
"hermit-abi",
"rustix",
"windows-sys",
]
[[package]]
name = "libc"
version = "0.2.147"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
[[package]]
name = "linux-raw-sys"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503"
[[package]]
name = "lock_api"
version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16"
dependencies = [
"autocfg",
"scopeguard",
]
[[package]]
name = "memchr"
version = "2.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f232d6ef707e1956a43342693d2a31e72989554d58299d7a88738cc95b0d35c"
[[package]]
name = "memoffset"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1"
dependencies = [
"autocfg",
]
[[package]]
name = "miniz_oxide"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7"
dependencies = [
"adler",
]
[[package]]
name = "mio"
version = "0.8.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2"
dependencies = [
"libc",
"wasi",
"windows-sys",
]
[[package]]
name = "num_cpus"
version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
dependencies = [
"hermit-abi",
"libc",
]
[[package]]
name = "object"
version = "0.32.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0"
dependencies = [
"memchr",
]
[[package]]
name = "once_cell"
version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
[[package]]
name = "parking_lot"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
dependencies = [
"lock_api",
"parking_lot_core",
]
[[package]]
name = "parking_lot_core"
version = "0.9.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447"
dependencies = [
"cfg-if",
"libc",
"redox_syscall",
"smallvec",
"windows-targets",
]
[[package]]
name = "pin-project-lite"
version = "0.2.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58"
[[package]]
name = "proc-macro2"
version = "1.0.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9"
dependencies = [
"unicode-ident",
]
[[package]]
name = "pyo3"
version = "0.18.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3b1ac5b3731ba34fdaa9785f8d74d17448cd18f30cf19e0c7e7b1fdb5272109"
dependencies = [
"cfg-if",
"indoc",
"libc",
"memoffset",
"parking_lot",
"pyo3-build-config",
"pyo3-ffi",
"pyo3-macros",
"unindent",
]
[[package]]
name = "pyo3-build-config"
version = "0.18.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9cb946f5ac61bb61a5014924910d936ebd2b23b705f7a4a3c40b05c720b079a3"
dependencies = [
"once_cell",
"target-lexicon",
]
[[package]]
name = "pyo3-ffi"
version = "0.18.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd4d7c5337821916ea2a1d21d1092e8443cf34879e53a0ac653fbb98f44ff65c"
dependencies = [
"libc",
"pyo3-build-config",
]
[[package]]
name = "pyo3-macros"
version = "0.18.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9d39c55dab3fc5a4b25bbd1ac10a2da452c4aca13bb450f22818a002e29648d"
dependencies = [
"proc-macro2",
"pyo3-macros-backend",
"quote",
"syn 1.0.109",
]
[[package]]
name = "pyo3-macros-backend"
version = "0.18.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97daff08a4c48320587b5224cc98d609e3c27b6d437315bd40b605c98eeb5918"
dependencies = [
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "quote"
version = "1.0.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
dependencies = [
"proc-macro2",
]
[[package]]
name = "redox_syscall"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29"
dependencies = [
"bitflags 1.3.2",
]
[[package]]
name = "rustc-demangle"
version = "0.1.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
[[package]]
name = "rustix"
version = "0.38.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0c3dde1fc030af041adc40e79c0e7fbcf431dd24870053d187d7c66e4b87453"
dependencies = [
"bitflags 2.4.0",
"errno",
"libc",
"linux-raw-sys",
"windows-sys",
]
[[package]]
name = "scopeguard"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
name = "signal-hook-registry"
version = "1.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1"
dependencies = [
"libc",
]
[[package]]
name = "smallvec"
version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9"
[[package]]
name = "socket2"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2538b18701741680e0322a2302176d3253a35388e2e62f172f64f4f16605f877"
dependencies = [
"libc",
"windows-sys",
]
[[package]]
name = "strsim"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
[[package]]
name = "syn"
version = "1.0.109"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "syn"
version = "2.0.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "718fa2415bcb8d8bd775917a1bf12a7931b6dfa890753378538118181e0cb398"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "target-lexicon"
version = "0.12.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d0e916b1148c8e263850e1ebcbd046f333e0683c724876bb0da63ea4373dc8a"
[[package]]
name = "tokio"
version = "1.32.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9"
dependencies = [
"backtrace",
"bytes",
"libc",
"mio",
"num_cpus",
"parking_lot",
"pin-project-lite",
"signal-hook-registry",
"socket2",
"tokio-macros",
"windows-sys",
]
[[package]]
name = "tokio-macros"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.31",
]
[[package]]
name = "unicode-ident"
version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c"
[[package]]
name = "unindent"
version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c"
[[package]]
name = "utf8parse"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
[[package]]
name = "wasi"
version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "windows-sys"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-targets"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
[[package]]
name = "windows_aarch64_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
[[package]]
name = "windows_i686_gnu"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
[[package]]
name = "windows_i686_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
[[package]]
name = "windows_x86_64_gnu"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
[[package]]
name = "windows_x86_64_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
[[package]]
name = "xandikos"
version = "0.2.10"
dependencies = [
"clap",
"pyo3",
"tokio",
]
xandikos_0.2.10.orig/Cargo.toml
[package]
name = "xandikos"
version = "0.2.10"
authors = [ "Jelmer Vernooij ",]
edition = "2021"
license = "GPL-3.0+"
description = "Lightweight CalDAV/CardDAV server"
repository = "https://github.com/jelmer/xandikos.git"
homepage = "https://github.com/jelmer/xandikos"
[dependencies]
clap = "<=4.2"
[dependencies.pyo3]
version = "0.18"
features = [ "auto-initialize",]
[dependencies.tokio]
version = "1"
features = [ "full",]
xandikos_0.2.10.orig/Dockerfile
# Docker file for Xandikos.
#
# Note that this dockerfile starts Xandikos without any authentication;
# for authenticated access we recommend you run it behind a reverse proxy.
FROM debian:sid-slim
LABEL maintainer="jelmer@jelmer.uk"
RUN apt-get update && \
apt-get -y install --no-install-recommends python3-icalendar python3-dulwich python3-jinja2 python3-defusedxml python3-aiohttp python3-vobject python3-aiohttp-openmetrics && \
apt-get clean && \
rm -rf /var/lib/apt/lists/ && \
groupadd -g 1000 xandikos && \
useradd -d /code -c Xandikos -g xandikos -M -s /bin/bash -u 1000 xandikos
ADD . /code
WORKDIR /code
VOLUME /data
EXPOSE 8000
USER xandikos
ENTRYPOINT ["python3", "-m", "xandikos.web", "--port=8000", "--metrics-port=8001", "--listen-address=0.0.0.0", "-d", "/data"]
CMD ["--defaults"]
xandikos_0.2.10.orig/GOALS.rst 0000644 0000000 0000000 00000000434 13371556014 012766 0 ustar 00 The goal of Xandikos is to be a simple CalDAV/CardDAV server for personal use:
* easy to set up
* use of plain .ics/.vcf files for storage
* history stored in Git
* clear separation between protocol implementation and storage
* well tested
* standards complete
* standards compliant
xandikos_0.2.10.orig/MANIFEST.in 0000644 0000000 0000000 00000000350 14476041427 013126 0 ustar 00 include *.rst
include AUTHORS
include COPYING
include README.rst
include Makefile
include compat/*.sh
include compat/*.rst
include compat/*.xml
include compat/*.sha256sum
include notes/*.rst
include tox.ini
graft examples
graft man
xandikos_0.2.10.orig/Makefile 0000644 0000000 0000000 00000002774 14476041427 013044 0 ustar 00 export PYTHON ?= python3
COVERAGE ?= $(PYTHON) -m coverage
COVERAGE_RUN_OPTIONS ?=
COVERAGE_RUN ?= $(COVERAGE) run $(COVERAGE_RUN_OPTIONS)
TESTSUITE = xandikos.tests.test_suite
LITMUS_TESTS ?= basic http
CALDAVTESTER_TESTS ?= CalDAV/delete.xml \
CalDAV/options.xml \
CalDAV/vtodos.xml
XANDIKOS_COVERAGE ?= $(COVERAGE_RUN) -a --rcfile=$(shell pwd)/.coveragerc --source=xandikos -m xandikos.web
check:
$(PYTHON) -m unittest $(TESTSUITE)
style:
$(PYTHON) -m flake8
isort --check .
typing:
$(PYTHON) -m mypy xandikos
web:
$(PYTHON) -m xandikos.web
check-litmus-all:
./compat/xandikos-litmus.sh "basic copymove http props locks"
check-litmus:
./compat/xandikos-litmus.sh "${LITMUS_TESTS}"
check-pycaldav:
./compat/xandikos-pycaldav.sh
coverage-pycaldav:
XANDIKOS="$(XANDIKOS_COVERAGE)" ./compat/xandikos-pycaldav.sh
coverage-litmus:
XANDIKOS="$(XANDIKOS_COVERAGE)" ./compat/xandikos-litmus.sh "${LITMUS_TESTS}"
check-vdirsyncer:
./compat/xandikos-vdirsyncer.sh
coverage-vdirsyncer:
XANDIKOS="$(XANDIKOS_COVERAGE)" ./compat/xandikos-vdirsyncer.sh
check-all: check check-vdirsyncer check-litmus check-pycaldav style
coverage-all: coverage coverage-litmus coverage-vdirsyncer
coverage:
$(COVERAGE_RUN) --source=xandikos -m unittest $(TESTSUITE)
coverage-html: coverage
$(COVERAGE) html
docs:
$(MAKE) -C docs html
.PHONY: docs
docker:
buildah build -t jvernooij/xandikos -t ghcr.io/jelmer/xandikos .
buildah push jvernooij/xandikos
buildah push ghcr.io/jelmer/xandikos
reformat:
isort .
xandikos_0.2.10.orig/NEWS 0000644 0000000 0000000 00000003577 14476041427 012105 0 ustar 00 0.2.10 2023-09-04
* Add support for systemd socket activation.
(schnusch, #136, #155)
* Add basic documentation.
(Jelmer Vernooij)
* Use entry points to install xandikos script.
(Jelmer Vernooij, #163)
* ``sync-collection``: handle invalid tokens.
(Jelmer Vernooij)
0.2.8 2022-01-09
0.2.7 2021-12-27
* Add basic XMP property support. (Jelmer Vernooij)
* Add a /health target. (Jelmer Vernooij)
0.2.6 2021-03-20
* Don't listen on TCP port (defaulting to 0.0.0.0) when a UNIX domain socket
is specified. (schnusch, #134)
0.2.5 2021-02-18
* Fix support for uwsgi when environ['wsgi.input'].read() does not
accept a size=None. (Jelmer Vernooij)
0.2.4 2021-02-16
* Wait for entire body to arrive. (Michael Alyn Miller, #129)
0.2.3 2020-07-25
* Fix handling of WSGI - not all versions of start_response take
keyword arguments. (Jelmer Vernooij, #124)
* Add --no-strict option for clients that don't follow
the spec. (Jelmer Vernooij)
* Add basic support for expanding RRULE. (Jelmer Vernooij, #8)
* Add parsing support for CALDAV:schedule-tag property.
(Jelmer Vernooij)
* Fix support for HTTP Expect. (Jelmer Vernooij, #126)
0.2.2 2020-05-14
* Fix use of xandikos.wsgi module in uwsgi. (Jelmer Vernooij)
0.2.1 2020-05-06
* Add missing dependencies in setup.py. (Jelmer Vernooij)
* Fix syntax errors in xandikos/store/vdir.py.
(Unused, but breaks bytecompilation). (Jelmer Vernooij)
0.2.0 2020-05-04
* Fix subelement filtering. (Jelmer Vernooij)
* Skip non-calendar files for calendar-query operations.
(Jelmer Vernooij, #108)
* Switch to using aiohttp rather than uWSGI.
(Jelmer Vernooij)
* Query component's SUMMARY in ICalendarFile.describe().
(Denis Laxalde)
* Add /metrics support. (Jelmer Vernooij)
* Drop support for Python 3.4, add support for 3.8.
(Jelmer Vernooij)
0.1.0 2019-04-07
Initial release.
xandikos_0.2.10.orig/README.rst 0000644 0000000 0000000 00000013711 14476041427 013064 0 ustar 00 Xandikos is a lightweight yet complete CardDAV/CalDAV server that backs onto a Git repository.
Xandikos (Ξανδικός or Ξανθικός) takes its name from the name of the March month
in the ancient Macedonian calendar, used in Macedon in the first millennium BC.
Extended documentation can be found `on the home page <https://www.xandikos.org/>`_.
Implemented standards
=====================
The following standards are implemented:
- :RFC:`4918`/:RFC:`2518` (Core WebDAV) - *implemented, except for COPY/MOVE/LOCK operations*
- :RFC:`4791` (CalDAV) - *fully implemented*
- :RFC:`6352` (CardDAV) - *fully implemented*
- :RFC:`5397` (Current Principal) - *fully implemented*
- :RFC:`3253` (Versioning Extensions) - *partially implemented, only the REPORT method and {DAV:}expand-property property*
- :RFC:`3744` (Access Control) - *partially implemented*
- :RFC:`5995` (POST to create members) - *fully implemented*
- :RFC:`5689` (Extended MKCOL) - *fully implemented*
- :RFC:`6578` (Collection Synchronization for WebDAV) - *fully implemented*
The following standards are not implemented:
- :RFC:`6638` (CalDAV Scheduling Extensions) - *not implemented*
- :RFC:`7809` (CalDAV Time Zone Extensions) - *not implemented*
- :RFC:`4331` (WebDAV Quota) - *not implemented*
- :RFC:`4709` (WebDAV Mount) - `intentionally `_ *not implemented*
- :RFC:`5546` (iCal iTIP) - *not implemented*
- :RFC:`4324` (iCAL CAP) - *not implemented*
- :RFC:`7953` (iCal AVAILABILITY) - *not implemented*
See `DAV compliance `_ for more detail on specification compliancy.
Limitations
-----------
- No multi-user support
- No support for CalDAV scheduling extensions
Supported clients
=================
Xandikos has been tested and works with the following CalDAV/CardDAV clients:
- `Vdirsyncer `_
- `caldavzap `_/`carddavmate `_
- `evolution `_
- `DAVx5 `_ (formerly DAVDroid)
- `sogo connector for Icedove/Thunderbird `_
- `aCALdav syncer for Android `_
- `pycardsyncer `_
- `akonadi `_
- `CalDAV-Sync `_
- `CardDAV-Sync `_
- `Calendarsync `_
- `Tasks `_
- `AgendaV `_
- `CardBook `_
- Apple's iOS
- `homeassistant's CalDAV integration `_
Dependencies
============
At the moment, Xandikos supports Python 3 (see pyproject.toml for specific version)
as well as Pypy 3. It also uses `Dulwich `_,
`Jinja2 `_,
`icalendar `_, and
`defusedxml `_.
E.g. to install those dependencies on Debian:
.. code:: shell
sudo apt install python3-dulwich python3-defusedxml python3-icalendar python3-jinja2
Or to install them using pip:
.. code:: shell
python setup.py develop
Docker
------
A Dockerfile is also provided; see the comments on the top of the file for
configuration instructions. The docker image is regularly built and
published at ``ghcr.io/jelmer/xandikos``. See
``examples/docker-compose.yml`` and the
`man page `_ for more info.
Running
=======
Xandikos can either directly listen on a plain HTTP socket, or it can sit
behind a reverse HTTP proxy.
Testing
-------
To run a standalone (no authentication) instance of Xandikos,
with a pre-created calendar and addressbook (storing data in *$HOME/dav*):
.. code:: shell
./bin/xandikos --defaults -d $HOME/dav
A server should now be listening on `localhost:8080 <http://localhost:8080/>`_.
Note that Xandikos does not create any collections unless ``--defaults`` is
specified. You can create collections either from your CalDAV/CardDAV client,
or by creating git repositories under the *contacts* or *calendars* directories
it has created.
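For instance, assuming the ``--defaults`` layout from the command above, an
extra calendar collection could be added by hand roughly like this (a sketch;
*work* is just an example name, and you may still want to adjust collection
metadata from your client afterwards):

.. code:: shell

    git init $HOME/dav/user/calendars/work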
Production
----------
The easiest way to run Xandikos in production is by running a reverse HTTP proxy
like Apache or nginx in front of it.
The xandikos script can either listen on the local host on a particular port, or
it can listen on a unix domain socket.
For example init system configurations, see examples/.
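As a rough sketch, assuming ``xandikos`` is on your ``PATH``, an invocation
behind a proxy that exposes it under ``/dav`` might look like this (paths and
the principal name are placeholders; see ``examples/xandikos.service`` for a
complete systemd unit):

.. code:: shell

    xandikos -d /var/lib/xandikos --route-prefix=/dav \
        --current-user-principal=/user -l /run/xandikos.sock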
Client instructions
===================
Some clients can automatically discover the calendars and addressbook URLs from
a DAV server (if they support :RFC:`5397`). For such clients you can simply
provide the base URL to Xandikos during setup.
Clients that lack such automated discovery (e.g. Thunderbird Lightning) require
the direct URL to a calendar or addressbook. In this case you
should provide the full URL to the calendar or addressbook; if you initialized
Xandikos using the ``--defaults`` argument mentioned in the previous section,
these URLs will look something like this::
http://dav.example.com/user/calendars/calendar
http://dav.example.com/user/contacts/addressbook
Contributing
============
Contributions to Xandikos are very welcome. If you run into bugs or have
feature requests, please file issues `on GitHub
`_. If you're interested in
contributing code or documentation, please read `CONTRIBUTING
`_. Issues that are good for new contributors are tagged
`new-contributor `_
on GitHub.
Help
====
There is a *#xandikos* IRC channel on the `OFTC `_
IRC network, and a `Xandikos `_
mailing list.
xandikos_0.2.10.orig/SECURITY.md 0000644 0000000 0000000 00000000261 14015467211 013152 0 ustar 00 # Security Policy
## Reporting a Vulnerability
Please report security issues by e-mail to jelmer@jelmer.uk, ideally PGP encrypted to the key at https://jelmer.uk/D729A457.asc
xandikos_0.2.10.orig/SUPPORT.md 0000644 0000000 0000000 00000000250 14060003533 013056 0 ustar 00 There is a *#xandikos* IRC channel on the [OFTC](https://www.oftc.net/)
IRC network, and a
[Xandikos](https://groups.google.com/forum/#!forum/xandikos) mailing list.
xandikos_0.2.10.orig/bin/ 0000755 0000000 0000000 00000000000 13046331303 012125 5 ustar 00 xandikos_0.2.10.orig/compat/ 0000755 0000000 0000000 00000000000 13037551457 012656 5 ustar 00 xandikos_0.2.10.orig/disperse.conf 0000644 0000000 0000000 00000000657 14475445730 014073 0 ustar 00 # See https://github.com/jelmer/disperse
name: "xandikos"
news_file: "NEWS"
timeout_days: 5
tag_name: "v$VERSION"
github_url: "https://github.com/jelmer/xandikos"
verify_command: "make check"
update_version {
path: "xandikos/__init__.py"
match: "^__version__ = \((.*)\)$"
new_line: "__version__ = $TUPLED_VERSION"
}
update_version {
path: "Cargo.toml"
match: "^version = \"(.*)\"$"
new_line: "version = \"$VERSION\""
}
xandikos_0.2.10.orig/docs/ 0000755 0000000 0000000 00000000000 14210664333 012313 5 ustar 00 xandikos_0.2.10.orig/examples/ 0000755 0000000 0000000 00000000000 13055142225 013176 5 ustar 00 xandikos_0.2.10.orig/grafana-dashboard.json 0000644 0000000 0000000 00000004073 14323017253 015604 0 ustar 00 {
"__inputs": [
{
"name": "DS_PROMETHEUS",
"label": "Prometheus",
"description": "",
"type": "datasource",
"pluginId": "prometheus",
"pluginName": "Prometheus"
}
],
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": "-- Grafana --",
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"gnetId": null,
"graphTooltip": 0,
"id": 10286,
"links": [],
"panels": [
{
"datasource": "${DS_PROMETHEUS}",
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 9,
"w": 12,
"x": 0,
"y": 0
},
"id": 2,
"options": {
"colorMode": "value",
"graphMode": "area",
"justifyMode": "auto",
"orientation": "auto",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"text": {},
"textMode": "auto"
},
"pluginVersion": "7.5.11",
"targets": [
{
"exemplar": true,
"expr": "up{job=\"xandikos\"}",
"interval": "",
"legendFormat": "",
"refId": "A"
}
],
"title": "Health",
"type": "stat"
}
],
"schemaVersion": 27,
"style": "dark",
"tags": [],
"templating": {
"list": []
},
"time": {
"from": "now-6h",
"to": "now"
},
"timepicker": {},
"timezone": "",
"title": "Xandikos",
"uid": "k7dunuVVk",
"version": 2
}
xandikos_0.2.10.orig/man/ 0000755 0000000 0000000 00000000000 14020773671 012143 5 ustar 00 xandikos_0.2.10.orig/notes/ 0000755 0000000 0000000 00000000000 12633333551 012515 5 ustar 00 xandikos_0.2.10.orig/pyproject.toml 0000644 0000000 0000000 00000003476 14475330435 014317 0 ustar 00 [build-system]
requires = ["setuptools>=61.2"]
build-backend = "setuptools.build_meta"
[project]
name = "xandikos"
description = "Lightweight CalDAV/CardDAV server"
readme = "README.rst"
authors = [{name = "Jelmer Vernooij", email = "jelmer@jelmer.uk"}]
license = {text = "GNU GPLv3 or later"}
classifiers = [
"Development Status :: 4 - Beta",
"License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Operating System :: POSIX",
]
urls = {Homepage = "https://www.xandikos.org/"}
requires-python = ">=3.9"
dependencies = [
"aiohttp",
"icalendar>=5.0.4",
"dulwich>=0.21.6",
"defusedxml",
"jinja2",
"multidict",
"vobject",
]
dynamic = ["version"]
[project.optional-dependencies]
prometheus = ["aiohttp_openmetrics"]
systemd = ["systemd_python"]
[project.scripts]
xandikos = "xandikos.__main__:main"
[tool.setuptools]
include-package-data = false
[tool.setuptools.packages]
find = {namespaces = false}
[tool.setuptools.package-data]
xandikos = [
"templates/*.html",
"py.typed",
]
[tool.setuptools.dynamic]
version = {attr = "xandikos.__version__"}
[tool.mypy]
ignore_missing_imports = true
[tool.distutils.bdist_wheel]
universal = 1
[tool.ruff]
select = [
"ANN",
"D",
"E",
"F",
"UP",
]
ignore = [
"ANN001",
"ANN002",
"ANN003",
"ANN101", # missing-type-self
"ANN102",
"ANN201",
"ANN202",
"ANN204",
"ANN206",
"D100",
"D101",
"D102",
"D103",
"D104",
"D105",
"D107",
"D403",
"D417",
"E501",
]
target-version = "py37"
[tool.ruff.pydocstyle]
convention = "google"
xandikos_0.2.10.orig/requirements.txt 0000644 0000000 0000000 00000000076 13561565105 014657 0 ustar 00 icalendar
dulwich
defusedxml
jinja2
aiohttp
prometheus_client
xandikos_0.2.10.orig/setup.py 0000755 0000000 0000000 00000000071 14476041427 013105 0 ustar 00 #!/usr/bin/python3
from setuptools import setup
setup()
xandikos_0.2.10.orig/src/ 0000755 0000000 0000000 00000000000 14433202134 012144 5 ustar 00 xandikos_0.2.10.orig/tox.ini 0000644 0000000 0000000 00000000223 13654342552 012702 0 ustar 00 [tox]
downloadcache = {toxworkdir}/cache/
envlist = py36, py37, py38
[testenv]
commands = make check
recreate = True
whitelist_externals = make
xandikos_0.2.10.orig/xandikos/ 0000755 0000000 0000000 00000000000 13043745642 013211 5 ustar 00 xandikos_0.2.10.orig/bin/xandikos 0000755 0000000 0000000 00000002103 14476041427 013704 0 ustar 00 #!/usr/bin/env python3
# Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import asyncio
import os
import sys
# running from source dir?
if os.path.exists(os.path.join(os.path.dirname(__file__), "..", "xandikos")):
    sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
from xandikos.__main__ import main
sys.exit(asyncio.run(main(sys.argv[1:])))
xandikos_0.2.10.orig/compat/.gitignore 0000644 0000000 0000000 00000000046 14476041427 014645 0 ustar 00 litmus-*.tar.gz
vdirsyncer/
pycaldav/
xandikos_0.2.10.orig/compat/README.rst 0000644 0000000 0000000 00000000364 14476041427 014347 0 ustar 00 This directory contains scripts to run external CalDAV/CardDAV/WebDAV
testsuites against the Xandikos web server.
Currently supported:
- `Vdirsyncer `_
- `litmus `_
xandikos_0.2.10.orig/compat/common.sh 0000644 0000000 0000000 00000001432 14476041427 014501 0 ustar 00 #!/bin/bash
# Common functions for running xandikos in compat tests
XANDIKOS_PID=
DAEMON_LOG=$(mktemp)
SERVEDIR=$(mktemp -d)
if [ -z "${XANDIKOS}" ]; then
XANDIKOS=$(dirname $0)/../bin/xandikos
fi
set -e
xandikos_cleanup() {
[ -z ${XANDIKOS_PID} ] || kill -INT ${XANDIKOS_PID}
rm --preserve-root -rf ${SERVEDIR}
cat ${DAEMON_LOG}
wait ${XANDIKOS_PID} || true
}
run_xandikos()
{
PORT="$1"
METRICS_PORT="$2"
shift 2
echo "Writing daemon log to $DAEMON_LOG"
${XANDIKOS} --no-detect-systemd --port=${PORT} --metrics-port=${METRICS_PORT} -llocalhost -d ${SERVEDIR} "$@" 2>&1 >$DAEMON_LOG &
XANDIKOS_PID=$!
trap xandikos_cleanup 0 EXIT
i=0
while [ $i -lt 50 ]
do
if [ "$(curl http://localhost:${METRICS_PORT}/health)" = "ok" ]; then
break
fi
sleep 1
let i+=1
done
}
xandikos_0.2.10.orig/compat/litmus-0.13.tar.gz.sha256sum 0000644 0000000 0000000 00000000125 13056332561 017425 0 ustar 00 09d615958121706444db67e09c40df5f753ccf1fa14846fdeb439298aa9ac3ff litmus-0.13.tar.gz
xandikos_0.2.10.orig/compat/litmus.sh 0000755 0000000 0000000 00000001312 13260421012 014504 0 ustar 00 #!/bin/bash -e
URL="$1"
if [ -z "$URL" ]; then
echo "Usage: $0 URL"
exit 1
fi
if [ -n "$TESTS" ]; then
TEST_ARG=TESTS="$TESTS"
fi
SRCPATH="$(dirname $(readlink -m $0))"
VERSION=${LITMUS_VERSION:-0.13}
LITMUS_URL="${LITMUS_URL:-http://www.webdav.org/neon/litmus/litmus-${VERSION}.tar.gz}"
scratch=$(mktemp -d)
function finish() {
rm -rf "${scratch}"
}
trap finish EXIT
pushd "${scratch}"
if [ -f "${SRCPATH}/litmus-${VERSION}.tar.gz" ]; then
cp "${SRCPATH}/litmus-${VERSION}.tar.gz" .
else
wget -O "litmus-${VERSION}.tar.gz" "${LITMUS_URL}"
fi
sha256sum -c ${SRCPATH}/litmus-${VERSION}.tar.gz.sha256sum
tar xvfz litmus-${VERSION}.tar.gz
pushd litmus-${VERSION}
./configure
make
make URL="$URL" $TEST_ARG check
xandikos_0.2.10.orig/compat/serverinfo.xml 0000644 0000000 0000000 00000050032 13123462602 015547 0 ustar 00
localhost
5233
8443
basic
120
0.25
caldav
no-duplicate-uids
ctag
$multistatus-response-prefix:
/{DAV:}multistatus/{DAV:}response
$multistatus-href-prefix:
/{DAV:}multistatus/{DAV:}response/{DAV:}href
$verify-response-prefix:
{DAV:}response/{DAV:}propstat/{DAV:}prop
$verify-property-prefix:
/{DAV:}multistatus/{DAV:}response/{DAV:}propstat/{DAV:}prop
$verify-bad-response:
/{DAV:}multistatus/{DAV:}response/{DAV:}status
$verify-error-response:
/{DAV:}multistatus/{DAV:}response/{DAV:}error
$CALDAV:
urn:ietf:params:xml:ns:caldav
$CARDDAV:
urn:ietf:params:xml:ns:carddav
$CS:
http://calendarserver.org/ns/
$root:
/
$principalcollection:
$root:principals/
$uidstype:
__uids__
$userstype:
users
$groupstype:
groups
$locationstype:
locations
$resourcestype:
resources
$principals_uids:
$principalcollection:$uidstype:/
$principals_users:
$principalcollection:$userstype:/
$principals_groups:
$principalcollection:$groupstype:/
$principals_resources:
$principalcollection:$resourcestype:/
$principals_locations:
$principalcollection:$locationstype:/
$calendars:
$root:calendars/
$calendars_uids:
$calendars:$uidstype:/
$calendars_users:
$calendars:$userstype:/
$calendars_groups:
$calendars:$groupstype:/
$calendars_resources:
$calendars:$resourcestype:/
$calendars_locations:
$calendars:$locationstype:/
$calendar:
calendar
$tasks:
tasks
$polls:
polls
$inbox:
inbox
$outbox:
outbox
$dropbox:
dropbox
$attachments:
dropbox
$notification:
notification
$freebusy:
freebusy
$servertoserver:
$root:inbox
$timezoneservice:
$root:timezones
$timezonestdservice:
$root:stdtimezones
$addressbooks:
$root:addressbooks/
$addressbooks_uids:
$addressbooks:$uidstype:/
$addressbooks_users:
$addressbooks:$userstype:/
$addressbooks_groups:
$addressbooks:$groupstype:/
$addressbook:
addressbook
$directory:
$root:directory/
$add-member:
;add-member
$useradmin:
admin
$useradminguid:
admin
$pswdadmin:
admin
$principal_admin:
$principals_users:$useradmin:/
$principaluri_admin:
$principals_uids:$useradminguid:/
$userapprentice:
apprentice
$userapprenticeguid:
apprentice
$pswdapprentice:
apprentice
$principal_apprentice:
$principals_users:$userapprentice:/
$principaluri_apprentice:
$principals_uids:$userapprenticeguid:/
$userproxy:
superuser
$pswdproxy:
superuser
$userid%d:
user%02d
$userguid%d:
user%02d
$username%d:
User %02d
$username-encoded%d:
User%%20%02d
$firstname%d:
User
$lastname%d:
%02d
$pswd%d:
user%02d
$principal%d:
$principals_users:$userid%d:/
$principaluri%d:
$principals_uids:$userguid%d:/
$principal%dnoslash:
$principals_users:$userid%d:
$calendarhome%d:
$calendars_uids:$userguid%d:
$calendarhomealt%d:
$calendars_users:$userid%d:
$calendarpath%d:
$calendarhome%d:/$calendar:
$calendarpathalt%d:
$calendarhomealt%d:/$calendar:
$taskspath%d:
$calendarhome%d:/$tasks:
$pollspath%d:
$calendarhome%d:/$polls:
$inboxpath%d:
$calendarhome%d:/$inbox:
$outboxpath%d:
$calendarhome%d:/$outbox:
$dropboxpath%d:
$calendarhome%d:/$dropbox:
$notificationpath%d:
$calendarhome%d:/$notification:
$freebusypath%d:
$calendarhome%d:/$freebusy:
$email%d:
$userid%d:@example.com
$cuaddr%d:
mailto:$email%d:
$cuaddralt%d:
$principaluri%d:
$cuaddraltnoslash%d:
$principals_uids:$userguid%d:
$cuaddrurn%d:
urn:uuid:$userguid%d:
$addressbookhome%d:
$addressbooks_uids:$userguid%d:
$addressbookpath%d:
$addressbookhome%d:/$addressbook:
$publicuserid%d:
public%02d
$publicuserguid%d:
public%02d
$publicusername%d:
Public %02d
$publicpswd%d:
public%02d
$publicprincipal%d:
$principals_users:$publicuserid%d:/
$publicprincipaluri%d:
$principals_uids:$publicuserguid%d:/
$publiccalendarhome%d:
$calendars_uids:$publicuserguid%d:
$publiccalendarpath%d:
$calendars_uids:$publicuserguid%d:/$calendar:
$publicemail%d:
$publicuserid%d:@example.com
$publiccuaddr%d:
mailto:$publicemail%d:
$publiccuaddralt%d:
$publicprincipaluri%d:
$publiccuaddrurn%d:
urn:uuid:$publicuserguid%d:
$resourceid%d:
resource%02d
$resourceguid%d:
resource%02d
$resourcename%d:
Resource %02d
$rcalendarhome%d:
$calendars_uids:$resourceguid%d:
$rcalendarpath%d:
$calendars_uids:$resourceguid%d:/$calendar:
$rinboxpath%d:
$calendars_uids:$resourceguid%d:/$inbox:
$routboxpath%d:
$calendars_uids:$resourceguid%d:/$outbox:
$rprincipal%d:
$principals_resources:$resourceid%d:/
$rprincipaluri%d:
$principals_uids:$resourceguid%d:/
$rcuaddralt%d:
$rprincipaluri%d:
$rcuaddrurn%d:
urn:uuid:$resourceguid%d:
$locationid%d:
location%02d
$locationguid%d:
location%02d
$locationname%d:
Location %02d
$lcalendarhome%d:
$calendars_uids:$locationguid%d:
$lcalendarpath%d:
$calendars_uids:$locationguid%d:/$calendar:
$linboxpath%d:
$calendars_uids:$locationguid%d:/$inbox:
$loutboxpath%d:
$calendars_uids:$locationguid%d:/$outbox:
$lprincipal%d:
$principals_resources:$locationid%d:/
$lprincipaluri%d:
$principals_uids:$locationguid%d:/
$lcuaddralt%d:
$lprincipaluri%d:
$lcuaddrurn%d:
urn:uuid:$locationguid%d:
$groupid%d:
group%02d
$groupguid%d:
group%02d
$groupname%d:
Group %02d
$gprincipal%d:
$principals_resources:$groupid%d:/
$gprincipaluri%d:
$principals_uids:$groupguid%d:/
$gcuaddralt%d:
$gprincipaluri%d:
$gcuaddrurn%d:
urn:uuid:$groupguid%d:
$i18nid:
i18nuser
$i18nguid:
i18nuser
$i18nname:
まだ
$i18npswd:
i18nuser
$i18ncalendarpath:
$calendars_uids:$i18nguid:/$calendar:
$i18nemail:
$i18nid:@example.com
$i18ncuaddr:
mailto:$i18nemail:
$i18ncuaddrurn:
urn:uuid:$i18nguid:
$principaldisabled:
$principals_groups:disabledgroup/
$principaluridisabled:
$principals_uids:disabledgroup/
$cuaddrdisabled:
$principals_uids:disabledgroup/
$cuaddr2:
MAILTO:$email2:
xandikos_0.2.10.orig/compat/xandikos-litmus.sh 0000755 0000000 0000000 00000000422 14476041427 016345 0 ustar 00 #!/bin/bash -x
# Run litmus against xandikos
. $(dirname $0)/common.sh
TESTS="$1"
set -e
run_xandikos 5233 5234 --autocreate
if which litmus >/dev/null; then
LITMUS=litmus
else
LITMUS="$(dirname $0)/litmus.sh"
fi
TESTS="$TESTS" $LITMUS http://localhost:5233/
exit 0
xandikos_0.2.10.orig/compat/xandikos-pycaldav.sh 0000755 0000000 0000000 00000001477 14476041427 016646 0 ustar 00 #!/bin/bash
# Run python-caldav tests against Xandikos.
set -e
. $(dirname $0)/common.sh
BRANCH=master
if [ ! -d $(dirname $0)/pycaldav ]; then
git clone https://github.com/python-caldav/caldav $(dirname $0)/pycaldav
else
pushd $(dirname $0)/pycaldav
git pull --ff-only origin $BRANCH
popd
fi
cat <<EOF >$(dirname $0)/pycaldav/tests/conf_private.py
# Only run tests against my private caldav servers.
only_private = True
caldav_servers = [
{'url': 'http://localhost:5233/',
# Until recurring support is added in xandikos.
# See https://github.com/jelmer/xandikos/issues/102
'incompatibilities': ['no_expand', 'no_recurring', 'no_scheduling', 'text_search_not_working'],
}
]
EOF
run_xandikos 5233 5234 --defaults
pushd $(dirname $0)/pycaldav
${PYTHON:-python3} -m pytest tests "$@"
popd
xandikos_0.2.10.orig/compat/xandikos-vdirsyncer.sh 0000755 0000000 0000000 00000002140 13366211551 017211 0 ustar 00 #!/bin/bash
. $(dirname $0)/common.sh
set -e
readonly BRANCH=master
run_xandikos 5001 --autocreate
[ -z "$PYTHON" ] && PYTHON=python3
cd "$(dirname $0)"
REPO_DIR="$(readlink -f ..)"
if [ ! -d vdirsyncer ]; then
git clone -b $BRANCH https://github.com/pimutils/vdirsyncer
else
pushd vdirsyncer
git pull --ff-only origin $BRANCH
popd
fi
cd vdirsyncer
if [ -z "${VIRTUAL_ENV}" ]; then
virtualenv venv -p${PYTHON}
source venv/bin/activate
export PYTHONPATH=${REPO_DIR}
pushd ${REPO_DIR} && ${PYTHON} setup.py develop && popd
fi
if [ -z "${CARGO_HOME}" ]; then
export CARGO_HOME="$(readlink -f .)/cargo"
export RUSTUP_HOME="$(readlink -f .)/cargo"
fi
curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain nightly --no-modify-path
. ${CARGO_HOME}/env
rustup update nightly
# Add --ignore=tests/system/utils/test_main.py since it fails in travis,
# and isn't testing anything relevant to Xandikos.
make \
PYTEST_ARGS="${PYTEST_ARGS} tests/storage/dav/ --ignore=tests/system/utils/test_main.py" \
DAV_SERVER=xandikos \
install-dev install-test test
exit 0
xandikos_0.2.10.orig/docs/Makefile 0000644 0000000 0000000 00000000747 14210664333 013763 0 ustar 00 SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = source
BUILDDIR = build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
xandikos_0.2.10.orig/docs/source/ 0000755 0000000 0000000 00000000000 14210664333 013613 5 ustar 00 xandikos_0.2.10.orig/docs/source/clients.rst 0000644 0000000 0000000 00000002013 14214176574 016013 0 ustar 00 Configuring Clients
===================
Xandikos supports ``auto-discovery`` of DAV collections (i.e. calendars or
addressbooks). Most clients today do as well, but there are some exceptions.
This section contains basic instructions on how to use various clients with Xandikos.
Please do send us patches if your favourite client is missing.
Evolution
---------
Select "CardDAV" (address books) or "CalDAV" (calendars) as the type when
adding a new account.
Simply provide the root URL of your Xandikos instance. Hit the "Find
Addressbooks" or "Find Calendars" button and Evolution will prompt for
credentials and show you a list of all relevant calendars or addressbooks.
DAVx5
--------
vdirsyncer
----------
sogo connector for Icedove/Thunderbird
--------------------------------------
caldavzap/carddavmate
---------------------
pycardsyncer
------------
akonadi
-------
CalDAV-Sync
-----------
CardDAV-Sync
------------
Calendarsync
------------
AgendaV
-------
CardBook
--------
Tasks
-----
Apple iOS
---------
xandikos_0.2.10.orig/docs/source/conf.py 0000644 0000000 0000000 00000003446 14210664333 015121 0 ustar 00 # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'Xandikos'
copyright = '2022 Jelmer Vernooij et al'
author = 'Jelmer Vernooij'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'furo'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
xandikos_0.2.10.orig/docs/source/getting-started.rst 0000644 0000000 0000000 00000003376 14300424372 017460 0 ustar 00 .. _getting-started:
Getting Started
===============
Xandikos can either be run in a container (e.g. in docker or Kubernetes) or
outside of a container.
It is recommended that you run it behind a reverse proxy, since Xandikos by
itself does not provide authentication support. See :ref:`reverse-proxy` for
details.
Running from systemd
--------------------
Xandikos supports socket activation through systemd. To use systemd, run something like:
.. code-block:: shell
cp examples/xandikos.{socket,service} /etc/systemd/system
systemctl daemon-reload
systemctl enable xandikos.socket
Running from docker
-------------------
There is a docker image that gets regularly updated at
``ghcr.io/jelmer/xandikos``.
If you use docker-compose, see the example configuration in
``examples/docker-compose.yml``.
To run in docker interactively, try something like:
.. code-block:: shell
mkdir /tmp/xandikos
docker run -it -v /tmp/xandikos:/data ghcr.io/jelmer/xandikos
The following environment variables are supported by the docker image:
* ``CURRENT_USER_PRINCIPAL``: path to current user principal; defaults to "/$USER"
* ``AUTOCREATE``: whether to automatically create missing directories ("yes" or "no")
* ``DEFAULTS``: whether to create a default directory hierarchy with one
calendar and one addressbook ("yes" or "no")
* ``ROUTE_PREFIX``: HTTP prefix under which Xandikos should run
Running from kubernetes
-----------------------
Here is an example configuration for running Xandikos in kubernetes:
.. literalinclude:: ../../examples/xandikos.k8s.yaml
:language: yaml
If you're using the prometheus operator, you may want also want to use this service monitor:
.. literalinclude:: ../../examples/xandikos-servicemonitor.k8s.yaml
:language: yaml
xandikos_0.2.10.orig/docs/source/index.rst 0000644 0000000 0000000 00000000324 14210701273 015446 0 ustar 00 Xandikos
========
.. toctree::
:maxdepth: 2
:caption: Contents:
getting-started
reverse-proxy
clients
troubleshooting
Indices and tables
==================
* :ref:`genindex`
* :ref:`search`
xandikos_0.2.10.orig/docs/source/reverse-proxy.rst 0000644 0000000 0000000 00000002433 14300424372 017176 0 ustar 00 .. _reverse-proxy:
Running behind a reverse proxy
==============================
By default, Xandikos does not provide any authentication support. Instead, it
is recommended that it is run behind a reverse HTTP proxy that does.
The author has used both nginx and Apache in front of Xandikos, but any
reverse HTTP proxy should do.
If you expose Xandikos at the root of a domain, no further configuration is
necessary. When exposing it on a different path prefix, make sure to set the
``--route-prefix`` argument to Xandikos appropriately.
.well-known
-----------
When serving Xandikos on a prefix, you may still want to provide
the appropriate ``.well-known`` files at the root so that clients
can find the DAV server without having to specify the subprefix.
For this to work, reverse proxy the ``.well-known/carddav`` and
``.well-known/caldav`` files to Xandikos.
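A minimal nginx sketch of this idea (assuming Xandikos listens on
``127.0.0.1:8080`` and is otherwise proxied under ``/dav/`` elsewhere in the
same server block; see ``examples/xandikos.nginx.conf`` for a complete
configuration):

.. code-block:: nginx

    location = /.well-known/caldav {
        proxy_pass http://127.0.0.1:8080;
        proxy_set_header Host $http_host;
    }

    location = /.well-known/carddav {
        proxy_pass http://127.0.0.1:8080;
        proxy_set_header Host $http_host;
    }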
Example: Kubernetes ingress
---------------------------
Here is an example configuring Xandikos to listen on ``/dav`` using the
Kubernetes nginx ingress controller. Note that this relies on the
appropriate server being set up in kubernetes (see :ref:`getting-started`) and
the ``my-htpasswd`` secret being present and having a htpasswd like file in it.
.. literalinclude:: ../../examples/xandikos-ingress.k8s.yaml
:language: yaml
xandikos_0.2.10.orig/docs/source/troubleshooting.rst 0000644 0000000 0000000 00000002211 14305755512 017575 0 ustar 00 Troubleshooting
===============
Support channels
----------------
For help, please try the `Xandikos Discussions Forum
<https://github.com/jelmer/xandikos/discussions>`_,
IRC (``#xandikos`` on irc.oftc.net), or Matrix (`#xandikos:matrix.org
`_).
Debugging \*DAV
---------------
Your client may have a way of increasing log verbosity; this can often be very
helpful.
Xandikos also has several command-line flags that may help with debugging:
* ``--dump-dav-xml``: Write all \*DAV communication to standard out;
interpreting the contents may require in-depth \*DAV knowledge, but
providing this data is usually sufficient for one of the Xandikos
developers to identify the cause of an issue.
* ``--no-strict``: Don't follow a strict interpretation of the
various standards, for clients that don't follow them.
* ``--debug``: Print extra information about Xandikos' internal state.
If you do find that a particular client requires ``--no-strict``, please
do report it - either to the client's authors or in the
`Xandikos Discussions <https://github.com/jelmer/xandikos/discussions>`_.
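For example, to capture the ``--dump-dav-xml`` output mentioned above from a
throw-away test instance (a sketch; the paths are arbitrary):

.. code-block:: shell

    xandikos --defaults -d /tmp/xandikos-debug --dump-dav-xml > dav-traffic.log 2>&1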
xandikos_0.2.10.orig/examples/docker-compose.yml 0000644 0000000 0000000 00000000264 14176007114 016637 0 ustar 00 version: "3.4"
services:
xandikos:
image: ghcr.io/jelmer/xandikos
ports:
- 8000:8000
volumes:
- /path/to/xandikos/data:/data
restart: unless-stopped
xandikos_0.2.10.orig/examples/gunicorn.conf.py 0000644 0000000 0000000 00000001607 14240530555 016330 0 ustar 00 # Gunicorn config file
#
# Usage
# ----------------------------------------------------------
#
# Install: 1) copy this config to src directory for xandikos
# 2) run 'pip install gunicorn'
# 3) mkdir logs && mkdir data
#
# Execute: 'gunicorn'
#
wsgi_app = 'xandikos.wsgi:app'
# Server Mechanics
# ========================================
# daemon mode
daemon = False
# environment variables
raw_env = [
'XANDIKOSPATH=./data',
'CURRENT_USER_PRINCIPAL=/user/',
'AUTOCREATE=defaults'
]
# Server Socket
# ========================================
bind = '0.0.0.0:8000'
# Worker Processes
# ========================================
workers = 2
# Logging
# ========================================
# access log
accesslog = './logs/access.log'
access_log_format = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
# gunicorn log
errorlog = '-'
loglevel = 'info'
xandikos_0.2.10.orig/examples/uwsgi-heroku.ini 0000644 0000000 0000000 00000000460 13170736675 016350 0 ustar 00 [uwsgi]
http-socket = :$(PORT)
die-on-term = true
umask = 022
master = true
cheaper = 0
processes = 1
plugin = router_basicauth,python3
route = ^/ basicauth:myrealm,user1:password1
module = xandikos.wsgi:app
env = XANDIKOSPATH=$HOME/dav
env = CURRENT_USER_PRINCIPAL=/dav/user1/
env = AUTOCREATE=defaults
xandikos_0.2.10.orig/examples/uwsgi-standalone.ini 0000644 0000000 0000000 00000001073 13260421012 017154 0 ustar 00 [uwsgi]
http-socket = 127.0.0.1:8080
umask = 022
master = true
cheaper = 0
processes = 1
plugin = router_basicauth,python3
route = ^/ basicauth:myrealm,user1:password1
module = xandikos.wsgi:app
env = XANDIKOSPATH=$HOME/dav
env = CURRENT_USER_PRINCIPAL=/dav/user1/
# Set AUTOCREATE to have Xandikos create default CalDAV/CardDAV
# collections if they don't yet exist. Possible values:
# - principal: just create the current user principal
# - defaults: create the principal and default calendar and contacts
# collections. (recommended)
env = AUTOCREATE=defaults
xandikos_0.2.10.orig/examples/uwsgi.ini 0000644 0000000 0000000 00000001045 13260421012 015025 0 ustar 00 [uwsgi]
socket = 127.0.0.1:8001
uid = xandikos
gid = xandikos
master = true
cheaper = 0
processes = 1
plugin = python3
module = xandikos.wsgi:app
umask = 022
env = XANDIKOSPATH=/var/lib/xandikos/collections
env = CURRENT_USER_PRINCIPAL=/user/
# Set AUTOCREATE to have Xandikos create default CalDAV/CardDAV
# collections if they don't yet exist. Possible values:
# - principal: just create the current user principal
# - defaults: create the principal and default calendar and contacts
# collections. (recommended)
env = AUTOCREATE=defaults
xandikos_0.2.10.orig/examples/xandikos-ingress.k8s.yaml 0000644 0000000 0000000 00000001773 14210677464 020102 0 ustar 00 apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: xandikos
annotations:
nginx.ingress.kubernetes.io/auth-type: basic
nginx.ingress.kubernetes.io/auth-secret: my-htpasswd
nginx.ingress.kubernetes.io/auth-realm: 'Authentication Required - mysite'
spec:
ingressClassName: nginx
rules:
- host: example.com
http:
paths:
- backend:
service:
name: xandikos
port:
name: web
path: /dav(/|$)(.*)
pathType: Prefix
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: xandikos-wellknown
spec:
ingressClassName: nginx
rules:
- host: example.com
http:
paths:
- backend:
service:
name: xandikos
port:
name: web
path: /.well-known/carddav
pathType: Exact
- backend:
service:
name: xandikos
port:
name: web
path: /.well-known/caldav
pathType: Exact
xandikos_0.2.10.orig/examples/xandikos-servicemonitor.k8s.yaml 0000644 0000000 0000000 00000000307 14210673205 021455 0 ustar 00 ---
apiVersion: monitoring.coreos.com/v1
kind: ServiceMonitor
metadata:
name: xandikos
labels:
app: xandikos
spec:
selector:
matchLabels:
app: xandikos
endpoints:
- port: web
xandikos_0.2.10.orig/examples/xandikos.avahi.service 0000644 0000000 0000000 00000000746 13665311756 017514 0 ustar 00
<?xml version="1.0" standalone='no'?>
<!DOCTYPE service-group SYSTEM "avahi-service.dtd">
<service-group>
  <name replace-wildcards="yes">Xandikos CalDAV/CardDAV server on %h</name>
  <service>
    <type>_caldavs._tcp</type>
    <port>443</port>
  </service>
  <service>
    <type>_carddavs._tcp</type>
    <port>443</port>
  </service>
</service-group>
xandikos_0.2.10.orig/examples/xandikos.example 0000644 0000000 0000000 00000000155 13155703704 016402 0 ustar 00 # This is an example .xandikos file.
# The color for this collection is red
color = FF0000
inbox-url = inbox/
xandikos_0.2.10.orig/examples/xandikos.k8s.yaml 0000644 0000000 0000000 00000002746 14243724230 016421 0 ustar 00 ---
apiVersion: apps/v1
kind: Deployment
metadata:
name: xandikos
spec:
strategy:
rollingUpdate:
maxSurge: 1
maxUnavailable: 1
type: RollingUpdate
replicas: 1
selector:
matchLabels:
app: xandikos
template:
metadata:
labels:
app: xandikos
spec:
containers:
- name: xandikos
image: ghcr.io/jelmer/xandikos
imagePullPolicy: Always
command:
- "python3"
- "-m"
- "xandikos.web"
- "--port=8081"
- "-d/data"
- "--defaults"
- "--listen-address=0.0.0.0"
- "--current-user-principal=/jelmer"
- "--route-prefix=/dav"
resources:
limits:
cpu: "2"
memory: "2Gi"
requests:
cpu: "0.1"
memory: "10M"
livenessProbe:
httpGet:
path: /health
port: 8081
initialDelaySeconds: 30
periodSeconds: 3
timeoutSeconds: 90
ports:
- containerPort: 8081
volumeMounts:
- name: xandikos-volume
mountPath: /data
securityContext:
fsGroup: 1000
volumes:
- name: xandikos-volume
persistentVolumeClaim:
claimName: xandikos
---
apiVersion: v1
kind: Service
metadata:
name: xandikos
labels:
app: xandikos
spec:
ports:
- port: 8081
name: web
selector:
app: xandikos
type: ClusterIP
xandikos_0.2.10.orig/examples/xandikos.nginx.conf 0000644 0000000 0000000 00000002407 13655756070 017031 0 ustar 00 upstream xandikos {
server 127.0.0.1:8080;
# server unix:/run/xandikos.socket; # nginx will need write permissions here
}
server {
server_name dav.example.com;
# Service discovery, see RFC 6764
location = /.well-known/caldav {
return 307 $scheme://$host/user/calendars;
}
location = /.well-known/carddav {
return 307 $scheme://$host/user/contacts;
}
location / {
proxy_set_header Host $http_host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_redirect off;
proxy_buffering off;
proxy_pass http://xandikos;
auth_basic "Login required";
auth_basic_user_file /etc/xandikos/htpasswd;
}
listen 443 ssl http2;
listen [::]:443 ssl ipv6only=on http2;
# use e.g. Certbot to have these modified:
ssl_certificate /etc/letsencrypt/live/dav.example.com/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/dav.example.com/privkey.pem;
include /etc/letsencrypt/options-ssl-nginx.conf;
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;
}
server {
if ($host = dav.example.com) {
return 301 https://$host$request_uri;
}
listen 80 http2;
listen [::]:80 http2;
server_name dav.example.com;
return 404;
}
xandikos_0.2.10.orig/examples/xandikos.service 0000644 0000000 0000000 00000000473 13653665167 016427 0 ustar 00 [Unit]
Description=Xandikos CalDAV/CardDAV server
After=network.target
[Service]
ExecStart=/usr/local/bin/xandikos \
-d /var/lib/xandikos \
--route-prefix=/dav \
--current-user-principal=/jelmer \
-l /run/sock
User=xandikos
Group=www-data
Restart=on-failure
KillSignal=SIGQUIT
Type=simple
NotifyAccess=all
xandikos_0.2.10.orig/examples/xandikos.socket 0000644 0000000 0000000 00000000160 13653665167 016250 0 ustar 00 [Unit]
Description=Xandikos socket
[Socket]
ListenStream=/run/xandikos.sock
[Install]
WantedBy=sockets.target
xandikos_0.2.10.orig/man/xandikos.8 0000644 0000000 0000000 00000002416 14020773671 014057 0 ustar 00 .TH XANDIKOS "8" "March 2021" "xandikos 0.2.5" "System Administration Utilities"
.SH NAME
xandikos \- git-backed CalDAV/CardDAV server
.SH DESCRIPTION
usage: ./bin/xandikos \fB\-d\fR ROOT\-DIR [OPTIONS]
.SS "optional arguments:"
.TP
\fB\-h\fR, \fB\-\-help\fR
show this help message and exit
.TP
\fB\-\-version\fR
show program's version number and exit
.TP
\fB\-d\fR DIRECTORY, \fB\-\-directory\fR DIRECTORY
Directory to serve from.
.TP
\fB\-\-current\-user\-principal\fR CURRENT_USER_PRINCIPAL
Path to current user principal. [/user/]
.TP
\fB\-\-autocreate\fR
Automatically create necessary directories.
.TP
\fB\-\-defaults\fR
Create initial calendar and address book. Implies
\fB\-\-autocreate\fR.
.TP
\fB\-\-dump\-dav\-xml\fR
Print DAV XML request/responses.
.TP
\fB\-\-avahi\fR
Announce services with avahi.
.TP
\fB\-\-no\-strict\fR
Enable workarounds for buggy CalDAV/CardDAV client
implementations.
.SS "Access Options:"
.TP
\fB\-l\fR LISTEN_ADDRESS, \fB\-\-listen\-address\fR LISTEN_ADDRESS
Bind to this address. Pass in path for unix domain
socket. [localhost]
.TP
\fB\-p\fR PORT, \fB\-\-port\fR PORT
Port to listen on. [8080]
.TP
\fB\-\-route\-prefix\fR ROUTE_PREFIX
Path to Xandikos. (useful when Xandikos is behind a
reverse proxy) [/]
.SH AUTHORS
Jelmer Vernooij
xandikos_0.2.10.orig/notes/README.rst 0000644 0000000 0000000 00000000156 14210666053 014205 0 ustar 00 This directory contains rough design documentation for Xandikos.
For user-targeted documentation, see docs/.
xandikos_0.2.10.orig/notes/api-stability.rst 0000644 0000000 0000000 00000000717 13140202367 016021 0 ustar 00 API Stability
=============
There are currently no guarantees about Xandikos Python APIs staying the same
across different versions, except the following APIs:
xandikos.web.XandikosBackend(path)
xandikos.web.XandikosBackend.create_principal(principal, create_defaults=False)
xandikos.web.XandikosApp(backend, current_user_principal)
xandikos.web.WellknownRedirector(app, path)
If you care about stability of any other APIs, please file a bug against Xandikos.
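For reference, a minimal sketch that sticks to the stable calls listed above
(the path and principal are placeholders; serving the resulting application is
out of scope here):

.. code:: python

    from xandikos.web import XandikosApp, XandikosBackend

    backend = XandikosBackend("/var/lib/xandikos/collections")
    backend.create_principal("/user/", create_defaults=True)
    app = XandikosApp(backend, "/user/")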
xandikos_0.2.10.orig/notes/auth.rst 0000644 0000000 0000000 00000000765 13075511367 014224 0 ustar 00 Authentication
==============
Ideally, Xandikos would stay out of the business of authenticating users.
The trouble with this is that there are many flavours that need to
be supported and configured.
However, it is still necessary for Xandikos to handle authorization.
An external system authenticates the user, and then sets the REMOTE_USER
environment variable.
Per
http://wsgi.readthedocs.io/en/latest/specifications/simple_authentication.html,
Xandikos should distinguish between 401 and 403.
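As an illustration only (not how Xandikos is structured internally), the
external system could be a WSGI middleware along these lines, with the
401/403 distinction left to the wrapped application:

.. code:: python

    def require_remote_user(app):
        # Hypothetical sketch: an outer layer authenticates the request
        # and exposes the result via REMOTE_USER.
        def wrapper(environ, start_response):
            if environ.get("REMOTE_USER") is None:
                # Nobody is authenticated yet: challenge the client (401).
                start_response("401 Unauthorized", [
                    ("Content-Type", "text/plain"),
                    ("WWW-Authenticate", 'Basic realm="xandikos"'),
                ])
                return [b"login required\n"]
            # Authenticated: the wrapped application decides whether this
            # user is *authorized*, answering 403 where appropriate.
            return app(environ, start_response)
        return wrapper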
xandikos_0.2.10.orig/notes/collection-config.rst 0000644 0000000 0000000 00000003323 13366211551 016645 0 ustar 00 Per-collection configuration
============================
Xandikos needs to store several pieces of per-collection metadata.
Goals
-----
Find a place to store per-collection metadata.
Some of these can be inferred from other sources.
For starters, for each collection:
- resource types: principal, calendar, addressbook
At the moment, Xandikos is storing some of this information in git configuration. However, this means:
* it is not versioned
* there is a 1-1 relationship between collections and git repositories
* some users object to mixing in this metadata in their git config
Per resource type-specific properties
-------------------------------------
Generic
~~~~~~~
- ACLs
- owner?
Principal
~~~~~~~~~
Per principal configuration settings:
- calendar home sets
- addressbook home sets
- user address set
- infit settings
Calendar
~~~~~~~~
Need per calendar config:
- color
- description (can be inferred from .git/description)
- inbox URL
- outbox URL
- max instances
- max attendees per instance
- calendar timezone
- calendar schedule transparency
Addressbook
~~~~~~~~~~~
Need per addressbook config:
- max image size
- max resource size
- color
- description (can be inferred from .git/description)
Schedule Inbox
~~~~~~~~~~~~~~
- default-calendar-URL
Proposed format
---------------
Store a ini-style .xandikos file in the directory hosting the Collection (or
Tree in case of a Git repository).
All properties mentioned above are simple key/value pairs. For simplicity, it
may make sense to use an ini-style format so that users can edit metadata using their editor.
Example
-------
# This is a standard Python configobj file, so it's mostly ini-style, and comments
# can appear preceded by #.
color = 030003
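Reading such a file back is then trivial; a sketch, assuming the ``configobj``
package (which accepts bare ``key = value`` lines without a section header):

.. code:: python

    from configobj import ConfigObj

    config = ConfigObj(".xandikos", encoding="utf-8")
    color = config.get("color")              # e.g. "030003"
    description = config.get("description")  # None if unset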
xandikos_0.2.10.orig/notes/context.rst 0000644 0000000 0000000 00000001341 13076032645 014734 0 ustar 00 Contexts
========
Currently, property get_value/set_value receive three pieces of context:
- HREF for the resource
- resource object
- Element object to update
However, some properties need WebDAV server metadata:
- supported-live-property-set needs list of properties
- supported-report-set needs list of reports
- supported-method-set needs list of methods
Some operations need access to current user information:
- current-user-principal
- current-user-privilege-set
- calendar-user-address-set
PUT/DELETE/MKCOL need access to username (for author) and possibly things like user agent
(for better commit message)
.. code:: python
class Context(object):
def get_current_user(self):
return (name, principal)
xandikos_0.2.10.orig/notes/dav-compliance.rst 0000644 0000000 0000000 00000017366 14476041427 016153 0 ustar 00 DAV Compliance
==============
This document aims to document the compliance with various RFCs.
rfc4918.txt (Core WebDAV) (obsoletes rfc2518)
---------------------------------------------
Mostly supported.
HTTP Methods
^^^^^^^^^^^^
- PROPFIND [supported]
- PROPPATCH [supported]
- MKCOL [supported]
- DELETE [supported]
- PUT [supported]
- COPY [not implemented]
- MOVE [not implemented]
- LOCK [not implemented]
- UNLOCK [not implemented]
HTTP Headers
^^^^^^^^^^^^
- (9.1) Dav [supported]
- (9.2) Depth ['0, '1' and 'infinity' are supported]
- (9.3) Destination [only used with COPY/MOVE, which are not supported]
- (9.4) If [not supported]
- (9.5) Lock-Token [not supported]
- (9.6) Overwrite [only used with COPY/MOVE, which are not supported]
- (9.7) Status-URI [not supported]
- (9.8) Timeout [not supported, only used for locks]
DAV Properties
^^^^^^^^^^^^^^
- (15.1) creationdate [supported]
- (15.2) displayname [supported]
- (15.3) getcontentlanguage [supported]
- (15.4) getcontentlength [supported]
- (15.5) getcontenttype [supported]
- (15.6) getetag [supported]
- (15.7) getlastmodified [supported]
- (15.8) lockdiscovery [supported]
- (15.9) resourcetype [supported]
- (15.10) supportedlock [supported]
- (RFC2518 ONLY - 13.10) source [not supported]
rfc3253.txt (Versioning Extensions)
-----------------------------------
Broadly speaking, only features related to the REPORT method are supported.
HTTP Methods
^^^^^^^^^^^^
- REPORT [supported]
- CHECKOUT [not supported]
- CHECKIN [not supported]
- UNCHECKOUT [not supported]
- MKWORKSPACE [not supported]
- UPDATE [not supported]
- LABEL [not supported]
- MERGE [not supported]
- VERSION-CONTROL [not supported]
- BASELINE-CONTROL [not supported]
- MKACTIVITY [not supported]
DAV Properties
^^^^^^^^^^^^^^
- DAV:comment [supported]
- DAV:creator-displayname [not supported]
- DAV:supported-method-set [not supported]
- DAV:supported-live-property-set [not supported]
- DAV:supported-report-set [supported]
- DAV:predecessor-set [not supported]
- DAV:successor-set [not supported]
- DAV:checkout-set [not supported]
- DAV:version-name [not supported]
- DAV:checked-out [not supported]
- DAV:chcked-in [not supported]
- DAV:auto-version [not supported]
DAV Reports
^^^^^^^^^^^
- DAV:expand-property [supported]
- DAV:version-tree [not supported]
rfc5323.txt (WebDAV "SEARCH")
-----------------------------
Not supported
HTTP Methods
^^^^^^^^^^^^
- SEARCH [not supported]
DAV Properties
^^^^^^^^^^^^^^
- DAV:datatype [not supported]
- DAV:searchable [not supported]
- DAV:selectable [not supported]
- DAV:sortable [not supported]
- DAV:caseless [not supported]
- DAV:operators [not supported]
rfc3744.txt (WebDAV access control)
-----------------------------------
Not really supported
DAV Properties
^^^^^^^^^^^^^^
- DAV:alternate-uri-set [not supported]
- DAV:principal-URL [supported]
- DAV:group-member-set [not supported]
- DAV:group-membership [supported]
- DAV:owner [supported]
- DAV:group [not supported]
- DAV:current-user-privilege-set [supported]
- DAV:supported-privilege-set [not supported]
- DAV:acl [not supported]
- DAV:acl-restrictions [not supported]
- DAV:inherited-acl-set [not supported]
- DAV:principal-collection-set [not supported]
DAV Reports
^^^^^^^^^^^
- DAV:acl-principal-prop-set [not supported]
- DAV:principal-match [not supported]
- DAV:principal-property-search [not supported]
- DAV:principal-search-property-set [not supported]
rfc4791.txt (CalDAV)
--------------------
Fully supported.
DAV Properties
^^^^^^^^^^^^^^
- CALDAV:calendar-description [supported]
- CALDAV:calendar-home-set [supported]
- CALDAV:calendar-timezone [supported]
- CALDAV:supported-calendar-component-set [supported]
- CALDAV:supported-calendar-data [supported]
- CALDAV:max-resource-size [supported]
- CALDAV:min-date-time [supported]
- CALDAV:max-date-time [supported]
- CALDAV:max-instances [supported]
- CALDAV:max-attendees-per-instance [supported]
HTTP Methods
^^^^^^^^^^^^
- MKCALENDAR [not supported]
DAV Reports
^^^^^^^^^^^
- CALDAV:calendar-query [supported]
- CALDAV:calendar-multiget [supported]
- CALDAV:free-busy-query [supported]
rfc6352.txt (CardDAV)
---------------------
Fully supported.
DAV Properties
^^^^^^^^^^^^^^
- CARDDAV:addressbook-description [supported]
- CARDDAV:supported-address-data [supported]
- CARDDAV:max-resource-size [supported]
- CARDDAV:addressbook-home-set [supported]
- CARDDAV:principal-address [supported]
DAV Reports
^^^^^^^^^^^
- CARDDAV:addressbook-query [supported]
- CARDDAV:addressbook-multiget [supported]
rfc6638.txt (CalDAV scheduling extensions)
------------------------------------------
DAV Properties
^^^^^^^^^^^^^^
- CALDAV:schedule-outbox-URL [supported]
- CALDAV:schedule-inbox-URL [supported]
- CALDAV:calendar-user-address-set [supported]
- CALDAV:calendar-user-type [supported]
- CALDAV:schedule-calendar-transp [supported]
- CALDAV:schedule-default-calendar-URL [supported]
- CALDAV:schedule-tag [supported]
rfc6764.txt (Locating groupware services)
-----------------------------------------
Most of this is outside of the scope of xandikos, but it does support
DAV:current-user-principal
rfc7809.txt (CalDAV Time Zone Extensions)
-----------------------------------------
Not supported
DAV Properties
^^^^^^^^^^^^^^
- CALDAV:timezone-service-set [supported]
- CALDAV:calendar-timezone-id [not supported]
rfc5397.txt (WebDAV Current Principal Extension)
------------------------------------------------
DAV Properties
^^^^^^^^^^^^^^
- CALDAV:current-user-principal [supported]
Proprietary extensions
----------------------
Custom properties used by various clients
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- CARDDAV:max-image-size [supported]
https://github.com/apple/ccs-calendarserver/blob/master/doc/Extensions/caldav-ctag.txt
- DAV:getctag [supported]
https://github.com/apple/ccs-calendarserver/blob/master/doc/Extensions/caldav-proxy.txt
- DAV:calendar-proxy-read-for [supported]
- DAV:calendar-proxy-write-for [supported]
Apple-specific Properties
^^^^^^^^^^^^^^^^^^^^^^^^^
- calendar-color [supported]
- calendar-order [supported]
- getctag [supported]
- refreshrate [supported]
- source
XMPP Subscriptions
^^^^^^^^^^^^^^^^^^
- xmpp-server
- xmpp-heartbeat
- xmpp-uri
inf-it properties
^^^^^^^^^^^^^^^^^
- headervalue [supported]
- settings [supported]
- addressbook-color [supported]
AgendaV properties
^^^^^^^^^^^^^^^^^^
https://tools.ietf.org/id/draft-ietf-calext-caldav-attachments-03.html
- CALDAV:max-attachments-per-resource [supported]
- CALDAV:max-attachment-size [supported]
- CALDAV:managed-attachments-server-URL [supported]
rfc5995.txt (POST to create members)
------------------------------------
Fully supported.
DAV Properties
^^^^^^^^^^^^^^
- DAV:add-member [supported]
HTTP Methods
^^^^^^^^^^^^
- POST [supported]
rfc5689 (Extended MKCOL)
------------------------
Fully supported
HTTP Methods
^^^^^^^^^^^^
- MKCOL [supported]
rfc4331.txt (WebDAV Quota)
--------------------------
DAV properties
^^^^^^^^^^^^^^
- {DAV:}quota-available-bytes [supported]
- {DAV:}quota-used-bytes [supported]
rfc4709 (WebDAV Mount)
----------------------
This RFC documents a mechanism that allows clients to find the WebDAV mount
associated with a specific page. It's unclear to the writer what the value of
this is - an alternate resource in the HTML page would also do.
As far as I can tell, there is only a single server side implementation and a
single client side implementation of this RFC. I don't have access to the
client implementation (Xythos Drive) and the server side implementation is in
SabreDAV.
Experimental support for WebDAV Mount is available in the 'mount' branch, but
won't be merged without a good use case.
Managed Attachments
-------------------
Apple extension:
https://datatracker.ietf.org/doc/html/draft-ietf-calext-caldav-attachments-04
Currently unsupported.
xandikos_0.2.10.orig/notes/debugging.rst 0000644 0000000 0000000 00000001050 13411442167 015175 0 ustar 00 Debugging Xandikos
==================
When filing bugs, please include details on the Xandikos version you're running
and the clients that you're using.
It would be helpful if you can reproduce any issues with a clean Xandikos
setup. That also makes it easier to e.g. share log files.
1. Verify the server side contents; you can do this by
looking at the Git repository on the Xandikos side.
2. Run with ``xandikos --dump-dav-xml``; please note that these
may contain personal information, so be careful before e.g. posting
them on GitHub.
xandikos_0.2.10.orig/notes/file-format.rst 0000644 0000000 0000000 00000001767 13076032645 015471 0 ustar 00 File structure
==============
Collections are represented as Git repositories on disk.
A specific version is represented as a commit id. The 'ctag' for a calendar is taken from the
tree id of the calendar root tree.
The `entity tag`_ for an event is taken from the blob id of the blob representing that event. These kinds
of entity tags are strong, since blobs are equivalent by octet equality.
.. _entity tag: https://tools.ietf.org/html/rfc2616#section-3.11
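A rough sketch of how these identifiers could be derived with dulwich (the
library backing the store); the repository path and file name below are
placeholders:

.. code:: python

    from dulwich.repo import Repo
    from dulwich.object_store import tree_lookup_path

    repo = Repo("/path/to/collection")        # placeholder path
    head = repo[repo.head()]

    # ctag for the whole collection: the id of the root tree of HEAD.
    ctag = head.tree.decode("ascii")

    # entity tag for a single item: the id of the blob holding that item.
    _mode, blob_id = tree_lookup_path(
        repo.object_store.__getitem__, head.tree, b"event.ics")
    etag = blob_id.decode("ascii")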
The file name of an item shall be its UID followed by .ics / .vcf. Because of
this, every file MUST only contain one UID and thus MUST contain exactly one
VEVENT, VTODO, VJOURNAL or VFREEBUSY.
All items in a collection *must* be well formed, so that they do not have to be validated when served.
When new items are added, the collection should verify no existing items have the same UID.
Open questions:
- How to handle subtrees? Are they just subcollections?
- Where should collection metadata (e.g. colors, description) be stored? .git/config?
xandikos_0.2.10.orig/notes/goals.rst 0000644 0000000 0000000 00000000254 13076032645 014357 0 ustar 00 Goals
=====
- standards compliant
- standards complete
- backed by Git
- easily hackable/editable with standard tools (e.g. Git/Vim)
- version tracked
- unit tested
xandikos_0.2.10.orig/notes/hacking.txt 0000644 0000000 0000000 00000000057 13036715724 014670 0 ustar 00 DAV in class names is spelled in all capitals.
xandikos_0.2.10.orig/notes/heroku.rst 0000644 0000000 0000000 00000002235 13155657351 014555 0 ustar 00 Running Xandikos on Heroku
==========================
Heroku is an easy way to get a public instance of Xandikos running. A free
Heroku instance comes with 100 MB of local storage, which is enough for
thousands of calendar items or contacts.
Deployment
----------
All of these steps assume you already have a Heroku account and have installed
the heroku command-line client.
To run a Heroku instance with Xandikos:
1. Create a copy of Xandikos::
$ git clone git://jelmer.uk/xandikos xandikos
$ cd xandikos
2. Make a copy of the example uwsgi configuration::
$ cp examples/uwsgi-heroku.ini uwsgi.ini
3. Edit *uwsgi.ini* as necessary, such as changing the credentials (the
defaults are *user1*/*password1*).
4. Make Heroku install and use uWSGI::
$ echo uwsgi > requirements.txt
$ echo web: uwsgi uwsgi.ini > Procfile
5. Create the Heroku instance::
$ heroku create
(this might ask you for your Heroku credentials)
6. Deploy the app::
$ git push heroku master
7. Open the app with your browser::
$ heroku open
(The URL opened is also the URL that you can provide to any CalDAV/CardDAV
application that supports service discovery)
xandikos_0.2.10.orig/notes/indexes.rst 0000644 0000000 0000000 00000004276 14476041427 014724 0 ustar 00 Filter Performance
==================
There are several API calls that would be good to speed up. In particular,
querying an entire calendar with filters is quite slow because it involves
scanning all the items.
Common Filters
~~~~~~~~~~~~~~
There are a couple of common filters:

* Component filters that filter for only VTODO or VEVENT items
* Property filters that filter for a specific UID
* Property filters that filter for another property
* Property filters that do complex text searches, e.g. in DESCRIPTION
* Property filters that filter for some time range.
But these are by no means the only possible filters, and there is no
predicting what clients will scan for.
Indexes are an implementation detail of the Store. This is necessary so that
e.g. the Git stores can take advantage of the fact that they have a tree hash.
One option would be to serialize the filter and then to keep a list of results
per (tree_id, filter_hash). Unfortunately this by itself is not enough, since
it doesn't help when we get repeated queries for different UIDs.
Options considered:
* Have some pre-set indexes. Perhaps components, and UID?
* Cache but use the rightmost value as a key in a dict
* Always just cache everything that was queried. This is probably actually fine.
* Count how often a particular index is used
Open Questions
~~~~~~~~~~~~~~
* How are indexes identified?
Proposed API
~~~~~~~~~~~~
.. code:: python

    class Filter(object):

        def check_slow(self, name, resource):
            """Check whether this filter applies to a resource, based on the
            actual resource.

            This is the naive, slow, fallback implementation.

            Args:
              name: Name of the resource
              resource: Resource to check
            """
            raise NotImplementedError(self.check_slow)

        def check_index(self, values):
            """Check whether this filter applies to a resource, based on index
            values.

            Args:
              values: Dictionary mapping indexes to index values
            """
            raise NotImplementedError(self.check_index)

        def required_indexes(self):
            """Return a list of indexes that this Filter needs to function.

            Returns: List of ORed options, similar to a Depends line in Debian
            """
            raise NotImplementedError(self.required_indexes)
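A purely illustrative sketch of how a store might consume this API;
``iter_index_values`` and ``get_resource`` are assumed store helpers, not part
of the proposal:

.. code:: python

    def matching_names(store, filter):
        """Yield names of items that match ``filter``, preferring indexes."""
        needed = filter.required_indexes()
        for name in store:
            values = store.iter_index_values(name, needed)  # assumed helper
            if values is not None:
                if filter.check_index(values):
                    yield name
            else:
                # No usable index for this item: fall back to the slow path.
                if filter.check_slow(name, store.get_resource(name)):
                    yield name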
xandikos_0.2.10.orig/notes/monitoring.rst 0000644 0000000 0000000 00000000440 13076032645 015434 0 ustar 00 Monitoring
==========
Things to monitor:
- number of uploaded items
- number of accessed store items
- number of lru cache hits
- number of HTTP requests
- number of reports
- number of properties requested
- number of unknown properties requested
- number of unknown reports requested
xandikos_0.2.10.orig/notes/multi-user.rst 0000644 0000000 0000000 00000003274 13452372634 015370 0 ustar 00 Multi-User Support
==================
Multi-user support could arguably also include sharing of
calendars/collections/etc. This is beyond the scope of this document, which
just focuses on allowing multiple users to use their own silo in a single
instance of Xandikos.
Siloed user support can be split up into three steps:
* storage - mapping a user to a principal
* authentication - letting a user log in
* authorization - checking whether the user has access to a resource
Authentication
--------------
In the simplest form, a forwarding proxy provides the name of an authenticated
user. E.g. Apache or uWSGI sets the REMOTE_USER environment variable. If
REMOTE_USER is not present for an operation that requires authentication, a 401
error is returned.
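A minimal sketch of that check as WSGI middleware (illustrative only; this is
not necessarily how Xandikos wires it up):

.. code:: python

    def require_remote_user(app):
        """Reject requests for which the proxy did not set REMOTE_USER."""
        def wrapper(environ, start_response):
            if not environ.get("REMOTE_USER"):
                start_response("401 Unauthorized", [
                    ("WWW-Authenticate", 'Basic realm="xandikos"'),
                    ("Content-Type", "text/plain")])
                return [b"authentication required"]
            return app(environ, start_response)
        return wrapper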
Authorization
-------------
In the simplest form, users only have access to the resources under their own
principal.
As a second step, we could let users configure ACLs; one way of doing this would be
to allow adding authentication in the collection configuration. I.e. something like::
[acl]
read = jelmer, joe
write = jelmer
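A sketch of checking such a section, assuming the collection configuration is
an INI-style file readable with ``configparser`` (the function name echoes the
``check_path_access`` roadmap item below; everything else is made up):

.. code:: python

    import configparser

    def check_path_access(config_path, username, mode="read"):
        """Return True if ``username`` may access this collection in ``mode``."""
        config = configparser.ConfigParser()
        config.read(config_path)
        if not config.has_section("acl"):
            return True          # no ACL configured: fall back to default policy
        allowed = config.get("acl", mode, fallback="")
        return username in [u.strip() for u in allowed.split(",") if u.strip()]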
Storage
-------
By default, the principal for a user is simply "/%(username)s".
Roadmap
=======
* Optional: Allow marking collections as principals [DONE]
* Expose username (or None, if not logged in) everywhere [DONE]
* Add function get_username_principal() for mapping username to principal path [DONE]
* Support automatic creation of principal on first login of user
* Add simple function check_path_access() for checking access ("is this user allowed to access this path?")
* Use access checking function everywhere
* Have current-user-principal setting depend on $REMOTE_USER and get_username_principal() [DONE]
xandikos_0.2.10.orig/notes/prometheus.rst 0000644 0000000 0000000 00000000176 13407242274 015447 0 ustar 00 Prometheus
==========
Proposed metrics (a small counter sketch follows the list):
* number of HTTP queries
* number of DAV queries by category
* DAV versions used
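If these were exported with the ``prometheus_client`` library, the first two
might look roughly like this (metric names are made up):

.. code:: python

    from prometheus_client import Counter

    HTTP_QUERIES = Counter(
        "xandikos_http_queries_total", "Number of HTTP queries handled")
    DAV_QUERIES = Counter(
        "xandikos_dav_queries_total", "Number of DAV queries handled",
        ["category"])

    def record_query(dav_category=None):
        HTTP_QUERIES.inc()
        if dav_category is not None:
            DAV_QUERIES.labels(category=dav_category).inc()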
xandikos_0.2.10.orig/notes/release-process.rst 0000644 0000000 0000000 00000000341 13123462602 016334 0 ustar 00 Release Process
===============
1. Update version in setup.py
2. Update version in xandikos/__init__.py
3. git commit -a -m "Release $VERSION"
4. git tag -as -m "Release $VERSION" v$VERSION
5. ./setup.py sdist upload --sign
xandikos_0.2.10.orig/notes/scheduling-plan.rst 0000644 0000000 0000000 00000001162 13146425117 016324 0 ustar 00 CalDAV Scheduling
=================
TODO:
- When a new calendar object is uploaded to a calendar collection:
* Check if the ATTENDEE property is present, and if so, process it (see the sketch after this list)
- Support CALDAV:schedule-tag
* When comparing with if-schedule-tag-match, simply retrieve the blob by schedule-tag and compare delta between newly uploaded and current
* When determining schedule-tag, scroll back until last revision that didn't have attendee changes?
+ Perhaps include a hint in e.g. commit message?
- Inbox "contains copies of incoming scheduling messages"
- Outbox "at which busy time information requests are targeted."
xandikos_0.2.10.orig/notes/store.rst 0000644 0000000 0000000 00000001100 13123462602 014366 0 ustar 00 Dulwich Store
=============
The main building blocks are vCard (.vcf) and iCalendar (.ics) files. Storage
happens in Git repositories.
Most items are identified by a UID and a filename, both of which are unique for
the store. Items can have multiple versions, which are identified by an ETag.
Each store maps to a single Git repository, and can not contain directories. In
the future, a store could map to a subtree in a Git repository.
Stores are responsible for making sure that:
- their contents are validly formed calendars/contacts
- UIDs are unique (where relevant); see the sketch below
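Extracting the UID needed for such a uniqueness check could look roughly like
this (a sketch for iCalendar items only; vCard items would need equivalent
handling):

.. code:: python

    from icalendar import Calendar

    def extract_uid(ics_bytes):
        """Return the single UID used by the item stored in this blob."""
        cal = Calendar.from_ical(ics_bytes)
        uids = {str(component["UID"])
                for component in cal.walk() if "UID" in component}
        if len(uids) != 1:
            raise ValueError("item must contain exactly one UID")
        return uids.pop()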
xandikos_0.2.10.orig/notes/structure.rst 0000644 0000000 0000000 00000001316 13075511367 015314 0 ustar 00 Xandikos has a fairly clear distinction between different components.
Modules
=======
The core WebDAV implementation lives in xandikos.webdav. This just implements
the WebDAV protocol, and provides abstract classes for WebDAV resources that can be
implemented by other code.
Several WebDAV extensions (access, CardDAV, CalDAV) live in their own
Python file. They build on top of the WebDAV module, and provide extra
reporter and property implementations as defined in those specifications.
Store is a simple object-store implementation on top of a Git repository, which
has several properties that make it useful as a WebDAV backend.
The business logic lives in xandikos.web; it ties together the other modules.
xandikos_0.2.10.orig/notes/subcommands.rst 0000644 0000000 0000000 00000000670 13407241352 015562 0 ustar 00 Subcommands
===========
At the moment, the Xandikos command just supports running a
(debug) webserver. In various situations it would also be useful
to have subcommands for administrative operations.
Proposed subcommands (a minimal dispatch sketch follows the list):
* ``xandikos init [--defaults] [--autocreate] [-d DIRECTORY]`` -
create a Xandikos database
* ``xandikos stats`` - dump stats, similar to those exposed by prometheus
* ``xandikos web`` - run a debug web server
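One way such dispatch might be wired up with ``argparse`` sub-parsers; the
names and options below only mirror the proposal above and are illustrative:

.. code:: python

    import argparse

    def main(argv=None):
        parser = argparse.ArgumentParser(prog="xandikos")
        subparsers = parser.add_subparsers(dest="command", required=True)

        init = subparsers.add_parser("init", help="create a Xandikos database")
        init.add_argument("-d", "--directory", default=".")
        init.add_argument("--defaults", action="store_true")
        init.add_argument("--autocreate", action="store_true")

        subparsers.add_parser("stats", help="dump statistics")
        subparsers.add_parser("web", help="run a debug web server")

        args = parser.parse_args(argv)
        print(f"would run subcommand {args.command!r}")  # dispatch goes here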
xandikos_0.2.10.orig/notes/uwsgi.rst 0000644 0000000 0000000 00000001755 13653665207 014426 0 ustar 00 Running Xandikos from uWSGI
===========================
In addition to running as a standalone service, Xandikos can also be run by any
service that supports the WSGI interface. An example of such a service is uWSGI.
One option is to set up uWSGI with a server like
`Apache `_,
`Nginx `_ or another web
server that can authenticate users and forward authorized requests to
Xandikos in uWSGI. See `examples/uwsgi.ini `_ for an
example uWSGI configuration.
Alternatively, you can run uWSGI standalone and have it authenticate and
directly serve HTTP traffic. An example configuration for this can be found in
`examples/uwsgi-standalone.ini `_.
This will start a server on `localhost:8080 `_ with username *user1* and password
*password1*.
.. code:: shell
mkdir -p $HOME/dav
uwsgi examples/uwsgi-standalone.ini
xandikos_0.2.10.orig/notes/webdav.rst 0000644 0000000 0000000 00000002442 14476041427 014526 0 ustar 00 WebDAV implementation
=====================
.. code:: python
class DAVPropertyProvider(object):
NAME property
matchresource()
# One or multiple properties?
def proplist(self, resource, all=False):
def getprop(self, resource, property):
def propupdate(self, resource, updates):
class DAVBackend(object):
def get_resource(self, path):
def create_collection(self, path):
class DAVReporter(object):
class DAVResource(object):
def get_resource_types(self):
def get_body(self):
"""Returns the body of the resource.
Returns: bytes representing contents
"""
def set_body(self, body):
"""Set the body of the resource.
Args:
body: body (as bytes)
"""
def proplist(self):
"""Return list of properties.
Returns: List of property names
"""
def propupdate(self, updates):
"""Update properties.
Args:
updates: Dictionary mapping names to new values
"""
def lock(self):
def unlock(self):
def members(self):
"""List members.
Returns: List tuples of (name, DAVResource)
"""
# TODO(jelmer): COPY
# TODO(jelmer): MOVE
# TODO(jelmer): MKCOL
# TODO(jelmer): LOCK/UNLOCK
# TODO(jelmer): REPORT
xandikos_0.2.10.orig/src/lib.rs 0000644 0000000 0000000 00000000000 14433202134 013246 0 ustar 00 xandikos_0.2.10.orig/xandikos/__init__.py 0000644 0000000 0000000 00000001731 14476041427 015325 0 ustar 00 #
# Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""CalDAV/CardDAV server."""
__version__ = (0, 2, 10)
version_string = ".".join(map(str, __version__))
import defusedxml.ElementTree # noqa: This does some monkey-patching on-load
xandikos_0.2.10.orig/xandikos/__main__.py 0000644 0000000 0000000 00000002041 14476041427 015301 0 ustar 00 # Xandikos
# Copyright (C) 2016-2018 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Xandikos command-line handling."""
import asyncio
def main(argv=None):
# For now, just invoke xandikos.web
from .web import main
return asyncio.run(main(argv))
if __name__ == "__main__":
import sys
sys.exit(main(sys.argv[1:]))
xandikos_0.2.10.orig/xandikos/access.py 0000644 0000000 0000000 00000004441 14476041427 015030 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Access control.
See http://www.webdav.org/specs/rfc3744.html
"""
from xandikos import webdav
ET = webdav.ET
# Feature to advertise access control support.
FEATURE = "access-control"
class CurrentUserPrivilegeSetProperty(webdav.Property):
"""current-user-privilege-set property.
See http://www.webdav.org/specs/rfc3744.html, section 3.7
"""
name = "{DAV:}current-user-privilege-set"
in_allprops = False
live = True
async def get_value(self, href, resource, el, environ):
privilege = ET.SubElement(el, "{DAV:}privilege")
# TODO(jelmer): Use something other than all
ET.SubElement(privilege, "{DAV:}all")
class OwnerProperty(webdav.Property):
"""owner property.
See http://www.webdav.org/specs/rfc3744.html, section 5.1
"""
name = "{DAV:}owner"
in_allprops = False
live = True
async def get_value(self, base_href, resource, el, environ):
owner_href = resource.get_owner()
if owner_href is not None:
el.append(webdav.create_href(owner_href, base_href=base_href))
class GroupMembershipProperty(webdav.Property):
"""Group membership.
See https://www.ietf.org/rfc/rfc3744.txt, section 4.4
"""
name = "{DAV:}group-membership"
in_allprops = False
live = True
resource_type = webdav.PRINCIPAL_RESOURCE_TYPE
async def get_value(self, base_href, resource, el, environ):
for href in resource.get_group_membership():
el.append(webdav.create_href(href, base_href=base_href))
xandikos_0.2.10.orig/xandikos/apache.py 0000644 0000000 0000000 00000003001 14476041427 014777 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Apache.org mod_dav custom properties.
See http://www.webdav.org/mod_dav/
"""
from xandikos import webdav
class ExecutableProperty(webdav.Property):
"""executable property.
Equivalent of the 'x' bit on POSIX.
"""
name = "{http://apache.org/dav/props/}executable"
resource_type = None
live = False
async def get_value(self, href, resource, el, environ):
el.text = "T" if resource.get_is_executable() else "F"
async def set_value(self, href, resource, el):
if el.text == "T":
resource.set_is_executable(True)
elif el.text == "F":
resource.set_is_executable(False)
else:
raise ValueError(f"invalid executable setting {el.text!r}")
xandikos_0.2.10.orig/xandikos/caldav.py 0000644 0000000 0000000 00000103121 14476041427 015014 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Simple CalDAV server.
https://tools.ietf.org/html/rfc4791
"""
import datetime
import itertools
import pytz
from icalendar.cal import Calendar as ICalendar
from icalendar.cal import Component, FreeBusy, component_factory
from icalendar.prop import LocalTimezone, vDDDTypes, vPeriod
from . import davcommon, webdav
from .icalendar import (apply_time_range_vevent, as_tz_aware_ts,
expand_calendar_rrule)
ET = webdav.ET
PRODID = "-//Jelmer Vernooij//Xandikos//EN"
WELLKNOWN_CALDAV_PATH = "/.well-known/caldav"
EXTENDED_MKCOL_FEATURE = "extended-mkcol"
NAMESPACE = "urn:ietf:params:xml:ns:caldav"
# https://tools.ietf.org/html/rfc4791, section 4.2
CALENDAR_RESOURCE_TYPE = "{%s}calendar" % NAMESPACE
SUBSCRIPTION_RESOURCE_TYPE = "{http://calendarserver.org/ns/}subscribed"
# TODO(jelmer): These resource types belong in scheduling.py
SCHEDULE_INBOX_RESOURCE_TYPE = "{%s}schedule-inbox" % NAMESPACE
SCHEDULE_OUTBOX_RESOURCE_TYPE = "{%s}schedule-outbox" % NAMESPACE
# Feature to advertise to indicate CalDAV support.
FEATURE = "calendar-access"
TRANSPARENCY_TRANSPARENT = "transparent"
TRANSPARENCY_OPAQUE = "opaque"
class Calendar(webdav.Collection):
resource_types = webdav.Collection.resource_types + [
CALENDAR_RESOURCE_TYPE]
def get_calendar_description(self) -> str:
"""Return the calendar description."""
raise NotImplementedError(self.get_calendar_description)
def get_calendar_color(self) -> str:
"""Return the calendar color."""
raise NotImplementedError(self.get_calendar_color)
def set_calendar_color(self, color: str) -> None:
"""Set the calendar color."""
raise NotImplementedError(self.set_calendar_color)
def get_calendar_order(self) -> str:
"""Return the calendar order."""
raise NotImplementedError(self.get_calendar_order)
def set_calendar_order(self, order: str) -> None:
"""Set the calendar order."""
raise NotImplementedError(self.set_calendar_order)
def get_calendar_timezone(self) -> str:
"""Return calendar timezone.
This should be an iCalendar object with exactly one
VTIMEZONE component.
"""
raise NotImplementedError(self.get_calendar_timezone)
def set_calendar_timezone(self, content: str) -> None:
"""Set calendar timezone.
This should be an iCalendar object with exactly one
VTIMEZONE component.
"""
raise NotImplementedError(self.set_calendar_timezone)
def get_supported_calendar_components(self) -> str:
"""Return set of supported calendar components in this calendar.
Returns: iterable over component names
"""
raise NotImplementedError(self.get_supported_calendar_components)
def get_supported_calendar_data_types(self) -> str:
"""Return supported calendar data types.
Returns: iterable over (content_type, version) tuples
"""
raise NotImplementedError(self.get_supported_calendar_data_types)
def get_min_date_time(self):
"""Return minimum datetime property."""
raise NotImplementedError(self.get_min_date_time)
def get_max_date_time(self):
"""Return maximum datetime property."""
raise NotImplementedError(self.get_max_date_time)
def get_max_instances(self):
"""Return maximum number of instances."""
raise NotImplementedError(self.get_max_instances)
def get_max_attendees_per_instance(self):
"""Return maximum number of attendees per instance."""
raise NotImplementedError(self.get_max_attendees_per_instance)
def get_max_resource_size(self):
"""Return max resource size."""
raise NotImplementedError(self.get_max_resource_size)
def get_max_attachments_per_resource(self):
"""Return max attachments per resource."""
raise NotImplementedError(self.get_max_attachments_per_resource)
def get_max_attachment_size(self):
"""Return max attachment size."""
raise NotImplementedError(self.get_max_attachment_size)
def get_schedule_calendar_transparency(self):
"""Get calendar transparency.
Possible values are TRANSPARENCY_TRANSPARENT and TRANSPARENCY_OPAQUE
"""
return TRANSPARENCY_OPAQUE
def calendar_query(self, create_filter_fn):
"""Query for all the members of this calendar that match `filter`.
This is a naive implementation; subclasses should ideally provide
their own implementation that is faster.
Args:
create_filter_fn: Callback that constructs a
filter; takes a filter building class.
Returns: Iterator over name, resource objects
"""
raise NotImplementedError(self.calendar_query)
def get_xmpp_server(self):
raise NotImplementedError(self.get_xmpp_server)
def get_xmpp_heartbeat(self):
raise NotImplementedError(self.get_xmpp_heartbeat)
def get_xmpp_uri(self):
raise NotImplementedError(self.get_xmpp_uri)
class Subscription:
resource_types = webdav.Collection.resource_types + [
SUBSCRIPTION_RESOURCE_TYPE]
def get_source_url(self):
"""Get the source URL for this calendar."""
raise NotImplementedError(self.get_source_url)
def set_source_url(self, url):
"""Set the source URL for this calendar."""
raise NotImplementedError(self.set_source_url)
def get_calendar_description(self):
"""Return the calendar description."""
raise NotImplementedError(self.get_calendar_description)
def get_calendar_color(self):
"""Return the calendar color."""
raise NotImplementedError(self.get_calendar_color)
def set_calendar_color(self, color):
"""Set the calendar color."""
raise NotImplementedError(self.set_calendar_color)
def get_supported_calendar_components(self):
"""Return set of supported calendar components in this calendar.
Returns: iterable over component names
"""
raise NotImplementedError(self.get_supported_calendar_components)
class CalendarHomeSet:
def get_managed_attachments_server_url(self):
"""Return the attachments server URL."""
raise NotImplementedError(self.get_managed_attachments_server_url)
class PrincipalExtensions:
"""CalDAV-specific extensions to DAVPrincipal."""
def get_calendar_home_set(self):
"""Get the calendar home set.
Returns: a set of URLs
"""
raise NotImplementedError(self.get_calendar_home_set)
def get_calendar_user_address_set(self):
"""Get the calendar user address set.
Returns: a set of URLs (usually mailto:...)
"""
raise NotImplementedError(self.get_calendar_user_address_set)
class CalendarHomeSetProperty(webdav.Property):
"""calendar-home-set property.
See https://www.ietf.org/rfc/rfc4791.txt, section 6.2.1.
"""
name = "{%s}calendar-home-set" % NAMESPACE
resource_type = "{DAV:}principal"
in_allprops = False
live = True
async def get_value(self, base_href, resource, el, environ):
for href in resource.get_calendar_home_set():
href = webdav.ensure_trailing_slash(href)
el.append(webdav.create_href(href, base_href))
class CalendarDescriptionProperty(webdav.Property):
"""Provides calendar-description property.
https://tools.ietf.org/html/rfc4791, section 5.2.1
"""
name = "{%s}calendar-description" % NAMESPACE
resource_type = (CALENDAR_RESOURCE_TYPE, SUBSCRIPTION_RESOURCE_TYPE)
async def get_value(self, base_href, resource, el, environ):
el.text = resource.get_calendar_description()
# TODO(jelmer): allow modification of this property
async def set_value(self, href, resource, el):
raise NotImplementedError
def _extract_from_component(
incomp: Component, outcomp: Component, requested) -> None:
"""Extract specific properties from a calendar event.
Args:
incomp: Incoming component
outcomp: Outcoming component
requested: Which components should be included
"""
for tag in requested:
if tag.tag == ("{%s}comp" % NAMESPACE):
for insub in incomp.subcomponents:
if insub.name == tag.get("name"):
outsub = component_factory[insub.name]()
outcomp.add_component(outsub)
_extract_from_component(insub, outsub, tag)
elif tag.tag == ("{%s}prop" % NAMESPACE):
outcomp[tag.get("name")] = incomp[tag.get("name")]
elif tag.tag == ("{%s}allprop" % NAMESPACE):
for propname in incomp:
outcomp[propname] = incomp[propname]
elif tag.tag == ("{%s}allcomp" % NAMESPACE):
for insub in incomp.subcomponents:
outsub = component_factory[insub.name]()
outcomp.add_component(outsub)
_extract_from_component(insub, outsub, tag)
else:
raise AssertionError(f"invalid element {tag!r}")
def extract_from_calendar(incal, requested):
"""Extract requested components/properties from calendar.
Args:
incal: Calendar to filter
requested: element with requested
components/properties
"""
for tag in requested:
if tag.tag == ("{%s}comp" % NAMESPACE):
if incal.name == tag.get("name"):
c = ICalendar()
_extract_from_component(incal, c, tag)
incal = c
elif tag.tag == ("{%s}expand" % NAMESPACE):
(start, end) = _parse_time_range(tag)
incal = expand_calendar_rrule(incal, start, end)
elif tag.tag == ("{%s}limit-recurrence-set" % NAMESPACE):
# TODO(jelmer): https://github.com/jelmer/xandikos/issues/103
raise NotImplementedError(
"limit-recurrence-set is not yet implemented")
elif tag.tag == ("{%s}limit-freebusy-set" % NAMESPACE):
# TODO(jelmer): https://github.com/jelmer/xandikos/issues/104
raise NotImplementedError(
"limit-freebusy-set is not yet implemented")
else:
raise AssertionError(f"invalid element {tag!r}")
return incal
class CalendarDataProperty(davcommon.SubbedProperty):
"""calendar-data property.
See https://tools.ietf.org/html/rfc4791, section 5.2.4
Note that this is not technically a DAV property, and
it is thus not registered in the regular webdav server.
"""
name = "{%s}calendar-data" % NAMESPACE
def supported_on(self, resource):
return resource.get_content_type() == "text/calendar"
async def get_value_ext(self, base_href, resource, el, environ, requested):
if len(requested) == 0:
serialized_cal = b"".join(await resource.get_body())
else:
calendar = await calendar_from_resource(resource)
if calendar is None:
raise KeyError
c = extract_from_calendar(calendar, requested)
serialized_cal = c.to_ical()
# TODO(jelmer): Don't hardcode encoding
# TODO(jelmer): Strip invalid characters or raise an exception
el.text = serialized_cal.decode("utf-8")
class CalendarOrderProperty(webdav.Property):
"""Provides calendar-order property."""
name = "{http://apple.com/ns/ical/}calendar-order"
resource_type = CALENDAR_RESOURCE_TYPE
async def get_value(self, base_href, resource, el, environ):
el.text = resource.get_calendar_order()
async def set_value(self, href, resource, el):
resource.set_calendar_order(el.text)
class CalendarMultiGetReporter(davcommon.MultiGetReporter):
name = "{%s}calendar-multiget" % NAMESPACE
resource_type = (CALENDAR_RESOURCE_TYPE, SCHEDULE_INBOX_RESOURCE_TYPE)
data_property = CalendarDataProperty()
def parse_prop_filter(el, cls):
name = el.get("name")
# From https://tools.ietf.org/html/rfc4791, 9.7.2:
# A CALDAV:prop-filter is said to match if:
prop_filter = cls(name=name)
for subel in el:
if subel.tag == "{urn:ietf:params:xml:ns:caldav}is-not-defined":
prop_filter.is_not_defined = True
elif subel.tag == "{urn:ietf:params:xml:ns:caldav}time-range":
parse_time_range(subel, prop_filter.filter_time_range)
elif subel.tag == "{urn:ietf:params:xml:ns:caldav}text-match":
parse_text_match(subel, prop_filter.filter_text_match)
elif subel.tag == "{urn:ietf:params:xml:ns:caldav}param-filter":
parse_param_filter(subel, prop_filter.filter_parameter)
elif subel.tag == "{urn:ietf:params:xml:ns:caldav}is-not-defined":
pass
else:
raise AssertionError(f"unknown subelement {subel.tag!r}")
return prop_filter
def parse_text_match(el, cls):
collation = el.get("collation", "i;ascii-casemap")
negate_condition = el.get("negate-condition", "no")
return cls(
el.text,
collation=collation,
negate_condition=(negate_condition == "yes"),
)
def parse_param_filter(el, cls):
name = el.get("name")
param_filter = cls(name=name)
for subel in el:
if subel.tag == "{urn:ietf:params:xml:ns:caldav}is-not-defined":
param_filter.is_not_defined = True
elif subel.tag == "{urn:ietf:params:xml:ns:caldav}text-match":
parse_text_match(subel, param_filter.filter_text_match)
else:
raise AssertionError("unknown tag %r in param-filter", subel.tag)
return param_filter
def _parse_time_range(el):
start = el.get("start")
end = el.get("end")
# Either start OR end OR both need to be specified.
# https://tools.ietf.org/html/rfc4791, section 9.9
assert start is not None or end is not None
if start is None:
start = "00010101T000000Z"
if end is None:
end = "99991231T235959Z"
start = vDDDTypes.from_ical(start)
end = vDDDTypes.from_ical(end)
assert end > start
return (start, end)
def parse_time_range(el, cls):
(start, end) = _parse_time_range(el)
return cls(start, end)
def parse_comp_filter(el: ET.Element, cls):
"""Compile a comp-filter element into a Python function."""
name = el.get("name")
# From https://tools.ietf.org/html/rfc4791, 9.7.1:
# A CALDAV:comp-filter is said to match if:
comp_filter = cls(name=name)
# 3. The CALDAV:comp-filter XML element contains a CALDAV:time-range XML
# element and at least one recurrence instance in the targeted calendar
# component is scheduled to overlap the specified time range, and all
# specified CALDAV:prop-filter and CALDAV:comp-filter child XML elements
# also match the targeted calendar component;
for subel in el:
if subel.tag == "{urn:ietf:params:xml:ns:caldav}is-not-defined":
comp_filter.is_not_defined = True
if subel.tag == "{urn:ietf:params:xml:ns:caldav}comp-filter":
parse_comp_filter(subel, comp_filter.filter_subcomponent)
elif subel.tag == "{urn:ietf:params:xml:ns:caldav}prop-filter":
parse_prop_filter(subel, comp_filter.filter_property)
elif subel.tag == "{urn:ietf:params:xml:ns:caldav}time-range":
parse_time_range(subel, comp_filter.filter_time_range)
else:
raise AssertionError(f"unknown filter tag {subel.tag!r}")
return comp_filter
def parse_filter(filter_el: ET.Element, cls):
for subel in filter_el:
if subel.tag == "{urn:ietf:params:xml:ns:caldav}comp-filter":
parse_comp_filter(subel, cls.filter_subcomponent)
else:
raise AssertionError(f"unknown filter tag {subel.tag!r}")
return cls
async def calendar_from_resource(resource):
try:
if resource.get_content_type() != "text/calendar":
return None
except KeyError:
return None
file = await resource.get_file()
return file.calendar
def extract_tzid(cal):
return cal.subcomponents[0]["TZID"]
def get_pytz_from_text(tztext):
tzid = extract_tzid(ICalendar.from_ical(tztext))
return pytz.timezone(tzid)
def get_calendar_timezone(resource):
try:
tztext = resource.get_calendar_timezone()
except KeyError:
return LocalTimezone()
else:
return get_pytz_from_text(tztext)
class CalendarQueryReporter(webdav.Reporter):
name = "{%s}calendar-query" % NAMESPACE
resource_type = (CALENDAR_RESOURCE_TYPE, SCHEDULE_INBOX_RESOURCE_TYPE)
data_property = CalendarDataProperty()
@webdav.multistatus
async def report(
self,
environ,
body,
resources_by_hrefs,
properties,
base_href,
base_resource,
depth,
strict
):
# TODO(jelmer): Verify that resource is a calendar
requested = None
filter_el = None
tztext = None
for el in body:
if el.tag in ("{DAV:}prop", "{DAV:}propname", "{DAV:}allprop"):
requested = el
elif el.tag == "{urn:ietf:params:xml:ns:caldav}filter":
filter_el = el
elif el.tag == "{urn:ietf:params:xml:ns:caldav}timezone":
tztext = el.text
else:
webdav.nonfatal_bad_request(
f"Unknown tag {el.tag} in report {self.name}",
strict
)
if requested is None:
# The CalDAV RFC says that behaviour mimics that of PROPFIND,
# and the WebDAV RFC says that no body implies {DAV}allprop
# This isn't exactly an empty body, but close enough.
requested = ET.Element('{DAV:}allprop')
if tztext is not None:
tz = get_pytz_from_text(tztext)
else:
tz = get_calendar_timezone(base_resource)
def filter_fn(cls):
return parse_filter(filter_el, cls(tz))
def members(collection):
return itertools.chain(
collection.calendar_query(filter_fn),
collection.subcollections(),
)
async for (href, resource) in webdav.traverse_resource(
base_resource, base_href, depth, members=members
):
# Ideally traverse_resource would only return the right things.
if getattr(resource, "content_type", None) == "text/calendar":
propstat = davcommon.get_properties_with_data(
self.data_property,
href,
resource,
properties,
environ,
requested,
)
yield webdav.Status(
href, "200 OK", propstat=[s async for s in propstat]
)
class CalendarColorProperty(webdav.Property):
"""calendar-color property.
This contains a HTML #RRGGBB color code, as CDATA.
"""
name = "{http://apple.com/ns/ical/}calendar-color"
resource_type = (CALENDAR_RESOURCE_TYPE, SUBSCRIPTION_RESOURCE_TYPE)
async def get_value(self, href, resource, el, environ):
el.text = resource.get_calendar_color()
async def set_value(self, href, resource, el):
resource.set_calendar_color(el.text)
class SupportedCalendarComponentSetProperty(webdav.Property):
"""supported-calendar-component-set property.
Set of supported calendar components by this calendar.
See https://www.ietf.org/rfc/rfc4791.txt, section 5.2.3
"""
name = "{%s}supported-calendar-component-set" % NAMESPACE
resource_type = (
CALENDAR_RESOURCE_TYPE,
SCHEDULE_INBOX_RESOURCE_TYPE,
SCHEDULE_OUTBOX_RESOURCE_TYPE,
SUBSCRIPTION_RESOURCE_TYPE,
)
in_allprops = False
live = True
async def get_value(self, href, resource, el, environ):
for component in resource.get_supported_calendar_components():
subel = ET.SubElement(el, "{urn:ietf:params:xml:ns:caldav}comp")
subel.set("name", component)
class SupportedCalendarDataProperty(webdav.Property):
"""supported-calendar-data property.
See https://tools.ietf.org/html/rfc4791, section 5.2.4
"""
name = "{urn:ietf:params:xml:ns:caldav}supported-calendar-data"
resource_type = (
CALENDAR_RESOURCE_TYPE,
SCHEDULE_INBOX_RESOURCE_TYPE,
SCHEDULE_OUTBOX_RESOURCE_TYPE,
)
in_allprops = False
async def get_value(self, href, resource, el, environ):
for (
content_type,
version,
) in resource.get_supported_calendar_data_types():
subel = ET.SubElement(
el, "{urn:ietf:params:xml:ns:caldav}calendar-data")
subel.set("content-type", content_type)
subel.set("version", version)
class CalendarTimezoneProperty(webdav.Property):
"""calendar-timezone property.
See https://tools.ietf.org/html/rfc4791, section 5.2.2
"""
name = "{urn:ietf:params:xml:ns:caldav}calendar-timezone"
resource_type = (CALENDAR_RESOURCE_TYPE, SCHEDULE_INBOX_RESOURCE_TYPE)
in_allprops = False
async def get_value(self, href, resource, el, environ):
el.text = resource.get_calendar_timezone()
async def set_value(self, href, resource, el):
if el is not None:
resource.set_calendar_timezone(el.text)
else:
resource.set_calendar_timezone(None)
class MinDateTimeProperty(webdav.Property):
"""min-date-time property.
See https://tools.ietf.org/html/rfc4791, section 5.2.6
"""
name = "{urn:ietf:params:xml:ns:caldav}min-date-time"
resource_type = (
CALENDAR_RESOURCE_TYPE,
SCHEDULE_INBOX_RESOURCE_TYPE,
SCHEDULE_OUTBOX_RESOURCE_TYPE,
)
in_allprops = False
live = True
async def get_value(self, href, resource, el, environ):
el.text = resource.get_min_date_time()
class MaxDateTimeProperty(webdav.Property):
"""max-date-time property.
See https://tools.ietf.org/html/rfc4791, section 5.2.7
"""
name = "{urn:ietf:params:xml:ns:caldav}max-date-time"
resource_type = (
CALENDAR_RESOURCE_TYPE,
SCHEDULE_INBOX_RESOURCE_TYPE,
SCHEDULE_OUTBOX_RESOURCE_TYPE,
)
in_allprops = False
live = True
async def get_value(self, href, resource, el, environ):
el.text = resource.get_max_date_time()
class MaxInstancesProperty(webdav.Property):
"""max-instances property.
See https://tools.ietf.org/html/rfc4791, section 5.2.8
"""
name = "{%s}max-instances" % NAMESPACE
resource_type = (CALENDAR_RESOURCE_TYPE, SCHEDULE_INBOX_RESOURCE_TYPE)
in_allprops = False
live = True
async def get_value(self, href, resource, el, environ):
el.text = str(resource.get_max_instances())
class MaxAttendeesPerInstanceProperty(webdav.Property):
"""max-instances property.
See https://tools.ietf.org/html/rfc4791, section 5.2.9
"""
name = "{%s}max-attendees-per-instance" % NAMESPACE
resource_type = (
CALENDAR_RESOURCE_TYPE,
SCHEDULE_INBOX_RESOURCE_TYPE,
SCHEDULE_OUTBOX_RESOURCE_TYPE,
)
in_allprops = False
live = True
async def get_value(self, href, resource, el, environ):
el.text = str(resource.get_max_attendees_per_instance())
class MaxResourceSizeProperty(webdav.Property):
"""max-resource-size property.
See https://tools.ietf.org/html/rfc4791, section 5.2.5
"""
name = "{%s}max-resource-size" % NAMESPACE
resource_type = (
CALENDAR_RESOURCE_TYPE,
SCHEDULE_INBOX_RESOURCE_TYPE,
SCHEDULE_OUTBOX_RESOURCE_TYPE,
)
in_allprops = False
live = True
async def get_value(self, href, resource, el, environ):
el.text = str(resource.get_max_resource_size())
class MaxAttachmentsPerResourceProperty(webdav.Property):
"""max-attachments-per-resource property.
https://tools.ietf.org/id/draft-ietf-calext-caldav-attachments-03.html#rfc.section.6.3
"""
name = "{%s}max-attachments-per-resource" % NAMESPACE
resource_type = CALENDAR_RESOURCE_TYPE
in_allprops = False
live = True
async def get_value(self, href, resource, el, environ):
el.text = str(resource.get_max_attachments_per_resource())
class MaxAttachmentSizeProperty(webdav.Property):
"""max-attachment-size property.
https://tools.ietf.org/id/draft-ietf-calext-caldav-attachments-03.html#rfc.section.6.2
"""
name = "{%s}max-attachment-size" % NAMESPACE
resource_type = CALENDAR_RESOURCE_TYPE
in_allprops = False
live = True
async def get_value(self, href, resource, el, environ):
el.text = str(resource.get_max_attachment_size())
class ManagedAttachmentsServerURLProperty(webdav.Property):
"""managed-attachments-server-URL property.
https://tools.ietf.org/id/draft-ietf-calext-caldav-attachments-03.html#rfc.section.6.1
"""
name = "{%s}managed-attachments-server-URL" % NAMESPACE
in_allprops = False
async def get_value(self, base_href, resource, el, environ):
# The RFC specifies that this property can be set on a calendar home
# collection.
# However, there is no matching resource type and we don't want to
# force all resources to implement it. So we just check whether the
# attribute is present.
fn = getattr(resource, "get_managed_attachments_server_url", None)
if fn is None:
raise KeyError
href = fn()
if href is not None:
el.append(webdav.create_href(href, base_href))
class SourceProperty(webdav.Property):
"""source property."""
name = "{http://calendarserver.org/ns/}source"
resource_type = SUBSCRIPTION_RESOURCE_TYPE
in_allprops = True
live = False
async def get_value(self, base_href, resource, el, environ):
el.append(webdav.create_href(resource.get_source_url(), base_href))
async def set_value(self, href, resource, el):
raise NotImplementedError(self.set_value)
class CalendarProxyReadForProperty(webdav.Property):
"""calendar-proxy-read-for property.
See https://github.com/apple/ccs-calendarserver/blob/master/\
doc/Extensions/caldav-proxy.txt, section 5.3.1.
"""
name = "{http://calendarserver.org/ns/}calendar-proxy-read-for"
resource_type = webdav.PRINCIPAL_RESOURCE_TYPE
in_allprops = False
live = True
async def get_value(self, base_href, resource, el, environ):
for href in resource.get_calendar_proxy_read_for():
el.append(webdav.create_href(href, base_href))
class CalendarProxyWriteForProperty(webdav.Property):
"""calendar-proxy-write-for property.
See https://github.com/apple/ccs-calendarserver/blob/master/\
doc/Extensions/caldav-proxy.txt, section 5.3.2.
"""
name = "{http://calendarserver.org/ns/}calendar-proxy-write-for"
resource_type = webdav.PRINCIPAL_RESOURCE_TYPE
in_allprops = False
live = True
async def get_value(self, base_href, resource, el, environ):
for href in resource.get_calendar_proxy_write_for():
el.append(webdav.create_href(href, base_href))
class ScheduleCalendarTransparencyProperty(webdav.Property):
"""schedule-calendar-transp property.
See https://tools.ietf.org/html/rfc6638#section-9.1
"""
name = "{%s}schedule-calendar-transp" % NAMESPACE
in_allprops = False
live = False
resource_type = CALENDAR_RESOURCE_TYPE
async def get_value(self, base_href, resource, el, environ):
transp = resource.get_schedule_calendar_transparency()
if transp == TRANSPARENCY_TRANSPARENT:
ET.SubElement(el, "{%s}transparent" % NAMESPACE)
elif transp == TRANSPARENCY_OPAQUE:
ET.SubElement(el, "{%s}opaque" % NAMESPACE)
else:
raise ValueError(f"Invalid transparency {transp}")
def map_freebusy(comp):
transp = comp.get("TRANSP", "OPAQUE")
if transp == "TRANSPARENT":
return "FREE"
assert transp == "OPAQUE", f"unknown transp {transp!r}"
status = comp.get("STATUS", "CONFIRMED")
if status == "CONFIRMED":
return "BUSY"
elif status == "CANCELLED":
return "FREE"
elif status == "TENTATIVE":
return "BUSY-TENTATIVE"
elif status.startswith("X-"):
return status
else:
raise AssertionError(f"unknown status {status!r}")
def extract_freebusy(comp, tzify):
kind = map_freebusy(comp)
if kind == "FREE":
return None
if "DTEND" in comp:
ret = vPeriod((tzify(comp["DTSTART"].dt), tzify(comp["DTEND"].dt)))
if "DURATION" in comp:
ret = vPeriod((tzify(comp["DTSTART"].dt), comp["DURATION"].dt))
if kind != "BUSY":
ret.params["FBTYPE"] = kind
return ret
async def iter_freebusy(resources, start, end, tzify):
async for (href, resource) in resources:
c = await calendar_from_resource(resource)
if c is None:
continue
if c.name != "VCALENDAR":
continue
for comp in c.subcomponents:
if comp.name == "VEVENT":
if apply_time_range_vevent(start, end, comp, tzify):
vp = extract_freebusy(comp, tzify)
if vp is not None:
yield vp
class FreeBusyQueryReporter(webdav.Reporter):
"""free-busy-query reporter.
See https://tools.ietf.org/html/rfc4791, section 7.10
"""
name = "{urn:ietf:params:xml:ns:caldav}free-busy-query"
resource_type = CALENDAR_RESOURCE_TYPE
async def report(
self,
environ,
body,
resources_by_hrefs,
properties,
base_href,
base_resource,
depth,
strict
):
requested = None
for el in body:
if el.tag == "{urn:ietf:params:xml:ns:caldav}time-range":
requested = el
else:
webdav.nonfatal_bad_request("unexpected XML element", strict)
continue
tz = get_calendar_timezone(base_resource)
def tzify(dt):
return as_tz_aware_ts(dt, tz).astimezone(pytz.utc)
(start, end) = _parse_time_range(requested)
ret = ICalendar()
ret["VERSION"] = "2.0"
ret["PRODID"] = PRODID
fb = FreeBusy()
fb["DTSTAMP"] = vDDDTypes(tzify(datetime.datetime.now()))
fb["DTSTART"] = vDDDTypes(start)
fb["DTEND"] = vDDDTypes(end)
fb["FREEBUSY"] = [
item
async for item in iter_freebusy(
webdav.traverse_resource(base_resource, base_href, depth),
start,
end,
tzify,
)
]
ret.add_component(fb)
return webdav.Response(status="200 OK", body=[ret.to_ical()])
class MkcalendarMethod(webdav.Method):
async def handle(self, request, environ, app):
content_type = request.content_type
base_content_type, params = webdav.parse_type(content_type)
if base_content_type not in (
"text/xml",
"application/xml",
None,
"text/plain",
"application/octet-stream",
):
raise webdav.UnsupportedMediaType(content_type)
href, path, resource = app._get_resource_from_environ(request, environ)
if resource is not None:
return webdav._send_simple_dav_error(
request,
"403 Forbidden",
error=ET.Element("{DAV:}resource-must-be-null"),
description=f"Something already exists at {path!r}",
)
try:
resource = app.backend.create_collection(path)
except FileNotFoundError:
return webdav.Response(status="409 Conflict")
el = ET.Element("{DAV:}resourcetype")
await app.properties["{DAV:}resourcetype"].get_value(
href, resource, el, environ
)
ET.SubElement(el, "{urn:ietf:params:xml:ns:caldav}calendar")
await app.properties["{DAV:}resourcetype"].set_value(
href, resource, el)
if base_content_type in ("text/xml", "application/xml"):
et = await webdav._readXmlBody(
request,
"{urn:ietf:params:xml:ns:caldav}mkcalendar",
strict=app.strict,
)
propstat = []
for el in et:
if el.tag != "{DAV:}set":
webdav.nonfatal_bad_request(
f"Unknown tag {el.tag} in mkcalendar",
app.strict)
continue
propstat.extend(
[
ps
async for ps in webdav.apply_modify_prop(
el, href, resource, app.properties
)
]
)
ret = ET.Element(
"{urn:ietf:params:xml:ns:carldav:}mkcalendar-response")
for propstat_el in webdav.propstat_as_xml(propstat):
ret.append(propstat_el)
return webdav._send_xml_response(
"201 Created", ret, webdav.DEFAULT_ENCODING
)
else:
return webdav.Response(status="201 Created")
xandikos_0.2.10.orig/xandikos/carddav.py 0000644 0000000 0000000 00000027305 14476041427 015177 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""CardDAV support.
https://tools.ietf.org/html/rfc6352
"""
from . import collation as _mod_collation
from . import davcommon, webdav
ET = webdav.ET
WELLKNOWN_CARDDAV_PATH = "/.well-known/carddav"
NAMESPACE = "urn:ietf:params:xml:ns:carddav"
ADDRESSBOOK_RESOURCE_TYPE = "{%s}addressbook" % NAMESPACE
# Feature to advertise presence of CardDAV support
FEATURE = "addressbook"
class AddressbookHomeSetProperty(webdav.Property):
"""addressbook-home-set property.
See https://tools.ietf.org/html/rfc6352, section 7.1.1
"""
name = "{%s}addressbook-home-set" % NAMESPACE
resource_type = "{DAV:}principal"
in_allprops = False
live = True
async def get_value(self, base_href, resource, el, environ):
for href in resource.get_addressbook_home_set():
href = webdav.ensure_trailing_slash(href)
el.append(webdav.create_href(href, base_href))
class AddressDataProperty(davcommon.SubbedProperty):
"""address-data property.
See https://tools.ietf.org/html/rfc6352, section 10.4
Note that this is not technically a DAV property, and
it is thus not registered in the regular webdav server.
"""
name = "{%s}address-data" % NAMESPACE
def supported_on(self, resource):
return resource.get_content_type() == "text/vcard"
async def get_value_ext(self, href, resource, el, environ, requested):
# TODO(jelmer): Support subproperties
# TODO(jelmer): Don't hardcode encoding
el.text = b"".join(await resource.get_body()).decode("utf-8")
class AddressbookDescriptionProperty(webdav.Property):
"""Provides calendar-description property.
https://tools.ietf.org/html/rfc6352, section 6.2.1
"""
name = "{%s}addressbook-description" % NAMESPACE
resource_type = ADDRESSBOOK_RESOURCE_TYPE
async def get_value(self, href, resource, el, environ):
el.text = resource.get_addressbook_description()
async def set_value(self, href, resource, el):
resource.set_addressbook_description(el.text)
class AddressbookMultiGetReporter(davcommon.MultiGetReporter):
name = "{%s}addressbook-multiget" % NAMESPACE
resource_type = ADDRESSBOOK_RESOURCE_TYPE
data_property = AddressDataProperty()
class Addressbook(webdav.Collection):
resource_types = webdav.Collection.resource_types + [
ADDRESSBOOK_RESOURCE_TYPE]
def get_addressbook_description(self) -> str:
raise NotImplementedError(self.get_addressbook_description)
def set_addressbook_description(self, description: str) -> None:
raise NotImplementedError(self.set_addressbook_description)
def get_addressbook_color(self) -> str:
raise NotImplementedError(self.get_addressbook_color)
def set_addressbook_color(self, color: str) -> None:
raise NotImplementedError(self.set_addressbook_color)
def get_supported_address_data_types(self):
"""Get list of supported data types.
Returns: List of tuples with content type and version
"""
raise NotImplementedError(self.get_supported_address_data_types)
def get_max_resource_size(self) -> int:
"""Get maximum object size this address book will store (in bytes).
Absence indicates no maximum.
"""
raise NotImplementedError(self.get_max_resource_size)
def get_max_image_size(self) -> int:
"""Get maximum image size this address book will store (in bytes).
Absence indicates no maximum.
"""
raise NotImplementedError(self.get_max_image_size)
class PrincipalExtensions:
"""Extensions to webdav.Principal."""
def get_addressbook_home_set(self) -> set[str]:
"""Return set of addressbook home URLs.
Returns: set of URLs
"""
raise NotImplementedError(self.get_addressbook_home_set)
def get_principal_address(self) -> str:
"""Return URL to principal address vCard."""
raise NotImplementedError(self.get_principal_address)
class PrincipalAddressProperty(webdav.Property):
"""Provides the principal-address property.
https://tools.ietf.org/html/rfc6352, section 7.1.2
"""
name = "{%s}principal-address" % NAMESPACE
resource_type = "{DAV:}principal"
in_allprops = False
async def get_value(self, href, resource, el, environ):
el.append(webdav.create_href(resource.get_principal_address(), href))
class SupportedAddressDataProperty(webdav.Property):
"""Provides the supported-address-data property.
https://tools.ietf.org/html/rfc6352, section 6.2.2
"""
name = "{%s}supported-address-data" % NAMESPACE
resource_type = ADDRESSBOOK_RESOURCE_TYPE
in_allprops = False
live = True
async def get_value(self, href, resource, el, environ):
for (
content_type,
version,
) in resource.get_supported_address_data_types():
subel = ET.SubElement(el, "{%s}content-type" % NAMESPACE)
subel.set("content-type", content_type)
subel.set("version", version)
class MaxResourceSizeProperty(webdav.Property):
"""Provides the max-resource-size property.
See https://tools.ietf.org/html/rfc6352, section 6.2.3.
"""
name = "{%s}max-resource-size" % NAMESPACE
resource_type = ADDRESSBOOK_RESOURCE_TYPE
in_allprops = False
live = True
async def get_value(self, href, resource, el, environ):
el.text = str(resource.get_max_resource_size())
class MaxImageSizeProperty(webdav.Property):
"""Provides the max-image-size property.
This seems to be a carddav extension used by iOS and caldavzap.
"""
name = "{%s}max-image-size" % NAMESPACE
resource_type = ADDRESSBOOK_RESOURCE_TYPE
in_allprops = False
live = True
async def get_value(self, href, resource, el, environ):
el.text = str(resource.get_max_image_size())
async def addressbook_from_resource(resource):
try:
if resource.get_content_type() != "text/vcard":
return None
except KeyError:
return None
file = await resource.get_file()
return file.addressbook.contents
def apply_text_match(el: ET.Element, value: str) -> bool:
collation = el.get("collation", "i;ascii-casemap")
negate_condition = el.get("negate-condition", "no")
match_type = el.get("match-type", "contains")
matches = _mod_collation.collations[collation](
value, el.text or '', match_type)
if negate_condition == "yes":
return not matches
else:
return matches
def apply_param_filter(el, prop):
name = el.get("name")
if (len(el) == 1
and el[0].tag == "{urn:ietf:params:xml:ns:carddav}is-not-defined"):
return name not in prop.params
try:
value = prop.params[name]
except KeyError:
return False
for subel in el:
if subel.tag == "{urn:ietf:params:xml:ns:carddav}text-match":
if not apply_text_match(subel, value):
return False
else:
raise AssertionError("unknown tag %r in param-filter", subel.tag)
return True
def apply_prop_filter(el, ab):
name = el.get("name").lower()
# From https://tools.ietf.org/html/rfc6352
# A CARDDAV:prop-filter is said to match if:
# The CARDDAV:prop-filter XML element contains a CARDDAV:is-not-defined XML
# element and no property of the type specified by the "name" attribute
# exists in the enclosing calendar component;
if (len(el) == 1
and el[0].tag == "{urn:ietf:params:xml:ns:carddav}is-not-defined"):
return name not in ab
try:
prop = ab[name]
except KeyError:
return False
for prop_el in prop:
matched = True
for subel in el:
if subel.tag == "{urn:ietf:params:xml:ns:carddav}text-match":
if not apply_text_match(subel, str(prop_el)):
matched = False
break
elif subel.tag == "{urn:ietf:params:xml:ns:carddav}param-filter":
if not apply_param_filter(subel, prop_el):
matched = False
break
if matched:
return True
return False
async def apply_filter(el, resource):
"""Compile a filter element into a Python function."""
if el is None or not list(el):
# Empty filter, let's not bother parsing
return lambda x: True
ab = await addressbook_from_resource(resource)
if ab is None:
return False
test_name = el.get("test", "anyof")
test = {"allof": all, "anyof": any}[test_name]
return test(apply_prop_filter(subel, ab) for subel in el)
class AddressbookQueryReporter(webdav.Reporter):
name = "{%s}addressbook-query" % NAMESPACE
resource_type = ADDRESSBOOK_RESOURCE_TYPE
data_property = AddressDataProperty()
@webdav.multistatus
async def report(
self,
environ,
body,
resources_by_hrefs,
properties,
base_href,
base_resource,
depth,
strict
):
requested = None
filter_el = None
limit = None
for el in body:
if el.tag in ("{DAV:}prop", "{DAV:}allprop", "{DAV:}propname"):
requested = el
elif el.tag == ("{%s}filter" % NAMESPACE):
filter_el = el
elif el.tag == ("{%s}limit" % NAMESPACE):
limit = el
else:
webdav.nonfatal_bad_request(
f"Unknown tag {el.tag} in report {self.name}",
strict)
if requested is None:
# The CardDAV RFC says that behaviour mimics that of PROPFIND,
# and the WebDAV RFC says that no body implies {DAV}allprop
# This isn't exactly an empty body, but close enough.
requested = ET.Element('{DAV:}allprop')
if limit is not None:
try:
[nresults_el] = list(limit)
except ValueError:
webdav.nonfatal_bad_request(
"Invalid number of subelements in limit", strict)
nresults = None
else:
try:
nresults = int(nresults_el.text)
except ValueError:
webdav.nonfatal_bad_request(
"nresults not a number", strict)
nresults = None
else:
nresults = None
i = 0
async for (href, resource) in webdav.traverse_resource(
base_resource, base_href, depth
):
if not await apply_filter(filter_el, resource):
continue
if nresults is not None and i >= nresults:
break
propstat = davcommon.get_properties_with_data(
self.data_property,
href,
resource,
properties,
environ,
requested,
)
yield webdav.Status(
href, "200 OK", propstat=[s async for s in propstat])
i += 1
xandikos_0.2.10.orig/xandikos/collation.py 0000644 0000000 0000000 00000004167 14476041427 015560 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Collations."""
from typing import Callable
class UnknownCollation(Exception):
def __init__(self, collation: str) -> None:
super().__init__(
f"Collation {collation!r} is not supported"
)
self.collation = collation
def _match(a, b, k):
if k == "equals":
return a == b
elif k == "contains":
return b in a
elif k == "starts-with":
return a.startswith(b)
elif k == "ends-with":
return a.endswith(b)
else:
raise NotImplementedError
collations: dict[str, Callable[[str, str, str], bool]] = {
"i;ascii-casemap": lambda a, b, k: _match(
a.encode("ascii").upper(), b.encode("ascii").upper(), k
),
"i;octet": lambda a, b, k: _match(a, b, k),
# TODO(jelmer): Follow all rules as specified in
# https://datatracker.ietf.org/doc/html/rfc5051
"i;unicode-casemap": lambda a, b, k: _match(
a.encode('utf-8', 'surrogateescape').upper(),
b.encode('utf-8', 'surrogateescape').upper(),
k),
}
def get_collation(name: str) -> Callable[[str, str, str], bool]:
"""Get a collation by name.
Args:
name: Collation name
Raises:
UnknownCollation: If the collation is not supported
"""
try:
return collations[name]
except KeyError as exc:
raise UnknownCollation(name) from exc
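# Example usage (illustrative, not executed on import): the RFC 4790 match
# types "equals", "contains", "starts-with" and "ends-with" are supported.
#
#   >>> get_collation("i;ascii-casemap")("Jane", "JANE", "equals")
#   True
#   >>> get_collation("i;octet")("Jane", "JANE", "equals")
#   False
#   >>> get_collation("i;unknown")   # raises UnknownCollation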
xandikos_0.2.10.orig/xandikos/davcommon.py 0000644 0000000 0000000 00000006773 14476041427 015564 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Common functions for DAV implementations."""
from xandikos import webdav
ET = webdav.ET
class SubbedProperty(webdav.Property):
"""Property with sub-components that can be queried."""
async def get_value_ext(self, href, resource, el, environ, requested):
"""Get the value of a data property.
Args:
href: Resource href
resource: Resource to get value for
el: Element to fill in
environ: WSGI environ dict
requested: Requested property (including subelements)
"""
raise NotImplementedError(self.get_value_ext)
async def get_properties_with_data(
data_property, href, resource, properties, environ, requested
):
properties = dict(properties)
properties[data_property.name] = data_property
async for ps in webdav.get_properties(
href, resource, properties, environ, requested
):
yield ps
class MultiGetReporter(webdav.Reporter):
"""Abstract base class for multi-get reporters."""
name: str
# A SubbedProperty subclass
data_property: SubbedProperty
@webdav.multistatus
async def report(
self,
environ,
body,
resources_by_hrefs,
properties,
base_href,
resource,
depth,
strict
):
# TODO(jelmer): Verify that depth == "0"
# TODO(jelmer): Verify that resource is an the right resource type
requested = None
hrefs = []
for el in body:
if el.tag in ("{DAV:}prop", "{DAV:}allprop", "{DAV:}propname"):
requested = el
elif el.tag == "{DAV:}href":
hrefs.append(webdav.read_href_element(el))
else:
webdav.nonfatal_bad_request(
f"Unknown tag {el.tag} in report {self.name}",
strict)
if requested is None:
# The CalDAV RFC says that behaviour mimics that of PROPFIND,
# and the WebDAV RFC says that no body implies {DAV:}allprop.
# This isn't exactly an empty body, but close enough.
requested = ET.Element('{DAV:}allprop')
for (href, resource) in resources_by_hrefs(hrefs):
if resource is None:
yield webdav.Status(href, "404 Not Found", propstat=[])
else:
propstat = get_properties_with_data(
self.data_property,
href,
resource,
properties,
environ,
requested,
)
yield webdav.Status(
href, "200 OK", propstat=[s async for s in propstat]
)
# see https://tools.ietf.org/html/rfc4790
xandikos_0.2.10.orig/xandikos/icalendar.py 0000644 0000000 0000000 00000103017 14476041427 015510 0 ustar 00 # Xandikos
# Copyright (C) 2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""ICalendar file handling."""
import logging
from collections.abc import Iterable
from datetime import datetime, time, timedelta, timezone
from typing import Callable, Optional, Union
import dateutil.rrule
import pytz
from icalendar.cal import Calendar, Component, component_factory
from icalendar.prop import TypesFactory, vCategory, vDatetime, vDDDTypes, vText
from xandikos.store import File, Filter, InvalidFileContents
from . import collation as _mod_collation
from .store.index import IndexDict, IndexKey, IndexValue, IndexValueIterator
TYPES_FACTORY = TypesFactory()
PropTypes = Union[vText]
TzifyFunction = Callable[[datetime], datetime]
# TODO(jelmer): Populate this further based on
# https://tools.ietf.org/html/rfc5545#3.3.11
_INVALID_CONTROL_CHARACTERS = ["\x0c", "\x01"]
class MissingProperty(Exception):
def __init__(self, property_name) -> None:
super().__init__(
f"Property {property_name!r} missing")
self.property_name = property_name
def validate_calendar(cal, strict=False):
"""Validate a calendar object.
Args:
cal: Calendar object
Returns: iterator over error messages
"""
yield from validate_component(cal, strict=strict)
# SubIndexDict is like IndexDict, but None can also occur as a key
SubIndexDict = dict[Optional[IndexKey], IndexValue]
def create_subindexes(
indexes: Union[SubIndexDict, IndexDict], base: str) -> SubIndexDict:
ret: SubIndexDict = {}
for k, v in indexes.items():
if k is not None and k.startswith(base + "/"):
ret[k[len(base) + 1:]] = v
elif k == base:
ret[None] = v
return ret
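# Illustrative example: narrowing a calendar's index entries to those of its
# VEVENT subcomponents (the index values here are example data).
#
#   >>> create_subindexes(
#   ...     {"C=VCALENDAR/C=VEVENT/P=DTSTART": [b"20230501T120000Z"],
#   ...      "C=VCALENDAR": [True]},
#   ...     "C=VCALENDAR")
#   {'C=VEVENT/P=DTSTART': [b'20230501T120000Z'], None: [True]}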
def validate_component(comp, strict=False):
"""Validate a calendar component.
Args:
comp: Calendar component
"""
# Check text fields for invalid characters
for (name, value) in comp.items():
if isinstance(value, vText):
for c in _INVALID_CONTROL_CHARACTERS:
if c in value:
yield "Invalid character {} in field {}".format(
c.encode("unicode_escape"),
name,
)
if strict:
for required in comp.required:
try:
comp[required]
except KeyError:
yield f"Missing required field {required}"
for subcomp in comp.subcomponents:
yield from validate_component(subcomp, strict=strict)
def calendar_component_delta(old_cal, new_cal):
"""Find the differences between components in two calendars.
Args:
old_cal: Old calendar (can be None)
new_cal: New calendar (can be None)
Returns: iterator over (old_component, new_component) tuples (either can be None)
"""
by_uid = {}
by_content = {}
by_idx = {}
idx = 0
for component in getattr(old_cal, "subcomponents", []):
try:
by_uid[component["UID"]] = component
except KeyError:
by_content[component.to_ical()] = True
by_idx[idx] = component
idx += 1
idx = 0
for component in new_cal.subcomponents:
try:
old_component = by_uid.pop(component["UID"])
except KeyError:
if not by_content.pop(component.to_ical(), None):
# Not previously present
yield (
by_idx.get(idx, component_factory[component.name]()),
component,
)
by_idx.pop(idx, None)
else:
yield (old_component, component)
for old_component in by_idx.values():
yield (old_component, component_factory[old_component.name]())
def calendar_prop_delta(old_component, new_component):
fields = set(
[field for field in old_component or []]
+ [field for field in new_component or []]
)
for field in fields:
old_value = old_component.get(field)
new_value = new_component.get(field)
if (
getattr(old_value, "to_ical", None) is None
or getattr(new_value, "to_ical", None) is None
or old_value.to_ical() != new_value.to_ical()
):
yield (field, old_value, new_value)
def describe_component(component):
if component.name == "VTODO":
try:
return f"task '{component['SUMMARY']}'"
except KeyError:
return "task"
else:
try:
return component["SUMMARY"]
except KeyError:
return "calendar item"
DELTA_IGNORE_FIELDS = {
"LAST-MODIFIED",
"SEQUENCE",
"DTSTAMP",
"PRODID",
"CREATED",
"COMPLETED",
"X-MOZ-GENERATION",
"X-LIC-ERROR",
"UID",
}
def describe_calendar_delta(old_cal, new_cal):
"""Describe the differences between two calendars.
Args:
old_cal: Old calendar (can be None)
new_cal: New calendar (can be None)
Returns: Lines describing changes
"""
# TODO(jelmer): Extend
for old_component, new_component in calendar_component_delta(
old_cal, new_cal):
if not new_component:
yield f"Deleted {describe_component(old_component)}"
continue
description = describe_component(new_component)
if not old_component:
yield f"Added {describe_component(new_component)}"
continue
for field, old_value, new_value in calendar_prop_delta(
old_component, new_component
):
if field.upper() in DELTA_IGNORE_FIELDS:
continue
if (old_component.name.upper() == "VTODO"
and field.upper() == "STATUS"):
if new_value is None:
yield f"status of {description} deleted"
else:
human_readable = {
"NEEDS-ACTION": "needing action",
"COMPLETED": "complete",
"CANCELLED": "cancelled",
}
yield "{} marked as {}".format(
description,
human_readable.get(new_value.upper(), new_value),
)
elif field.upper() == "DESCRIPTION":
yield f"changed description of {description}"
elif field.upper() == "SUMMARY":
yield f"changed summary of {description}"
elif field.upper() == "LOCATION":
yield f"changed location of {description} to {new_value}"
elif (
old_component.name.upper() == "VTODO"
and field.upper() == "PERCENT-COMPLETE"
and new_value is not None
):
yield "%s marked as %d%% completed." % (description, new_value)
elif field.upper() == "DUE":
yield "changed due date for {} from {} to {}".format(
description,
old_value.dt if old_value else "none",
new_value.dt if new_value else "none",
)
elif field.upper() == "DTSTART":
yield "changed start date/time of {} from {} to {}".format(
description,
old_value.dt if old_value else "none",
new_value.dt if new_value else "none",
)
elif field.upper() == "DTEND":
yield "changed end date/time of {} from {} to {}".format(
description,
old_value.dt if old_value else "none",
new_value.dt if new_value else "none",
)
elif field.upper() == "CLASS":
yield "changed class of {} from {} to {}".format(
description,
old_value.lower() if old_value else "none",
new_value.lower() if new_value else "none",
)
else:
yield f"modified field {field} in {description}"
logging.debug(
"Changed %s/%s or %s/%s from %s to %s.",
old_component.name,
field,
new_component.name,
field,
old_value,
new_value,
)
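# Illustrative sketch: summarising the changes between two revisions of a
# calendar object (OLD_ICS and NEW_ICS are placeholders for serialized
# VCALENDAR data).
#
#   old_cal = Calendar.from_ical(OLD_ICS)
#   new_cal = Calendar.from_ical(NEW_ICS)
#   for line in describe_calendar_delta(old_cal, new_cal):
#       print(line)  # e.g. "changed summary of Weekly sync"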
def apply_time_range_vevent(start, end, comp, tzify):
dtstart = comp.get("DTSTART")
if not dtstart:
raise MissingProperty("DTSTART")
if not (end > tzify(dtstart.dt)):
return False
dtend = comp.get("DTEND")
if dtend:
if tzify(dtend.dt) < tzify(dtstart.dt):
logging.debug("Invalid DTEND < DTSTART")
return start < tzify(dtend.dt)
duration = comp.get("DURATION")
if duration:
return start < tzify(dtstart.dt) + duration.dt
if getattr(dtstart.dt, "time", None) is not None:
return start <= tzify(dtstart.dt)
else:
return start < (tzify(dtstart.dt) + timedelta(1))
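# Illustrative example: an event whose DTSTART falls inside the queried window
# matches (the Event and datetimes below are example values; the tzify
# callback is a no-op because the values are already timezone-aware).
#
#   >>> from datetime import datetime, timezone
#   >>> from icalendar.cal import Event
#   >>> ev = Event()
#   >>> ev.add("DTSTART", datetime(2023, 5, 1, 12, 0, tzinfo=timezone.utc))
#   >>> apply_time_range_vevent(
#   ...     datetime(2023, 5, 1, tzinfo=timezone.utc),
#   ...     datetime(2023, 5, 2, tzinfo=timezone.utc),
#   ...     ev, lambda dt: dt)
#   True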
def apply_time_range_vjournal(start, end, comp, tzify):
dtstart = comp.get("DTSTART")
if not dtstart:
raise MissingProperty("DTSTART")
if not (end > tzify(dtstart.dt)):
return False
if getattr(dtstart.dt, "time", None) is not None:
return start <= tzify(dtstart.dt)
else:
return start < (tzify(dtstart.dt) + timedelta(1))
def apply_time_range_vtodo(start, end, comp, tzify):
dtstart = comp.get("DTSTART")
due = comp.get("DUE")
# See RFC 4791, section 9.9
if dtstart:
duration = comp.get("DURATION")
if duration and not due:
return start <= tzify(dtstart.dt) + duration.dt and (
end > tzify(dtstart.dt)
or end >= tzify(dtstart.dt) + duration.dt
)
elif due and not duration:
return (start <= tzify(dtstart.dt) or start < tzify(due.dt)) and (
end > tzify(dtstart.dt) or end < tzify(due.dt)
)
else:
return start <= tzify(dtstart.dt) and end > tzify(dtstart.dt)
if due:
return start < tzify(due.dt) and end >= tzify(due.dt)
completed = comp.get("COMPLETED")
created = comp.get("CREATED")
if completed:
if created:
return (start <= tzify(created.dt)
or start <= tzify(completed.dt)) and (
end >= tzify(created.dt) or end >= tzify(completed.dt))
else:
return start <= tzify(completed.dt) and end >= tzify(completed.dt)
elif created:
return end >= tzify(created.dt)
else:
return True
def apply_time_range_vfreebusy(start, end, comp, tzify):
dtstart = comp.get("DTSTART")
dtend = comp.get("DTEND")
if dtstart and dtend:
return start <= tzify(dtend.dt) and end > tzify(dtstart.dt)
for period in comp.get("FREEBUSY", []):
if start < period.end and end > period.start:
return True
return False
def apply_time_range_valarm(start, end, comp, tzify):
raise NotImplementedError(apply_time_range_valarm)
class PropertyTimeRangeMatcher:
def __init__(self, start: datetime, end: datetime) -> None:
self.start = start
self.end = end
def __repr__(self) -> str:
return f"{self.__class__.__name__}({self.start!r}, {self.end!r})"
def match(self, prop, tzify):
dt = tzify(prop.dt)
return dt >= self.start and dt <= self.end
def match_indexes(self, prop: SubIndexDict, tzify: TzifyFunction):
return any(
self.match(
vDDDTypes(vDDDTypes.from_ical(p.decode('utf-8'))), tzify)
for p in prop[None] if not isinstance(p, bool))
TimeRangeFilter = Callable[
[datetime, datetime, Component, TzifyFunction], bool]
class ComponentTimeRangeMatcher:
all_props = [
"DTSTART",
"DTEND",
"DURATION",
"CREATED",
"COMPLETED",
"DUE",
"FREEBUSY",
]
# According to https://tools.ietf.org/html/rfc4791, section 9.9 these
# are the properties to check.
component_handlers: dict[str, TimeRangeFilter] = {
"VEVENT": apply_time_range_vevent,
"VTODO": apply_time_range_vtodo,
"VJOURNAL": apply_time_range_vjournal,
"VFREEBUSY": apply_time_range_vfreebusy,
"VALARM": apply_time_range_valarm,
}
def __init__(self, start, end, comp=None) -> None:
self.start = start
self.end = end
self.comp = comp
def __repr__(self) -> str:
if self.comp is not None:
return "{}({!r}, {!r}, comp={!r})".format(
self.__class__.__name__,
self.start,
self.end,
self.comp,
)
else:
return f"{self.__class__.__name__}({self.start!r}, {self.end!r})"
def match(self, comp: Component, tzify: TzifyFunction):
try:
component_handler = self.component_handlers[comp.name]
except KeyError:
logging.warning(
"unknown component %r in time-range filter", comp.name)
return False
return component_handler(self.start, self.end, comp, tzify)
def match_indexes(self, indexes: SubIndexDict, tzify: TzifyFunction):
vs: dict[str, vDDDTypes] = {}
for name, values in indexes.items():
if not name:
continue
field = name[2:]
if field not in self.all_props:
continue
for value in values:
if value and not isinstance(value, bool):
vs.setdefault(field, []).append(
vDDDTypes(vDDDTypes.from_ical(value.decode('utf-8'))))
try:
component_handler = self.component_handlers[self.comp]
except KeyError:
logging.warning(
"unknown component %r in time-range filter", self.comp)
return False
return component_handler(
self.start,
self.end,
# TODO(jelmer): What to do if there is more than one value?
{k: v[0] for (k, v) in vs.items()},
tzify)
def index_keys(self) -> list[list[str]]:
if self.comp == "VEVENT":
props = ["DTSTART", "DTEND", "DURATION"]
elif self.comp == "VTODO":
props = ["DTSTART", "DUE", "DURATION", "CREATED", "COMPLETED"]
elif self.comp == "VJOURNAL":
props = ["DTSTART"]
elif self.comp == "VFREEBUSY":
props = ["DTSTART", "DTEND", "FREEBUSY"]
elif self.comp == "VALARM":
raise NotImplementedError
else:
props = self.all_props
return [["P=" + prop] for prop in props]
class TextMatcher:
def __init__(self, name: str, text: str,
collation: Optional[str] = None,
negate_condition: bool = False) -> None:
self.name = name
self.type_fn = TYPES_FACTORY.for_property(name)
assert isinstance(text, str)
self.text = text
if collation is None:
collation = "i;ascii-casemap"
self.collation = _mod_collation.get_collation(collation)
self.negate_condition = negate_condition
def __repr__(self) -> str:
return "{}({!r}, {!r}, collation={!r}, negate_condition={!r})".format(
self.__class__.__name__,
self.name,
self.text,
self.collation,
self.negate_condition,
)
def match_indexes(self, indexes: SubIndexDict):
return any(
self.match(self.type_fn(self.type_fn.from_ical(k)))
for k in indexes[None])
def match(self, prop: Union[vText, vCategory, str]):
if isinstance(prop, vText):
matches = self.collation(self.text, str(prop), 'equals')
elif isinstance(prop, str):
matches = self.collation(self.text, prop, 'equals')
elif isinstance(prop, vCategory):
matches = any([self.match(cat) for cat in prop.cats])
else:
logging.warning(
"potentially unsupported value in text match search: " +
repr(prop))
return False
if self.negate_condition:
return not matches
else:
return matches
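# Illustrative example: with the default i;ascii-casemap collation, SUMMARY
# matching is case-insensitive.
#
#   >>> TextMatcher("SUMMARY", "team meeting").match(vText("Team Meeting"))
#   True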
class ComponentFilter:
time_range: Optional[ComponentTimeRangeMatcher]
def __init__(
self, name: str, children=None, is_not_defined: bool = False,
time_range=None) -> None:
self.name = name
self.children = children
self.is_not_defined = is_not_defined
self.time_range = time_range
self.children = children or []
def __repr__(self) -> str:
return ("{}({!r}, children={!r}, is_not_defined={!r}, time_range={!r})"
.format(
self.__class__.__name__,
self.name,
self.children,
self.is_not_defined,
self.time_range))
def filter_subcomponent(
self, name: str, is_not_defined: bool = False,
time_range: Optional[ComponentTimeRangeMatcher] = None):
ret = ComponentFilter(
name=name, is_not_defined=is_not_defined, time_range=time_range
)
self.children.append(ret)
return ret
def filter_property(self, name: str, is_not_defined: bool = False,
time_range: Optional[PropertyTimeRangeMatcher] = None):
ret = PropertyFilter(
name=name, is_not_defined=is_not_defined, time_range=time_range
)
self.children.append(ret)
return ret
def filter_time_range(self, start: datetime, end: datetime):
self.time_range = ComponentTimeRangeMatcher(start, end, comp=self.name)
return self.time_range
def match(self, comp: Component, tzify: TzifyFunction):
# From https://tools.ietf.org/html/rfc4791, 9.7.1:
# A CALDAV:comp-filter is said to match if:
# 2. The CALDAV:comp-filter XML element contains a
# CALDAV:is-not-defined XML element and the calendar object or calendar
# component type specified by the "name" attribute does not exist in
# the current scope;
if self.is_not_defined:
return comp.name != self.name
# 1: The CALDAV:comp-filter XML element is empty and the calendar
# object or calendar component type specified by the "name" attribute
# exists in the current scope;
if comp.name != self.name:
return False
# 3. The CALDAV:comp-filter XML element contains a CALDAV:time-range
# XML element and at least one recurrence instance in the targeted
# calendar component is scheduled to overlap the specified time range
if (self.time_range is not None
and not self.time_range.match(comp, tzify)):
return False
# ... and all specified CALDAV:prop-filter and CALDAV:comp-filter child
# XML elements also match the targeted calendar component;
for child in self.children:
if isinstance(child, ComponentFilter):
if not any(child.match(c, tzify) for c in comp.subcomponents):
return False
elif isinstance(child, PropertyFilter):
if not child.match(comp, tzify):
return False
else:
raise TypeError(child)
return True
def _implicitly_defined(self):
return any(
not getattr(child, "is_not_defined", False)
for child in self.children
)
def match_indexes(self, indexes: IndexDict, tzify: TzifyFunction):
myindex = "C=" + self.name
if self.is_not_defined:
return not bool(indexes[myindex])
subindexes = create_subindexes(indexes, myindex)
if self.time_range is not None and not self.time_range.match_indexes(
subindexes, tzify
):
return False
for child in self.children:
if not child.match_indexes(subindexes, tzify):
return False
if not self._implicitly_defined():
return bool(indexes[myindex])
return True
def index_keys(self):
mine = "C=" + self.name
for child in (
self.children +
([self.time_range] if self.time_range else [])):
for tl in child.index_keys():
yield [(mine + "/" + child_index) for child_index in tl]
if not self._implicitly_defined():
yield [mine]
class PropertyFilter:
def __init__(self, name: str, children=None, is_not_defined: bool = False,
time_range: Optional[PropertyTimeRangeMatcher] = None) -> None:
self.name = name
self.is_not_defined = is_not_defined
self.children = children or []
self.time_range = time_range
def __repr__(self) -> str:
return ("{}({!r}, children={!r}, is_not_defined={!r}, time_range={!r})"
.format(
self.__class__.__name__, self.name, self.children,
self.is_not_defined, self.time_range))
def filter_parameter(
self, name: str,
is_not_defined: bool = False) -> "ParameterFilter":
ret = ParameterFilter(name=name, is_not_defined=is_not_defined)
self.children.append(ret)
return ret
def filter_time_range(
self, start: datetime, end: datetime) -> PropertyTimeRangeMatcher:
self.time_range = PropertyTimeRangeMatcher(start, end)
return self.time_range
def filter_text_match(
self, text: str, collation: Optional[str] = None,
negate_condition: bool = False) -> TextMatcher:
ret = TextMatcher(self.name, text, collation=collation,
negate_condition=negate_condition)
self.children.append(ret)
return ret
def match(self, comp: Component, tzify: TzifyFunction) -> bool:
# From https://tools.ietf.org/html/rfc4791, 9.7.2:
# A CALDAV:prop-filter is said to match if:
# The CALDAV:prop-filter XML element contains a CALDAV:is-not-defined
# XML element and no property of the type specified by the "name"
# attribute exists in the enclosing calendar component;
if self.is_not_defined:
return self.name not in comp
try:
prop = comp[self.name]
except KeyError:
return False
if self.time_range and not self.time_range.match(prop, tzify):
return False
for child in self.children:
if not child.match(prop):
return False
return True
def match_indexes(
self, indexes: SubIndexDict,
tzify: TzifyFunction) -> bool:
myindex = "P=" + self.name
if self.is_not_defined:
return not bool(indexes[myindex])
subindexes: SubIndexDict = create_subindexes(indexes, myindex)
if not self.children and not self.time_range:
return bool(indexes[myindex])
if self.time_range is not None and not self.time_range.match_indexes(
subindexes, tzify
):
return False
for child in self.children:
if not child.match_indexes(subindexes):
return False
return True
def index_keys(self):
mine = "P=" + self.name
for child in self.children:
if not isinstance(child, ParameterFilter):
continue
for tl in child.index_keys():
yield [(mine + "/" + child_index) for child_index in tl]
yield [mine]
class ParameterFilter:
children: list[TextMatcher]
def __init__(self, name: str, children: Optional[list[TextMatcher]] = None,
is_not_defined: bool = False) -> None:
self.name = name
self.is_not_defined = is_not_defined
self.children = children or []
def filter_text_match(self, text: str, collation: Optional[str] = None,
negate_condition: bool = False) -> TextMatcher:
ret = TextMatcher(
self.name, text, collation=collation,
negate_condition=negate_condition)
self.children.append(ret)
return ret
def match(self, prop: PropTypes) -> bool:
if self.is_not_defined:
return self.name not in prop.params
try:
value = prop.params[self.name]
except KeyError:
return False
for child in self.children:
if not child.match(value):
return False
return True
def index_keys(self) -> Iterable[list[str]]:
yield ["A=" + self.name]
def match_indexes(self, indexes: IndexDict) -> bool:
myindex = "A=" + self.name
if self.is_not_defined:
return not bool(indexes[myindex])
subindexes = create_subindexes(indexes, myindex)
if not subindexes:
return False
for child in self.children:
if not child.match_indexes(subindexes):
return False
return True
class CalendarFilter(Filter):
"""A filter that works on ICalendar files."""
content_type = "text/calendar"
def __init__(self, default_timezone: Union[str, timezone]) -> None:
self.tzify = lambda dt: as_tz_aware_ts(dt, default_timezone)
self.children: list[ComponentFilter] = []
def filter_subcomponent(self, name, is_not_defined=False, time_range=None):
ret = ComponentFilter(
name=name, is_not_defined=is_not_defined, time_range=time_range
)
self.children.append(ret)
return ret
def check(self, name: str, file: File) -> bool:
if not isinstance(file, ICalendarFile):
return False
c = file.calendar
if c is None:
return False
for child_filter in self.children:
try:
if not child_filter.match(file.calendar, self.tzify):
return False
except MissingProperty as e:
logging.warning(
"calendar_query: Ignoring calendar object %s, due "
"to missing property %s",
name,
e.property_name,
)
return False
return True
def check_from_indexes(self, name: str, indexes: IndexDict) -> bool:
for child_filter in self.children:
try:
if not child_filter.match_indexes(indexes, self.tzify):
return False
except MissingProperty as e:
logging.warning(
"calendar_query: Ignoring calendar object %s, due "
"to missing property %s",
name,
e.property_name,
)
return False
return True
def index_keys(self) -> list[str]:
subindexes = []
for child in self.children:
subindexes.extend(child.index_keys())
return subindexes
def __repr__(self) -> str:
return f"{self.__class__.__name__}({self.children!r})"
class ICalendarFile(File):
"""Handle for ICalendar files."""
content_type = "text/calendar"
def __init__(self, content, content_type) -> None:
super().__init__(content, content_type)
self._calendar = None
def validate(self) -> None:
"""Verify that file contents are valid."""
cal = self.calendar
# TODO(jelmer): return the list of errors to the caller
if cal.is_broken:
raise InvalidFileContents(
self.content_type, self.content, "Broken calendar file"
)
errors = list(validate_calendar(cal, strict=False))
if errors:
raise InvalidFileContents(
self.content_type, self.content, ", ".join(errors)
)
def normalized(self):
"""Return a normalized version of the file."""
return [self.calendar.to_ical()]
@property
def calendar(self):
if self._calendar is None:
try:
self._calendar = Calendar.from_ical(b"".join(self.content))
except ValueError as exc:
raise InvalidFileContents(
self.content_type, self.content, str(exc)) from exc
return self._calendar
def describe_delta(self, name, previous):
try:
lines = list(
describe_calendar_delta(
previous.calendar if previous else None, self.calendar
)
)
except NotImplementedError:
lines = []
if not lines:
lines = super().describe_delta(name, previous)
return lines
def describe(self, name):
try:
subcomponents = self.calendar.subcomponents
except InvalidFileContents:
pass
else:
for component in subcomponents:
try:
return component["SUMMARY"]
except KeyError:
pass
return super().describe(name)
def get_uid(self):
"""Extract the UID from a VCalendar file.
Args:
cal: Calendar, possibly serialized.
Returns: UID
"""
for component in self.calendar.subcomponents:
try:
return component["UID"]
except KeyError:
pass
raise KeyError
def _get_index(self, key: IndexKey) -> IndexValueIterator:
todo = [(self.calendar, key.split("/"))]
rest = []
c: Component
while todo:
(c, segments) = todo.pop(0)
if segments and segments[0].startswith("C="):
if c.name == segments[0][2:]:
if len(segments) > 1 and segments[1].startswith("C="):
todo.extend(
(comp, segments[1:]) for comp in c.subcomponents)
else:
rest.append((c, segments[1:]))
for c, segments in rest:
if not segments:
yield True
elif segments[0].startswith("P="):
assert len(segments) == 1
try:
p = c[segments[0][2:]]
except KeyError:
pass
else:
if p is not None:
yield p.to_ical()
else:
raise AssertionError(f"segments: {segments!r}")
def as_tz_aware_ts(dt, default_timezone: Union[str, timezone]) -> datetime:
if not getattr(dt, "time", None):
dt = datetime.combine(dt, time())
if dt.tzinfo is None:
dt = dt.replace(tzinfo=default_timezone)
assert dt.tzinfo
return dt
def rruleset_from_comp(comp: Component) -> dateutil.rrule.rruleset:
dtstart = comp["DTSTART"].dt
rrulestr = comp["RRULE"].to_ical().decode("utf-8")
rrule = dateutil.rrule.rrulestr(rrulestr, dtstart=dtstart)
rs = dateutil.rrule.rruleset()
rs.rrule(rrule) # type: ignore
if "EXDATE" in comp:
for exdate in comp["EXDATE"]:
rs.exdate(exdate)
if "RDATE" in comp:
for rdate in comp["RDATE"]:
rs.rdate(rdate)
if "EXRULE" in comp:
exrulestr = comp["EXRULE"].to_ical().decode("utf-8")
exrule = dateutil.rrule.rrulestr(exrulestr, dtstart=dtstart)
rs.exrule(exrule)
return rs
def _expand_rrule_component(
incomp: Component, start: datetime, end: datetime,
existing: dict[str, Component]) -> Iterable[Component]:
if "RRULE" not in incomp:
return
rs = rruleset_from_comp(incomp)
for field in ["RRULE", "EXRULE", "UNTIL", "RDATE", "EXDATE"]:
if field in incomp:
del incomp[field]
# Work our magic
for ts in rs.between(start, end):
utcts = asutc(ts)
try:
outcomp = existing.pop(utcts)
outcomp["DTSTART"] = vDatetime(asutc(outcomp["DTSTART"].dt))
except KeyError:
outcomp = incomp.copy()
outcomp["DTSTART"] = vDatetime(utcts)
outcomp["RECURRENCE-ID"] = vDatetime(utcts)
yield outcomp
def expand_calendar_rrule(
incal: Calendar, start: datetime, end: datetime) -> Calendar:
outcal = Calendar()
if incal.name != "VCALENDAR":
raise AssertionError(
f"called on file with root component {incal.name}")
for field in incal:
outcal[field] = incal[field]
known = {}
for insub in incal.subcomponents:
if "RECURRENCE-ID" in insub:
ts = insub["RECURRENCE-ID"].dt
utcts = asutc(ts)
known[utcts] = insub
for insub in incal.subcomponents:
if insub.name == "VTIMEZONE":
continue
if "RECURRENCE-ID" in insub:
continue
if "RRULE" in insub:
for outsub in _expand_rrule_component(insub, start, end, known):
outcal.add_component(outsub)
else:
outcal.add_component(insub)
return outcal
def asutc(dt):
return dt.astimezone(pytz.utc).replace(tzinfo=None)
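# Illustrative sketch: expanding a recurring event into concrete instances for
# a window (ICS_TEXT is a placeholder for serialized VCALENDAR data containing
# a VEVENT with an RRULE).
#
#   from datetime import datetime
#   cal = Calendar.from_ical(ICS_TEXT)
#   expanded = expand_calendar_rrule(
#       cal, datetime(2023, 1, 1), datetime(2023, 2, 1))
#   for sub in expanded.subcomponents:
#       print(sub.get("RECURRENCE-ID"))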
xandikos_0.2.10.orig/xandikos/infit.py 0000644 0000000 0000000 00000004432 14476041427 014700 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Inf-It properties."""
from xandikos import carddav, webdav
class SettingsProperty(webdav.Property):
"""settings propety.
JSON settings.
"""
name = "{http://inf-it.com/ns/dav/}settings"
resource_type = webdav.PRINCIPAL_RESOURCE_TYPE
live = False
async def get_value(self, href: str, resource, el, environ):
el.text = resource.get_infit_settings()
async def set_value(self, href: str, resource, el):
resource.set_infit_settings(el.text)
class AddressbookColorProperty(webdav.Property):
"""Provides the addressbook-color property.
Contains a RRGGBB code, similar to calendar-color.
"""
name = "{http://inf-it.com/ns/ab/}addressbook-color"
resource_type = carddav.ADDRESSBOOK_RESOURCE_TYPE
in_allprops = False
async def get_value(self, href, resource, el, environ):
el.text = resource.get_addressbook_color()
async def set_value(self, href, resource, el):
resource.set_addressbook_color(el.text)
class HeaderValueProperty(webdav.Property):
"""Provides the header-value property.
This behaves similarly to the hrefLabel setting in caldavzap/carddavmate.
"""
name = "{http://inf-it.com/ns/dav/}headervalue"
resource_type = webdav.COLLECTION_RESOURCE_TYPE
in_allprops = False
live = False
async def get_value(self, href, resource, el, environ):
el.text = resource.get_headervalue()
async def set_value(self, href, resource, el):
# TODO
raise NotImplementedError
xandikos_0.2.10.orig/xandikos/py.typed 0000644 0000000 0000000 00000000000 14317656250 014677 0 ustar 00 xandikos_0.2.10.orig/xandikos/quota.py 0000644 0000000 0000000 00000002767 14476041427 014731 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Quota and Size properties.
See https://tools.ietf.org/html/rfc4331
"""
from xandikos import webdav
FEATURE: str = "quota"
class QuotaAvailableBytesProperty(webdav.Property):
"""quota-available-bytes."""
name = "{DAV:}quota-available-bytes"
resource_type = None
in_allprops = False
live = True
async def get_value(self, href, resource, el, environ):
el.text = resource.get_quota_available_bytes()
class QuotaUsedBytesProperty(webdav.Property):
"""quota-used-bytes."""
name = "{DAV:}quota-used-bytes"
resource_type = None
in_allprops = False
live = True
async def get_value(self, href, resource, el, environ):
el.text = resource.get_quota_used_bytes()
xandikos_0.2.10.orig/xandikos/scheduling.py 0000644 0000000 0000000 00000017036 14476041427 015720 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Scheduling.
See https://tools.ietf.org/html/rfc6638
"""
from xandikos import caldav, webdav
SCHEDULE_INBOX_RESOURCE_TYPE = "{%s}schedule-inbox" % caldav.NAMESPACE
SCHEDULE_OUTBOX_RESOURCE_TYPE = "{%s}schedule-outbox" % caldav.NAMESPACE
# Feature to advertise to indicate scheduling support.
FEATURE = "calendar-auto-schedule"
CALENDAR_USER_TYPE_INDIVIDUAL = "INDIVIDUAL" # An individual
CALENDAR_USER_TYPE_GROUP = "GROUP" # A group of individuals
CALENDAR_USER_TYPE_RESOURCE = "RESOURCE" # A physical resource
CALENDAR_USER_TYPE_ROOM = "ROOM" # A room resource
CALENDAR_USER_TYPE_UNKNOWN = "UNKNOWN" # Otherwise not known
CALENDAR_USER_TYPES = (
CALENDAR_USER_TYPE_INDIVIDUAL,
CALENDAR_USER_TYPE_GROUP,
CALENDAR_USER_TYPE_RESOURCE,
CALENDAR_USER_TYPE_ROOM,
CALENDAR_USER_TYPE_UNKNOWN,
)
class ScheduleInbox(webdav.Collection):
resource_types = webdav.Collection.resource_types + [
SCHEDULE_INBOX_RESOURCE_TYPE]
def get_calendar_user_type(self):
# Default, per section 2.4.2
return CALENDAR_USER_TYPE_INDIVIDUAL
def get_calendar_timezone(self):
"""Return calendar timezone.
This should be an iCalendar object with exactly one
VTIMEZONE component.
"""
raise NotImplementedError(self.get_calendar_timezone)
def set_calendar_timezone(self):
"""Set calendar timezone.
This should be an iCalendar object with exactly one
VTIMEZONE component.
"""
raise NotImplementedError(self.set_calendar_timezone)
def get_supported_calendar_components(self):
"""Return set of supported calendar components in this calendar.
Returns: iterable over component names
"""
raise NotImplementedError(self.get_supported_calendar_components)
def get_supported_calendar_data_types(self):
"""Return supported calendar data types.
Returns: iterable over (content_type, version) tuples
"""
raise NotImplementedError(self.get_supported_calendar_data_types)
def get_min_date_time(self):
"""Return minimum datetime property."""
raise NotImplementedError(self.get_min_date_time)
def get_max_date_time(self):
"""Return maximum datetime property."""
raise NotImplementedError(self.get_max_date_time)
def get_max_instances(self):
"""Return maximum number of instances."""
raise NotImplementedError(self.get_max_instances)
def get_max_attendees_per_instance(self):
"""Return maximum number of attendees per instance."""
raise NotImplementedError(self.get_max_attendees_per_instance)
def get_max_resource_size(self):
"""Return max resource size."""
raise NotImplementedError(self.get_max_resource_size)
def get_schedule_default_calendar_url(self):
"""Return default calendar URL.
None indicates there is no default URL.
"""
return None
class ScheduleOutbox(webdav.Collection):
resource_types = webdav.Collection.resource_types + [
SCHEDULE_OUTBOX_RESOURCE_TYPE]
def get_supported_calendar_components(self):
"""Return set of supported calendar components in this calendar.
Returns: iterable over component names
"""
raise NotImplementedError(self.get_supported_calendar_components)
def get_supported_calendar_data_types(self):
"""Return supported calendar data types.
Returns: iterable over (content_type, version) tuples
"""
raise NotImplementedError(self.get_supported_calendar_data_types)
def get_max_resource_size(self):
"""Return max resource size."""
raise NotImplementedError(self.get_max_resource_size)
def get_min_date_time(self):
"""Return minimum datetime property."""
raise NotImplementedError(self.get_min_date_time)
def get_max_date_time(self):
"""Return maximum datetime property."""
raise NotImplementedError(self.get_max_date_time)
def get_max_attendees_per_instance(self):
"""Return maximum number of attendees per instance."""
raise NotImplementedError(self.get_max_attendees_per_instance)
class ScheduleInboxURLProperty(webdav.Property):
"""Schedule-inbox-URL property.
See https://tools.ietf.org/html/rfc6638, section 2.2
"""
name = "{%s}schedule-inbox-URL" % caldav.NAMESPACE
resource_type = webdav.PRINCIPAL_RESOURCE_TYPE
in_allprops = True
async def get_value(self, href, resource, el, environ):
el.append(webdav.create_href(resource.get_schedule_inbox_url(), href))
class ScheduleOutboxURLProperty(webdav.Property):
"""Schedule-outbox-URL property.
See https://tools.ietf.org/html/rfc6638, section 2.1
"""
name = "{%s}schedule-outbox-URL" % caldav.NAMESPACE
resource_type = webdav.PRINCIPAL_RESOURCE_TYPE
in_allprops = True
async def get_value(self, href, resource, el, environ):
el.append(webdav.create_href(resource.get_schedule_outbox_url(), href))
class CalendarUserAddressSetProperty(webdav.Property):
"""calendar-user-address-set property.
See https://tools.ietf.org/html/rfc6638, section 2.4.1
"""
name = "{%s}calendar-user-address-set" % caldav.NAMESPACE
resource_type = webdav.PRINCIPAL_RESOURCE_TYPE
in_allprops = False
async def get_value(self, base_href, resource, el, environ):
for href in resource.get_calendar_user_address_set():
el.append(webdav.create_href(href, base_href))
class ScheduleTagProperty(webdav.Property):
"""schedule-tag property.
See https://tools.ietf.org/html/rfc6638, section 3.2.10
"""
name = "{%s}schedule-tag" % caldav.NAMESPACE
in_allprops = False
def supported_on(self, resource):
return resource.get_content_type() == "text/calendar"
async def get_value(self, base_href, resource, el, environ):
el.text = resource.get_schedule_tag()
class CalendarUserTypeProperty(webdav.Property):
"""calendar-user-type property.
See https://tools.ietf.org/html/rfc6638, section 2.4.2
"""
name = "{%s}calendar-user-type" % caldav.NAMESPACE
resource_type = webdav.PRINCIPAL_RESOURCE_TYPE
in_allprops = False
async def get_value(self, href, resource, el, environ):
el.text = resource.get_calendar_user_type()
class ScheduleDefaultCalendarURLProperty(webdav.Property):
"""schedule-default-calendar-URL property.
See https://tools.ietf.org/html/rfc6638, section 9.2
"""
name = "{%s}schedule-default-calendar-URL" % caldav.NAMESPACE
resource_type = SCHEDULE_INBOX_RESOURCE_TYPE
in_allprops = True
async def get_value(self, href, resource, el, environ):
url = resource.get_schedule_default_calendar_url()
if url is not None:
el.append(webdav.create_href(url, href))
xandikos_0.2.10.orig/xandikos/server_info.py 0000644 0000000 0000000 00000004422 14476041427 016107 0 ustar 00 # Xandikos
# Copyright (C) 2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Server info.
See https://www.ietf.org/archive/id/draft-douglass-server-info-03.txt
"""
import hashlib
from typing import List
from xandikos import version_string, webdav
ET = webdav.ET
# Feature to advertise server-info support.
FEATURE = "server-info"
SERVER_INFO_MIME_TYPE = "application/server-info+xml"
class ServerInfo:
"""Server info."""
def __init__(self) -> None:
self._token = None
self._features: List[str] = []
self._applications: List[str] = []
def add_feature(self, feature):
self._features.append(feature)
self._token = None
@property
def token(self):
if self._token is None:
h = hashlib.sha1()
h.update(version_string.encode("utf-8"))
for z in self._features + self._applications:
h.update(z.encode("utf-8"))
self._token = h.hexdigest()
return self._token
async def get_body(self):
el = ET.Element("{DAV:}server-info")
el.set("token", self.token)
server_el = ET.SubElement(el, "server-instance-info")
ET.SubElement(server_el, "name").text = "Xandikos"
ET.SubElement(server_el, "version").text = version_string
features_el = ET.SubElement(el, "features")
for feature in self._features:
features_el.append(feature)
applications_el = ET.SubElement(el, "applications")
for application in self._applications:
applications_el.append(application)
return el
xandikos_0.2.10.orig/xandikos/store/ 0000755 0000000 0000000 00000000000 13155705321 014337 5 ustar 00 xandikos_0.2.10.orig/xandikos/sync.py 0000644 0000000 0000000 00000012350 14476041427 014541 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Calendar synchronisation.
See https://tools.ietf.org/html/rfc6578
"""
import itertools
import urllib.parse
from xandikos import webdav
ET = webdav.ET
FEATURE = "sync-collection"
class SyncToken:
"""A sync token wrapper."""
def __init__(self, token) -> None:
self.token = token
def aselement(self):
ret = ET.Element("{DAV:}sync-token")
ret.text = self.token
return ret
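# Example: wrapping a store sync token for inclusion in a REPORT response.
#
#   token_el = SyncToken("1234-5678").aselement()
#   # token_el is a {DAV:}sync-token element whose text is "1234-5678"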
class InvalidToken(Exception):
"""Requested token is invalid."""
def __init__(self, token) -> None:
self.token = token
class SyncCollectionReporter(webdav.Reporter):
"""sync-collection reporter implementation.
See https://tools.ietf.org/html/rfc6578, section 3.2.
"""
name = "{DAV:}sync-collection"
@webdav.multistatus # noqa: C901
async def report( # noqa: C901
self,
environ,
request_body,
resources_by_hrefs,
properties,
href,
resource,
depth,
strict
):
old_token = None
sync_level = None
limit = None
requested = None
for el in request_body:
if el.tag == "{DAV:}sync-token":
old_token = el.text
elif el.tag == "{DAV:}sync-level":
sync_level = el.text
elif el.tag == "{DAV:}limit":
limit = el
elif el.tag == "{DAV:}prop":
requested = list(el)
else:
webdav.nonfatal_bad_request(
f"unknown tag {el.tag}", strict)
# TODO(jelmer): Implement sync_level infinite
if sync_level not in ("1",):
raise webdav.BadRequestError(
f"sync level {sync_level!r} unsupported")
new_token = resource.get_sync_token()
try:
try:
diff_iter = resource.iter_differences_since(
old_token, new_token)
except NotImplementedError:
yield webdav.Status(
href,
"403 Forbidden",
error=ET.Element("{DAV:}sync-traversal-supported"),
)
return
if limit is not None:
try:
[nresults_el] = list(limit)
except ValueError:
webdav.nonfatal_bad_request(
"Invalid number of subelements in limit",
strict)
else:
try:
nresults = int(nresults_el.text)
except ValueError:
webdav.nonfatal_bad_request(
"nresults not a number", strict)
else:
diff_iter = itertools.islice(diff_iter, nresults)
for (name, old_resource, new_resource) in diff_iter:
subhref = urllib.parse.urljoin(
webdav.ensure_trailing_slash(href), name)
if new_resource is None:
yield webdav.Status(subhref, status="404 Not Found")
else:
propstat = []
for prop in requested:
if old_resource is not None:
old_propstat = (
await webdav.get_property_from_element(
href, old_resource, properties, environ,
prop))
else:
old_propstat = None
new_propstat = await webdav.get_property_from_element(
href, new_resource, properties, environ, prop
)
if old_propstat != new_propstat:
propstat.append(new_propstat)
yield webdav.Status(subhref, propstat=propstat)
except InvalidToken as exc:
raise webdav.PreconditionFailure(
'{DAV:}valid-sync-token',
f"Requested sync token {exc.token} is invalid") from exc
yield SyncToken(new_token)
class SyncTokenProperty(webdav.Property):
"""sync-token property.
See https://tools.ietf.org/html/rfc6578, section 4
"""
name = "{DAV:}sync-token"
resource_type = webdav.COLLECTION_RESOURCE_TYPE
in_allprops = False
live = True
async def get_value(self, href, resource, el, environ):
el.text = resource.get_sync_token()
xandikos_0.2.10.orig/xandikos/templates/ 0000755 0000000 0000000 00000000000 13063311553 015177 5 ustar 00 xandikos_0.2.10.orig/xandikos/tests/ 0000755 0000000 0000000 00000000000 13043745642 014353 5 ustar 00 xandikos_0.2.10.orig/xandikos/timezones.py 0000644 0000000 0000000 00000003201 14476041427 015575 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Timezone handling.
See http://www.webdav.org/specs/rfc7809.html
"""
from xandikos import webdav
class TimezoneServiceSetProperty(webdav.Property):
"""timezone-service-set property.
See http://www.webdav.org/specs/rfc7809.html, section 5.1
"""
name = "{DAV:}timezone-service-set"
# Should be set on CalDAV calendar home collection resources,
# but Xandikos doesn't have a separate resource type for those.
resource_type = webdav.COLLECTION_RESOURCE_TYPE
in_allprops = False
live = True
def __init__(self, timezone_services) -> None:
super().__init__()
self._timezone_services = timezone_services
async def get_value(self, base_href, resource, el, environ):
for timezone_service_href in self._timezone_services:
el.append(webdav.create_href(timezone_service_href, base_href))
xandikos_0.2.10.orig/xandikos/vcard.py 0000644 0000000 0000000 00000004212 14476041427 014662 0 ustar 00 # Xandikos
# Copyright (C) 2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""VCard file handling."""
from .store import File, InvalidFileContents
class VCardFile(File):
content_type = "text/vcard"
def __init__(self, content, content_type) -> None:
super().__init__(content, content_type)
self._addressbook = None
def validate(self):
c = b"".join(self.content).strip()
# TODO(jelmer): Do more extensive checking of VCards
if (not c.startswith((b"BEGIN:VCARD\r\n", b"BEGIN:VCARD\n"))
or not c.endswith(b"\nEND:VCARD")):
raise InvalidFileContents(
self.content_type,
self.content,
"Missing header and trailer lines",
)
if not self.addressbook.validate():
# TODO(jelmer): Get data about what is invalid
raise InvalidFileContents(
self.content_type,
self.content,
"Invalid VCard file")
@property
def addressbook(self):
if self._addressbook is None:
import vobject
text = b"".join(self.content).decode('utf-8', 'surrogateescape')
try:
self._addressbook = vobject.readOne(text)
except vobject.base.ParseError as exc:
raise InvalidFileContents(
self.content_type, self.content, str(exc)) from exc
return self._addressbook
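# Illustrative sketch: validating a minimal vCard (the card contents below are
# example data).
#
#   card = VCardFile([
#       b"BEGIN:VCARD\r\nVERSION:3.0\r\nFN:Jane Doe\r\n"
#       b"N:Doe;Jane;;;\r\nEND:VCARD\r\n"], "text/vcard")
#   card.validate()  # raises InvalidFileContents if the card is malformed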
xandikos_0.2.10.orig/xandikos/web.py 0000644 0000000 0000000 00000141272 14476041427 014350 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Web server implementation..
This is the concrete web server implementation. It provides the
high level application logic that combines the WebDAV server,
the carddav support, the caldav support and the DAV store.
"""
import asyncio
import functools
import hashlib
import logging
import os
import posixpath
import shutil
import socket
import urllib.parse
from collections.abc import Iterable, Iterator
from email.utils import parseaddr
from typing import Optional
import jinja2
try:
import systemd.daemon
except ImportError:
systemd_imported = False
def get_systemd_listen_sockets() -> list[socket.socket]:
raise NotImplementedError
else:
systemd_imported = True
def get_systemd_listen_sockets() -> list[socket.socket]:
socks = []
for fd in systemd.daemon.listen_fds():
for family in (socket.AF_UNIX, # type: ignore
socket.AF_INET, socket.AF_INET6):
if systemd.daemon.is_socket(fd, family=family,
type=socket.SOCK_STREAM,
listening=True):
sock = socket.fromfd(fd, family, socket.SOCK_STREAM)
socks.append(sock)
break
else:
raise RuntimeError(
"socket family must be AF_INET, AF_INET6, or AF_UNIX; "
"socket type must be SOCK_STREAM; and it must be listening"
)
return socks
from xandikos import __version__ as xandikos_version
from xandikos import (access, apache, caldav, carddav, infit, quota,
scheduling, sync, timezones, webdav, xmpp)
from xandikos.store import (STORE_TYPE_ADDRESSBOOK, STORE_TYPE_CALENDAR,
STORE_TYPE_OTHER, STORE_TYPE_PRINCIPAL,
STORE_TYPE_SCHEDULE_INBOX,
STORE_TYPE_SCHEDULE_OUTBOX,
STORE_TYPE_SUBSCRIPTION, DuplicateUidError, File,
InvalidCTag, InvalidFileContents, LockedError,
NoSuchItem, NotStoreError, OutOfSpaceError, Store)
from .icalendar import CalendarFilter, ICalendarFile
from .store.git import GitStore, TreeGitStore
from .vcard import VCardFile
try:
from asyncio import to_thread # type: ignore
except ImportError: # python < 3.8
import contextvars
from asyncio import events
async def to_thread(func, *args, **kwargs): # type: ignore
loop = events.get_running_loop()
ctx = contextvars.copy_context()
func_call = functools.partial(ctx.run, func, *args, **kwargs)
return await loop.run_in_executor(None, func_call)
WELLKNOWN_DAV_PATHS = {
caldav.WELLKNOWN_CALDAV_PATH,
carddav.WELLKNOWN_CARDDAV_PATH,
}
STORE_CACHE_SIZE = 128
# TODO(jelmer): Make these configurable/dynamic
CALENDAR_HOME_SET = ["calendars"]
ADDRESSBOOK_HOME_SET = ["contacts"]
TEMPLATES_DIR = os.path.join(os.path.dirname(__file__), "templates")
jinja_env = jinja2.Environment(
loader=jinja2.FileSystemLoader(TEMPLATES_DIR), enable_async=True
)
async def render_jinja_page(
name: str, accepted_content_languages: list[str], **kwargs
) -> tuple[Iterable[bytes], int, Optional[str], str, list[str]]:
"""Render a HTML page from jinja template.
Args:
name: Name of the page
accepted_content_languages: List of accepted content languages
Returns: Tuple of (body, content_length, etag, content_type, languages)
"""
# TODO(jelmer): Support rendering other languages
encoding = "utf-8"
template = jinja_env.get_template(name)
body = await template.render_async(
version=xandikos_version, urljoin=urllib.parse.urljoin, **kwargs
)
body_encoded = body.encode(encoding)
return (
[body_encoded],
len(body_encoded),
None,
f"text/html; encoding={encoding}",
["en-UK"],
)
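# Illustrative sketch ("collection.html" and the keyword arguments are
# assumptions; any template under xandikos/templates is rendered the same
# way):
#
#   body, length, etag, content_type, langs = await render_jinja_page(
#       "collection.html", ["en"], collection=some_collection)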
def create_strong_etag(etag: str) -> str:
"""Create strong etags.
Args:
etag: basic etag
Returns: A strong etag
"""
return '"' + etag + '"'
def extract_strong_etag(etag: Optional[str]) -> Optional[str]:
"""Extract a strong etag from a string."""
if etag is None:
return etag
return etag.strip('"')
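# Example round-trip:
#
#   >>> create_strong_etag("abc123")
#   '"abc123"'
#   >>> extract_strong_etag('"abc123"')
#   'abc123'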
class ObjectResource(webdav.Resource):
"""Object resource."""
def __init__(
self,
store: Store,
name: str,
content_type: str,
etag: str,
file: Optional[File] = None,
) -> None:
self.store = store
self.name = name
self.etag = etag
self.content_type = content_type
self._file = file
def __repr__(self) -> str:
return "{}({!r}, {!r}, {!r}, {!r})".format(
type(self).__name__,
self.store,
self.name,
self.etag,
self.get_content_type(),
)
async def get_file(self) -> File:
if self._file is None:
self._file = await to_thread(
self.store.get_file, self.name, self.content_type, self.etag)
assert self._file is not None
return self._file
async def get_body(self) -> Iterable[bytes]:
file = await self.get_file()
return file.content
async def set_body(self, data, replace_etag=None):
try:
(name, etag) = await to_thread(
self.store.import_one,
self.name,
self.content_type,
data,
replace_etag=extract_strong_etag(replace_etag))
except InvalidFileContents as exc:
# TODO(jelmer): Not every invalid file is a calendar file..
raise webdav.PreconditionFailure(
"{%s}valid-calendar-data" % caldav.NAMESPACE,
f"Not a valid calendar file: {exc.error}",
) from exc
except DuplicateUidError as exc:
raise webdav.PreconditionFailure(
"{%s}no-uid-conflict" % caldav.NAMESPACE, "UID already in use."
) from exc
except LockedError as exc:
raise webdav.ResourceLocked() from exc
return create_strong_etag(etag)
def get_content_language(self) -> str:
raise KeyError
def get_content_type(self) -> str:
return self.content_type
async def get_content_length(self) -> int:
return sum(map(len, await self.get_body()))
async def get_etag(self) -> str:
return create_strong_etag(self.etag)
def get_supported_locks(self):
return []
def get_active_locks(self):
return []
def get_owner(self):
return None
def get_comment(self):
raise KeyError
def set_comment(self, comment):
raise NotImplementedError(self.set_comment)
def get_creationdate(self):
# TODO(jelmer): Find creation date using store function
raise KeyError
def get_last_modified(self):
# TODO(jelmer): Find last modified time using store function
raise KeyError
def get_is_executable(self):
# TODO(jelmer): Retrieve POSIX mode and check for executability.
return False
def get_quota_used_bytes(self):
# TODO(jelmer): Ask the store?
raise KeyError
def get_quota_available_bytes(self):
# TODO(jelmer): Ask the store?
raise KeyError
def get_schedule_tag(self):
# TODO(jelmer): Ask the store?
raise KeyError
class StoreBasedCollection:
def __init__(self, backend, relpath, store) -> None:
self.backend = backend
self.relpath = relpath
self.store = store
def __repr__(self) -> str:
return f"{type(self).__name__}({self.store!r})"
def set_resource_types(self, resource_types):
# TODO(jelmer): Allow more than just this set; allow combining
# addressbook/calendar.
resource_types = set(resource_types)
if resource_types == {
caldav.CALENDAR_RESOURCE_TYPE,
webdav.COLLECTION_RESOURCE_TYPE,
}:
self.store.set_type(STORE_TYPE_CALENDAR)
elif resource_types == {
carddav.ADDRESSBOOK_RESOURCE_TYPE,
webdav.COLLECTION_RESOURCE_TYPE,
}:
self.store.set_type(STORE_TYPE_ADDRESSBOOK)
elif resource_types == {webdav.PRINCIPAL_RESOURCE_TYPE}:
self.store.set_type(STORE_TYPE_PRINCIPAL)
elif resource_types == {
caldav.SCHEDULE_INBOX_RESOURCE_TYPE,
webdav.COLLECTION_RESOURCE_TYPE,
}:
self.store.set_type(STORE_TYPE_SCHEDULE_INBOX)
elif resource_types == {
caldav.SCHEDULE_OUTBOX_RESOURCE_TYPE,
webdav.COLLECTION_RESOURCE_TYPE,
}:
self.store.set_type(STORE_TYPE_SCHEDULE_OUTBOX)
elif resource_types == {webdav.COLLECTION_RESOURCE_TYPE}:
self.store.set_type(STORE_TYPE_OTHER)
elif resource_types == {
webdav.COLLECTION_RESOURCE_TYPE,
caldav.SUBSCRIPTION_RESOURCE_TYPE,
}:
self.store.set_type(STORE_TYPE_SUBSCRIPTION)
else:
raise NotImplementedError(self.set_resource_types)
def _get_resource(
self,
name: str,
content_type: str,
etag: str,
file: Optional[File] = None,
) -> webdav.Resource:
return ObjectResource(self.store, name, content_type, etag, file=file)
def _get_subcollection(self, name: str) -> webdav.Collection:
return self.backend.get_resource(posixpath.join(self.relpath, name))
def get_displayname(self) -> str:
displayname = self.store.get_displayname()
if displayname is None:
return os.path.basename(self.store.repo.path)
return displayname
def set_displayname(self, displayname: str) -> None:
self.store.set_displayname(displayname)
def get_sync_token(self) -> str:
return self.store.get_ctag()
def get_ctag(self) -> str:
return self.store.get_ctag()
async def get_etag(self) -> str:
return create_strong_etag(self.store.get_ctag())
def members(self) -> Iterator[tuple[str, webdav.Resource]]:
for (name, content_type, etag) in self.store.iter_with_etag():
resource = self._get_resource(name, content_type, etag)
yield (name, resource)
for (name, resource) in self.subcollections():
yield (name, resource)
def subcollections(self):
for name in self.store.subdirectories():
yield (name, self._get_subcollection(name))
def get_member(self, name):
assert name != ""
for (fname, content_type, fetag) in self.store.iter_with_etag():
if name == fname:
return self._get_resource(name, content_type, fetag)
if name in self.store.subdirectories():
return self._get_subcollection(name)
raise KeyError(name)
def delete_member(self, name, etag=None):
assert name != ""
try:
self.store.delete_one(name, etag=extract_strong_etag(etag))
except NoSuchItem:
# TODO: Properly allow removing subcollections
# self.get_subcollection(name).destroy()
shutil.rmtree(os.path.join(self.store.path, name))
async def create_member(
self, name: str, contents: Iterable[bytes], content_type: str
) -> tuple[str, str]:
try:
(name, etag) = self.store.import_one(name, content_type, contents)
except InvalidFileContents as exc:
# TODO(jelmer): Not every invalid file is a calendar file..
raise webdav.PreconditionFailure(
"{%s}valid-calendar-data" % caldav.NAMESPACE,
f"Not a valid calendar file: {exc.error}",
) from exc
except DuplicateUidError as exc:
raise webdav.PreconditionFailure(
"{%s}no-uid-conflict" % caldav.NAMESPACE, "UID already in use."
) from exc
except OutOfSpaceError as exc:
raise webdav.InsufficientStorage() from exc
except LockedError as exc:
raise webdav.ResourceLocked() from exc
return (name, create_strong_etag(etag))
def iter_differences_since(
self, old_token: str, new_token: str
) -> Iterator[
tuple[str, Optional[webdav.Resource], Optional[webdav.Resource]]]:
old_resource: Optional[webdav.Resource]
new_resource: Optional[webdav.Resource]
try:
for (
name,
content_type,
old_etag,
new_etag,
) in self.store.iter_changes(old_token, new_token):
if old_etag is not None:
old_resource = self._get_resource(
name, content_type, old_etag)
else:
old_resource = None
if new_etag is not None:
new_resource = self._get_resource(
name, content_type, new_etag)
else:
new_resource = None
yield (name, old_resource, new_resource)
except InvalidCTag as exc:
raise sync.InvalidToken(exc.ctag) from exc
def get_owner(self):
return None
def get_supported_locks(self):
return []
def get_active_locks(self):
return []
def get_headervalue(self):
raise KeyError
def get_comment(self):
return self.store.get_comment()
def set_comment(self, comment):
self.store.set_comment(comment)
def get_creationdate(self):
# TODO(jelmer): Find creation date using store function
raise KeyError
def get_last_modified(self):
# TODO(jelmer): Find last modified time using store function
raise KeyError
def get_content_type(self):
return "httpd/unix-directory"
def get_content_language(self):
raise KeyError
async def get_content_length(self):
raise KeyError
def destroy(self) -> None:
# RFC2518, section 8.6.2 says this should recursively delete.
self.store.destroy()
async def get_body(self):
raise NotImplementedError(self.get_body)
async def render(
self, self_url, accepted_content_types, accepted_content_languages
):
content_types = webdav.pick_content_types(
accepted_content_types, ["text/html"])
assert content_types == ["text/html"]
return await render_jinja_page(
"collection.html",
accepted_content_languages,
collection=self,
self_url=self_url,
)
def get_is_executable(self) -> bool:
return False
def get_quota_used_bytes(self):
# TODO(jelmer): Ask the store?
raise KeyError
def get_quota_available_bytes(self):
# TODO(jelmer): Ask the store?
raise KeyError
def get_refreshrate(self):
# TODO(jelmer): Support setting refreshrate
raise KeyError
def set_refreshrate(self, value):
# TODO(jelmer): Store refreshrate
raise NotImplementedError(self.set_refreshrate)
class Collection(StoreBasedCollection, webdav.Collection):
"""A generic WebDAV collection."""
class ScheduleInbox(StoreBasedCollection, scheduling.ScheduleInbox):
"""A schedling inbox collection."""
class ScheduleOutbox(StoreBasedCollection, scheduling.ScheduleOutbox):
"""A schedling outbox collection."""
class SubscriptionCollection(StoreBasedCollection, caldav.Subscription):
def get_source_url(self):
source_url = self.store.get_source_url()
if source_url is None:
raise KeyError
return source_url
def set_source_url(self, url):
self.store.set_source_url(url)
def get_calendar_description(self):
return self.store.get_description()
def get_calendar_color(self):
color = self.store.get_color()
if not color:
raise KeyError
if color and color[0] != "#":
color = "#" + color
return color
def set_calendar_color(self, color):
self.store.set_color(color)
def get_supported_calendar_components(self):
return ["VEVENT", "VTODO", "VJOURNAL", "VFREEBUSY"]
class CalendarCollection(StoreBasedCollection, caldav.Calendar):
def get_calendar_description(self):
return self.store.get_description()
def get_calendar_color(self):
color = self.store.get_color()
if not color:
raise KeyError
if color and color[0] != "#":
color = "#" + color
return color
def set_calendar_color(self, color):
self.store.set_color(color)
def get_calendar_order(self):
order = self.store.config.get_order()
if not order:
raise KeyError
return order
def set_calendar_order(self, order):
self.store.config.set_order(order)
def get_calendar_timezone(self):
# TODO(jelmer): Read from config
raise KeyError
def set_calendar_timezone(self, content):
raise NotImplementedError(self.set_calendar_timezone)
def get_supported_calendar_components(self):
return ["VEVENT", "VTODO", "VJOURNAL", "VFREEBUSY"]
def get_supported_calendar_data_types(self):
return [("text/calendar", "1.0"), ("text/calendar", "2.0")]
def get_max_date_time(self):
return "99991231T235959Z"
def get_min_date_time(self):
return "00010101T000000Z"
def get_max_instances(self):
raise KeyError
def get_max_attendees_per_instance(self):
raise KeyError
def get_max_resource_size(self):
# No resource limit
raise KeyError
def get_max_attachments_per_resource(self):
# No resource limit
raise KeyError
def get_max_attachment_size(self):
# No resource limit
raise KeyError
def get_schedule_calendar_transparency(self):
# TODO(jelmer): Allow configuration in config
return caldav.TRANSPARENCY_OPAQUE
def get_managed_attachments_server_url(self):
# TODO(jelmer)
raise KeyError
def calendar_query(self, create_filter_fn):
filter = create_filter_fn(CalendarFilter)
for (name, file, etag) in self.store.iter_with_filter(filter=filter):
resource = self._get_resource(
name, file.content_type, etag, file=file)
yield (name, resource)
def get_xmpp_heartbeat(self):
# TODO
raise KeyError
def get_xmpp_server(self):
# TODO
raise KeyError
def get_xmpp_uri(self):
# TODO
raise KeyError
class AddressbookCollection(StoreBasedCollection, carddav.Addressbook):
def get_addressbook_description(self):
return self.store.get_description()
def set_addressbook_description(self, description):
self.store.set_description(description)
def get_supported_address_data_types(self):
return [("text/vcard", "3.0")]
def get_max_resource_size(self):
# No resource limit
raise KeyError
def get_max_image_size(self):
# No resource limit
raise KeyError
def set_addressbook_color(self, color):
self.store.set_color(color)
def get_addressbook_color(self):
color = self.store.get_color()
if not color:
raise KeyError
if color and color[0] != "#":
color = "#" + color
return color
class CollectionSetResource(webdav.Collection):
"""Resource for calendar sets."""
def __init__(self, backend, relpath) -> None:
self.backend = backend
self.relpath = relpath
@classmethod
def create(cls, backend, relpath):
path = backend._map_to_file_path(relpath)
if not os.path.isdir(path):
os.makedirs(path)
logging.info("Creating %s", path)
return cls(backend, relpath)
def get_displayname(self):
return posixpath.basename(self.relpath)
def get_sync_token(self):
raise KeyError
async def get_etag(self):
raise KeyError
def get_ctag(self):
raise KeyError
def get_supported_locks(self):
return []
def get_active_locks(self):
return []
def get_owner(self):
return None
def members(self):
p = self.backend._map_to_file_path(self.relpath)
for name in os.listdir(p):
if name.startswith("."):
continue
resource = self.get_member(name)
yield (name, resource)
def get_member(self, name):
assert name != ""
relpath = posixpath.join(self.relpath, name)
p = self.backend._map_to_file_path(relpath)
if not os.path.isdir(p):
raise KeyError(name)
return self.backend.get_resource(relpath)
def get_headervalue(self):
raise KeyError
def get_comment(self):
raise KeyError
def set_comment(self, comment):
raise NotImplementedError(self.set_comment)
def get_content_type(self):
return "httpd/unix-directory"
def get_content_language(self):
raise KeyError
async def get_content_length(self):
raise KeyError
def get_last_modified(self):
# TODO(jelmer): Find last modified time using store function
raise KeyError
def delete_member(self, name, etag=None):
# This doesn't have any non-collection members.
self.get_member(name).destroy()
def destroy(self):
p = self.backend._map_to_file_path(self.relpath)
# RFC2518, section 8.6.2 says this should recursively delete.
shutil.rmtree(p)
async def render(
self, self_url, accepted_content_types, accepted_content_languages
):
content_types = webdav.pick_content_types(
accepted_content_types, ["text/html"])
assert content_types == ["text/html"]
return await render_jinja_page(
"root.html", accepted_content_languages, self_url=self_url
)
def get_is_executable(self):
return False
def get_quota_used_bytes(self):
# TODO(jelmer): Ask the store?
raise KeyError
def get_quota_available_bytes(self):
# TODO(jelmer): Ask the store?
raise KeyError
def get_creationdate(self):
# TODO(jelmer): Find creation date using store function
raise KeyError
class RootPage(webdav.Resource):
"""A non-DAV resource."""
resource_types: list[str] = []
def __init__(self, backend) -> None:
self.backend = backend
def render(self, self_url, accepted_content_types,
accepted_content_languages):
content_types = webdav.pick_content_types(
accepted_content_types, ["text/html"])
assert content_types == ["text/html"]
return render_jinja_page(
"root.html",
accepted_content_languages,
principals=self.backend.find_principals(),
self_url=self_url,
)
async def get_body(self):
raise KeyError
async def get_content_length(self):
raise KeyError
def get_content_type(self):
return "text/html"
def get_supported_locks(self):
return []
def get_active_locks(self):
return []
async def get_etag(self):
h = hashlib.md5()
for c in await self.get_body():
h.update(c)
return h.hexdigest()
def get_last_modified(self):
raise KeyError
def get_content_language(self):
return ["en-UK"]
def get_member(self, name):
return self.backend.get_resource("/" + name)
def delete_member(self, name, etag=None):
# This doesn't have any non-collection members.
self.get_member("/" + name).destroy()
def get_is_executable(self):
return False
def get_quota_used_bytes(self):
# TODO(jelmer): Ask the store?
raise KeyError
def get_quota_available_bytes(self):
# TODO(jelmer): Ask the store?
raise KeyError
class Principal(webdav.Principal):
def get_principal_url(self):
return "."
def get_principal_address(self):
raise KeyError
def get_calendar_home_set(self):
return CALENDAR_HOME_SET
def get_addressbook_home_set(self):
return ADDRESSBOOK_HOME_SET
def get_calendar_user_address_set(self):
# TODO(jelmer): Make this configurable
ret = []
try:
(fullname, email) = parseaddr(os.environ["EMAIL"])
except KeyError:
pass
else:
ret.append("mailto:" + email)
return ret
def set_infit_settings(self, settings):
relpath = posixpath.join(self.relpath, ".infit")
p = self.backend._map_to_file_path(relpath)
with open(p, "w") as f:
f.write(settings)
def get_infit_settings(self):
relpath = posixpath.join(self.relpath, ".infit")
p = self.backend._map_to_file_path(relpath)
if not os.path.exists(p):
raise KeyError
with open(p) as f:
return f.read()
def get_group_membership(self):
"""Get group membership URLs."""
return []
def get_calendar_user_type(self):
# TODO(jelmer)
return scheduling.CALENDAR_USER_TYPE_INDIVIDUAL
def get_calendar_proxy_read_for(self):
# TODO(jelmer)
return []
def get_calendar_proxy_write_for(self):
# TODO(jelmer)
return []
def get_owner(self):
return None
def get_schedule_outbox_url(self):
raise KeyError
def get_schedule_inbox_url(self):
# TODO(jelmer): make this configurable
return "inbox"
def get_creationdate(self):
raise KeyError
class PrincipalBare(CollectionSetResource, Principal):
"""Principal user resource."""
resource_types = [webdav.PRINCIPAL_RESOURCE_TYPE]
@classmethod
def create(cls, backend, relpath):
p = super().create(backend, relpath)
to_create = set()
to_create.update(p.get_addressbook_home_set())
to_create.update(p.get_calendar_home_set())
for n in to_create:
try:
backend.create_collection(posixpath.join(relpath, n))
except FileExistsError:
pass
return p
async def render(
self, self_url, accepted_content_types, accepted_content_languages
):
content_types = webdav.pick_content_types(
accepted_content_types, ["text/html"])
assert content_types == ["text/html"]
return await render_jinja_page(
"principal.html",
accepted_content_languages,
principal=self,
self_url=self_url,
)
def subcollections(self):
# TODO(jelmer): Return members
return []
class PrincipalCollection(Collection, Principal):
"""Principal user resource."""
resource_types = webdav.Collection.resource_types + [
webdav.PRINCIPAL_RESOURCE_TYPE]
@classmethod
def create(cls, backend, relpath):
p = super().create(backend, relpath)
p.store.set_type(STORE_TYPE_PRINCIPAL)
to_create = set()
to_create.update(p.get_addressbook_home_set())
to_create.update(p.get_calendar_home_set())
for n in to_create:
try:
backend.create_collection(posixpath.join(relpath, n))
except FileExistsError:
pass
return p
@functools.lru_cache(maxsize=STORE_CACHE_SIZE)
def open_store_from_path(path: str, **kwargs):
store = GitStore.open_from_path(path, **kwargs)
store.load_extra_file_handler(ICalendarFile)
store.load_extra_file_handler(VCardFile)
return store
class XandikosBackend(webdav.Backend):
def __init__(self, path, *, paranoid: bool = False,
index_threshold: Optional[int] = None) -> None:
self.path = path
self._user_principals: set[str] = set()
self.paranoid = paranoid
self.index_threshold = index_threshold
def _map_to_file_path(self, relpath):
return os.path.join(self.path, relpath.lstrip("/"))
def _mark_as_principal(self, path):
self._user_principals.add(posixpath.normpath(path))
def create_collection(self, relpath):
p = self._map_to_file_path(relpath)
return Collection(self, relpath, TreeGitStore.create(p))
def create_principal(self, relpath, create_defaults=False):
principal = PrincipalBare.create(self, relpath)
self._mark_as_principal(relpath)
if create_defaults:
create_principal_defaults(self, principal)
def find_principals(self):
"""List all of the principals on this server."""
return self._user_principals
def get_resource(self, relpath):
relpath = posixpath.normpath(relpath)
if not relpath.startswith("/"):
raise ValueError("relpath %r should start with /")
if relpath == "/":
return RootPage(self)
p = self._map_to_file_path(relpath)
if p is None:
return None
if os.path.isdir(p):
try:
store = open_store_from_path(
p, double_check_indexes=self.paranoid,
index_threshold=self.index_threshold)
except NotStoreError:
if relpath in self._user_principals:
return PrincipalBare(self, relpath)
return CollectionSetResource(self, relpath)
else:
return {
STORE_TYPE_CALENDAR: CalendarCollection,
STORE_TYPE_ADDRESSBOOK: AddressbookCollection,
STORE_TYPE_PRINCIPAL: PrincipalCollection,
STORE_TYPE_SCHEDULE_INBOX: ScheduleInbox,
STORE_TYPE_SCHEDULE_OUTBOX: ScheduleOutbox,
STORE_TYPE_SUBSCRIPTION: SubscriptionCollection,
STORE_TYPE_OTHER: Collection,
}[store.get_type()](self, relpath, store)
else:
(basepath, name) = os.path.split(relpath)
assert name != "", f"path is {relpath!r}"
store = self.get_resource(basepath)
if store is None:
return None
if webdav.COLLECTION_RESOURCE_TYPE not in store.resource_types:
return None
try:
return store.get_member(name)
except KeyError:
return None
class XandikosApp(webdav.WebDAVApp):
"""A wsgi App that provides a Xandikos web server."""
def __init__(self, backend, current_user_principal, strict=True) -> None:
super().__init__(backend, strict=strict)
def get_current_user_principal(env):
try:
return current_user_principal % env
except KeyError:
return None
self.register_properties(
[
webdav.ResourceTypeProperty(),
webdav.CurrentUserPrincipalProperty(
get_current_user_principal),
webdav.PrincipalURLProperty(),
webdav.DisplayNameProperty(),
webdav.GetETagProperty(),
webdav.GetContentTypeProperty(),
webdav.GetContentLengthProperty(),
webdav.GetContentLanguageProperty(),
caldav.SourceProperty(),
caldav.CalendarHomeSetProperty(),
carddav.AddressbookHomeSetProperty(),
caldav.CalendarDescriptionProperty(),
caldav.CalendarColorProperty(),
caldav.CalendarOrderProperty(),
caldav.SupportedCalendarComponentSetProperty(),
carddav.AddressbookDescriptionProperty(),
carddav.PrincipalAddressProperty(),
webdav.AppleGetCTagProperty(),
webdav.DAVGetCTagProperty(),
carddav.SupportedAddressDataProperty(),
webdav.SupportedReportSetProperty(self.reporters),
sync.SyncTokenProperty(),
caldav.SupportedCalendarDataProperty(),
caldav.CalendarTimezoneProperty(),
caldav.MinDateTimeProperty(),
caldav.MaxDateTimeProperty(),
caldav.MaxResourceSizeProperty(),
carddav.MaxResourceSizeProperty(),
carddav.MaxImageSizeProperty(),
access.CurrentUserPrivilegeSetProperty(),
access.OwnerProperty(),
webdav.CreationDateProperty(),
webdav.SupportedLockProperty(),
webdav.LockDiscoveryProperty(),
infit.AddressbookColorProperty(),
infit.SettingsProperty(),
infit.HeaderValueProperty(),
webdav.CommentProperty(),
scheduling.CalendarUserAddressSetProperty(),
scheduling.ScheduleInboxURLProperty(),
scheduling.ScheduleOutboxURLProperty(),
scheduling.CalendarUserTypeProperty(),
scheduling.ScheduleTagProperty(),
webdav.GetLastModifiedProperty(),
timezones.TimezoneServiceSetProperty([]),
webdav.AddMemberProperty(),
caldav.ScheduleCalendarTransparencyProperty(),
scheduling.ScheduleDefaultCalendarURLProperty(),
caldav.MaxInstancesProperty(),
caldav.MaxAttendeesPerInstanceProperty(),
access.GroupMembershipProperty(),
apache.ExecutableProperty(),
caldav.CalendarProxyReadForProperty(),
caldav.CalendarProxyWriteForProperty(),
caldav.MaxAttachmentSizeProperty(),
caldav.MaxAttachmentsPerResourceProperty(),
caldav.ManagedAttachmentsServerURLProperty(),
quota.QuotaAvailableBytesProperty(),
quota.QuotaUsedBytesProperty(),
webdav.RefreshRateProperty(),
xmpp.XmppUriProperty(),
xmpp.XmppServerProperty(),
xmpp.XmppHeartbeatProperty()
]
)
self.register_reporters(
[
caldav.CalendarMultiGetReporter(),
caldav.CalendarQueryReporter(),
carddav.AddressbookMultiGetReporter(),
carddav.AddressbookQueryReporter(),
webdav.ExpandPropertyReporter(),
sync.SyncCollectionReporter(),
caldav.FreeBusyQueryReporter(),
]
)
self.register_methods(
[
caldav.MkcalendarMethod(),
]
)
def create_principal_defaults(backend, principal):
"""Create default calendar and addressbook for a principal.
Args:
backend: Backend in which the principal exists.
principal: Principal object
"""
calendar_path = posixpath.join(
principal.relpath, principal.get_calendar_home_set()[0], "calendar"
)
try:
resource = backend.create_collection(calendar_path)
except FileExistsError:
pass
else:
resource.store.set_type(STORE_TYPE_CALENDAR)
logging.info("Create calendar in %s.", resource.store.path)
addressbook_path = posixpath.join(
principal.relpath,
principal.get_addressbook_home_set()[0],
"addressbook",
)
try:
resource = backend.create_collection(addressbook_path)
except FileExistsError:
pass
else:
resource.store.set_type(STORE_TYPE_ADDRESSBOOK)
logging.info("Create addressbook in %s.", resource.store.path)
calendar_path = posixpath.join(
principal.relpath, principal.get_schedule_inbox_url()
)
try:
resource = backend.create_collection(calendar_path)
except FileExistsError:
pass
else:
resource.store.set_type(STORE_TYPE_SCHEDULE_INBOX)
logging.info("Create inbox in %s.", resource.store.path)
class RedirectDavHandler:
def __init__(self, dav_root: str) -> None:
self._dav_root = dav_root
async def __call__(self, request):
from aiohttp import web
return web.HTTPFound(self._dav_root)
MDNS_NAME = "Xandikos CalDAV/CardDAV service"
def avahi_register(port: int, path: str):
import avahi
import dbus
bus = dbus.SystemBus()
server = dbus.Interface(
bus.get_object(avahi.DBUS_NAME, avahi.DBUS_PATH_SERVER),
avahi.DBUS_INTERFACE_SERVER,
)
group = dbus.Interface(
bus.get_object(avahi.DBUS_NAME, server.EntryGroupNew()),
avahi.DBUS_INTERFACE_ENTRY_GROUP,
)
for service in ["_carddav._tcp", "_caldav._tcp"]:
try:
group.AddService(
avahi.IF_UNSPEC,
avahi.PROTO_INET,
0,
MDNS_NAME,
service,
"",
"",
port,
avahi.string_array_to_txt_array([f"path={path}"]),
)
except dbus.DBusException as e:
logging.error("Error registering %s: %s", service, e)
group.Commit()
def run_simple_server(
directory: str,
current_user_principal: str,
autocreate: bool = False,
defaults: bool = False,
strict: bool = True,
route_prefix: str = "/",
listen_address: Optional[str] = "::",
port: Optional[int] = 8080,
socket_path: Optional[str] = None) -> None:
"""Simple function to run a Xandikos server.
This function is meant to be used by external code. We'll try our best
not to break API compatibility.
Args:
directory: Directory to store data in ("/tmp/blah")
current_user_principal: Name of current user principal ("/user")
autocreate: Whether to create missing principals and collections
defaults: Whether to create default calendar and addressbook collections
strict: Whether to be strict in *DAV implementation. Set to False for
buggy clients
      route_prefix: Route prefix under which to serve ("/")
listen_address: IP address to listen on (None to disable)
port: TCP Port to listen on (None to disable)
socket_path: Unix domain socket path to listen on (None to disable)
"""
backend = XandikosBackend(directory)
backend._mark_as_principal(current_user_principal)
if autocreate or defaults:
if not os.path.isdir(directory):
os.makedirs(directory)
backend.create_principal(
current_user_principal, create_defaults=defaults
)
if not os.path.isdir(directory):
logging.warning(
"%r does not exist. Run xandikos with --autocreate?",
directory,
)
if not backend.get_resource(current_user_principal):
logging.warning(
"default user principal %s does not exist. "
"Run xandikos with --autocreate?",
current_user_principal,
)
main_app = XandikosApp(
backend,
current_user_principal=current_user_principal,
strict=strict,
)
async def xandikos_handler(request):
return await main_app.aiohttp_handler(request, route_prefix)
if socket_path:
logging.info("Listening on unix domain socket %s", socket_path)
if listen_address and port:
logging.info("Listening on %s:%s", listen_address, port)
from aiohttp import web
app = web.Application()
for path in WELLKNOWN_DAV_PATHS:
app.router.add_route(
"*", path, RedirectDavHandler(route_prefix).__call__
)
if route_prefix.strip("/"):
xandikos_app = web.Application()
xandikos_app.router.add_route("*", "/{path_info:.*}", xandikos_handler)
async def redirect_to_subprefix(request):
return web.HTTPFound(route_prefix)
app.router.add_route("*", "/", redirect_to_subprefix)
app.add_subapp(route_prefix, xandikos_app)
else:
app.router.add_route("*", "/{path_info:.*}", xandikos_handler)
web.run_app(app, port=port, host=listen_address, path=socket_path)
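# Illustrative embedding of run_simple_server() in external code (assuming
# this module is importable as xandikos.web; the paths below are placeholders):
#
#     from xandikos.web import run_simple_server
#
#     run_simple_server(
#         "/tmp/dav",                       # data directory
#         current_user_principal="/user/",
#         autocreate=True,
#         defaults=True,
#         port=8080,
#     )  # blocks and serves CalDAV/CardDAV until interrupted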
async def main(argv=None): # noqa: C901
import argparse
import sys
from xandikos import __version__
parser = argparse.ArgumentParser(
usage="%(prog)s -d ROOT-DIR [OPTIONS]")
parser.add_argument(
"--version",
action="version",
version="%(prog)s " + ".".join(map(str, __version__)),
)
access_group = parser.add_argument_group(title="Access Options")
access_group.add_argument(
"--no-detect-systemd",
action="store_false",
dest="detect_systemd",
help="Disable systemd detection and socket activation.",
default=systemd_imported
)
access_group.add_argument(
"-l",
"--listen-address",
dest="listen_address",
default="localhost",
help=(
"Bind to this address. "
"Pass in path for unix domain socket. [%(default)s]"
),
)
access_group.add_argument(
"-p",
"--port",
dest="port",
type=int,
default=8080,
help="Port to listen on. [%(default)s]",
)
access_group.add_argument(
"--metrics-port",
dest="metrics_port",
default=8081,
help="Port to listen on for metrics. [%(default)s]")
access_group.add_argument(
"--route-prefix",
default="/",
help=(
"Path to Xandikos. "
"(useful when Xandikos is behind a reverse proxy) "
"[%(default)s]"
),
)
parser.add_argument(
"-d",
"--directory",
dest="directory",
default=None,
help="Directory to serve from.",
)
parser.add_argument(
"--current-user-principal",
default="/user/",
help="Path to current user principal. [%(default)s]",
)
parser.add_argument(
"--autocreate",
action="store_true",
dest="autocreate",
help="Automatically create necessary directories.",
)
parser.add_argument(
"--defaults",
action="store_true",
dest="defaults",
help=("Create initial calendar and address book. "
"Implies --autocreate."),
)
parser.add_argument(
"--dump-dav-xml",
action="store_true",
dest="dump_dav_xml",
help="Print DAV XML request/responses.",
)
parser.add_argument(
"--avahi", action="store_true", help="Announce services with avahi."
)
parser.add_argument(
"--no-strict",
action="store_false",
dest="strict",
help=("Enable workarounds for buggy CalDAV/CardDAV client "
"implementations."),
default=True,
)
parser.add_argument(
'--debug', action='store_true',
help='Print debug messages')
# Hidden arguments. These may change without notice in between releases,
# and are generally just meant for developers.
parser.add_argument('--paranoid', action='store_true',
help=argparse.SUPPRESS)
parser.add_argument('--index-threshold', type=int, help=argparse.SUPPRESS)
options = parser.parse_args(argv)
if options.directory is None:
parser.print_usage()
sys.exit(1)
if options.dump_dav_xml:
# TODO(jelmer): Find a way to propagate this without abusing
# os.environ.
os.environ["XANDIKOS_DUMP_DAV_XML"] = "1"
if not options.route_prefix.endswith('/'):
options.route_prefix += '/'
if options.debug:
loglevel = logging.DEBUG
else:
loglevel = logging.INFO
logging.basicConfig(level=loglevel, format='%(message)s')
backend = XandikosBackend(
os.path.abspath(options.directory), paranoid=options.paranoid,
index_threshold=options.index_threshold)
backend._mark_as_principal(options.current_user_principal)
if options.autocreate or options.defaults:
if not os.path.isdir(options.directory):
os.makedirs(options.directory)
backend.create_principal(
options.current_user_principal, create_defaults=options.defaults
)
if not os.path.isdir(options.directory):
logging.warning(
"%r does not exist. Run xandikos with --autocreate?",
options.directory,
)
if not backend.get_resource(options.current_user_principal):
logging.warning(
"default user principal %s does not exist. "
"Run xandikos with --autocreate?",
options.current_user_principal,
)
main_app = XandikosApp(
backend,
current_user_principal=options.current_user_principal,
strict=options.strict,
)
async def xandikos_handler(request):
return await main_app.aiohttp_handler(request, options.route_prefix)
if options.detect_systemd and not systemd_imported:
parser.error(
'systemd detection requested, but unable to find systemd_python')
if options.detect_systemd and systemd.daemon.booted():
listen_socks = get_systemd_listen_sockets()
socket_path = None
listen_address = None
listen_port = None
logging.info(
"Receiving file descriptors from systemd socket activation")
elif "/" in options.listen_address:
socket_path = options.listen_address
listen_address = None
listen_port = None # otherwise aiohttp also listens on default host
listen_socks = []
logging.info("Listening on unix domain socket %s", socket_path)
else:
listen_address = options.listen_address
listen_port = options.port
socket_path = None
listen_socks = []
logging.info("Listening on %s:%s", listen_address, options.port)
from aiohttp import web
app = web.Application()
if options.metrics_port:
metrics_app = web.Application()
try:
from aiohttp_openmetrics import metrics, metrics_middleware
except ModuleNotFoundError:
logging.warning(
"aiohttp-openmetrics not found; "
"/metrics will not be available.")
else:
app.middlewares.insert(0, metrics_middleware)
metrics_app.router.add_get("/metrics", metrics, name="metrics")
# For now, just always claim everything is okay.
metrics_app.router.add_get(
"/health", lambda r: web.Response(text='ok'))
else:
metrics_app = None
for path in WELLKNOWN_DAV_PATHS:
app.router.add_route(
"*", path, RedirectDavHandler(options.route_prefix).__call__
)
if options.route_prefix.strip("/"):
xandikos_app = web.Application()
xandikos_app.router.add_route("*", "/{path_info:.*}", xandikos_handler)
async def redirect_to_subprefix(request):
return web.HTTPFound(options.route_prefix)
app.router.add_route("*", "/", redirect_to_subprefix)
app.add_subapp(options.route_prefix, xandikos_app)
else:
app.router.add_route("*", "/{path_info:.*}", xandikos_handler)
if options.avahi:
try:
import avahi # noqa: F401
import dbus # noqa: F401
except ImportError:
logging.error(
"Please install python-avahi and python-dbus for "
"avahi support."
)
else:
avahi_register(options.port, options.route_prefix)
runner = web.AppRunner(app)
await runner.setup()
sites = []
if metrics_app:
metrics_runner = web.AppRunner(metrics_app)
await metrics_runner.setup()
        # TODO(jelmer): Allow a different metrics listen address?
sites.append(web.TCPSite(metrics_runner, listen_address,
options.metrics_port))
if listen_socks:
sites.extend([web.SockSite(runner, sock) for sock in listen_socks])
if socket_path:
sites.append(web.UnixSite(runner, socket_path))
else:
sites.append(web.TCPSite(runner, listen_address, listen_port))
import signal
# Set SIGINT to default handler; this appears to be necessary
# when running under coverage.
signal.signal(signal.SIGINT, signal.SIG_DFL)
for site in sites:
await site.start()
while True:
await asyncio.sleep(3600)
if __name__ == "__main__":
import sys
sys.exit(asyncio.run(main(sys.argv[1:])))
xandikos_0.2.10.orig/xandikos/webdav.py 0000644 0000000 0000000 00000210153 14476041427 015036 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Abstract WebDAV server implementation..
This module contains an abstract WebDAV server. All caldav/carddav specific
functionality should live in xandikos.caldav/xandikos.carddav respectively.
"""
# TODO(jelmer): Add authorization support
import asyncio
import collections
import fnmatch
import functools
import logging
import os
import posixpath
import urllib.parse
from collections.abc import AsyncIterable, Iterable, Iterator, Sequence
from datetime import datetime
from typing import Callable, Optional, Union, Dict, Type
from wsgiref.util import request_uri
# Hmm, defusedxml doesn't have XML generation functions? :(
from xml.etree import ElementTree as ET
from defusedxml.ElementTree import fromstring as xmlparse
DEFAULT_ENCODING = "utf-8"
COLLECTION_RESOURCE_TYPE = "{DAV:}collection"
PRINCIPAL_RESOURCE_TYPE = "{DAV:}principal"
PropStatus = collections.namedtuple(
"PropStatus", ["statuscode", "responsedescription", "prop"]
)
class BadRequestError(Exception):
"""Base class for bad request errors."""
def __init__(self, message) -> None:
super().__init__(message)
self.message = message
def nonfatal_bad_request(message, strict=False):
if strict:
raise BadRequestError(message)
logging.debug('Bad request: %s', message)
class NotAcceptableError(Exception):
"""Base class for not acceptable errors."""
def __init__(self, available_content_types, acceptable_content_types) -> None:
super().__init__(
f"Unable to convert from content types {available_content_types!r} to one of {acceptable_content_types!r}"
)
self.available_content_types = available_content_types
self.acceptable_content_types = acceptable_content_types
class UnsupportedMediaType(Exception):
"""Base class for unsupported media type errors."""
def __init__(self, content_type) -> None:
super().__init__(
f"Unsupported media type: {content_type!r}"
)
self.content_type = content_type
class UnauthorizedError(Exception):
"""Base class for unauthorized errors."""
def __init__(self) -> None:
super().__init__("Request unauthorized")
class Response:
"""Generic wrapper for HTTP-style responses."""
def __init__(self, status=200, reason="OK", body=None, headers=None) -> None:
if isinstance(status, str):
self.status = int(status.split(" ", 1)[0])
self.reason = status.split(" ", 1)[1]
else:
self.status = status
self.reason = reason
self.body = body or []
if isinstance(headers, dict):
self.headers = list(headers.items())
elif isinstance(headers, list):
self.headers = list(headers)
elif not headers:
self.headers = []
else:
raise TypeError(headers)
def for_wsgi(self, start_response):
start_response("%d %s" % (self.status, self.reason), self.headers)
return self.body
def for_aiohttp(self):
from aiohttp import web
if isinstance(self.body, list):
body = b"".join(self.body)
else:
body = self.body
return web.Response(
status=self.status,
reason=self.reason,
headers=self.headers,
body=body,
)
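# Illustrative construction of a Response (header and body values made up;
# start_response is the callable provided by the WSGI server):
#
#     resp = Response(status=207, reason="Multi-Status",
#                     body=[b"<D:multistatus xmlns:D='DAV:'/>"],
#                     headers={"Content-Type": 'text/xml; charset="utf-8"'})
#     resp.for_aiohttp()              # aiohttp web.Response
#     resp.for_wsgi(start_response)   # body iterable for a WSGI callable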
def pick_content_types(accepted_content_types, available_content_types):
"""Pick best content types for a client.
Args:
      accepted_content_types: Accept variable (as name, params tuples)
      available_content_types: Content types that can be served
Raises:
NotAcceptableError: If there are no overlapping content types
"""
available_content_types = set(available_content_types)
acceptable_by_q = {}
for ct, params in accepted_content_types:
acceptable_by_q.setdefault(float(params.get("q", "1")), []).append(ct)
if 0 in acceptable_by_q:
# Items with q=0 are not acceptable
for pat in acceptable_by_q[0]:
available_content_types -= set(
fnmatch.filter(available_content_types, pat))
del acceptable_by_q[0]
for q, pats in sorted(acceptable_by_q.items(), reverse=True):
ret = []
for pat in pats:
ret.extend(fnmatch.filter(available_content_types, pat))
if ret:
return ret
raise NotAcceptableError(available_content_types, accepted_content_types)
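# For example, with parsed Accept entries (higher q wins, and wildcards are
# matched with fnmatch):
#
#     pick_content_types(
#         [("text/html", {}), ("text/*", {"q": "0.5"})],
#         ["text/html", "text/plain"],
#     )
#     # -> ["text/html"]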
def parse_type(content_type):
"""Parse a content-type style header.
Args:
content_type: type to parse
Returns: Tuple with base name and dict with params
"""
params = {}
try:
(ct, rest) = content_type.split(";", 1)
except ValueError:
ct = content_type
else:
for param in rest.split(";"):
(key, val) = param.split("=")
params[key.strip()] = val.strip()
return (ct, params)
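# For example:
#
#     parse_type("text/html; q=0.9")  # -> ("text/html", {"q": "0.9"})
#     parse_type("text/plain")        # -> ("text/plain", {})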
def parse_accept_header(accept):
"""Parse a HTTP Accept or Accept-Language header.
Args:
accept: Accept header contents
Returns: List of (content_type, params) tuples
"""
ret = []
for part in accept.split(","):
part = part.strip()
if not part:
continue
ret.append(parse_type(part))
return ret
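# For example:
#
#     parse_accept_header("text/html, text/*;q=0.5")
#     # -> [("text/html", {}), ("text/*", {"q": "0.5"})]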
class PreconditionFailure(Exception):
"""A precondition failed."""
def __init__(self, precondition, description) -> None:
self.precondition = precondition
self.description = description
class InsufficientStorage(Exception):
"""Insufficient storage."""
class ResourceLocked(Exception):
"""Resource locked."""
def etag_matches(condition, actual_etag):
"""Check if an etag matches an If-Matches condition.
Args:
      condition: Condition (e.g. '*', '"foo"' or '"foo", "bar"')
      actual_etag: ETag to compare to; None if the resource does not exist
Returns: bool indicating whether condition matches
"""
if actual_etag is None and condition:
return False
for etag in condition.split(","):
if etag.strip(" ") == "*":
return True
if etag.strip(" ") == actual_etag:
return True
return False
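# For example:
#
#     etag_matches('"abc", "def"', '"abc"')  # -> True
#     etag_matches('*', '"anything"')        # -> True
#     etag_matches('"abc"', None)            # -> False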
class NeedsMultiStatus(Exception):
"""Raised when a response needs multi-status (e.g. for propstat)."""
def propstat_by_status(propstat):
"""Sort a list of propstatus objects by HTTP status.
Args:
propstat: List of PropStatus objects:
Returns: dictionary mapping HTTP status code to list of PropStatus objects
"""
bystatus = {}
for propstat in propstat:
(
bystatus.setdefault(
(propstat.statuscode, propstat.responsedescription), []
).append(propstat.prop)
)
return bystatus
def propstat_as_xml(propstat):
"""Format a list of propstats as XML elements.
Args:
propstat: List of PropStatus objects
Returns: Iterator over {DAV:}propstat elements
"""
bystatus = propstat_by_status(propstat)
for (status, rd), props in sorted(bystatus.items()):
propstat = ET.Element("{DAV:}propstat")
ET.SubElement(propstat, "{DAV:}status").text = "HTTP/1.1 " + status
if rd:
ET.SubElement(propstat, "{DAV:}responsedescription").text = rd
propresp = ET.SubElement(propstat, "{DAV:}prop")
for prop in props:
propresp.append(prop)
yield propstat
def path_from_environ(environ, name):
"""Return a path from an environ dict.
Will re-decode using a different encoding as necessary.
"""
# Re-decode using DEFAULT_ENCODING. PEP-3333 says that
# everything will be decoded using iso-8859-1.
# See also https://bugs.python.org/issue16679
path = environ[name].encode("iso-8859-1").decode(DEFAULT_ENCODING)
return posixpath.normpath(path)
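# Illustrative round-trip: PEP-3333 hands paths over as latin-1-decoded text,
# so UTF-8 byte sequences need to be re-decoded:
#
#     environ = {"PATH_INFO": "/caf\xc3\xa9"}   # raw bytes b"/caf\xc3\xa9"
#     path_from_environ(environ, "PATH_INFO")   # -> "/café"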
class Status:
"""A DAV response that can be used in multi-status."""
def __init__(
self,
href,
status=None,
error=None,
responsedescription=None,
propstat=None,
) -> None:
self.href = str(href)
self.status = status
self.error = error
self.propstat = propstat
self.responsedescription = responsedescription
def __repr__(self) -> str:
return "<{}({!r}, {!r}, {!r})>".format(
type(self).__name__,
self.href,
self.status,
self.responsedescription,
)
def get_single_body(self, encoding):
if self.propstat and len(propstat_by_status(self.propstat)) > 1:
raise NeedsMultiStatus()
if self.error is not None:
raise NeedsMultiStatus()
if self.propstat:
[ret] = list(propstat_as_xml(self.propstat))
body = ET.tostringlist(ret, encoding)
return body, (f'text/xml; encoding="{encoding}"')
else:
body = (
[self.responsedescription.encode(encoding)]
if self.responsedescription
else []
)
return body, (f'text/plain; encoding="{encoding}"')
def aselement(self):
ret = ET.Element("{DAV:}response")
ret.append(create_href(self.href))
if self.propstat:
for ps in propstat_as_xml(self.propstat):
ret.append(ps)
elif self.status:
ET.SubElement(ret, "{DAV:}status").text = "HTTP/1.1 " + self.status
# Note the check for "is not None" here. Elements without children
# evaluate to False.
if self.error is not None:
ET.SubElement(ret, "{DAV:}error").append(self.error)
if self.responsedescription:
ET.SubElement(
ret, "{DAV:}responsedescription"
).text = self.responsedescription
return ret
def multistatus(req_fn):
async def wrapper(self, environ, *args, **kwargs):
responses = []
async for resp in req_fn(self, environ, *args, **kwargs):
responses.append(resp)
return _send_dav_responses(responses, DEFAULT_ENCODING)
return wrapper
class Resource:
"""A WebDAV resource."""
# A list of resource type names (e.g. '{DAV:}collection')
resource_types: list[str] = []
# TODO(jelmer): Be consistent in using get/set functions vs properties.
def set_resource_types(self, resource_types: list[str]) -> None:
"""Set the resource types."""
raise NotImplementedError(self.set_resource_types)
def get_displayname(self) -> str:
"""Get the resource display name."""
raise KeyError
def set_displayname(self, displayname: str) -> None:
"""Set the resource display name."""
raise NotImplementedError(self.set_displayname)
def get_creationdate(self) -> datetime:
"""Get the resource creation date.
Returns: A datetime object
"""
raise NotImplementedError(self.get_creationdate)
def get_supported_locks(self) -> list[tuple[str, str]]:
"""Get the list of supported locks.
This should return a list of (lockscope, locktype) tuples.
Known lockscopes are LOCK_SCOPE_EXCLUSIVE, LOCK_SCOPE_SHARED
Known locktypes are LOCK_TYPE_WRITE
"""
raise NotImplementedError(self.get_supported_locks)
def get_active_locks(self) -> list["ActiveLock"]:
"""Return the list of active locks.
Returns: A list of ActiveLock tuples
"""
raise NotImplementedError(self.get_active_locks)
def get_content_type(self) -> str:
"""Get the content type for the resource.
This is a mime type like text/plain
"""
raise NotImplementedError(self.get_content_type)
def get_owner(self) -> str:
"""Get an href identifying the owner of the resource.
Can be None if owner information is not known.
"""
raise NotImplementedError(self.get_owner)
async def get_etag(self) -> str:
"""Get the etag for this resource.
Contains the ETag header value (from Section 14.19 of [RFC2616]) as it
would be returned by a GET without accept headers.
"""
raise NotImplementedError(self.get_etag)
async def get_body(self) -> Iterable[bytes]:
"""Get resource contents.
Returns: Iterable over bytestrings.
"""
raise NotImplementedError(self.get_body)
async def render(
self, self_url: str, accepted_content_types: list[str],
accepted_languages: list[str]) -> tuple[
Iterable[bytes], int, str, str, Optional[str]]:
"""'Render' this resource in the specified content type.
The default implementation just checks that the
        resource's content type is acceptable and if so returns
(get_body(), get_content_type(), get_content_language()).
Args:
accepted_content_types: List of accepted content types
accepted_languages: List of accepted languages
Raises:
NotAcceptableError: if there is no acceptable content type
Returns: Tuple with (content_body, content_length, etag, content_type,
content_language)
"""
# TODO(jelmer): Check content_language
content_types = pick_content_types(
accepted_content_types, [self.get_content_type()]
)
assert content_types == [self.get_content_type()]
body = await self.get_body()
try:
content_language = self.get_content_language()
except KeyError:
content_language = None
return (
body,
sum(map(len, body)),
await self.get_etag(),
self.get_content_type(),
content_language,
)
async def get_content_length(self) -> int:
"""Get content length.
Returns: Length of this objects content.
"""
return sum(map(len, await self.get_body()))
def get_content_language(self) -> str:
"""Get content language.
Returns: Language, as used in HTTP Accept-Language
"""
raise NotImplementedError(self.get_content_language)
async def set_body(
self, body: Iterable[bytes],
replace_etag: Optional[str] = None) -> str:
"""Set resource contents.
Args:
body: Iterable over bytestrings
Returns: New ETag
"""
raise NotImplementedError(self.set_body)
def set_comment(self, comment: str) -> None:
"""Set resource comment.
Args:
comment: New comment
"""
raise NotImplementedError(self.set_comment)
def get_comment(self) -> str:
"""Get resource comment.
Returns: comment
"""
raise NotImplementedError(self.get_comment)
def get_last_modified(self) -> datetime:
"""Get last modified time.
Returns: Last modified time
"""
raise NotImplementedError(self.get_last_modified)
def get_is_executable(self) -> bool:
"""Get executable bit.
Returns: Boolean indicating executability
"""
raise NotImplementedError(self.get_is_executable)
def set_is_executable(self, executable: bool) -> None:
"""Set executable bit.
Args:
executable: Boolean indicating executability
"""
raise NotImplementedError(self.set_is_executable)
def get_quota_used_bytes(self) -> int:
"""Return bytes consumed by this resource.
If unknown, this can raise KeyError.
Returns: an integer
"""
raise NotImplementedError(self.get_quota_used_bytes)
def get_quota_available_bytes(self) -> int:
"""Return quota available as bytes.
This can raise KeyError if there is infinite quota available.
"""
raise NotImplementedError(self.get_quota_available_bytes)
class Property:
"""Handler for listing, retrieving and updating DAV Properties."""
# Property name (e.g. '{DAV:}resourcetype')
name: str
# Whether to include this property in 'allprop' PROPFIND requests.
# https://tools.ietf.org/html/rfc4918, section 14.2
in_allprops: bool = True
# Resource type this property belongs to. If None, get_value()
# will always be called.
resource_type: Optional[Sequence[str]] = None
# Whether this property is live (i.e set by the server)
live: bool
def supported_on(self, resource: Resource) -> bool:
if self.resource_type is None:
return True
if isinstance(self.resource_type, tuple):
return any(
rs in resource.resource_types for rs in self.resource_type)
if self.resource_type in resource.resource_types:
return True
return False
async def is_set(
self, href: str, resource: Resource, environ: dict[str, str]
) -> bool:
"""Check if this property is set on a resource."""
if not self.supported_on(resource):
return False
try:
await self.get_value("/", resource, ET.Element(self.name), environ)
except KeyError:
return False
else:
return True
async def get_value(
self,
href: str,
resource: Resource,
el: ET.Element,
environ: dict[str, str],
) -> None:
"""Get property with specified name.
Args:
href: Resource href
resource: Resource for which to retrieve the property
el: Element to populate
environ: WSGI environment dict
Raises:
KeyError: if this property is not present
"""
raise KeyError(self.name)
async def set_value(
self, href: str, resource: Resource, el: ET.Element) -> None:
"""Set property.
Args:
href: Resource href
resource: Resource to modify
el: Element to get new value from (None to remove property)
Raises:
NotImplementedError: to indicate this property can not be set
(i.e. is protected)
"""
raise NotImplementedError(self.set_value)
class ResourceTypeProperty(Property):
"""Provides {DAV:}resourcetype."""
name = "{DAV:}resourcetype"
resource_type = None
live = True
async def get_value(self, href, resource, el, environ):
for rt in resource.resource_types:
ET.SubElement(el, rt)
async def set_value(self, href, resource, el):
resource.set_resource_types([e.tag for e in el])
class DisplayNameProperty(Property):
"""Provides {DAV:}displayname.
https://tools.ietf.org/html/rfc4918, section 5.2
"""
name = "{DAV:}displayname"
resource_type = None
async def get_value(self, href, resource, el, environ):
el.text = resource.get_displayname()
async def set_value(self, href, resource, el):
resource.set_displayname(el.text)
class GetETagProperty(Property):
"""Provides {DAV:}getetag.
https://tools.ietf.org/html/rfc4918, section 15.6
"""
name = "{DAV:}getetag"
resource_type = None
live = True
async def get_value(self, href, resource, el, environ):
el.text = await resource.get_etag()
ADD_MEMBER_FEATURE = "add-member"
class AddMemberProperty(Property):
"""Provides {DAV:}add-member.
https://tools.ietf.org/html/rfc5995, section 3.2.1
"""
name = "{DAV:}add-member"
resource_type = COLLECTION_RESOURCE_TYPE
live = True
async def get_value(self, href, resource, el, environ):
# Support POST against collection URL
el.append(create_href(".", href))
class GetLastModifiedProperty(Property):
"""Provides {DAV:}getlastmodified.
https://tools.ietf.org/html/rfc4918, section 15.7
"""
name = "{DAV:}getlastmodified"
resource_type = None
live = True
in_allprops = True
async def get_value(self, href, resource, el, environ):
# Use rfc1123 date (section 3.3.1 of RFC2616)
el.text = resource.get_last_modified().strftime(
"%a, %d %b %Y %H:%M:%S GMT")
def format_datetime(dt: datetime) -> bytes:
s = "%04d%02d%02dT%02d%02d%02dZ" % (
dt.year,
dt.month,
dt.day,
dt.hour,
dt.minute,
dt.second,
)
return s.encode("utf-8")
class CreationDateProperty(Property):
"""Provides {DAV:}creationdate.
https://tools.ietf.org/html/rfc4918, section 23.2
"""
name = "{DAV:}creationdate"
resource_type = None
live = True
async def get_value(self, href, resource, el, environ):
el.text = format_datetime(resource.get_creationdate())
class GetContentLanguageProperty(Property):
"""Provides {DAV:}getcontentlanguage.
https://tools.ietf.org/html/rfc4918, section 15.3
"""
name = "{DAV:}getcontentlanguage"
resource_type = None
async def get_value(self, href, resource, el, environ):
el.text = ", ".join(resource.get_content_language())
class GetContentLengthProperty(Property):
"""Provides {DAV:}getcontentlength.
https://tools.ietf.org/html/rfc4918, section 15.4
"""
name = "{DAV:}getcontentlength"
resource_type = None
async def get_value(self, href, resource, el, environ):
el.text = str(await resource.get_content_length())
class GetContentTypeProperty(Property):
"""Provides {DAV:}getcontenttype.
https://tools.ietf.org/html/rfc4918, section 13.5
"""
name = "{DAV:}getcontenttype"
resource_type = None
async def get_value(self, href, resource, el, environ):
el.text = resource.get_content_type()
class CurrentUserPrincipalProperty(Property):
"""Provides {DAV:}current-user-principal.
See https://tools.ietf.org/html/rfc5397
"""
name = "{DAV:}current-user-principal"
resource_type = None
in_allprops = False
live = True
def __init__(self, get_current_user_principal) -> None:
super().__init__()
self.get_current_user_principal = get_current_user_principal
async def get_value(self, href, resource, el, environ):
"""Get property with specified name.
Args:
name: A property name.
"""
current_user_principal = self.get_current_user_principal(environ)
if current_user_principal is None:
ET.SubElement(el, "{DAV:}unauthenticated")
else:
current_user_principal = ensure_trailing_slash(
current_user_principal.lstrip("/")
)
el.append(create_href(
current_user_principal, environ["SCRIPT_NAME"]))
class PrincipalURLProperty(Property):
name = "{DAV:}principal-URL"
resource_type = "{DAV:}principal"
in_allprops = True
live = True
async def get_value(self, href, resource, el, environ):
"""Get property with specified name.
Args:
name: A property name.
"""
el.append(create_href(
ensure_trailing_slash(resource.get_principal_url()), href))
class SupportedReportSetProperty(Property):
name = "{DAV:}supported-report-set"
resource_type = "{DAV:}collection"
in_allprops = False
live = True
def __init__(self, reporters) -> None:
self._reporters = reporters
async def get_value(self, href, resource, el, environ):
for name, reporter in self._reporters.items():
if reporter.supported_on(resource):
bel = ET.SubElement(el, "{DAV:}supported-report")
rel = ET.SubElement(bel, "{DAV:}report")
ET.SubElement(rel, name)
class GetCTagProperty(Property):
"""getctag property."""
name: str
resource_type = COLLECTION_RESOURCE_TYPE
in_allprops = False
live = True
async def get_value(self, href, resource, el, environ):
el.text = resource.get_ctag()
class DAVGetCTagProperty(GetCTagProperty):
"""getctag property."""
name = "{DAV:}getctag"
class AppleGetCTagProperty(GetCTagProperty):
"""getctag property."""
name = "{http://calendarserver.org/ns/}getctag"
class RefreshRateProperty(Property):
"""refreshrate property.
(no public documentation, but contains an ical-style frequency indicator)
"""
name = "{http://calendarserver.org/ns/}refreshrate"
resource_type = COLLECTION_RESOURCE_TYPE
in_allprops = False
async def get_value(self, href, resource, el, environ):
el.text = resource.get_refreshrate()
async def set_value(self, href, resource, el):
resource.set_refreshrate(el.text)
LOCK_SCOPE_EXCLUSIVE = "{DAV:}exclusive"
LOCK_SCOPE_SHARED = "{DAV:}shared"
LOCK_TYPE_WRITE = "{DAV:}write"
ActiveLock = collections.namedtuple(
"ActiveLock",
[
"lockscope",
"locktype",
"depth",
"owner",
"timeout",
"locktoken",
"lockroot",
],
)
class Collection(Resource):
"""Resource for a WebDAV Collection."""
resource_types = Resource.resource_types + [COLLECTION_RESOURCE_TYPE]
def members(self) -> Iterable[tuple[str, Resource]]:
"""List all members.
Returns: List of (name, Resource) tuples
"""
raise NotImplementedError(self.members)
def get_member(self, name: str) -> Resource:
"""Retrieve a member by name.
        Args:
name: Name of member to retrieve
Returns:
A Resource
"""
raise NotImplementedError(self.get_member)
def delete_member(self, name: str, etag: Optional[str] = None) -> None:
"""Delete a member with a specific name.
Args:
name: Member name
etag: Optional required etag
Raises:
KeyError: when the item doesn't exist
"""
raise NotImplementedError(self.delete_member)
async def create_member(
self, name: str, contents: Iterable[bytes],
content_type: str) -> tuple[str, str]:
"""Create a new member with specified name and contents.
Args:
name: Member name (can be None)
contents: Chunked contents
            content_type: Content type of the new member
Returns: (name, etag) for the new member
"""
raise NotImplementedError(self.create_member)
def get_sync_token(self) -> str:
"""Get sync-token for the current state of this collection."""
raise NotImplementedError(self.get_sync_token)
def iter_differences_since(
self, old_token: str, new_token: str) -> Iterator[
tuple[str, Optional[Resource], Optional[Resource]]]:
"""Iterate over differences in this collection.
Should return an iterator over (name, old resource, new resource)
tuples. If one of the two didn't exist previously or now, they should
be None.
If old_token is None, this should return the full contents of the
collection.
May raise NotImplementedError if iterating differences is not
supported.
"""
raise NotImplementedError(self.iter_differences_since)
def get_ctag(self) -> str:
raise NotImplementedError(self.get_ctag)
def get_headervalue(self) -> str:
raise NotImplementedError(self.get_headervalue)
def destroy(self) -> None:
"""Destroy this collection itself."""
raise NotImplementedError(self.destroy)
def set_refreshrate(self, value: Optional[str]) -> None:
"""Set the recommended refresh rate for this collection.
Args:
value: Refresh rate (None to remove)
"""
raise NotImplementedError(self.set_refreshrate)
def get_refreshrate(self) -> str:
"""Get the recommended refresh rate.
Returns: Recommended refresh rate
Raises:
KeyError: if there is no refresh rate set
"""
raise NotImplementedError(self.get_refreshrate)
class Principal(Resource):
"""Resource for a DAV Principal."""
resource_types = Resource.resource_types + [PRINCIPAL_RESOURCE_TYPE]
def get_principal_url(self) -> str:
"""Return the principal URL for this principal.
Returns: A URL identifying this principal.
"""
raise NotImplementedError(self.get_principal_url)
def get_infit_settings(self) -> str:
"""Return inf-it settings string."""
raise NotImplementedError(self.get_infit_settings)
def set_infit_settings(self, settings: Optional[str]) -> None:
"""Set inf-it settings string."""
raise NotImplementedError(self.set_infit_settings)
def get_group_membership(self) -> list[str]:
"""Get group membership URLs."""
raise NotImplementedError(self.get_group_membership)
def get_calendar_proxy_read_for(self) -> list[str]:
"""List principals for which this one is a read proxy.
Returns: List of principal hrefs
"""
raise NotImplementedError(self.get_calendar_proxy_read_for)
def get_calendar_proxy_write_for(self) -> list[str]:
"""List principals for which this one is a write proxy.
Returns: List of principal hrefs
"""
raise NotImplementedError(self.get_calendar_proxy_write_for)
def get_schedule_inbox_url(self) -> str:
raise NotImplementedError(self.get_schedule_inbox_url)
def get_schedule_outbox_url(self) -> str:
raise NotImplementedError(self.get_schedule_outbox_url)
async def get_property_from_name(
href: str, resource: Resource, properties, name: str, environ
):
"""Get a single property on a resource.
Args:
href: Resource href
resource: Resource object
properties: Dictionary of properties
environ: WSGI environ dict
name: name of property to resolve
Returns: A PropStatus object
"""
return await get_property_from_element(
href, resource, properties, environ, ET.Element(name)
)
async def get_property_from_element(
href: str,
resource: Resource,
properties: dict[str, Property],
environ,
requested: ET.Element,
) -> PropStatus:
"""Get a single property on a resource.
Args:
href: Resource href
resource: Resource object
properties: Dictionary of properties
environ: WSGI environ dict
requested: Requested element
Returns: A PropStatus object
"""
responsedescription = None
ret = ET.Element(requested.tag)
try:
prop = properties[requested.tag]
except KeyError:
statuscode = "404 Not Found"
logging.warning(
"Client requested unknown property %s on %s (%r)",
requested.tag,
href,
resource.resource_types,
)
else:
try:
if not prop.supported_on(resource):
raise KeyError
if hasattr(prop, 'get_value_ext'):
await prop.get_value_ext( # type: ignore
href, resource, ret, environ, requested)
else:
await prop.get_value(href, resource, ret, environ)
except KeyError:
statuscode = "404 Not Found"
except NotImplementedError:
logging.exception(
"Not implemented while getting %s for %r",
requested.tag,
resource,
)
statuscode = "501 Not Implemented"
else:
statuscode = "200 OK"
return PropStatus(statuscode, responsedescription, ret)
async def get_properties(
href: str,
resource: Resource,
properties: dict[str, Property],
environ,
requested: ET.Element,
) -> AsyncIterable[PropStatus]:
"""Get a set of properties.
Args:
href: Resource Href
resource: Resource object
properties: Dictionary of properties
requested: XML {DAV:}prop element with properties to look up
environ: WSGI environ dict
Returns: Iterator over PropStatus items
"""
for propreq in list(requested):
yield await get_property_from_element(
href, resource, properties, environ, propreq
)
async def get_property_names(
href: str,
resource: Resource,
properties: dict[str, Property],
environ,
requested: ET.Element,
) -> AsyncIterable[PropStatus]:
"""Get a set of property names.
Args:
href: Resource Href
resource: Resource object
properties: Dictionary of properties
environ: WSGI environ dict
requested: XML {DAV:}prop element with properties to look up
Returns: Iterator over PropStatus items
"""
for name, prop in properties.items():
if await prop.is_set(href, resource, environ):
yield PropStatus("200 OK", None, ET.Element(name))
async def get_all_properties(
href: str, resource: Resource, properties: dict[str, Property], environ
) -> AsyncIterable[PropStatus]:
"""Get all properties.
Args:
href: Resource Href
resource: Resource object
properties: Dictionary of properties
requested: XML {DAV:}prop element with properties to look up
environ: WSGI environ dict
Returns: Iterator over PropStatus items
"""
for name in properties:
ps = await get_property_from_name(
href, resource, properties, name, environ)
if ps.statuscode == "200 OK":
yield ps
def ensure_trailing_slash(href: str) -> str:
"""Ensure that a href has a trailing slash.
Useful for collection hrefs, e.g. when used with urljoin.
Args:
href: href to possibly add slash to
Returns: href with trailing slash
"""
if href.endswith("/"):
return href
return href + "/"
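# Example (doctest-style, illustrative):
#
#   >>> ensure_trailing_slash("/calendars/user1")
#   '/calendars/user1/'
#   >>> ensure_trailing_slash("/calendars/user1/")
#   '/calendars/user1/'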
async def traverse_resource(
base_resource: Resource,
base_href: str,
depth: str,
members: Optional[
Callable[[Collection], Iterable[tuple[str, Resource]]]] = None,
) -> AsyncIterable[tuple[str, Resource]]:
"""Traverse a resource.
Args:
base_resource: Resource to traverse from
base_href: href for base resource
depth: Depth ("0", "1", "infinity")
members: Function to use to get members of each
collection.
Returns: Iterator over (URL, Resource) tuples
"""
if members is None:
def members_fn(c):
return c.members()
else:
members_fn = members
todo = collections.deque([(base_href, base_resource, depth)])
while todo:
(href, resource, depth) = todo.popleft()
if COLLECTION_RESOURCE_TYPE in resource.resource_types:
# caldavzap/carddavmate require this
# https://tools.ietf.org/html/rfc4918#section-5.2
# mentions that a trailing slash *SHOULD* be added for
# collections.
href = ensure_trailing_slash(href)
yield (href, resource)
if depth == "0":
continue
elif depth == "1":
nextdepth = "0"
elif depth == "infinity":
nextdepth = "infinity"
else:
raise AssertionError(f"invalid depth {depth!r}")
if COLLECTION_RESOURCE_TYPE in resource.resource_types:
for (child_name, child_resource) in members_fn(resource):
child_href = urllib.parse.urljoin(href, child_name)
todo.append((child_href, child_resource, nextdepth))
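# Depth semantics follow RFC 4918: "0" yields only the base resource, "1"
# yields the base resource plus its direct members, and "infinity" recurses
# into nested collections. A usage sketch (hrefs are hypothetical):
#
#   async for href, resource in traverse_resource(
#           calendar_collection, "/user1/calendar/", "1"):
#       ...  # "/user1/calendar/" followed by each member href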
class Reporter:
"""Implementation for DAV REPORT requests."""
name: str
resource_type: Optional[Union[str, tuple]] = None
def supported_on(self, resource: Resource) -> bool:
"""Check if this reporter is available for the specified resource.
Args:
resource: Resource to check for
Returns: boolean indicating whether this reporter is available
"""
if self.resource_type is None:
return True
if isinstance(self.resource_type, tuple):
return any(
rs in resource.resource_types for rs in self.resource_type)
return self.resource_type in resource.resource_types
async def report(
self,
environ: dict[str, str],
request_body: ET.Element,
resources_by_hrefs:
Callable[[Iterable[str]], Iterable[tuple[str, Resource]]],
properties: dict[str, Property],
href: str,
resource: Resource,
depth: str,
strict: bool
) -> Status:
"""Send a report.
Args:
environ: wsgi environ
request_body: XML Element for request body
resources_by_hrefs: Function for retrieving resource by HREF
properties: Dictionary mapping names to DAVProperty instances
href: Base resource href
resource: Resource to start from
depth: Depth ("0", "1", ...)
strict: whether to be strict about malformed requests (raise instead of logging)
Returns: a response
"""
raise NotImplementedError(self.report)
def create_href(href: str, base_href: Optional[str] = None) -> ET.Element:
parsed_url = urllib.parse.urlparse(href)
if "//" in parsed_url.path:
logging.warning("invalidly formatted href: %s", href)
et = ET.Element("{DAV:}href")
if base_href is not None:
href = urllib.parse.urljoin(ensure_trailing_slash(base_href), href)
et.text = urllib.parse.quote(href)
return et
def read_href_element(et: ET.Element) -> Optional[str]:
if et.text is None:
return None
el = urllib.parse.unquote(et.text)
parsed_url = urllib.parse.urlsplit(el)
# TODO(jelmer): Check that the hostname matches the local hostname?
return parsed_url.path
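# Example (illustrative): create_href resolves against a base and
# percent-encodes the result; read_href_element undoes the quoting and
# returns only the path component.
#
#   >>> create_href("my calendar/", "/dav/user1/").text
#   '/dav/user1/my%20calendar/'
#   >>> read_href_element(create_href("my calendar/", "/dav/user1/"))
#   '/dav/user1/my calendar/'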
class ExpandPropertyReporter(Reporter):
"""A expand-property reporter.
See https://tools.ietf.org/html/rfc3253, section 3.8
"""
name = "{DAV:}expand-property"
async def _populate(
self,
prop_list: ET.Element,
resources_by_hrefs:
Callable[[Iterable[str]], list[tuple[str, Resource]]],
properties: dict[str, Property],
href: str,
resource: Resource,
environ,
strict
) -> AsyncIterable[Status]:
"""Expand properties for a resource.
Args:
prop_list: DAV:property elements to retrieve and expand
resources_by_hrefs: Resolve resource by HREF
properties: Available properties
href: href for current resource
resource: current resource
environ: WSGI environ dict
strict: whether to be strict about malformed requests
Returns: Iterator over Status objects
"""
ret = []
for prop in prop_list:
prop_name = prop.get("name")
if prop_name is None:
nonfatal_bad_request(
f"Tag {prop.tag} without name attribute",
strict)
continue
# FIXME: Resolve prop_name on resource
propstat = await get_property_from_name(
href, resource, properties, prop_name, environ
)
new_prop = ET.Element(propstat.prop.tag)
child_hrefs = filter(
None,
[
read_href_element(prop_child)
for prop_child in propstat.prop
if prop_child.tag == "{DAV:}href"
],
)
child_resources = resources_by_hrefs(child_hrefs)
for prop_child in propstat.prop:
if prop_child.tag != "{DAV:}href":
new_prop.append(prop_child)
else:
child_href = read_href_element(prop_child)
if child_href is None:
nonfatal_bad_request(
f"Tag {prop_child.tag} without valid href",
strict)
continue
child_resource = dict(child_resources).get(child_href)
if child_resource is None:
# FIXME: What to do if the referenced href is invalid?
# For now, let's just keep the unresolved href around
new_prop.append(prop_child)
else:
async for response in self._populate(
prop,
resources_by_hrefs,
properties,
child_href,
child_resource,
environ,
strict
):
new_prop.append(response.aselement())
propstat = PropStatus(
propstat.statuscode,
propstat.responsedescription,
prop=new_prop,
)
ret.append(propstat)
yield Status(href, "200 OK", propstat=ret)
@multistatus
async def report(
self,
environ,
request_body,
resources_by_hrefs,
properties,
href,
resource,
depth,
strict
):
async for resp in self._populate(
request_body,
resources_by_hrefs,
properties,
href,
resource,
environ,
strict
):
yield resp
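# Example request body handled by this reporter (see RFC 3253, section 3.8).
# Note that, as implemented above, the name attribute is expected in Clark
# notation; the property names below are only an illustration:
#
#   <D:expand-property xmlns:D="DAV:">
#     <D:property name="{DAV:}principal-URL">
#       <D:property name="{DAV:}displayname"/>
#     </D:property>
#   </D:expand-property>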
class SupportedLockProperty(Property):
"""supportedlock property.
See rfc4918, section 15.10.
"""
name = "{DAV:}supportedlock"
resource_type = None
live = True
async def get_value(self, href, resource, el, environ):
for (lockscope, locktype) in resource.get_supported_locks():
entry = ET.SubElement(el, "{DAV:}lockentry")
scope_el = ET.SubElement(entry, "{DAV:}lockscope")
ET.SubElement(scope_el, lockscope)
type_el = ET.SubElement(entry, "{DAV:}locktype")
ET.SubElement(type_el, locktype)
class LockDiscoveryProperty(Property):
"""lockdiscovery property.
See rfc4918, section 15.8
"""
name = "{DAV:}lockdiscovery"
resource_type = None
live = True
async def get_value(self, href, resource, el, environ):
for activelock in resource.get_active_locks():
entry = ET.SubElement(el, "{DAV:}activelock")
type_el = ET.SubElement(entry, "{DAV:}locktype")
ET.SubElement(type_el, activelock.locktype)
scope_el = ET.SubElement(entry, "{DAV:}lockscope")
ET.SubElement(scope_el, activelock.lockscope)
ET.SubElement(entry, "{DAV:}depth").text = str(activelock.depth)
if activelock.owner:
ET.SubElement(entry, "{DAV:}owner").text = activelock.owner
if activelock.timeout:
ET.SubElement(entry, "{DAV:}timeout").text = activelock.timeout
if activelock.locktoken:
locktoken_el = ET.SubElement(entry, "{DAV:}locktoken")
locktoken_el.append(create_href(activelock.locktoken))
if activelock.lockroot:
lockroot_el = ET.SubElement(entry, "{DAV:}lockroot")
lockroot_el.append(create_href(activelock.lockroot))
class CommentProperty(Property):
"""comment property.
See RFC3253, section 3.1.1
"""
name = "{DAV:}comment"
live = False
in_allprops = False
async def get_value(self, href, resource, el, environ):
el.text = resource.get_comment()
async def set_value(self, href, resource, el):
resource.set_comment(el.text)
class Backend:
"""WebDAV backend."""
def create_collection(self, relpath):
"""Create a collection with the specified relpath.
Args:
relpath: Collection path
"""
raise NotImplementedError(self.create_collection)
def get_resource(self, relpath):
raise NotImplementedError(self.get_resource)
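# A minimal in-memory sketch of a Backend implementation (the class and its
# attributes are hypothetical, for illustration only):
#
#   class DictBackend(Backend):
#       def __init__(self, resources):
#           self._resources = resources  # maps path -> Resource
#
#       def create_collection(self, relpath):
#           raise NotImplementedError(self.create_collection)
#
#       def get_resource(self, relpath):
#           return self._resources.get(relpath)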
def _get_resources_by_hrefs(backend, environ, hrefs):
"""Retrieve multiple resources by href.
Args:
backend: backend from which to retrieve resources
environ: Environment dictionary
hrefs: List of hrefs to resolve
Returns: iterator over (href, resource) tuples
"""
script_name = environ["SCRIPT_NAME"]
# TODO(jelmer): Bulk query hrefs in a more efficient manner
for href in hrefs:
if not href.startswith(script_name):
resource = None
else:
path = href[len(script_name):]
if not path.startswith("/"):
path = "/" + path
resource = backend.get_resource(path)
yield (href, resource)
def _send_xml_response(status, et, out_encoding):
body_type = f'text/xml; charset="{out_encoding}"'
if os.environ.get("XANDIKOS_DUMP_DAV_XML"):
print("OUT: " + ET.tostring(et).decode("utf-8"))
body = ET.tostringlist(et, encoding=out_encoding)
return Response(
status=status,
body=body,
headers={
"Content-Type": body_type,
"Content-Length": str(sum(map(len, body))),
},
)
def _send_dav_responses(responses, out_encoding):
if isinstance(responses, Status):
try:
(body, body_type) = responses.get_single_body(out_encoding)
except NeedsMultiStatus:
responses = [responses]
else:
return Response(
status=responses.status,
headers={
"Content-Type": body_type,
"Content-Length": str(sum(map(len, body))),
},
body=body,
)
ret = ET.Element("{DAV:}multistatus")
for response in responses:
ret.append(response.aselement())
return _send_xml_response("207 Multi-Status", ret, out_encoding)
def _send_simple_dav_error(request, statuscode, error, description):
status = Status(
request.url, statuscode, error=error, responsedescription=description
)
return _send_dav_responses(status, DEFAULT_ENCODING)
def _send_not_found(request):
body = [b"Path " + request.path.encode(DEFAULT_ENCODING) + b" not found."]
return Response(body=body, status=404, reason="Not Found")
def _send_method_not_allowed(allowed_methods):
return Response(
status=405,
reason="Method Not Allowed",
headers={"Allow": ", ".join(allowed_methods)},
)
async def apply_modify_prop(el, href, resource, properties):
"""Apply property set/remove operations.
Args:
el: {DAV:}set or {DAV:}remove element to apply
href: Resource href
resource: Resource to apply property modifications on
properties: Known properties
Returns: PropStatus objects
"""
if el.tag not in ("{DAV:}set", "{DAV:}remove"):
# callers should check tag
raise AssertionError
try:
[requested] = el
except ValueError as exc:
raise BadRequestError(
"Received more than one element in {DAV:}set element.") from exc
if requested.tag != "{DAV:}prop":
raise BadRequestError("Expected prop tag, got " + requested.tag)
for propel in requested:
try:
handler = properties[propel.tag]
except KeyError:
logging.warning(
"client attempted to modify unknown property %r on %r",
propel.tag,
href,
)
yield PropStatus("404 Not Found", None, ET.Element(propel.tag))
else:
if el.tag == "{DAV:}remove":
newval = None
elif el.tag == "{DAV:}set":
newval = propel
else:
raise AssertionError
if not handler.supported_on(resource):
statuscode = "404 Not Found"
else:
try:
await handler.set_value(href, resource, newval)
except NotImplementedError:
# TODO(jelmer): Signal
# {DAV:}cannot-modify-protected-property error
statuscode = "409 Conflict"
else:
statuscode = "200 OK"
yield PropStatus(statuscode, None, ET.Element(propel.tag))
async def _readBody(request):
return [await request.content.read()]
async def _readXmlBody(
request, expected_tag: Optional[str] = None, strict: bool = True
):
content_type = request.content_type
base_content_type, params = parse_type(content_type)
if strict and base_content_type not in ("text/xml", "application/xml"):
raise UnsupportedMediaType(content_type)
body = b"".join(await _readBody(request))
if os.environ.get("XANDIKOS_DUMP_DAV_XML"):
print("IN: " + body.decode("utf-8"))
try:
et = xmlparse(body)
except ET.ParseError as exc:
raise BadRequestError("Unable to parse body.") from exc
if expected_tag is not None and et.tag != expected_tag:
raise BadRequestError(
f"Expected {expected_tag} tag, got {et.tag}")
return et
class Method:
@property
def name(self):
return type(self).__name__.upper()[:-6]
async def handle(self, request, environ, app):
raise NotImplementedError(self.handle)
def allow(self, request):
"""Is this method allowed considering the specified request?"""
return True
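# The HTTP method name is derived from the class name by stripping the
# trailing "Method", e.g. (illustrative):
#
#   >>> DeleteMethod().name
#   'DELETE'
#   >>> PropfindMethod().name
#   'PROPFIND'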
class DeleteMethod(Method):
async def handle(self, request, environ, app):
unused_href, path, r = app._get_resource_from_environ(request, environ)
if r is None:
return _send_not_found(request)
container_path, item_name = posixpath.split(path.rstrip("/"))
pr = app.backend.get_resource(container_path)
if pr is None:
return _send_not_found(request)
current_etag = await r.get_etag()
if_match = request.headers.get("If-Match", None)
if if_match is not None and not etag_matches(if_match, current_etag):
return Response(status=412, reason="Precondition Failed")
pr.delete_member(item_name, current_etag)
return Response(status=204, reason="No Content")
class PostMethod(Method):
async def handle(self, request, environ, app):
# see RFC5995
new_contents = await _readBody(request)
unused_href, path, r = app._get_resource_from_environ(request, environ)
if r is None:
return _send_not_found(request)
if COLLECTION_RESOURCE_TYPE not in r.resource_types:
return _send_method_not_allowed(app._get_allowed_methods(request))
content_type, params = parse_type(request.content_type)
try:
(name, etag) = await r.create_member(
None, new_contents, content_type)
except PreconditionFailure as e:
return _send_simple_dav_error(
request,
"412 Precondition Failed",
error=ET.Element(e.precondition),
description=e.description,
)
except InsufficientStorage:
return Response(status=507, reason="Insufficient Storage")
except ResourceLocked:
return Response(status=423, reason="Resource Locked")
href = environ["SCRIPT_NAME"] + urllib.parse.urljoin(
ensure_trailing_slash(path), name
)
return Response(headers={"Location": href})
class PutMethod(Method):
async def handle(self, request, environ, app):
new_contents = await _readBody(request)
unused_href, path, r = app._get_resource_from_environ(request, environ)
if r is not None:
current_etag = await r.get_etag()
else:
current_etag = None
if_match = request.headers.get("If-Match", None)
if if_match is not None and not etag_matches(if_match, current_etag):
return Response(status="412 Precondition Failed")
if_none_match = request.headers.get("If-None-Match", None)
if if_none_match and etag_matches(if_none_match, current_etag):
return Response(status="412 Precondition Failed")
if r is not None:
# Item already exists; update it
try:
new_etag = await r.set_body(new_contents, current_etag)
except ResourceLocked:
return Response(status=423, reason="Resource Locked")
except PreconditionFailure as e:
return _send_simple_dav_error(
request,
"412 Precondition Failed",
error=ET.Element(e.precondition),
description=e.description,
)
except NotImplementedError:
return _send_method_not_allowed(
app._get_allowed_methods(request))
else:
return Response(
status="204 No Content", headers=[("ETag", new_etag)])
content_type = request.content_type
container_path, name = posixpath.split(path)
r = app.backend.get_resource(container_path)
if r is None:
return _send_not_found(request)
if COLLECTION_RESOURCE_TYPE not in r.resource_types:
return _send_method_not_allowed(app._get_allowed_methods(request))
try:
(new_name, new_etag) = await r.create_member(
name, new_contents, content_type)
except PreconditionFailure as e:
return _send_simple_dav_error(
request,
"412 Precondition Failed",
error=ET.Element(e.precondition),
description=e.description,
)
except InsufficientStorage:
return Response(status=507, reason="Insufficient Storage")
except ResourceLocked:
return Response(status=423, reason="Resource Locked")
return Response(
status=201, reason="Created", headers=[("ETag", new_etag)])
class ReportMethod(Method):
async def handle(self, request, environ, app):
# See https://tools.ietf.org/html/rfc3253, section 3.6
base_href, unused_path, r = app._get_resource_from_environ(
request, environ)
if r is None:
return _send_not_found(request)
depth = request.headers.get("Depth", "0")
et = await _readXmlBody(request, None, strict=app.strict)
try:
reporter = app.reporters[et.tag]
except KeyError:
logging.warning("Client requested unknown REPORT %s", et.tag)
return _send_simple_dav_error(
request,
"403 Forbidden",
error=ET.Element("{DAV:}supported-report"),
description=f"Unknown report {et.tag}.",
)
if not reporter.supported_on(r):
return _send_simple_dav_error(
request,
"403 Forbidden",
error=ET.Element("{DAV:}supported-report"),
description=f"Report {et.tag} not supported on resource.",
)
try:
return await reporter.report(
environ,
et,
functools.partial(
_get_resources_by_hrefs, app.backend, environ),
app.properties,
base_href,
r,
depth,
app.strict
)
except PreconditionFailure as e:
return _send_simple_dav_error(
request,
"412 Precondition Failed",
error=ET.Element(e.precondition),
description=e.description,
)
class PropfindMethod(Method):
@multistatus
async def handle(self, request, environ, app):
base_href, unused_path, base_resource = app._get_resource_from_environ(
request, environ
)
if base_resource is None:
yield Status(request.url, "404 Not Found")
return
# Default depth is infinity, per RFC2518
depth = request.headers.get("Depth", "infinity")
if not request.can_read_body:
requested = None
else:
et = await _readXmlBody(
request, "{DAV:}propfind", strict=app.strict)
try:
[requested] = et
except ValueError as exc:
raise BadRequestError(
"Received more than one element in propfind.") from exc
async for href, resource in traverse_resource(
base_resource, base_href, depth):
propstat = []
if requested is None or requested.tag == "{DAV:}allprop":
propstat = get_all_properties(
href, resource, app.properties, environ)
elif requested.tag == "{DAV:}prop":
propstat = get_properties(
href, resource, app.properties, environ, requested
)
elif requested.tag == "{DAV:}propname":
propstat = get_property_names(
href, resource, app.properties, environ, requested
)
else:
nonfatal_bad_request(
"Expected prop/allprop/propname tag, got " + requested.tag,
app.strict
)
continue
yield Status(href, "200 OK", propstat=[s async for s in propstat])
# By my reading of the WebDAV RFC, it should be legal to return
# '200 OK' here if Depth=0, but the RFC is not super clear and
# some clients don't seem to like it and prefer a 207 instead.
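# Example PROPFIND request body handled above (RFC 4918, section 9.1;
# the requested properties are only an illustration):
#
#   <D:propfind xmlns:D="DAV:">
#     <D:prop>
#       <D:resourcetype/>
#       <D:getetag/>
#     </D:prop>
#   </D:propfind>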
class ProppatchMethod(Method):
@multistatus
async def handle(self, request, environ, app):
href, unused_path, resource = app._get_resource_from_environ(
request, environ)
if resource is None:
yield Status(request.url, "404 Not Found")
return
et = await _readXmlBody(
request, "{DAV:}propertyupdate", strict=app.strict)
propstat = []
for el in et:
if el.tag not in ("{DAV:}set", "{DAV:}remove"):
nonfatal_bad_request(
f"Unknown tag {el.tag} in propertyupdate", app.strict)
continue
propstat.extend(
[
ps
async for ps in apply_modify_prop(
el, href, resource, app.properties
)
]
)
yield Status(request.url, propstat=propstat)
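# Example PROPPATCH request body handled above (RFC 4918, section 9.2;
# the property value is only an illustration):
#
#   <D:propertyupdate xmlns:D="DAV:">
#     <D:set>
#       <D:prop><D:displayname>Work calendar</D:displayname></D:prop>
#     </D:set>
#   </D:propertyupdate>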
class MkcolMethod(Method):
async def handle(self, request, environ, app):
content_type = request.content_type
base_content_type, params = parse_type(content_type)
if base_content_type not in (
"text/plain",
"text/xml",
"application/xml",
None,
"application/octet-stream",
):
raise UnsupportedMediaType(base_content_type)
href, path, resource = app._get_resource_from_environ(request, environ)
if resource is not None:
return _send_method_not_allowed(app._get_allowed_methods(request))
try:
resource = app.backend.create_collection(path)
except FileNotFoundError:
return Response(status=409, reason="Conflict")
if base_content_type in ("text/xml", "application/xml"):
# Extended MKCOL (RFC5689)
et = await _readXmlBody(request, "{DAV:}mkcol", strict=app.strict)
propstat = []
for el in et:
if el.tag != "{DAV:}set":
nonfatal_bad_request(
f"Unknown tag {el.tag} in mkcol", app.strict)
continue
propstat.extend(
[
ps
async for ps in apply_modify_prop(
el, href, resource, app.properties
)
]
)
ret = ET.Element("{DAV:}mkcol-response")
for propstat_el in propstat_as_xml(propstat):
ret.append(propstat_el)
return _send_xml_response("201 Created", ret, DEFAULT_ENCODING)
else:
return Response(status=201, reason="Created")
class OptionsMethod(Method):
async def handle(self, request, environ, app):
headers = []
if request.raw_path != "*":
unused_href, unused_path, r = app._get_resource_from_environ(
request, environ
)
if r is None:
return _send_not_found(request)
dav_features = app._get_dav_features(r)
headers.append(("DAV", ", ".join(dav_features)))
allowed_methods = app._get_allowed_methods(request)
headers.append(("Allow", ", ".join(allowed_methods)))
# RFC7231 requires that if there is no response body,
# Content-Length: 0 must be sent. This implies that there is
# content (albeit empty), and thus a 204 is not a valid reply.
# Thunderbird also fails if a 204 is sent rather than a 200.
return Response(
status=200,
reason="OK",
headers=headers + [("Content-Length", "0")],
)
class HeadMethod(Method):
async def handle(self, request, environ, app):
return await _do_get(request, environ, app, send_body=False)
class GetMethod(Method):
async def handle(self, request, environ, app):
return await _do_get(request, environ, app, send_body=True)
async def _do_get(request, environ, app, send_body):
unused_href, unused_path, r = app._get_resource_from_environ(
request, environ)
if r is None:
return _send_not_found(request)
accept_content_types = parse_accept_header(
request.headers.get("Accept", "*/*"))
accept_content_languages = parse_accept_header(
request.headers.get("Accept-Language", "*")
)
(
body,
content_length,
current_etag,
content_type,
content_languages,
) = await r.render(
request.path, accept_content_types, accept_content_languages)
if_none_match = request.headers.get("If-None-Match", None)
if (
if_none_match
and current_etag is not None
and etag_matches(if_none_match, current_etag)
):
return Response(status="304 Not Modified")
headers = [
("Content-Length", str(content_length)),
]
if current_etag is not None:
headers.append(("ETag", current_etag))
if content_type is not None:
headers.append(("Content-Type", content_type))
try:
last_modified = r.get_last_modified()
except KeyError:
pass
else:
headers.append(("Last-Modified", last_modified))
if content_languages is not None:
headers.append(("Content-Language", ", ".join(content_languages)))
if send_body:
return Response(body=body, status=200, reason="OK", headers=headers)
else:
return Response(status=200, reason="OK", headers=headers)
class WSGIRequest:
"""Request object for wsgi requests (with environ)."""
def __init__(self, environ) -> None:
self._environ = environ
self.method = environ["REQUEST_METHOD"]
self.raw_path = environ["SCRIPT_NAME"] + environ["PATH_INFO"]
self.path = environ["SCRIPT_NAME"] + path_from_environ(
environ, "PATH_INFO")
self.content_type = environ.get(
"CONTENT_TYPE", "application/octet-stream")
try:
self.content_length: Optional[int] = int(environ["CONTENT_LENGTH"])
except (KeyError, ValueError):
self.content_length = None
from multidict import CIMultiDict
self.headers = CIMultiDict(
[(k[5:].replace("_", "-"), v)
for k, v in environ.items() if k.startswith("HTTP_")]
)
self.url = request_uri(environ)
class StreamWrapper:
def __init__(self, stream) -> None:
self._stream = stream
async def read(self, size=None):
if size is None:
return self._stream.read()
else:
return self._stream.read(size)
self.content = StreamWrapper(self._environ["wsgi.input"])
self.match_info = {"path_info": environ["PATH_INFO"]}
@property
def can_read_body(self):
return (
"CONTENT_TYPE" in self._environ
or self._environ.get("CONTENT_LENGTH") != "0"
)
async def read(self):
return self._environ["wsgi.input"].read()
class WebDAVApp:
"""A wsgi App that provides a WebDAV server.
A concrete implementation should provide an implementation of the
lookup_resource function that can map a path to a Resource object
(returning None for nonexistant objects).
"""
def __init__(self, backend, strict=True) -> None:
self.backend = backend
self.properties: dict[str, Property] = {}
self.reporters: dict[str, Reporter] = {}
self.methods: dict[str, Method] = {}
self.strict = strict
self.register_methods(
[
DeleteMethod(),
PostMethod(),
PutMethod(),
ReportMethod(),
PropfindMethod(),
ProppatchMethod(),
MkcolMethod(),
OptionsMethod(),
GetMethod(),
HeadMethod(),
]
)
def _get_resource_from_environ(self, request, environ):
path_info = request.match_info["path_info"]
if not path_info.startswith("/"):
path_info = "/" + path_info
r = self.backend.get_resource(path_info)
return (request.path, path_info, r)
def register_properties(self, properties):
for p in properties:
self.properties[p.name] = p
def register_reporters(self, reporters):
for r in reporters:
self.reporters[r.name] = r
def register_methods(self, methods):
for m in methods:
self.methods[m.name] = m
def _get_dav_features(self, resource):
# TODO(jelmer): Support access-control
return [
"1",
"2",
"3",
"calendar-access",
"calendar-auto-scheduling",
"addressbook",
"extended-mkcol",
"add-member",
"sync-collection",
"quota",
]
def _get_allowed_methods(self, request):
"""List of supported methods on this endpoint."""
ret = []
for name in sorted(self.methods.keys()):
if self.methods[name].allow(request):
ret.append(name)
return ret
async def _handle_request(self, request, environ):
try:
do = self.methods[request.method]
except KeyError:
return _send_method_not_allowed(self._get_allowed_methods(request))
try:
return await do.handle(request, environ, self)
except BadRequestError as e:
logging.debug('Bad request: %s', e.message)
return Response(
status="400 Bad Request",
body=[e.message.encode(DEFAULT_ENCODING)],
)
except NotAcceptableError as e:
return Response(
status="406 Not Acceptable",
body=[str(e).encode(DEFAULT_ENCODING)],
)
except UnsupportedMediaType as e:
return Response(
status="415 Unsupported Media Type",
body=[
f"Unsupported media type {e.content_type!r}".encode(
DEFAULT_ENCODING
)
],
)
except UnauthorizedError:
return Response(
status="401 Unauthorized",
body=[("Please login.".encode(DEFAULT_ENCODING))],
)
def handle_wsgi_request(self, environ, start_response):
if "SCRIPT_NAME" not in environ:
logging.debug('SCRIPT_NAME not set; assuming "".')
environ["SCRIPT_NAME"] = ""
request = WSGIRequest(environ)
environ = {"SCRIPT_NAME": environ["SCRIPT_NAME"]}
try:
loop = asyncio.get_event_loop()
except RuntimeError:
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
response = loop.run_until_complete(
self._handle_request(request, environ))
return response.for_wsgi(start_response)
async def aiohttp_handler(self, request, route_prefix="/"):
environ = {"SCRIPT_NAME": route_prefix}
response = await self._handle_request(request, environ)
return response.for_aiohttp()
# Backwards compatibility
__call__ = handle_wsgi_request
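# Wiring sketch (the backend object is hypothetical; XandikosApp in
# xandikos.web performs the registration for the full property and
# reporter set):
#
#   app = WebDAVApp(my_backend)
#   app.register_properties([DAVGetCTagProperty(), CommentProperty()])
#   app.register_reporters([ExpandPropertyReporter()])
#   # app is now a WSGI callable: app(environ, start_response)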
xandikos_0.2.10.orig/xandikos/wsgi.py 0000644 0000000 0000000 00000003447 14476041427 014545 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""WSGI wrapper for xandikos."""
import logging
import os
from .web import XandikosApp, XandikosBackend
backend = XandikosBackend(path=os.environ["XANDIKOSPATH"])
if not os.path.isdir(backend.path):
if os.getenv("AUTOCREATE"):
os.makedirs(os.environ["XANDIKOSPATH"])
else:
logging.warning("%r does not exist.", backend.path)
current_user_principal = os.environ.get("CURRENT_USER_PRINCIPAL", "/user/")
if not backend.get_resource(current_user_principal):
if os.getenv("AUTOCREATE"):
backend.create_principal(
current_user_principal,
create_defaults=os.environ["AUTOCREATE"] == "defaults",
)
else:
logging.warning(
"default user principal '%s' does not exist. "
"Create directory %s or set AUTOCREATE variable?",
current_user_principal,
backend._map_to_file_path(current_user_principal),
)
backend._mark_as_principal(current_user_principal)
app = XandikosApp(backend, current_user_principal)
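# Example deployment of this module (the WSGI server and paths are
# illustrative; any WSGI server can serve "app"):
#
#   XANDIKOSPATH=/var/lib/xandikos AUTOCREATE=defaults \
#       gunicorn xandikos.wsgi:app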
xandikos_0.2.10.orig/xandikos/wsgi_helpers.py 0000644 0000000 0000000 00000002730 14476041427 016261 0 ustar 00 # Xandikos
# Copyright (C) 2016-2020 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""WSGI wrapper for xandikos."""
import posixpath
from .web import WELLKNOWN_DAV_PATHS
class WellknownRedirector:
"""Redirect paths under .well-known/ to the appropriate paths."""
def __init__(self, inner_app, dav_root) -> None:
self._inner_app = inner_app
self._dav_root = dav_root
def __call__(self, environ, start_response):
# See https://tools.ietf.org/html/rfc6764
path = posixpath.normpath(
environ["SCRIPT_NAME"] + environ["PATH_INFO"])
if path in WELLKNOWN_DAV_PATHS:
start_response("302 Found", [("Location", self._dav_root)])
return []
return self._inner_app(environ, start_response)
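# Usage sketch (the wrapped application and DAV root are illustrative):
#
#   from xandikos.wsgi_helpers import WellknownRedirector
#   app = WellknownRedirector(inner_app, "/dav/")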
xandikos_0.2.10.orig/xandikos/xmpp.py 0000644 0000000 0000000 00000005036 14476041427 014554 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""XMPP support.
https://github.com/evert/calendarserver-extensions/blob/master/caldav-pubsubdiscovery.txt
"""
from . import webdav
from .caldav import CALENDAR_RESOURCE_TYPE
ET = webdav.ET
class XmppUriProperty(webdav.Property):
"""xmpp-uri property."""
name = "{http://calendarserver.org/ns/}xmpp-uri"
resource_type = CALENDAR_RESOURCE_TYPE
in_allprops = True
live = False
async def get_value(self, base_href, resource, el, environ):
el.text = resource.get_xmpp_uri()
async def set_value(self, href, resource, el):
raise NotImplementedError(self.set_value)
class XmppHeartbeatProperty(webdav.Property):
"""xmpp-heartbeat property."""
name = "{http://calendarserver.org/ns/}xmpp-heartbeat"
resource_type = CALENDAR_RESOURCE_TYPE
in_allprops = True
live = False
async def get_value(self, base_href, resource, el, environ):
(uri, minutes) = resource.get_xmpp_heartbeat()
uri_el = ET.SubElement(
el, "{http://calendarserver.org/ns/}xmpp-heartbeat-uri")
uri_el.text = uri
minutes_el = ET.SubElement(
el, "{http://calendarserver.org/ns/}xmpp-heartbeat-minutes")
minutes_el.text = str(minutes)
async def set_value(self, href, resource, el):
raise NotImplementedError(self.set_value)
class XmppServerProperty(webdav.Property):
"""xmpp-server property."""
name = "{http://calendarserver.org/ns/}xmpp-server"
resource_type = CALENDAR_RESOURCE_TYPE
in_allprops = True
live = False
async def get_value(self, base_href, resource, el, environ):
server = resource.get_xmpp_server()
el.text = server
async def set_value(self, href, resource, el):
raise NotImplementedError(self.set_value)
xandikos_0.2.10.orig/xandikos/store/__init__.py 0000644 0000000 0000000 00000041670 14476041427 016467 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Stores and store sets.
ETags (https://en.wikipedia.org/wiki/HTTP_ETag) used in this file
are always strong, and should be returned without wrapping quotes.
"""
import logging
import mimetypes
from collections.abc import Iterable, Iterator
from typing import Optional
from .index import AutoIndexManager, IndexDict, IndexKey, IndexValueIterator
STORE_TYPE_ADDRESSBOOK = "addressbook"
STORE_TYPE_CALENDAR = "calendar"
STORE_TYPE_PRINCIPAL = "principal"
STORE_TYPE_SCHEDULE_INBOX = "schedule-inbox"
STORE_TYPE_SCHEDULE_OUTBOX = "schedule-outbox"
STORE_TYPE_SUBSCRIPTION = "subscription"
STORE_TYPE_OTHER = "other"
VALID_STORE_TYPES = (
STORE_TYPE_ADDRESSBOOK,
STORE_TYPE_CALENDAR,
STORE_TYPE_PRINCIPAL,
STORE_TYPE_SCHEDULE_INBOX,
STORE_TYPE_SCHEDULE_OUTBOX,
STORE_TYPE_SUBSCRIPTION,
STORE_TYPE_OTHER,
)
MIMETYPES = mimetypes.MimeTypes()
MIMETYPES.add_type("text/calendar", ".ics") # type: ignore
MIMETYPES.add_type("text/vcard", ".vcf") # type: ignore
DEFAULT_MIME_TYPE = "application/octet-stream"
class InvalidCTag(Exception):
"""The request CTag can not be retrieved."""
def __init__(self, ctag) -> None:
self.ctag = ctag
class File:
"""A file type handler."""
content: Iterable[bytes]
content_type: str
def __init__(self, content: Iterable[bytes], content_type: str) -> None:
self.content = content
self.content_type = content_type
def validate(self) -> None:
"""Verify that file contents are valid.
Raises:
InvalidFileContents: Raised if a file is not valid
"""
def normalized(self) -> Iterable[bytes]:
"""Return a normalized version of the file."""
return self.content
def describe(self, name: str) -> str:
"""Describe the contents of this file.
Used in e.g. commit messages.
"""
return name
def get_uid(self) -> str:
"""Return UID.
Raises:
NotImplementedError: If UIDs aren't supported for this format
KeyError: If there is no UID set on this file
InvalidFileContents: If the file is misformatted
Returns: UID
"""
raise NotImplementedError(self.get_uid)
def describe_delta(
self, name: str, previous: Optional["File"]) -> Iterator[str]:
"""Describe the important difference between this and previous one.
Args:
name: File name
previous: Previous file to compare to.
Raises:
InvalidFileContents: If the file is misformatted
Returns: List of strings describing change
"""
assert name is not None
item_description = self.describe(name)
assert item_description is not None
if previous is None:
yield "Added " + item_description
else:
yield "Modified " + item_description
def _get_index(self, key: IndexKey) -> IndexValueIterator:
"""Obtain an index for this file.
Args:
key: Index key
Returns:
iterator over index values
"""
raise NotImplementedError(self._get_index)
def get_indexes(self, keys: Iterable[IndexKey]) -> IndexDict:
"""Obtain indexes for this file.
Args:
keys: Iterable of index keys
Returns: Dictionary mapping key names to values
"""
ret = {}
for k in keys:
ret[k] = list(self._get_index(k))
return ret
class Filter:
"""A filter that can be used to query for certain resources.
Filters are often resource-type specific.
"""
content_type: str
def check(self, name: str, resource: File) -> bool:
"""Check if this filter applies to a resource.
Args:
name: Name of the resource
resource: File object
Returns: boolean
"""
raise NotImplementedError(self.check)
def index_keys(self) -> list[IndexKey]:
"""Returns a list of indexes that could be used to apply this filter.
Returns: AND-list of OR-options
"""
raise NotImplementedError(self.index_keys)
def check_from_indexes(self, name: str, indexes: IndexDict) -> bool:
"""Check from a set of indexes whether a resource matches.
Args:
name: Name of the resource
indexes: Dictionary mapping index names to values
Returns: boolean
"""
raise NotImplementedError(self.check_from_indexes)
def open_by_content_type(
content: Iterable[bytes], content_type: str, extra_file_handlers
) -> File:
"""Open a file based on content type.
Args:
content: list of bytestrings with content
content_type: MIME type
extra_file_handlers: Mapping from MIME type to File subclass
Returns: File instance
"""
return extra_file_handlers.get(content_type.split(";")[0], File)(
content, content_type
)
def open_by_extension(
content: Iterable[bytes],
name: str,
extra_file_handlers: dict[str, type[File]],
) -> File:
"""Open a file based on the filename extension.
Args:
content: list of bytestrings with content
name: Name of file to open
extra_file_handlers: Mapping from MIME type to File subclass
Returns: File instance
"""
(mime_type, _) = MIMETYPES.guess_type(name)
if mime_type is None:
mime_type = DEFAULT_MIME_TYPE
return open_by_content_type(
content, mime_type, extra_file_handlers=extra_file_handlers
)
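# Example (illustrative): the filename extension selects the handler, with
# the generic File type as fallback.
#
#   >>> f = open_by_extension([b"BEGIN:VCALENDAR\r\n"], "event.ics", {})
#   >>> f.content_type
#   'text/calendar'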
class DuplicateUidError(Exception):
"""UID already exists in store."""
def __init__(self, uid: str, existing_name: str, new_name: str) -> None:
self.uid = uid
self.existing_name = existing_name
self.new_name = new_name
class NoSuchItem(Exception):
"""No such item."""
def __init__(self, name: str) -> None:
self.name = name
class InvalidETag(Exception):
"""Unexpected value for etag."""
def __init__(self, name: str, expected_etag: str, got_etag: str) -> None:
self.name = name
self.expected_etag = expected_etag
self.got_etag = got_etag
class NotStoreError(Exception):
"""Not a store."""
def __init__(self, path: str) -> None:
self.path = path
class InvalidFileContents(Exception):
"""Invalid file contents."""
def __init__(self, content_type: str, data, error) -> None:
self.content_type = content_type
self.data = data
self.error = error
class OutOfSpaceError(Exception):
"""Out of disk space."""
def __init__(self) -> None:
pass
class LockedError(Exception):
"""File or store being accessed is locked."""
def __init__(self, path: str) -> None:
self.path = path
class Store:
"""A object store."""
extra_file_handlers: dict[str, type[File]]
def __init__(self, index, *, double_check_indexes: bool = False,
index_threshold: Optional[int] = None) -> None:
self.extra_file_handlers = {}
self.index = index
self.index_manager = AutoIndexManager(
self.index, threshold=index_threshold)
self.double_check_indexes = double_check_indexes
def load_extra_file_handler(self, file_handler: type[File]) -> None:
self.extra_file_handlers[file_handler.content_type] = file_handler
def iter_with_etag(
self, ctag: Optional[str] = None) -> Iterator[
tuple[str, str, str]]:
"""Iterate over all items in the store with etag.
Args:
ctag: Possible ctag to iterate for
Returns: iterator over (name, content_type, etag) tuples
"""
raise NotImplementedError(self.iter_with_etag)
def iter_with_filter(
self, filter: Filter) -> Iterator[tuple[str, File, str]]:
"""Iterate over all items in the store that match a particular filter.
Args:
filter: Filter to apply
Returns: iterator over (name, file, etag) tuples
"""
if self.index_manager is not None:
try:
necessary_keys = filter.index_keys()
except NotImplementedError:
pass
else:
present_keys = self.index_manager.find_present_keys(
necessary_keys)
if present_keys is not None:
return self._iter_with_filter_indexes(filter, present_keys)
return self._iter_with_filter_naive(filter)
def _iter_with_filter_naive(
self, filter: Filter
) -> Iterator[tuple[str, File, str]]:
for (name, content_type, etag) in self.iter_with_etag():
if filter.content_type != content_type:
continue
file = self.get_file(name, content_type, etag)
try:
if filter.check(name, file):
yield (name, file, etag)
except InvalidFileContents:
logging.warning("Unable to parse file %s, skipping.", name)
def _iter_with_filter_indexes(
self, filter: Filter, keys
) -> Iterator[tuple[str, File, str]]:
for (name, content_type, etag) in self.iter_with_etag():
if filter.content_type != content_type:
continue
try:
file_values = self.index.get_values(name, etag, keys)
except KeyError:
# Index values not yet present for this file.
file = self.get_file(name, content_type, etag)
try:
file_values = file.get_indexes(self.index.available_keys())
except InvalidFileContents:
logging.warning(
"Unable to parse file %s for indexing, skipping.", name
)
file_values = {}
self.index.add_values(name, etag, file_values)
if filter.check_from_indexes(name, file_values):
yield (name, file, etag)
else:
if file_values is None:
continue
file = self.get_file(name, content_type, etag)
if self.double_check_indexes:
if file_values != file.get_indexes(keys):
raise AssertionError(
f"{file_values!r} != {file.get_indexes(keys)!r}")
if (filter.check_from_indexes(name, file_values)
!= filter.check(name, file)):
raise AssertionError(
f"index based filter {filter} "
f"(values: {file_values}) not matching "
"real file filter")
if filter.check_from_indexes(name, file_values):
file = self.get_file(name, content_type, etag)
yield (name, file, etag)
def get_file(
self,
name: str,
content_type: Optional[str] = None,
etag: Optional[str] = None,
) -> File:
"""Get the contents of an object.
Returns: A File object
"""
if content_type is None:
return open_by_extension(
self._get_raw(name, etag),
name,
extra_file_handlers=self.extra_file_handlers,
)
else:
return open_by_content_type(
self._get_raw(name, etag),
content_type,
extra_file_handlers=self.extra_file_handlers,
)
def _get_raw(
self, name: str, etag: Optional[str] = None) -> Iterable[bytes]:
"""Get the raw contents of an object.
Args:
name: Filename
etag: Optional etag to return
Returns: raw contents
"""
raise NotImplementedError(self._get_raw)
def get_ctag(self) -> str:
"""Return the ctag for this store."""
raise NotImplementedError(self.get_ctag)
def import_one(
self,
name: str,
content_type: str,
data: Iterable[bytes],
message: Optional[str] = None,
author: Optional[str] = None,
replace_etag: Optional[str] = None,
) -> tuple[str, str]:
"""Import a single object.
Args:
name: Name of the object
content_type: Content type of the object
data: serialized object as list of bytes
message: Commit message
author: Optional author
replace_etag: Etag to replace
Raises:
NameExists: when the name already exists
DuplicateUidError: when the uid already exists
Returns: (name, etag)
"""
raise NotImplementedError(self.import_one)
def delete_one(
self,
name: str,
message: Optional[str] = None,
author: Optional[str] = None,
etag: Optional[str] = None,
) -> None:
"""Delete an item.
Args:
name: Filename to delete
message: Commit message
author: Optional author
etag: Optional mandatory etag of object to remove
Raises:
NoSuchItem: when the item doesn't exist
InvalidETag: If the specified ETag doesn't match the current
"""
raise NotImplementedError(self.delete_one)
def set_type(self, store_type: str) -> None:
"""Set store type.
Args:
store_type: New store type (one of VALID_STORE_TYPES)
"""
raise NotImplementedError(self.set_type)
def get_type(self) -> str:
"""Get type of this store.
Returns: one of VALID_STORE_TYPES
"""
ret = STORE_TYPE_OTHER
for (name, content_type, etag) in self.iter_with_etag():
if content_type == "text/calendar":
ret = STORE_TYPE_CALENDAR
elif content_type == "text/vcard":
ret = STORE_TYPE_ADDRESSBOOK
return ret
def set_description(self, description: str) -> None:
"""Set the extended description of this store.
Args:
description: String with description
"""
raise NotImplementedError(self.set_description)
def get_description(self) -> str:
"""Get the extended description of this store."""
raise NotImplementedError(self.get_description)
def get_displayname(self) -> str:
"""Get the display name of this store."""
raise NotImplementedError(self.get_displayname)
def set_displayname(self, displayname: str) -> None:
"""Set the display name of this store."""
raise NotImplementedError(self.set_displayname)
def get_color(self) -> str:
"""Get the color code for this store."""
raise NotImplementedError(self.get_color)
def set_color(self, color: str) -> None:
"""Set the color code for this store."""
raise NotImplementedError(self.set_color)
def iter_changes(
self, old_ctag: str, new_ctag: str
) -> Iterator[tuple[str, str, str, str]]:
"""Get changes between two versions of this store.
Args:
old_ctag: Old ctag (None for empty Store)
new_ctag: New ctag
Returns: Iterator over (name, content_type, old_etag, new_etag)
"""
raise NotImplementedError(self.iter_changes)
def get_comment(self) -> str:
"""Retrieve store comment.
Returns: Comment
"""
raise NotImplementedError(self.get_comment)
def set_comment(self, comment: str) -> None:
"""Set comment.
Args:
comment: New comment to set
"""
raise NotImplementedError(self.set_comment)
def destroy(self) -> None:
"""Destroy this store."""
raise NotImplementedError(self.destroy)
def subdirectories(self) -> Iterator[str]:
"""Returns subdirectories to probe for other stores.
Returns: List of names
"""
raise NotImplementedError(self.subdirectories)
def get_source_url(self) -> str:
"""Return source URL, if this is a subscription."""
raise NotImplementedError(self.get_source_url)
def set_source_url(self, url: str) -> None:
"""Set the source URL."""
raise NotImplementedError(self.set_source_url)
def open_store(location: str) -> Store:
"""Open store from a location string.
Args:
location: Location string to open
Returns: A `Store`
"""
# For now, just support opening git stores
from .git import GitStore
return GitStore.open_from_path(location)
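# Example (the path is illustrative; only git-backed stores are supported,
# as noted above):
#
#   store = open_store("/var/lib/xandikos/user/calendars/calendar")
#   store.get_type()   # e.g. "calendar"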
xandikos_0.2.10.orig/xandikos/store/config.py 0000644 0000000 0000000 00000011323 14476041427 016165 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Collection configuration file."""
import configparser
FILENAME = ".xandikos"
class CollectionMetadata:
"""Metadata for a configuration."""
def get_color(self) -> str:
"""Get the color for this collection."""
raise NotImplementedError(self.get_color)
def set_color(self, color: str) -> None:
"""Change the color of this collection."""
raise NotImplementedError(self.set_color)
def get_source_url(self) -> str:
"""Get the source URL for this collection."""
raise NotImplementedError(self.get_source_url)
def set_source_url(self, url: str) -> None:
"""Set the source URL for this collection."""
raise NotImplementedError(self.set_source_url)
def get_comment(self) -> str:
raise NotImplementedError(self.get_comment)
def get_displayname(self) -> str:
raise NotImplementedError(self.get_displayname)
def get_description(self) -> str:
raise NotImplementedError(self.get_description)
def get_order(self) -> str:
raise NotImplementedError(self.get_order)
def set_order(self, order: str) -> None:
raise NotImplementedError(self.set_order)
class FileBasedCollectionMetadata(CollectionMetadata):
"""Metadata for a configuration."""
def __init__(self, cp=None, save=None) -> None:
if cp is None:
cp = configparser.ConfigParser()
self._configparser = cp
self._save_cb = save
def _save(self, message):
if self._save_cb is None:
return
self._save_cb(self._configparser, message)
@classmethod
def from_file(cls, f):
cp = configparser.ConfigParser()
cp.read_file(f)
return cls(cp)
def get_source_url(self):
return self._configparser["DEFAULT"]["source"]
def set_source_url(self, url):
if url is not None:
self._configparser["DEFAULT"]["source"] = url
else:
del self._configparser["DEFAULT"]["source"]
self._save("Set source URL.")
def get_color(self):
return self._configparser["DEFAULT"]["color"]
def get_comment(self):
return self._configparser["DEFAULT"]["comment"]
def get_displayname(self):
return self._configparser["DEFAULT"]["displayname"]
def get_description(self):
return self._configparser["DEFAULT"]["description"]
def set_color(self, color):
if color is not None:
self._configparser["DEFAULT"]["color"] = color
else:
del self._configparser["DEFAULT"]["color"]
self._save("Set color.")
def set_displayname(self, displayname):
if displayname is not None:
self._configparser["DEFAULT"]["displayname"] = displayname
else:
del self._configparser["DEFAULT"]["displayname"]
self._save("Set display name.")
def set_description(self, description):
if description is not None:
self._configparser["DEFAULT"]["description"] = description
else:
del self._configparser["DEFAULT"]["description"]
self._save("Set description.")
def set_comment(self, comment):
if comment is not None:
self._configparser["DEFAULT"]["comment"] = comment
else:
del self._configparser["DEFAULT"]["comment"]
self._save("Set comment.")
def set_type(self, store_type):
self._configparser["DEFAULT"]["type"] = store_type
self._save("Set collection type.")
def get_type(self):
return self._configparser["DEFAULT"]["type"]
def get_order(self):
return self._configparser["calendar"]["order"]
def set_order(self, order):
try:
self._configparser.add_section("calendar")
except configparser.DuplicateSectionError:
pass
if order is None:
del self._configparser["calendar"]["order"]
else:
self._configparser["calendar"]["order"] = order
self._save("Set calendar order.")
xandikos_0.2.10.orig/xandikos/store/git.py 0000644 0000000 0000000 00000063276 14476041427 015521 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Git store."""
import configparser
import errno
import logging
import os
import shutil
import stat
import uuid
from io import BytesIO, StringIO
from typing import Optional, Iterable
import dulwich.repo
from dulwich.file import FileLocked, GitFile
from dulwich.index import (Index, index_entry_from_stat,
write_index_dict)
from dulwich.objects import Blob, Tree
from dulwich.pack import SHA1Writer
from . import (DEFAULT_MIME_TYPE, MIMETYPES, VALID_STORE_TYPES,
DuplicateUidError, InvalidCTag, InvalidETag,
InvalidFileContents, LockedError, NoSuchItem, NotStoreError,
OutOfSpaceError, Store, open_by_content_type, open_by_extension)
from .config import FILENAME as CONFIG_FILENAME
from .config import CollectionMetadata, FileBasedCollectionMetadata
from .index import MemoryIndex
DEFAULT_ENCODING = "utf-8"
logger = logging.getLogger(__name__)
class RepoCollectionMetadata(CollectionMetadata):
def __init__(self, repo) -> None:
self._repo = repo
@classmethod
def present(cls, repo):
config = repo.get_config()
return config.has_section((b"xandikos",))
def get_source_url(self):
config = self._repo.get_config()
url = config.get(b"xandikos", b"source")
if not url:
raise KeyError
return url.decode(DEFAULT_ENCODING)
def set_source_url(self, url):
config = self._repo.get_config()
if url is not None:
config.set(b"xandikos", b"source", url.encode(DEFAULT_ENCODING))
else:
# TODO(jelmer): Add and use config.remove()
config.set(b"xandikos", b"source", b"")
self._write_config(config)
def get_color(self):
config = self._repo.get_config()
color = config.get(b"xandikos", b"color")
if color == b"":
raise KeyError
return color.decode(DEFAULT_ENCODING)
def set_color(self, color):
config = self._repo.get_config()
if color is not None:
config.set(b"xandikos", b"color", color.encode(DEFAULT_ENCODING))
else:
# TODO(jelmer): Add and use config.remove()
config.set(b"xandikos", b"color", b"")
self._write_config(config)
def _write_config(self, config):
f = BytesIO()
config.write_to_file(f)
self._repo._put_named_file("config", f.getvalue())
def get_displayname(self):
config = self._repo.get_config()
displayname = config.get(b"xandikos", b"displayname")
if displayname == b"":
raise KeyError
return displayname.decode(DEFAULT_ENCODING)
def set_displayname(self, displayname):
config = self._repo.get_config()
if displayname is not None:
config.set(
b"xandikos",
b"displayname",
displayname.encode(DEFAULT_ENCODING),
)
else:
config.set(b"xandikos", b"displayname", b"")
self._write_config(config)
def get_description(self):
desc = self._repo.get_description()
if desc in (None, b""):
raise KeyError
return desc.decode(DEFAULT_ENCODING)
def set_description(self, description):
if description is not None:
self._repo.set_description(description.encode(DEFAULT_ENCODING))
else:
self._repo.set_description(b"")
def get_comment(self):
config = self._repo.get_config()
comment = config.get(b"xandikos", b"comment")
if comment == b"":
raise KeyError
return comment.decode(DEFAULT_ENCODING)
def set_comment(self, comment):
config = self._repo.get_config()
if comment is not None:
config.set(
b"xandikos", b"comment", comment.encode(DEFAULT_ENCODING))
else:
# TODO(jelmer): Add and use config.remove()
config.set(b"xandikos", b"comment", b"")
self._write_config(config)
def set_type(self, store_type):
config = self._repo.get_config()
config.set(b"xandikos", b"type", store_type.encode(DEFAULT_ENCODING))
self._write_config(config)
def get_type(self):
config = self._repo.get_config()
store_type = config.get(b"xandikos", b"type")
store_type = store_type.decode(DEFAULT_ENCODING)
if store_type not in VALID_STORE_TYPES:
logging.warning(
"Invalid store type %s set for %r.", store_type, self._repo)
return store_type
def get_order(self):
config = self._repo.get_config()
order = config.get(b"xandikos", b"calendar-order")
if order == b"":
raise KeyError
return order.decode("utf-8")
def set_order(self, order):
config = self._repo.get_config()
if order is None:
order = ""
config.set(b"xandikos", b"calendar-order", order.encode("utf-8"))
self._write_config(config)
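# Illustrative sketch (values are hypothetical): RepoCollectionMetadata keeps
# its settings in the repository's own git config file, under an [xandikos]
# section, roughly like this:
#
#   [xandikos]
#       type = calendar
#       displayname = Work
#       color = #ff0000
#       calendar-order = 1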
class locked_index:
def __init__(self, path) -> None:
self._path = path
def __enter__(self):
self._file = GitFile(self._path, "wb")
self._index = Index(self._path)
return self._index
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is not None:
self._file.abort()
return
try:
f = SHA1Writer(self._file)
write_index_dict(f, self._index._byname)
except BaseException:
self._file.abort()
else:
f.close()
class GitStore(Store):
"""A Store backed by a Git Repository."""
def __init__(self, repo, *, ref: bytes = b"refs/heads/master",
check_for_duplicate_uids=True,
**kwargs) -> None:
super().__init__(MemoryIndex(), **kwargs)
self.ref = ref
self.repo = repo
        # Maps uids to (fname, etag)
self._uid_to_fname: dict[str, tuple[bytes, str]] = {}
self._check_for_duplicate_uids = check_for_duplicate_uids
        # Maps filenames to (etag, uid)
self._fname_to_uid: dict[str, tuple[str, str]] = {}
def _get_etag(self, name: str) -> str:
raise NotImplementedError(self._get_etag)
def _import_one(
self, name: str, data: Iterable[bytes], message: str,
author: Optional[str] = None):
raise NotImplementedError(self._import_one)
@property
def config(self):
if RepoCollectionMetadata.present(self.repo):
return RepoCollectionMetadata(self.repo)
else:
cp = configparser.ConfigParser()
try:
cf = self._get_raw(CONFIG_FILENAME)
except KeyError:
pass
else:
if cf is not None:
cp.read_string(b"".join(cf).decode("utf-8"))
def save_config(cp, message):
f = StringIO()
cp.write(f)
self._import_one(
CONFIG_FILENAME, [f.getvalue().encode("utf-8")], message
)
return FileBasedCollectionMetadata(cp, save=save_config)
def __repr__(self) -> str:
return f"{type(self).__name__}({self.repo!r}, ref={self.ref!r})"
@property
def path(self):
return self.repo.path
def _check_duplicate(self, uid, name, replace_etag):
if uid is not None and self._check_for_duplicate_uids:
self._scan_uids()
try:
(existing_name, _) = self._uid_to_fname[uid]
except KeyError:
pass
else:
if existing_name != name:
raise DuplicateUidError(uid, existing_name, name)
try:
etag = self._get_etag(name)
except KeyError:
etag = None
if replace_etag is not None and etag != replace_etag:
raise InvalidETag(name, etag, replace_etag)
return etag
def import_one(
self,
name: str,
content_type: str,
data: Iterable[bytes],
message: Optional[str] = None,
author: Optional[str] = None,
replace_etag: Optional[str] = None,
) -> tuple[str, str]:
"""Import a single object.
Args:
name: name of the object
content_type: Content type
data: serialized object as list of bytes
message: Commit message
author: Optional author
replace_etag: optional etag of object to replace
Raises:
InvalidETag: when the name already exists but with different etag
DuplicateUidError: when the uid already exists
Returns: etag
"""
if content_type is None:
fi = open_by_extension(data, name, self.extra_file_handlers)
else:
fi = open_by_content_type(
data, content_type, self.extra_file_handlers)
if name is None:
name = str(uuid.uuid4())
extension = MIMETYPES.guess_extension(content_type)
if extension is not None:
name += extension
fi.validate()
try:
uid = fi.get_uid()
except (KeyError, NotImplementedError):
uid = None
self._check_duplicate(uid, name, replace_etag)
if message is None:
try:
old_fi = self.get_file(name, content_type, replace_etag)
except KeyError:
old_fi = None
message = "\n".join(fi.describe_delta(name, old_fi))
etag = self._import_one(name, fi.normalized(), message, author=author)
return (name, etag.decode("ascii"))
def _get_raw(self, name, etag=None):
"""Get the raw contents of an object.
Args:
name: Name of the item
etag: Optional etag
Returns: raw contents as chunks
"""
if etag is None:
etag = self._get_etag(name)
blob = self.repo.object_store[etag.encode("ascii")]
return blob.chunked
def _scan_uids(self):
removed = set(self._fname_to_uid.keys())
for (name, mode, sha) in self._iterblobs():
etag = sha.decode("ascii")
if name in removed:
removed.remove(name)
if (name in self._fname_to_uid
and self._fname_to_uid[name][0] == etag):
continue
blob = self.repo.object_store[sha]
fi = open_by_extension(
blob.chunked, name, self.extra_file_handlers)
try:
uid = fi.get_uid()
except KeyError:
logger.warning("No UID found in file %s", name)
uid = None
except InvalidFileContents:
logging.warning("Unable to parse file %s", name)
uid = None
except NotImplementedError:
# This file type doesn't support UIDs
uid = None
self._fname_to_uid[name] = (etag, uid)
if uid is not None:
self._uid_to_fname[uid] = (name, etag)
for name in removed:
(unused_etag, uid) = self._fname_to_uid[name]
if uid is not None:
del self._uid_to_fname[uid]
del self._fname_to_uid[name]
def _iterblobs(self, ctag=None):
raise NotImplementedError(self._iterblobs)
def iter_with_etag(self, ctag=None):
"""Iterate over all items in the store with etag.
Args:
ctag: Ctag to iterate for
Returns: iterator over (name, content_type, etag) tuples
"""
for (name, mode, sha) in self._iterblobs(ctag):
(mime_type, _) = MIMETYPES.guess_type(name)
if mime_type is None:
mime_type = DEFAULT_MIME_TYPE
yield (name, mime_type, sha.decode("ascii"))
@classmethod
def create(cls, path):
"""Create a new store backed by a Git repository on disk.
Returns: A `GitStore`
"""
raise NotImplementedError(cls.create)
@classmethod
def open_from_path(cls, path, **kwargs):
"""Open a GitStore from a path.
Args:
path: Path
Returns: A `GitStore`
"""
try:
return cls.open(dulwich.repo.Repo(path), **kwargs)
except dulwich.repo.NotGitRepository:
raise NotStoreError(path)
@classmethod
def open(cls, repo, **kwargs):
"""Open a GitStore given a Repo object.
Args:
repo: A Dulwich `Repo`
Returns: A `GitStore`
"""
if repo.has_index():
return TreeGitStore(repo, **kwargs)
else:
return BareGitStore(repo, **kwargs)
def get_description(self):
"""Get extended description.
Returns: repository description as string
"""
try:
return self.config.get_description()
except KeyError:
return None
def set_description(self, description):
"""Set extended description.
Args:
description: repository description as string
"""
self.config.set_description(description)
def set_comment(self, comment):
"""Set comment.
Args:
comment: Comment
"""
self.config.set_comment(comment)
def get_comment(self):
"""Get comment.
Returns: Comment
"""
try:
return self.config.get_comment()
except KeyError:
return None
def get_color(self):
"""Get color.
Returns: A Color code, or None
"""
try:
return self.config.get_color()
except KeyError:
return None
def set_color(self, color):
"""Set the color code for this store."""
self.config.set_color(color)
def get_source_url(self):
"""Get source URL."""
try:
return self.config.get_source_url()
except KeyError:
return None
def set_source_url(self, url):
"""Set the source URL."""
self.config.set_source_url(url)
def get_displayname(self):
"""Get display name.
Returns: The display name, or None if not set
"""
try:
return self.config.get_displayname()
except KeyError:
return None
def set_displayname(self, displayname):
"""Set the display name.
Args:
displayname: New display name
"""
self.config.set_displayname(displayname)
def set_type(self, store_type):
"""Set store type.
Args:
store_type: New store type (one of VALID_STORE_TYPES)
"""
self.config.set_type(store_type)
def get_type(self):
"""Get store type.
This looks in git config first, then falls back to guessing.
"""
try:
return self.config.get_type()
except KeyError:
return super().get_type()
def iter_changes(self, old_ctag, new_ctag):
"""Get changes between two versions of this store.
Args:
old_ctag: Old ctag (None for empty Store)
new_ctag: New ctag
Returns: Iterator over (name, content_type, old_etag, new_etag)
"""
if old_ctag is None:
t = Tree()
self.repo.object_store.add_object(t)
old_ctag = t.id.decode("ascii")
previous = {
name: (content_type, etag)
for (name, content_type, etag) in self.iter_with_etag(old_ctag)
}
for (name, new_content_type, new_etag) in self.iter_with_etag(
new_ctag):
try:
(old_content_type, old_etag) = previous[name]
except KeyError:
old_etag = None
else:
assert old_content_type == new_content_type
if old_etag != new_etag:
yield (name, new_content_type, old_etag, new_etag)
if old_etag is not None:
del previous[name]
for (name, (old_content_type, old_etag)) in previous.items():
yield (name, old_content_type, old_etag, None)
def destroy(self):
"""Destroy this store."""
shutil.rmtree(self.path)
class BareGitStore(GitStore):
"""A Store backed by a bare git repository."""
def _get_current_tree(self):
try:
ref_object = self.repo[self.ref]
except KeyError:
return Tree()
if isinstance(ref_object, Tree):
return ref_object
else:
return self.repo.object_store[ref_object.tree]
def _get_etag(self, name):
tree = self._get_current_tree()
name = name.encode(DEFAULT_ENCODING)
return tree[name][1].decode("ascii")
def get_ctag(self):
"""Return the ctag for this store."""
return self._get_current_tree().id.decode("ascii")
def _iterblobs(self, ctag=None):
if ctag is None:
tree = self._get_current_tree()
else:
try:
tree = self.repo.object_store[ctag.encode("ascii")]
except KeyError as exc:
raise InvalidCTag(ctag) from exc
for (name, mode, sha) in tree.iteritems():
name = name.decode(DEFAULT_ENCODING)
if name == CONFIG_FILENAME:
continue
yield (name, mode, sha)
@classmethod
def create_memory(cls) -> "GitStore":
"""Create a new store backed by a memory repository.
Returns: A `GitStore`
"""
return cls(dulwich.repo.MemoryRepo())
def _commit_tree(self, tree_id, message, author=None):
return self.repo.do_commit(
message=message, tree=tree_id, ref=self.ref, author=author
)
def _import_one(self, name: str, data: Iterable[bytes], message: str,
author: Optional[str] = None) -> bytes:
"""Import a single object.
Args:
name: Optional name of the object
data: serialized object as bytes
message: optional commit message
author: optional author
Returns: etag
"""
b = Blob()
b.chunked = data
tree = self._get_current_tree()
old_tree_id = tree.id
name_enc = name.encode(DEFAULT_ENCODING)
tree[name_enc] = (0o644 | stat.S_IFREG, b.id)
self.repo.object_store.add_objects([(tree, ""), (b, name_enc)])
if tree.id != old_tree_id:
self._commit_tree(
tree.id, message.encode(DEFAULT_ENCODING), author=author)
return b.id
def delete_one(self, name, message=None, author=None, etag=None):
"""Delete an item.
Args:
name: Filename to delete
          message: Commit message
author: Optional author to store
etag: Optional mandatory etag of object to remove
Raises:
NoSuchItem: when the item doesn't exist
          InvalidETag: If the specified ETag doesn't match the current ETag
"""
tree = self._get_current_tree()
name_enc = name.encode(DEFAULT_ENCODING)
try:
current_sha = tree[name_enc][1]
except KeyError as exc:
raise NoSuchItem(name) from exc
if etag is not None and current_sha != etag.encode("ascii"):
raise InvalidETag(name, etag, current_sha.decode("ascii"))
del tree[name_enc]
self.repo.object_store.add_objects([(tree, "")])
if message is None:
fi = open_by_extension(
self.repo.object_store[current_sha].chunked,
name,
self.extra_file_handlers,
)
message = "Delete " + fi.describe(name)
self._commit_tree(
tree.id, message.encode(DEFAULT_ENCODING), author=author)
@classmethod
def create(cls, path):
"""Create a new store backed by a Git repository on disk.
Returns: A `GitStore`
"""
os.mkdir(path)
return cls(dulwich.repo.Repo.init_bare(path))
def subdirectories(self):
"""Returns subdirectories to probe for other stores.
Returns: List of names
"""
# Or perhaps just return all subdirectories but filter out
# Git-owned ones?
return []
class TreeGitStore(GitStore):
"""A Store that backs onto a treefull Git repository."""
@classmethod
def create(cls, path, bare=True):
"""Create a new store backed by a Git repository on disk.
Returns: A `GitStore`
"""
os.mkdir(path)
return cls(dulwich.repo.Repo.init(path))
def _get_etag(self, name):
index = self.repo.open_index()
name = name.encode(DEFAULT_ENCODING)
return index[name].sha.decode("ascii")
def _commit_tree(self, index, message, author=None):
tree = index.commit(self.repo.object_store)
return self.repo.do_commit(message=message, author=author, tree=tree)
def _import_one(self, name: str, data: Iterable[bytes], message: str, author: Optional[str] = None) -> bytes:
"""Import a single object.
Args:
name: name of the object
data: serialized object as list of bytes
message: Commit message
author: Optional author
Returns: etag
"""
try:
with locked_index(self.repo.index_path()) as index:
p = os.path.join(self.repo.path, name)
with open(p, "wb") as f:
f.writelines(data)
st = os.lstat(p)
blob = Blob.from_string(b"".join(data))
encoded_name = name.encode(DEFAULT_ENCODING)
if (encoded_name not in index
or blob.id != index[encoded_name].sha):
self.repo.object_store.add_object(blob)
index[encoded_name] = index_entry_from_stat(st, blob.id)
self._commit_tree(
index, message.encode(DEFAULT_ENCODING), author=author
)
return blob.id
except FileLocked as exc:
raise LockedError(name) from exc
except OSError as exc:
if exc.errno == errno.ENOSPC:
raise OutOfSpaceError() from exc
raise
def delete_one(self, name, message=None, author=None, etag=None):
"""Delete an item.
Args:
name: Filename to delete
message: Commit message
author: Optional author
etag: Optional mandatory etag of object to remove
        Raises:
          NoSuchItem: when the item doesn't exist
          InvalidETag: If the specified ETag doesn't match the current ETag
"""
p = os.path.join(self.repo.path, name)
try:
with open(p, "rb") as f:
current_blob = Blob.from_string(f.read())
except FileNotFoundError as exc:
raise NoSuchItem(name) from exc
except IsADirectoryError as exc:
raise NoSuchItem(name) from exc
if message is None:
fi = open_by_extension(
current_blob.chunked, name, self.extra_file_handlers)
message = "Delete " + fi.describe(name)
        if etag is not None:
            current_etag = current_blob.id
if etag.encode("ascii") != current_etag:
raise InvalidETag(name, etag, current_etag.decode("ascii"))
try:
with locked_index(self.repo.index_path()) as index:
os.unlink(p)
del index[name.encode(DEFAULT_ENCODING)]
self._commit_tree(
index, message.encode(DEFAULT_ENCODING), author=author
)
except FileLocked:
raise LockedError(name)
def get_ctag(self):
"""Return the ctag for this store."""
index = self.repo.open_index()
return index.commit(self.repo.object_store).decode("ascii")
def _iterblobs(self, ctag=None):
"""Iterate over all items in the store with etag.
:yield: (name, etag) tuples
"""
if ctag is not None:
try:
tree = self.repo.object_store[ctag.encode("ascii")]
except KeyError as exc:
raise InvalidCTag(ctag) from exc
for (name, mode, sha) in tree.iteritems():
name = name.decode(DEFAULT_ENCODING)
if name == CONFIG_FILENAME:
continue
yield (name, mode, sha)
else:
index = self.repo.open_index()
for (name, sha, mode) in index.iterobjects():
name = name.decode(DEFAULT_ENCODING)
if name == CONFIG_FILENAME:
continue
yield (name, mode, sha)
def subdirectories(self):
"""Returns subdirectories to probe for other stores.
Returns: List of names
"""
ret = []
for name in os.listdir(self.path):
if name == dulwich.repo.CONTROLDIR:
continue
p = os.path.join(self.path, name)
if os.path.isdir(p):
ret.append(name)
return ret
xandikos_0.2.10.orig/xandikos/store/index.py 0000644 0000000 0000000 00000010067 14476041427 016033 0 ustar 00 # Xandikos
# Copyright (C) 2019 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Indexing."""
import collections
import logging
from collections.abc import Iterable, Iterator
from typing import Optional, Union, Dict, Set
IndexKey = str
IndexValue = list[Union[bytes, bool]]
IndexValueIterator = Iterator[Union[bytes, bool]]
IndexDict = dict[IndexKey, IndexValue]
DEFAULT_INDEXING_THRESHOLD = 5
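# Minimal usage sketch (names and values are hypothetical) for the in-memory
# index defined below: keys must first be declared with reset(), after which
# values for an etag can be stored with add_values() and read back with
# get_values().
#
#   index = MemoryIndex()
#   index.reset(["C=VCALENDAR/C=VTODO"])
#   index.add_values("todo.ics", "etag1", {"C=VCALENDAR/C=VTODO": [True]})
#   values = index.get_values("todo.ics", "etag1", ["C=VCALENDAR/C=VTODO"])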
class Index:
"""Index management."""
def available_keys(self) -> Iterable[IndexKey]:
"""Return list of available index keys."""
raise NotImplementedError(self.available_keys)
def get_values(self, name: str, etag: str, keys: list[IndexKey]):
"""Get the values for specified keys for a name."""
raise NotImplementedError(self.get_values)
def iter_etags(self) -> Iterator[str]:
"""Return all the etags covered by this index."""
raise NotImplementedError(self.iter_etags)
class MemoryIndex(Index):
def __init__(self) -> None:
self._indexes: Dict[IndexKey, Dict[str, IndexValue]] = {}
self._in_index: Set[str] = set()
def available_keys(self):
return self._indexes.keys()
def get_values(self, name, etag, keys):
if etag not in self._in_index:
raise KeyError(etag)
indexes = {}
for k in keys:
if k not in self._indexes:
raise AssertionError
try:
indexes[k] = self._indexes[k][etag]
except KeyError:
indexes[k] = []
return indexes
def iter_etags(self):
return iter(self._in_index)
def add_values(self, name, etag, values):
for k, v in values.items():
if k not in self._indexes:
raise AssertionError
self._indexes[k][etag] = v
self._in_index.add(etag)
def reset(self, keys):
self._in_index = set()
self._indexes = {}
for key in keys:
self._indexes[key] = {}
class AutoIndexManager:
def __init__(self, index, threshold: Optional[int] = None) -> None:
self.index = index
self.desired: dict[IndexKey, int] = collections.defaultdict(lambda: 0)
if threshold is None:
threshold = DEFAULT_INDEXING_THRESHOLD
self.indexing_threshold = threshold
def find_present_keys(
self, necessary_keys: Iterable[IndexKey]) -> Optional[
Iterable[IndexKey]]:
available_keys = self.index.available_keys()
needed_keys = []
missing_keys: list[IndexKey] = []
new_index_keys = set()
for keys in necessary_keys:
found = False
for key in keys:
if key in available_keys:
needed_keys.append(key)
found = True
if not found:
for key in keys:
self.desired[key] += 1
if self.desired[key] > self.indexing_threshold:
new_index_keys.add(key)
missing_keys.extend(keys)
if not missing_keys:
return needed_keys
if new_index_keys:
logging.debug("Adding new index keys: %r", new_index_keys)
self.index.reset(set(self.index.available_keys()) | new_index_keys)
# TODO(jelmer): Maybe best to check if missing_keys are satisfiable
# now?
return None
xandikos_0.2.10.orig/xandikos/store/vdir.py 0000644 0000000 0000000 00000027332 14476041427 015673 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""vdir store.
See https://github.com/pimutils/vdirsyncer/blob/master/docs/vdir.rst
"""
import configparser
import hashlib
import logging
import os
import shutil
from typing import Dict
import uuid
from . import (MIMETYPES, DuplicateUidError, InvalidETag, InvalidFileContents,
NoSuchItem, Store, open_by_content_type, open_by_extension)
from .config import FILENAME as CONFIG_FILENAME
from .config import FileBasedCollectionMetadata
from .index import MemoryIndex
DEFAULT_ENCODING = "utf-8"
logger = logging.getLogger(__name__)
class VdirStore(Store):
"""A Store backed by a Vdir directory."""
def __init__(self, path, check_for_duplicate_uids=True) -> None:
super().__init__(MemoryIndex())
self.path = path
self._check_for_duplicate_uids = check_for_duplicate_uids
        # Maps filenames to (etag, uid)
self._fname_to_uid: Dict[str, str] = {}
        # Maps uids to (fname, etag)
self._uid_to_fname: Dict[str, str] = {}
cp = configparser.ConfigParser()
cp.read([os.path.join(self.path, CONFIG_FILENAME)])
def save_config(cp, message):
with open(os.path.join(self.path, CONFIG_FILENAME), "w") as f:
cp.write(f)
self.config = FileBasedCollectionMetadata(cp, save=save_config)
def __repr__(self) -> str:
return f"{type(self).__name__}({self.path!r})"
def _get_etag(self, name):
path = os.path.join(self.path, name)
md5 = hashlib.md5()
try:
with open(path, "rb") as f:
for chunk in f:
md5.update(chunk)
except FileNotFoundError as exc:
raise KeyError(name) from exc
except IsADirectoryError as exc:
raise KeyError(name) from exc
return md5.hexdigest()
def _get_raw(self, name, etag=None):
"""Get the raw contents of an object.
Args:
name: Name of the item
etag: Optional etag (ignored)
Returns: raw contents as chunks
"""
path = os.path.join(self.path, name)
try:
with open(path, "rb") as f:
return [f.read()]
except FileNotFoundError as exc:
raise KeyError(name) from exc
except IsADirectoryError as exc:
raise KeyError(name) from exc
def _scan_uids(self):
removed = set(self._fname_to_uid.keys())
for (name, content_type, etag) in self.iter_with_etag():
if name in removed:
removed.remove(name)
if (name in self._fname_to_uid
and self._fname_to_uid[name][0] == etag):
continue
fi = open_by_extension(
self._get_raw(name, etag), name, self.extra_file_handlers
)
try:
uid = fi.get_uid()
except KeyError:
logger.warning("No UID found in file %s", name)
uid = None
except InvalidFileContents:
logging.warning("Unable to parse file %s", name)
uid = None
except NotImplementedError:
# This file type doesn't support UIDs
uid = None
self._fname_to_uid[name] = (etag, uid)
if uid is not None:
self._uid_to_fname[uid] = (name, etag)
for name in removed:
(unused_etag, uid) = self._fname_to_uid[name]
if uid is not None:
del self._uid_to_fname[uid]
del self._fname_to_uid[name]
def _check_duplicate(self, uid, name, replace_etag):
if uid is not None and self._check_for_duplicate_uids:
self._scan_uids()
try:
(existing_name, _) = self._uid_to_fname[uid]
except KeyError:
pass
else:
if existing_name != name:
raise DuplicateUidError(uid, existing_name, name)
try:
etag = self._get_etag(name)
except KeyError:
etag = None
if replace_etag is not None and etag != replace_etag:
raise InvalidETag(name, etag, replace_etag)
return etag
def import_one(
self,
name,
content_type,
data,
message=None,
author=None,
replace_etag=None,
):
"""Import a single object.
Args:
name: name of the object
content_type: Content type
data: serialized object as list of bytes
message: Commit message
author: Optional author
replace_etag: optional etag of object to replace
Raises:
InvalidETag: when the name already exists but with different etag
DuplicateUidError: when the uid already exists
Returns: etag
"""
if content_type is None:
fi = open_by_extension(data, name, self.extra_file_handlers)
else:
fi = open_by_content_type(
data, content_type, self.extra_file_handlers)
if name is None:
name = str(uuid.uuid4())
extension = MIMETYPES.guess_extension(content_type)
if extension is not None:
name += extension
fi.validate()
try:
uid = fi.get_uid()
except (KeyError, NotImplementedError):
uid = None
self._check_duplicate(uid, name, replace_etag)
# TODO(jelmer): Check that extensions match content type:
# if this is a vCard, the extension should be .vcf
# if this is a iCalendar, the extension should be .ics
# TODO(jelmer): check that a UID is present and that all UIDs are the
# same
path = os.path.join(self.path, name)
tmppath = os.path.join(self.path, name + ".tmp")
with open(tmppath, "wb") as f:
for chunk in fi.normalized():
f.write(chunk)
os.replace(tmppath, path)
return (name, self._get_etag(name))
def iter_with_etag(self, ctag=None):
"""Iterate over all items in the store with etag.
Args:
ctag: Ctag to iterate for
Returns: iterator over (name, content_type, etag) tuples
"""
for name in os.listdir(self.path):
if name.endswith(".tmp"):
continue
if name == CONFIG_FILENAME:
continue
if name.endswith(".ics"):
content_type = "text/calendar"
elif name.endswith(".vcf"):
content_type = "text/vcard"
else:
continue
yield (name, content_type, self._get_etag(name))
@classmethod
def create(cls, path: str) -> "VdirStore":
"""Create a new store backed by a Vdir on disk.
Returns: A `VdirStore`
"""
os.mkdir(path)
return cls(path)
@classmethod
def open_from_path(cls, path: str) -> "VdirStore":
"""Open a VdirStore from a path.
Args:
path: Path
Returns: A `VdirStore`
"""
return cls(path)
def get_description(self):
"""Get extended description.
Returns: repository description as string
"""
return self.config.get_description()
def set_description(self, description):
"""Set extended description.
Args:
description: repository description as string
"""
self.config.set_description(description)
def set_comment(self, comment):
"""Set comment.
Args:
comment: Comment
"""
raise NotImplementedError(self.set_comment)
def get_comment(self):
"""Get comment.
Returns: Comment
"""
raise NotImplementedError(self.get_comment)
def _read_metadata(self, name):
try:
with open(os.path.join(self.path, name)) as f:
return f.read().strip()
except FileNotFoundError:
return None
except IsADirectoryError:
return None
def _write_metadata(self, name, data):
path = os.path.join(self.path, name)
if data is not None:
with open(path, "w") as f:
f.write(data)
else:
os.unlink(path)
def get_color(self):
"""Get color.
Returns: A Color code, or None
"""
color = self._read_metadata("color")
if color is not None:
assert color.startswith("#")
return color
def set_color(self, color):
"""Set the color code for this store."""
assert color.startswith("#")
self._write_metadata("color", color)
def get_source_url(self):
"""Get source URL."""
return self._read_metadata("source")
def set_source_url(self, url):
"""Set source URL."""
self._write_metadata("source", url)
def get_displayname(self):
"""Get display name.
Returns: The display name, or None if not set
"""
return self._read_metadata("displayname")
def set_displayname(self, displayname):
"""Set the display name.
Args:
displayname: New display name
"""
self._write_metadata("displayname", displayname)
def iter_changes(self, old_ctag, new_ctag):
"""Get changes between two versions of this store.
Args:
old_ctag: Old ctag (None for empty Store)
new_ctag: New ctag
Returns: Iterator over (name, content_type, old_etag, new_etag)
"""
raise NotImplementedError(self.iter_changes)
def destroy(self):
"""Destroy this store."""
shutil.rmtree(self.path)
def delete_one(self, name, message=None, author=None, etag=None):
"""Delete an item.
Args:
name: Filename to delete
message: Commit message
author: Optional author
etag: Optional mandatory etag of object to remove
Raises:
NoSuchItem: when the item doesn't exist
          InvalidETag: If the specified ETag doesn't match the current ETag
"""
path = os.path.join(self.path, name)
if etag is not None:
try:
current_etag = self._get_etag(name)
except KeyError:
raise NoSuchItem(name)
if etag != current_etag:
raise InvalidETag(name, etag, current_etag)
try:
os.unlink(path)
except FileNotFoundError as exc:
raise NoSuchItem(path) from exc
except IsADirectoryError as exc:
raise NoSuchItem(path) from exc
def get_ctag(self):
"""Return the ctag for this store."""
raise NotImplementedError(self.get_ctag)
def subdirectories(self):
"""Returns subdirectories to probe for other stores.
Returns: List of names
"""
ret = []
for name in os.listdir(self.path):
p = os.path.join(self.path, name)
if os.path.isdir(p):
ret.append(name)
return ret
xandikos_0.2.10.orig/xandikos/templates/collection.html 0000644 0000000 0000000 00000001173 13452372634 020233 0 ustar 00
WebDAV Collection - {{ collection.get_displayname() }}
{{ collection.get_displayname() }}
This is a collection.
Subcollections
{% for name, resource in collection.subcollections() %}
- {{ name }}
{% endfor %}
For more information about Xandikos, see https://www.xandikos.org/
or https://github.com/jelmer/xandikos.
xandikos_0.2.10.orig/xandikos/templates/principal.html 0000644 0000000 0000000 00000001343 13452372634 020060 0 ustar 00
WebDAV Principal - {{ principal.get_displayname() }}
{{ principal.get_displayname() }}
This is a user principal. CalDAV/CardDAV clients that support
autodiscovery can use the URL for this page for discovery.
Subcollections
{% for name, resource in principal.subcollections() %}
- {{ name }}
{% endfor %}
For more information about Xandikos, see https://www.xandikos.org/
or https://github.com/jelmer/xandikos.
xandikos_0.2.10.orig/xandikos/templates/root.html 0000644 0000000 0000000 00000001115 13452372634 017057 0 ustar 00
Xandikos WebDAV server
This is a Xandikos WebDAV server.
Principals on this server:
{% for path in principals %}
- {{ path }}
{% endfor %}
For more information about Xandikos, see https://www.xandikos.org/
or https://github.com/jelmer/xandikos.
xandikos_0.2.10.orig/xandikos/tests/__init__.py 0000644 0000000 0000000 00000002241 14476041427 016464 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import unittest
def test_suite():
names = [
"api",
"caldav",
"carddav",
"config",
"icalendar",
"store",
"vcard",
"webdav",
"web",
"wsgi",
]
module_names = ["xandikos.tests.test_" + name for name in names]
loader = unittest.TestLoader()
return loader.loadTestsFromNames(module_names)
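# Usage sketch (invocation is illustrative): the suite assembled above can be
# run with the standard library test runner, since unittest resolves dotted
# names that point at a callable returning a TestSuite:
#
#   python3 -m unittest xandikos.tests.test_suite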
xandikos_0.2.10.orig/xandikos/tests/test_api.py 0000644 0000000 0000000 00000002456 14476041427 016545 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import shutil
import tempfile
import unittest
from ..web import XandikosApp, XandikosBackend
class WebTests(unittest.TestCase):
# When changing this API, please update notes/api-stability.rst and inform
# vdirsyncer, who rely on this API.
def test_backend(self):
path = tempfile.mkdtemp()
try:
backend = XandikosBackend(path)
backend.create_principal("foo", create_defaults=True)
XandikosApp(backend, "foo")
finally:
shutil.rmtree(path)
xandikos_0.2.10.orig/xandikos/tests/test_caldav.py 0000644 0000000 0000000 00000017430 14476041427 017224 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import unittest
from wsgiref.util import setup_testing_defaults
from icalendar.cal import Calendar as ICalendar
from xandikos import caldav
from xandikos.tests import test_webdav
from ..webdav import ET, Property, WebDAVApp
class WebTests(test_webdav.WebTestCase):
def makeApp(self, backend):
app = WebDAVApp(backend)
app.register_methods([caldav.MkcalendarMethod()])
return app
def mkcalendar(self, app, path):
environ = {
"PATH_INFO": path,
"REQUEST_METHOD": "MKCALENDAR",
"SCRIPT_NAME": "",
}
setup_testing_defaults(environ)
_code = []
_headers = []
def start_response(code, headers):
_code.append(code)
_headers.extend(headers)
contents = b"".join(app(environ, start_response))
return _code[0], _headers, contents
def test_mkcalendar_ok(self):
class Backend:
def create_collection(self, relpath):
pass
def get_resource(self, relpath):
return None
class ResourceTypeProperty(Property):
name = "{DAV:}resourcetype"
async def get_value(unused_self, href, resource, ret, environ):
ET.SubElement(ret, "{DAV:}collection")
async def set_value(unused_self, href, resource, ret):
self.assertEqual(
[
"{DAV:}collection",
"{urn:ietf:params:xml:ns:caldav}calendar",
],
[x.tag for x in ret],
)
app = self.makeApp(Backend())
app.register_properties([ResourceTypeProperty()])
code, headers, contents = self.mkcalendar(app, "/resource/bla")
self.assertEqual("201 Created", code)
self.assertEqual(b"", contents)
class ExtractfromCalendarTests(unittest.TestCase):
def setUp(self):
super().setUp()
self.requested = ET.Element("{%s}calendar-data" % caldav.NAMESPACE)
def extractEqual(self, incal_str, outcal_str):
incal = ICalendar.from_ical(incal_str)
expected_outcal = ICalendar.from_ical(outcal_str)
outcal = ICalendar()
outcal = caldav.extract_from_calendar(incal, self.requested)
self.maxDiff = None
self.assertMultiLineEqual(
expected_outcal.to_ical().decode(),
outcal.to_ical().decode(),
ET.tostring(self.requested),
)
def test_comp(self):
comp = ET.SubElement(self.requested, "{%s}comp" % caldav.NAMESPACE)
comp.set("name", "VCALENDAR")
self.extractEqual(
"""\
BEGIN:VCALENDAR
BEGIN:VTODO
CLASS:PUBLIC
COMPLETED:20100829T234417Z
CREATED:20090606T042958Z
END:VTODO
END:VCALENDAR
""",
"""\
BEGIN:VCALENDAR
END:VCALENDAR
""",
)
def test_comp_nested(self):
vcal_comp = ET.SubElement(
self.requested, "{%s}comp" % caldav.NAMESPACE)
vcal_comp.set("name", "VCALENDAR")
vtodo_comp = ET.SubElement(vcal_comp, "{%s}comp" % caldav.NAMESPACE)
vtodo_comp.set("name", "VTODO")
self.extractEqual(
"""\
BEGIN:VCALENDAR
BEGIN:VTODO
COMPLETED:20100829T234417Z
CREATED:20090606T042958Z
END:VTODO
END:VCALENDAR
""",
"""\
BEGIN:VCALENDAR
BEGIN:VTODO
END:VTODO
END:VCALENDAR
""",
)
self.extractEqual(
"""\
BEGIN:VCALENDAR
BEGIN:VEVENT
COMPLETED:20100829T234417Z
CREATED:20090606T042958Z
END:VEVENT
END:VCALENDAR
""",
"""\
BEGIN:VCALENDAR
END:VCALENDAR
""",
)
def test_prop(self):
vcal_comp = ET.SubElement(
self.requested, "{%s}comp" % caldav.NAMESPACE)
vcal_comp.set("name", "VCALENDAR")
vtodo_comp = ET.SubElement(vcal_comp, "{%s}comp" % caldav.NAMESPACE)
vtodo_comp.set("name", "VTODO")
completed_prop = ET.SubElement(
vtodo_comp, "{%s}prop" % caldav.NAMESPACE)
completed_prop.set("name", "COMPLETED")
self.extractEqual(
"""\
BEGIN:VCALENDAR
BEGIN:VTODO
COMPLETED:20100829T234417Z
CREATED:20090606T042958Z
END:VTODO
END:VCALENDAR
""",
"""\
BEGIN:VCALENDAR
BEGIN:VTODO
COMPLETED:20100829T234417Z
END:VTODO
END:VCALENDAR
""",
)
self.extractEqual(
"""\
BEGIN:VCALENDAR
BEGIN:VEVENT
CREATED:20090606T042958Z
END:VEVENT
END:VCALENDAR
""",
"""\
BEGIN:VCALENDAR
END:VCALENDAR
""",
)
def test_allprop(self):
vcal_comp = ET.SubElement(
self.requested, "{%s}comp" % caldav.NAMESPACE)
vcal_comp.set("name", "VCALENDAR")
vtodo_comp = ET.SubElement(vcal_comp, "{%s}comp" % caldav.NAMESPACE)
vtodo_comp.set("name", "VTODO")
ET.SubElement(vtodo_comp, "{%s}allprop" % caldav.NAMESPACE)
self.extractEqual(
"""\
BEGIN:VCALENDAR
BEGIN:VTODO
COMPLETED:20100829T234417Z
CREATED:20090606T042958Z
END:VTODO
END:VCALENDAR
""",
"""\
BEGIN:VCALENDAR
BEGIN:VTODO
COMPLETED:20100829T234417Z
CREATED:20090606T042958Z
END:VTODO
END:VCALENDAR
""",
)
def test_allcomp(self):
vcal_comp = ET.SubElement(
self.requested, "{%s}comp" % caldav.NAMESPACE)
vcal_comp.set("name", "VCALENDAR")
ET.SubElement(vcal_comp, "{%s}allcomp" % caldav.NAMESPACE)
self.extractEqual(
"""\
BEGIN:VCALENDAR
BEGIN:VTODO
COMPLETED:20100829T234417Z
CREATED:20090606T042958Z
END:VTODO
END:VCALENDAR
""",
"""\
BEGIN:VCALENDAR
BEGIN:VTODO
END:VTODO
END:VCALENDAR
""",
)
def test_expand(self):
expand = ET.SubElement(self.requested, "{%s}expand" % caldav.NAMESPACE)
expand.set("start", "20060103T000000Z")
expand.set("end", "20060105T000000Z")
self.extractEqual(
"""\
BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//Example Corp.//CalDAV Client//EN
BEGIN:VTIMEZONE
LAST-MODIFIED:20040110T032845Z
TZID:US/Eastern
BEGIN:DAYLIGHT
DTSTART:20000404T020000
RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=4
TZNAME:EDT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:STANDARD
DTSTART:20001026T020000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
TZNAME:EST
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
END:STANDARD
END:VTIMEZONE
BEGIN:VEVENT
DTSTAMP:20060206T001121Z
DTSTART;TZID=US/Eastern:20060102T120000
DURATION:PT1H
RRULE:FREQ=DAILY;COUNT=5
SUMMARY:Event #2
UID:00959BC664CA650E933C892C@example.com
END:VEVENT
BEGIN:VEVENT
DTSTAMP:20060206T001121Z
DTSTART;TZID=US/Eastern:20060104T140000
DURATION:PT1H
RECURRENCE-ID;TZID=US/Eastern:20060104T120000
SUMMARY:Event #2 bis
UID:00959BC664CA650E933C892C@example.com
END:VEVENT
END:VCALENDAR
""",
"""\
BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//Example Corp.//CalDAV Client//EN
BEGIN:VEVENT
DTSTAMP:20060206T001121Z
DTSTART:20060103T170000
DURATION:PT1H
RECURRENCE-ID:20060103T170000
SUMMARY:Event #2
UID:00959BC664CA650E933C892C@example.com
END:VEVENT
BEGIN:VEVENT
DTSTAMP:20060206T001121Z
DTSTART:20060104T190000
DURATION:PT1H
RECURRENCE-ID:20060104T170000
SUMMARY:Event #2 bis
UID:00959BC664CA650E933C892C@example.com
END:VEVENT
END:VCALENDAR
""",
)
xandikos_0.2.10.orig/xandikos/tests/test_carddav.py 0000644 0000000 0000000 00000003172 14377251311 017367 0 ustar 00 # Xandikos
# Copyright (C) 2022 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import asyncio
import unittest
from ..carddav import NAMESPACE, apply_filter
from ..vcard import VCardFile
from ..webdav import ET
from .test_vcard import EXAMPLE_VCARD1
class TestApplyFilter(unittest.TestCase):
async def get_file(self):
return VCardFile([EXAMPLE_VCARD1], "text/vcard")
def get_content_type(self):
return "text/vcard"
def test_apply_filter(self):
el = ET.Element("{%s}filter" % NAMESPACE)
el.set("test", "anyof")
pf = ET.SubElement(el, "{%s}prop-filter" % NAMESPACE)
pf.set("name", "FN")
tm = ET.SubElement(pf, "{%s}text-match" % NAMESPACE)
tm.set("collation", "i;unicode-casemap")
tm.set("match-type", "contains")
tm.text = "Jeffrey"
loop = asyncio.get_event_loop()
self.assertTrue(loop.run_until_complete(apply_filter(el, self)))
xandikos_0.2.10.orig/xandikos/tests/test_config.py 0000644 0000000 0000000 00000011147 14476041427 017236 0 ustar 00 # Xandikos
# Copyright (C) 2018 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Tests for xandikos.store.config."""
from io import StringIO
from unittest import TestCase
import dulwich.repo
from ..store.config import FileBasedCollectionMetadata
from ..store.git import RepoCollectionMetadata
class FileBasedCollectionMetadataTests(TestCase):
def test_get_color(self):
f = StringIO(
"""\
[DEFAULT]
color = #ffffff
"""
)
cc = FileBasedCollectionMetadata.from_file(f)
self.assertEqual("#ffffff", cc.get_color())
def test_get_color_missing(self):
f = StringIO("")
cc = FileBasedCollectionMetadata.from_file(f)
self.assertRaises(KeyError, cc.get_color)
def test_get_comment(self):
f = StringIO(
"""\
[DEFAULT]
comment = this is a comment
"""
)
cc = FileBasedCollectionMetadata.from_file(f)
self.assertEqual("this is a comment", cc.get_comment())
def test_get_comment_missing(self):
f = StringIO("")
cc = FileBasedCollectionMetadata.from_file(f)
self.assertRaises(KeyError, cc.get_comment)
def test_get_description(self):
f = StringIO(
"""\
[DEFAULT]
description = this is a description
"""
)
cc = FileBasedCollectionMetadata.from_file(f)
self.assertEqual("this is a description", cc.get_description())
def test_get_description_missing(self):
f = StringIO("")
cc = FileBasedCollectionMetadata.from_file(f)
self.assertRaises(KeyError, cc.get_description)
def test_get_displayname(self):
f = StringIO(
"""\
[DEFAULT]
displayname = DISPLAY-NAME
"""
)
cc = FileBasedCollectionMetadata.from_file(f)
self.assertEqual("DISPLAY-NAME", cc.get_displayname())
def test_get_displayname_missing(self):
f = StringIO("")
cc = FileBasedCollectionMetadata.from_file(f)
self.assertRaises(KeyError, cc.get_displayname)
class MetadataTests:
def test_color(self):
self.assertRaises(KeyError, self._config.get_color)
self._config.set_color("#ffffff")
self.assertEqual("#ffffff", self._config.get_color())
self._config.set_color(None)
self.assertRaises(KeyError, self._config.get_color)
def test_comment(self):
self.assertRaises(KeyError, self._config.get_comment)
self._config.set_comment("this is a comment")
self.assertEqual("this is a comment", self._config.get_comment())
self._config.set_comment(None)
self.assertRaises(KeyError, self._config.get_comment)
def test_displayname(self):
self.assertRaises(KeyError, self._config.get_displayname)
self._config.set_displayname("DiSpLaYName")
self.assertEqual("DiSpLaYName", self._config.get_displayname())
self._config.set_displayname(None)
self.assertRaises(KeyError, self._config.get_displayname)
def test_description(self):
self.assertRaises(KeyError, self._config.get_description)
self._config.set_description("this is a description")
self.assertEqual(
"this is a description", self._config.get_description())
self._config.set_description(None)
self.assertRaises(KeyError, self._config.get_description)
def test_order(self):
self.assertRaises(KeyError, self._config.get_order)
self._config.set_order("this is a order")
self.assertEqual("this is a order", self._config.get_order())
self._config.set_order(None)
self.assertRaises(KeyError, self._config.get_order)
class FileMetadataTests(TestCase, MetadataTests):
def setUp(self):
super().setUp()
self._config = FileBasedCollectionMetadata()
class RepoMetadataTests(TestCase, MetadataTests):
def setUp(self):
super().setUp()
self._repo = dulwich.repo.MemoryRepo()
self._config = RepoCollectionMetadata(self._repo)
xandikos_0.2.10.orig/xandikos/tests/test_icalendar.py 0000644 0000000 0000000 00000043767 14476041427 017730 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij , et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Tests for xandikos.icalendar."""
import unittest
from datetime import datetime
import pytz
from icalendar.cal import Event
from icalendar.prop import vCategory, vText
from xandikos import collation as _mod_collation
from xandikos.store import InvalidFileContents
from ..icalendar import (CalendarFilter, ICalendarFile, MissingProperty,
TextMatcher, apply_time_range_vevent, as_tz_aware_ts,
validate_calendar)
EXAMPLE_VCALENDAR1 = b"""\
BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//bitfire web engineering//DAVdroid 0.8.0 (ical4j 1.0.x)//EN
BEGIN:VTODO
CREATED:20150314T223512Z
DTSTAMP:20150527T221952Z
LAST-MODIFIED:20150314T223512Z
STATUS:NEEDS-ACTION
SUMMARY:do something
CATEGORIES:home
UID:bdc22720-b9e1-42c9-89c2-a85405d8fbff
END:VTODO
END:VCALENDAR
"""
EXAMPLE_VCALENDAR_WITH_PARAM = b"""\
BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//bitfire web engineering//DAVdroid 0.8.0 (ical4j 1.0.x)//EN
BEGIN:VTODO
CREATED;TZID=America/Denver:20150314T223512Z
DTSTAMP:20150527T221952Z
LAST-MODIFIED:20150314T223512Z
STATUS:NEEDS-ACTION
SUMMARY:do something
UID:bdc22720-b9e1-42c9-89c2-a85405d8fbff
END:VTODO
END:VCALENDAR
"""
EXAMPLE_VCALENDAR_NO_UID = b"""\
BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//bitfire web engineering//DAVdroid 0.8.0 (ical4j 1.0.x)//EN
BEGIN:VTODO
CREATED:20120314T223512Z
DTSTAMP:20130527T221952Z
LAST-MODIFIED:20150314T223512Z
STATUS:NEEDS-ACTION
SUMMARY:do something without uid
END:VTODO
END:VCALENDAR
"""
EXAMPLE_VCALENDAR_INVALID_CHAR = b"""\
BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//bitfire web engineering//DAVdroid 0.8.0 (ical4j 1.0.x)//EN
BEGIN:VTODO
CREATED:20150314T223512Z
DTSTAMP:20150527T221952Z
LAST-MODIFIED:20150314T223512Z
STATUS:NEEDS-ACTION
SUMMARY:do something
ID:bdc22720-b9e1-42c9-89c2-a85405d8fbff
END:VTODO
END:VCALENDAR
"""
class ExtractCalendarUIDTests(unittest.TestCase):
def test_extract_str(self):
fi = ICalendarFile([EXAMPLE_VCALENDAR1], "text/calendar")
self.assertEqual("bdc22720-b9e1-42c9-89c2-a85405d8fbff", fi.get_uid())
fi.validate()
def test_extract_no_uid(self):
fi = ICalendarFile([EXAMPLE_VCALENDAR_NO_UID], "text/calendar")
fi.validate()
self.assertEqual(
["Missing required field UID"],
list(validate_calendar(fi.calendar, strict=True)),
)
self.assertEqual(
[], list(validate_calendar(fi.calendar, strict=False)))
self.assertRaises(KeyError, fi.get_uid)
def test_invalid_character(self):
fi = ICalendarFile([EXAMPLE_VCALENDAR_INVALID_CHAR], "text/calendar")
self.assertRaises(InvalidFileContents, fi.validate)
self.assertEqual(
["Invalid character b'\\\\x0c' in field SUMMARY"],
list(validate_calendar(fi.calendar, strict=False)),
)
class CalendarFilterTests(unittest.TestCase):
def setUp(self):
self.cal = ICalendarFile([EXAMPLE_VCALENDAR1], "text/calendar")
def test_simple_comp_filter(self):
filter = CalendarFilter(None)
filter.filter_subcomponent("VCALENDAR").filter_subcomponent("VEVENT")
self.assertEqual(filter.index_keys(), [["C=VCALENDAR/C=VEVENT"]])
self.assertEqual(
self.cal.get_indexes(
["C=VCALENDAR/C=VEVENT", "C=VCALENDAR/C=VTODO"]),
{"C=VCALENDAR/C=VEVENT": [], "C=VCALENDAR/C=VTODO": [True]},
)
self.assertFalse(
filter.check_from_indexes(
"file",
{"C=VCALENDAR/C=VEVENT": [], "C=VCALENDAR/C=VTODO": [True]},
)
)
self.assertFalse(filter.check("file", self.cal))
filter = CalendarFilter(None)
filter.filter_subcomponent("VCALENDAR").filter_subcomponent("VTODO")
self.assertTrue(filter.check("file", self.cal))
self.assertTrue(
filter.check_from_indexes(
"file",
{"C=VCALENDAR/C=VEVENT": [], "C=VCALENDAR/C=VTODO": [True]},
)
)
def test_simple_comp_missing_filter(self):
filter = CalendarFilter(None)
filter.filter_subcomponent("VCALENDAR").filter_subcomponent(
"VTODO", is_not_defined=True
)
self.assertEqual(
filter.index_keys(), [["C=VCALENDAR/C=VTODO"], ["C=VCALENDAR"]]
)
self.assertFalse(
filter.check_from_indexes(
"file",
{
"C=VCALENDAR": [True],
"C=VCALENDAR/C=VEVENT": [],
"C=VCALENDAR/C=VTODO": [True],
},
)
)
self.assertFalse(filter.check("file", self.cal))
filter = CalendarFilter(None)
filter.filter_subcomponent("VCALENDAR").filter_subcomponent(
"VEVENT", is_not_defined=True
)
self.assertTrue(filter.check("file", self.cal))
self.assertTrue(
filter.check_from_indexes(
"file",
{
"C=VCALENDAR": [True],
"C=VCALENDAR/C=VEVENT": [],
"C=VCALENDAR/C=VTODO": [True],
},
)
)
def test_prop_presence_filter(self):
filter = CalendarFilter(None)
filter.filter_subcomponent("VCALENDAR").filter_subcomponent(
"VTODO"
).filter_property("X-SUMMARY")
self.assertEqual(
filter.index_keys(), [["C=VCALENDAR/C=VTODO/P=X-SUMMARY"]])
self.assertFalse(filter.check_from_indexes(
"file", {"C=VCALENDAR/C=VTODO/P=X-SUMMARY": []})
)
self.assertFalse(filter.check("file", self.cal))
filter = CalendarFilter(None)
filter.filter_subcomponent("VCALENDAR").filter_subcomponent(
"VTODO"
).filter_property("SUMMARY")
self.assertTrue(
filter.check_from_indexes(
"file", {"C=VCALENDAR/C=VTODO/P=SUMMARY": [b"do something"]}
)
)
self.assertTrue(filter.check("file", self.cal))
def test_prop_explicitly_missing_filter(self):
filter = CalendarFilter(None)
filter.filter_subcomponent("VCALENDAR").filter_subcomponent(
"VEVENT"
).filter_property("X-SUMMARY", is_not_defined=True)
self.assertEqual(
filter.index_keys(),
[["C=VCALENDAR/C=VEVENT/P=X-SUMMARY"], ["C=VCALENDAR/C=VEVENT"]],
)
self.assertFalse(
filter.check_from_indexes(
"file",
{
"C=VCALENDAR/C=VEVENT/P=X-SUMMARY": [],
"C=VCALENDAR/C=VEVENT": [],
},
)
)
self.assertFalse(filter.check("file", self.cal))
filter = CalendarFilter(None)
filter.filter_subcomponent("VCALENDAR").filter_subcomponent(
"VTODO"
).filter_property("X-SUMMARY", is_not_defined=True)
self.assertTrue(
filter.check_from_indexes(
"file",
{
"C=VCALENDAR/C=VTODO/P=X-SUMMARY": [],
"C=VCALENDAR/C=VTODO": [True],
},
)
)
self.assertTrue(filter.check("file", self.cal))
def test_prop_text_match(self):
filter = CalendarFilter(None)
f = filter.filter_subcomponent("VCALENDAR")
f = f.filter_subcomponent("VTODO")
f = f.filter_property("SUMMARY")
f.filter_text_match("do something different")
self.assertEqual(
filter.index_keys(), [["C=VCALENDAR/C=VTODO/P=SUMMARY"]])
self.assertFalse(
filter.check_from_indexes(
"file", {"C=VCALENDAR/C=VTODO/P=SUMMARY": [b"do something"]}
)
)
self.assertFalse(filter.check("file", self.cal))
filter = CalendarFilter(None)
filter.filter_subcomponent("VCALENDAR").filter_subcomponent(
"VTODO"
).filter_property("SUMMARY").filter_text_match("do something")
self.assertTrue(
filter.check_from_indexes(
"file", {"C=VCALENDAR/C=VTODO/P=SUMMARY": [b"do something"]}
)
)
self.assertTrue(filter.check("file", self.cal))
def test_prop_text_match_category(self):
filter = CalendarFilter(None)
f = filter.filter_subcomponent("VCALENDAR")
f = f.filter_subcomponent("VTODO")
f = f.filter_property("CATEGORIES")
f.filter_text_match("work")
self.assertEqual(
self.cal.get_indexes(["C=VCALENDAR/C=VTODO/P=CATEGORIES"]),
{"C=VCALENDAR/C=VTODO/P=CATEGORIES": [b'home']},
)
self.assertEqual(
filter.index_keys(), [["C=VCALENDAR/C=VTODO/P=CATEGORIES"]])
self.assertFalse(
filter.check_from_indexes(
"file", {"C=VCALENDAR/C=VTODO/P=CATEGORIES": [b"home"]}
)
)
self.assertFalse(filter.check("file", self.cal))
filter = CalendarFilter(None)
filter.filter_subcomponent("VCALENDAR").filter_subcomponent(
"VTODO"
).filter_property("CATEGORIES").filter_text_match("home")
self.assertTrue(
filter.check_from_indexes(
"file", {"C=VCALENDAR/C=VTODO/P=CATEGORIES": [b"home"]}
)
)
self.assertTrue(filter.check("file", self.cal))
def test_param_text_match(self):
self.cal = ICalendarFile(
[EXAMPLE_VCALENDAR_WITH_PARAM], "text/calendar")
filter = CalendarFilter(None)
f = filter.filter_subcomponent("VCALENDAR")
f = f.filter_subcomponent("VTODO")
f = f.filter_property("CREATED")
f = f.filter_parameter("TZID")
f.filter_text_match("America/Blah")
self.assertEqual(
filter.index_keys(),
[
["C=VCALENDAR/C=VTODO/P=CREATED/A=TZID"],
["C=VCALENDAR/C=VTODO/P=CREATED"],
],
)
self.assertFalse(
filter.check_from_indexes(
"file",
{"C=VCALENDAR/C=VTODO/P=CREATED/A=TZID": [b"America/Denver"]},
)
)
self.assertFalse(filter.check("file", self.cal))
filter = CalendarFilter(None)
f = filter.filter_subcomponent("VCALENDAR")
f = f.filter_subcomponent("VTODO")
f = f.filter_property("CREATED")
f = f.filter_parameter("TZID")
f.filter_text_match("America/Denver")
self.assertTrue(
filter.check_from_indexes(
"file",
{"C=VCALENDAR/C=VTODO/P=CREATED/A=TZID": [b"America/Denver"]},
)
)
self.assertTrue(filter.check("file", self.cal))
def _tzify(self, dt):
return as_tz_aware_ts(dt, pytz.utc)
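    # Time-range filters on a property parse the indexed value (e.g.
    # b"20150314T223512Z") as a timestamp in the filter's default timezone
    # and test it against the requested start/end window.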
def test_prop_apply_time_range(self):
filter = CalendarFilter(pytz.utc)
filter.filter_subcomponent("VCALENDAR").filter_subcomponent(
"VTODO"
).filter_property("CREATED").filter_time_range(
self._tzify(datetime(2019, 3, 10, 22, 35, 12)),
self._tzify(datetime(2019, 3, 18, 22, 35, 12)),
)
self.assertEqual(
filter.index_keys(), [["C=VCALENDAR/C=VTODO/P=CREATED"]])
self.assertFalse(
filter.check_from_indexes(
"file",
{"C=VCALENDAR/C=VTODO/P=CREATED": [b"20150314T223512Z"]}
)
)
self.assertFalse(
filter.check_from_indexes(
"file",
{"C=VCALENDAR/C=VTODO/P=CREATED": [b"20150314"]}
)
)
self.assertFalse(filter.check("file", self.cal))
filter = CalendarFilter(self._tzify)
filter.filter_subcomponent("VCALENDAR").filter_subcomponent(
"VTODO"
).filter_property("CREATED").filter_time_range(
self._tzify(datetime(2015, 3, 10, 22, 35, 12)),
self._tzify(datetime(2015, 3, 18, 22, 35, 12)),
)
self.assertTrue(
filter.check_from_indexes(
"file",
{"C=VCALENDAR/C=VTODO/P=CREATED": [b"20150314T223512Z"]})
)
self.assertTrue(filter.check("file", self.cal))
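    # A time-range filter on a whole VTODO has to consider several date
    # properties (DTSTART, DUE, DURATION, CREATED, COMPLETED), which is why
    # index_keys() lists all of them plus the component key itself.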
def test_comp_apply_time_range(self):
self.assertEqual(
self.cal.get_indexes(["C=VCALENDAR/C=VTODO/P=CREATED"]),
{'C=VCALENDAR/C=VTODO/P=CREATED': [b'20150314T223512Z']})
filter = CalendarFilter(pytz.utc)
filter.filter_subcomponent("VCALENDAR").filter_subcomponent(
"VTODO"
).filter_time_range(
self._tzify(datetime(2015, 3, 3, 22, 35, 12)),
self._tzify(datetime(2015, 3, 10, 22, 35, 12)),
)
self.assertEqual(
filter.index_keys(),
[
["C=VCALENDAR/C=VTODO/P=DTSTART"],
["C=VCALENDAR/C=VTODO/P=DUE"],
["C=VCALENDAR/C=VTODO/P=DURATION"],
["C=VCALENDAR/C=VTODO/P=CREATED"],
["C=VCALENDAR/C=VTODO/P=COMPLETED"],
["C=VCALENDAR/C=VTODO"],
],
)
self.assertFalse(
filter.check_from_indexes(
"file",
{
"C=VCALENDAR/C=VTODO/P=CREATED": [b"20150314T223512Z"],
"C=VCALENDAR/C=VTODO": [True],
"C=VCALENDAR/C=VTODO/P=DUE": [],
"C=VCALENDAR/C=VTODO/P=DURATION": [],
"C=VCALENDAR/C=VTODO/P=COMPLETED": [],
"C=VCALENDAR/C=VTODO/P=DTSTART": [],
},
)
)
self.assertFalse(
filter.check_from_indexes(
"file",
{
"C=VCALENDAR/C=VTODO/P=CREATED": [b"20150314"],
"C=VCALENDAR/C=VTODO": [True],
"C=VCALENDAR/C=VTODO/P=DUE": [],
"C=VCALENDAR/C=VTODO/P=DURATION": [],
"C=VCALENDAR/C=VTODO/P=COMPLETED": [],
"C=VCALENDAR/C=VTODO/P=DTSTART": [],
},
)
)
self.assertFalse(filter.check("file", self.cal))
filter = CalendarFilter(pytz.utc)
filter.filter_subcomponent("VCALENDAR").filter_subcomponent(
"VTODO"
).filter_time_range(
self._tzify(datetime(2015, 3, 10, 22, 35, 12)),
self._tzify(datetime(2015, 3, 18, 22, 35, 12)),
)
self.assertTrue(
filter.check_from_indexes(
"file",
{
"C=VCALENDAR/C=VTODO/P=CREATED": [b"20150314T223512Z"],
"C=VCALENDAR/C=VTODO": [True],
"C=VCALENDAR/C=VTODO/P=DUE": [],
"C=VCALENDAR/C=VTODO/P=DURATION": [],
"C=VCALENDAR/C=VTODO/P=COMPLETED": [],
"C=VCALENDAR/C=VTODO/P=DTSTART": [],
},
)
)
self.assertTrue(filter.check("file", self.cal))
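# TextMatcher applies an RFC 4790 collation to a single property value.  The
# default collation folds ASCII case ("i;ascii-casemap"), while "i;octet"
# compares bytes exactly.  Illustrative behaviour, mirroring the assertions
# below:
#
#     TextMatcher("summary", "foobar").match(vText("FOOBAR"))        # True
#     TextMatcher("summary", "foobar",
#                 collation="i;octet").match(vText("FOOBAR"))        # False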
class TextMatchTest(unittest.TestCase):
def test_default_collation(self):
tm = TextMatcher("summary", "foobar")
self.assertTrue(tm.match(vText("FOOBAR")))
self.assertTrue(tm.match(vText("foobar")))
self.assertFalse(tm.match(vText("fobar")))
self.assertTrue(tm.match_indexes({None: [b'foobar']}))
self.assertTrue(tm.match_indexes({None: [b'FOOBAR']}))
self.assertFalse(tm.match_indexes({None: [b'fobar']}))
def test_casecmp_collation(self):
tm = TextMatcher("summary", "foobar", collation="i;ascii-casemap")
self.assertTrue(tm.match(vText("FOOBAR")))
self.assertTrue(tm.match(vText("foobar")))
self.assertFalse(tm.match(vText("fobar")))
self.assertTrue(tm.match_indexes({None: [b'foobar']}))
self.assertTrue(tm.match_indexes({None: [b'FOOBAR']}))
self.assertFalse(tm.match_indexes({None: [b'fobar']}))
def test_cmp_collation(self):
tm = TextMatcher("summary", "foobar", collation="i;octet")
self.assertFalse(tm.match(vText("FOOBAR")))
self.assertTrue(tm.match(vText("foobar")))
self.assertFalse(tm.match(vText("fobar")))
self.assertFalse(tm.match_indexes({None: [b'FOOBAR']}))
self.assertTrue(tm.match_indexes({None: [b'foobar']}))
self.assertFalse(tm.match_indexes({None: [b'fobar']}))
def test_category(self):
tm = TextMatcher("categories", "foobar")
self.assertTrue(tm.match(vCategory(["FOOBAR", "blah"])))
self.assertTrue(tm.match(vCategory(["foobar"])))
self.assertFalse(tm.match(vCategory(["fobar"])))
self.assertTrue(tm.match_indexes({None: [b'foobar,blah']}))
self.assertFalse(tm.match_indexes({None: [b'foobarblah']}))
def test_unknown_type(self):
tm = TextMatcher("dontknow", "foobar")
self.assertFalse(tm.match(object()))
self.assertFalse(tm.match_indexes({None: [b'foobarblah']}))
def test_unknown_collation(self):
self.assertRaises(
_mod_collation.UnknownCollation,
TextMatcher,
"summary",
"foobar",
collation="i;blah",
)
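# apply_time_range_vevent needs at least DTSTART to place an event on the
# timeline; the test below checks that a bare VEVENT raises MissingProperty
# rather than silently matching.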
class ApplyTimeRangeVeventTests(unittest.TestCase):
def _tzify(self, dt):
return as_tz_aware_ts(dt, "UTC")
def test_missing_dtstart(self):
ev = Event()
self.assertRaises(
MissingProperty,
apply_time_range_vevent,
datetime.utcnow(),
datetime.utcnow(),
ev,
self._tzify,
)
xandikos_0.2.10.orig/xandikos/tests/test_store.py 0000644 0000000 0000000 00000037656 14476041427 017142 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij <jelmer@jelmer.uk>, et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import logging
import os
import shutil
import stat
import tempfile
import unittest
from dulwich.objects import Blob, Commit, Tree
from dulwich.repo import Repo
from xandikos.store import (DuplicateUidError, File, Filter, InvalidETag,
NoSuchItem, Store)
from ..icalendar import ICalendarFile
from ..store.git import BareGitStore, GitStore, TreeGitStore
from ..store.vdir import VdirStore
EXAMPLE_VCALENDAR1 = b"""\
BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//bitfire web engineering//DAVdroid 0.8.0 (ical4j 1.0.x)//EN
BEGIN:VTODO
CREATED:20150314T223512Z
DTSTAMP:20150527T221952Z
LAST-MODIFIED:20150314T223512Z
STATUS:NEEDS-ACTION
SUMMARY:do something
UID:bdc22720-b9e1-42c9-89c2-a85405d8fbff
END:VTODO
END:VCALENDAR
"""
EXAMPLE_VCALENDAR1_NORMALIZED = b"""\
BEGIN:VCALENDAR\r
VERSION:2.0\r
PRODID:-//bitfire web engineering//DAVdroid 0.8.0 (ical4j 1.0.x)//EN\r
BEGIN:VTODO\r
CREATED:20150314T223512Z\r
DTSTAMP:20150527T221952Z\r
LAST-MODIFIED:20150314T223512Z\r
STATUS:NEEDS-ACTION\r
SUMMARY:do something\r
UID:bdc22720-b9e1-42c9-89c2-a85405d8fbff\r
END:VTODO\r
END:VCALENDAR\r
"""
EXAMPLE_VCALENDAR2 = b"""\
BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//bitfire web engineering//DAVdroid 0.8.0 (ical4j 1.0.x)//EN
BEGIN:VTODO
CREATED:20120314T223512Z
DTSTAMP:20130527T221952Z
LAST-MODIFIED:20150314T223512Z
STATUS:NEEDS-ACTION
SUMMARY:do something else
UID:bdc22764-b9e1-42c9-89c2-a85405d8fbff
END:VTODO
END:VCALENDAR
"""
EXAMPLE_VCALENDAR2_NORMALIZED = b"""\
BEGIN:VCALENDAR\r
VERSION:2.0\r
PRODID:-//bitfire web engineering//DAVdroid 0.8.0 (ical4j 1.0.x)//EN\r
BEGIN:VTODO\r
CREATED:20120314T223512Z\r
DTSTAMP:20130527T221952Z\r
LAST-MODIFIED:20150314T223512Z\r
STATUS:NEEDS-ACTION\r
SUMMARY:do something else\r
UID:bdc22764-b9e1-42c9-89c2-a85405d8fbff\r
END:VTODO\r
END:VCALENDAR\r
"""
EXAMPLE_VCALENDAR_NO_UID = b"""\
BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//bitfire web engineering//DAVdroid 0.8.0 (ical4j 1.0.x)//EN
BEGIN:VTODO
CREATED:20120314T223512Z
DTSTAMP:20130527T221952Z
LAST-MODIFIED:20150314T223512Z
STATUS:NEEDS-ACTION
SUMMARY:do something without uid
END:VTODO
END:VCALENDAR
"""
class BaseStoreTest:
def test_import_one(self):
gc = self.create_store()
(name, etag) = gc.import_one(
"foo.ics", "text/calendar", [EXAMPLE_VCALENDAR1])
self.assertIsInstance(etag, str)
self.assertEqual(
[("foo.ics", "text/calendar", etag)], list(gc.iter_with_etag())
)
def test_with_filter(self):
gc = self.create_store()
(name1, etag1) = gc.import_one(
"foo.ics", "text/calendar", [EXAMPLE_VCALENDAR1])
(name2, etag2) = gc.import_one(
"bar.ics", "text/calendar", [EXAMPLE_VCALENDAR2])
class DummyFilter(Filter):
content_type = "text/calendar"
def __init__(self, text) -> None:
self.text = text
def check(self, name, resource):
return self.text in b"".join(resource.content)
self.assertEqual(
2,
len(list(gc.iter_with_filter(filter=DummyFilter(b"do something"))))
)
[(ret_name, ret_file, ret_etag)] = list(
gc.iter_with_filter(filter=DummyFilter(b"do something else"))
)
self.assertEqual(ret_name, name2)
self.assertEqual(ret_etag, etag2)
self.assertEqual(ret_file.content_type, "text/calendar")
self.assertEqual(
b"".join(ret_file.content),
EXAMPLE_VCALENDAR2.replace(b"\n", b"\r\n"),
)
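    # A Filter that wants to be answered from the store's index implements
    # index_keys() (which keys to maintain) and check_from_indexes() (decide
    # from the indexed values), with check() as the full-parse fallback; the
    # DummyFilter below sketches that contract.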
def test_get_by_index(self):
gc = self.create_store()
(name1, etag1) = gc.import_one(
"foo.ics", "text/calendar", [EXAMPLE_VCALENDAR1])
(name2, etag2) = gc.import_one(
"bar.ics", "text/calendar", [EXAMPLE_VCALENDAR2])
(name3, etag3) = gc.import_one(
"bar.txt", "text/plain", [b"Not a calendar file."]
)
self.assertEqual({}, dict(gc.index_manager.desired))
filtertext = "C=VCALENDAR/C=VTODO/P=SUMMARY"
class DummyFilter(Filter):
content_type = "text/calendar"
def __init__(self, text) -> None:
self.text = text
def index_keys(self):
return [[filtertext]]
def check_from_indexes(self, name, index_values):
return any(self.text in v for v in index_values[filtertext])
def check(self, name, resource):
return self.text in b"".join(resource.content)
self.assertEqual(
2,
len(list(gc.iter_with_filter(filter=DummyFilter(b"do something"))))
)
[(ret_name, ret_file, ret_etag)] = list(
gc.iter_with_filter(filter=DummyFilter(b"do something else"))
)
self.assertEqual({filtertext: 2}, dict(gc.index_manager.desired))
# Force index
gc.index.reset([filtertext])
[(ret_name, ret_file, ret_etag)] = list(
gc.iter_with_filter(filter=DummyFilter(b"do something else"))
)
self.assertEqual({filtertext: 2}, dict(gc.index_manager.desired))
self.assertEqual(ret_name, name2)
self.assertEqual(ret_etag, etag2)
self.assertEqual(ret_file.content_type, "text/calendar")
self.assertEqual(
b"".join(ret_file.content),
EXAMPLE_VCALENDAR2.replace(b"\n", b"\r\n"),
)
def test_import_one_duplicate_uid(self):
gc = self.create_store()
(name, etag) = gc.import_one(
"foo.ics", "text/calendar", [EXAMPLE_VCALENDAR1])
self.assertRaises(
DuplicateUidError,
gc.import_one,
"bar.ics",
"text/calendar",
[EXAMPLE_VCALENDAR1],
)
def test_import_one_duplicate_name(self):
gc = self.create_store()
(name, etag) = gc.import_one(
"foo.ics", "text/calendar", [EXAMPLE_VCALENDAR1])
(name, etag) = gc.import_one(
"foo.ics", "text/calendar", [EXAMPLE_VCALENDAR2], replace_etag=etag
)
(name, etag) = gc.import_one(
"foo.ics", "text/calendar", [EXAMPLE_VCALENDAR1])
self.assertRaises(
InvalidETag,
gc.import_one,
"foo.ics",
"text/calendar",
[EXAMPLE_VCALENDAR2],
replace_etag="invalidetag",
)
def test_get_raw(self):
gc = self.create_store()
(name1, etag1) = gc.import_one(
"foo.ics", "text/calendar", [EXAMPLE_VCALENDAR1])
(name2, etag2) = gc.import_one(
"bar.ics", "text/calendar", [EXAMPLE_VCALENDAR2])
self.assertEqual(
EXAMPLE_VCALENDAR1_NORMALIZED,
b"".join(gc._get_raw("foo.ics", etag1)),
)
self.assertEqual(
EXAMPLE_VCALENDAR2_NORMALIZED,
b"".join(gc._get_raw("bar.ics", etag2)),
)
self.assertRaises(KeyError, gc._get_raw, "missing.ics", "01" * 20)
def test_get_file(self):
gc = self.create_store()
(name1, etag1) = gc.import_one(
"foo.ics", "text/calendar", [EXAMPLE_VCALENDAR1])
(name1, etag2) = gc.import_one(
"bar.ics", "text/calendar", [EXAMPLE_VCALENDAR2])
f1 = gc.get_file("foo.ics", "text/calendar", etag1)
self.assertEqual(EXAMPLE_VCALENDAR1_NORMALIZED, b"".join(f1.content))
self.assertEqual("text/calendar", f1.content_type)
f2 = gc.get_file("bar.ics", "text/calendar", etag2)
self.assertEqual(EXAMPLE_VCALENDAR2_NORMALIZED, b"".join(f2.content))
self.assertEqual("text/calendar", f2.content_type)
self.assertRaises(KeyError, gc._get_raw, "missing.ics", "01" * 20)
def test_delete_one(self):
gc = self.create_store()
self.assertEqual([], list(gc.iter_with_etag()))
(name1, etag1) = gc.import_one(
"foo.ics", "text/calendar", [EXAMPLE_VCALENDAR1])
self.assertEqual(
[("foo.ics", "text/calendar", etag1)], list(gc.iter_with_etag())
)
gc.delete_one("foo.ics")
self.assertEqual([], list(gc.iter_with_etag()))
def test_delete_one_with_etag(self):
gc = self.create_store()
self.assertEqual([], list(gc.iter_with_etag()))
(name1, etag1) = gc.import_one(
"foo.ics", "text/calendar", [EXAMPLE_VCALENDAR1])
self.assertEqual(
[("foo.ics", "text/calendar", etag1)], list(gc.iter_with_etag())
)
gc.delete_one("foo.ics", etag=etag1)
self.assertEqual([], list(gc.iter_with_etag()))
def test_delete_one_nonexistant(self):
gc = self.create_store()
self.assertRaises(NoSuchItem, gc.delete_one, "foo.ics")
def test_delete_one_invalid_etag(self):
gc = self.create_store()
self.assertEqual([], list(gc.iter_with_etag()))
(name1, etag1) = gc.import_one(
"foo.ics", "text/calendar", [EXAMPLE_VCALENDAR1])
(name2, etag2) = gc.import_one(
"bar.ics", "text/calendar", [EXAMPLE_VCALENDAR2])
self.assertEqual(
{
("foo.ics", "text/calendar", etag1),
("bar.ics", "text/calendar", etag2),
},
set(gc.iter_with_etag()),
)
self.assertRaises(InvalidETag, gc.delete_one, "foo.ics", etag=etag2)
self.assertEqual(
{
("foo.ics", "text/calendar", etag1),
("bar.ics", "text/calendar", etag2),
},
set(gc.iter_with_etag()),
)
class VdirStoreTest(BaseStoreTest, unittest.TestCase):
kls = VdirStore
def create_store(self):
d = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, d)
store = self.kls.create(os.path.join(d, "store"))
store.load_extra_file_handler(ICalendarFile)
return store
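# The git-backed stores come in two flavours: BareGitStore keeps items only
# as blobs in the object store, while TreeGitStore also keeps a checkout on
# disk.  add_blob() hides that difference so the shared tests can seed a
# store either way.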
class BaseGitStoreTest(BaseStoreTest):
kls: type[Store]
def create_store(self):
raise NotImplementedError(self.create_store)
def add_blob(self, gc, name, contents):
raise NotImplementedError(self.add_blob)
def test_create(self):
d = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, d)
gc = self.kls.create(os.path.join(d, "store"))
self.assertIsInstance(gc, GitStore)
self.assertEqual(gc.repo.path, os.path.join(d, "store"))
def test_iter_with_etag_missing_uid(self):
logging.getLogger("").setLevel(logging.ERROR)
gc = self.create_store()
bid = self.add_blob(gc, "foo.ics", EXAMPLE_VCALENDAR_NO_UID)
self.assertEqual(
[("foo.ics", "text/calendar", bid)], list(gc.iter_with_etag()))
gc._scan_uids()
logging.getLogger("").setLevel(logging.NOTSET)
def test_iter_with_etag(self):
gc = self.create_store()
bid = self.add_blob(gc, "foo.ics", EXAMPLE_VCALENDAR1)
self.assertEqual(
[("foo.ics", "text/calendar", bid)], list(gc.iter_with_etag()))
def test_get_description_from_git_config(self):
gc = self.create_store()
config = gc.repo.get_config()
config.set(b"xandikos", b"test", b"test")
if getattr(config, "path", None):
config.write_to_path()
gc.repo.set_description(b"a repo description")
self.assertEqual(gc.get_description(), "a repo description")
def test_displayname(self):
gc = self.create_store()
self.assertIs(None, gc.get_color())
c = gc.repo.get_config()
c.set(b"xandikos", b"displayname", b"a name")
if getattr(c, "path", None):
c.write_to_path()
self.assertEqual("a name", gc.get_displayname())
def test_get_color(self):
gc = self.create_store()
self.assertIs(None, gc.get_color())
c = gc.repo.get_config()
c.set(b"xandikos", b"color", b"334433")
if getattr(c, "path", None):
c.write_to_path()
self.assertEqual("334433", gc.get_color())
def test_get_source_url(self):
gc = self.create_store()
self.assertIs(None, gc.get_source_url())
c = gc.repo.get_config()
c.set(b"xandikos", b"source", b"www.google.com")
if getattr(c, "path", None):
c.write_to_path()
self.assertEqual("www.google.com", gc.get_source_url())
def test_default_no_subdirectories(self):
gc = self.create_store()
self.assertEqual([], gc.subdirectories())
def test_import_only_once(self):
gc = self.create_store()
(name1, etag1) = gc.import_one(
"foo.ics", "text/calendar", [EXAMPLE_VCALENDAR1])
(name2, etag2) = gc.import_one(
"foo.ics", "text/calendar", [EXAMPLE_VCALENDAR1])
self.assertEqual(name1, name2)
self.assertEqual(etag1, etag2)
walker = gc.repo.get_walker(include=[gc.repo.refs[gc.ref]])
self.assertEqual(1, len([w.commit for w in walker]))
class GitStoreTest(unittest.TestCase):
def test_open_from_path_bare(self):
d = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, d)
Repo.init_bare(d)
gc = GitStore.open_from_path(d)
self.assertIsInstance(gc, BareGitStore)
self.assertEqual(gc.repo.path, d)
def test_open_from_path_tree(self):
d = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, d)
Repo.init(d)
gc = GitStore.open_from_path(d)
self.assertIsInstance(gc, TreeGitStore)
self.assertEqual(gc.repo.path, d)
class BareGitStoreTest(BaseGitStoreTest, unittest.TestCase):
kls = BareGitStore
def create_store(self):
store = BareGitStore.create_memory()
store.load_extra_file_handler(ICalendarFile)
return store
def test_create_memory(self):
gc = BareGitStore.create_memory()
self.assertIsInstance(gc, GitStore)
def add_blob(self, gc, name, contents):
b = Blob.from_string(contents)
t = Tree()
t.add(name.encode("utf-8"), 0o644 | stat.S_IFREG, b.id)
c = Commit()
c.tree = t.id
c.committer = c.author = b"Somebody "
c.commit_time = c.author_time = 800000
c.commit_timezone = c.author_timezone = 0
c.message = b"do something"
gc.repo.object_store.add_objects([(b, None), (t, None), (c, None)])
gc.repo[gc.ref] = c.id
return b.id.decode("ascii")
def test_get_ctag(self):
gc = self.create_store()
self.assertEqual(Tree().id.decode("ascii"), gc.get_ctag())
self.add_blob(gc, "foo.ics", EXAMPLE_VCALENDAR1)
self.assertEqual(
gc._get_current_tree().id.decode("ascii"), gc.get_ctag())
class TreeGitStoreTest(BaseGitStoreTest, unittest.TestCase):
kls = TreeGitStore
def create_store(self):
d = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, d)
store = self.kls.create(os.path.join(d, "store"))
store.load_extra_file_handler(ICalendarFile)
return store
def add_blob(self, gc, name, contents):
with open(os.path.join(gc.repo.path, name), "wb") as f:
f.write(contents)
gc.repo.stage(name.encode("utf-8"))
return Blob.from_string(contents).id.decode("ascii")
class ExtractRegularUIDTests(unittest.TestCase):
def test_extract_no_uid(self):
fi = File([EXAMPLE_VCALENDAR_NO_UID], "text/bla")
self.assertRaises(NotImplementedError, fi.get_uid)
xandikos_0.2.10.orig/xandikos/tests/test_vcard.py 0000644 0000000 0000000 00000002344 14420024062 017050 0 ustar 00 # Xandikos
# Copyright (C) 2022 Jelmer Vernooij <jelmer@jelmer.uk>, et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Tests for xandikos.vcard."""
import unittest
from ..vcard import VCardFile
EXAMPLE_VCARD1 = b"""\
BEGIN:VCARD
VERSION:3.0
EMAIL;TYPE=INTERNET:jeffrey@osafoundation.org
EMAIL;TYPE=INTERNET:jeffery@example.org
ORG:Open Source Applications Foundation
FN:Jeffrey Harris
N:Harris;Jeffrey;;;
END:VCARD
"""
class ParseVcardTests(unittest.TestCase):
def test_validate(self):
fi = VCardFile([EXAMPLE_VCARD1], "text/vcard")
fi.validate()
xandikos_0.2.10.orig/xandikos/tests/test_web.py 0000644 0000000 0000000 00000013071 14476041427 016544 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij <jelmer@jelmer.uk>, et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Tests for xandikos.web."""
import os
import shutil
import tempfile
import unittest
from .. import caldav
from ..icalendar import ICalendarFile
from ..store.vdir import VdirStore
from ..web import CalendarCollection, XandikosBackend
EXAMPLE_VCALENDAR1 = b"""\
BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//bitfire web engineering//DAVdroid 0.8.0 (ical4j 1.0.x)//EN
BEGIN:VTODO
CREATED:20150314T223512Z
DTSTAMP:20150527T221952Z
LAST-MODIFIED:20150314T223512Z
STATUS:NEEDS-ACTION
SUMMARY:do something
UID:bdc22720-b9e1-42c9-89c2-a85405d8fbff
END:VTODO
END:VCALENDAR
"""
class CalendarCollectionTests(unittest.TestCase):
def setUp(self):
super().setUp()
self.tempdir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, self.tempdir)
self.store = VdirStore.create(os.path.join(self.tempdir, "c"))
self.store.load_extra_file_handler(ICalendarFile)
self.backend = XandikosBackend(self.tempdir)
self.cal = CalendarCollection(self.backend, "c", self.store)
def test_description(self):
self.store.set_description("foo")
self.assertEqual("foo", self.cal.get_calendar_description())
def test_color(self):
self.assertRaises(KeyError, self.cal.get_calendar_color)
self.cal.set_calendar_color("#aabbcc")
self.assertEqual("#aabbcc", self.cal.get_calendar_color())
def test_get_supported_calendar_components(self):
self.assertEqual(
["VEVENT", "VTODO", "VJOURNAL", "VFREEBUSY"],
self.cal.get_supported_calendar_components(),
)
def test_calendar_query_vtodos(self):
def create_fn(cls):
f = cls(None)
f.filter_subcomponent("VCALENDAR").filter_subcomponent("VTODO")
return f
self.assertEqual([], list(self.cal.calendar_query(create_fn)))
self.store.import_one("foo.ics", "text/calendar", [EXAMPLE_VCALENDAR1])
result = list(self.cal.calendar_query(create_fn))
self.assertEqual(1, len(result))
self.assertEqual("foo.ics", result[0][0])
self.assertIs(self.store, result[0][1].store)
self.assertEqual("foo.ics", result[0][1].name)
self.assertEqual("text/calendar", result[0][1].content_type)
def test_calendar_query_vtodo_by_uid(self):
def create_fn(cls):
f = cls(None)
f.filter_subcomponent("VCALENDAR").filter_subcomponent(
"VTODO"
).filter_property("UID").filter_text_match(
"bdc22720-b9e1-42c9-89c2-a85405d8fbff"
)
return f
self.assertEqual([], list(self.cal.calendar_query(create_fn)))
self.store.import_one("foo.ics", "text/calendar", [EXAMPLE_VCALENDAR1])
result = list(self.cal.calendar_query(create_fn))
self.assertEqual(1, len(result))
self.assertEqual("foo.ics", result[0][0])
self.assertIs(self.store, result[0][1].store)
self.assertEqual("foo.ics", result[0][1].name)
self.assertEqual("text/calendar", result[0][1].content_type)
def test_get_supported_calendar_data_types(self):
self.assertEqual(
[("text/calendar", "1.0"), ("text/calendar", "2.0")],
self.cal.get_supported_calendar_data_types(),
)
def test_get_max_date_time(self):
self.assertEqual("99991231T235959Z", self.cal.get_max_date_time())
def test_get_min_date_time(self):
self.assertEqual("00010101T000000Z", self.cal.get_min_date_time())
def test_members(self):
self.assertEqual([], list(self.cal.members()))
self.store.import_one("foo.ics", "text/calendar", [EXAMPLE_VCALENDAR1])
result = list(self.cal.members())
self.assertEqual(1, len(result))
self.assertEqual("foo.ics", result[0][0])
self.assertIs(self.store, result[0][1].store)
self.assertEqual("foo.ics", result[0][1].name)
self.assertEqual("text/calendar", result[0][1].content_type)
def test_get_member(self):
self.assertRaises(KeyError, self.cal.get_member, "foo.ics")
self.store.import_one("foo.ics", "text/calendar", [EXAMPLE_VCALENDAR1])
result = self.cal.get_member("foo.ics")
self.assertIs(self.store, result.store)
self.assertEqual("foo.ics", result.name)
self.assertEqual("text/calendar", result.content_type)
def test_delete_member(self):
self.assertRaises(KeyError, self.cal.get_member, "foo.ics")
self.store.import_one("foo.ics", "text/calendar", [EXAMPLE_VCALENDAR1])
self.cal.get_member("foo.ics")
self.cal.delete_member("foo.ics")
self.assertRaises(KeyError, self.cal.get_member, "foo.ics")
def test_get_schedule_calendar_transparency(self):
self.assertEqual(
caldav.TRANSPARENCY_OPAQUE,
self.cal.get_schedule_calendar_transparency(),
)
xandikos_0.2.10.orig/xandikos/tests/test_webdav.py 0000644 0000000 0000000 00000042274 14476041427 017246 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij <jelmer@jelmer.uk>, et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import logging
import unittest
from io import BytesIO
from wsgiref.util import setup_testing_defaults
from xandikos import webdav
from ..webdav import ET, Collection, Property, Resource, WebDAVApp
class WebTestCase(unittest.TestCase):
def setUp(self):
super().setUp()
logging.disable(logging.WARNING)
self.addCleanup(logging.disable, logging.NOTSET)
def makeApp(self, resources, properties):
class Backend:
get_resource = resources.get
app = WebDAVApp(Backend())
app.register_properties(properties)
return app
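# The helpers below drive WebDAVApp as a plain WSGI callable: they build a
# minimal environ with wsgiref's setup_testing_defaults, capture the status
# and headers passed to start_response, and return them together with the
# response body, e.g.:
#
#     code, headers, contents = self.get(app, "/resource")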
class WebTests(WebTestCase):
def _method(self, app, method, path):
environ = {"PATH_INFO": path, "REQUEST_METHOD": method}
setup_testing_defaults(environ)
_code = []
_headers = []
def start_response(code, headers):
_code.append(code)
_headers.extend(headers)
contents = b"".join(app(environ, start_response))
return _code[0], _headers, contents
def lock(self, app, path):
return self._method(app, "LOCK", path)
    def mkcol(self, app, path):
        return self._method(app, "MKCOL", path)
    def delete(self, app, path):
        return self._method(app, "DELETE", path)
    def get(self, app, path):
        return self._method(app, "GET", path)
def put(self, app, path, contents):
environ = {
"PATH_INFO": path,
"REQUEST_METHOD": "PUT",
"wsgi.input": BytesIO(contents),
}
setup_testing_defaults(environ)
_code = []
_headers = []
def start_response(code, headers):
_code.append(code)
_headers.extend(headers)
list(app(environ, start_response))
return _code[0], _headers
def propfind(self, app, path, body):
environ = {
"PATH_INFO": path,
"REQUEST_METHOD": "PROPFIND",
"CONTENT_TYPE": "text/xml",
"wsgi.input": BytesIO(body),
}
setup_testing_defaults(environ)
_code = []
_headers = []
def start_response(code, headers):
_code.append(code)
_headers.extend(headers)
contents = b"".join(app(environ, start_response))
return _code[0], _headers, contents
def test_not_found(self):
app = self.makeApp({}, [])
code, headers, contents = self.get(app, "/.well-known/carddav")
self.assertEqual("404 Not Found", code)
def test_get_body(self):
class TestResource(Resource):
async def get_body(self):
return [b"this is content"]
def get_last_modified(self):
raise KeyError
def get_content_language(self):
raise KeyError
async def get_etag(self):
return "myetag"
def get_content_type(self):
return "text/plain"
app = self.makeApp({"/.well-known/carddav": TestResource()}, [])
code, headers, contents = self.get(app, "/.well-known/carddav")
self.assertEqual("200 OK", code)
self.assertEqual(b"this is content", contents)
def test_set_body(self):
new_body = []
class TestResource(Resource):
async def set_body(self, body, replace_etag=None):
new_body.extend(body)
async def get_etag(self):
return '"blala"'
app = self.makeApp({"/.well-known/carddav": TestResource()}, [])
code, headers = self.put(app, "/.well-known/carddav", b"New contents")
self.assertEqual("204 No Content", code)
self.assertEqual([b"New contents"], new_body)
def test_lock_not_allowed(self):
app = self.makeApp({}, [])
code, headers, contents = self.lock(app, "/resource")
self.assertEqual("405 Method Not Allowed", code)
self.assertIn(
(
"Allow",
(
"DELETE, GET, HEAD, MKCOL, OPTIONS, "
"POST, PROPFIND, PROPPATCH, PUT, REPORT"
),
),
headers,
)
self.assertEqual(b"", contents)
def test_mkcol_ok(self):
class Backend:
def create_collection(self, relpath):
pass
def get_resource(self, relpath):
return None
app = WebDAVApp(Backend())
code, headers, contents = self.mkcol(app, "/resource/bla")
self.assertEqual("201 Created", code)
self.assertEqual(b"", contents)
def test_mkcol_exists(self):
app = self.makeApp(
{"/resource": Resource(), "/resource/bla": Resource()}, [])
code, headers, contents = self.mkcol(app, "/resource/bla")
self.assertEqual("405 Method Not Allowed", code)
self.assertEqual(b"", contents)
def test_delete(self):
class TestResource(Collection):
async def get_etag(self):
return '"foo"'
def delete_member(unused_self, name, etag=None):
self.assertEqual(name, "resource")
app = self.makeApp(
{"/": TestResource(), "/resource": TestResource()}, [])
code, headers, contents = self.delete(app, "/resource")
self.assertEqual("204 No Content", code)
self.assertEqual(b"", contents)
def test_delete_not_found(self):
class TestResource(Collection):
pass
app = self.makeApp({"/resource": TestResource()}, [])
code, headers, contents = self.delete(app, "/resource")
self.assertEqual("404 Not Found", code)
self.assertTrue(contents.endswith(b"/resource not found."))
def test_propfind_prop_does_not_exist(self):
app = self.makeApp({"/resource": Resource()}, [])
code, headers, contents = self.propfind(
app,
"/resource",
b"""\
""",
)
self.assertMultiLineEqual(
contents.decode("utf-8"),
            '<ns0:multistatus xmlns:ns0="DAV:"><ns0:response>'
            "<ns0:href>/resource</ns0:href>"
            "<ns0:propstat><ns0:status>HTTP/1.1 404 Not Found</ns0:status>"
            "<ns0:prop><ns0:resourcetype /></ns0:prop></ns0:propstat>"
            "</ns0:response></ns0:multistatus>",
)
self.assertEqual(code, "207 Multi-Status")
def test_propfind_prop_not_present(self):
class TestProperty(Property):
name = "{DAV:}current-user-principal"
async def get_value(self, href, resource, ret, environ):
raise KeyError
app = self.makeApp({"/resource": Resource()}, [TestProperty()])
code, headers, contents = self.propfind(
app,
"/resource",
b"""\
""",
)
self.assertMultiLineEqual(
contents.decode("utf-8"),
            '<ns0:multistatus xmlns:ns0="DAV:"><ns0:response>'
            "<ns0:href>/resource</ns0:href>"
            "<ns0:propstat><ns0:status>HTTP/1.1 404 Not Found</ns0:status>"
            "<ns0:prop><ns0:current-user-principal /></ns0:prop></ns0:propstat>"
            "</ns0:response></ns0:multistatus>",
)
self.assertEqual(code, "207 Multi-Status")
def test_propfind_found(self):
class TestProperty(Property):
name = "{DAV:}current-user-principal"
async def get_value(self, href, resource, ret, environ):
ET.SubElement(ret, "{DAV:}href").text = "/user/"
app = self.makeApp({"/resource": Resource()}, [TestProperty()])
code, headers, contents = self.propfind(
app,
"/resource",
b"""\
\
""",
)
self.assertMultiLineEqual(
contents.decode("utf-8"),
            '<ns0:multistatus xmlns:ns0="DAV:"><ns0:response>'
            "<ns0:href>/resource</ns0:href>"
            "<ns0:propstat><ns0:status>HTTP/1.1 200 OK</ns0:status>"
            "<ns0:prop><ns0:current-user-principal><ns0:href>/user/</ns0:href>"
            "</ns0:current-user-principal></ns0:prop></ns0:propstat>"
            "</ns0:response></ns0:multistatus>",
)
self.assertEqual(code, "207 Multi-Status")
def test_propfind_found_multi(self):
class TestProperty1(Property):
name = "{DAV:}current-user-principal"
async def get_value(self, href, resource, el, environ):
ET.SubElement(el, "{DAV:}href").text = "/user/"
class TestProperty2(Property):
name = "{DAV:}somethingelse"
async def get_value(self, href, resource, el, environ):
pass
app = self.makeApp(
{"/resource": Resource()}, [TestProperty1(), TestProperty2()]
)
code, headers, contents = self.propfind(
app,
"/resource",
b"""\
\
""",
)
self.maxDiff = None
self.assertMultiLineEqual(
contents.decode("utf-8"),
            '<ns0:multistatus xmlns:ns0="DAV:"><ns0:response>'
            "<ns0:href>/resource</ns0:href>"
            "<ns0:propstat><ns0:status>HTTP/1.1 200 OK</ns0:status>"
            "<ns0:prop><ns0:current-user-principal><ns0:href>/user/</ns0:href>"
            "</ns0:current-user-principal><ns0:somethingelse /></ns0:prop></ns0:propstat>"
            "</ns0:response></ns0:multistatus>",
)
self.assertEqual(code, "207 Multi-Status")
def test_propfind_found_multi_status(self):
class TestProperty(Property):
name = "{DAV:}current-user-principal"
async def get_value(self, href, resource, ret, environ):
ET.SubElement(ret, "{DAV:}href").text = "/user/"
app = self.makeApp({"/resource": Resource()}, [TestProperty()])
code, headers, contents = self.propfind(
app,
"/resource",
b"""\
\
""",
)
self.maxDiff = None
self.assertEqual(code, "207 Multi-Status")
self.assertMultiLineEqual(
contents.decode("utf-8"),
"""\
/resource\
HTTP/1.1 200 OK\
/user/\
\
HTTP/1.1 404 Not Found\
\
\
""",
)
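# pick_content_types implements Accept-header negotiation: entries are parsed
# into (type, params) pairs, q-values rank them, q=0 and unmatched types are
# rejected (NotAcceptableError), and wildcards such as text/* may select
# several of the offered types.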
class PickContentTypesTests(unittest.TestCase):
def test_not_acceptable(self):
self.assertRaises(
webdav.NotAcceptableError,
webdav.pick_content_types,
[("text/plain", {})],
["text/html"],
)
self.assertRaises(
webdav.NotAcceptableError,
webdav.pick_content_types,
[("text/plain", {}), ("text/html", {"q": "0"})],
["text/html"],
)
def test_highest_q(self):
self.assertEqual(
["text/plain"],
webdav.pick_content_types(
[("text/html", {"q": "0.3"}), ("text/plain", {"q": "0.4"})],
["text/plain", "text/html"],
),
)
self.assertEqual(
["text/html", "text/plain"],
webdav.pick_content_types(
[("text/html", {}), ("text/plain", {"q": "1"})],
["text/plain", "text/html"],
),
)
def test_no_q(self):
self.assertEqual(
["text/html", "text/plain"],
webdav.pick_content_types(
[("text/html", {}), ("text/plain", {})],
["text/plain", "text/html"],
),
)
def test_wildcard(self):
self.assertEqual(
["text/plain"],
webdav.pick_content_types(
[("text/*", {"q": "0.3"}), ("text/plain", {"q": "0.4"})],
["text/plain", "text/html"],
),
)
self.assertEqual(
{"text/plain", "text/html"},
set(
webdav.pick_content_types(
[("text/*", {"q": "0.4"}), ("text/plain", {"q": "0.3"})],
["text/plain", "text/html"],
)
),
)
self.assertEqual(
["application/html"],
webdav.pick_content_types(
[
("application/*", {"q": "0.4"}),
("text/plain", {"q": "0.3"}),
],
["text/plain", "application/html"],
),
)
class ParseAcceptHeaderTests(unittest.TestCase):
def test_parse(self):
self.assertEqual([], webdav.parse_accept_header(""))
self.assertEqual(
[("text/plain", {"q": "0.1"})],
webdav.parse_accept_header("text/plain; q=0.1"),
)
self.assertEqual(
[("text/plain", {"q": "0.1"}), ("text/plain", {})],
webdav.parse_accept_header("text/plain; q=0.1, text/plain"),
)
class ETagMatchesTests(unittest.TestCase):
def test_matches(self):
self.assertTrue(webdav.etag_matches("etag1, etag2", "etag1"))
self.assertFalse(webdav.etag_matches("etag3, etag2", "etag1"))
self.assertFalse(webdav.etag_matches("etag1 etag2", "etag1"))
self.assertFalse(webdav.etag_matches("etag1, etag2", None))
self.assertTrue(webdav.etag_matches("*, etag2", "etag1"))
self.assertTrue(webdav.etag_matches("*", "etag1"))
self.assertFalse(webdav.etag_matches("*", None))
class PropstatByStatusTests(unittest.TestCase):
def test_none(self):
self.assertEqual({}, webdav.propstat_by_status([]))
def test_one(self):
self.assertEqual(
{("200 OK", None): ["foo"]},
webdav.propstat_by_status(
[webdav.PropStatus("200 OK", None, "foo")]),
)
def test_multiple(self):
self.assertEqual(
{
("200 OK", None): ["foo"],
("404 Not Found", "Cannot find"): ["bar"],
},
webdav.propstat_by_status(
[
webdav.PropStatus("200 OK", None, "foo"),
webdav.PropStatus("404 Not Found", "Cannot find", "bar"),
]
),
)
class PropstatAsXmlTests(unittest.TestCase):
def test_none(self):
self.assertEqual([], list(webdav.propstat_as_xml([])))
def test_one(self):
self.assertEqual(
[
                b'<ns0:propstat xmlns:ns0="DAV:"><ns0:status>HTTP/1.1 200 '
                b"OK</ns0:status><ns0:prop><foo /></ns0:prop></ns0:propstat>"
],
[
ET.tostring(x)
for x in webdav.propstat_as_xml(
[webdav.PropStatus("200 OK", None, ET.Element("foo"))]
)
],
)
class PathFromEnvironTests(unittest.TestCase):
def test_ascii(self):
self.assertEqual(
"/bla",
webdav.path_from_environ({"PATH_INFO": "/bla"}, "PATH_INFO"),
)
def test_recode(self):
self.assertEqual(
"/blü",
webdav.path_from_environ(
{"PATH_INFO": "/bl\xc3\xbc"}, "PATH_INFO"),
)
xandikos_0.2.10.orig/xandikos/tests/test_wsgi.py 0000644 0000000 0000000 00000002023 14476041427 016733 0 ustar 00 # Xandikos
# Copyright (C) 2016-2017 Jelmer Vernooij <jelmer@jelmer.uk>, et al.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 3
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import unittest
from ..wsgi_helpers import WellknownRedirector
class WebTests(unittest.TestCase):
def test_wellknownredirector(self):
def app(environ, start_response):
pass
WellknownRedirector(app, "/path")