pax_global_header 0000666 0000000 0000000 00000000064 12560602342 0014512 g ustar 00root root 0000000 0000000 52 comment=9ae36f4c819e0abaf8eaf935d4f62acf08413acb
aiocoap-0.1+9ae36f4/ 0000775 0000000 0000000 00000000000 12560602342 0014042 5 ustar 00root root 0000000 0000000 aiocoap-0.1+9ae36f4/.gitignore 0000664 0000000 0000000 00000000050 12560602342 0016025 0 ustar 00root root 0000000 0000000 *.pyc
__pycache__
build
*.egg-info
dist
aiocoap-0.1+9ae36f4/LICENSE 0000664 0000000 0000000 00000002226 12560602342 0015051 0 ustar 00root root 0000000 0000000 Copyright (c) 2012-2014 Maciej Wasilak ,
2013-2014 Christian Amsüss
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
aiocoap-0.1+9ae36f4/README.rst 0000664 0000000 0000000 00000005424 12560602342 0015536 0 ustar 00root root 0000000 0000000 aiocoap -- The Python CoAP library
==================================
The aiocoap package is a Python implementation of CoAP, the Constrained
Application Protocol (`RFC 7252`_, more info at http://coap.technology/).
It uses the asyncio module introduced in Python 3.4 to facilitate concurrent
operations while maintaining a simple to use interface and not depending on
anything outside the standard library.
aiocoap is originally based on txThings_. If you want to use CoAP in your
existing twisted application, or can not migrate to Python 3 yet, that is
probably more useful to you than aiocoap.
.. _`RFC 7252`: http://tools.ietf.org/html/rfc7252
.. _txThings: https://github.com/siskin/txThings
Usage
-----
For details on how to use the aiocoap library, have a look at the :mod:`aiocoap`
module documentation, or at the :doc:`examples` and :doc:`tools` provided.
All examples can be run directly from a source code copy. If you prefer to
install it, the usual Python mechanisms apply.
Dependencies
------------
The core aiocoap functionality works out of the box with Python_ 3.4; with the
additional asyncio_ module, it works with version 3.3 as well.
When application/link-format typed resources (`RFC 6690`_) are supposed to be
used, the `link_header`_ module is required as well. When the respective code
paths are used without the module, an `ImportError` will be raised, or a `5.00`
status code will be returned.
.. _Python: https://www.python.org/
.. _asyncio: https://pypi.python.org/pypi/asyncio
.. _`RFC 6690`: http://tools.ietf.org/html/rfc6690
.. _`link_header`: https://pypi.python.org/pypi/LinkHeader
Development
-----------
aiocoap tries to stay close to PEP8_ recommendations and general best practice,
and should thus be easy to contribute to. Unit tests are implemented in the
``./tests/`` directory; complete test coverage is aimed for, but not yet
complete (and might never be, as the error handling for pathological network
partners is hard to trigger with a library designed not to misbehave).
Documentation is built using sphinx_; hacks used there are described in
``./doc/README.doc``.
Bugs from design goal and wishlist to typos are currently tracked in github
(see below).
.. _PEP8: http://legacy.python.org/dev/peps/pep-0008/
.. _sphinx: http://sphinx-doc.org/
Relevant URLs
-------------
* https://github.com/chrysn/aiocoap
This is where the latest source code can be found, and bugs can be reported.
Generally, this serves as the project web site.
* http://aiocoap.readthedocs.org/
Online documentation built from the sources.
Licensing
---------
aiocoap is published under the MIT License, see :doc:`LICENSE` for details.
Copyright (c) 2012-2014 Maciej Wasilak ,
2013-2014 Christian Amsüss
aiocoap-0.1+9ae36f4/aiocoap/ 0000775 0000000 0000000 00000000000 12560602342 0015455 5 ustar 00root root 0000000 0000000 aiocoap-0.1+9ae36f4/aiocoap/__init__.py 0000664 0000000 0000000 00000002560 12560602342 0017571 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
"""
aiocoap
=======
The aiocoap package is a library that implements CoAP, the Constrained
Application Protocol (`RFC 7252`_, more info at http://coap.technology/).
.. _`RFC 7252`: http://tools.ietf.org/html/rfc7252
Usage
-----
In all but the most exotic applications, you will want to create a single
:class:`.Context` instance that binds to the network. The
:meth:`.Context.create_client_context` and
:meth:`.Context.create_server_context` coroutines give you a readily connected
context.
On the client side, you can request resources by assembling a :class:`.Message`
and passing it to your context's :meth:`.Context.request` method, which
returns a :class:`.protocol.Request` object with a
:attr:`.protocol.Request.response` future (which is a :class:`.Message` again).
On the server side, a resource tree gets built from
:class:`aiocoap.resource.Resource` objects into a
:class:`aiocoap.resource.Site`, which is assigned to the context at creation
time.
"""
from .numbers import *
from .message import Message
from .protocol import Context
aiocoap-0.1+9ae36f4/aiocoap/cli/ 0000775 0000000 0000000 00000000000 12560602342 0016224 5 ustar 00root root 0000000 0000000 aiocoap-0.1+9ae36f4/aiocoap/cli/__init__.py 0000664 0000000 0000000 00000000747 12560602342 0020345 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
"""Container module for command line utilities bundled with aiocoap:
* :mod:`.proxy` -- running a stand-alone forward or reverse proxy server
"""
aiocoap-0.1+9ae36f4/aiocoap/cli/proxy.py 0000664 0000000 0000000 00000011372 12560602342 0017763 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
"""a plain CoAP proxy that can work both as forward and as reverse proxy"""
import sys
import logging
import asyncio
import argparse
import aiocoap
from aiocoap.proxy.server import ForwardProxyWithPooledObservations, ReverseProxyWithPooledObservations, NameBasedVirtualHost, SubresourceVirtualHost, UnconditionalRedirector
def parse_commandline(args):
    """Build the proxy's argument parser, parse *args* with it, and return
    both the parser and the resulting namespace.

    The parser is returned alongside the options so that callers can report
    usage errors (via ``parser.error``) for conditions that can only be
    checked later, like the mandatory --forward/--reverse choice."""
    parser = argparse.ArgumentParser(description=__doc__)

    mode_group = parser.add_argument_group("mode", "Required argument for setting the operation mode")
    mode_group.add_argument('--forward', help="Run as forward proxy", action='store_const', const=ForwardProxyWithPooledObservations, dest='direction')
    mode_group.add_argument('--reverse', help="Run as reverse proxy", action='store_const', const=ReverseProxyWithPooledObservations, dest='direction')

    details_group = parser.add_argument_group("details", "Options that govern how requests go in and out")
    details_group.add_argument('--server-address', help="Address to bind the server context to", metavar="HOST", default="::")
    details_group.add_argument('--server-port', help="Port to bind the server context to", metavar="PORT", default=aiocoap.COAP_PORT, type=int)
    details_group.add_argument('--proxy', help="Relay outgoing requests through yet another proxy", metavar="HOST[:PORT]")

    rules_group = parser.add_argument_group('Rules', description="Sequence of forwarding rules that, if matched by a request, specify a forwarding destination")

    class TypedAppend(argparse.Action):
        # Like action='append', but also records which option produced each
        # value, so the rules keep both their kind and their relative order.
        def __call__(self, parser, namespace, values, option_string=None):
            collected = getattr(namespace, self.dest)
            if collected is None:
                collected = []
                setattr(namespace, self.dest, collected)
            collected.append((option_string, values))

    rules_group.add_argument('--namebased', help="If Uri-Host matches NAME, route to DEST", metavar="NAME:DEST", action=TypedAppend, dest='r')
    rules_group.add_argument('--pathbased', help="If a requested path starts with PATH, split that part off and route to DEST", metavar="PATH:DEST", action=TypedAppend, dest='r')
    rules_group.add_argument('--unconditional', help="Route all requests not previously matched to DEST", metavar="DEST", action=TypedAppend, dest='r')

    return parser, parser.parse_args(args)
class Main:
    """Driver object for the proxy's lifecycle.

    Instantiating schedules asynchronous startup as the ``initializing``
    task; run the event loop until it completes before relying on the
    contexts, and run :meth:`shutdown` to tear everything down again."""

    def __init__(self, args):
        self.args = args
        self.initializing = asyncio.Task(self.__start())

    @asyncio.coroutine
    def __start(self):
        """Parse the command line, build the redirectors and bring up both
        the outgoing (client) and the serving (proxy) contexts."""
        parser, options = parse_commandline(self.args)

        if options.direction is None:
            raise parser.error("Either --forward or --reverse must be given.")

        self.outgoing_context = yield from aiocoap.Context.create_client_context(dump_to='/tmp/proxy-out.log')
        proxy = options.direction(self.outgoing_context)

        for kind, data in options.r or ():
            if kind == '--namebased':
                try:
                    name, dest = data.split(':', 1)
                # BUGFIX: was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit; only a missing ':' can make
                # the two-way unpack fail, and that raises ValueError.
                except ValueError:
                    raise parser.error("--namebased needs NAME:DEST as arguments")
                r = NameBasedVirtualHost(name, dest)
            elif kind == '--pathbased':
                try:
                    path, dest = data.split(':', 1)
                except ValueError:
                    raise parser.error("--pathbased needs PATH:DEST as arguments")
                r = SubresourceVirtualHost(path.split('/'), dest)
            elif kind == '--unconditional':
                r = UnconditionalRedirector(data)
            else:
                raise AssertionError('Unknown redirectory kind')
            proxy.add_redirector(r)

        self.proxy_context = yield from aiocoap.Context.create_server_context(proxy, dump_to='/tmp/proxy-in.log', bind=(options.server_address, options.server_port))

    @asyncio.coroutine
    def shutdown(self):
        """Wait for startup to finish, then close both contexts."""
        yield from self.initializing
        yield from self.outgoing_context.shutdown()
        yield from self.proxy_context.shutdown()
def sync_main(args=None):
    """Synchronous entry point: run the proxy until interrupted.

    *args* is the argument vector (defaults to ``sys.argv[1:]``); exits with
    status 3 on KeyboardInterrupt and always attempts a clean shutdown."""
    args = sys.argv[1:] if args is None else args
    logging.basicConfig(level=logging.DEBUG)
    runner = None
    try:
        loop = asyncio.get_event_loop()
        runner = Main(args)
        loop.run_until_complete(runner.initializing)
        logging.info("proxy ready")
        loop.run_forever()
    except KeyboardInterrupt:
        sys.exit(3)
    finally:
        print("stopping loop")
        if runner is not None:
            loop.run_until_complete(runner.shutdown())
        loop.stop()
if __name__ == "__main__":
    # Direct-execution entry point. If you want to run this using
    # `python3 -m`, see http://bugs.python.org/issue22480
    sync_main()
aiocoap-0.1+9ae36f4/aiocoap/dump.py 0000664 0000000 0000000 00000006745 12560602342 0017010 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
from datetime import datetime
class TextDumper(object):
    """Plain text network data dumper

    A TextDumper can be used to log network traffic into a file that can be
    converted to a PCAP-NG file as described in its header.

    Currently, this discards information like addresses; it is unknown how that
    information can be transferred into a dump reader easily while
    simultaneously staying at application level and staying ignorant of
    particular underlying protocols' data structures.

    It can be used stand-alone (outside of the asyncio transport/protocol
    mechanisms) when instantiated only with an output file; in that case, use
    the :meth:`datagram_received` and :meth:`sendto` methods.

    To use it between an asyncio transport and protocol, use the
    :meth:`endpointfactory` method."""

    def __init__(self, outfile, protocol=None):
        # outfile: writable text file the dump lines go to.
        # protocol: the wrapped asyncio protocol, or None for stand-alone use.
        self._outfile = outfile
        # header tells the reader how to convert the dump with text2pcap
        self._outfile.write("# Generated by aiocoap.dump %s\n"%datetime.now())
        self._outfile.write("# Convert to pcap-ng by using:\n#\n")
        self._outfile.write("""# text2pcap -n -u 5683,5683 -D -t "%Y-%m-%d %H:%M:%S."\n\n""")
        self._protocol = protocol
        # set by connection_made when used in the transport/protocol stack
        self._transport = None

    @classmethod
    def endpointfactory(cls, outfile, actual_protocol):
        """This method returns a function suitable for passing to an asyncio
        loop's .create_datagram_endpoint method. It will place the TextDumper
        between the object and the transport, transparently dumping network
        traffic and passing it on together with other methods defined in the
        protocol/transport interface.

        If you need the actual protocol after generating the endpoint (which
        when using this method returns a TextDumper instead of an
        actual_protocol), you can access it using the protocol property."""
        def factory():
            dumper = cls(outfile, actual_protocol())
            return dumper
        return factory

    # read-only access to the wrapped protocol (see endpointfactory docstring)
    protocol = property(lambda self: self._protocol)

    # methods for both direct use and transport/protocol use

    def datagram_received(self, data, address):
        # "I" marks inbound traffic; payload is hex-dumped byte by byte
        self._outfile.write("I %s 000 %s\n"%(datetime.now(), " ".join("%02x"%c for c in data)))
        if self._protocol is not None:
            self._protocol.datagram_received(data, address)

    def sendto(self, data, address):
        # "O" marks outbound traffic
        self._outfile.write("O %s 000 %s\n"%(datetime.now(), " ".join("%02x"%c for c in data)))
        if self._protocol is not None:
            # it's not an error to check for _protocol and not for _transport
            # here: if the protocol got hold of this fake transport by other
            # means than connection_made, writing before connection_made should
            # still create an error.
            self._transport.sendto(data, address)

    # passed-through properties and methods

    def connection_made(self, transport):
        # interpose ourselves as the protocol's transport
        self._transport = transport
        self._protocol.connection_made(self)

    # expose the underlying socket of the wrapped transport
    _sock = property(lambda self: self._transport._sock)

    def close(self):
        self._outfile.close()
        self._transport.close()

    def connection_lost(self, exc):
        self._protocol.connection_lost(exc)
aiocoap-0.1+9ae36f4/aiocoap/error.py 0000664 0000000 0000000 00000006123 12560602342 0017162 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
"""
Exception definitions for txThings CoAP library.
"""
from .numbers import codes
class Error(Exception):
    """
    Base exception for all exceptions that indicate a failed request.

    Catch this to handle any aiocoap-specific request failure.
    """
class RenderableError(Error):
    """
    Exception that can meaningfully be represented in a CoAP response
    """
    # response code and diagnostic payload to use; subclasses override these
    code = codes.INTERNAL_SERVER_ERROR
    message = ""
class NoResource(RenderableError):
    """
    Raised when resource is not found.
    """
    # rendered as a CoAP 4.04 response
    code = codes.NOT_FOUND
    message = "Error: Resource not found!"
class UnallowedMethod(RenderableError):
    """
    Raised by a resource when request method is understood by the server
    but not allowed for that particular resource.
    """
    # rendered as a CoAP 4.05 response
    code = codes.METHOD_NOT_ALLOWED
    message = "Error: Method not allowed!"
class UnsupportedMethod(RenderableError):
    """
    Raised when request method is not understood by the server at all.
    """
    # same code as UnallowedMethod, but a distinct diagnostic message
    code = codes.METHOD_NOT_ALLOWED
    message = "Error: Method not recognized!"
# NOTE(review): this class name shadows the builtin ``NotImplemented``
# singleton inside this module; renaming it would break the public API.
class NotImplemented(Error):
    """
    Raised when request is correct, but feature is not implemented
    by txThings library.
    For example non-sequential blockwise transfers
    """
class RequestTimedOut(Error):
    """
    Raised when request is timed out.
    """
class WaitingForClientTimedOut(Error):
    """
    Raised when server expects some client action:

    - sending next PUT/POST request with block1 or block2 option
    - sending next GET request with block2 option

    but client does nothing.
    """
class ResourceChanged(Error):
    """
    The requested resource was modified during the request and could therefore
    not be received in a consistent state.
    """
class UnexpectedBlock1Option(Error):
    """
    Raised when a server responds with block1 options that just don't match.
    """
class UnexpectedBlock2(Error):
    """
    Raised when a server responds with another block2 than expected.
    """
class MissingBlock2Option(Error):
    """
    Raised when response with Block2 option is expected
    (previous response had Block2 option with More flag set),
    but response without Block2 option is received.
    """
class NotObservable(Error):
    """
    The server did not accept the request to observe the resource.
    """
class ObservationCancelled(Error):
    """
    The server claimed that it will no longer sustain the observation.
    """
class UnparsableMessage(Error):
    """
    An incoming message does not look like CoAP.

    Note that this happens rarely -- the requirements are just two bit at the
    beginning of the message, and a minimum length.
    """
class CommunicationKilled(RenderableError):
    """
    The communication process has been aborted by request of the application.
    """
    # rendered as a CoAP 5.03 response
    code = codes.SERVICE_UNAVAILABLE
aiocoap-0.1+9ae36f4/aiocoap/interfaces.py 0000664 0000000 0000000 00000005732 12560602342 0020161 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
"""This module provides interface base classes to various aiocoap services,
especially with respect to request and response handling."""
import abc
from asyncio import coroutine
class RequestProvider(metaclass=abc.ABCMeta):
    """Interface for objects that can send CoAP requests on behalf of an
    application."""

    @abc.abstractmethod
    def request(self, request_message):
        """Create and act on a :class:`Request` object that will be handled
        according to the provider's implementation."""
class Request(metaclass=abc.ABCMeta):
    """A CoAP request, initiated by sending a message. Typically, this is not
    instantiated directly, but generated by a :meth:`RequestProvider.request`
    method."""

    # documentation of the attribute implementers are expected to provide
    response = """A future that is present from the creation of the object and \
fullfilled with the response message."""
class Resource(metaclass=abc.ABCMeta):
    """Interface that is expected by a :class:`.protocol.Context` to be present
    on the server site, which renders all requests to that context."""

    @abc.abstractmethod
    @coroutine
    def render(self, request):
        """Return a message that can be sent back to the requester.

        This does not need to set any low-level message options like remote,
        token or message type; it does however need to set a response code."""

    @abc.abstractmethod
    @coroutine
    def needs_blockwise_assembly(self, request):
        """Indicator to the :class:`.protocol.Responder` about whether it
        should assemble request blocks to a single request and extract the
        requested blocks from a complete-resource answer (True), or whether
        the resource will do that by itself (False)."""
class ObservableResource(Resource, metaclass=abc.ABCMeta):
    """Interface the :class:`.protocol.ServerObservation` uses to negotiate
    whether an observation can be established based on a request.

    This adds only functionality for registering and unregistering observations;
    the notification contents will be retrieved from the resource using the
    regular :meth:`.render` method from crafted (fake) requests.
    """

    @abc.abstractmethod
    @coroutine
    def add_observation(self, request, serverobservation):
        """Before the incoming request is sent to :meth:`.render`, the
        :meth:`.add_observation` method is called. If the resource chooses to
        accept the observation, it has to call the
        `serverobservation.accept(cb)` with a callback that will be called when
        the observation ends. After accepting, the ObservableResource should
        call `serverobservation.trigger()` whenever it changes its state; the
        ServerObservation will then initiate notifications by having the
        request rendered again."""
aiocoap-0.1+9ae36f4/aiocoap/message.py 0000664 0000000 0000000 00000035455 12560602342 0017467 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
import asyncio
import urllib.parse
import struct
import copy
import ipaddress
from . import error
from .numbers import *
from .options import Options
## Monkey patch urllib to make URL joining available in CoAP
# urllib.parse only treats schemes listed in uses_relative/uses_netloc as
# hierarchical; registering 'coap' lets urljoin and friends resolve relative
# references against coap:// URLs.
urllib.parse.uses_relative.append('coap')
urllib.parse.uses_netloc.append('coap')
class Message(object):
"""CoAP Message with some handling metadata
This object's attributes provide access to the fields in a CoAP message and
can be directly manipulated.
* Some attributes are additional data that do not round-trip through
serialization and deserialization. They are marked as "non-roundtrippable".
* Some attributes that need to be filled for submission of the message can
be left empty by most applications, and will be taken care of by the
library. Those are marked as "managed".
The attributes are:
* :attr:`payload`: The payload (body) of the message as bytes.
* :attr:`mtype`: Message type (CON, ACK etc, see :mod:`.numbers.types`).
Managed unless set by the application.
* :attr:`code`: The code (either request or response code), see
:mod:`.numbers.codes`.
* :attr:`opt`: A container for the options, see :class:`.options.Options`.
* :attr:`mid`: The message ID. Managed by the :class:`.Context`.
* :attr:`token`: The message's token as bytes. Managed by the :class:`.Context`.
* :attr:`remote`: The socket address of the side, managed by the
:class:`.protocol.Request` by resolving the ``.opt.uri_host`` or
``unresolved_remote``, or the :class:`.Responder` by echoing the incoming
request's. (If you choose to set this explicitly set this, make sure not
to set incomplete IPv6 address tuples, as they can be sent but don't
compare equally with the responses). Non-roundtrippable.
* requested_*: Managed by the :class:`.protocol.Request` a response results
from, and filled with the request's URL data. Non-roundtrippable.
* unresolved_remote: ``host[:port]`` formatted string. If this attribute is
set, it overrides ``.opt.uri_host`` (and ``-_port``) when it comes to
filling the ``remote`` in an outgoing request.
Use this when you want to send a request with a host name that would not
normally resolve to the destination address. (Typically, this is used for
proxying.)
* :attr:`prepath`, :attr:`postpath`: Not sure, will probably go away when
resources are overhauled. Non-roundtrippable.
"""
def __init__(self, *, mtype=None, mid=None, code=EMPTY, payload=b'', token=b'', uri=None):
self.version = 1
if mtype is None:
# leave it unspecified for convenience, sending functions will know what to do
self.mtype = None
else:
self.mtype = Type(mtype)
self.mid = mid
self.code = Code(code)
self.token = token
self.payload = payload
self.opt = Options()
self.remote = None
self.unresolved_remote = None
self.prepath = None
self.postpath = None
# attributes that indicate which request path the response belongs to.
# their main purpose is allowing .get_request_uri() to work smoothly, a
# feature that is required to resolve links relative to the message.
#
# path and query are stored as lists, as they would be accessed for
# example by self.opt.uri_path
self.requested_proxy_uri = None
self.requested_scheme = None
self.requested_host = None
self.requested_port = None
self.requested_path = None
self.requested_query = None
# deprecation error, should go away roughly after 0.2 release
if self.payload is None:
raise TypeError("Payload must not be None. Use empty string instead.")
if uri:
self.set_request_uri(uri)
def __repr__(self):
return ""%(
id(self),
self.mtype,
self.code,
self.mid,
self.token,
self.remote,
", %s option(s)"%len(self.opt._options) if self.opt._options else "",
", %s byte(s) payload"%len(self.payload) if self.payload else ""
)
@classmethod
def decode(cls, rawdata, remote=None):
"""Create Message object from binary representation of message."""
try:
(vttkl, code, mid) = struct.unpack('!BBH', rawdata[:4])
except struct.error:
raise error.UnparsableMessage("Incoming message too short for CoAP")
version = (vttkl & 0xC0) >> 6
if version is not 1:
raise error.UnparsableMessage("Fatal Error: Protocol Version must be 1")
mtype = (vttkl & 0x30) >> 4
token_length = (vttkl & 0x0F)
msg = Message(mtype=mtype, mid=mid, code=code)
msg.token = rawdata[4:4 + token_length]
msg.payload = msg.opt.decode(rawdata[4 + token_length:])
msg.remote = remote
return msg
def encode(self):
"""Create binary representation of message from Message object."""
if self.mtype is None or self.mid is None:
raise TypeError("Fatal Error: Message Type and Message ID must not be None.")
rawdata = bytes([(self.version << 6) + ((self.mtype & 0x03) << 4) + (len(self.token) & 0x0F)])
rawdata += struct.pack('!BH', self.code, self.mid)
rawdata += self.token
rawdata += self.opt.encode()
if len(self.payload) > 0:
rawdata += bytes([0xFF])
rawdata += self.payload
return rawdata
def get_cache_key(self, ignore_options=()):
"""Generate a hashable and comparable object (currently a tuple) from
the message's code and all option values that are part of the cache key
and not in the optional list of ignore_options (which is the list of
option numbers that are not technically NoCacheKey but handled by the
application using this method).
>>> m1 = Message(code=GET)
>>> m2 = Message(code=GET)
>>> m1.opt.uri_path = ('s', '1')
>>> m2.opt.uri_path = ('s', '1')
>>> m1.opt.size1 = 10 # the only no-cache-key option in the base spec
>>> m2.opt.size1 = 20
>>> m1.get_cache_key() == m2.get_cache_key()
True
>>> m2.opt.etag = b'000'
>>> m1.get_cache_key() == m2.get_cache_key()
False
>>> ignore = [OptionNumber.ETAG]
>>> m1.get_cache_key(ignore) == m2.get_cache_key(ignore)
True
"""
options = []
for option in self.opt.option_list():
if option.number in ignore_options or (option.number.is_safetoforward() and option.number.is_nocachekey()):
continue
options.append((option.number, option.value))
return (self.code, tuple(options))
#
# splitting and merging messages into and from message blocks
#
def _extract_block(self, number, size_exp):
"""Extract block from current message."""
size = 2 ** (size_exp + 4)
start = number * size
if start < len(self.payload):
end = start + size if start + size < len(self.payload) else len(self.payload)
block = copy.deepcopy(self)
block.payload = block.payload[start:end]
block.mid = None
more = True if end < len(self.payload) else False
if block.code.is_request():
block.opt.block1 = (number, more, size_exp)
else:
block.opt.block2 = (number, more, size_exp)
return block
def _append_request_block(self, next_block):
"""Modify message by appending another block"""
if not self.code.is_request():
raise ValueError("_append_request_block only works on requests.")
block1 = next_block.opt.block1
if block1.start == len(self.payload):
self.payload += next_block.payload
self.opt.block1 = block1
self.token = next_block.token
self.mid = next_block.mid
else:
raise error.NotImplemented()
def _append_response_block(self, next_block):
"""Append next block to current response message.
Used when assembling incoming blockwise responses."""
if not self.code.is_response():
raise ValueError("_append_response_block only works on responses.")
block2 = next_block.opt.block2
if block2.start != len(self.payload):
raise error.NotImplemented()
if next_block.opt.etag != self.opt.etag:
raise error.ResourceChanged()
self.payload += next_block.payload
self.opt.block2 = block2
self.token = next_block.token
self.mid = next_block.mid
def _generate_next_block2_request(self, response):
"""Generate a request for next response block.
This method is used by client after receiving blockwise response from
server with "more" flag set."""
request = copy.deepcopy(self)
request.payload = b""
request.mid = None
if response.opt.block2.block_number == 0 and response.opt.block2.size_exponent > DEFAULT_BLOCK_SIZE_EXP:
new_size_exponent = DEFAULT_BLOCK_SIZE_EXP
new_block_number = 2 ** (response.opt.block2.size_exponent - new_size_exponent)
request.opt.block2 = (new_block_number, False, new_size_exponent)
else:
request.opt.block2 = (response.opt.block2.block_number + 1, False, response.opt.block2.size_exponent)
del request.opt.block1
del request.opt.observe
return request
def _generate_next_block1_response(self):
"""Generate a response to acknowledge incoming request block.
This method is used by server after receiving blockwise request from
client with "more" flag set."""
response = Message(code=CHANGED, token=self.token)
response.remote = self.remote
if self.opt.block1.block_number == 0 and self.opt.block1.size_exponent > DEFAULT_BLOCK_SIZE_EXP:
new_size_exponent = DEFAULT_BLOCK_SIZE_EXP
response.opt.block1 = (0, True, new_size_exponent)
else:
response.opt.block1 = (self.opt.block1.block_number, True, self.opt.block1.size_exponent)
return response
#
# the message in the context of network and addresses
#
@staticmethod
def _build_request_uri(scheme, host, port, path, query):
"""Assemble path components as found in CoAP options into a URL. Helper
for :meth:`get_request_uri`."""
if ':' in host:
host = '[%s]'%host
if port is None:
netloc = host
else:
netloc = "%s:%d"%(host, port)
# FIXME this should follow coap section 6.5 more closely
query = "&".join(query)
path = '/'.join(("",) + path) or '/'
fragment = None
params = "" # are they not there at all?
return urllib.parse.urlunparse((scheme, netloc, path, params, query, fragment))
def get_request_uri(self):
"""The absolute URI this message belongs to.
For requests, this is composed from the options (falling back to the
remote). For responses, this is stored by the Request object not only
to preserve the request information (which could have been kept by the
requesting application), but also because the Request can know about
multicast responses (which would update the host component) and
redirects (FIXME do they exist?)."""
# maybe this function does not belong exactly *here*, but it belongs to
# the results of .request(message), which is currently a message itself.
if self.code.is_response():
proxyuri = self.requested_proxy_uri
scheme = self.requested_scheme or 'coap'
query = self.requested_query
path = self.requested_path
else:
proxyuri = self.opt.proxy_uri
scheme = self.opt.proxy_scheme or 'coap'
query = self.opt.uri_query or ()
path = self.opt.uri_path
if self.code.is_response() and self.requested_host is not None:
host = self.requested_host
elif self.code.is_request() and self.opt.uri_host is not None:
host = self.opt.uri_host
else:
host = self.remote[0]
if self.code.is_response() and self.requested_port is not None:
port = self.requested_port
elif self.code.is_request() and self.opt.uri_port is not None:
port = self.opt.uri_port
elif self.remote is not None:
port = self.remote[1]
if port == COAP_PORT:
# don't explicitly add port if not required
port = None
else:
port = None
if proxyuri is not None:
return proxyuri
return self._build_request_uri(scheme, host, port, path, query)
def set_request_uri(self, uri):
"""Parse a given URI into the uri_* fields of the options.
The remote does not get set automatically; instead, the remote data is
stored in the uri_host and uri_port options. That is because name resolution
is coupled with network specifics the protocol will know better by the
time the message is sent. Whatever sends the message, be it the
protocol itself, a proxy wrapper or an alternative transport, will know
how to handle the information correctly."""
parsed = urllib.parse.urlparse(uri, allow_fragments=False)
if parsed.scheme != 'coap':
self.opt.proxy_uri = uri
return
if parsed.username or parsed.password:
raise ValueError("User name and password not supported.")
# FIXME as with get_request_uri, this hould do encoding/decoding and section 6.5 etc
if parsed.path not in ('', '/'):
self.opt.uri_path = parsed.path.split('/')[1:]
else:
self.opt.uri_path = []
if parsed.query:
self.opt.uri_query = parsed.query.split('&')
else:
self.opt.uri_query = []
if parsed.port:
self.opt.uri_port = parsed.port
self.opt.uri_host = parsed.hostname
def has_multicast_remote(self):
    """Return True if the message's remote needs to be considered a multicast remote."""
    # remote[0] is the host part of the address tuple; let the stdlib decide
    # whether it falls into a multicast range (works for both v4 and v6).
    return ipaddress.ip_address(self.remote[0]).is_multicast
aiocoap-0.1+9ae36f4/aiocoap/numbers/ 0000775 0000000 0000000 00000000000 12560602342 0017130 5 ustar 00root root 0000000 0000000 aiocoap-0.1+9ae36f4/aiocoap/numbers/__init__.py 0000664 0000000 0000000 00000002045 12560602342 0021242 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
"""Module in which all meaningful numbers are collected. Most of the submodules
correspond to IANA registries."""
from . import constants, types, codes
from .constants import *
from .types import *
from .codes import *
from .optionnumbers import OptionNumber
__all__ = constants.__all__ + types.__all__ + codes.__all__ + ['OptionNumber']
media_types = {
        0: 'text/plain',
        40: 'application/link-format',
        41: 'application/xml',
        42: 'application/octet-stream',
        47: 'application/exi',
        50: 'application/json',
        }
"""A map from CoAP-assigned integral codes to Internet media type descriptions."""

# Reverse lookup: media type description -> CoAP content-format number.
media_types_rev = {description: number for (number, description) in media_types.items()}
aiocoap-0.1+9ae36f4/aiocoap/numbers/codes.py 0000664 0000000 0000000 00000007032 12560602342 0020601 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
"""List of known values for the CoAP "Code" field.
The values in this module correspond to the IANA registry "`CoRE Parameters`_",
subregistries "CoAP Method Codes" and "CoAP Response Codes".
The codes come with methods that can be used to get their rough meaning, see
the :class:`Code` class for details.
.. _`CoRE Parameters`: https://www.iana.org/assignments/core-parameters/core-parameters.xhtml
"""
from ..util import ExtensibleIntEnum
class Code(ExtensibleIntEnum):
    """Value for the CoAP "Code" field.

    As the number range for the code values is separated, the rough meaning of
    a code can be determined using the :meth:`is_request`, :meth:`is_response` and
    :meth:`is_successful` methods."""

    EMPTY = 0
    GET = 1
    POST = 2
    PUT = 3
    DELETE = 4
    CREATED = 65
    DELETED = 66
    VALID = 67
    CHANGED = 68
    CONTENT = 69
    CONTINUE = 95
    BAD_REQUEST = 128
    UNAUTHORIZED = 129
    BAD_OPTION = 130
    FORBIDDEN = 131
    NOT_FOUND = 132
    METHOD_NOT_ALLOWED = 133
    NOT_ACCEPTABLE = 134
    REQUEST_ENTITY_INCOMPLETE = 136
    PRECONDITION_FAILED = 140
    REQUEST_ENTITY_TOO_LARGE = 141
    UNSUPPORTED_MEDIA_TYPE = 143
    INTERNAL_SERVER_ERROR = 160
    NOT_IMPLEMENTED = 161
    BAD_GATEWAY = 162
    SERVICE_UNAVAILABLE = 163
    GATEWAY_TIMEOUT = 164
    PROXYING_NOT_SUPPORTED = 165

    def is_request(self):
        """True if the code is in the request code range"""
        # chained comparisons already yield a bool; no ternary needed
        return 1 <= self < 32

    def is_response(self):
        """True if the code is in the response code range"""
        return 64 <= self < 192

    def is_successful(self):
        """True if the code is in the successful subrange of the response code range"""
        return 64 <= self < 96

    @property
    def dotted(self):
        """The numeric value in its three-decimal-digits (c.dd) form"""
        return "%d.%02d"%divmod(self, 32)

    @property
    def name_printable(self):
        """The name of the code in human-readable form"""
        return self.name.replace('_', ' ').title()

    def __str__(self):
        if self.is_request() or self is self.EMPTY:
            return self.name
        elif self.is_response():
            return "%s %s"%(self.dotted, self.name_printable)
        else:
            return "%d"%self

    def __repr__(self):
        """Show range qualifiers, numeric value and printable form.

        The original doctest examples carried no expected output and would
        thus have failed when run; expected values added.

        >>> Code.GET
        <Request Code 1 "GET">
        >>> Code.CONTENT
        <Successful Response Code 69 "2.05 Content">
        >>> Code.BAD_GATEWAY
        <Response Code 162 "5.02 Bad Gateway">
        >>> Code(32)
        <Code 32 "32">
        """
        return '<%s%sCode %d "%s">'%("Successful " if self.is_successful() else "", "Request " if self.is_request() else "Response " if self.is_response() else "", self, self)

    # ExtensibleIntEnum instances keep their constant name in ``_name`` when
    # one was assigned; values outside the known set report "(unknown)".
    name = property(lambda self: self._name if hasattr(self, "_name") else "(unknown)", lambda self, value: setattr(self, "_name", value), doc="The constant name of the code (equals name_printable readable in all-caps and with underscores)")
# Re-export each code constant (Code.GET, Code.NOT_FOUND, ...) as a
# module-level name, so users can write e.g. ``codes.GET``.  At module top
# level, ``locals()`` is the module's own namespace dict, so assigning into
# it is effective (this would not work inside a function).
for k in vars(Code):
    if isinstance(getattr(Code, k), Code):
        locals()[k] = getattr(Code, k)

# Export the class itself plus every code constant copied above.
__all__ = ['Code'] + [k for (k,v) in locals().items() if isinstance(v, Code)]
aiocoap-0.1+9ae36f4/aiocoap/numbers/constants.py 0000664 0000000 0000000 00000007236 12560602342 0021526 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
"""Constants either defined in the CoAP protocol (often default values for lack
of ways to determine eg. the estimated round trip time). Some parameters are
invented here for practical purposes of the implementation (eg.
DEFAULT_BLOCK_SIZE_EXP, EMPTY_ACK_DELAY)."""
COAP_PORT = 5683
"""The IANA-assigned standard port for COAP services."""
# +-------------------+---------------+
# | name | default value |
# +-------------------+---------------+
# | ACK_TIMEOUT | 2 seconds |
# | ACK_RANDOM_FACTOR | 1.5 |
# | MAX_RETRANSMIT | 4 |
# | NSTART | 1 |
# | DEFAULT_LEISURE | 5 seconds |
# | PROBING_RATE | 1 Byte/second |
# +-------------------+---------------+
ACK_TIMEOUT = 2.0
"""The time, in seconds, to wait for an acknowledgement of a
confirmable message. The inter-transmission time doubles
for each retransmission."""
ACK_RANDOM_FACTOR = 1.5
"""Timeout multiplier for anti-synchronization."""
MAX_RETRANSMIT = 4
"""The number of retransmissions of confirmable messages to
non-multicast endpoints before the infrastructure assumes no
acknowledgement will be received."""
NSTART = 1
"""Maximum number of simultaneous outstanding interactions
that endpoint maintains to a given server (including proxies)"""
# +-------------------+---------------+
# | name | default value |
# +-------------------+---------------+
# | MAX_TRANSMIT_SPAN | 45 s |
# | MAX_TRANSMIT_WAIT | 93 s |
# | MAX_LATENCY | 100 s |
# | PROCESSING_DELAY | 2 s |
# | MAX_RTT | 202 s |
# | EXCHANGE_LIFETIME | 247 s |
# | NON_LIFETIME | 145 s |
# +-------------------+---------------+
MAX_TRANSMIT_SPAN = ACK_TIMEOUT * (2 ** MAX_RETRANSMIT - 1) * ACK_RANDOM_FACTOR
"""Maximum time from the first transmission
of a confirmable message to its last retransmission."""
MAX_TRANSMIT_WAIT = ACK_TIMEOUT * (2 ** (MAX_RETRANSMIT + 1) - 1) * ACK_RANDOM_FACTOR
"""Maximum time from the first transmission
of a confirmable message to the time when the sender gives up on
receiving an acknowledgement or reset."""
MAX_LATENCY = 100.0
"""Maximum time a datagram is expected to take from the start
of its transmission to the completion of its reception."""
PROCESSING_DELAY = ACK_TIMEOUT
""""Time a node takes to turn around a
confirmable message into an acknowledgement."""
MAX_RTT = 2 * MAX_LATENCY + PROCESSING_DELAY
"""Maximum round-trip time."""
EXCHANGE_LIFETIME = MAX_TRANSMIT_SPAN + MAX_RTT
"""time from starting to send a confirmable message to the time when an
acknowledgement is no longer expected, i.e. message layer information about the
message exchange can be purged"""
DEFAULT_BLOCK_SIZE_EXP = 6 # maximum block size 1024
"""Default size exponent for blockwise transfers."""
EMPTY_ACK_DELAY = 0.1
"""After this time protocol sends empty ACK, and separate response"""
REQUEST_TIMEOUT = MAX_TRANSMIT_WAIT
"""Time after which server assumes it won't receive any answer.
It is not defined by IETF documents.
For human-operated devices it might be preferable to set some small value
(for example 10 seconds)
For M2M it's application dependent."""
DEFAULT_LEISURE = 5
MULTICAST_REQUEST_TIMEOUT = REQUEST_TIMEOUT + DEFAULT_LEISURE
__all__ = [k for k in dir() if not k.startswith('_')]
aiocoap-0.1+9ae36f4/aiocoap/numbers/optionnumbers.py 0000664 0000000 0000000 00000013563 12560602342 0022416 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
"""Known values for CoAP option numbers
The values defined in `OptionNumber` correspond to the IANA registry "CoRE
Parameters", subregistry "CoAP Option Numbers".
The option numbers come with methods that can be used to evaluate their
properties, see the `OptionNumber` class for details.
"""
from ..util import ExtensibleIntEnum
from .. import optiontypes
#=============================================================================
# coap-18, block-14, observe-11
#=============================================================================
# +-----+---+---+---+---+----------------+------------+--------+-------------+
# | No. | C | U | N | R | Name | Format | Length | Default |
# +-----+---+---+---+---+----------------+------------+--------+-------------+
# | 1 | x | | | x | If-Match | opaque | 0-8 | (none) |
# | 3 | x | x | - | | Uri-Host | string | 1-255 | (see below) |
# | 4 | | | | x | ETag | opaque | 1-8 | (none) |
# | 5 | x | | | | If-None-Match | empty | 0 | (none) |
# | 6 | | x | | | Observe | empty/uint | ? | (none) |
# | 7 | x | x | - | | Uri-Port | uint | 0-2 | (see below) |
# | 8 | | | | x | Location-Path | string | 0-255 | (none) |
# | 11 | x | x | - | x | Uri-Path | string | 0-255 | (none) |
# | 12 | | | | | Content-Format | uint | 0-2 | (none) |
# | 14 | | x | | | Max-Age | uint | 0-4 | 60 |
# | 15 | x | x | - | x | Uri-Query | string | 0-255 | (none) |
# | 17 | x | | | | Accept | uint | 0-2 | (none) |
# | 20 | | | | x | Location-Query | string | 0-255 | (none) |
# | 23 | x | x | - | - | Block2 | uint | 0-3 | (see below) |
# | 27 | x | x | - | - | Block1 | uint | 0-3 | (see below) |
# | 28 | | | x | | Size2 | uint | 0-4 | (none) |
# | 35 | x | x | - | | Proxy-Uri | string | 1-1034 | (none) |
# | 39 | x | x | - | | Proxy-Scheme | string | 1-255 | (none) |
# | 60 | | | x | | Size1 | uint | 0-4 | (none) |
# +-----+---+---+---+---+----------------+------------+--------+-------------+
#=============================================================================
#
# This table should serve as a reference only. It does not confirm that
# txThings conforms to the documents above
#
class OptionNumber(ExtensibleIntEnum):
    """A CoAP option number.

    As the option number contains information on whether the option is
    critical, and whether it is safe-to-forward, those properties can be
    queried using the `is_*` group of methods.

    Note that whether an option may be repeated or not does not only depend on
    the option, but also on the context, and is thus handled in the `Options`
    object instead."""

    IF_MATCH = 1
    URI_HOST = 3
    ETAG = 4
    IF_NONE_MATCH = 5
    OBSERVE = 6
    URI_PORT = 7
    LOCATION_PATH = 8
    URI_PATH = 11
    CONTENT_FORMAT = 12
    MAX_AGE = 14
    URI_QUERY = 15
    ACCEPT = 17
    LOCATION_QUERY = 20
    BLOCK2 = 23
    BLOCK1 = 27
    SIZE2 = 28
    PROXY_URI = 35
    PROXY_SCHEME = 39
    SIZE1 = 60

    def is_critical(self):
        # bit 0 of the option number flags critical options
        return bool(self & 0x01)

    def is_elective(self):
        return not self.is_critical()

    def is_unsafe(self):
        # bit 1 flags options that are unsafe to forward
        return bool(self & 0x02)

    def is_safetoforward(self):
        return not self.is_unsafe()

    def is_nocachekey(self):
        if self.is_unsafe():
            raise ValueError("NoCacheKey is only meaningful for safe options")
        return (self & 0x1e) == 0x1c

    def is_cachekey(self):
        return not self.is_nocachekey()

    def _get_format(self):
        # options without an explicitly registered value type are treated as
        # opaque byte strings
        return getattr(self, "_format", optiontypes.OpaqueOption)

    def _set_format(self, value):
        self._format = value

    format = property(_get_format, _set_format)

    def create_option(self, decode=None, value=None):
        """Return an Option element of the appropriate class from this option
        number.

        An initial value may be set using the decode or value options, and
        will be fed to the resulting object's decode method or value
        property, respectively."""
        option = self.format(self)
        if decode is not None:
            option.decode(decode)
        if value is not None:
            option.value = value
        return option
# TODO: set format OpaqueOption for options where it is expected to be used
# even though it is the default value; thus, developers can rely on it to
# persist (as opposed to formats of unknown options, which might later be
# added).

# Register the value type of each known option per the option table above;
# options not listed here keep the OpaqueOption default.
OptionNumber.OBSERVE.format = optiontypes.UintOption
OptionNumber.URI_PORT.format = optiontypes.UintOption
OptionNumber.URI_PATH.format = optiontypes.StringOption
OptionNumber.PROXY_URI.format = optiontypes.StringOption
OptionNumber.PROXY_SCHEME.format = optiontypes.StringOption
OptionNumber.CONTENT_FORMAT.format = optiontypes.UintOption
OptionNumber.MAX_AGE.format = optiontypes.UintOption
OptionNumber.URI_QUERY.format = optiontypes.StringOption
OptionNumber.ACCEPT.format = optiontypes.UintOption
# Location-Path and Location-Query are string-valued per the table above
# (rows 8 and 20) but were previously not registered, so they were decoded
# as opaque bytes instead of strings.
OptionNumber.LOCATION_PATH.format = optiontypes.StringOption
OptionNumber.LOCATION_QUERY.format = optiontypes.StringOption
OptionNumber.BLOCK2.format = optiontypes.BlockOption
OptionNumber.BLOCK1.format = optiontypes.BlockOption
OptionNumber.SIZE2.format = optiontypes.UintOption
OptionNumber.URI_HOST.format = optiontypes.StringOption
OptionNumber.SIZE1.format = optiontypes.UintOption
aiocoap-0.1+9ae36f4/aiocoap/numbers/types.py 0000664 0000000 0000000 00000001367 12560602342 0020655 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
"""List of known values for the CoAP "Type" field.
As this field is only 2 bits, its valid values are comprehensively enumerated
in the `Type` object.
"""
from enum import IntEnum
class Type(IntEnum):
    """The four CoAP message types, as carried in the 2-bit "Type" field."""
    CON = 0  # Confirmable
    NON = 1  # Non-confirmable
    ACK = 2  # Acknowledgement
    RST = 3  # Reset

# Convenience module-level aliases for the individual types.
CON = Type.CON
NON = Type.NON
ACK = Type.ACK
RST = Type.RST

__all__ = ['Type'] + [member.name for member in Type]
aiocoap-0.1+9ae36f4/aiocoap/options.py 0000664 0000000 0000000 00000015617 12560602342 0017534 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
from itertools import chain
import struct
from .numbers import *
def _read_extended_field_value(value, rawdata):
    """Decode the extended form of an option delta or option length nibble.

    Given the 4-bit field *value* and the remaining raw bytes, return a
    tuple of the decoded number and the rawdata that was not consumed."""
    if 0 <= value < 13:
        # small values are carried directly in the nibble
        return value, rawdata
    if value == 13:
        # one extension byte, offset by 13
        return rawdata[0] + 13, rawdata[1:]
    if value == 14:
        # two extension bytes in network order, offset by 269
        return struct.unpack('!H', rawdata[:2])[0] + 269, rawdata[2:]
    raise ValueError("Value out of range.")
def _write_extended_field_value(value):
    """Encode an option delta or option length for the wire.

    In CoAP these values are represented by a variable number of bytes
    depending on magnitude; return the 4-bit nibble value and the extension
    bytes (possibly empty) that follow the option header byte."""
    if 0 <= value < 13:
        return value, b''
    if 13 <= value < 269:
        return 13, struct.pack('!B', value - 13)
    if 269 <= value < 65804:
        return 14, struct.pack('!H', value - 269)
    raise ValueError("Value out of range.")
def _single_value_view(option_number, doc=None):
    """Generate a property for a non-repeatable option.

    Reading returns the value of the first option object with a matching
    number, or None when absent.  Writing removes all options with that
    number and creates a single one holding the given value; assigning None
    clears the option, as does deleting the property.  (Note that with the
    currently implemented optiontypes, None is not a valid value for any of
    them.)"""
    def _getter(self, option_number=option_number):
        candidates = self.get_option(option_number)
        return candidates[0].value if candidates else None

    def _setter(self, value, option_number=option_number):
        self.delete_option(option_number)
        if value is not None:
            self.add_option(option_number.create_option(value=value))

    def _deleter(self, option_number=option_number):
        self.delete_option(option_number)

    return property(_getter, _setter, _deleter,
            doc or "Single-value view on the %s option."%option_number)
def _items_view(option_number, doc=None):
    """Generate a property for a repeatable option.

    Reading returns a tuple of the values of all option objects with a
    matching number.  Writing removes all options with that number and
    creates new ones from the given iterable; deleting removes them all."""
    def _getter(self, option_number=option_number):
        return tuple(option.value for option in self.get_option(option_number))

    def _setter(self, value, option_number=option_number):
        self.delete_option(option_number)
        for item in value:
            self.add_option(option_number.create_option(value=item))

    def _deleter(self, option_number=option_number):
        self.delete_option(option_number)

    return property(_getter, _setter, _deleter, doc=doc or "Iterable view on the %s option."%option_number)
class Options(object):
    """Represent CoAP Header Options."""

    # this is not so much an optimization as a safeguard -- if custom
    # attributes were placed here, they could be accessed but would not be
    # serialized
    __slots__ = ["_options"]

    def __init__(self):
        # maps an option number to the list of option objects carrying it
        self._options = {}

    def __repr__(self):
        text = ", ".join("%s: %s"%(OptionNumber(k), " / ".join(map(str, v))) for (k, v) in self._options.items())
        # NOTE(review): the format string here had been reduced to "" (the
        # angle-bracketed template was missing), which made repr() raise
        # TypeError ("not all arguments converted"); restored to a usable
        # form.
        return "<aiocoap.Options at %#x: %s>"%(id(self), text or "empty")

    def decode(self, rawdata):
        """Passed a CoAP message body after the token as rawdata, fill self
        with the options starting at the beginning of rawdata, and return the
        rest of the message (the body)."""
        option_number = OptionNumber(0)

        while len(rawdata) > 0:
            if rawdata[0] == 0xFF:
                # payload marker -- everything behind it is the body
                return rawdata[1:]
            dllen = rawdata[0]
            delta = (dllen & 0xF0) >> 4
            length = (dllen & 0x0F)
            rawdata = rawdata[1:]
            (delta, rawdata) = _read_extended_field_value(delta, rawdata)
            (length, rawdata) = _read_extended_field_value(length, rawdata)
            # option numbers are delta-encoded relative to the previous one
            option_number += delta
            option = option_number.create_option(decode=rawdata[:length])
            self.add_option(option)
            rawdata = rawdata[length:]
        # no payload marker: the message has an empty body
        return b''

    def encode(self):
        """Encode all options in option header into string of bytes."""
        data = []
        current_opt_num = 0
        option_list = self.option_list()
        for option in option_list:
            # each option's number is delta-encoded against its predecessor
            delta, extended_delta = _write_extended_field_value(option.number - current_opt_num)
            length, extended_length = _write_extended_field_value(option.length)
            data.append(bytes([((delta & 0x0F) << 4) + (length & 0x0F)]))
            data.append(extended_delta)
            data.append(extended_length)
            data.append(option.encode())
            current_opt_num = option.number
        return (b''.join(data))

    def add_option(self, option):
        """Add option into option header."""
        self._options.setdefault(option.number, []).append(option)

    def delete_option(self, number):
        """Delete option from option header."""
        if number in self._options:
            self._options.pop(number)

    def get_option(self, number):
        """Get option with specified number."""
        return self._options.get(number, ())

    def option_list(self):
        """Iterate over all options, in ascending option number order."""
        return chain.from_iterable(sorted(self._options.values(), key=lambda x: x[0].number))

    uri_path = _items_view(OptionNumber.URI_PATH)
    uri_query = _items_view(OptionNumber.URI_QUERY)
    location_path = _items_view(OptionNumber.LOCATION_PATH)
    location_query = _items_view(OptionNumber.LOCATION_QUERY)
    block2 = _single_value_view(OptionNumber.BLOCK2)
    block1 = _single_value_view(OptionNumber.BLOCK1)
    content_format = _single_value_view(OptionNumber.CONTENT_FORMAT)
    etag = _single_value_view(OptionNumber.ETAG, "Single ETag as used in responses")
    etags = _items_view(OptionNumber.ETAG, "List of ETags as used in requests")
    observe = _single_value_view(OptionNumber.OBSERVE)
    accept = _single_value_view(OptionNumber.ACCEPT)
    uri_host = _single_value_view(OptionNumber.URI_HOST)
    uri_port = _single_value_view(OptionNumber.URI_PORT)
    proxy_uri = _single_value_view(OptionNumber.PROXY_URI)
    proxy_scheme = _single_value_view(OptionNumber.PROXY_SCHEME)
    size1 = _single_value_view(OptionNumber.SIZE1)
aiocoap-0.1+9ae36f4/aiocoap/optiontypes.py 0000664 0000000 0000000 00000011653 12560602342 0020432 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
import abc
import collections
import struct
class OptionType(metaclass=abc.ABCMeta):
    """Interface for decoding and encoding option values

    Instances of :class:`OptionType` are collected in a list in a
    :attr:`.Message.opt` :class:`.Options` object, and provide a translation
    between the CoAP octet-stream (accessed using the
    :meth:`encode()`/:meth:`decode()` method pair) and the interpreted value
    (accessed via the :attr:`value` attribute).

    Note that OptionType objects usually don't need to be handled by library
    users; the recommended way to read and set options is via the Options
    object's properties (eg. ``message.opt.uri_path = ('.well-known',
    'core')``)."""

    @abc.abstractmethod
    def __init__(self, number, value):
        """Set the `self.name` and `self.value` attributes"""

    @abc.abstractmethod
    def encode(self):
        """Return the option's value in serialized form"""

    @abc.abstractmethod
    def decode(self, rawdata):
        """Set the option's value from the bytes in rawdata"""

    @property
    def length(self):
        """Indicate the length of the encoded value"""
        # default implementation; subclasses may override with cheaper logic
        return len(self.encode())
class StringOption(OptionType):
"""String CoAP option - used to represent string options. Always encoded in
UTF8 per CoAP specification."""
def __init__(self, number, value=""):
self.value = value
self.number = number
def encode(self):
# FIXME: actually, this should be utf8 of the net-unicode form (maybe it is)
rawdata = self.value.encode('utf-8')
return rawdata
def decode(self, rawdata):
self.value = rawdata.decode('utf-8')
def _length(self):
return len(self.value.encode('utf-8'))
length = property(_length)
def __str__(self):
return self.value
class OpaqueOption(OptionType):
"""Opaque CoAP option - used to represent options that just have their
uninterpreted bytes as value."""
def __init__(self, number, value=b""):
self.value = value
self.number = number
def encode(self):
rawdata = self.value
return rawdata
def decode(self, rawdata):
self.value = rawdata # if rawdata is not None else ""
def _length(self):
return len(self.value)
length = property(_length)
def __str__(self):
return repr(self.value)
class UintOption(OptionType):
"""Uint CoAP option - used to represent integer options."""
def __init__(self, number, value=0):
self.value = value
self.number = number
def encode(self):
rawdata = struct.pack("!L", self.value) # For Python >3.1 replace with int.to_bytes()
return rawdata.lstrip(bytes([0]))
def decode(self, rawdata): # For Python >3.1 replace with int.from_bytes()
value = 0
for byte in rawdata:
value = (value * 256) + byte
self.value = value
return self
def _length(self):
if self.value > 0:
return (self.value.bit_length() - 1) // 8 + 1
else:
return 0
length = property(_length)
def __str__(self):
return str(self.value)
class BlockOption(OptionType):
"""Block CoAP option - special option used only for Block1 and Block2 options.
Currently it is the only type of CoAP options that has
internal structure."""
class BlockwiseTuple(collections.namedtuple('_BlockwiseTuple', ['block_number', 'more', 'size_exponent'])):
@property
def size(self):
return 2 ** (self.size_exponent + 4)
@property
def start(self):
return self.block_number * self.size
def __init__(self, number, value=None):
if value is not None:
self._value = self.BlockwiseTuple._make(value)
self.number = number
value = property(lambda self: self._value, lambda self, value: setattr(self, '_value', self.BlockwiseTuple._make(value)))
def encode(self):
as_integer = (self.value.block_number << 4) + (self.value.more * 0x08) + self.value.size_exponent
rawdata = struct.pack("!L", as_integer) # For Python >3.1 replace with int.to_bytes()
return rawdata.lstrip(bytes([0]))
def decode(self, rawdata):
as_integer = 0
for byte in rawdata:
as_integer = (as_integer * 256) + byte
self.value = self.BlockwiseTuple(block_number=(as_integer >> 4), more=bool(as_integer & 0x08), size_exponent=(as_integer & 0x07))
def _length(self):
return ((self.value[0].bit_length() + 3) // 8 + 1)
length = property(_length)
def __str__(self):
return str(self.value)
aiocoap-0.1+9ae36f4/aiocoap/protocol.py 0000664 0000000 0000000 00000167110 12560602342 0017676 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
"""This module contains the classes that are responsible for keeping track of messages:
* :class:`Context` roughly represents the CoAP endpoint (basically a UDP
socket) -- something that can send requests and possibly can answer incoming
requests.
* a :class:`Request` gets generated whenever a request gets sent to keep
track of the response
* a :class:`Responder` keeps track of a single incoming request
"""
import random
import struct
import binascii
import functools
import socket
import asyncio
import urllib.parse
from .util.queuewithend import QueueWithEnd
from .util.asyncio import cancel_thoroughly
from .dump import TextDumper
import logging
# log levels used:
# * debug is for things that occur even under perfect conditions.
# * info is for things that are well expected, but might be interesting during
# testing a network of nodes and not debugging the library. (timeouts,
# retransmissions, pings)
# * warning is for everything that indicates a malbehaved client. (these don't
# necessarily indicate a client bug, though; things like requesting a
# nonexistent block can just as well happen when a resource's content has
# changed between blocks).
from . import error
from . import interfaces
from .numbers import *
from .message import Message
class Context(asyncio.DatagramProtocol, interfaces.RequestProvider):
"""An object that passes messages between an application and the network
A :class:`.Context` gets bound to a network interface as an asyncio
protocol. It manages the basic CoAP network mechanisms like message
deduplication and retransmissions, and delegates management of blockwise
transfer as well as the details of matching requests with responses to the
:class:`Request` and :class:`Responder` classes.
In that respect, a Context (as currently implemented) is also an endpoint.
It is anticipated, though, that issues arise due to which the
implementation won't get away with creating a single socket, and that it
will be required to deal with multiple endpoints. (E.g. the V6ONLY=0 option
is not portable to some OS, and implementations might need to bind to
different ports on different interfaces in multicast contexts). When those
distinctions will be implemented, message dispatch will stay with the
context, which will then deal with the individual endpoints.
In a way, a :class:`.Context` is the single object all CoAP messages that
get treated by a single application pass by.
Context creation
----------------
Instead of passing a protocol factory to the asyncio loop's
create_datagram_endpoint method, the following convenience functions are
recommended for creating a context:
.. automethod:: create_client_context
.. automethod:: create_server_context
If you choose to create the context manually, make sure to wait for its
:attr:`ready` future to complete, as only then can messages be sent.
Dispatching messages
--------------------
A context's public API consists of the :meth:`send_message` function,
the :attr:`outgoing_requests`, :attr:`incoming_requests` and
:attr:`outgoing_obvservations` dictionaries, and the :attr:`serversite`
object, but those are not stabilized yet, and for most applications the
following convenience functions are more suitable:
.. automethod:: request
.. automethod:: multicast_request
If more control is needed, eg. with observations, create a
:class:`Request` yourself and pass the context to it.
"""
def __init__(self, loop=None, serversite=None, loggername="coap"):
    """Initialize the context's bookkeeping state; prefer the
    create_client_context / create_server_context helpers over calling this
    directly (see the class docstring)."""
    # rolling counters from which outgoing message IDs and tokens are drawn,
    # started at random positions
    self.message_id = random.randint(0, 65535)
    self.token = random.randint(0, 65535)

    self.serversite = serversite

    # message-layer state
    self._recent_messages = {}  # recently received messages (remote, message-id): None or result-message
    self._active_exchanges = {}  # active exchanges i.e. sent CON messages (remote, message-id): (exchange monitor, cancellable timeout)
    self._backlogs = {}  # per-remote list of (backlogged package, exchange-monitor) tuples (keys exist iff there is an active_exchange with that node)

    # request/response and observation bookkeeping
    self.outgoing_requests = {}  # unfinished outgoing requests (identified by token and remote)
    self.incoming_requests = {}  # unfinished incoming requests; (path-tuple, remote): Request
    self.outgoing_observations = {}  # observations where this context acts as client; (token, remote) -> ClientObservation
    self.incoming_observations = {}  # observations where this context acts as server; (token, remote) -> ServerObservation; managed by ServerObservation and Responder.handle_observe_request

    self.log = logging.getLogger(loggername)
    self.loop = loop or asyncio.get_event_loop()

    # fulfilled by connection_made (i.e. don't send before this is done;
    # handled by ``create_..._context``)
    self.ready = asyncio.Future()
    # Future created and used in the .shutdown() method
    self._shutting_down = None
@asyncio.coroutine
def shutdown(self):
    """Take down the listening socket and stop all related timers.

    After this coroutine terminates, and once all external references to
    the object are dropped, it should be garbage-collectable."""
    # completed from connection_lost once the transport reports closure
    self._shutting_down = asyncio.Future()

    self.log.debug("Shutting down context")
    # cancel every pending retransmission timer and notify its monitor (if
    # any) that the exchange will not complete
    for exchange_monitor, cancellable in self._active_exchanges.values():
        if exchange_monitor is not None:
            exchange_monitor.cancelled()
        cancellable.cancel()
    # tell all observers that this server is going away
    for observation in list(self.incoming_observations.values()):
        observation.deregister("Server going down")
    self._active_exchanges = None

    self.transport.close()

    # wait until connection_lost has run
    yield from self._shutting_down
#
# implementing the typical DatagramProtocol interfaces.
#
# note from the documentation: we may rely on connection_made to be called
# before datagram_received -- but sending immediately after context
# creation will still fail
def connection_made(self, transport):
    """Implementation of the DatagramProtocol interface, called by the transport."""
    # keep the transport for sending, and unblock anybody awaiting `ready`
    self.transport = transport
    self.ready.set_result(True)
def datagram_received(self, data, address):
    """Implementation of the DatagramProtocol interface, called by the transport."""
    try:
        incoming = Message.decode(data, address)
    except error.UnparsableMessage:
        self.log.warning("Ignoring unparsable message from %s"%(address,))
    else:
        self._dispatch_message(incoming)
def error_received(self, exc):
    """Implementation of the DatagramProtocol interface, called by the transport."""
    # TODO: set IP_RECVERR so icmp "destination unreachable (port
    # unreachable)" & co stop retransmissions and err back quickly
    self.log.error("Error received: %s"%exc)
def connection_lost(self, exc):
    # TODO better error handling -- find out what can cause this at all
    # except for a shutdown
    # NOTE(review): the nesting below was reconstructed from a
    # whitespace-mangled source; the two ifs are read as siblings because
    # shutdown() closes the transport and then awaits _shutting_down, which
    # requires set_result to run on a clean (exc is None) close -- confirm
    # against upstream.
    if exc is not None:
        self.log.error("Connection lost: %s"%exc)

    if self._shutting_down is None:
        # nobody called shutdown(), so this closure is unexpected
        self.log.error("Connection loss was not expected.")
    else:
        # let the pending shutdown() coroutine finish
        self._shutting_down.set_result(None)
# pause_writing and resume_writing are not implemented, as the protocol
# should take care of not flooding the output itself anyway (NSTART etc).
#
# coap dispatch
#
def _dispatch_message(self, message):
    """Feed a message through the message-id, message-type and message-code
    sublayers of CoAP"""
    self.log.debug("Incoming message %r" % message)
    if self._deduplicate_message(message) is True:
        return

    if message.mtype in (ACK, RST):
        # terminates the matching exchange and stops retransmissions
        self._remove_exchange(message)

    if message.code is EMPTY and message.mtype is CON:
        # a CoAP ping
        self._process_ping(message)
    elif message.code is EMPTY and message.mtype in (ACK, RST):
        pass # empty ack has already been handled above
    elif message.code.is_request() and message.mtype in (CON, NON):
        # the request handler will have to deal with sending ACK itself, as
        # it might be timeout-related
        self._process_request(message)
    elif message.code.is_response() and message.mtype in (CON, NON, ACK):
        success = self._process_response(message)
        if success:
            if message.mtype is CON:
                #TODO: Some variation of send_empty_ack should be used
                ack = Message(mtype=ACK, mid=message.mid, code=EMPTY, payload=b"")
                ack.remote = message.remote
                self.send_message(ack)
        else:
            self.log.info("Response not recognized - sending RST.")
            # fix: payload must be bytes (was the text string '' here, while
            # every other empty message in this file uses b'' / b"")
            rst = Message(mtype=RST, mid=message.mid, code=EMPTY, payload=b'')
            rst.remote = message.remote
            self.send_message(rst)
    else:
        self.log.warning("Received a message with code %s and type %s (those don't fit) from %s, ignoring it."%(message.code, message.mtype, message.remote))
#
# coap dispatch, message-id sublayer: duplicate handling
#
def _deduplicate_message(self, message):
    """Return True if a message is a duplicate, and re-send the stored
    response if available.

    Duplicate is a message with the same Message ID (mid) and sender
    (remote), as message received within last EXCHANGE_LIFETIME seconds
    (usually 247 seconds)."""
    key = (message.remote, message.mid)
    if key in self._recent_messages:
        if message.mtype is CON:
            if self._recent_messages[key] is not None:
                # the peer evidently missed our response; send the cached one again
                self.log.info('Duplicate CON received, sending old response again')
                self.send_message(self._recent_messages[key])
            else:
                self.log.info('Duplicate CON received, no response to send yet')
        else:
            self.log.info('Duplicate NON, ACK or RST received')
        return True
    else:
        self.log.debug('New unique message received')
        # remember the key for EXCHANGE_LIFETIME; the None placeholder is
        # replaced with the actual response by _store_response_for_duplicates
        self.loop.call_later(EXCHANGE_LIFETIME, functools.partial(self._recent_messages.pop, key))
        self._recent_messages[key] = None
        return False
def _store_response_for_duplicates(self, message):
"""If the message is the response can be used to satisfy a future
duplicate message, store it."""
key = (message.remote, message.mid)
if key in self._recent_messages:
self._recent_messages[key] = message
#
# coap dispatch, message-type sublayer: retransmission handling
#
def _add_exchange(self, message, exchange_monitor=None):
    """Add an "exchange" for outgoing CON message.

    CON (Confirmable) messages are automatically retransmitted by protocol
    until ACK or RST message with the same Message ID is received from
    target host."""
    key = (message.remote, message.mid)
    # opening an exchange also opens a backlog list for the remote, so that
    # further CON messages queue up behind this one (see send_message)
    if message.remote not in self._backlogs:
        self._backlogs[message.remote] = []
    # first retransmission delay is jittered between ACK_TIMEOUT and
    # ACK_TIMEOUT * ACK_RANDOM_FACTOR
    timeout = random.uniform(ACK_TIMEOUT, ACK_TIMEOUT * ACK_RANDOM_FACTOR)
    next_retransmission = self._schedule_retransmit(message, timeout, 0)
    self._active_exchanges[key] = (exchange_monitor, next_retransmission)
    self.log.debug("Exchange added, message ID: %d." % message.mid)
def _remove_exchange(self, message):
    """Remove exchange from active exchanges and cancel the timeout to next
    retransmission.

    Called for incoming ACK/RST messages; notifies the exchange monitor (if
    any) and lets backlogged messages to the same remote proceed."""
    key = (message.remote, message.mid)
    if key not in self._active_exchanges:
        # stale or unsolicited ACK/RST -- nothing to clean up
        self.log.info("Received %s from %s, but could not match it to a running exchange."%(message.mtype, message.remote))
        return

    exchange_monitor, next_retransmission = self._active_exchanges.pop(key)
    cancel_thoroughly(next_retransmission)
    if exchange_monitor is not None:
        if message.mtype is RST:
            exchange_monitor.rst()
        else:
            exchange_monitor.response(message)
    self.log.debug("Exchange removed, message ID: %d." % message.mid)

    # the freed exchange slot may allow a backlogged message to go out
    self._continue_backlog(message.remote)
def _continue_backlog(self, remote):
"""After an exchange has been removed, start working off the backlog or
clear it completely."""
if remote not in self._backlogs:
# if active exchanges were something we could do a
# .register_finally() on, we could chain them like that; if we
# implemented anything but NSTART=1, we'll need a more elaborate
# system anyway
raise AssertionError("backlogs/active_exchange relation violated (implementation error)")
# first iteration is sure to happen, others happen only if the enqueued
# messages were NONs
while not any(r == remote for r, mid in self._active_exchanges.keys()):
if self._backlogs[remote] != []:
next_message, exchange_monitor = self._backlogs[remote].pop(0)
self._send(next_message, exchange_monitor)
else:
del self._backlogs[remote]
break
def _schedule_retransmit(self, message, timeout, retransmission_counter):
    """Create and return a call_later for first or subsequent
    retransmissions."""
    # while this could just as well be done in a lambda or with the
    # arguments passed to call_later, in this form makes the test cases
    # easier to debug (it's about finding where references to a Context
    # are kept around; contexts should be able to shut down in an orderly
    # way without littering references in the loop)
    #
    # all state is bound via default arguments so each `retr` is a
    # self-contained, individually identifiable callable
    def retr(self=self,
            message=message,
            timeout=timeout,
            retransmission_counter=retransmission_counter,
            doc="If you read this, have a look at _schedule_retransmit",
            id=object()):
        self._retransmit(message, timeout, retransmission_counter)
    return self.loop.call_later(timeout, retr)
def _retransmit(self, message, timeout, retransmission_counter):
    """Retransmit CON message that has not been ACKed or RSTed.

    Re-sends the encoded message with binary exponential backoff until
    MAX_RETRANSMIT attempts have been made, then gives up and notifies the
    exchange monitor (if any)."""
    key = (message.remote, message.mid)

    exchange_monitor, next_retransmission = self._active_exchanges.pop(key)
    # this should be a no-op, but let's be sure
    cancel_thoroughly(next_retransmission)

    if retransmission_counter < MAX_RETRANSMIT:
        self.log.info("Retransmission, Message ID: %d." % message.mid)
        self.transport.sendto(message.encode(), message.remote)
        retransmission_counter += 1
        # double the timeout for the next attempt
        timeout *= 2

        next_retransmission = self._schedule_retransmit(message, timeout, retransmission_counter)
        self._active_exchanges[key] = (exchange_monitor, next_retransmission)
        if exchange_monitor is not None:
            exchange_monitor.retransmitted()
    else:
        # give up on this exchange; let backlogged messages proceed
        self.log.info("Exchange timed out")
        if exchange_monitor is not None:
            exchange_monitor.timeout()
        self._continue_backlog(message.remote)
#
# coap dispatch, message-code sublayer: triggering custom actions based on incoming messages
#
def _process_ping(self, message):
    """Answer a CoAP ping (empty CON message) with a matching RST."""
    self.log.info('Received CoAP Ping from %s, replying with RST.'%(message.remote,))
    pong = Message(mtype=RST, mid=message.mid, code=EMPTY, payload=b'')
    pong.remote = message.remote
    self.send_message(pong)
def _process_request(self, request):
"""Spawn a Responder for an incoming request, or feed a long-running
responder if one exists."""
key = tuple(request.opt.uri_path), request.remote
if key in self.incoming_requests:
self.log.debug("Delivering request to existing responder.")
self.incoming_requests.pop(key).handle_next_request(request)
else:
responder = Responder(self, request)
def _process_response(self, response):
"""Feed a response back to whatever might expect it.
Returns True if the response was expected (and should be ACK'd
depending on mtype), ans False if it was not expected (and should be
RST'd)."""
self.log.debug("Received Response: %r" % response)
request = self.outgoing_requests.pop((response.token, response.remote), None)
if request is not None:
request.handle_response(response)
return True
request = self.outgoing_requests.get((response.token, None), None)
if request is not None:
# that's exactly the `MulticastRequest`s so far
request.handle_response(response)
return True
observation = self.outgoing_observations.get((response.token, response.remote), None)
if observation is not None:
## @TODO: deduplication based on observe option value, collecting
# the rest of the resource if blockwise
observation.callback(response)
if response.opt.observe is None:
self.outgoing_observations[(response.token, response.remote)].error(error.ObservationCancelled())
return True
return False
#
# outgoing messages
#
def send_message(self, message, exchange_monitor=None):
    """Encode and send message. This takes care of retransmissions (if
    CON), message IDs and rate limiting, but does not hook any events to
    responses. (Use the :class:`Request` class or responding resources
    instead; those are the typical callers of this function.)

    If notification about the progress of the exchange is required, an
    ExchangeMonitor can be passed in, which will receive the appropriate
    callbacks."""
    if message.mtype == CON and message.has_multicast_remote():
        raise ValueError("Refusing to send CON message to multicast address")

    if message.mid is None:
        message.mid = self._next_message_id()

    # while a CON exchange to this remote is open (a backlog list exists),
    # further CON messages wait their turn; _continue_backlog works them off
    if message.mtype == CON and message.remote in self._backlogs:
        self.log.debug("Message to %s put into backlog"%(message.remote,))
        if exchange_monitor is not None:
            exchange_monitor.enqueued()
        self._backlogs[message.remote].append((message, exchange_monitor))
    else:
        self._send(message, exchange_monitor)
def _send(self, message, exchange_monitor=None):
    """Put the message on the wire, starting retransmission timeouts"""
    self.log.debug("Sending message %r" % message)

    # CON messages get an exchange so they are retransmitted until confirmed
    if message.mtype is CON:
        self._add_exchange(message, exchange_monitor)

    if exchange_monitor is not None:
        exchange_monitor.sent()

    # keep the message available for duplicate-triggered re-sends
    self._store_response_for_duplicates(message)

    wire_data = message.encode()
    self.transport.sendto(wire_data, message.remote)
def _next_message_id(self):
"""Reserve and return a new message ID."""
message_id = self.message_id
self.message_id = 0xFFFF & (1 + self.message_id)
return message_id
def next_token(self):
    """Reserve and return a new Token for request."""
    #TODO: add proper Token handling
    token = self.token
    self.token = (self.token + 1) & 0xffffffffffffffff
    # fix: return the *reserved* token; previously the freshly incremented
    # counter was returned and the reserved value was discarded (dead local)
    return binascii.a2b_hex("%08x" % token)
#
# request interfaces
#
def request(self, request, **kwargs):
    """Create and return a :class:`Request` for the given message, bound to
    this context.

    TODO: create a proper interface to implement and deprecate direct instanciation again"""
    outgoing = Request(self, request, **kwargs)
    return outgoing
def multicast_request(self, request):
    """Send a multicast request and return the queue its responses arrive on."""
    pending = MulticastRequest(self, request)
    return pending.responses
#
# convenience methods for class instanciation
#
@classmethod
@asyncio.coroutine
def create_client_context(cls, *, dump_to=None, loggername="coap"):
    """Create a context bound to all addresses on a random listening port.

    This is the easiest way to get an context suitable for sending client
    requests.
    """
    loop = asyncio.get_event_loop()

    protofact = lambda: cls(loop, None, loggername=loggername)
    if dump_to is not None:
        # NOTE(review): here the raw class `cls` is passed to
        # endpointfactory, while create_server_context passes `protofact`
        # (which carries loop/loggername) -- confirm this asymmetry is
        # intended
        protofact = TextDumper.endpointfactory(open(dump_to, 'w'), cls)

    #transport, protocol = yield from loop.create_datagram_endpoint(protofact, family=socket.AF_INET)
    # use the following lines instead, and change the address to `::ffff:127.0.0.1`
    # in order to see acknowledgement handling fail with hybrid stack operation
    transport, protocol = yield from loop.create_datagram_endpoint(protofact, family=socket.AF_INET6)
    # allow IPv4-mapped traffic; reaches into the transport's private socket
    # as asyncio exposes no public API for socket options here
    transport._sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)

    if dump_to is not None:
        # unwrap the dumper to get at the actual CoAP protocol instance
        protocol = protocol.protocol
    yield from protocol.ready
    return protocol
@classmethod
@asyncio.coroutine
def create_server_context(cls, site, bind=("::", COAP_PORT), *, dump_to=None, loggername="coap-server"):
    """Create an context, bound to all addresses on the CoAP port (unless
    otherwise specified in the ``bind`` argument).

    This is the easiest way to get a context suitable both for sending
    client and accepting server requests."""
    loop = asyncio.get_event_loop()

    protofact = lambda: cls(loop, site, loggername=loggername)
    if dump_to is not None:
        # wrap the protocol factory so all traffic is also dumped to a file
        protofact = TextDumper.endpointfactory(open(dump_to, 'w'), protofact)

    transport, protocol = yield from loop.create_datagram_endpoint(protofact, family=socket.AF_INET6)
    # allow IPv4-mapped traffic and bind explicitly; both reach into the
    # transport's private socket (no public asyncio API for this)
    transport._sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
    transport._sock.bind(bind)

    if dump_to is not None:
        # unwrap the dumper to get at the actual CoAP protocol instance
        protocol = protocol.protocol
    yield from protocol.ready
    return protocol
def kill_transactions(self, remote, exception=error.CommunicationKilled):
    """Abort all pending exchanges and observations to a given remote.

    The exact semantics of this are not yet completely frozen -- currently,
    pending exchanges are treated as if they timeouted, server sides of
    observations are dropped and client sides of observations receive an
    errback.

    Requests that are not part of an exchange, eg. NON requests or requests
    that are waiting for their responses after an empty ACK are currently
    not handled."""
    # fix: iterate over a snapshot -- entries are popped inside the loop,
    # and mutating a dict while iterating its items() raises RuntimeError
    # (the two observation loops below already used list() for this reason)
    for ((exchange_remote, messageid), (exchangemonitor, cancellabletimeout)) in list(self._active_exchanges.items()):
        if remote != exchange_remote:
            continue

        ## FIXME: this should receive testing, but a test setup would need
        # precise timing to trigger this code path
        ## FIXME: this does not actually abort the request, as the protocol
        # does not have a way to tell a request that it won't complete. so
        # actually, the request will just need to time out. (typical
        # requests don't use an exchange monitor).
        cancellabletimeout.cancel()
        if exchangemonitor is not None:
            exchangemonitor.rst()
        self._active_exchanges.pop((exchange_remote, messageid))

    for ((token, obs_remote), clientobservation) in list(self.outgoing_observations.items()):
        if remote != obs_remote:
            continue
        clientobservation.error(exception())

    for ((token, obs_remote), serverobservation) in list(self.incoming_observations.items()):
        if remote != obs_remote:
            continue
        ## FIXME this is not tested either
        serverobservation.deregister("Dropping due to kill_transactions")
class BaseRequest(object):
    """Common mechanisms of :class:`Request` and :class:`MulticastRequest`"""

    @asyncio.coroutine
    def _fill_remote(self, request):
        """Populate ``request.remote`` by resolving ``.unresolved_remote`` or
        the Uri-Host/Uri-Port options; a no-op if ``.remote`` is already set.

        Raises ValueError if no destination can be determined."""
        if request.remote is None:
            if request.unresolved_remote is not None or request.opt.uri_host:
                ## @TODO this is very rudimentary; happy-eyeballs or
                # similar could be employed.

                if request.unresolved_remote is not None:
                    # parse the "host[:port]" authority via urllib by wrapping
                    # it in a SplitResult
                    pseudoparsed = urllib.parse.SplitResult(None, request.unresolved_remote, None, None, None)
                    host = pseudoparsed.hostname
                    port = pseudoparsed.port or COAP_PORT
                else:
                    host = request.opt.uri_host
                    port = request.opt.uri_port or COAP_PORT

                # resolve with the same family/proto as the context's socket
                # so the result is usable with this transport
                addrinfo = yield from self.protocol.loop.getaddrinfo(
                    host,
                    port,
                    family=self.protocol.transport._sock.family,
                    type=0,
                    proto=self.protocol.transport._sock.proto,
                    flags=socket.AI_V4MAPPED,
                    )
                request.remote = addrinfo[0][-1]
            else:
                raise ValueError("No location found to send message to (neither in .opt.uri_host nor in .remote)")
class Request(BaseRequest, interfaces.Request):
    """Class used to handle single outgoing request.

    Class includes methods that handle sending outgoing blockwise requests and
    receiving incoming blockwise responses."""

    def __init__(self, protocol, app_request, exchange_monitor_factory=(lambda message: None), handle_blockwise=True):
        # context through which messages are sent and responses received
        self.protocol = protocol
        self.log = self.protocol.log.getChild("requester")
        # the complete application-level request (split into blocks as needed)
        self.app_request = app_request
        # accumulator for a blockwise (Block2) response under reassembly
        self._assembled_response = None
        self.handle_blockwise = handle_blockwise
        # called once per outgoing message; may produce an ExchangeMonitor
        self._exchange_monitor_factory = exchange_monitor_factory

        # True once the last Block1 block (or the single request) was sent
        self._request_transmitted_completely = False

        # handle of the REQUEST_TIMEOUT timer, cancelled on response/cancel
        self._requesttimeout = None

        if self.app_request.code.is_request() is False:
            raise ValueError("Message code is not valid for request")

        # future carrying the final (fully reassembled) response
        self.response = asyncio.Future()
        self.response.add_done_callback(self._response_cancellation_handler)

        if self.app_request.opt.observe is not None:
            self.observation = ClientObservation(self.app_request)
            self.response.add_done_callback(self.register_observation)

        asyncio.async(self._init_phase2())

    @asyncio.coroutine
    def _init_phase2(self):
        """Later aspects of initialization that deal more with sending the
        message than with the setup of the requester

        Those are split off into a dedicated function because completion might
        depend on async results."""
        try:
            yield from self._fill_remote(self.app_request)

            size_exp = DEFAULT_BLOCK_SIZE_EXP
            # block size 2**(size_exp + 4): payloads larger than that are
            # split into a Block1 transfer (if blockwise handling is enabled)
            if len(self.app_request.payload) > (2 ** (size_exp + 4)) and self.handle_blockwise:
                request = self.app_request._extract_block(0, size_exp)
                self.app_request.opt.block1 = request.opt.block1
            else:
                request = self.app_request
                self._request_transmitted_completely = True

            self.send_request(request)
        except Exception as e:
            self.response.set_exception(e)

    def cancel(self):
        """Abort the request: stop the timeout timer and cancel the response
        future."""
        # TODO cancel ongoing exchanges
        if self._requesttimeout:
            cancel_thoroughly(self._requesttimeout)
        self.response.cancel()

    def _response_cancellation_handler(self, response_future):
        """Done-callback on self.response: stop the timeout timer, and turn
        an outside cancellation of the future into a full self.cancel()."""
        if self._requesttimeout:
            cancel_thoroughly(self._requesttimeout)
        if self.response.cancelled():
            self.cancel()

    def send_request(self, request):
        """Send a request or single request block.

        This method is used in 3 situations:
        - sending non-blockwise request
        - sending blockwise (Block1) request block
        - asking server to send blockwise (Block2) response block
        """

        def timeout_request(self=self):
            """Clean the Request after a timeout."""
            self.log.info("Request timed out")
            del self.protocol.outgoing_requests[(request.token, request.remote)]
            self.response.set_exception(error.RequestTimedOut())

        if request.mtype is None:
            request.mtype = CON
        request.token = self.protocol.next_token()

        try:
            self.protocol.send_message(request, self._exchange_monitor_factory(request))
        except Exception as e:
            self.response.set_exception(e)
        else:
            # (re)arm the per-block request timeout and register for responses
            if self._requesttimeout:
                cancel_thoroughly(self._requesttimeout)
            self.log.debug("Timeout is %r"%REQUEST_TIMEOUT)
            self._requesttimeout = self.protocol.loop.call_later(REQUEST_TIMEOUT, timeout_request)
            self.protocol.outgoing_requests[(request.token, request.remote)] = self
            self.log.debug("Sending request - Token: %s, Remote: %s" % (binascii.b2a_hex(request.token).decode('ascii'), request.remote))

    def handle_response(self, response):
        """Entry point for responses from the protocol; route to Block1 or
        Block2 processing depending on transmission progress."""
        if not self._request_transmitted_completely:
            self.process_block1_in_response(response)
        else:
            self.process_block2_in_response(response)

    def process_block1_in_response(self, response):
        """Process incoming response with regard to Block1 option."""
        if response.opt.block1 is None:
            # it's not up to us here to
            if response.code.is_successful(): # an error like "unsupported option" would be ok to return, but success?
                self.log.warning("Block1 option completely ignored by server, assuming it knows what it is doing.")
            self.process_block2_in_response(response)
            return

        block1 = response.opt.block1
        self.log.debug("Response with Block1 option received, number = %d, more = %d, size_exp = %d." % (block1.block_number, block1.more, block1.size_exponent))

        if block1.block_number != self.app_request.opt.block1.block_number:
            # NOTE(review): execution continues after this set_exception; a
            # `return` here looks intended -- confirm before changing
            self.response.set_exception(UnexpectedBlock1Option())

        if block1.size_exponent < self.app_request.opt.block1.size_exponent:
            # server asked for smaller blocks: renumber to the finer grid
            next_number = (self.app_request.opt.block1.block_number + 1) * 2 ** (self.app_request.opt.block1.size_exponent - block1.size_exponent)
            next_block = self.app_request._extract_block(next_number, block1.size_exponent)
        else:
            next_block = self.app_request._extract_block(self.app_request.opt.block1.block_number + 1, block1.size_exponent)

        if next_block is not None:
            self.app_request.opt.block1 = next_block.opt.block1

            # TODO: ignoring block1.more so far -- if it is False, we might use
            # the information about what has been done so far.

            self.send_request(next_block)
        else:
            if block1.more is False:
                self._request_transmitted_completely = True
                self.process_block2_in_response(response)
            else:
                self.response.set_exception(UnexpectedBlock1Option())

    def process_block2_in_response(self, response):
        """Process incoming response with regard to Block2 option."""
        if self.response.done():
            self.log.info("Disregarding incoming message as response Future is done (probably cancelled)")
            return

        if response.opt.block2 is not None and self.handle_blockwise:
            block2 = response.opt.block2
            self.log.debug("Response with Block2 option received, number = %d, more = %d, size_exp = %d." % (block2.block_number, block2.more, block2.size_exponent))
            if self._assembled_response is not None:
                try:
                    self._assembled_response._append_response_block(response)
                except error.Error as e:
                    self.log.error("Error assembling blockwise response, passing on error %r"%e)
                    self.response.set_exception(e)
                    return
            else:
                if block2.block_number == 0:
                    self.log.debug("Receiving blockwise response")
                    self._assembled_response = response
                else:
                    self.log.error("Error assembling blockwise response (expected first block)")
                    self.response.set_exception(UnexpectedBlock2())
                    return

            if block2.more is True:
                # ask the server for the next block
                self.send_request(self.app_request._generate_next_block2_request(response))
            else:
                self.handle_final_response(self._assembled_response)
        else:
            if self._assembled_response is not None:
                self.log.warning("Server sent non-blockwise response after having started a blockwise transfer. Blockwise transfer cancelled, accepting single response.")
            self.handle_final_response(response)

    def handle_final_response(self, response):
        """Annotate the response with the request's addressing data and
        fulfill the response future."""
        response.requested_host = self.app_request.opt.uri_host
        response.requested_port = self.app_request.opt.uri_port
        response.requested_path = self.app_request.opt.uri_path
        response.requested_query = self.app_request.opt.uri_query

        self.response.set_result(response)

    def register_observation(self, response_future):
        """Done-callback on self.response: start the observation on success
        (when the server echoed the observe option), errback it otherwise."""
        # we could have this be a coroutine, then it would be launched
        # immediately instead of as add_done_callback to self.response, but it
        # doesn't give an advantage, we'd still have to check for whether the
        # observation has been cancelled before setting an error, and we'd just
        # one more task around
        try:
            response = response_future.result()
        except Exception as e:
            if not self.observation.cancelled:
                self.observation.error(e)
            return

        if response.opt.observe is None:
            if not self.observation.cancelled:
                self.observation.error(error.NotObservable())
        else:
            self.observation._register(self.protocol.outgoing_observations, (response.token, response.remote))
class MulticastRequest(BaseRequest):
def __init__(self, protocol, request):
self.protocol = protocol
self.log = self.protocol.log.getChild("requester")
self.request = request
if self.request.mtype != NON or self.request.code != GET or self.request.payload:
raise ValueError("Multicast currently only supportet for NON GET")
self.responses = QueueWithEnd()
asyncio.async(self._init_phase2())
@asyncio.coroutine
def _init_phase2(self):
"""See :meth:`Request._init_phase2`"""
try:
yield from self._fill_remote(self.request)
yield from self._send_request(self.request)
except Exception as e:
self.responses.put_exception(e)
def _send_request(self, request):
request.token = self.protocol.next_token()
try:
self.protocol.send_message(request)
except Exception as e:
self.responses.put_exception(e)
return
self.protocol.outgoing_requests[(request.token, None)] = self
self.log.debug("Sending multicast request - Token: %s, Remote: %s" % (binascii.b2a_hex(request.token).decode('ascii'), request.remote))
self.protocol.loop.call_later(MULTICAST_REQUEST_TIMEOUT, self._timeout)
for i in range(5):
# FIXME that's not what the spec says. what does the spec say?
yield from asyncio.sleep(i/2)
self.protocol.send_message(request)
def handle_response(self, response):
# not setting requested_host / port, that needs to come from the remote
response.requested_path = self.request.opt.uri_path
response.requested_query = self.request.opt.get_option(OptionNumber.URI_QUERY) or ()
# FIXME this should somehow backblock, but it's udp
asyncio.async(self.responses.put(response))
def _timeout(self):
self.protocol.outgoing_requests.pop(self.request.token, None)
self.responses.finish()
class Responder(object):
"""Handler for an incoming request or (in blockwise) a group thereof
Class includes methods that handle receiving incoming blockwise requests
(only atomic operation on complete requests), searching for target
resources, preparing responses and sending outgoing blockwise responses.
To keep an eye on exchanges going on, a factory for ExchangeMonitor can be
passed in that generates a monitor for every single message exchange
created during the response."""
def __init__(self, protocol, request, exchange_monitor_factory=(lambda message: None)):
    """Set up responder state and start dispatching the initial request."""
    self.protocol = protocol
    self.log = self.protocol.log.getChild("responder")
    # lookup key under which this responder receives follow-up blocks
    self.key = tuple(request.opt.uri_path), request.remote

    self.log.debug("New responder created, key %s"%(self.key,))

    # partial request while more block1 messages are incoming
    self._assembled_request = None
    self.app_response = None
    # that will be passed the single request. take care that this does not
    # linger -- either enqueue with incoming_requests (and a timeout), or
    # send a response which cancels the future.
    self.app_request = asyncio.Future()
    # used to track whether to reply with ACK or CON
    self._sent_empty_ack = False

    self._serverobservation = None

    # called once per outgoing message; may produce an ExchangeMonitor
    self._exchange_monitor_factory = exchange_monitor_factory

    # handle of the MAX_TRANSMIT_WAIT next-block timer (see
    # send_non_final_response); None until a non-final response was sent
    self._next_block_timeout = None

    asyncio.Task(self.dispatch_request(request))
def handle_next_request(self, request):
    """Feed the next message of a (possibly blockwise) exchange into this
    responder, routing it to Block1 or Block2 processing depending on
    whether the complete request has been assembled yet."""
    if self._next_block_timeout is not None: # that'd be the case only for the first time
        cancel_thoroughly(self._next_block_timeout)

    # idiom fix: `not ...done()` instead of `...done() == False`
    if not self.app_request.done():
        self.process_block1_in_request(request)
    else:
        self.process_block2_in_request(request)
def process_block1_in_request(self, request):
    """Process an incoming request while in block1 phase.

    This method is responsible for finishing the app_request future
    and thus indicating that it should not be called any more, or
    scheduling itself again."""
    if request.opt.block1 is not None:
        block1 = request.opt.block1
        self.log.debug("Request with Block1 option received, number = %d, more = %d, size_exp = %d." % (block1.block_number, block1.more, block1.size_exponent))
        if block1.block_number == 0:
            #TODO: Check if resource is available - if not send error immediately
            #TODO: Check if method is allowed - if not send error immediately
            self.log.debug("New or restarted incoming blockwise request.")
            self._assembled_request = request
        else:
            if self._assembled_request is None:
                # block > 0 without a transfer in progress
                self.respond_with_error(request, REQUEST_ENTITY_INCOMPLETE, "Beginning of block1 transaction unknown to server")
                return

            try:
                self._assembled_request._append_request_block(request)
            except error.NotImplemented:
                self.respond_with_error(request, NOT_IMPLEMENTED, "Error: Request block received out of order!")
                return
        if block1.more is True:
            #TODO: SUCCES_CODE Code should be either Changed or Created - Resource check needed
            #TODO: SIZE_CHECK1 should check if the size of incoming payload is still acceptable
            #TODO: SIZE_CHECK2 should check if Size option is present, and reject the resource if size too large

            self.log.debug("Sending block acknowledgement (allowing client to send next block).")

            # a non-final response re-registers this responder for the next block
            self.send_non_final_response(request._generate_next_block1_response(), request)
        else:
            self.log.debug("Complete blockwise request received.")
            self.app_request.set_result(self._assembled_request)
    else:
        if self._assembled_request is not None:
            self.log.warning("Non-blockwise request received during blockwise transfer. Blockwise transfer cancelled, responding to single request.")
        self.app_request.set_result(request)
@asyncio.coroutine
def dispatch_request(self, initial_block):
    """Dispatch incoming request - search context resource tree for
    resource in Uri Path and call proper CoAP Method on it."""

    if self.protocol.serversite is None:
        self.respond_with_error(initial_block, NOT_FOUND, "Context is not a server")
        return

    try:
        needs_blockwise = yield from self.protocol.serversite.needs_blockwise_assembly(initial_block)
    except Exception as e:
        self.respond_with_error(initial_block, INTERNAL_SERVER_ERROR, "")
        self.log.error("An exception occurred while requesting needs_blockwise: %r"%e)
        self.log.exception(e)
        return

    if needs_blockwise:
        # collect all Block1 blocks; app_request completes with the
        # fully assembled request
        self.handle_next_request(initial_block)

        try:
            request = yield from self.app_request
        except asyncio.CancelledError:
            # error has been handled somewhere else
            return
    else:
        request = initial_block

    #TODO: Request with Block2 option and non-zero block number should get error response

    # send a separate empty ACK if rendering takes longer than EMPTY_ACK_DELAY
    delayed_ack = self.protocol.loop.call_later(EMPTY_ACK_DELAY, self.send_empty_ack, request)

    yield from self.handle_observe_request(request)

    try:
        response = yield from self.protocol.serversite.render(request)
    except error.RenderableError as e:
        self.respond_with_error(request, e.code, e.message)
    except Exception as e:
        self.respond_with_error(request, INTERNAL_SERVER_ERROR, "")
        self.log.error("An exception occurred while rendering a resource: %r"%e)
        self.log.exception(e)
    else:
        if not response.code.is_response():
            self.log.warning("Response does not carry response code (%r), application probably violates protocol."%response.code)
        if needs_blockwise:
            self.respond(response, request)
        else:
            self.send_final_response(response, request)
    finally:
        cancel_thoroughly(delayed_ack)
def respond_with_error(self, request, code, payload):
    """Helper method to send error response to client."""
    encoded_payload = payload.encode('ascii')
    self.log.info("Sending error response: %r" % (encoded_payload,))
    error_response = Message(code=code, payload=encoded_payload)
    self.respond(error_response, request)
def respond(self, app_response, request):
    """Take application-supplied response and prepare it for sending."""

    # if there was an error, make sure nobody hopes to get a result any more
    self.app_request.cancel()

    self.handle_observe_response(request, app_response)

    self.log.debug("Preparing response...")
    self.app_response = app_response
    # block size: honor the client's Block2 preference (if any), but never
    # exceed our own default
    size_exp = min(request.opt.block2.size_exponent if request.opt.block2 is not None else DEFAULT_BLOCK_SIZE_EXP, DEFAULT_BLOCK_SIZE_EXP)
    if len(self.app_response.payload) > (2 ** (size_exp + 4)):
        # payload too large for one message: start a Block2 transfer
        first_block = self.app_response._extract_block(0, size_exp)
        self.app_response.opt.block2 = first_block.opt.block2
        self.send_non_final_response(first_block, request)
    else:
        self.send_final_response(app_response, request)
def process_block2_in_request(self, request):
    """Process incoming request with regard to Block2 option

    Method is recursive - calls itself until all response blocks are sent
    to client."""
    if request.opt.block2 is not None:
        block2 = request.opt.block2
        self.log.debug("Request with Block2 option received, number = %d, more = %d, size_exp = %d." % (block2.block_number, block2.more, block2.size_exponent))

        # the client names the block (and size) it wants next
        next_block = self.app_response._extract_block(block2.block_number, block2.size_exponent)
        if next_block is None:
            # TODO is this the right error code here?
            self.respond_with_error(request, REQUEST_ENTITY_INCOMPLETE, "Request out of range")
            return
        if next_block.opt.block2.more is True:
            self.app_response.opt.block2 = next_block.opt.block2
            self.send_non_final_response(next_block, request)
        else:
            self.send_final_response(next_block, request)
    else:
        # TODO is this the right error code here?
        self.respond_with_error(request, REQUEST_ENTITY_INCOMPLETE, "Requests after a block2 response must carry the block2 option.")
def send_non_final_response(self, response, request):
    """Helper method to send a response to client, and setup a timeout for
    client. This also registers the responder with the protocol again to
    receive the next message.

    (The removed local ``key`` duplicated ``self.key`` and was never used.)"""

    def timeout_non_final_response(self):
        self.log.info("Waiting for next blockwise request timed out")
        self.protocol.incoming_requests.pop(self.key)
        self.app_request.cancel()

    # we don't want to have this incoming request around forever
    self._next_block_timeout = self.protocol.loop.call_later(MAX_TRANSMIT_WAIT, timeout_non_final_response, self)
    self.protocol.incoming_requests[self.key] = self

    self.send_response(response, request)
def send_final_response(self, response, request):
    """Send the last response of an exchange and detach from the protocol."""
    # No deregistration is needed: incoming_requests registrations only
    # live from one request to the next anyway.
    self.send_response(response, request)

    # Break the reference. TODO: this helps the protocol free itself, but
    # not the responder, which seems to be kept alive by lingering timeout
    # handlers.
    self.protocol = None
def send_response(self, response, request):
    """Send a response or single response block.

    This method is used in 4 situations:
    - sending success non-blockwise response
    - asking client to send blockwise (Block1) request block
    - sending blockwise (Block2) response block
    - sending any error response
    """
    # a response always mirrors the token of the request it answers
    response.token = request.token
    self.log.debug("Sending token: %s" % (binascii.b2a_hex(response.token).decode('ascii'),))
    response.remote = request.remote
    if request.opt.block1 is not None:
        # echo the client's Block1 option to acknowledge the block
        response.opt.block1 = request.opt.block1
        # response.opt.block1.more does not need to be touched as we'll
        # always send "more" if the client has "more" to say
    if response.mtype is None:
        if self._sent_empty_ack:
            # the request was already acknowledged with a separate empty
            # ACK, so the actual response has to go out as a new CON
            response.mtype = CON
            self._sent_empty_ack = False
        else:
            # piggybacked response
            response.mtype = ACK
    if response.mid is None and response.mtype in (ACK, RST):
        # ACK/RST must carry the message id of the request they answer
        response.mid = request.mid
    self.log.debug("Sending response, type = %s (request type = %s)" % (response.mtype, request.mtype))
    self.protocol.send_message(response, self._exchange_monitor_factory(request))
def send_empty_ack(self, request):
    """Send separate empty ACK when response preparation takes too long.

    Currently, this can happen only once per Responder, that is, when the
    last block1 has been transferred and the first block2 is not ready
    yet."""
    self.log.debug("Response preparation takes too long - sending empty ACK.")

    # Deliberately not routed through send_response: the only thing that
    # matters here is the message id, which has to match the request
    # being acknowledged.
    empty = Message(mtype=ACK, code=EMPTY, payload=b"")
    empty.remote = request.remote
    empty.mid = request.mid
    self.protocol.send_message(empty)

    # remember that the eventual real response must be sent as a CON
    self._sent_empty_ack = True
@asyncio.coroutine
def handle_observe_request(self, request):
    """Match the request against the protocol's active observations.

    Renews or terminates an existing observation for the same key, or --
    for a GET with Observe=0 on a site that supports it -- creates a new
    ServerObservation and offers it to the resource."""
    key = ServerObservation.request_key(request)

    if key in self.protocol.incoming_observations:
        old_observation = self.protocol.incoming_observations[key]
        # there's no real need to distinguish real confirmations and
        # pseudorequests so far (as the pseudo requests will always have
        # their observe option set to 0), but it's good reading in the logs
        # and might be required in case someone wants to keep an eye on
        # renewed interest that is allowed since ietf-10.
        if request.mtype is not None:
            self.log.info("This is a real request belonging to an active observation")
            if request.opt.observe != 0:
                # either it's 1 (deregister) or someone is trying to
                # deregister by not sending an observe option at all
                old_observation.deregister("Client requested termination" if request.opt.observe == 1 else "Unexpected observe value: %r"%(request.opt.observe,))
                return
        else:
            # mtype None marks a request injected by ServerObservation.trigger
            self.log.info("This is a pseudo-request")

        self._serverobservation = old_observation
        return

    if request.code == GET and request.opt.observe == 0 and hasattr(self.protocol.serversite, "add_observation"):
        sobs = ServerObservation(self.protocol, request, self.log)
        yield from self.protocol.serversite.add_observation(request, sobs)
        if sobs.accepted:
            self._serverobservation = sobs
        else:
            sobs.deregister("Resource does not provide observation")
def handle_observe_response(self, request, response):
    """Attach or strip the Observe option on an outgoing response,
    depending on whether a server observation exists for the request."""
    # An mtype of None marks a request that was injected by an observation
    # trigger rather than received from the network; its notification has
    # to go out confirmable.
    if request.mtype is None:
        response.mtype = CON

    observation = self._serverobservation
    if observation is None:
        if response.opt.observe is not None:
            self.log.info("Dropping observe option from response (no server observation was created for this request)")
            response.opt.observe = None
        return

    # FIXME this is in parts duplicated in ServerObservation.trigger, and
    # thus should be moved somewhere else
    if response.code not in (VALID, CONTENT):
        observation.deregister("No successful response code")
        return

    self.log.debug("Acknowledging observation to client.")
    response.opt.observe = observation.observe_index
class ExchangeMonitor(object):
    """Callback collection interface to keep track of what happens to an
    exchange.

    Callbacks will be called in sequence: ``enqueued{0,1} sent
    retransmitted{0,MAX_RETRANSMIT} (timeout | rst | cancelled |
    response)``; everything after ``sent`` only gets called if the message
    that initiated the exchange was a CON."""

    # All hooks are no-ops by default; subclasses override the ones they
    # care about.

    def enqueued(self):
        pass

    def sent(self):
        pass

    def retransmitted(self):
        pass

    def timeout(self):
        pass

    def rst(self):
        pass

    def cancelled(self):
        pass

    def response(self, message):
        pass
class ServerObservation(object):
    """An active CoAP observation inside a server is described as a
    ServerObservation object.

    It keeps a complete copy of the original request for simplicity (while it
    actually would only need parts of that request, like the accept option).

    A ServerObservation has two boolean states: accepted and cancelled. It is
    originally neither, gets accepted when a
    :meth:`.ObservableResource.add_observation` method does :meth:`.accept()` it,
    and gets cancelled by incoming packages of the same identifier, RST/timeout
    on notifications or the observed resource. Beware that an accept can happen
    after cancellation if the client changes his mind quickly, but the resource
    takes time to decide whether it can be observed.
    """

    def __init__(self, original_protocol, original_request, requester_log):
        self.original_protocol = original_protocol
        self.original_request = original_request
        self.log = requester_log.getChild("observation")
        self.observe_index = 0
        self.cancelled = False
        self.accepted = False

        self.original_protocol.incoming_observations[self.identifier] = self

        self.log.debug("Observation created: %r"%self)

    def accept(self, cancellation_callback):
        """Accept the observation on behalf of the resource; the
        cancellation_callback gets invoked when the observation ends."""
        assert not self.accepted
        self.accepted = True
        if self.cancelled:
            # poor resource is just establishing that it can observe. let's
            # give it the time to finish add_observation and not worry about a
            # few milliseconds. (after all, this is a rare condition and people
            # will not test for it).
            #
            # bug fix: this used to be ``loop.call_later(cancellation_callback)``,
            # which always raised TypeError because call_later requires a delay
            # as its first argument. call_soon runs the callback right after the
            # current task step, giving add_observation the intended grace
            # period.
            self.original_protocol.loop.call_soon(cancellation_callback)
        else:
            self.resource_cancellation_callback = cancellation_callback

    def deregister(self, reason):
        """Tear the observation down, logging the reason."""
        self.log.debug("Taking down observation: %s", reason)
        self._cancel()

    def _cancel(self):
        # Cancel exactly once: notify the resource (if it accepted) and
        # drop the registration with the protocol.
        assert not self.cancelled
        self.cancelled = True

        if self.accepted:
            self.resource_cancellation_callback()
            del self.resource_cancellation_callback

        popped = self.original_protocol.incoming_observations.pop(self.identifier)
        assert popped is self

    # the (remote, token) pair under which the observation is registered
    identifier = property(lambda self: self.request_key(self.original_request))

    @staticmethod
    def request_key(request):
        return (request.remote, request.token)

    def _create_new_request(self):
        # TODO this indicates that the request is injected -- overloading .mtype is not the cleanest thing to do here
        # further TODO this should be a copy once we decide that request receivers may manipulate them
        self.original_request.mtype = None
        self.original_request.mid = None
        return self.original_request

    def trigger(self, response=None):
        """Send a notification to the observing client, either by re-rendering
        the original request (``response is None``) or from the given
        application-provided response."""
        # this implements the second implementation suggestion from
        # draft-ietf-coap-observe-11 section 4.4
        #
        ## @TODO handle situations in which this gets called more often than
        # 2^24 times in 256 seconds (or document why we can be sure that
        # that will not happen)
        self.observe_index = (self.observe_index + 1) % (2**24)

        request = self._create_new_request()
        if response is None:
            self.log.debug("Server observation triggered, injecting original request %r again"%request)

            # bypassing parsing and duplicate detection, pretend the request came in again
            #
            # the prediction is that the factory will be called exactly once, as no
            # blockwise is involved
            Responder(self.original_protocol, request, lambda message: self.ObservationExchangeMonitor(self))
        else:
            self.log.debug("Server observation triggered, responding with application provided answer")

            # only a trivial block2 option (single first block) can be passed
            # through to the client
            if response.opt.block2 != None and not (response.opt.block2.more == False and response.opt.block2.block_number == 0):
                self.log.warning("Observation trigger with immediate response contained nontrivial block option, failing the observation.")
                response = Message(code=INTERNAL_SERVER_ERROR, payload=b"Observation answer contains strange block option")

            response.mid = None

            # FIXME this is duplicated in parts from Response.send_response

            response.token = request.token
            response.remote = request.remote

            if response.mtype is None or response.opt.observe is None:
                # not sure under which conditions this should actually happen
                response.mtype = CON

            # FIXME this is duplicated in parts from handle_observe_response

            if response.code not in (VALID, CONTENT):
                self.log.debug("Trigger response produced no valid response code, tearing down observation.")
                self._cancel()
            else:
                response.opt.observe = self.observe_index

            self.original_protocol.send_message(response, self.ObservationExchangeMonitor(self))

    class ObservationExchangeMonitor(ExchangeMonitor):
        """These objects feed information about the success or failure of a
        response back to the observation.

        Note that no information flows to the exchange monitor from the
        observation, so they may outlive the observation and need to check if
        it's not already cancelled before cancelling it.
        """
        def __init__(self, observation):
            self.observation = observation
            self.observation.log.info("creating exchange observation monitor")

        # TODO: this should pause/resume further notifications
        def enqueued(self): pass
        def sent(self): pass

        def rst(self):
            self.observation.log.debug("Observation received RST, cancelling")
            if not self.observation.cancelled:
                self.observation._cancel()

        def timeout(self):
            self.observation.log.debug("Observation received timeout, cancelling")
            if not self.observation.cancelled:
                self.observation._cancel()
class ClientObservation(object):
    """Tracks a client-side observation: relays incoming responses to
    registered callbacks and failures to registered errbacks, until the
    observation is cancelled."""

    def __init__(self, original_request):
        self.original_request = original_request
        self.callbacks = []
        self.errbacks = []

        # the _register and _unregister pair take care that no responses come
        # in after cancellation, but they only start after the initial response
        # (to take care of "resource not observable" errors). while we have
        # those early errors, we need an explicit cancellation indication.
        self.cancelled = False

        self._registry_data = None

    def register_callback(self, callback):
        """Call the callback whenever a response to the message comes in, and
        pass the response to it."""
        self.callbacks.append(callback)

    def register_errback(self, callback):
        """Call the callback whenever something goes wrong with the
        observation, and pass an exception to the callback. After such a
        callback is called, no more callbacks will be issued."""
        self.errbacks.append(callback)

    def callback(self, response):
        """Notify all listeners of an incoming response"""
        for cb in self.callbacks:
            cb(response)

    def error(self, exception):
        """Notify registered listeners that the observation went wrong. This
        can only be called once."""
        for eb in self.errbacks:
            eb(exception)

        # an errored observation never produces further events
        self.cancel()

    def cancel(self):
        """Cease to generate observation or error events. This will not
        generate an error by itself."""
        assert not self.cancelled

        # make sure things go wrong when someone tries to continue this
        self.errbacks = None
        self.callbacks = None

        self.cancelled = True
        self._unregister()

    def _register(self, observation_dict, key):
        """Insert the observation into a dict (observation_dict) at the given
        key, and store those details for use during cancellation."""
        if key in observation_dict:
            raise ValueError("Observation conflicts with a registered observation.")
        if self._registry_data is not None:
            raise ValueError("Already registered.")

        self._registry_data = (observation_dict, key)
        observation_dict[key] = self

    def _unregister(self):
        """Undo the registration done in _register if it was ever done."""
        registry = self._registry_data
        if registry is not None:
            registered_dict, registered_key = registry
            del registered_dict[registered_key]

    def __repr__(self):
        if self.cancelled:
            state = "(cancelled)"
        else:
            state = "(%s call-, %s errback(s))"%(len(self.callbacks), len(self.errbacks))
        return '<%s %s at %#x>'%(type(self).__name__, state, id(self))
aiocoap-0.1+9ae36f4/aiocoap/proxy/ 0000775 0000000 0000000 00000000000 12560602342 0016636 5 ustar 00root root 0000000 0000000 aiocoap-0.1+9ae36f4/aiocoap/proxy/__init__.py 0000664 0000000 0000000 00000000733 12560602342 0020752 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
"""Container module, see submodules:
* :mod:`.client` -- using CoAP via a proxy server
* :mod:`.server` -- running a proxy server
"""
aiocoap-0.1+9ae36f4/aiocoap/proxy/client.py 0000664 0000000 0000000 00000005746 12560602342 0020502 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
import socket
import asyncio
from .. import interfaces
from ..protocol import ClientObservation
class ProxyForwarder(interfaces.RequestProvider):
    """Object that behaves like a Context but only provides the request
    function and forwards all messages to a proxy.

    This is not a proxy itself, it is just the interface for an external
    one."""
    def __init__(self, proxy_address, context):
        self.proxy_address = proxy_address
        self.context = context

    # bug fix: this property used to return ``self._proxy``, an attribute
    # that was never assigned anywhere, so every access to ``.proxy``
    # raised AttributeError; expose the configured address instead.
    proxy = property(lambda self: self.proxy_address)

    def request(self, message, **kwargs):
        """Create a ProxyRequest that sends the message via the configured
        proxy. The message must not have a remote set; use the
        uri_host/uri_port options to name the destination."""
        assert message.remote is None, "Message already has a configured "\
                "remote, set .opt.uri_{host,port} instead of remote"
        assert message.opt.uri_host is not None, "Message does not have a "\
                "destination address"
        message.opt.proxy_scheme = 'coap'
        return ProxyRequest(self, message, **kwargs)
class ProxyRequest(interfaces.Request):
    """Request that is sent to a ProxyForwarder's proxy address instead of
    directly to the destination named in the message options."""

    def __init__(self, proxy, app_request, exchange_monitor_factory=lambda x:None):
        self.proxy = proxy
        self.app_request = app_request
        # future that receives the (proxied) response message or exception
        self.response = asyncio.Future()
        self._exchange_monitor_factory = exchange_monitor_factory

        # placeholder observation; gets hooked onto the real proxied
        # observation (or errored out) once the inner request exists
        self.observation = ProxyClientObservation(app_request)

        asyncio.async(self._launch())

    @asyncio.coroutine
    def _launch(self):
        # Redirect the application request to the proxy address (clearing
        # any resolved remote first) and relay its response/observation.
        try:
            self.app_request.remote = None
            self.app_request.unresolved_remote = self.proxy.proxy_address
            proxyrequest = self.proxy.context.request(self.app_request, exchange_monitor_factory=self._exchange_monitor_factory)
            if hasattr(proxyrequest, 'observation'):
                self.observation._hook_onto(proxyrequest.observation)
            else:
                self.observation.error(Exception("No proxied observation, this should not have been created in the first place."))
            self.response.set_result((yield from proxyrequest.response))
        except Exception as e:
            # any failure in proxying is reported through the response future
            self.response.set_exception(e)
class ProxyClientObservation(ClientObservation):
    """ClientObservation that is a passive placeholder until the proxied
    request's own observation becomes available, then relays its events."""

    real_observation = None
    # registration is handled by the real observation, not this placeholder
    _register = None
    _unregister = None

    def _hook_onto(self, real_observation):
        """Connect this placeholder to the real (proxied) observation."""
        if self.cancelled:
            # .cancel() arrived before the real observation existed; apply
            # the pending cancellation now.
            real_observation.cancel()
        else:
            real_observation.register_callback(self.callback)
            real_observation.register_errback(self.error)
            # bug fix: the real observation was never stored, so a cancel()
            # issued after hooking could not reach it and the upstream
            # observation leaked; remember it for cancel() below.
            self.real_observation = real_observation

    def cancel(self):
        self.errbacks = None
        self.callbacks = None
        self.cancelled = True
        if self.real_observation is not None:
            self.real_observation.cancel()
        # if real_observation is still None, cancellation is deferred to
        # _hook_onto, which cancels the real observation as cancelled is
        # set to True
aiocoap-0.1+9ae36f4/aiocoap/proxy/server.py 0000664 0000000 0000000 00000033454 12560602342 0020527 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
"""Basic implementation of CoAP-CoAP proxying
This is work in progress and not yet part of the API."""
import asyncio
import copy
import urllib.parse
import functools
import logging
from .. import numbers, interfaces, message, error
class CanNotRedirect(Exception):
    """Raised when a proxy can not redirect a request; carries the CoAP
    response code and a human-readable explanation for the client."""

    def __init__(self, code, explanation):
        super(CanNotRedirect, self).__init__()
        # CoAP response code to send back
        self.code = code
        # payload text explaining the refusal
        self.explanation = explanation
class CanNotRedirectBecauseOfUnsafeOptions(CanNotRedirect):
    """CanNotRedirect raised when a request carries unsafe-to-forward
    options the proxy does not know; maps to a Bad Option response."""

    def __init__(self, options):
        # Delegate to the base class so code/explanation are assigned in
        # one place and the Exception machinery is initialized (the old
        # implementation set the attributes directly and never called the
        # parent constructor).
        super(CanNotRedirectBecauseOfUnsafeOptions, self).__init__(
                numbers.codes.BAD_OPTION,
                "Unsafe options in request: %s"%(", ".join(str(o.number) for o in options)))
        # the offending options, for callers that want to inspect them
        self.options = options
def raise_unless_safe(request, known_options):
    """Raise a BAD_OPTION CanNotRedirect unless all options in request are
    safe to forward or known"""

    # options every proxy is expected to handle, on top of the caller's
    # known_options
    always_known = {
            # it is expected that every proxy is aware of these options even
            # though one of them often doesn't need touching
            numbers.OptionNumber.URI_HOST,
            numbers.OptionNumber.URI_PATH,
            numbers.OptionNumber.URI_QUERY,
            # handled by the Context
            numbers.OptionNumber.BLOCK1,
            numbers.OptionNumber.BLOCK2,
            # handled by the proxy resource
            numbers.OptionNumber.OBSERVE,
            }
    acceptable = always_known.union(known_options)

    unsafe_options = [o for o in request.opt.option_list() if o.number.is_unsafe() and o.number not in acceptable]
    if unsafe_options:
        raise CanNotRedirectBecauseOfUnsafeOptions(unsafe_options)
class Proxy(interfaces.Resource):
    """Base class for CoAP-CoAP proxying resources.

    Incoming requests are passed through the registered Redirector objects
    (see add_redirector) and then re-sent via the outgoing context."""

    # other than in special cases, we're trying to be transparent wrt blockwise transfers
    interpret_block_options = False

    def __init__(self, outgoing_context, logger=None):
        self.outgoing_context = outgoing_context
        self.log = logger or logging.getLogger('proxy')

        self._redirectors = []

    def add_redirector(self, redirector):
        # redirectors are consulted in the order they were added
        self._redirectors.append(redirector)

    def apply_redirection(self, request):
        # Return the first redirector's rewritten request, or None if no
        # redirector took the request.
        # NOTE(review): subclasses override this so that render() never
        # receives None here (the base implementation would pass None on to
        # the outgoing context) -- confirm before using Proxy directly.
        for r in self._redirectors:
            result = r.apply_redirection(request)
            if result is not None:
                return result
        return None

    @asyncio.coroutine
    def needs_blockwise_assembly(self, request):
        return self.interpret_block_options

    @asyncio.coroutine
    def render(self, request):
        # FIXME i'd rather let the application do with the message whatever it
        # wants. everything the responder needs of the request should be
        # extracted beforehand.
        # work on a copy, and strip addressing/identification so the request
        # can be re-sent through the outgoing context
        request = copy.deepcopy(request)
        request.mid = None
        request.remote = None
        request.token = None
        try:
            request = self.apply_redirection(request)
        except CanNotRedirect as e:
            return message.Message(code=e.code, payload=e.explanation.encode('utf8'))

        try:
            response = yield from self.outgoing_context.request(request, handle_blockwise=self.interpret_block_options).response
        except error.RequestTimedOut as e:
            return message.Message(code=numbers.codes.GATEWAY_TIMEOUT)

        # refuse to pass on responses carrying unsafe unknown options
        raise_unless_safe(response, ())

        # strip identification so the serving context assigns fresh values
        response.mtype = None
        response.mid = None
        response.remote = None
        response.token = None

        return response
class ProxyWithPooledObservations(Proxy, interfaces.ObservableResource):
    """Proxy that pools observations on the same resource: many observing
    clients share one outgoing observation and are served from its latest
    cached notification."""

    def __init__(self, outgoing_context, logger=None):
        super(ProxyWithPooledObservations, self).__init__(outgoing_context, logger)
        # maps cache keys (see _cache_key) to augmented outgoing requests
        self._outgoing_observations = {}

    @staticmethod
    def _cache_key(request):
        # the Observe option itself must not distinguish cache entries
        return request.get_cache_key([numbers.optionnumbers.OptionNumber.OBSERVE])

    def _peek_observation_for(self, request):
        """Return the augmented request (see _get_observation_for) towards a
        resource, or raise KeyError"""
        cachekey = self._cache_key(request)

        return self._outgoing_observations[cachekey]

    def _get_observation_for(self, request):
        """Return an existing augmented request towards a resource or create one.

        An augmented request is an observation request that has some additional
        properties (__users, __cachekey, __latest_response), which are used in
        ProxyWithPooledObservations to immediately serve responses from
        observed resources, and to tear the observations down again."""

        # NOTE: the double-underscore attributes set on obs below are
        # name-mangled to _ProxyWithPooledObservations__users etc., so they
        # are only reachable from within this class body.

        # see ProxiedResource.render
        request = copy.deepcopy(request)
        request.mid = None
        request.remote = None
        request.token = None
        request = self.apply_redirection(request)

        cachekey = self._cache_key(request)

        try:
            obs = self._outgoing_observations[cachekey]
        except KeyError:
            obs = self._outgoing_observations[cachekey] = self.outgoing_context.request(request)
            obs.__users = set()
            obs.__cachekey = cachekey
            obs.__latest_response = None # this becomes a cached response right after the .response comes in (so only use this after waiting for it), and gets updated when new responses arrive.

            def when_first_request_done(result, obs=obs):
                obs.__latest_response = result.result()
            obs.response.add_done_callback(when_first_request_done)

            def cb(incoming_message, obs=obs):
                # relay the newest notification to all pooled observers
                self.log.info("Received incoming message %r, relaying it to %d clients"%(incoming_message, len(obs.__users)))
                obs.__latest_response = incoming_message
                for observationserver in set(obs.__users):
                    observationserver.trigger(copy.deepcopy(incoming_message))
            obs.observation.register_callback(cb)
            def eb(exception, obs=obs):
                # on error, actively shut down any remaining pooled
                # observers with a best-effort error message
                if obs.__users:
                    code = numbers.codes.INTERNAL_SERVER_ERROR
                    payload = b""
                    if isinstance(exception, error.RenderableError):
                        code = exception.code
                        payload = exception.message.encode('ascii')
                    self.log.debug("Received error %r, which did not lead to unregistration of the clients. Actively deregistering them with %s %r."%(exception, code, payload))
                    for u in list(obs.__users):
                        u.trigger(message.Message(code=code, payload=payload))
                    if obs.__users:
                        self.log.error("Observations survived sending them an error message.")
                else:
                    self.log.debug("Received error %r, but that seems to have been passed on cleanly to the observers as they are gone by now."%(exception,))
            obs.observation.register_errback(eb)

        return obs

    def _add_observation_user(self, clientobservationrequest, serverobservation):
        clientobservationrequest.__users.add(serverobservation)

    def _remove_observation_user(self, clientobservationrequest, serverobservation):
        clientobservationrequest.__users.remove(serverobservation)
        # give the request that just cancelled time to be dealt with before
        # dropping the __latest_response
        asyncio.get_event_loop().call_soon(self._consider_dropping, clientobservationrequest)

    def _consider_dropping(self, clientobservationrequest):
        # tear down the outgoing observation once its last user is gone
        if not clientobservationrequest.__users:
            self.log.debug("Last client of observation went away, deregistering with server.")
            self._outgoing_observations.pop(clientobservationrequest.__cachekey)
            if not clientobservationrequest.observation.cancelled:
                clientobservationrequest.observation.cancel()

    @asyncio.coroutine
    def add_observation(self, request, serverobservation):
        """As ProxiedResource is intended to be just the proxy's interface
        toward the Context, accepting observations is handled here, where the
        observations handling can be defined by the subclasses."""
        try:
            clientobservationrequest = self._get_observation_for(request)
        except CanNotRedirect:
            pass # just don't accept the observation, the rest will be taken care of at rendering
        else:
            self._add_observation_user(clientobservationrequest, serverobservation)
            serverobservation.accept(functools.partial(self._remove_observation_user, clientobservationrequest, serverobservation))

    @asyncio.coroutine
    def render(self, request):
        # FIXME this is evaluated twice in the implementation (once here, but
        # unless it's an observation what matters is inside the super call),
        # maybe this needs to hook in differently than by subclassing and
        # calling super.
        self.log.info("render called")
        redirected_request = copy.deepcopy(request)

        try:
            redirected_request = self.apply_redirection(redirected_request)
            clientobservationrequest = self._peek_observation_for(redirected_request)
        except (KeyError, CanNotRedirect) as e:
            if not isinstance(e, CanNotRedirect) and request.opt.observe is not None:
                self.log.warning("No matching observation found: request is %r (cache key %r), outgoing observations %r"%(redirected_request, self._cache_key(redirected_request), self._outgoing_observations))

                return message.Message(code=numbers.codes.BAD_OPTION, payload="Observe option can not be proxied without active observation.".encode('utf8'))
            self.log.debug("Request is not an observation or can't be proxied, passing it on to regular proxying mechanisms.")
            return (yield from super(ProxyWithPooledObservations, self).render(request))
        else:
            self.log.info("Serving request using latest cached response of %r"%clientobservationrequest)
            # wait for the first response before using the cached one
            yield from clientobservationrequest.response
            cached_response = clientobservationrequest.__latest_response
            cached_response.mid = None
            cached_response.token = None
            cached_response.remote = None
            cached_response.mtype = None
            return cached_response
class ForwardProxy(Proxy):
    """Forward proxy: requests must carry a Proxy-Scheme of 'coap' and name
    their destination in the Uri-Host option."""

    # big FIXME: modifying an object in-place and returning it should not be done.
    def apply_redirection(self, request):
        if request.opt.proxy_uri is not None:
            raise CanNotRedirect(numbers.codes.NOT_IMPLEMENTED, "URI splitting not implemented, please use Proxy-Scheme.")
        scheme = request.opt.proxy_scheme
        if scheme is None:
            raise CanNotRedirect(numbers.codes.BAD_REQUEST, "This is only a proxy.") # correct error code?
        if scheme != 'coap':
            raise CanNotRedirect(numbers.codes.BAD_OPTION, "This is only a CoAP proxy (set uri-scheme to coap)")
        request.opt.proxy_scheme = None

        # registered redirectors take precedence over plain forwarding
        redirected = super(ForwardProxy, self).apply_redirection(request)
        if redirected is not None:
            return redirected

        raise_unless_safe(request, (numbers.OptionNumber.PROXY_SCHEME, numbers.OptionNumber.URI_HOST))
        return request
class ForwardProxyWithPooledObservations(ForwardProxy, ProxyWithPooledObservations):
    """ForwardProxy that additionally pools outgoing observations."""
    pass
class ReverseProxy(Proxy):
    """Reverse proxy: destinations are determined solely by the registered
    redirectors, and forward-proxying options are rejected."""

    def apply_redirection(self, request):
        is_forward_request = request.opt.proxy_uri is not None or request.opt.proxy_scheme is not None
        if is_forward_request:
            # that should somehow be default...
            raise CanNotRedirect(numbers.codes.PROXYING_NOT_SUPPORTED, "This is a reverse proxy, not a forward one.")

        result = super(ReverseProxy, self).apply_redirection(request)
        if result is None:
            raise CanNotRedirect(numbers.codes.NOT_FOUND, "")
        return result
class ReverseProxyWithPooledObservations(ReverseProxy, ProxyWithPooledObservations):
    """ReverseProxy that additionally pools outgoing observations."""
    pass
class Redirector():
    """Interface for a Proxy's redirector components: apply_redirection
    returns the (possibly rewritten) request if this redirector handles it,
    or None to let the next redirector try."""
    def apply_redirection(self, request):
        # default implementation: not responsible for any request
        return None
def splitport(hostport):
    """Like urllib.parse.splitport, but return port as int, and as None if it
    equals the CoAP default port. Also, it allows giving IPv6 addresses like a netloc:

    >>> splitport('foo')
    ('foo', None)
    >>> splitport('foo:5683')
    ('foo', None)
    >>> splitport('[::1]:56830')
    ('::1', 56830)
    """
    # reuse SplitResult's netloc parsing instead of hand-rolling the
    # bracket/colon handling for IPv6 literals
    parsed = urllib.parse.SplitResult(None, hostport, None, None, None)
    host = parsed.hostname
    port = parsed.port
    if port == numbers.constants.COAP_PORT:
        port = None
    return host, port
class NameBasedVirtualHost(Redirector):
    """Redirector that forwards requests whose Uri-Host equals match_name to
    a fixed target, optionally rewriting the Uri-Host/Uri-Port options."""

    def __init__(self, match_name, target, rewrite_uri_host=False):
        self.match_name = match_name
        self.target = target
        self.rewrite_uri_host = rewrite_uri_host

    def apply_redirection(self, request):
        raise_unless_safe(request, ())

        if request.opt.uri_host != self.match_name:
            # not our host name; let other redirectors have a look
            return None
        if self.rewrite_uri_host:
            request.opt.uri_host, request.opt.uri_port = splitport(self.target)
        else:
            request.unresolved_remote = self.target
        return request
class UnconditionalRedirector(Redirector):
    """Redirector that passes every safe request on to a fixed target."""

    def __init__(self, target):
        self.target = target

    def apply_redirection(self, request):
        raise_unless_safe(request, ())

        # no matching criteria at all: everything goes to the target
        request.unresolved_remote = self.target
        return request
class SubresourceVirtualHost(Redirector):
    """Redirector that maps a path prefix to another host: the prefix is
    stripped and the remainder of the path is requested from the target."""

    def __init__(self, path, target):
        self.path = tuple(path)
        self.target = target

    def apply_redirection(self, request):
        raise_unless_safe(request, ())

        prefix_length = len(self.path)
        if request.opt.uri_path[:prefix_length] != self.path:
            # prefix does not match; not our request
            return None
        request.opt.uri_path = request.opt.uri_path[prefix_length:]
        request.opt.uri_host, request.opt.uri_port = splitport(self.target)
        return request
aiocoap-0.1+9ae36f4/aiocoap/resource.py 0000664 0000000 0000000 00000016656 12560602342 0017674 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
"""Basic resource implementations
A resource in URL / CoAP / REST terminology is the thing identified by a URI.
Here, a :class:`.Resource` is the place where server functionality is
implemented. In many cases, there exists one persistent Resource object for a
given resource (eg. a ``TimeResource()`` is responsible for serving the
``/time`` location). On the other hand, an aiocoap server context accepts only
one thing as its serversite, and that is a Resource too (typically of the
:class:`Site` class).
Resources are most easily implemented by deriving from :class:`.Resource` and
implementing ``render_get``, ``render_post`` and similar coroutine methods.
Those take a single request message object and must return a
:class:`aiocoap.Message` object.
To serve more than one resource on a site, use the :class:`Site` class to
dispatch requests based on the Uri-Path header.
"""
import hashlib
import asyncio
from . import message
from . import error
from . import interfaces
from . import numbers
def hashing_etag(request, response):
    """Helper function for render_get handlers that allows them to use ETags based
    on the payload's hash value

    Run this on your request and response before returning from render_get; it is
    safe to use this function with all kinds of responses, it will only act on
    2.05 Content. The hash used are the first 8 bytes of the sha1 sum of the
    payload.

    Note that this method is not ideal from a server performance point of view
    (a file server, for example, might want to hash only the stat() result of a
    file instead of reading it in full), but it saves bandwidth for the simple
    cases.

    >>> from aiocoap import *
    >>> req = Message(code=GET)
    >>> hash_of_hello = b'\\xaa\\xf4\\xc6\\x1d\\xdc\\xc5\\xe8\\xa2'
    >>> req.opt.etags = [hash_of_hello]
    >>> resp = Message(code=CONTENT)
    >>> resp.payload = b'hello'
    >>> hashing_etag(req, resp)
    >>> resp.code == VALID
    True
    >>> resp.payload
    b''
    """
    # fix: the previous docstring ended with a bare ``>>> resp`` doctest
    # line with no expected output, which fails under doctest; the example
    # now asserts the observable effects instead.

    if response.code != numbers.codes.CONTENT:
        return

    response.opt.etag = hashlib.sha1(response.payload).digest()[:8]
    if request.opt.etags is not None and response.opt.etag in request.opt.etags:
        # client already has this representation: answer 2.03 Valid
        response.code = numbers.codes.VALID
        response.payload = b''
class Resource(interfaces.Resource):
    """Simple base implementation of the :class:`interfaces.Resource`
    interface

    The render method delegates content creation to ``render_$method``
    methods, and responds appropriately to unsupported methods.

    Moreover, this class provides a ``get_link_description`` method as used
    by .well-known/core to expose a resource's ct, rt and if_ (alternative
    name for `if` as that's a Python keyword) attributes."""

    @asyncio.coroutine
    def needs_blockwise_assembly(self, request):
        # plain resources always want the context to assemble blockwise
        # transfers for them
        return True

    @asyncio.coroutine
    def render(self, request):
        if not request.code.is_request():
            raise error.UnsupportedMethod()
        # dispatch to render_get / render_post / ... by method name
        handler = getattr(self, 'render_%s' % str(request.code).lower(), None)
        if not handler:
            raise error.UnallowedMethod()
        return handler(request)

    def get_link_description(self):
        ## FIXME which formats are acceptable, and how much escaping and
        # list-to-separated-string conversion needs to happen here
        description = {}
        if hasattr(self, 'ct'):
            description['ct'] = str(self.ct)
        if hasattr(self, 'rt'):
            description['rt'] = self.rt
        if hasattr(self, 'if_'):
            description['if'] = self.if_
        return description
class ObservableResource(Resource, interfaces.ObservableResource):
    """Resource base class that keeps track of the observations on it and
    notifies them via updated_state()."""

    def __init__(self):
        super(ObservableResource, self).__init__()
        self._observations = set()

    @asyncio.coroutine
    def add_observation(self, request, serverobservation):
        self._observations.add(serverobservation)

        def remove(observations=self._observations, obs=serverobservation):
            observations.remove(obs)
        serverobservation.accept(remove)

    def updated_state(self, response=None):
        """Call this whenever the resource was updated, and a notification
        should be sent to observers."""
        for observation in self._observations:
            observation.trigger(response)

    def get_link_description(self):
        # advertise observability in the link format
        link = super(ObservableResource, self).get_link_description()
        link['obs'] = None
        return link
class WKCResource(Resource):
    """Read-only dynamic resource list, suitable as .well-known/core.

    This resource renders a link_header.LinkHeader object (which describes a
    collection of resources) as application/link-format (RFC 6690).

    Currently, the resource does not respect the filter criteria that can be
    passed in via query strings; that might be added later.

    The list to be rendered is obtained from a function passed into the
    constructor; typically, that function would be a bound
    Site.get_resources_as_linkheader() method."""

    # content format 40 is application/link-format
    ct = 40

    def __init__(self, listgenerator):
        self.listgenerator = listgenerator

    def render_get(self, request):
        payload = str(self.listgenerator()).encode('utf8')
        response = message.Message(code=numbers.codes.CONTENT, payload=payload)
        response.opt.content_format = self.ct
        return response
class Site(interfaces.ObservableResource):
    """Typical root element that gets passed to a :class:`Context` and contains
    all the resources that can be found when the endpoint gets accessed as a
    server.

    This provides easy registration of statical resources.

    Add resources at absolute locations using the :meth:`.add_resource`
    method."""

    def __init__(self):
        # Maps uri_path tuples to the resource objects serving them.
        self._resources = {}

    @asyncio.coroutine
    def needs_blockwise_assembly(self, request):
        # Delegate to the addressed child; unknown paths default to True so
        # the library assembles blockwise requests before render() raises.
        try:
            child = self._resources[request.opt.uri_path]
        except KeyError:
            return True
        else:
            return child.needs_blockwise_assembly(request)

    @asyncio.coroutine
    def render(self, request):
        """Dispatch the request to the resource registered for its uri_path."""
        try:
            child = self._resources[request.opt.uri_path]
        except KeyError:
            raise error.NoResource()
        else:
            return child.render(request)

    @asyncio.coroutine
    def add_observation(self, request, serverobservation):
        # Silently ignore observations on unknown paths and on resources
        # that do not implement add_observation (AttributeError).
        try:
            child = self._resources[request.opt.uri_path]
        except KeyError:
            return
        try:
            yield from child.add_observation(request, serverobservation)
        except AttributeError:
            pass

    def add_resource(self, path, resource):
        """Register `resource` at `path` (an iterable of path segments)."""
        self._resources[tuple(path)] = resource

    def remove_resource(self, path):
        """Forget the resource registered at `path`; raises KeyError if absent."""
        del self._resources[tuple(path)]

    def get_resources_as_linkheader(self):
        """Return a link_header.LinkHeader describing all registered resources,
        including each resource's own link attributes when it provides them."""
        import link_header

        links = []
        for path, resource in self._resources.items():
            if hasattr(resource, "get_link_description"):
                details = resource.get_link_description()
            else:
                details = {}
            lh = link_header.Link('/' + '/'.join(path), **details)
            links.append(lh)
        return link_header.LinkHeader(links)
aiocoap-0.1+9ae36f4/aiocoap/util/ 0000775 0000000 0000000 00000000000 12560602342 0016432 5 ustar 00root root 0000000 0000000 aiocoap-0.1+9ae36f4/aiocoap/util/__init__.py 0000664 0000000 0000000 00000003351 12560602342 0020545 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
"""Tools not directly related with CoAP that are needed to provide the API"""
class ExtensibleEnumMeta(type):
    """Metaclass for ExtensibleIntEnum, see there for detailed explanations.

    At class creation time, every public non-callable, non-property class
    attribute is converted into an interned member of the class itself; later
    calls to the class intern unknown values on the fly."""

    def __init__(self, name, bases, namespace):
        # (parameter renamed from `dict`, which shadowed the builtin)
        # Cache of value -> member; __call__ uses it to intern members.
        self._value2member_map_ = {}
        for attrname, attrvalue in namespace.items():
            if attrname.startswith('_'):
                continue  # dunders / private names are not enum members
            if callable(attrvalue):
                continue  # methods are not enum members
            if isinstance(attrvalue, property):
                continue  # computed attributes are not enum members
            member = self(attrvalue)
            member.name = attrname
            setattr(self, attrname, member)
        type.__init__(self, name, bases, namespace)

    def __call__(self, value):
        """Intern `value`: equal inputs always yield the identical member."""
        if isinstance(value, self):
            return value
        if value not in self._value2member_map_:
            self._value2member_map_[value] = super(ExtensibleEnumMeta, self).__call__(value)
        return self._value2member_map_[value]
class ExtensibleIntEnum(int, metaclass=ExtensibleEnumMeta):
    """Similar to Python3.4's enum.IntEnum, this type can be used for named
    numbers which are not comprehensively known, like CoAP option numbers."""

    def __add__(self, delta):
        # Stay inside the subclass so the result is an interned member too.
        return type(self)(int(self) + delta)

    def __repr__(self):
        if hasattr(self, "name"):
            label = ' "%s"' % self.name
        else:
            label = ""
        return '<%s %d%s>' % (type(self).__name__, self, label)

    def __str__(self):
        if hasattr(self, "name"):
            return self.name
        return int.__str__(self)
aiocoap-0.1+9ae36f4/aiocoap/util/asyncio.py 0000664 0000000 0000000 00000001247 12560602342 0020455 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
"""asyncio workarounds"""
import asyncio.events
def cancel_thoroughly(handle):
    """Use this on a (Timer)Handle when you would .cancel() it, just also drop
    the callback and arguments for them to be freed soon."""
    assert isinstance(handle, asyncio.events.Handle)
    handle.cancel()
    # A cancelled handle still references its callback and arguments until it
    # is collected; dropping them here lets them be freed immediately.
    handle._callback = None
    handle._args = None
aiocoap-0.1+9ae36f4/aiocoap/util/queuewithend.py 0000664 0000000 0000000 00000014132 12560602342 0021514 0 ustar 00root root 0000000 0000000 """Copy of the queuewithend from
https://gitorious.org/asyncio-for-loop-replacement/, copied in here until it
stabilizes"""
import abc
import enum
import asyncio
class AsyncIterable(metaclass=abc.ABCMeta):
    """Abstract interface for an asynchronously consumable sequence.

    Consumers alternate between ``yield from can_peek()`` and
    ``get_nowait()`` until ``can_peek`` returns False."""

    @abc.abstractmethod
    @asyncio.coroutine
    def can_peek(self):
        """Return True when a result is ready to be fetched with .get_nowait(),
        and False when no more items can be fetched."""

    @abc.abstractmethod
    @asyncio.coroutine
    def get_nowait(self):
        """Fetch the next item. This must only be called once after can_peek
        has returned True."""
class QueueWithEnd(AsyncIterable):
    """A QueueWithEnd shares a Queue's behavior in that it gets fed with put
    and consumed with get_nowait. Contrary to a Queue, this is designed to be
    consumed only by one entity, which uses the coroutine can_peek to make sure
    the get_nowait will succeed.

    Another difference between a Queue and a QueueWithEnd is that the latter
    can also terminate (which is indicated by can_peek returning False and set
    by the finish coroutine) and raise exceptions (which raise from the
    get_nowait function and are set by the put_exception coroutine).
    """

    # Kinds of entries travelling through the internal queue.
    Type = enum.Enum("QueueWithEnd.Type", "notpeeked value end exception")

    def __init__(self, maxsize=0):
        # The internal queue carries (kind, value) tuples.
        self._queue = asyncio.Queue(maxsize)
        self._ended = False
        self._flag = self.Type.notpeeked

    def __repr__(self):
        return "<%s %#x flag %s%s>" % (type(self).__name__, id(self), self._flag, " (%s)" %
                self._value if self._flag in (self.Type.value,
                    self.Type.exception) else "")

    # AsyncIterable interface

    @asyncio.coroutine
    def can_peek(self):
        if self._flag is not self.Type.notpeeked:
            return True
        self._flag, self._value = yield from self._queue.get()
        return self._flag is not self.Type.end

    def get_nowait(self):
        if self._flag in (self.Type.notpeeked, self.Type.end):
            raise asyncio.QueueEmpty()
        elif self._flag is self.Type.exception:
            raise self._value
        else:
            self._flag = self.Type.notpeeked
            return self._value

    # feeder interface

    @asyncio.coroutine
    def put(self, value):
        yield from self._put(self.Type.value, value)

    @asyncio.coroutine
    def put_exception(self, value):
        yield from self._put(self.Type.exception, value)
        self._ended = True

    @asyncio.coroutine
    def finish(self):
        yield from self._put(self.Type.end, None)
        self._ended = True

    @asyncio.coroutine
    def _put(self, kind, value):
        # BUG FIX: the parameter was formerly named `type`, shadowing the
        # builtin; the error message below then tried to call the enum member
        # and raised TypeError instead of reporting the real problem.
        if self._ended:
            raise asyncio.InvalidStateError("%s has already ended"%type(self).__name__)
        yield from self._queue.put((kind, value))

    # a simple way to create a feeder with something like an explicit yield

    @classmethod
    def cogenerator(cls, maxsize=0):
        """Coroutine decorator that passes a callable `asyncyield` into the function
        as the first argument and returns a QueueWithEnd. It is implicitly
        finished when the coroutine returns.

        >>> @QueueWithEnd.cogenerator()
        ... def count_slowly(asyncyield, count_to):
        ...     for i in range(count_to):
        ...         yield from asyncio.sleep(1)
        ...         yield from asyncyield(i + 1)
        >>> counter = count_slowly(10)
        >>> while (yield from counter.can_peek()):
        ...     i = counter.get_nowait()
        ...     print("Current count is %d"%i)
        """
        def decorate(function):
            cofun = asyncio.coroutine(function)
            def wrapped(*args, **kwargs):
                result = cls(maxsize=maxsize)
                def guarding():
                    running = cofun(result.put, *args, **kwargs)
                    try:
                        yield from running
                    except Exception as e:
                        # Surface the exception to the consumer side.
                        yield from result.put_exception(e)
                    else:
                        yield from result.finish()
                asyncio.Task(guarding())
                return result
            return wrapped
        return decorate

    @classmethod
    def merge(cls, queues):
        """Asyncio's `as_completed` does not work with QueueWithEnd objects for
        the same reason it can't replace it (missing end-of-loop indication);
        the `merge` classmethod can be used instead to fetch results
        indiscriminately from queues as they are completed:

        >>> @QueueWithEnd.cogenerator()
        ... def count(asyncyield):
        ...     for i in range(3):
        ...         yield from asyncyield(i + 1)
        ...         yield from time.sleep(0.1 * i)
        >>> firstcount = count()
        >>> secondcount = count()
        >>> merged = QueueWithEnd.merge([firstcount, secondcount])
        >>> while (yield from merged.can_peek()):
        ...     print(merged.get_nowait())
        1
        2
        1
        2
        3
        3
        """
        merged = cls(maxsize=1)
        merged.subqueues = queues[:]

        @asyncio.coroutine
        def feeder(queue, merged):
            # BUG FIX: can_peek consumes the end marker itself (it returns
            # False then), so end handling belongs *after* the loop; it
            # formerly sat unreachably inside the loop body, and finish()
            # was called without being driven (a discarded coroutine).
            while (yield from queue.can_peek()):
                yield from merged._put(queue._flag, queue._value)
                queue._flag = cls.Type.notpeeked
            merged.subqueues.remove(queue)
            if not merged.subqueues:
                yield from merged.finish()

        for s in merged.subqueues:
            # asyncio.async is a syntax error on Python >= 3.7;
            # ensure_future (available since 3.4.4) is the equivalent spelling.
            asyncio.ensure_future(feeder(s, merged))
        return merged

    # implementing the Future interface -- note that it's neither a Future by
    # inheritance, nor does it offer the complete Future interface; but it can
    # be used in `for value in (yield from ...):`

    def __iter__(self):
        result = []
        while (yield from self.can_peek()):
            result.append(self.get_nowait())
        return result

    # compatibility to the original `Its` class
    more = can_peek
    value = property(get_nowait)
    # another old name
    consume = get_nowait
aiocoap-0.1+9ae36f4/clientGET.py 0000775 0000000 0000000 00000001674 12560602342 0016245 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python3
# This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
import logging
import asyncio
from aiocoap import *
logging.basicConfig(level=logging.INFO)
@asyncio.coroutine
def main():
    """Perform a single GET request to coap://localhost/time and print the
    response code and payload (or the error that prevented fetching)."""
    client = yield from Context.create_client_context()

    request = Message(code=GET)
    request.set_request_uri('coap://localhost/time')

    try:
        response = yield from client.request(request).response
    except Exception as exc:
        print('Failed to fetch resource:')
        print(exc)
    else:
        print('Result: %s\n%r'%(response.code, response.payload))
if __name__ == "__main__":
    # Run the one-shot client coroutine to completion.
    asyncio.get_event_loop().run_until_complete(main())
aiocoap-0.1+9ae36f4/clientPUT.py 0000775 0000000 0000000 00000002361 12560602342 0016270 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python3
# This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
import logging
import asyncio
from aiocoap import *
logging.basicConfig(level=logging.INFO)
@asyncio.coroutine
def main():
    """Perform a single PUT request to localhost port 5683 (the official IANA
    assigned CoAP port), URI "/other/block".

    The request is sent 2 seconds after initialization. The payload is bigger
    than 1kB, and is thus sent as several blocks."""
    client = yield from Context.create_client_context()

    yield from asyncio.sleep(2)

    payload = b"The quick brown fox jumps over the lazy dog.\n" * 30
    request = Message(code=PUT, payload=payload)
    request.opt.uri_host = '127.0.0.1'
    request.opt.uri_path = ("other", "block")

    response = yield from client.request(request).response
    print('Result: %s\n%r'%(response.code, response.payload))
if __name__ == "__main__":
    # Run the blockwise-PUT example to completion.
    asyncio.get_event_loop().run_until_complete(main())
aiocoap-0.1+9ae36f4/coap-client 0000775 0000000 0000000 00000021667 12560602342 0016202 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python3
# This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
"""coap-client is a simple command-line tool for interacting with CoAP servers"""
import sys
import asyncio
import argparse
import logging
import subprocess
import socket
import shlex
# even though not used directly, this has side effects on the input() function
# used in interactive mode
import readline
import aiocoap
import aiocoap.proxy.client
def parse_commandline(args):
    """Build the coap-client argument parser and parse ``args``.

    Returns a tuple of the parser itself (so callers can report errors via
    ``parser.error``) and the parsed options namespace."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-m', '--method', help="Name or number of request method to use (default: %(default)s)", default="GET")
    parser.add_argument('--observe', help="Register an observation on the resource", action='store_true')
    parser.add_argument('--observe-exec', help="Run the specified program whenever the observed resource changes, feeding the response data to its stdin", metavar='CMD')
    parser.add_argument('--accept', help="Content format to request", metavar="MIME")
    parser.add_argument('--proxy', help="Relay the CoAP request to a proxy for execution", metavar="HOST[:PORT]")
    parser.add_argument('--payload', help="Send X as payload in POST or PUT requests. If X starts with an '@', its remainder is treated as a file name and read from.", metavar="X")
    parser.add_argument('--content-format', help="Content format sent via POST or PUT", metavar="MIME")
    parser.add_argument('-v', '--verbose', help="Increase the debug output", action="count")
    parser.add_argument('-q', '--quiet', help="Decrease the debug output", action="count")
    parser.add_argument('--dump', help="Log network traffic to FILE", metavar="FILE")
    parser.add_argument('--interactive', help="Enter interactive mode", action="store_true")
    parser.add_argument('url', help="CoAP address to fetch")
    options = parser.parse_args(args)
    return parser, options
def configure_logging(verbosity):
    """Set the 'coap' logger's level from a signed verbosity count.

    -2 or below silences everything (above CRITICAL); +2 or above enables
    full DEBUG output; values in between map to ERROR/WARNING/INFO."""
    logging.basicConfig()

    levels = {
        -2: logging.CRITICAL + 1,
        -1: logging.ERROR,
        0: logging.WARNING,
        1: logging.INFO,
        2: logging.DEBUG,
    }
    # Clamp out-of-range counts onto the two extremes.
    clamped = max(-2, min(2, verbosity))
    logging.getLogger('coap').setLevel(levels[clamped])
def incoming_observation(options, response):
    """Handle one observation notification.

    With --observe-exec, pipe the payload into the configured program;
    otherwise print a '---' separator followed by the payload (successful
    responses to stdout, errors to stderr)."""
    if options.observe_exec:
        p = subprocess.Popen(options.observe_exec, shell=True, stdin=subprocess.PIPE)
        # FIXME this blocks
        p.communicate(response.payload)
    else:
        sys.stdout.buffer.write(b'---\n')
        if response.code.is_successful():
            # BUG FIX: the old one-liner
            #   write(payload + b'\n' if not payload.endswith(b'\n') else b'')
            # wrote *nothing* when the payload already ended with a newline.
            sys.stdout.buffer.write(response.payload)
            if not response.payload.endswith(b'\n'):
                sys.stdout.buffer.write(b'\n')
            sys.stdout.buffer.flush()
        else:
            print(response.code, file=sys.stderr)
            if response.payload:
                print(response.payload.decode('utf-8'), file=sys.stderr)
@asyncio.coroutine
def single_request(args, context=None):
    """Execute one command-line-described CoAP request.

    `args` is the argument vector (without the program name); `context` may
    be a pre-existing client context (as used in interactive mode), otherwise
    a fresh one is created. Failures terminate via sys.exit."""
    parser, options = parse_commandline(args)

    configure_logging((options.verbose or 0) - (options.quiet or 0))

    # Accept the method both by name ("GET") and by number.
    try:
        code = getattr(aiocoap.numbers.codes.Code, options.method.upper())
    except AttributeError:
        try:
            code = aiocoap.numbers.codes.Code(int(options.method))
        except ValueError:
            raise parser.error("Unknown method")

    if context is None:
        context = yield from aiocoap.Context.create_client_context(dump_to=options.dump)
    else:
        if options.dump:
            print("The --dump option is not implemented in interactive mode.", file=sys.stderr)

    request = aiocoap.Message(code=code)
    try:
        request.set_request_uri(options.url)
    except ValueError as e:
        raise parser.error(e)

    if not request.opt.uri_host:
        raise parser.error("Request URLs need to be absolute.")

    # Accept both numeric content formats and well-known MIME names.
    if options.accept:
        try:
            request.opt.accept = int(options.accept)
        except ValueError:
            try:
                request.opt.accept = aiocoap.numbers.media_types_rev[options.accept]
            except KeyError:
                raise parser.error("Unknown accept type")

    if options.observe:
        request.opt.observe = 0
        observation_is_over = asyncio.Future()

    if options.payload:
        if options.payload.startswith('@'):
            try:
                # BUG FIX: the file was previously opened without ever being
                # closed; the context manager releases it deterministically.
                with open(options.payload[1:], 'rb') as payload_file:
                    request.payload = payload_file.read()
            except OSError as e:
                raise parser.error("File could not be opened: %s"%e)
        else:
            request.payload = options.payload.encode('utf8')

    if options.content_format:
        try:
            request.opt.content_format = int(options.content_format)
        except ValueError:
            try:
                request.opt.content_format = aiocoap.numbers.media_types_rev[options.content_format]
            except KeyError:
                raise parser.error("Unknown content format")

    if options.proxy is None:
        interface = context
    else:
        interface = aiocoap.proxy.client.ProxyForwarder(options.proxy, context)

    # BUG FIX: created outside the try block -- if request() itself raised,
    # the finally clause below used to fail with NameError on `requester`,
    # masking the original exception.
    requester = interface.request(request)
    try:
        if options.observe:
            requester.observation.register_errback(observation_is_over.set_result)
            requester.observation.register_callback(lambda data, options=options: incoming_observation(options, data))

        try:
            response_data = yield from requester.response
        except socket.gaierror as e:
            print("Name resolution error:", e, file=sys.stderr)
            sys.exit(1)

        if response_data.code.is_successful():
            sys.stdout.buffer.write(response_data.payload)
            sys.stdout.buffer.flush()
            if not response_data.payload.endswith(b'\n') and not options.quiet:
                sys.stderr.write('\n(No newline at end of message)\n')
        else:
            print(response_data.code, file=sys.stderr)
            if response_data.payload:
                print(response_data.payload.decode('utf-8'), file=sys.stderr)
            sys.exit(1)

        if options.observe:
            exit_reason = yield from observation_is_over
            print("Observation is over: %r"%(exit_reason,), file=sys.stderr)
    finally:
        if not requester.response.done():
            requester.response.cancel()
        if options.observe and not requester.observation.cancelled:
            requester.observation.cancel()
# Future completed by the KeyboardInterrupt handler in __main__ to abort the
# command currently running in interactive mode; replaced for every command.
# NOTE(review): instantiating a Future at import time assumes a default event
# loop is obtainable here -- confirm this still holds on newer Python versions.
interactive_expecting_keyboard_interrupt = asyncio.Future()
@asyncio.coroutine
def interactive():
    """Prompt loop: read command lines from stdin and run each of them as a
    single_request on one shared client context until 'exit'/'quit'/EOF."""
    global interactive_expecting_keyboard_interrupt

    context = yield from aiocoap.Context.create_client_context()

    while True:
        try:
            # when http://bugs.python.org/issue22412 is resolved, use that instead
            line = yield from asyncio.get_event_loop().run_in_executor(None, lambda: input("aiocoap> "))
        except EOFError:
            line = "exit"
        line = shlex.split(line)
        if not line:
            continue
        if line in (["help"], ["?"]):
            line = ["--help"]
        if line in (["quit"], ["q"], ["exit"]):
            return

        current_task = asyncio.Task(single_request(line, context=context))
        # Completing this future (done by the Ctrl-C handler in __main__)
        # aborts only the current command, not the whole prompt loop.
        interactive_expecting_keyboard_interrupt = asyncio.Future()

        done, pending = yield from asyncio.wait([current_task, interactive_expecting_keyboard_interrupt], return_when=asyncio.FIRST_COMPLETED)

        if current_task not in done:
            current_task.cancel()
        else:
            try:
                yield from current_task
            except SystemExit as e:
                # single_request signals failure via sys.exit; report and loop on.
                if e.code != 0:
                    print("Exit code: %d"%e.code, file=sys.stderr)
                continue
            except Exception as e:
                print("Unhandled exception raised: %s"%(e,))
if __name__ == "__main__":
    args = sys.argv[1:]
    if '--interactive' not in args:
        # One-shot mode: run the request, translating Ctrl-C into exit code 3.
        try:
            asyncio.get_event_loop().run_until_complete(single_request(args))
        except KeyboardInterrupt:
            sys.exit(3)
    else:
        if len(args) != 1:
            print("No other arguments must be specified when entering interactive mode", file=sys.stderr)
            sys.exit(1)
        loop = asyncio.get_event_loop()
        task = asyncio.Task(interactive())
        task.add_done_callback(lambda result: loop.stop())
        # run_forever is re-entered after each KeyboardInterrupt/SystemExit so
        # that Ctrl-C only aborts the current command, not the prompt loop.
        # NOTE(review): nothing here ever close()s the loop, so the loop
        # condition relies on the process exiting by other means -- confirm.
        while not loop.is_closed():
            try:
                loop.run_forever()
            except KeyboardInterrupt:
                if not interactive_expecting_keyboard_interrupt.done():
                    interactive_expecting_keyboard_interrupt.set_result(None)
            except SystemExit:
                continue # asyncio/tasks.py(242) raises those after setting them as results, but we particularly want them back in the loop
aiocoap-0.1+9ae36f4/coap-proxy 0000775 0000000 0000000 00000000120 12560602342 0016062 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python3
# Thin launcher script: all of the proxy's logic lives in aiocoap.cli.proxy.
import aiocoap.cli.proxy
aiocoap.cli.proxy.sync_main()
aiocoap-0.1+9ae36f4/doc/ 0000775 0000000 0000000 00000000000 12560602342 0014607 5 ustar 00root root 0000000 0000000 aiocoap-0.1+9ae36f4/doc/LICENSE.rst 0000664 0000000 0000000 00000000051 12560602342 0016417 0 ustar 00root root 0000000 0000000 LICENSE
=======
.. include:: ../LICENSE
aiocoap-0.1+9ae36f4/doc/README.doc 0000664 0000000 0000000 00000001427 12560602342 0016237 0 ustar 00root root 0000000 0000000 in order to have auto-buildable documentation with sphinx (using `./setup.py
build_sphinx`), you must first run
for x in $(find aiocoap -name '*.py' | sed -e 's@.py$@@' -e 's@/__init__$@@' -e 's@/@.@g')
do
rm doc/$x.rst
echo "$x module\n=========================================\n\n.. automodule:: $x" > doc/$x.rst
if [[ $x != "aiocoap.numbers" ]]
then
echo " :members:\n :undoc-members:\n :show-inheritance:" >> doc/$x.rst
else
echo ".. toctree::\n :glob:\n\n aiocoap.numbers.*\n" >> doc/$x.rst
fi
done
(sphinx-apidoc is less customizable and does not seem to do more).
for readthedocs to work, a branch `for-readthedocs` has been generated that
contains those files statically checked in.
aiocoap-0.1+9ae36f4/doc/conf.py 0000664 0000000 0000000 00000013675 12560602342 0016122 0 ustar 00root root 0000000 0000000 # -*- coding: utf-8 -*-
#
# txThings asyncio branch documentation build configuration file, created by
# sphinx-quickstart on Wed Jun 4 09:40:16 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# NOTE: sphinx-quickstart boilerplate; only the uncommented values below were
# customized for aiocoap.
import sys
import os

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('.'))
# maybe required for readthedocs
sys.path.insert(0, os.path.abspath('..'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'aiocoap'
copyright = u'2014, Maciej Wasilak, Christian Amsüss'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []

# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# " v documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'aiocoap'

autodoc_member_order = 'bysource'
aiocoap-0.1+9ae36f4/doc/examples.rst 0000664 0000000 0000000 00000001054 12560602342 0017157 0 ustar 00root root 0000000 0000000 Usage Examples
==============
In the absence of a complete tutorial, these files can serve as reference
implementations for server and client. In order to test them, run
``./server.py`` in one terminal, and use ``./clientGET.py`` and
``./clientPUT.py`` to interact with it.
Client
------
.. literalinclude:: ../clientGET.py
:language: python
:linenos:
:lines: 10-
.. literalinclude:: ../clientPUT.py
:language: python
:linenos:
:lines: 10-
Server
------
.. literalinclude:: ../server.py
:language: python
:linenos:
:lines: 10-
aiocoap-0.1+9ae36f4/doc/index.rst 0000664 0000000 0000000 00000000710 12560602342 0016446 0 ustar 00root root 0000000 0000000 .. include:: ../README.rst
.. toctree::
:glob:
:titlesonly:
aiocoap
aiocoap.protocol
aiocoap.message
aiocoap.interfaces
aiocoap.proxy
aiocoap.proxy.client
aiocoap.proxy.server
aiocoap.cli
aiocoap.cli.proxy
aiocoap.numbers
aiocoap.error
aiocoap.options
aiocoap.optiontypes
aiocoap.resource
aiocoap.dump
aiocoap.util
aiocoap.util.asyncio
aiocoap.util.queuewithend
examples
tools
LICENSE
aiocoap-0.1+9ae36f4/doc/tools.rst 0000664 0000000 0000000 00000000673 12560602342 0016507 0 ustar 00root root 0000000 0000000 CoAP tools
==========
As opposed to the :doc:`examples`, programs listed here are not tuned to show
the use of aiocoap, but are tools for everyday work with CoAP implemented in
aiocoap. Still, they can serve as examples of how to deal with user-provided
addresses (as opposed to the fixed addresses in the examples), or of
integration in a bigger project in general.
* ``coap-client``: A CoAP client that supports observations and proxying.
aiocoap-0.1+9ae36f4/server.py 0000775 0000000 0000000 00000010305 12560602342 0015724 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python3
# This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
import datetime
import logging
import asyncio
import aiocoap.resource as resource
import aiocoap
class BlockResource(resource.Resource):
    """
    Example resource which supports GET and PUT methods. It sends large
    responses, which trigger blockwise transfer.
    """

    def __init__(self):
        super(BlockResource, self).__init__()
        padding = "0123456789\n" * 100
        self.content = ("This is the resource's default content. It is padded "
                        "with numbers to be large enough to trigger blockwise "
                        "transfer.\n" + padding).encode("ascii")

    @asyncio.coroutine
    def render_get(self, request):
        return aiocoap.Message(code=aiocoap.CONTENT, payload=self.content)

    @asyncio.coroutine
    def render_put(self, request):
        print('PUT payload: %s' % request.payload)
        self.content = request.payload
        payload = ("I've accepted the new payload. You may inspect it here in "
                   "Python's repr format:\n\n%r" % self.content).encode('utf8')
        return aiocoap.Message(code=aiocoap.CHANGED, payload=payload)
class SeparateLargeResource(resource.Resource):
    """
    Example resource which supports GET method. It uses asyncio.sleep to
    simulate a long-running operation, and thus forces the protocol to send
    empty ACK first.
    """
    def __init__(self):
        super(SeparateLargeResource, self).__init__()
        # self.add_param(resource.LinkParam("title", "Large resource."))

    @asyncio.coroutine
    def render_get(self, request):
        yield from asyncio.sleep(3)

        # BUG FIX: the implicitly concatenated literals lacked separating
        # spaces at the line joins ("ringsfor", "formortal", "hisdark").
        payload = ("Three rings for the elven kings under the sky, seven rings "
                   "for dwarven lords in their halls of stone, nine rings for "
                   "mortal men doomed to die, one ring for the dark lord on his "
                   "dark throne.").encode('ascii')
        return aiocoap.Message(code=aiocoap.CONTENT, payload=payload)
class TimeResource(resource.ObservableResource):
    """
    Example resource that can be observed. The `notify` method keeps scheduling
    itself, and calls `updated_state` to trigger sending notifications.
    """
    def __init__(self):
        super(TimeResource, self).__init__()
        self.notify()

    def notify(self):
        # Trigger notifications to all observers, then re-schedule ourselves
        # to fire again in one minute.
        self.updated_state()
        asyncio.get_event_loop().call_later(60, self.notify)

    @asyncio.coroutine
    def render_get(self, request):
        # Minute resolution matches the 60-second notification interval.
        payload = datetime.datetime.now().strftime("%Y-%m-%d %H:%M").encode('ascii')
        return aiocoap.Message(code=aiocoap.CONTENT, payload=payload)
#class CoreResource(resource.Resource):
# """
# Example Resource that provides list of links hosted by a server.
# Normally it should be hosted at /.well-known/core
#
# Notice that self.visible is not set - that means that resource won't
# be listed in the link format it hosts.
# """
#
# def __init__(self, root):
# resource.Resource.__init__(self)
# self.root = root
#
# @asyncio.coroutine
# def render_get(self, request):
# data = []
# self.root.generate_resource_list(data, "")
# payload = ",".join(data).encode('utf-8')
# response = aiocoap.Message(code=aiocoap.CONTENT, payload=payload)
# response.opt.content_format = 40
# return response
# logging setup

logging.basicConfig(level=logging.INFO)
# the server's own messages get full debug detail
logging.getLogger("coap-server").setLevel(logging.DEBUG)
def main():
    """Build the example resource tree and serve it on the event loop forever."""
    # Resource tree creation
    root = resource.Site()

    root.add_resource(('.well-known', 'core'), resource.WKCResource(root.get_resources_as_linkheader))
    root.add_resource(('time',), TimeResource())
    root.add_resource(('other', 'block'), BlockResource())
    root.add_resource(('other', 'separate'), SeparateLargeResource())

    # BUG FIX: asyncio.async is a syntax error on Python >= 3.7;
    # ensure_future (available since 3.4.4) is the equivalent spelling.
    asyncio.ensure_future(aiocoap.Context.create_server_context(root))

    asyncio.get_event_loop().run_forever()

if __name__ == "__main__":
    main()
aiocoap-0.1+9ae36f4/setup.py 0000775 0000000 0000000 00000003074 12560602342 0015563 0 ustar 00root root 0000000 0000000 #!/usr/bin/env python3
# This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
"""aiocoap is a Python library for writing servers and clients for the CoAP
(Constrained Application) Protocol, which is used mainly in the context of IoT
(Internet of Things) devices."""
from setuptools import setup, find_packages
name = "aiocoap"
version = "0.1+git"
description = "Python CoAP library"
longdescription = __doc__

setup(
    name=name,
    version=version,
    description=description,
    # ``longdescription`` was previously assigned above but never passed on,
    # so package indexes would have shown no long description at all.
    long_description=longdescription,
    packages=find_packages(),
    author="Maciej Wasilak, Christian Amsüss",
    author_email="Christian Amsüss ",
    url="https://github.com/chrysn/aiocoap",
    keywords=['coap', 'asyncio', 'iot'],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Internet',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    # let `setup.py build_sphinx` reuse the package metadata
    command_options={
        'build_sphinx': {
            'project': ('setup.py', name),
            'version': ('setup.py', version),
            'release': ('setup.py', version),
        }
    },
    test_suite='tests',
)
aiocoap-0.1+9ae36f4/tests/ 0000775 0000000 0000000 00000000000 12560602342 0015204 5 ustar 00root root 0000000 0000000 aiocoap-0.1+9ae36f4/tests/__init__.py 0000664 0000000 0000000 00000000710 12560602342 0017313 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
"""Module that contains the various test scenarios.
Can be used most easily from setup.py as `./setup.py test`."""
aiocoap-0.1+9ae36f4/tests/client.py 0000664 0000000 0000000 00000004566 12560602342 0017047 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
import asyncio
import aiocoap
import unittest
from .server import WithTestServer, WithClient, no_warnings
class TestClient(WithTestServer, WithClient):
    """Client-side tests for request URI construction and round-tripping."""

    @no_warnings
    def test_uri_parser(self):
        """The request URI must survive the request/response round trip, both
        for a literal address and for a resolvable host name alias."""
        yieldfrom = lambda f: self.loop.run_until_complete(f)
        request = aiocoap.Message(code=aiocoap.GET)
        request_uri = "coap://" + self.servernetloc + "/empty?query=a&query=b"
        request.set_request_uri(request_uri)
        self.assertEqual(request.get_request_uri(), request_uri, "Request URL does not round-trip in request")
        response = yieldfrom(self.client.request(request).response)
        self.assertEqual(response.get_request_uri(), request_uri, "Request URL does not round-trip in response")
        self.assertEqual(response.code, aiocoap.CONTENT, "Request URL building failed")

        # same round trip, but using a host name instead of a literal address
        request = aiocoap.Message(code=aiocoap.GET)
        request.set_request_uri("coap://" + self.servernamealias + "/empty")
        self.assertEqual(request.get_request_uri(), "coap://" + self.servernamealias + "/empty")
        response = yieldfrom(self.client.request(request).response)
        self.assertEqual(response.code, aiocoap.CONTENT, "Resolving WithTestServer.servernamealias failed")
        self.assertEqual(response.get_request_uri(), "coap://" + self.servernamealias + "/empty", "Host name did not get round-tripped")

    @no_warnings
    def test_uri_parser2(self):
        """A difficult test because it is prone to keeping the transport
        around, bothering later tests"""
        yieldfrom = lambda f: self.loop.run_until_complete(f)
        request = aiocoap.Message(code=aiocoap.GET)
        # nothing listens on port 9999; only the parsed remote is checked
        request.set_request_uri("coap://" + self.servernetloc + ":9999/empty")
        resp = self.client.request(request).response
        try:
            # give the request some time to finish getaddrinfo
            yieldfrom(asyncio.as_completed([resp], timeout=0.01).__next__())
        except asyncio.TimeoutError:
            pass
        self.assertEqual(request.remote[1], 9999, "Remote port was not parsed")
        resp.cancel()
aiocoap-0.1+9ae36f4/tests/doctest.py 0000664 0000000 0000000 00000001473 12560602342 0017230 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
import unittest
import doctest
import aiocoap
import os
def load_tests(loader, tests, ignore):
    """Collect doctests from every aiocoap module into *tests*.

    Walks the ``aiocoap`` package directory, converts each ``.py`` file path
    into a dotted module name and adds that module's doctests. Returns the
    (mutated) *tests* suite, as the unittest load_tests protocol requires.
    """
    for root, dn, fn in os.walk('aiocoap'):
        for f in fn:
            if not f.endswith('.py'):
                continue
            if "queuewithend" in f:
                # exclude queuewithend module, it's unstable yet anyway
                continue
            p = os.path.join(root, f)
            # use os.sep rather than a literal '/' so the path-to-module
            # conversion also works on Windows
            modulename = p[:-3].replace(os.sep, '.')
            tests.addTests(doctest.DocTestSuite(modulename))
    return tests
aiocoap-0.1+9ae36f4/tests/encoding.py 0000664 0000000 0000000 00000022732 12560602342 0017352 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
import struct
import copy
import aiocoap
import aiocoap.optiontypes
import unittest
class TestMessage(unittest.TestCase):
    """Byte-level encode/decode checks for aiocoap.Message."""

    def test_encode(self):
        # smallest possible message: empty CON, mid 0, no token/options/payload
        msg1 = aiocoap.Message(mtype=aiocoap.CON, mid=0)
        binary1 = bytes((64,0,0,0))
        self.assertEqual(msg1.encode(), binary1, "wrong encode operation for empty CON message")

        # ACK carrying a token, an ETag option and a payload
        msg2 = aiocoap.Message(mtype=aiocoap.ACK, mid=0xBC90, code=aiocoap.CONTENT, payload=b"temp = 22.5 C", token=b'q')
        msg2.opt.etag = b"abcd"
        binary2 = bytes((97,69,188,144,113,68))+b"abcd"+bytes((255,))+b"temp = 22.5 C"
        self.assertEqual(msg2.encode(), binary2, "wrong encode operation for ACK message with payload, and Etag option")

        # four equivalent ways of removing the ETag option again
        msg2short = binary2[0:5] + binary2[10:] # header, token, marker, data
        msg2a = copy.deepcopy(msg2)
        del msg2a.opt.etag
        self.assertEqual(msg2a.encode(), msg2short, "deleting single property did not succeed")
        msg2b = copy.deepcopy(msg2)
        del msg2b.opt.etags
        self.assertEqual(msg2b.encode(), msg2short, "deleting list property did not succeed")
        msg2c = copy.deepcopy(msg2)
        msg2c.opt.etags = []
        self.assertEqual(msg2c.encode(), msg2short, "emptying list property did not succeed")
        msg2d = copy.deepcopy(msg2)
        msg2d.opt.etag = None
        self.assertEqual(msg2d.encode(), msg2short, "setting single property to None did not succeed")

        # a message without type/mid set cannot be encoded
        msg3 = aiocoap.Message()
        self.assertRaises(TypeError, msg3.encode)

        # message IDs are limited to 16 bit
        msg4 = aiocoap.Message(mtype=aiocoap.CON, mid=2<<16)
        self.assertRaises(Exception, msg4.encode)

        # high option numbers need the extended option-delta encoding
        msg5 = aiocoap.Message(mtype=aiocoap.CON, mid=0)
        o = aiocoap.optiontypes.OpaqueOption(1234, value=b"abcd")
        msg5.opt.add_option(o)
        binary5 = binary1 + bytes((0xe4, 0x03, 0xc5)) + b"abcd"
        self.assertEqual(msg5.encode(), binary5, "wrong encoding for high option numbers")

        # option numbers beyond the encodable range must be rejected
        msg6 = aiocoap.Message(mtype=aiocoap.CON, mid=0)
        o = aiocoap.optiontypes.OpaqueOption(12345678, value=b"abcd")
        msg6.opt.add_option(o)
        self.assertRaises(ValueError, msg6.encode)

        # option names not known to the library must not be silently accepted
        msg7 = aiocoap.Message(mtype=aiocoap.CON, mid=0)
        def set_unknown_opt():
            msg7.opt.foobar = 42
        self.assertRaises(AttributeError, set_unknown_opt)

    def test_decode(self):
        # empty CON message (inverse of test_encode's msg1)
        rawdata1 = bytes((64,0,0,0))
        self.assertEqual(aiocoap.Message.decode(rawdata1).mtype, aiocoap.CON, "wrong message type for decode operation")
        self.assertEqual(aiocoap.Message.decode(rawdata1).mid, 0, "wrong message ID for decode operation")
        self.assertEqual(aiocoap.Message.decode(rawdata1).code, aiocoap.EMPTY, "wrong message code for decode operation")
        self.assertEqual(aiocoap.Message.decode(rawdata1).token, b'', "wrong message token for decode operation")
        self.assertEqual(aiocoap.Message.decode(rawdata1).payload, b'', "wrong message payload for decode operation")

        # ACK with token, ETag option and payload (inverse of msg2 above)
        rawdata2 = bytes((97,69,188,144,113,68))+b"abcd"+bytes((255,))+b"temp = 22.5 C"
        self.assertEqual(aiocoap.Message.decode(rawdata2).mtype, aiocoap.ACK, "wrong message type for decode operation")
        self.assertEqual(aiocoap.Message.decode(rawdata2).mid, 0xBC90, "wrong message ID for decode operation")
        self.assertEqual(aiocoap.Message.decode(rawdata2).code, aiocoap.CONTENT, "wrong message code for decode operation")
        self.assertEqual(aiocoap.Message.decode(rawdata2).token, b'q', "wrong message token for decode operation")
        self.assertEqual(aiocoap.Message.decode(rawdata2).payload, b'temp = 22.5 C', "wrong message payload for decode operation")
        self.assertEqual(aiocoap.Message.decode(rawdata2).opt.etags, (b"abcd",), "problem with etag option decoding for decode operation")
        self.assertEqual(len(aiocoap.Message.decode(rawdata2).opt._options), 1, "wrong number of options after decode operation")

        rawdata3 = rawdata1 + bytes((0xf0,))
        self.assertRaises(ValueError, aiocoap.Message.decode, rawdata3) # message with option delta reserved for payload marker
class TestReadExtendedFieldValue(unittest.TestCase):
    """Decoding of extended option delta/length fields."""

    def test_read_extended_field_value(self):
        # ((value nibble, raw tail), (decoded value, remaining raw data))
        cases = [
            ((0, b"aaaa"), (0, b"aaaa")),
            ((0, b""), (0, b"")),
            ((1, b"aaaa"), (1, b"aaaa")),
            ((12, b"aaaa"), (12, b"aaaa")),
            ((13, b"aaaa"), (110, b"aaa")),
            ((13, b"a"), (110, b"")),
            ((14, b"aaaa"), (25198, b"aa")),
            ((14, b"aa"), (25198, b"")),
        ]
        for (value, rawdata), expected in cases:
            self.assertEqual(
                    aiocoap.options._read_extended_field_value(value, rawdata),
                    expected,
                    'wrong result for value : ' + repr(value) + ' , rawdata : ' + repr(rawdata))
class TestUintOption(unittest.TestCase):
    """Encode/decode/length checks for unsigned-integer option values."""

    def test_encode(self):
        # (value, expected minimal big-endian encoding)
        cases = [
            (0, b""),
            (1, bytes((1,))),
            (2, bytes((2,))),
            (40, bytes((40,))),
            (50, bytes((50,))),
            (255, bytes((255,))),
            (256, bytes((1, 0))),
            (1000, bytes((3, 232))),
        ]
        for value, expected in cases:
            self.assertEqual(
                    aiocoap.optiontypes.UintOption(0, value).encode(),
                    expected,
                    'wrong encode operation for option value : ' + str(value))

    def test_decode(self):
        # (raw data, expected decoded value); the first case deliberately
        # passes an empty str rather than bytes, as the original test did
        cases = [
            ("", 0),
            (bytes((1,)), 1),
            (bytes((2,)), 2),
            (bytes((40,)), 40),
            (bytes((50,)), 50),
            (bytes((255,)), 255),
            (bytes((1, 0)), 256),
            (bytes((3, 232)), 1000),
        ]
        for rawdata, expected in cases:
            self.assertEqual(
                    aiocoap.optiontypes.UintOption(0).decode(rawdata).value,
                    expected,
                    'wrong decode operation for rawdata : ' + str(rawdata))

    def test_length(self):
        # (value, number of bytes its encoding occupies)
        cases = [
            (0, 0),
            (1, 1),
            (2, 1),
            (40, 1),
            (50, 1),
            (255, 1),
            (256, 2),
            (1000, 2),
        ]
        for value, expected in cases:
            self.assertEqual(
                    aiocoap.optiontypes.UintOption(0, value)._length(),
                    expected,
                    'wrong length for option value : ' + str(value))
class TestOptions(unittest.TestCase):
    def test_set_uri_path(self):
        """Assigning an iterable of strings to ``uri_path`` creates one
        Uri-Path option per element; non-iterables are rejected."""
        URI_PATH = aiocoap.OptionNumber.URI_PATH

        single = aiocoap.options.Options()
        single.uri_path = ["core"]
        self.assertEqual(len(single.get_option(URI_PATH)), 1, 'wrong uri_path setter operation for single string argument')
        self.assertEqual(single.get_option(URI_PATH)[0].value, "core", 'wrong uri_path setter operation for single string argument')

        double = aiocoap.options.Options()
        double.uri_path = ("core",".well-known")
        stored = double.get_option(URI_PATH)
        self.assertEqual(len(stored), 2, 'wrong uri_path setter operation for 2-element tuple argument')
        self.assertEqual(stored[0].value, "core", 'wrong uri_path setter operation for 2-element tuple argument')
        self.assertEqual(stored[1].value, ".well-known", 'wrong uri_path setter operation for 2-element tuple argument')

        rejecting = aiocoap.options.Options()
        self.assertRaises(TypeError, setattr, rejecting, "uri_path", 42)
class TestOptiontypes(unittest.TestCase):
    def test_optiontypes(self):
        """Check critical/unsafe/nocachekey classification of option numbers."""
        # expectations from RFC 7252, Table 4 ("rfc725" in the original
        # comment was a typo); letters: C=critical, U=unsafe, N=nocachekey
        on = aiocoap.numbers.OptionNumber
        options = {
                on.IF_MATCH: "C",
                on.URI_HOST: "CU",
                on.ETAG: "",
                on.MAX_AGE: "U",
                on.SIZE1: "N"
                }

        for o, expected in options.items():
            self.assertEqual("C" in expected, o.is_critical(), "Unexpected criticalness of %r"%o)
            self.assertEqual("C" not in expected, o.is_elective(), "Unexpected electiveness of %r"%o)
            self.assertEqual("U" in expected, o.is_unsafe(), "Unexpected unsafeness of %r"%o)
            self.assertEqual("U" not in expected, o.is_safetoforward(), "Unexpected safetoforwardness of %r"%o)
            if o.is_safetoforward():
                self.assertEqual("N" in expected, o.is_nocachekey(), "Unexpected nocachekeyness of %r"%o)
                self.assertEqual("N" not in expected, o.is_cachekey(), "Unexpected cachekeyness of %r"%o)
            else:
                # NoCacheKey is only meaningful for safe-to-forward options
                self.assertRaises(ValueError, o.is_nocachekey)
                self.assertRaises(ValueError, o.is_cachekey)
aiocoap-0.1+9ae36f4/tests/observe.py 0000664 0000000 0000000 00000012253 12560602342 0017226 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
"""Tests for resource observation
Note that cancellation of observations is checked in neither way; that's
largely because the implementation has fallen behind on the drafts anyway and
needs to be updated."""
import asyncio
import aiocoap
import unittest
from aiocoap.resource import ObservableResource
from .server import WithTestServer, WithClient, no_warnings, ReplacingResource, MultiRepresentationResource
class ObservableCounter(ObservableResource):
    """Observable resource whose representation is a monotonically growing
    counter, bumped externally via :meth:`increment`."""

    def __init__(self):
        super().__init__()
        self.count = 0

    def increment(self):
        """Advance the counter and notify all current observers."""
        self.count += 1
        self.updated_state()

    @asyncio.coroutine
    def render_get(self, request):
        body = str(self.count)
        return aiocoap.Message(code=aiocoap.CONTENT, payload=body.encode('ascii'))
class ObservableReplacingResource(ReplacingResource, ObservableResource):
    """ReplacingResource variant that notifies observers after every PUT."""
    @asyncio.coroutine
    def render_put(self, request):
        # delegate the actual replacement, then signal the state change
        result = yield from super(ObservableReplacingResource, self).render_put(request)
        self.updated_state()
        return result
class ObserveTestingSite(aiocoap.resource.Site):
    """Site with one unobservable and two observable test resources; the
    counter is kept as an attribute so tests can drive it directly."""

    def __init__(self):
        super().__init__()
        self.counter = ObservableCounter()
        for path, res in [
                (('unobservable',), MultiRepresentationResource()),
                (('count',), self.counter),
                (('echo',), ObservableReplacingResource()),
                ]:
            self.add_resource(path, res)
class WithObserveTestServer(WithTestServer):
    """WithTestServer variant serving ObserveTestingSite; the site is stored
    in ``self.testingsite`` so tests can poke its resources directly."""
    def create_testing_site(self):
        self.testingsite = ObserveTestingSite()
        return self.testingsite
class TestObserve(WithObserveTestServer, WithClient):
    """End-to-end observation tests against ObserveTestingSite."""

    @no_warnings
    def test_normal_get(self):
        # a plain (non-observing) GET on an observable resource must still work
        request = aiocoap.Message(code=aiocoap.GET)
        request.opt.uri_path = ['count']
        request.unresolved_remote = self.servernetloc
        response = self.loop.run_until_complete(self.client.request(request).response)
        self.assertEqual(response.code, aiocoap.CONTENT, "Normal request did not succede")
        self.assertEqual(response.payload, b'0', "Normal request gave unexpected result")

    def build_observer(self, path):
        """Issue an observing GET on *path*.

        Returns the requester, the list into which notification payloads (or
        error reasons) get appended, and a callable that cancels the
        observation."""
        request = aiocoap.Message(code=aiocoap.GET)
        request.unresolved_remote = self.servernetloc
        request.opt.uri_path = path
        request.opt.observe = 0
        requester = self.client.request(request)
        observation_results = []
        requester.observation.register_callback(lambda message: observation_results.append(message.payload))
        requester.observation.register_errback(lambda reason: observation_results.append(reason))
        notinterested = lambda: requester.observation.cancel()
        return requester, observation_results, notinterested

    @no_warnings
    def test_unobservable(self):
        # observing a non-observable resource yields a normal response plus a
        # NotObservable error on the observation
        yieldfrom = self.loop.run_until_complete
        requester, observation_results, notinterested = self.build_observer(['unobservable'])
        response = self.loop.run_until_complete(requester.response)
        self.assertEqual(response.code, aiocoap.CONTENT, "Unobservable base request did not succede")
        self.assertEqual(response.payload, b'', "Unobservable base request gave unexpected result")
        yieldfrom(asyncio.sleep(0.1))
        self.assertEqual(str(observation_results), '[NotObservable()]')

    @no_warnings
    def test_counter(self):
        # every increment must produce exactly one notification
        yieldfrom = self.loop.run_until_complete
        requester, observation_results, notinterested = self.build_observer(['count'])
        response = self.loop.run_until_complete(requester.response)
        self.assertEqual(response.code, aiocoap.CONTENT, "Observe base request did not succede")
        self.assertEqual(response.payload, b'0', "Observe base request gave unexpected result")
        self.testingsite.counter.increment()
        yieldfrom(asyncio.sleep(0.1))
        self.assertEqual(observation_results, [b'1'])
        self.testingsite.counter.increment()
        yieldfrom(asyncio.sleep(0.1))
        self.assertEqual(observation_results, [b'1', b'2'])

    @no_warnings
    def test_echo(self):
        # a PUT while observing must trigger a notification with the new state
        yieldfrom = self.loop.run_until_complete
        def put(b):
            m = aiocoap.Message(code=aiocoap.PUT, payload=b)
            m.unresolved_remote = self.servernetloc
            m.opt.uri_path = ['echo']
            response = yieldfrom(self.client.request(m).response)
            self.assertEqual(response.code, aiocoap.CHANGED)
        put(b'test data 1')
        requester, observation_results, notinterested = self.build_observer(['echo'])
        response = self.loop.run_until_complete(requester.response)
        self.assertEqual(response.code, aiocoap.CONTENT, "Observe base request did not succede")
        self.assertEqual(response.payload, b'test data 1', "Observe base request gave unexpected result")
        put(b'test data 2')
        yieldfrom(asyncio.sleep(0.1))
        self.assertEqual(observation_results, [b'test data 2'])
aiocoap-0.1+9ae36f4/tests/proxy.py 0000664 0000000 0000000 00000004756 12560602342 0016753 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
import asyncio
from .server import WithAsyncLoop, Destructing, WithClient, TestServer, CLEANUPTIME
from .client import TestClient
import aiocoap.proxy.client
import aiocoap.cli.proxy
class WithProxyServer(WithAsyncLoop, Destructing):
    """Fixture that starts a forwarding CoAP proxy on a fixed port."""

    def setUp(self):
        super(WithProxyServer, self).setUp()
        self.forwardproxy = aiocoap.cli.proxy.Main(["--forward", "--server-port", str(self.proxyport)])
        self.loop.run_until_complete(self.forwardproxy.initializing)

    def tearDown(self):
        super(WithProxyServer, self).tearDown()
        self.loop.run_until_complete(self.forwardproxy.shutdown())

        # creating a reference loop between the cli instance and its contexts,
        # so that the cli instance's gc-ing is linked to the contexts'.
        # TODO how can we handle this more smoothly?
        # (bug fix: these assignments previously referenced self.reverseproxy,
        # an attribute that only exists in the reverse-proxy fixture this code
        # was copied from, which raised AttributeError here)
        self.forwardproxy.outgoing_context._cli = self.forwardproxy
        self.forwardproxy.proxy_context._cli = self.forwardproxy

        self._del_to_be_sure('forwardproxy')

        self.loop.run_until_complete(asyncio.sleep(CLEANUPTIME))

    proxyport = 56839
    proxyaddress = 'localhost:%d'%proxyport
class WithProxyClient(WithClient, WithProxyServer):
    """Fixture whose ``self.client`` routes requests through the forward proxy."""

    def setUp(self):
        super(WithProxyClient, self).setUp()
        original_client_log = self.client.log
        self.client = aiocoap.proxy.client.ProxyForwarder(self.proxyaddress, self.client)
        self.client.log = original_client_log

    def tearDown(self):
        # unwrap the ProxyForwarder so WithClient.tearDown can shut the
        # underlying context down; the super() call was previously missing,
        # so neither the client nor the proxy fixtures were ever torn down
        self.client = self.client.context
        super(WithProxyClient, self).tearDown()
class TestServerWithProxy(WithProxyClient, TestServer):
    """Re-runs the plain server test suite, but through the forward proxy."""

    def build_request(self):
        # this needs to be run differently because tests/server.py
        # doesn't exactly use the high-level apis. (and that's ok because we need
        # to test the server with simple messages too.)
        # Address the proxy itself and name the real server via options.
        msg = aiocoap.Message(code=aiocoap.GET)
        msg.unresolved_remote = self.proxyaddress
        msg.opt.proxy_scheme = 'coap'
        msg.opt.uri_host = self.serveraddress
        return msg
# leaving that out for a moment because it fails more slowly
#class TestClientWithProxy(WithProxyClient, TestClient):
# pass
# no need to run them again
# unittest discovery would otherwise collect the imported base classes and
# run their tests a second time in this module's namespace
del TestClient
del TestServer
aiocoap-0.1+9ae36f4/tests/reverseproxy.py 0000664 0000000 0000000 00000007674 12560602342 0020351 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
import asyncio
from .server import WithAsyncLoop, Destructing, WithClient, WithTestServer, CLEANUPTIME
import aiocoap.proxy.client
import aiocoap.cli.proxy
class WithReverseProxy(WithAsyncLoop, Destructing):
    """Fixture that starts a reverse CoAP proxy which maps both a host-name
    alias and a path prefix onto the real test server."""

    def setUp(self):
        super(WithReverseProxy, self).setUp()

        self.reverseproxy = aiocoap.cli.proxy.Main(["--reverse", "--server-port", str(self.proxyport), "--namebased", "%s:%s"%(self.name_for_real_server, self.servernetloc), "--pathbased", "%s:%s"%("/".join(self.path_for_real_server), self.servernetloc)])
        self.loop.run_until_complete(self.reverseproxy.initializing)

    def tearDown(self):
        super(WithReverseProxy, self).tearDown()
        self.loop.run_until_complete(self.reverseproxy.shutdown())

        # creating a reference loop between the cli instance and its contexts,
        # so that the cli instance's gc-ing is linked to the contexts'.
        # TODO how can we handle this more smoothly?
        self.reverseproxy.outgoing_context._cli = self.reverseproxy
        self.reverseproxy.proxy_context._cli = self.reverseproxy
        self._del_to_be_sure('reverseproxy')

        self.loop.run_until_complete(asyncio.sleep(CLEANUPTIME))

    proxyport = 56839
    proxyaddress = 'localhost:%d'%proxyport

    # alias host name and path prefix the proxy maps onto the real server
    name_for_real_server = 'aliasedname'
    path_for_real_server = ('aliased', 'name')
class TestReverseProxy(WithReverseProxy, WithClient, WithTestServer):
    def test_routing(self):
        """Requests reach the real server only via the configured host alias
        or path prefix; an unqualified request must get 4.04."""
        yieldfrom = lambda f: self.loop.run_until_complete(f)
        def req():
            request = aiocoap.Message(code=aiocoap.GET)
            request.unresolved_remote = self.proxyaddress
            request.opt.uri_path = ('big',)
            return request

        request = req()
        response = yieldfrom(self.client.request(request).response)
        self.assertEqual(response.code, aiocoap.NOT_FOUND, "GET without hostname gave resource (NOT_FOUND expected)")

        request = req()
        request.opt.uri_host = self.name_for_real_server
        response = yieldfrom(self.client.request(request).response)
        self.assertEqual(response.code, aiocoap.CONTENT, "GET with hostname based proxying was not successful)")

        request = req()
        request.opt.uri_path = self.path_for_real_server + request.opt.uri_path
        response = yieldfrom(self.client.request(request).response)
        self.assertEqual(response.code, aiocoap.CONTENT, "GET with path based proxying was not successful)")

    def test_options(self):
        """The reverse proxy rejects Proxy-Scheme and unknown unsafe options,
        but forwards past unknown safe-to-forward options."""
        yieldfrom = lambda f: self.loop.run_until_complete(f)
        def req():
            request = aiocoap.Message(code=aiocoap.GET)
            request.unresolved_remote = self.proxyaddress
            request.opt.uri_path = ('big',)
            request.opt.uri_host = self.name_for_real_server
            return request

        request = req()
        request.opt.proxy_scheme = 'coap'
        response = yieldfrom(self.client.request(request).response)
        self.assertEqual(response.code, aiocoap.PROXYING_NOT_SUPPORTED, "Reverse proxy supports proxying even though it shouldn't.")

        request = req()
        # an unknown option number with the unsafe bit set
        request.opt.add_option(aiocoap.optiontypes.StringOption(2**10 + 2, "can't proxy this"))
        response = yieldfrom(self.client.request(request).response)
        self.assertEqual(response.code, aiocoap.BAD_OPTION, "Proxy did not react to unsafe option.")

        request = req()
        # an unknown but safe-to-forward option number
        request.opt.add_option(aiocoap.optiontypes.StringOption(2**10, "nothing to see here"))
        response = yieldfrom(self.client.request(request).response)
        self.assertEqual(response.code, aiocoap.CONTENT, "Proxy did not ignore to safe-to-forward option.")
aiocoap-0.1+9ae36f4/tests/server.py 0000664 0000000 0000000 00000034120 12560602342 0017064 0 ustar 00root root 0000000 0000000 # This file is part of the Python aiocoap library project.
#
# Copyright (c) 2012-2014 Maciej Wasilak ,
# 2013-2014 Christian Amsüss
#
# aiocoap is free software, this file is published under the MIT license as
# described in the accompanying LICENSE file.
import asyncio
import aiocoap
import aiocoap.resource
import unittest
import logging
import pprint
import weakref
import gc
# time granted to asyncio to receive datagrams sent via loopback, and to close
# connections. if tearDown checks fail erratically, tune this up -- but it
# causes per-fixture delays.
CLEANUPTIME = 0.01
class MultiRepresentationResource(aiocoap.resource.Resource):
    """Resource serving an empty representation in several content formats,
    selected via the request's Accept option."""

    @asyncio.coroutine
    def render_get(self, request):
        # an absent (or 0) Accept option falls back to text/plain
        ct = request.opt.accept or aiocoap.numbers.media_types_rev['text/plain']
        bodies = {
            aiocoap.numbers.media_types_rev['application/json']: b'{}',
            aiocoap.numbers.media_types_rev['application/link-format']: b'<>',
            aiocoap.numbers.media_types_rev['text/plain']: b'',
        }
        if ct not in bodies:
            return aiocoap.Message(code=aiocoap.NOT_ACCEPTABLE)
        return aiocoap.Message(code=aiocoap.CONTENT, payload=bodies[ct])
class SlowResource(aiocoap.resource.Resource):
    """Resource that answers only after a 0.2 s delay."""
    @asyncio.coroutine
    def render_get(self, request):
        yield from asyncio.sleep(0.2)
        return aiocoap.Message(code=aiocoap.CONTENT)
class BigResource(aiocoap.resource.Resource):
    """Resource with a 10 KiB payload and a hash-based ETag."""
    @asyncio.coroutine
    def render_get(self, request):
        # 10kb
        payload = b"0123456789----------" * 512
        response = aiocoap.Message(code=aiocoap.CONTENT, payload=payload)
        aiocoap.resource.hashing_etag(request, response)
        return response
class SlowBigResource(aiocoap.resource.Resource):
    """Resource that is both delayed (0.2 s) and larger than a single block."""
    @asyncio.coroutine
    def render_get(self, request):
        yield from asyncio.sleep(0.2)
        # 1.6kb
        payload = b"0123456789----------" * 80
        return aiocoap.Message(code=aiocoap.CONTENT, payload=payload)
class ReplacingResource(aiocoap.resource.Resource):
    """Stores PUT bodies with ``b'0'`` mapped to ``b'O'`` and echoes them on
    GET; POST applies the same substitution without storing anything."""

    @asyncio.coroutine
    def render_get(self, request):
        return aiocoap.Message(code=aiocoap.CONTENT, payload=self.value)

    @asyncio.coroutine
    def render_put(self, request):
        self.value = request.payload.replace(b'0', b'O')
        return aiocoap.Message(code=aiocoap.CHANGED)

    @asyncio.coroutine
    def render_post(self, request):
        substituted = request.payload.replace(b'0', b'O')
        return aiocoap.Message(code=aiocoap.CONTENT, payload=substituted)
class TestingSite(aiocoap.resource.Site):
    """Resource tree exercised by the server/client/proxy test cases."""

    def __init__(self):
        super().__init__()
        for path, res in [
                (('empty',), MultiRepresentationResource()),
                (('slow',), SlowResource()),
                (('big',), BigResource()),
                (('slowbig',), SlowBigResource()),
                (('replacing',), ReplacingResource()),
                ]:
            self.add_resource(path, res)
# helpers
class TypeCounter(object):
    """This is an ExchangeMonitor factory and counts the outcomes of all
    exchanges"""
    def __init__(self):
        # number of exchanges answered with an empty ACK (see the "two
        # exchanges" assertions in TestServer)
        self.empty_ack_count = 0

    def __call__(self, message):
        return self.BoundCounter(self)

    class BoundCounter(aiocoap.protocol.ExchangeMonitor):
        # per-exchange monitor reporting back into the shared counter
        def __init__(self, counter):
            self.counter = counter

        def response(self, message):
            if message.mtype == aiocoap.ACK and message.code == aiocoap.EMPTY:
                self.counter.empty_ack_count += 1
def no_warnings(function):
    """Decorator asserting that the wrapped test logs nothing at WARNING or above."""
    def checked(self, *args, function=function):
        # assertLogs does not work as assertDoesntLog anyway without major
        # tricking, and it interacts badly with WithLogMonitoring as they both
        # try to change the root logger's level.
        already_logged = len(self.handler)
        result = function(self, *args)
        messages = [record.msg
                for record in self.handler[already_logged:]
                if record.levelno >= logging.WARNING]
        self.assertEqual(messages, [], "Function %s had warnings: %s"%(function.__name__, messages))
        return result
    checked.__name__ = function.__name__
    checked.__doc__ = function.__doc__
    return checked
# fixtures
class WithLogMonitoring(unittest.TestCase):
    """Fixture recording every log message into ``self.handler`` so tests
    (via ``no_warnings``) can assert on what was logged."""

    def setUp(self):
        self.handler = self.ListHandler()
        # level 0 lets everything through to our handler
        logging.root.setLevel(0)
        logging.root.addHandler(self.handler)

        super(WithLogMonitoring, self).setUp()

    def tearDown(self):
        super(WithLogMonitoring, self).tearDown()

        logging.root.removeHandler(self.handler)
#
#        formatter = logging.Formatter(fmt='%(levelname)s:%(name)s:%(message)s')
#        print("fyi:\n", "\n".join(formatter.format(x) for x in self.handler if x.name != 'asyncio'))

    class ListHandler(logging.Handler, list):
        # a logging handler that is simultaneously the list of records it saw
        def emit(self, record):
            self.append(record)
class WithAsyncLoop(unittest.TestCase):
    """Fixture exposing the asyncio event loop as ``self.loop``."""
    def setUp(self):
        super(WithAsyncLoop, self).setUp()

        self.loop = asyncio.get_event_loop()
class Destructing(WithLogMonitoring):
    """Mixin verifying that a fixture attribute really gets garbage collected
    after teardown, to catch reference leaks in protocol objects."""

    def _del_to_be_sure(self, attribute):
        # keep only a weak reference, drop the strong one, then check that the
        # object actually dies; if it survives, fail with a referrer dump
        weaksurvivor = weakref.ref(getattr(self, attribute))
        delattr(self, attribute)
        # let everything that gets async-triggered by close() happen
        self.loop.run_until_complete(asyncio.sleep(CLEANUPTIME))
        gc.collect()
        survivor = weaksurvivor()
        if survivor is not None:
            snapshot = lambda: "Referrers: %s\n\nProperties: %s"%(pprint.pformat(gc.get_referrers(survivor)), pprint.pformat(vars(survivor)))
            snapshot1 = snapshot()
            if False: # enable this if you think that a longer timeout would help
                # this helped finding that timer cancellations don't free the
                # callback, but in general, expect to modify this code if you
                # have to read it; this will need adjustment to your current
                # debugging situation
                logging.root.info("Starting extended grace period")
                for i in range(10):
                    self.loop.run_until_complete(asyncio.sleep(1))
                    del survivor
                    gc.collect()
                    survivor = weaksurvivor()
                    logging.root.info("Now %ds into grace period, survivor is %r"%((i+1)/1, survivor))
                    if survivor is None:
                        break
                snapshot2 = snapshot() if survivor else "no survivor"
                snapshotsmessage = "Before extended grace period:\n" + snapshot1 + "\n\nAfter extended grace period:\n" + snapshot2
            else:
                snapshotsmessage = snapshot1
            formatter = logging.Formatter(fmt='%(levelname)s:%(name)s:%(message)s')
            errormessage = "Protocol %s was not garbage collected.\n\n"%attribute + snapshotsmessage + "\n\nLog of the unit test:\n" + "\n".join(formatter.format(x) for x in self.handler)
            self.fail(errormessage)
class WithTestServer(WithAsyncLoop, Destructing):
    """Fixture running a CoAP server that serves the standard TestingSite."""

    def create_testing_site(self):
        # overridden by subclasses that need a different resource tree
        return TestingSite()

    def setUp(self):
        super(WithTestServer, self).setUp()

        self.server = self.loop.run_until_complete(aiocoap.Context.create_server_context(self.create_testing_site()))

    def tearDown(self):
        # let the server receive the acks we just sent
        self.loop.run_until_complete(asyncio.sleep(CLEANUPTIME))
        self.loop.run_until_complete(self.server.shutdown())
        self._del_to_be_sure("server")

        super(WithTestServer, self).tearDown()

    # IPv6 loopback as address and as URI netloc, plus a host name expected to
    # resolve to it (used by TestClient.test_uri_parser)
    serveraddress = "::1"
    servernetloc = "[%s]"%serveraddress
    servernamealias = "ip6-loopback"
class WithClient(WithAsyncLoop, Destructing):
    """Fixture providing a client context as ``self.client``."""

    def setUp(self):
        super(WithClient, self).setUp()

        self.client = self.loop.run_until_complete(aiocoap.Context.create_client_context())

    def tearDown(self):
        self.loop.run_until_complete(self.client.shutdown())
        self._del_to_be_sure("client")

        super(WithClient, self).tearDown()
# test cases
class TestServer(WithTestServer, WithClient):
@no_warnings
def build_request(self):
    """Construct a bare GET message addressed at the test server."""
    request = aiocoap.Message(code=aiocoap.GET)
    request.unresolved_remote = self.servernetloc
    return request
@no_warnings
def fetch_response(self, request, exchange_monitor_factory=lambda x:None):
    """Run *request* to completion and return the response message.

    *exchange_monitor_factory* is passed through to ``request`` so tests can
    count message exchanges (see TypeCounter)."""
    return self.loop.run_until_complete(self.client.request(request, exchange_monitor_factory=exchange_monitor_factory).response)
@no_warnings
def test_empty_accept(self):
    """GET without an Accept option yields the default empty text/plain form."""
    request = self.build_request()
    request.opt.uri_path = ['empty']
    response = self.fetch_response(request)
    self.assertEqual(response.code, aiocoap.CONTENT, "Simple request did not succede")
    self.assertEqual(response.payload, b'', "Simple request gave unexpected result")
@no_warnings
def test_unacceptable_accept(self):
    """An unknown Accept value must be answered with 4.06 Not Acceptable."""
    request = self.build_request()
    request.opt.uri_path = ['empty']
    request.opt.accept = 9999  # no resource serves this content format
    response = self.fetch_response(request)
    self.assertEqual(response.code, aiocoap.NOT_ACCEPTABLE, "Inacceptable request was not not accepted")
@no_warnings
def test_js_accept(self):
    """Accept: application/json selects the JSON representation."""
    request = self.build_request()
    request.opt.uri_path = ['empty']
    request.opt.accept = aiocoap.numbers.media_types_rev['application/json']
    response = self.fetch_response(request)
    self.assertEqual(response.code, aiocoap.CONTENT, "JSON request did not succede")
    self.assertEqual(response.payload, b'{}', "JSON request gave unexpected result")
@no_warnings
def test_nonexisting_resource(self):
    """A path outside the site must be answered with 4.04."""
    request = self.build_request()
    request.opt.uri_path = ['nonexisting']
    response = self.fetch_response(request)
    self.assertEqual(response.code, aiocoap.NOT_FOUND, "Nonexisting resource was not not found")
@no_warnings
def test_spurious_resource(self):
    """A '..' path segment must not escape the resource tree."""
    request = self.build_request()
    request.opt.uri_path = ['..', 'empty']
    response = self.fetch_response(request)
    # different behavior would be ok-ish, as the .. in the request is forbidden, but returning 4.04 is sane here
    self.assertEqual(response.code, aiocoap.NOT_FOUND, "'..' component in path did not get ignored the way it was expected")
@no_warnings
def test_fast_resource(self):
    """A promptly answered request must be piggy-backed (no empty ACK)."""
    request = self.build_request()
    request.opt.uri_path = ['empty']
    counter = TypeCounter()

    response = self.fetch_response(request, counter)

    self.assertEqual(response.code, aiocoap.CONTENT, "Fast request did not succede")
    self.assertEqual(counter.empty_ack_count, 0, "Fast resource had an empty ack")
@no_warnings
def test_slow_resource(self):
    """A delayed answer must be preceded by exactly one empty ACK."""
    request = self.build_request()
    request.opt.uri_path = ['slow']
    counter = TypeCounter()

    response = self.fetch_response(request, counter)

    self.assertEqual(response.code, aiocoap.CONTENT, "Slow request did not succede")
    self.assertEqual(counter.empty_ack_count, 1, "Slow resource was not handled in two exchanges")
@no_warnings
def test_big_resource(self):
    """A large (blockwise) resource arrives complete and supports ETag revalidation."""
    # Plain GET: expect the full 10240-byte payload plus an ETag option.
    request = self.build_request()
    request.opt.uri_path = ['big']
    response = self.fetch_response(request)
    # Fixed typo in failure message: "succede" -> "succeed".
    self.assertEqual(response.code, aiocoap.CONTENT, "Big resource request did not succeed")
    self.assertEqual(len(response.payload), 10240, "Big resource is not as big as expected")
    # assertIsNotNone replaces assertTrue(... != None): idiomatic and gives
    # a better failure message.
    self.assertIsNotNone(response.opt.etag, "Big resource does not provide an ETag")

    # Conditional GET carrying the received ETag: the server should answer
    # 2.03 Valid instead of re-sending the payload.
    request = self.build_request()
    request.opt.uri_path = ['big']
    request.opt.etags = [response.opt.etag]
    response = self.fetch_response(request)
    self.assertEqual(response.code, aiocoap.VALID, "Big resource does not support ETag validation")
    self.assertIsNotNone(response.opt.etag, "Big resource does not send ETag for validation")
@no_warnings
def test_slowbig_resource(self):
    """A slow *and* large resource is delivered completely and in two exchanges."""
    request = self.build_request()
    request.opt.uri_path = ['slowbig']
    counter = TypeCounter()
    response = self.fetch_response(request, counter)
    # Fixed typo in failure message: "succede" -> "succeed".
    self.assertEqual(response.code, aiocoap.CONTENT, "SlowBig resource request did not succeed")
    self.assertEqual(len(response.payload), 1600, "SlowBig resource is not as big as expected")
    self.assertEqual(counter.empty_ack_count, 1, "SlowBig resource was not handled in two exchanges")
@no_warnings
def test_replacing_resource(self):
    """PUT stores a payload; GET and POST then return it with b'0' mapped to b'O'."""
    testpattern = b"01" * 1024

    # Phase 1: PUT the test pattern onto the resource.
    req = self.build_request()
    req.code = aiocoap.PUT
    req.payload = testpattern
    req.opt.uri_path = ['replacing']
    resp = self.fetch_response(req)
    self.assertEqual(resp.code, aiocoap.CHANGED, "PUT did not result in CHANGED")
    self.assertEqual(resp.payload, b"", "PUT has unexpected payload")

    # Phase 2: GET must return the stored pattern with the replacement applied.
    req = self.build_request()
    req.code = aiocoap.GET
    req.opt.uri_path = ['replacing']
    resp = self.fetch_response(req)
    self.assertEqual(resp.code, aiocoap.CONTENT, "Replacing resource could not be GOT (GET'd?) successfully")
    self.assertEqual(resp.payload, testpattern.replace(b"0", b"O"), "Replacing resource did not replace as expected between PUT and GET")

    # Phase 3: POST must echo the transformed request payload.
    req = self.build_request()
    req.code = aiocoap.POST
    req.payload = testpattern
    req.opt.uri_path = ['replacing']
    resp = self.fetch_response(req)
    self.assertEqual(resp.code, aiocoap.CONTENT, "Replacing resource could not be POSTed to successfully")
    self.assertEqual(resp.payload, testpattern.replace(b"0", b"O"), "Replacing resource did not replace as expected when POSTed")
#logging.basicConfig()
#logging.getLogger("coap").setLevel(logging.DEBUG)
#logging.getLogger("coap-server").setLevel(logging.INFO)

# Allow running the test server standalone: bring the fixture up and serve
# until interrupted with Ctrl-C, then tear it down again.
if __name__ == "__main__":
    print("Running test server")
    server = WithTestServer()
    server.setUp()
    try:
        server.loop.run_forever()
    except KeyboardInterrupt:
        print("Shutting down test server")
        server.tearDown()